Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
torvalds
GitHub Repository: torvalds/linux
Path: blob/master/drivers/crypto/allwinner/sun8i-ce/sun8i-ce-core.c
29537 views
1
// SPDX-License-Identifier: GPL-2.0
2
/*
3
* sun8i-ce-core.c - hardware cryptographic offloader for
4
* Allwinner H3/A64/H5/H2+/H6/R40 SoC
5
*
6
* Copyright (C) 2015-2019 Corentin Labbe <[email protected]>
7
*
8
* Core file which registers crypto algorithms supported by the CryptoEngine.
9
*
10
* You could find a link for the datasheet in Documentation/arch/arm/sunxi.rst
11
*/
12
13
#include <crypto/engine.h>
14
#include <crypto/internal/hash.h>
15
#include <crypto/internal/rng.h>
16
#include <crypto/internal/skcipher.h>
17
#include <linux/clk.h>
18
#include <linux/delay.h>
19
#include <linux/dma-mapping.h>
20
#include <linux/err.h>
21
#include <linux/interrupt.h>
22
#include <linux/io.h>
23
#include <linux/irq.h>
24
#include <linux/kernel.h>
25
#include <linux/module.h>
26
#include <linux/of.h>
27
#include <linux/platform_device.h>
28
#include <linux/pm_runtime.h>
29
#include <linux/reset.h>
30
31
#include "sun8i-ce.h"
32
33
/*
34
* mod clock is lower on H3 than other SoC due to some DMA timeout occurring
35
* with high value.
36
* If you want to tune mod clock, loading driver and passing selftest is
37
* insufficient, you need to test with some LUKS test (mount and write to it)
38
*/
39
/*
 * Capability table for the H3 Crypto Engine.
 * alg_cipher[] is indexed by CE_ID_CIPHER_*, alg_hash[] by CE_ID_HASH_*,
 * op_mode[] by CE_ID_OP_* (see sun8i_ce_register_algs()).
 */
static const struct ce_variant ce_h3_variant = {
	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
	},
	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
		CE_ALG_SHA384, CE_ALG_SHA512
	},
	.op_mode = { CE_OP_ECB, CE_OP_CBC
	},
	.ce_clks = {
		{ "bus", 0, 200000000 },
		/* mod kept at 50 MHz on H3 to avoid DMA timeouts, see note above */
		{ "mod", 50000000, 0 },
	},
	.esr = ESR_H3,
	.prng = CE_ALG_PRNG,
	.trng = CE_ID_NOTSUPP,	/* no TRNG on H3 */
};
55
56
/* Capability table for the H5: no SHA384/SHA512, no TRNG. */
static const struct ce_variant ce_h5_variant = {
	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
	},
	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
		CE_ID_NOTSUPP, CE_ID_NOTSUPP
	},
	.op_mode = { CE_OP_ECB, CE_OP_CBC
	},
	.ce_clks = {
		{ "bus", 0, 200000000 },
		{ "mod", 300000000, 0 },
	},
	.esr = ESR_H5,
	.prng = CE_ALG_PRNG,
	.trng = CE_ID_NOTSUPP,
};
72
73
/*
 * Capability table for the H6. The *_t_dlen_* flags select the unit of
 * the length fields in task descriptors (bytes for cipher/PRNG/TRNG,
 * bits for hash) on this die.
 */
static const struct ce_variant ce_h6_variant = {
	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
	},
	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
		CE_ALG_SHA384, CE_ALG_SHA512
	},
	.op_mode = { CE_OP_ECB, CE_OP_CBC
	},
	.cipher_t_dlen_in_bytes = true,
	.hash_t_dlen_in_bits = true,
	.prng_t_dlen_in_bytes = true,
	.trng_t_dlen_in_bytes = true,
	.ce_clks = {
		{ "bus", 0, 200000000 },
		{ "mod", 300000000, 0 },
		{ "ram", 0, 400000000 },
	},
	.esr = ESR_H6,
	.prng = CE_ALG_PRNG_V2,
	.trng = CE_ALG_TRNG_V2,
};
94
95
/*
 * Capability table for the H616. Like the H6 but with a dedicated TRNG
 * clock and the needs_word_addresses flag (used by desc_addr_val() when
 * programming descriptor addresses).
 */
static const struct ce_variant ce_h616_variant = {
	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
	},
	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
		CE_ALG_SHA384, CE_ALG_SHA512
	},
	.op_mode = { CE_OP_ECB, CE_OP_CBC
	},
	.cipher_t_dlen_in_bytes = true,
	.hash_t_dlen_in_bits = true,
	.prng_t_dlen_in_bytes = true,
	.trng_t_dlen_in_bytes = true,
	.needs_word_addresses = true,
	.ce_clks = {
		{ "bus", 0, 200000000 },
		{ "mod", 300000000, 0 },
		{ "ram", 0, 400000000 },
		{ "trng", 0, 0 },
	},
	.esr = ESR_H6,
	.prng = CE_ALG_PRNG_V2,
	.trng = CE_ALG_TRNG_V2,
};
118
119
/* Capability table for the A64: no SHA384/SHA512, no TRNG. */
static const struct ce_variant ce_a64_variant = {
	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
	},
	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
		CE_ID_NOTSUPP, CE_ID_NOTSUPP
	},
	.op_mode = { CE_OP_ECB, CE_OP_CBC
	},
	.ce_clks = {
		{ "bus", 0, 200000000 },
		{ "mod", 300000000, 0 },
	},
	.esr = ESR_A64,
	.prng = CE_ALG_PRNG,
	.trng = CE_ID_NOTSUPP,
};
135
136
/* Capability table for the D1: full hash set plus PRNG and TRNG. */
static const struct ce_variant ce_d1_variant = {
	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
	},
	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
		CE_ALG_SHA384, CE_ALG_SHA512
	},
	.op_mode = { CE_OP_ECB, CE_OP_CBC
	},
	.ce_clks = {
		{ "bus", 0, 200000000 },
		{ "mod", 300000000, 0 },
		{ "ram", 0, 400000000 },
		{ "trng", 0, 0 },
	},
	.esr = ESR_D1,
	.prng = CE_ALG_PRNG,
	.trng = CE_ALG_TRNG,
};
154
155
/* Capability table for the R40: no SHA384/SHA512, no TRNG. */
static const struct ce_variant ce_r40_variant = {
	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
	},
	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
		CE_ID_NOTSUPP, CE_ID_NOTSUPP
	},
	.op_mode = { CE_OP_ECB, CE_OP_CBC
	},
	.ce_clks = {
		{ "bus", 0, 200000000 },
		{ "mod", 300000000, 0 },
	},
	.esr = ESR_R40,
	.prng = CE_ALG_PRNG,
	.trng = CE_ID_NOTSUPP,
};
171
172
static void sun8i_ce_dump_task_descriptors(struct sun8i_ce_flow *chan)
173
{
174
print_hex_dump(KERN_INFO, "TASK: ", DUMP_PREFIX_NONE, 16, 4,
175
chan->tl, sizeof(struct ce_task), false);
176
}
177
178
/*
 * sun8i_ce_get_engine_number() - get the next channel slot.
 *
 * Simple round-robin over the first MAXFLOW - 1 flows; the last flow
 * (flow 3) is reserved for xRNG operations.
 */
int sun8i_ce_get_engine_number(struct sun8i_ce_dev *ce)
{
	return atomic_inc_return(&ce->flow) % (MAXFLOW - 1);
}
187
188
/*
 * sun8i_ce_run_task() - start the task prepared on @flow and wait for its
 * completion interrupt.
 * @ce:   the Crypto Engine device
 * @flow: channel whose descriptor (chanlist[flow].tl) is to be run
 * @name: algorithm name, used only in error messages
 *
 * Returns 0 on success, -EFAULT on completion timeout or when the engine
 * flags an error in CE_ESR.
 */
int sun8i_ce_run_task(struct sun8i_ce_dev *ce, int flow, const char *name)
{
	u32 v;
	int err = 0;

#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG
	ce->chanlist[flow].stat_req++;
#endif

	/* mlock serializes access to the shared CE control registers */
	mutex_lock(&ce->mlock);

	/* Enable the completion interrupt for this flow */
	v = readl(ce->base + CE_ICR);
	v |= 1 << flow;
	writel(v, ce->base + CE_ICR);

	reinit_completion(&ce->chanlist[flow].complete);
	/* Queue the descriptor's DMA address */
	writel(desc_addr_val(ce, ce->chanlist[flow].t_phy), ce->base + CE_TDQ);

	/* status is set back to 1 by ce_irq_handler() on completion */
	ce->chanlist[flow].status = 0;
	/* Be sure all data is written before enabling the task */
	wmb();

	/* Only H6 needs to write a part of t_common_ctl along with "1", but since it is ignored
	 * on older SoCs, we have no reason to complicate things.
	 */
	v = 1 | ((le32_to_cpu(ce->chanlist[flow].tl->t_common_ctl) & 0x7F) << 8);
	writel(v, ce->base + CE_TLR);
	mutex_unlock(&ce->mlock);

	/*
	 * NOTE(review): an interruptible wait can return early on a signal,
	 * which would then be reported below as a DMA timeout — consider
	 * wait_for_completion_timeout(); confirm against upstream history.
	 */
	wait_for_completion_interruptible_timeout(&ce->chanlist[flow].complete,
			msecs_to_jiffies(CE_DMA_TIMEOUT_MS));

	if (ce->chanlist[flow].status == 0) {
		dev_err(ce->dev, "DMA timeout for %s on flow %d\n", name, flow);
		err = -EFAULT;
	}
	/* No need to lock for this read, the channel is locked so
	 * nothing could modify the error value for this channel
	 */
	v = readl(ce->base + CE_ESR);
	/* The layout of the error-status register differs per die */
	switch (ce->variant->esr) {
	case ESR_H3:
		/* Sadly, the error bit is not per flow */
		if (v) {
			dev_err(ce->dev, "CE ERROR: %x for flow %x\n", v, flow);
			sun8i_ce_dump_task_descriptors(&ce->chanlist[flow]);
			err = -EFAULT;
		}
		if (v & CE_ERR_ALGO_NOTSUP)
			dev_err(ce->dev, "CE ERROR: algorithm not supported\n");
		if (v & CE_ERR_DATALEN)
			dev_err(ce->dev, "CE ERROR: data length error\n");
		if (v & CE_ERR_KEYSRAM)
			dev_err(ce->dev, "CE ERROR: keysram access error for AES\n");
		break;
	case ESR_A64:
	case ESR_D1:
	case ESR_H5:
	case ESR_R40:
		/* 4 error bits per flow */
		v >>= (flow * 4);
		v &= 0xF;
		if (v) {
			dev_err(ce->dev, "CE ERROR: %x for flow %x\n", v, flow);
			sun8i_ce_dump_task_descriptors(&ce->chanlist[flow]);
			err = -EFAULT;
		}
		if (v & CE_ERR_ALGO_NOTSUP)
			dev_err(ce->dev, "CE ERROR: algorithm not supported\n");
		if (v & CE_ERR_DATALEN)
			dev_err(ce->dev, "CE ERROR: data length error\n");
		if (v & CE_ERR_KEYSRAM)
			dev_err(ce->dev, "CE ERROR: keysram access error for AES\n");
		break;
	case ESR_H6:
		/* 8 error bits per flow */
		v >>= (flow * 8);
		v &= 0xFF;
		if (v) {
			dev_err(ce->dev, "CE ERROR: %x for flow %x\n", v, flow);
			sun8i_ce_dump_task_descriptors(&ce->chanlist[flow]);
			err = -EFAULT;
		}
		if (v & CE_ERR_ALGO_NOTSUP)
			dev_err(ce->dev, "CE ERROR: algorithm not supported\n");
		if (v & CE_ERR_DATALEN)
			dev_err(ce->dev, "CE ERROR: data length error\n");
		if (v & CE_ERR_KEYSRAM)
			dev_err(ce->dev, "CE ERROR: keysram access error for AES\n");
		if (v & CE_ERR_ADDR_INVALID)
			dev_err(ce->dev, "CE ERROR: address invalid\n");
		if (v & CE_ERR_KEYLADDER)
			dev_err(ce->dev, "CE ERROR: key ladder configuration error\n");
		break;
	}

	return err;
}
284
285
static irqreturn_t ce_irq_handler(int irq, void *data)
286
{
287
struct sun8i_ce_dev *ce = (struct sun8i_ce_dev *)data;
288
int flow = 0;
289
u32 p;
290
291
p = readl(ce->base + CE_ISR);
292
for (flow = 0; flow < MAXFLOW; flow++) {
293
if (p & (BIT(flow))) {
294
writel(BIT(flow), ce->base + CE_ISR);
295
ce->chanlist[flow].status = 1;
296
complete(&ce->chanlist[flow].complete);
297
}
298
}
299
300
return IRQ_HANDLED;
301
}
302
303
/*
 * Table of all algorithms this driver can offer. Each entry carries the
 * CE algorithm/blockmode IDs used to probe variant support in
 * sun8i_ce_register_algs(); entries not supported by the running variant
 * get ->ce cleared there. Hash and PRNG sections are compiled in only
 * when their Kconfig options are enabled.
 */
static struct sun8i_ce_alg_template ce_algs[] = {
/* cbc(aes) */
{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.ce_algo_id = CE_ID_CIPHER_AES,
	.ce_blockmode = CE_ID_OP_CBC,
	.alg.skcipher.base = {
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "cbc-aes-sun8i-ce",
			.cra_priority = 400,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				CRYPTO_ALG_ASYNC |
				CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = sun8i_ce_cipher_init,
			.cra_exit = sun8i_ce_cipher_exit,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= sun8i_ce_aes_setkey,
		.encrypt	= sun8i_ce_skencrypt,
		.decrypt	= sun8i_ce_skdecrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = sun8i_ce_cipher_do_one,
	},
},
/* ecb(aes) */
{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.ce_algo_id = CE_ID_CIPHER_AES,
	.ce_blockmode = CE_ID_OP_ECB,
	.alg.skcipher.base = {
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "ecb-aes-sun8i-ce",
			.cra_priority = 400,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				CRYPTO_ALG_ASYNC |
				CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = sun8i_ce_cipher_init,
			.cra_exit = sun8i_ce_cipher_exit,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.setkey		= sun8i_ce_aes_setkey,
		.encrypt	= sun8i_ce_skencrypt,
		.decrypt	= sun8i_ce_skdecrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = sun8i_ce_cipher_do_one,
	},
},
/* cbc(des3_ede) */
{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.ce_algo_id = CE_ID_CIPHER_DES3,
	.ce_blockmode = CE_ID_OP_CBC,
	.alg.skcipher.base = {
		.base = {
			.cra_name = "cbc(des3_ede)",
			.cra_driver_name = "cbc-des3-sun8i-ce",
			.cra_priority = 400,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				CRYPTO_ALG_ASYNC |
				CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = sun8i_ce_cipher_init,
			.cra_exit = sun8i_ce_cipher_exit,
		},
		.min_keysize	= DES3_EDE_KEY_SIZE,
		.max_keysize	= DES3_EDE_KEY_SIZE,
		.ivsize		= DES3_EDE_BLOCK_SIZE,
		.setkey		= sun8i_ce_des3_setkey,
		.encrypt	= sun8i_ce_skencrypt,
		.decrypt	= sun8i_ce_skdecrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = sun8i_ce_cipher_do_one,
	},
},
/* ecb(des3_ede) */
{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.ce_algo_id = CE_ID_CIPHER_DES3,
	.ce_blockmode = CE_ID_OP_ECB,
	.alg.skcipher.base = {
		.base = {
			.cra_name = "ecb(des3_ede)",
			.cra_driver_name = "ecb-des3-sun8i-ce",
			.cra_priority = 400,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				CRYPTO_ALG_ASYNC |
				CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = sun8i_ce_cipher_init,
			.cra_exit = sun8i_ce_cipher_exit,
		},
		.min_keysize	= DES3_EDE_KEY_SIZE,
		.max_keysize	= DES3_EDE_KEY_SIZE,
		.setkey		= sun8i_ce_des3_setkey,
		.encrypt	= sun8i_ce_skencrypt,
		.decrypt	= sun8i_ce_skdecrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = sun8i_ce_cipher_do_one,
	},
},
#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_HASH
/* md5 */
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ce_algo_id = CE_ID_HASH_MD5,
	.alg.hash.base = {
		.init = sun8i_ce_hash_init,
		.update = sun8i_ce_hash_update,
		.final = sun8i_ce_hash_final,
		.finup = sun8i_ce_hash_finup,
		.digest = sun8i_ce_hash_digest,
		.export = sun8i_ce_hash_export,
		.import = sun8i_ce_hash_import,
		.init_tfm = sun8i_ce_hash_init_tfm,
		.exit_tfm = sun8i_ce_hash_exit_tfm,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct md5_state),
			.base = {
				.cra_name = "md5",
				.cra_driver_name = "md5-sun8i-ce",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
			}
		}
	},
	.alg.hash.op = {
		.do_one_request = sun8i_ce_hash_run,
	},

},
/* sha1 */
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ce_algo_id = CE_ID_HASH_SHA1,
	.alg.hash.base = {
		.init = sun8i_ce_hash_init,
		.update = sun8i_ce_hash_update,
		.final = sun8i_ce_hash_final,
		.finup = sun8i_ce_hash_finup,
		.digest = sun8i_ce_hash_digest,
		.export = sun8i_ce_hash_export,
		.import = sun8i_ce_hash_import,
		.init_tfm = sun8i_ce_hash_init_tfm,
		.exit_tfm = sun8i_ce_hash_exit_tfm,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct sha1_state),
			.base = {
				.cra_name = "sha1",
				.cra_driver_name = "sha1-sun8i-ce",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
			}
		}
	},
	.alg.hash.op = {
		.do_one_request = sun8i_ce_hash_run,
	},
},
/* sha224 */
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ce_algo_id = CE_ID_HASH_SHA224,
	.alg.hash.base = {
		.init = sun8i_ce_hash_init,
		.update = sun8i_ce_hash_update,
		.final = sun8i_ce_hash_final,
		.finup = sun8i_ce_hash_finup,
		.digest = sun8i_ce_hash_digest,
		.export = sun8i_ce_hash_export,
		.import = sun8i_ce_hash_import,
		.init_tfm = sun8i_ce_hash_init_tfm,
		.exit_tfm = sun8i_ce_hash_exit_tfm,
		.halg = {
			.digestsize = SHA224_DIGEST_SIZE,
			.statesize = sizeof(struct sha256_state),
			.base = {
				.cra_name = "sha224",
				.cra_driver_name = "sha224-sun8i-ce",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
			}
		}
	},
	.alg.hash.op = {
		.do_one_request = sun8i_ce_hash_run,
	},
},
/* sha256 */
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ce_algo_id = CE_ID_HASH_SHA256,
	.alg.hash.base = {
		.init = sun8i_ce_hash_init,
		.update = sun8i_ce_hash_update,
		.final = sun8i_ce_hash_final,
		.finup = sun8i_ce_hash_finup,
		.digest = sun8i_ce_hash_digest,
		.export = sun8i_ce_hash_export,
		.import = sun8i_ce_hash_import,
		.init_tfm = sun8i_ce_hash_init_tfm,
		.exit_tfm = sun8i_ce_hash_exit_tfm,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct sha256_state),
			.base = {
				.cra_name = "sha256",
				.cra_driver_name = "sha256-sun8i-ce",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
			}
		}
	},
	.alg.hash.op = {
		.do_one_request = sun8i_ce_hash_run,
	},
},
/* sha384 */
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ce_algo_id = CE_ID_HASH_SHA384,
	.alg.hash.base = {
		.init = sun8i_ce_hash_init,
		.update = sun8i_ce_hash_update,
		.final = sun8i_ce_hash_final,
		.finup = sun8i_ce_hash_finup,
		.digest = sun8i_ce_hash_digest,
		.export = sun8i_ce_hash_export,
		.import = sun8i_ce_hash_import,
		.init_tfm = sun8i_ce_hash_init_tfm,
		.exit_tfm = sun8i_ce_hash_exit_tfm,
		.halg = {
			.digestsize = SHA384_DIGEST_SIZE,
			.statesize = sizeof(struct sha512_state),
			.base = {
				.cra_name = "sha384",
				.cra_driver_name = "sha384-sun8i-ce",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
			}
		}
	},
	.alg.hash.op = {
		.do_one_request = sun8i_ce_hash_run,
	},
},
/* sha512 */
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ce_algo_id = CE_ID_HASH_SHA512,
	.alg.hash.base = {
		.init = sun8i_ce_hash_init,
		.update = sun8i_ce_hash_update,
		.final = sun8i_ce_hash_final,
		.finup = sun8i_ce_hash_finup,
		.digest = sun8i_ce_hash_digest,
		.export = sun8i_ce_hash_export,
		.import = sun8i_ce_hash_import,
		.init_tfm = sun8i_ce_hash_init_tfm,
		.exit_tfm = sun8i_ce_hash_exit_tfm,
		.halg = {
			.digestsize = SHA512_DIGEST_SIZE,
			.statesize = sizeof(struct sha512_state),
			.base = {
				.cra_name = "sha512",
				.cra_driver_name = "sha512-sun8i-ce",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
			}
		}
	},
	.alg.hash.op = {
		.do_one_request = sun8i_ce_hash_run,
	},
},
#endif
#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_PRNG
/* stdrng (hardware PRNG) */
{
	.type = CRYPTO_ALG_TYPE_RNG,
	.alg.rng = {
		.base = {
			.cra_name = "stdrng",
			.cra_driver_name = "sun8i-ce-prng",
			.cra_priority = 300,
			.cra_ctxsize = sizeof(struct sun8i_ce_rng_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_init = sun8i_ce_prng_init,
			.cra_exit = sun8i_ce_prng_exit,
		},
		.generate = sun8i_ce_prng_generate,
		.seed = sun8i_ce_prng_seed,
		.seedsize = PRNG_SEED_SIZE,
	}
},
#endif
};
637
638
/*
 * debugfs "stats" file: per-channel request counts, then per-algorithm
 * request/fallback statistics. Without CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG
 * the channel counters are reported as 0 (note the #ifdef inside the
 * seq_printf() argument list below).
 */
static int sun8i_ce_debugfs_show(struct seq_file *seq, void *v)
{
	struct sun8i_ce_dev *ce __maybe_unused = seq->private;
	unsigned int i;

	for (i = 0; i < MAXFLOW; i++)
		seq_printf(seq, "Channel %d: nreq %lu\n", i,
#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG
			   ce->chanlist[i].stat_req);
#else
			   0ul);
#endif

	for (i = 0; i < ARRAY_SIZE(ce_algs); i++) {
		/* ->ce is NULL for algorithms the variant does not support */
		if (!ce_algs[i].ce)
			continue;
		switch (ce_algs[i].type) {
		case CRYPTO_ALG_TYPE_SKCIPHER:
			seq_printf(seq, "%s %s reqs=%lu fallback=%lu\n",
				   ce_algs[i].alg.skcipher.base.base.cra_driver_name,
				   ce_algs[i].alg.skcipher.base.base.cra_name,
				   ce_algs[i].stat_req, ce_algs[i].stat_fb);
			seq_printf(seq, "\tLast fallback is: %s\n",
				   ce_algs[i].fbname);
			seq_printf(seq, "\tFallback due to 0 length: %lu\n",
				   ce_algs[i].stat_fb_len0);
			seq_printf(seq, "\tFallback due to length !mod16: %lu\n",
				   ce_algs[i].stat_fb_mod16);
			seq_printf(seq, "\tFallback due to length < IV: %lu\n",
				   ce_algs[i].stat_fb_leniv);
			seq_printf(seq, "\tFallback due to source alignment: %lu\n",
				   ce_algs[i].stat_fb_srcali);
			seq_printf(seq, "\tFallback due to dest alignment: %lu\n",
				   ce_algs[i].stat_fb_dstali);
			seq_printf(seq, "\tFallback due to source length: %lu\n",
				   ce_algs[i].stat_fb_srclen);
			seq_printf(seq, "\tFallback due to dest length: %lu\n",
				   ce_algs[i].stat_fb_dstlen);
			seq_printf(seq, "\tFallback due to SG numbers: %lu\n",
				   ce_algs[i].stat_fb_maxsg);
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			seq_printf(seq, "%s %s reqs=%lu fallback=%lu\n",
				   ce_algs[i].alg.hash.base.halg.base.cra_driver_name,
				   ce_algs[i].alg.hash.base.halg.base.cra_name,
				   ce_algs[i].stat_req, ce_algs[i].stat_fb);
			seq_printf(seq, "\tLast fallback is: %s\n",
				   ce_algs[i].fbname);
			seq_printf(seq, "\tFallback due to 0 length: %lu\n",
				   ce_algs[i].stat_fb_len0);
			seq_printf(seq, "\tFallback due to length: %lu\n",
				   ce_algs[i].stat_fb_srclen);
			seq_printf(seq, "\tFallback due to alignment: %lu\n",
				   ce_algs[i].stat_fb_srcali);
			seq_printf(seq, "\tFallback due to SG numbers: %lu\n",
				   ce_algs[i].stat_fb_maxsg);
			break;
		case CRYPTO_ALG_TYPE_RNG:
			seq_printf(seq, "%s %s reqs=%lu bytes=%lu\n",
				   ce_algs[i].alg.rng.base.cra_driver_name,
				   ce_algs[i].alg.rng.base.cra_name,
				   ce_algs[i].stat_req, ce_algs[i].stat_bytes);
			break;
		}
	}
#if defined(CONFIG_CRYPTO_DEV_SUN8I_CE_TRNG) && \
    defined(CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG)
	seq_printf(seq, "HWRNG %lu %lu\n",
		   ce->hwrng_stat_req, ce->hwrng_stat_bytes);
#endif
	return 0;
}
710
711
DEFINE_SHOW_ATTRIBUTE(sun8i_ce_debugfs);
712
713
static void sun8i_ce_free_chanlist(struct sun8i_ce_dev *ce, int i)
714
{
715
while (i >= 0) {
716
crypto_engine_exit(ce->chanlist[i].engine);
717
if (ce->chanlist[i].tl)
718
dma_free_coherent(ce->dev, sizeof(struct ce_task),
719
ce->chanlist[i].tl,
720
ce->chanlist[i].t_phy);
721
i--;
722
}
723
}
724
725
/*
 * Allocate the channel list structure: one crypto engine plus one
 * DMA-coherent task descriptor per flow. On failure everything already
 * set up is undone via sun8i_ce_free_chanlist().
 */
static int sun8i_ce_allocate_chanlist(struct sun8i_ce_dev *ce)
{
	int i, err;

	ce->chanlist = devm_kcalloc(ce->dev, MAXFLOW,
				    sizeof(struct sun8i_ce_flow), GFP_KERNEL);
	if (!ce->chanlist)
		return -ENOMEM;

	for (i = 0; i < MAXFLOW; i++) {
		init_completion(&ce->chanlist[i].complete);

		ce->chanlist[i].engine = crypto_engine_alloc_init(ce->dev, true);
		if (!ce->chanlist[i].engine) {
			dev_err(ce->dev, "Cannot allocate engine\n");
			/* channel i has no engine: unwind only up to i - 1 */
			i--;
			err = -ENOMEM;
			goto error_engine;
		}
		err = crypto_engine_start(ce->chanlist[i].engine);
		if (err) {
			dev_err(ce->dev, "Cannot start engine\n");
			goto error_engine;
		}
		ce->chanlist[i].tl = dma_alloc_coherent(ce->dev,
							sizeof(struct ce_task),
							&ce->chanlist[i].t_phy,
							GFP_KERNEL);
		if (!ce->chanlist[i].tl) {
			dev_err(ce->dev, "Cannot get DMA memory for task %d\n",
				i);
			err = -ENOMEM;
			goto error_engine;
		}
	}
	return 0;
error_engine:
	sun8i_ce_free_chanlist(ce, i);
	return err;
}
768
769
/*
770
* Power management strategy: The device is suspended unless a TFM exists for
771
* one of the algorithms proposed by this driver.
772
*/
773
static int sun8i_ce_pm_suspend(struct device *dev)
774
{
775
struct sun8i_ce_dev *ce = dev_get_drvdata(dev);
776
int i;
777
778
reset_control_assert(ce->reset);
779
for (i = 0; i < CE_MAX_CLOCKS; i++)
780
clk_disable_unprepare(ce->ceclks[i]);
781
return 0;
782
}
783
784
/*
 * Runtime-resume: enable every clock the variant declares, then release
 * the reset line. On any failure, sun8i_ce_pm_suspend() rolls everything
 * back (clk_disable_unprepare() on an unprepared clock slot is the
 * existing behaviour here — slots without a name were never enabled).
 */
static int sun8i_ce_pm_resume(struct device *dev)
{
	struct sun8i_ce_dev *ce = dev_get_drvdata(dev);
	int err, i;

	for (i = 0; i < CE_MAX_CLOCKS; i++) {
		/* unnamed slots are unused for this variant */
		if (!ce->variant->ce_clks[i].name)
			continue;
		err = clk_prepare_enable(ce->ceclks[i]);
		if (err) {
			dev_err(ce->dev, "Cannot prepare_enable %s\n",
				ce->variant->ce_clks[i].name);
			goto error;
		}
	}
	err = reset_control_deassert(ce->reset);
	if (err) {
		dev_err(ce->dev, "Cannot deassert reset control\n");
		goto error;
	}
	return 0;
error:
	sun8i_ce_pm_suspend(dev);
	return err;
}
809
810
static const struct dev_pm_ops sun8i_ce_pm_ops = {
811
SET_RUNTIME_PM_OPS(sun8i_ce_pm_suspend, sun8i_ce_pm_resume, NULL)
812
};
813
814
static int sun8i_ce_pm_init(struct sun8i_ce_dev *ce)
815
{
816
int err;
817
818
pm_runtime_use_autosuspend(ce->dev);
819
pm_runtime_set_autosuspend_delay(ce->dev, 2000);
820
821
err = pm_runtime_set_suspended(ce->dev);
822
if (err)
823
return err;
824
825
err = devm_pm_runtime_enable(ce->dev);
826
if (err)
827
return err;
828
829
return 0;
830
}
831
832
/*
 * Acquire the clocks listed in the variant table and bring each to the
 * rate the variant requests (.freq, when non-zero), warning when the
 * actual rate exceeds the datasheet maximum (.max_freq).
 *
 * Returns 0 on success, a negative errno when a clock is missing or
 * reports a zero rate.
 */
static int sun8i_ce_get_clks(struct sun8i_ce_dev *ce)
{
	unsigned long cr;
	int err, i;

	for (i = 0; i < CE_MAX_CLOCKS; i++) {
		/* unnamed slots are unused for this variant */
		if (!ce->variant->ce_clks[i].name)
			continue;
		ce->ceclks[i] = devm_clk_get(ce->dev, ce->variant->ce_clks[i].name);
		if (IS_ERR(ce->ceclks[i])) {
			err = PTR_ERR(ce->ceclks[i]);
			dev_err(ce->dev, "Cannot get %s CE clock err=%d\n",
				ce->variant->ce_clks[i].name, err);
			return err;
		}
		cr = clk_get_rate(ce->ceclks[i]);
		if (!cr)
			return -EINVAL;
		if (ce->variant->ce_clks[i].freq > 0 &&
		    cr != ce->variant->ce_clks[i].freq) {
			dev_info(ce->dev, "Set %s clock to %lu (%lu Mhz) from %lu (%lu Mhz)\n",
				 ce->variant->ce_clks[i].name,
				 ce->variant->ce_clks[i].freq,
				 ce->variant->ce_clks[i].freq / 1000000,
				 cr, cr / 1000000);
			err = clk_set_rate(ce->ceclks[i], ce->variant->ce_clks[i].freq);
			/* rate-change failure is reported but not fatal */
			if (err)
				dev_err(ce->dev, "Fail to set %s clk speed to %lu hz\n",
					ce->variant->ce_clks[i].name,
					ce->variant->ce_clks[i].freq);
		}
		if (ce->variant->ce_clks[i].max_freq > 0 &&
		    cr > ce->variant->ce_clks[i].max_freq)
			dev_warn(ce->dev, "Frequency for %s (%lu hz) is higher than datasheet's recommendation (%lu hz)",
				 ce->variant->ce_clks[i].name, cr,
				 ce->variant->ce_clks[i].max_freq);
	}
	return 0;
}
871
872
static int sun8i_ce_register_algs(struct sun8i_ce_dev *ce)
873
{
874
int ce_method, err, id;
875
unsigned int i;
876
877
for (i = 0; i < ARRAY_SIZE(ce_algs); i++) {
878
ce_algs[i].ce = ce;
879
switch (ce_algs[i].type) {
880
case CRYPTO_ALG_TYPE_SKCIPHER:
881
id = ce_algs[i].ce_algo_id;
882
ce_method = ce->variant->alg_cipher[id];
883
if (ce_method == CE_ID_NOTSUPP) {
884
dev_dbg(ce->dev,
885
"DEBUG: Algo of %s not supported\n",
886
ce_algs[i].alg.skcipher.base.base.cra_name);
887
ce_algs[i].ce = NULL;
888
break;
889
}
890
id = ce_algs[i].ce_blockmode;
891
ce_method = ce->variant->op_mode[id];
892
if (ce_method == CE_ID_NOTSUPP) {
893
dev_dbg(ce->dev, "DEBUG: Blockmode of %s not supported\n",
894
ce_algs[i].alg.skcipher.base.base.cra_name);
895
ce_algs[i].ce = NULL;
896
break;
897
}
898
dev_info(ce->dev, "Register %s\n",
899
ce_algs[i].alg.skcipher.base.base.cra_name);
900
err = crypto_engine_register_skcipher(&ce_algs[i].alg.skcipher);
901
if (err) {
902
dev_err(ce->dev, "ERROR: Fail to register %s\n",
903
ce_algs[i].alg.skcipher.base.base.cra_name);
904
ce_algs[i].ce = NULL;
905
return err;
906
}
907
break;
908
case CRYPTO_ALG_TYPE_AHASH:
909
id = ce_algs[i].ce_algo_id;
910
ce_method = ce->variant->alg_hash[id];
911
if (ce_method == CE_ID_NOTSUPP) {
912
dev_info(ce->dev,
913
"DEBUG: Algo of %s not supported\n",
914
ce_algs[i].alg.hash.base.halg.base.cra_name);
915
ce_algs[i].ce = NULL;
916
break;
917
}
918
dev_info(ce->dev, "Register %s\n",
919
ce_algs[i].alg.hash.base.halg.base.cra_name);
920
err = crypto_engine_register_ahash(&ce_algs[i].alg.hash);
921
if (err) {
922
dev_err(ce->dev, "ERROR: Fail to register %s\n",
923
ce_algs[i].alg.hash.base.halg.base.cra_name);
924
ce_algs[i].ce = NULL;
925
return err;
926
}
927
break;
928
case CRYPTO_ALG_TYPE_RNG:
929
if (ce->variant->prng == CE_ID_NOTSUPP) {
930
dev_info(ce->dev,
931
"DEBUG: Algo of %s not supported\n",
932
ce_algs[i].alg.rng.base.cra_name);
933
ce_algs[i].ce = NULL;
934
break;
935
}
936
dev_info(ce->dev, "Register %s\n",
937
ce_algs[i].alg.rng.base.cra_name);
938
err = crypto_register_rng(&ce_algs[i].alg.rng);
939
if (err) {
940
dev_err(ce->dev, "Fail to register %s\n",
941
ce_algs[i].alg.rng.base.cra_name);
942
ce_algs[i].ce = NULL;
943
}
944
break;
945
default:
946
ce_algs[i].ce = NULL;
947
dev_err(ce->dev, "ERROR: tried to register an unknown algo\n");
948
}
949
}
950
return 0;
951
}
952
953
/*
 * Unregister every algorithm that sun8i_ce_register_algs() successfully
 * registered (entries with ->ce still set).
 */
static void sun8i_ce_unregister_algs(struct sun8i_ce_dev *ce)
{
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(ce_algs); i++) {
		if (!ce_algs[i].ce)
			continue;
		switch (ce_algs[i].type) {
		case CRYPTO_ALG_TYPE_SKCIPHER:
			dev_info(ce->dev, "Unregister %d %s\n", i,
				 ce_algs[i].alg.skcipher.base.base.cra_name);
			crypto_engine_unregister_skcipher(&ce_algs[i].alg.skcipher);
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			dev_info(ce->dev, "Unregister %d %s\n", i,
				 ce_algs[i].alg.hash.base.halg.base.cra_name);
			crypto_engine_unregister_ahash(&ce_algs[i].alg.hash);
			break;
		case CRYPTO_ALG_TYPE_RNG:
			dev_info(ce->dev, "Unregister %d %s\n", i,
				 ce_algs[i].alg.rng.base.cra_name);
			crypto_unregister_rng(&ce_algs[i].alg.rng);
			break;
		}
	}
}
979
980
/*
 * Probe: map the device, acquire clocks/reset/IRQ, allocate the flow
 * channels, set up runtime PM, register all supported algorithms, and
 * (optionally) the hwrng and debugfs stats file.
 */
static int sun8i_ce_probe(struct platform_device *pdev)
{
	struct sun8i_ce_dev *ce;
	int err, irq;
	u32 v;

	ce = devm_kzalloc(&pdev->dev, sizeof(*ce), GFP_KERNEL);
	if (!ce)
		return -ENOMEM;

	ce->dev = &pdev->dev;
	platform_set_drvdata(pdev, ce);

	ce->variant = of_device_get_match_data(&pdev->dev);
	if (!ce->variant) {
		dev_err(&pdev->dev, "Missing Crypto Engine variant\n");
		return -EINVAL;
	}

	ce->base = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(ce->base))
		return PTR_ERR(ce->base);

	err = sun8i_ce_get_clks(ce);
	if (err)
		return err;

	/* Get Non Secure IRQ */
	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	ce->reset = devm_reset_control_get(&pdev->dev, NULL);
	if (IS_ERR(ce->reset))
		return dev_err_probe(&pdev->dev, PTR_ERR(ce->reset),
				     "No reset control found\n");

	mutex_init(&ce->mlock);
	mutex_init(&ce->rnglock);

	err = sun8i_ce_allocate_chanlist(ce);
	if (err)
		return err;

	err = sun8i_ce_pm_init(ce);
	if (err)
		goto error_pm;

	err = devm_request_irq(&pdev->dev, irq, ce_irq_handler, 0,
			       "sun8i-ce-ns", ce);
	if (err) {
		dev_err(ce->dev, "Cannot request CryptoEngine Non-secure IRQ (err=%d)\n", err);
		goto error_pm;
	}

	err = sun8i_ce_register_algs(ce);
	if (err)
		goto error_alg;

	/* Power up briefly to read the die ID */
	err = pm_runtime_resume_and_get(ce->dev);
	if (err < 0)
		goto error_alg;

#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_TRNG
	sun8i_ce_hwrng_register(ce);
#endif

	v = readl(ce->base + CE_CTR);
	v >>= CE_DIE_ID_SHIFT;
	v &= CE_DIE_ID_MASK;
	dev_info(&pdev->dev, "CryptoEngine Die ID %x\n", v);

	pm_runtime_put_sync(ce->dev);

	if (IS_ENABLED(CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG)) {
		struct dentry *dbgfs_dir;
		struct dentry *dbgfs_stats __maybe_unused;

		/* Ignore error of debugfs */
		dbgfs_dir = debugfs_create_dir("sun8i-ce", NULL);
		dbgfs_stats = debugfs_create_file("stats", 0444,
						  dbgfs_dir, ce,
						  &sun8i_ce_debugfs_fops);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG
		ce->dbgfs_dir = dbgfs_dir;
		ce->dbgfs_stats = dbgfs_stats;
#endif
	}

	return 0;
error_alg:
	sun8i_ce_unregister_algs(ce);
error_pm:
	sun8i_ce_free_chanlist(ce, MAXFLOW - 1);
	return err;
}
1077
1078
/* Remove: undo everything probe set up, in reverse order. */
static void sun8i_ce_remove(struct platform_device *pdev)
{
	struct sun8i_ce_dev *ce = platform_get_drvdata(pdev);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_TRNG
	sun8i_ce_hwrng_unregister(ce);
#endif

	sun8i_ce_unregister_algs(ce);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG
	debugfs_remove_recursive(ce->dbgfs_dir);
#endif

	sun8i_ce_free_chanlist(ce, MAXFLOW - 1);
}
1094
1095
/* Device-tree match table mapping compatibles to their variant data. */
static const struct of_device_id sun8i_ce_crypto_of_match_table[] = {
	{ .compatible = "allwinner,sun8i-h3-crypto",
	  .data = &ce_h3_variant },
	{ .compatible = "allwinner,sun8i-r40-crypto",
	  .data = &ce_r40_variant },
	{ .compatible = "allwinner,sun20i-d1-crypto",
	  .data = &ce_d1_variant },
	{ .compatible = "allwinner,sun50i-a64-crypto",
	  .data = &ce_a64_variant },
	{ .compatible = "allwinner,sun50i-h5-crypto",
	  .data = &ce_h5_variant },
	{ .compatible = "allwinner,sun50i-h6-crypto",
	  .data = &ce_h6_variant },
	{ .compatible = "allwinner,sun50i-h616-crypto",
	  .data = &ce_h616_variant },
	{}
};
1112
MODULE_DEVICE_TABLE(of, sun8i_ce_crypto_of_match_table);
1113
1114
/* Platform driver glue; PM callbacks come from sun8i_ce_pm_ops above. */
static struct platform_driver sun8i_ce_driver = {
	.probe		 = sun8i_ce_probe,
	.remove		 = sun8i_ce_remove,
	.driver		 = {
		.name		= "sun8i-ce",
		.pm		= &sun8i_ce_pm_ops,
		.of_match_table	= sun8i_ce_crypto_of_match_table,
	},
};
1123
1124
module_platform_driver(sun8i_ce_driver);
1125
1126
MODULE_DESCRIPTION("Allwinner Crypto Engine cryptographic offloader");
1127
MODULE_LICENSE("GPL");
1128
MODULE_AUTHOR("Corentin Labbe <[email protected]>");
1129
1130