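/*
 * Support for the CRYP crypto engine found on ST-Ericsson Ux500 platforms.
 * The hardware accelerates AES (ECB, CBC, CTR), DES and 3DES (ECB, CBC) and
 * can be run in polling, interrupt or DMA mode, selected with the cryp_mode
 * module parameter.
 */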
#include <linux/clk.h>
#include <linux/completion.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/irqreturn.h>
#include <linux/kernel.h>
#include <linux/klist.h>
#include <linux/module.h>
#include <linux/mod_devicetable.h>
#include <linux/platform_device.h>
#include <linux/regulator/consumer.h>
#include <linux/semaphore.h>
#include <linux/platform_data/dma-ste-dma40.h>

#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/internal/des.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>

#include <linux/platform_data/crypto-ux500.h>

#include "cryp_p.h"
#include "cryp.h"

#define CRYP_MAX_KEY_SIZE	32
#define BYTES_PER_WORD		4

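/*
 * cryp_mode selects how data is moved through the engine (CRYP_MODE_POLLING,
 * CRYP_MODE_INTERRUPT or CRYP_MODE_DMA) and is settable as a module
 * parameter. session_id is a global counter used to detect whether a
 * software context still matches what is programmed into the hardware.
 */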
static int cryp_mode;
static atomic_t session_id;

/* DMA channel configuration, taken from the platform data in probe() */
static struct stedma40_chan_cfg *mem_to_engine;
static struct stedma40_chan_cfg *engine_to_mem;

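/**
 * struct cryp_driver_data - data common to all CRYP devices handled by
 *			     this driver.
 * @device_list:	list of registered CRYP devices to pick from.
 * @device_allocation:	counting semaphore guarding device allocation;
 *			initialized to 0 and upped once per probed device.
 */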
struct cryp_driver_data {
	struct klist device_list;
	struct semaphore device_allocation;
};

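/**
 * struct cryp_ctx - per-transform crypto context
 * @config:	algorithm mode, direction and key size for the hardware.
 * @key:	raw key material.
 * @keylen:	length of @key in bytes.
 * @iv:		initialization vector for the current request, if any.
 * @indata:	pointer to the data currently being fed to the engine.
 * @outdata:	pointer to where processed data is written.
 * @datalen:	remaining input length in bytes.
 * @outlen:	remaining output length in bytes.
 * @blocksize:	cipher block size in bytes.
 * @updated:	set once the hardware context has been saved for this ctx.
 * @dev_ctx:	saved device (register) context.
 * @device:	CRYP device currently serving this context, if any.
 * @session_id:	snapshot of the global session id when the context was loaded.
 */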
struct cryp_ctx {
	struct cryp_config config;
	u8 key[CRYP_MAX_KEY_SIZE];
	u32 keylen;
	u8 *iv;
	const u8 *indata;
	u8 *outdata;
	u32 datalen;
	u32 outlen;
	u32 blocksize;
	u8 updated;
	struct cryp_device_context dev_ctx;
	struct cryp_device_data *device;
	u32 session_id;
};

static struct cryp_driver_data driver_data;

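/**
 * swap_bits_in_byte - mirror the bits in a byte
 * @b: the byte to be mirrored
 *
 * Bit 0 ends up in bit 7, bit 1 in bit 6 and so on. The swap is done in two
 * halves: n1 collects the upper nibble shifted right step by step, n2 the
 * lower nibble shifted left, and the two halves are OR:ed together at the end.
 */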
static inline u8 swap_bits_in_byte(u8 b)
{
#define R_SHIFT_4_MASK  0xc0 /* Bits 6 and 7, right shift 4 */
#define R_SHIFT_2_MASK  0x28 /* (After right shift 4) bits 3 and 5, right shift 2 */
#define R_SHIFT_1_MASK  0x1e /* (After right shift 2) bits 1-4, right shift 1 */
#define L_SHIFT_4_MASK  0x03 /* Bits 0 and 1, left shift 4 */
#define L_SHIFT_2_MASK  0x14 /* (After left shift 4) bits 2 and 4, left shift 2 */
#define L_SHIFT_1_MASK  0x78 /* (After left shift 2) bits 3-6, left shift 1 */

	u8 n1;
	u8 n2;

	/* Swap most significant nibble */
	n1 = ((b  & R_SHIFT_4_MASK) >> 4) | (b  & ~(R_SHIFT_4_MASK >> 4));
	n1 = ((n1 & R_SHIFT_2_MASK) >> 2) | (n1 & ~(R_SHIFT_2_MASK >> 2));
	n1 = (n1  & R_SHIFT_1_MASK) >> 1;

	/* Swap least significant nibble */
	n2 = ((b  & L_SHIFT_4_MASK) << 4) | (b  & ~(L_SHIFT_4_MASK << 4));
	n2 = ((n2 & L_SHIFT_2_MASK) << 2) | (n2 & ~(L_SHIFT_2_MASK << 2));
	n2 = (n2  & L_SHIFT_1_MASK) << 1;

	return n1 | n2;
}

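/**
 * swap_words_in_key_and_bits_in_byte - mirror the bits of every byte and
 *					reverse the word order of a key.
 * @in:	 key as supplied by the crypto API.
 * @out: key in the bit and word order expected by the AES key registers.
 * @len: key length in bytes, a multiple of BYTES_PER_WORD.
 */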
static inline void swap_words_in_key_and_bits_in_byte(const u8 *in,
						       u8 *out, u32 len)
{
	unsigned int i = 0;
	int j;
	int index = 0;

	j = len - BYTES_PER_WORD;
	while (j >= 0) {
		for (i = 0; i < BYTES_PER_WORD; i++) {
			index = len - j - BYTES_PER_WORD + i;
			out[j + i] =
				swap_bits_in_byte(in[index]);
		}
		j -= BYTES_PER_WORD;
	}
}

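/**
 * add_session_id - bump the global session id and tag the context with it.
 * @ctx: context to update.
 *
 * 0 is never handed out, since that is the default value of an unloaded
 * software context, so the counter is incremented once more whenever it
 * wraps to 0.
 */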
static void add_session_id(struct cryp_ctx *ctx)
{
	if (unlikely(atomic_inc_and_test(&session_id)))
		atomic_inc(&session_id);

	ctx->session_id = atomic_read(&session_id);
}

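/*
 * Interrupt mode: feed one cipher block at a time into the input FIFO and
 * drain one block from the output FIFO, disabling the respective interrupt
 * source once the whole request has been transferred. For AES XTS the engine
 * is kicked explicitly after each input block.
 */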
static irqreturn_t cryp_interrupt_handler(int irq, void *param)
{
	struct cryp_ctx *ctx;
	int count;
	struct cryp_device_data *device_data;

	if (param == NULL) {
		BUG_ON(!param);
		return IRQ_HANDLED;
	}

	device_data = (struct cryp_device_data *)param;

	ctx = device_data->current_ctx;

	if (ctx == NULL) {
		BUG_ON(!ctx);
		return IRQ_HANDLED;
	}

	dev_dbg(ctx->device->dev, "[%s] (len: %d) %s, ", __func__, ctx->outlen,
		cryp_pending_irq_src(device_data, CRYP_IRQ_SRC_OUTPUT_FIFO) ?
		"out" : "in");

	if (cryp_pending_irq_src(device_data,
				 CRYP_IRQ_SRC_OUTPUT_FIFO)) {
		if (ctx->outlen / ctx->blocksize > 0) {
			count = ctx->blocksize / 4;

			readsl(&device_data->base->dout, ctx->outdata, count);
			ctx->outdata += count;
			ctx->outlen -= count;

			if (ctx->outlen == 0) {
				cryp_disable_irq_src(device_data,
						     CRYP_IRQ_SRC_OUTPUT_FIFO);
			}
		}
	} else if (cryp_pending_irq_src(device_data,
					CRYP_IRQ_SRC_INPUT_FIFO)) {
		if (ctx->datalen / ctx->blocksize > 0) {
			count = ctx->blocksize / 4;

			writesl(&device_data->base->din, ctx->indata, count);

			ctx->indata += count;
			ctx->datalen -= count;

			if (ctx->datalen == 0)
				cryp_disable_irq_src(device_data,
						     CRYP_IRQ_SRC_INPUT_FIFO);

			if (ctx->config.algomode == CRYP_ALGO_AES_XTS) {
				CRYP_PUT_BITS(&device_data->base->cr,
					      CRYP_START_ENABLE,
					      CRYP_CR_START_POS,
					      CRYP_CR_START_MASK);

				cryp_wait_until_done(device_data);
			}
		}
	}

	return IRQ_HANDLED;
}

static int mode_is_aes(enum cryp_algo_mode mode)
{
	return	CRYP_ALGO_AES_ECB == mode ||
		CRYP_ALGO_AES_CBC == mode ||
		CRYP_ALGO_AES_CTR == mode ||
		CRYP_ALGO_AES_XTS == mode;
}

static int cfg_iv(struct cryp_device_data *device_data, u32 left, u32 right,
		  enum cryp_init_vector_index index)
{
	struct cryp_init_vector_value vector_value;

	dev_dbg(device_data->dev, "[%s]", __func__);

	vector_value.init_value_left = left;
	vector_value.init_value_right = right;

	return cryp_configure_init_vector(device_data,
					  index,
					  vector_value);
}

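/*
 * Load the IV into the (at most two) 64-bit IV registers, converting from
 * the big-endian byte stream supplied by the crypto API to the CPU-endian
 * words the register interface expects.
 */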
static int cfg_ivs(struct cryp_device_data *device_data, struct cryp_ctx *ctx)
{
	int i;
	int status = 0;
	int num_of_regs = ctx->blocksize / 8;
	__be32 *civ = (__be32 *)ctx->iv;
	u32 iv[AES_BLOCK_SIZE / 4];

	dev_dbg(device_data->dev, "[%s]", __func__);

	/*
	 * Since we loop on num_of_regs, guard against an incorrect blocksize
	 * that would make us call cfg_iv() with an index greater than 2,
	 * which is an error.
	 */
	if (num_of_regs > 2) {
		dev_err(device_data->dev, "[%s] Incorrect blocksize %d",
			__func__, ctx->blocksize);
		return -EINVAL;
	}

	for (i = 0; i < ctx->blocksize / 4; i++)
		iv[i] = be32_to_cpup(civ + i);

	for (i = 0; i < num_of_regs; i++) {
		status = cfg_iv(device_data, iv[i*2], iv[i*2+1],
				(enum cryp_init_vector_index) i);
		if (status != 0)
			return status;
	}
	return status;
}

static int set_key(struct cryp_device_data *device_data,
		   u32 left_key,
		   u32 right_key,
		   enum cryp_key_reg_index index)
{
	struct cryp_key_value key_value;
	int cryp_error;

	dev_dbg(device_data->dev, "[%s]", __func__);

	key_value.key_value_left = left_key;
	key_value.key_value_right = right_key;

	cryp_error = cryp_configure_key_values(device_data,
					       index,
					       key_value);
	if (cryp_error != 0)
		dev_err(device_data->dev, "[%s]: "
			"cryp_configure_key_values() failed!", __func__);

	return cryp_error;
}

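/*
 * Program the key registers. AES keys are bit- and word-swapped to match the
 * hardware key layout, DES/3DES keys are only converted from big-endian
 * words, and the result is written two 32-bit words at a time.
 */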
static int cfg_keys(struct cryp_ctx *ctx)
{
	int i;
	int num_of_regs = ctx->keylen / 8;
	u32 swapped_key[CRYP_MAX_KEY_SIZE / 4];
	__be32 *ckey = (__be32 *)ctx->key;
	int cryp_error = 0;

	dev_dbg(ctx->device->dev, "[%s]", __func__);

	if (mode_is_aes(ctx->config.algomode)) {
		swap_words_in_key_and_bits_in_byte((u8 *)ckey,
						   (u8 *)swapped_key,
						   ctx->keylen);
	} else {
		for (i = 0; i < ctx->keylen / 4; i++)
			swapped_key[i] = be32_to_cpup(ckey + i);
	}

	for (i = 0; i < num_of_regs; i++) {
		cryp_error = set_key(ctx->device,
				     swapped_key[i * 2],
				     swapped_key[i * 2 + 1],
				     (enum cryp_key_reg_index) i);

		if (cryp_error != 0) {
			dev_err(ctx->device->dev, "[%s]: set_key() failed!",
				__func__);
			return cryp_error;
		}
	}
	return cryp_error;
}

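/*
 * Program the hardware for the current request: select the transfer mode and
 * either load keys/IV/configuration from scratch (first use of this context),
 * restore a previously saved register context (another context has used the
 * device in between), or simply re-enable the engine.
 */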
static int cryp_setup_context(struct cryp_ctx *ctx,
			      struct cryp_device_data *device_data)
{
	u32 control_register = CRYP_CR_DEFAULT;

	switch (cryp_mode) {
	case CRYP_MODE_INTERRUPT:
		writel_relaxed(CRYP_IMSC_DEFAULT, &device_data->base->imsc);
		break;

	case CRYP_MODE_DMA:
		writel_relaxed(CRYP_DMACR_DEFAULT, &device_data->base->dmacr);
		break;

	default:
		break;
	}

	if (ctx->updated == 0) {
		cryp_flush_inoutfifo(device_data);
		if (cfg_keys(ctx) != 0) {
			dev_err(ctx->device->dev, "[%s]: cfg_keys failed!",
				__func__);
			return -EINVAL;
		}

		if (ctx->iv &&
		    CRYP_ALGO_AES_ECB != ctx->config.algomode &&
		    CRYP_ALGO_DES_ECB != ctx->config.algomode &&
		    CRYP_ALGO_TDES_ECB != ctx->config.algomode) {
			if (cfg_ivs(device_data, ctx) != 0)
				return -EPERM;
		}

		cryp_set_configuration(device_data, &ctx->config,
				       &control_register);
		add_session_id(ctx);
	} else if (ctx->updated == 1 &&
		   ctx->session_id != atomic_read(&session_id)) {
		cryp_flush_inoutfifo(device_data);
		cryp_restore_device_context(device_data, &ctx->dev_ctx);

		add_session_id(ctx);
		control_register = ctx->dev_ctx.cr;
	} else
		control_register = ctx->dev_ctx.cr;

	writel(control_register |
	       (CRYP_CRYPEN_ENABLE << CRYP_CR_CRYPEN_POS),
	       &device_data->base->cr);

	return 0;
}

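/*
 * Claim an idle CRYP device for @ctx. Blocks on the device-allocation
 * semaphore until a device is free and then walks the device list for one
 * without a current context; the matching up() is done by the caller once
 * the request has finished.
 */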
static int cryp_get_device_data(struct cryp_ctx *ctx,
				struct cryp_device_data **device_data)
{
	int ret;
	struct klist_iter device_iterator;
	struct klist_node *device_node;
	struct cryp_device_data *local_device_data = NULL;
	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	/* Wait until a device is available */
	ret = down_interruptible(&driver_data.device_allocation);
	if (ret)
		return ret;	/* Interrupted */

	/* Select a device */
	klist_iter_init(&driver_data.device_list, &device_iterator);

	device_node = klist_next(&device_iterator);
	while (device_node) {
		local_device_data = container_of(device_node,
					struct cryp_device_data, list_node);
		spin_lock(&local_device_data->ctx_lock);
		/* current_ctx allocates a device, NULL = unallocated */
		if (local_device_data->current_ctx) {
			device_node = klist_next(&device_iterator);
		} else {
			local_device_data->current_ctx = ctx;
			ctx->device = local_device_data;
			spin_unlock(&local_device_data->ctx_lock);
			break;
		}
		spin_unlock(&local_device_data->ctx_lock);
	}
	klist_iter_exit(&device_iterator);

	if (!device_node) {
		/*
		 * No free device found. Since the semaphore was taken with
		 * down_interruptible() above, this should not be able to
		 * happen: the number of devices in device_list is expected to
		 * match the number of resources in device_allocation.
		 */
		return -EBUSY;
	}

	*device_data = local_device_data;

	return 0;
}

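/*
 * Request one DMA channel per direction from the DMA40 controller and
 * configure both for 16-bit accesses against the CRYP TX/RX FIFOs, using the
 * channel configurations handed over through platform data.
 */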
static void cryp_dma_setup_channel(struct cryp_device_data *device_data,
				   struct device *dev)
{
	struct dma_slave_config mem2cryp = {
		.direction = DMA_MEM_TO_DEV,
		.dst_addr = device_data->phybase + CRYP_DMA_TX_FIFO,
		.dst_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES,
		.dst_maxburst = 4,
	};
	struct dma_slave_config cryp2mem = {
		.direction = DMA_DEV_TO_MEM,
		.src_addr = device_data->phybase + CRYP_DMA_RX_FIFO,
		.src_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES,
		.src_maxburst = 4,
	};

	dma_cap_zero(device_data->dma.mask);
	dma_cap_set(DMA_SLAVE, device_data->dma.mask);

	device_data->dma.cfg_mem2cryp = mem_to_engine;
	device_data->dma.chan_mem2cryp =
		dma_request_channel(device_data->dma.mask,
				    stedma40_filter,
				    device_data->dma.cfg_mem2cryp);

	device_data->dma.cfg_cryp2mem = engine_to_mem;
	device_data->dma.chan_cryp2mem =
		dma_request_channel(device_data->dma.mask,
				    stedma40_filter,
				    device_data->dma.cfg_cryp2mem);

	dmaengine_slave_config(device_data->dma.chan_mem2cryp, &mem2cryp);
	dmaengine_slave_config(device_data->dma.chan_cryp2mem, &cryp2mem);

	init_completion(&device_data->dma.cryp_dma_complete);
}

static void cryp_dma_out_callback(void *data)
{
	struct cryp_ctx *ctx = (struct cryp_ctx *) data;
	dev_dbg(ctx->device->dev, "[%s]: ", __func__);

	complete(&ctx->device->dma.cryp_dma_complete);
}

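/*
 * Map the scatterlist for the requested direction and submit a slave DMA
 * transfer to or from the engine FIFOs. Only the device-to-memory descriptor
 * gets a completion callback; ablk_dma_crypt() waits on that completion.
 */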
static int cryp_set_dma_transfer(struct cryp_ctx *ctx,
				 struct scatterlist *sg,
				 int len,
				 enum dma_data_direction direction)
{
	struct dma_async_tx_descriptor *desc;
	struct dma_chan *channel = NULL;
	dma_cookie_t cookie;

	dev_dbg(ctx->device->dev, "[%s]: ", __func__);

	if (unlikely(!IS_ALIGNED((unsigned long)sg, 4))) {
		dev_err(ctx->device->dev, "[%s]: Data in sg list isn't "
			"aligned! Addr: 0x%08lx", __func__, (unsigned long)sg);
		return -EFAULT;
	}

	switch (direction) {
	case DMA_TO_DEVICE:
		channel = ctx->device->dma.chan_mem2cryp;
		ctx->device->dma.sg_src = sg;
		ctx->device->dma.sg_src_len = dma_map_sg(channel->device->dev,
						 ctx->device->dma.sg_src,
						 ctx->device->dma.nents_src,
						 direction);

		if (!ctx->device->dma.sg_src_len) {
			dev_dbg(ctx->device->dev,
				"[%s]: Could not map the sg list (TO_DEVICE)",
				__func__);
			return -EFAULT;
		}

		dev_dbg(ctx->device->dev, "[%s]: Setting up DMA for buffer "
			"(TO_DEVICE)", __func__);

		desc = dmaengine_prep_slave_sg(channel,
				ctx->device->dma.sg_src,
				ctx->device->dma.sg_src_len,
				DMA_MEM_TO_DEV, DMA_CTRL_ACK);
		break;

	case DMA_FROM_DEVICE:
		channel = ctx->device->dma.chan_cryp2mem;
		ctx->device->dma.sg_dst = sg;
		ctx->device->dma.sg_dst_len = dma_map_sg(channel->device->dev,
						 ctx->device->dma.sg_dst,
						 ctx->device->dma.nents_dst,
						 direction);

		if (!ctx->device->dma.sg_dst_len) {
			dev_dbg(ctx->device->dev,
				"[%s]: Could not map the sg list (FROM_DEVICE)",
				__func__);
			return -EFAULT;
		}

		dev_dbg(ctx->device->dev, "[%s]: Setting up DMA for buffer "
			"(FROM_DEVICE)", __func__);

		desc = dmaengine_prep_slave_sg(channel,
				ctx->device->dma.sg_dst,
				ctx->device->dma.sg_dst_len,
				DMA_DEV_TO_MEM,
				DMA_CTRL_ACK |
				DMA_PREP_INTERRUPT);

		desc->callback = cryp_dma_out_callback;
		desc->callback_param = ctx;
		break;

	default:
		dev_dbg(ctx->device->dev, "[%s]: Invalid DMA direction",
			__func__);
		return -EFAULT;
	}

	cookie = dmaengine_submit(desc);
	if (dma_submit_error(cookie)) {
		dev_dbg(ctx->device->dev, "[%s]: DMA submission failed\n",
			__func__);
		return cookie;
	}

	dma_async_issue_pending(channel);

	return 0;
}

static void cryp_dma_done(struct cryp_ctx *ctx)
{
	struct dma_chan *chan;

	dev_dbg(ctx->device->dev, "[%s]: ", __func__);

	chan = ctx->device->dma.chan_mem2cryp;
	dmaengine_terminate_all(chan);
	dma_unmap_sg(chan->device->dev, ctx->device->dma.sg_src,
		     ctx->device->dma.nents_src, DMA_TO_DEVICE);

	chan = ctx->device->dma.chan_cryp2mem;
	dmaengine_terminate_all(chan);
	dma_unmap_sg(chan->device->dev, ctx->device->dma.sg_dst,
		     ctx->device->dma.nents_dst, DMA_FROM_DEVICE);
}

static int cryp_dma_write(struct cryp_ctx *ctx, struct scatterlist *sg,
			  int len)
{
	int error = cryp_set_dma_transfer(ctx, sg, len, DMA_TO_DEVICE);
	dev_dbg(ctx->device->dev, "[%s]: ", __func__);

	if (error) {
		dev_dbg(ctx->device->dev, "[%s]: cryp_set_dma_transfer() "
			"failed", __func__);
		return error;
	}

	return len;
}

static int cryp_dma_read(struct cryp_ctx *ctx, struct scatterlist *sg, int len)
{
	int error = cryp_set_dma_transfer(ctx, sg, len, DMA_FROM_DEVICE);
	if (error) {
		dev_dbg(ctx->device->dev, "[%s]: cryp_set_dma_transfer() "
			"failed", __func__);
		return error;
	}

	return len;
}

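/*
 * Polling mode: push one block into the input FIFO, wait for the engine to
 * finish, then read the result back, until the whole request has been
 * processed.
 */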
static void cryp_polling_mode(struct cryp_ctx *ctx,
			      struct cryp_device_data *device_data)
{
	int len = ctx->blocksize / BYTES_PER_WORD;
	int remaining_length = ctx->datalen;
	u32 *indata = (u32 *)ctx->indata;
	u32 *outdata = (u32 *)ctx->outdata;

	while (remaining_length > 0) {
		writesl(&device_data->base->din, indata, len);
		indata += len;
		remaining_length -= (len * BYTES_PER_WORD);
		cryp_wait_until_done(device_data);

		readsl(&device_data->base->dout, outdata, len);
		outdata += len;
		cryp_wait_until_done(device_data);
	}
}

static int cryp_disable_power(struct device *dev,
			      struct cryp_device_data *device_data,
			      bool save_device_context)
{
	int ret = 0;

	dev_dbg(dev, "[%s]", __func__);

	spin_lock(&device_data->power_state_spinlock);
	if (!device_data->power_state)
		goto out;

	spin_lock(&device_data->ctx_lock);
	if (save_device_context && device_data->current_ctx) {
		cryp_save_device_context(device_data,
				&device_data->current_ctx->dev_ctx,
				cryp_mode);
		device_data->restore_dev_ctx = true;
	}
	spin_unlock(&device_data->ctx_lock);

	clk_disable(device_data->clk);
	ret = regulator_disable(device_data->pwr_regulator);
	if (ret)
		dev_err(dev, "[%s]: "
			"regulator_disable() failed!",
			__func__);

	device_data->power_state = false;

out:
	spin_unlock(&device_data->power_state_spinlock);

	return ret;
}

static int cryp_enable_power(
		struct device *dev,
		struct cryp_device_data *device_data,
		bool restore_device_context)
{
	int ret = 0;

	dev_dbg(dev, "[%s]", __func__);

	spin_lock(&device_data->power_state_spinlock);
	if (!device_data->power_state) {
		ret = regulator_enable(device_data->pwr_regulator);
		if (ret) {
			dev_err(dev, "[%s]: regulator_enable() failed!",
				__func__);
			goto out;
		}

		ret = clk_enable(device_data->clk);
		if (ret) {
			dev_err(dev, "[%s]: clk_enable() failed!",
				__func__);
			regulator_disable(device_data->pwr_regulator);
			goto out;
		}
		device_data->power_state = true;
	}

	if (device_data->restore_dev_ctx) {
		spin_lock(&device_data->ctx_lock);
		if (restore_device_context && device_data->current_ctx) {
			device_data->restore_dev_ctx = false;
			cryp_restore_device_context(device_data,
					&device_data->current_ctx->dev_ctx);
		}
		spin_unlock(&device_data->ctx_lock);
	}
out:
	spin_unlock(&device_data->power_state_spinlock);

	return ret;
}

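/*
 * Process ctx->datalen bytes through the engine without using DMA: program
 * the context, run the transfer either from the interrupt handler or in
 * polling mode, and finally save the register context so that the next
 * request on this tfm can skip key/IV programming.
 */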
static int hw_crypt_noxts(struct cryp_ctx *ctx,
			  struct cryp_device_data *device_data)
{
	int ret = 0;

	const u8 *indata = ctx->indata;
	u8 *outdata = ctx->outdata;
	u32 datalen = ctx->datalen;
	u32 outlen = datalen;

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	ctx->outlen = ctx->datalen;

	if (unlikely(!IS_ALIGNED((unsigned long)indata, 4))) {
		pr_debug(DEV_DBG_NAME " [%s]: Data isn't aligned! Addr: "
			 "0x%08lx", __func__, (unsigned long)indata);
		return -EINVAL;
	}

	ret = cryp_setup_context(ctx, device_data);

	if (ret)
		goto out;

	if (cryp_mode == CRYP_MODE_INTERRUPT) {
		cryp_enable_irq_src(device_data, CRYP_IRQ_SRC_INPUT_FIFO |
				    CRYP_IRQ_SRC_OUTPUT_FIFO);

		/*
		 * ctx->outlen is decremented in the interrupt handler; busy
		 * wait here until the whole request has been drained.
		 */
		while (ctx->outlen > 0)
			cpu_relax();
	} else if (cryp_mode == CRYP_MODE_POLLING ||
		   cryp_mode == CRYP_MODE_DMA) {
		/*
		 * Requests that end up here while the driver is in DMA mode
		 * are still handled by polling, since the DMA setup overhead
		 * would eat up the benefit.
		 */
		cryp_polling_mode(ctx, device_data);
	} else {
		dev_err(ctx->device->dev, "[%s]: Invalid operation mode!",
			__func__);
		ret = -EPERM;
		goto out;
	}

	cryp_save_device_context(device_data, &ctx->dev_ctx, cryp_mode);
	ctx->updated = 1;

out:
	ctx->indata = indata;
	ctx->outdata = outdata;
	ctx->datalen = datalen;
	ctx->outlen = outlen;

	return ret;
}

static int get_nents(struct scatterlist *sg, int nbytes)
{
	int nents = 0;

	while (nbytes > 0) {
		nbytes -= sg->length;
		sg = sg_next(sg);
		nents++;
	}

	return nents;
}

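/*
 * DMA path (used for the AES modes only): map source and destination
 * scatterlists, start both DMA directions and wait for the output transfer
 * to complete before releasing the device.
 */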
static int ablk_dma_crypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
	struct cryp_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct cryp_device_data *device_data;

	int bytes_written = 0;
	int bytes_read = 0;
	int ret;

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	ctx->datalen = areq->cryptlen;
	ctx->outlen = areq->cryptlen;

	ret = cryp_get_device_data(ctx, &device_data);
	if (ret)
		return ret;

	ret = cryp_setup_context(ctx, device_data);
	if (ret)
		goto out;

	/* We have the device now, so store the nents in the dma struct. */
	ctx->device->dma.nents_src = get_nents(areq->src, ctx->datalen);
	ctx->device->dma.nents_dst = get_nents(areq->dst, ctx->outlen);

	/* Enable DMA in- and output. */
	cryp_configure_for_dma(device_data, CRYP_DMA_ENABLE_BOTH_DIRECTIONS);

	bytes_written = cryp_dma_write(ctx, areq->src, ctx->datalen);
	bytes_read = cryp_dma_read(ctx, areq->dst, bytes_written);

	wait_for_completion(&ctx->device->dma.cryp_dma_complete);
	cryp_dma_done(ctx);

	cryp_save_device_context(device_data, &ctx->dev_ctx, cryp_mode);
	ctx->updated = 1;

out:
	spin_lock(&device_data->ctx_lock);
	device_data->current_ctx = NULL;
	ctx->device = NULL;
	spin_unlock(&device_data->ctx_lock);

	/*
	 * The down_interruptible part for this semaphore is called in
	 * cryp_get_device_data.
	 */
	up(&driver_data.device_allocation);

	if (unlikely(bytes_written != bytes_read))
		return -EPERM;

	return 0;
}

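/*
 * Non-DMA path: walk the request with the skcipher walk API and feed each
 * chunk (rounded down to whole blocks) to hw_crypt_noxts().
 */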
static int ablk_crypt(struct skcipher_request *areq)
{
	struct skcipher_walk walk;
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
	struct cryp_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct cryp_device_data *device_data;
	unsigned long src_paddr;
	unsigned long dst_paddr;
	int ret;
	int nbytes;

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	ret = cryp_get_device_data(ctx, &device_data);
	if (ret)
		goto out;

	ret = skcipher_walk_async(&walk, areq);

	if (ret) {
		pr_err(DEV_DBG_NAME "[%s]: skcipher_walk_async() failed!",
			__func__);
		goto out;
	}

	while ((nbytes = walk.nbytes) > 0) {
		ctx->iv = walk.iv;
		src_paddr = (page_to_phys(walk.src.phys.page) + walk.src.phys.offset);
		ctx->indata = phys_to_virt(src_paddr);

		dst_paddr = (page_to_phys(walk.dst.phys.page) + walk.dst.phys.offset);
		ctx->outdata = phys_to_virt(dst_paddr);

		ctx->datalen = nbytes - (nbytes % ctx->blocksize);

		ret = hw_crypt_noxts(ctx, device_data);
		if (ret)
			goto out;

		nbytes -= ctx->datalen;
		ret = skcipher_walk_done(&walk, nbytes);
		if (ret)
			goto out;
	}

out:
	/* Release the device */
	spin_lock(&device_data->ctx_lock);
	device_data->current_ctx = NULL;
	ctx->device = NULL;
	spin_unlock(&device_data->ctx_lock);

	/*
	 * The down_interruptible part for this semaphore is called in
	 * cryp_get_device_data.
	 */
	up(&driver_data.device_allocation);

	return ret;
}

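/*
 * setkey callbacks: record the key and key size in the software context and
 * clear the updated flag so the next request reprograms the hardware.
 * DES/3DES keys are first checked by the crypto API key-verification helpers.
 */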
static int aes_skcipher_setkey(struct crypto_skcipher *cipher,
			       const u8 *key, unsigned int keylen)
{
	struct cryp_ctx *ctx = crypto_skcipher_ctx(cipher);

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	switch (keylen) {
	case AES_KEYSIZE_128:
		ctx->config.keysize = CRYP_KEY_SIZE_128;
		break;

	case AES_KEYSIZE_192:
		ctx->config.keysize = CRYP_KEY_SIZE_192;
		break;

	case AES_KEYSIZE_256:
		ctx->config.keysize = CRYP_KEY_SIZE_256;
		break;

	default:
		pr_err(DEV_DBG_NAME "[%s]: Unknown keylen!", __func__);
		return -EINVAL;
	}

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	ctx->updated = 0;

	return 0;
}

static int des_skcipher_setkey(struct crypto_skcipher *cipher,
			       const u8 *key, unsigned int keylen)
{
	struct cryp_ctx *ctx = crypto_skcipher_ctx(cipher);
	int err;

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	err = verify_skcipher_des_key(cipher, key);
	if (err)
		return err;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	ctx->updated = 0;
	return 0;
}

static int des3_skcipher_setkey(struct crypto_skcipher *cipher,
				const u8 *key, unsigned int keylen)
{
	struct cryp_ctx *ctx = crypto_skcipher_ctx(cipher);
	int err;

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	err = verify_skcipher_des3_key(cipher, key);
	if (err)
		return err;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	ctx->updated = 0;
	return 0;
}

static int cryp_blk_encrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
	struct cryp_ctx *ctx = crypto_skcipher_ctx(cipher);

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	ctx->config.algodir = CRYP_ALGORITHM_ENCRYPT;

	/* Only the AES modes use the DMA path. */
	if (cryp_mode == CRYP_MODE_DMA && mode_is_aes(ctx->config.algomode))
		return ablk_dma_crypt(areq);

	/* For everything else, run the non-DMA version. */
	return ablk_crypt(areq);
}

static int cryp_blk_decrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
	struct cryp_ctx *ctx = crypto_skcipher_ctx(cipher);

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	ctx->config.algodir = CRYP_ALGORITHM_DECRYPT;

	/* Only the AES modes use the DMA path. */
	if (cryp_mode == CRYP_MODE_DMA && mode_is_aes(ctx->config.algomode))
		return ablk_dma_crypt(areq);

	/* For everything else, run the non-DMA version. */
	return ablk_crypt(areq);
}

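/**
 * struct cryp_algo_template - driver private wrapping of a skcipher algorithm.
 * @algomode: hardware algorithm mode matching the wrapped algorithm.
 * @skcipher: the skcipher algorithm registered with the crypto API.
 */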
struct cryp_algo_template {
	enum cryp_algo_mode algomode;
	struct skcipher_alg skcipher;
};

static int cryp_init_tfm(struct crypto_skcipher *tfm)
{
	struct cryp_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct cryp_algo_template *cryp_alg = container_of(alg,
							struct cryp_algo_template,
							skcipher);

	ctx->config.algomode = cryp_alg->algomode;
	ctx->blocksize = crypto_skcipher_blocksize(tfm);

	return 0;
}

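/* The algorithms supported by the hardware, exposed as async skciphers. */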
static struct cryp_algo_template cryp_algs[] = {
	{
		.algomode = CRYP_ALGO_AES_ECB,
		.skcipher = {
			.base.cra_name = "ecb(aes)",
			.base.cra_driver_name = "ecb-aes-ux500",
			.base.cra_priority = 300,
			.base.cra_flags = CRYPTO_ALG_ASYNC,
			.base.cra_blocksize = AES_BLOCK_SIZE,
			.base.cra_ctxsize = sizeof(struct cryp_ctx),
			.base.cra_alignmask = 3,
			.base.cra_module = THIS_MODULE,

			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.setkey = aes_skcipher_setkey,
			.encrypt = cryp_blk_encrypt,
			.decrypt = cryp_blk_decrypt,
			.init = cryp_init_tfm,
		}
	},
	{
		.algomode = CRYP_ALGO_AES_CBC,
		.skcipher = {
			.base.cra_name = "cbc(aes)",
			.base.cra_driver_name = "cbc-aes-ux500",
			.base.cra_priority = 300,
			.base.cra_flags = CRYPTO_ALG_ASYNC,
			.base.cra_blocksize = AES_BLOCK_SIZE,
			.base.cra_ctxsize = sizeof(struct cryp_ctx),
			.base.cra_alignmask = 3,
			.base.cra_module = THIS_MODULE,

			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.setkey = aes_skcipher_setkey,
			.encrypt = cryp_blk_encrypt,
			.decrypt = cryp_blk_decrypt,
			.init = cryp_init_tfm,
			.ivsize = AES_BLOCK_SIZE,
		}
	},
	{
		.algomode = CRYP_ALGO_AES_CTR,
		.skcipher = {
			.base.cra_name = "ctr(aes)",
			.base.cra_driver_name = "ctr-aes-ux500",
			.base.cra_priority = 300,
			.base.cra_flags = CRYPTO_ALG_ASYNC,
			.base.cra_blocksize = 1,
			.base.cra_ctxsize = sizeof(struct cryp_ctx),
			.base.cra_alignmask = 3,
			.base.cra_module = THIS_MODULE,

			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.setkey = aes_skcipher_setkey,
			.encrypt = cryp_blk_encrypt,
			.decrypt = cryp_blk_decrypt,
			.init = cryp_init_tfm,
			.ivsize = AES_BLOCK_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		}
	},
	{
		.algomode = CRYP_ALGO_DES_ECB,
		.skcipher = {
			.base.cra_name = "ecb(des)",
			.base.cra_driver_name = "ecb-des-ux500",
			.base.cra_priority = 300,
			.base.cra_flags = CRYPTO_ALG_ASYNC,
			.base.cra_blocksize = DES_BLOCK_SIZE,
			.base.cra_ctxsize = sizeof(struct cryp_ctx),
			.base.cra_alignmask = 3,
			.base.cra_module = THIS_MODULE,

			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.setkey = des_skcipher_setkey,
			.encrypt = cryp_blk_encrypt,
			.decrypt = cryp_blk_decrypt,
			.init = cryp_init_tfm,
		}
	},
	{
		.algomode = CRYP_ALGO_TDES_ECB,
		.skcipher = {
			.base.cra_name = "ecb(des3_ede)",
			.base.cra_driver_name = "ecb-des3_ede-ux500",
			.base.cra_priority = 300,
			.base.cra_flags = CRYPTO_ALG_ASYNC,
			.base.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.base.cra_ctxsize = sizeof(struct cryp_ctx),
			.base.cra_alignmask = 3,
			.base.cra_module = THIS_MODULE,

			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.setkey = des3_skcipher_setkey,
			.encrypt = cryp_blk_encrypt,
			.decrypt = cryp_blk_decrypt,
			.init = cryp_init_tfm,
		}
	},
	{
		.algomode = CRYP_ALGO_DES_CBC,
		.skcipher = {
			.base.cra_name = "cbc(des)",
			.base.cra_driver_name = "cbc-des-ux500",
			.base.cra_priority = 300,
			.base.cra_flags = CRYPTO_ALG_ASYNC,
			.base.cra_blocksize = DES_BLOCK_SIZE,
			.base.cra_ctxsize = sizeof(struct cryp_ctx),
			.base.cra_alignmask = 3,
			.base.cra_module = THIS_MODULE,

			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.setkey = des_skcipher_setkey,
			.encrypt = cryp_blk_encrypt,
			.decrypt = cryp_blk_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.init = cryp_init_tfm,
		}
	},
	{
		.algomode = CRYP_ALGO_TDES_CBC,
		.skcipher = {
			.base.cra_name = "cbc(des3_ede)",
			.base.cra_driver_name = "cbc-des3_ede-ux500",
			.base.cra_priority = 300,
			.base.cra_flags = CRYPTO_ALG_ASYNC,
			.base.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.base.cra_ctxsize = sizeof(struct cryp_ctx),
			.base.cra_alignmask = 3,
			.base.cra_module = THIS_MODULE,

			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.setkey = des3_skcipher_setkey,
			.encrypt = cryp_blk_encrypt,
			.decrypt = cryp_blk_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.init = cryp_init_tfm,
		}
	}
};

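/**
 * cryp_algs_register_all - register every algorithm in cryp_algs[], rolling
 *			    back the ones already registered on failure.
 */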
static int cryp_algs_register_all(void)
{
	int ret;
	int i;
	int count;

	pr_debug("[%s]", __func__);

	for (i = 0; i < ARRAY_SIZE(cryp_algs); i++) {
		ret = crypto_register_skcipher(&cryp_algs[i].skcipher);
		if (ret) {
			count = i;
			pr_err("[%s] alg registration failed",
				cryp_algs[i].skcipher.base.cra_driver_name);
			goto unreg;
		}
	}
	return 0;
unreg:
	for (i = 0; i < count; i++)
		crypto_unregister_skcipher(&cryp_algs[i].skcipher);
	return ret;
}

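/**
 * cryp_algs_unregister_all - unregister every algorithm in cryp_algs[].
 */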
static void cryp_algs_unregister_all(void)
{
	int i;

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	for (i = 0; i < ARRAY_SIZE(cryp_algs); i++)
		crypto_unregister_skcipher(&cryp_algs[i].skcipher);
}

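/*
 * Probe: map the registers, get regulator, clock and IRQ, power the block up
 * and run cryp_check() on it, then add the device to the allocation list and
 * register the algorithms.
 */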
static int ux500_cryp_probe(struct platform_device *pdev)
{
	int ret;
	struct resource *res;
	struct resource *res_irq;
	struct cryp_device_data *device_data;
	struct cryp_protection_config prot = {
		.privilege_access = CRYP_STATE_ENABLE
	};
	struct device *dev = &pdev->dev;

	dev_dbg(dev, "[%s]", __func__);
	device_data = devm_kzalloc(dev, sizeof(*device_data), GFP_ATOMIC);
	if (!device_data) {
		ret = -ENOMEM;
		goto out;
	}

	device_data->dev = dev;
	device_data->current_ctx = NULL;

	/* Grab the DMA channel configuration from the platform data. */
	mem_to_engine = &((struct cryp_platform_data *)
		 dev->platform_data)->mem_to_engine;
	engine_to_mem = &((struct cryp_platform_data *)
		 dev->platform_data)->engine_to_mem;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!res) {
		dev_err(dev, "[%s]: platform_get_resource() failed",
			__func__);
		ret = -ENODEV;
		goto out;
	}

	device_data->phybase = res->start;
	device_data->base = devm_ioremap_resource(dev, res);
	if (IS_ERR(device_data->base)) {
		ret = PTR_ERR(device_data->base);
		goto out;
	}

	spin_lock_init(&device_data->ctx_lock);
	spin_lock_init(&device_data->power_state_spinlock);

	/* Get the power regulator for the CRYP hardware block */
	device_data->pwr_regulator = regulator_get(&pdev->dev, "v-ape");
	if (IS_ERR(device_data->pwr_regulator)) {
		dev_err(dev, "[%s]: could not get cryp regulator", __func__);
		ret = PTR_ERR(device_data->pwr_regulator);
		device_data->pwr_regulator = NULL;
		goto out;
	}

	/* Get the clock for the CRYP hardware block */
	device_data->clk = devm_clk_get(&pdev->dev, NULL);
	if (IS_ERR(device_data->clk)) {
		dev_err(dev, "[%s]: clk_get() failed!", __func__);
		ret = PTR_ERR(device_data->clk);
		goto out_regulator;
	}

	ret = clk_prepare(device_data->clk);
	if (ret) {
		dev_err(dev, "[%s]: clk_prepare() failed!", __func__);
		goto out_regulator;
	}

	/* Enable device power (and clock) */
	ret = cryp_enable_power(device_data->dev, device_data, false);
	if (ret) {
		dev_err(dev, "[%s]: cryp_enable_power() failed!", __func__);
		goto out_clk_unprepare;
	}

	if (cryp_check(device_data)) {
		dev_err(dev, "[%s]: cryp_check() failed!", __func__);
		ret = -EINVAL;
		goto out_power;
	}

	if (cryp_configure_protection(device_data, &prot)) {
		dev_err(dev, "[%s]: cryp_configure_protection() failed!",
			__func__);
		ret = -EINVAL;
		goto out_power;
	}

	res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
	if (!res_irq) {
		dev_err(dev, "[%s]: IORESOURCE_IRQ unavailable",
			__func__);
		ret = -ENODEV;
		goto out_power;
	}

	ret = devm_request_irq(&pdev->dev, res_irq->start,
			       cryp_interrupt_handler, 0, "cryp1", device_data);
	if (ret) {
		dev_err(dev, "[%s]: Unable to request IRQ", __func__);
		goto out_power;
	}

	if (cryp_mode == CRYP_MODE_DMA)
		cryp_dma_setup_channel(device_data, dev);

	platform_set_drvdata(pdev, device_data);

	/* Put the new device into the device list... */
	klist_add_tail(&device_data->list_node, &driver_data.device_list);

	/* ... and signal that a new device is available. */
	up(&driver_data.device_allocation);

	atomic_set(&session_id, 1);

	ret = cryp_algs_register_all();
	if (ret) {
		dev_err(dev, "[%s]: cryp_algs_register_all() failed!",
			__func__);
		goto out_power;
	}

	dev_info(dev, "successfully registered\n");

	return 0;

out_power:
	cryp_disable_power(device_data->dev, device_data, false);

out_clk_unprepare:
	clk_unprepare(device_data->clk);

out_regulator:
	regulator_put(device_data->pwr_regulator);

out:
	return ret;
}

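/*
 * Remove: refuse if the device is still in use, otherwise take it out of the
 * allocation list, unregister the algorithms when the last device goes away
 * and power the block down.
 */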
static int ux500_cryp_remove(struct platform_device *pdev)
{
	struct cryp_device_data *device_data;

	dev_dbg(&pdev->dev, "[%s]", __func__);
	device_data = platform_get_drvdata(pdev);
	if (!device_data) {
		dev_err(&pdev->dev, "[%s]: platform_get_drvdata() failed!",
			__func__);
		return -ENOMEM;
	}

	/* Try to decrease the number of available devices. */
	if (down_trylock(&driver_data.device_allocation))
		return -EBUSY;

	/* Check that the device is free */
	spin_lock(&device_data->ctx_lock);
	/* current_ctx allocates a device, NULL = unallocated */
	if (device_data->current_ctx) {
		/* The device is busy */
		spin_unlock(&device_data->ctx_lock);
		/* Return the device to the pool. */
		up(&driver_data.device_allocation);
		return -EBUSY;
	}

	spin_unlock(&device_data->ctx_lock);

	/* Remove the device from the list */
	if (klist_node_attached(&device_data->list_node))
		klist_remove(&device_data->list_node);

	/* If this was the last device, remove the services */
	if (list_empty(&driver_data.device_list.k_list))
		cryp_algs_unregister_all();

	if (cryp_disable_power(&pdev->dev, device_data, false))
		dev_err(&pdev->dev, "[%s]: cryp_disable_power() failed",
			__func__);

	clk_unprepare(device_data->clk);
	regulator_put(device_data->pwr_regulator);

	return 0;
}

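/*
 * Shutdown: like remove, but never fails; the device is marked as allocated
 * so no new requests can grab it while everything is torn down.
 */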
static void ux500_cryp_shutdown(struct platform_device *pdev)
{
	struct cryp_device_data *device_data;

	dev_dbg(&pdev->dev, "[%s]", __func__);

	device_data = platform_get_drvdata(pdev);
	if (!device_data) {
		dev_err(&pdev->dev, "[%s]: platform_get_drvdata() failed!",
			__func__);
		return;
	}

	/* Check that the device is free */
	spin_lock(&device_data->ctx_lock);
	/* current_ctx allocates a device, NULL = unallocated */
	if (!device_data->current_ctx) {
		if (down_trylock(&driver_data.device_allocation))
			dev_dbg(&pdev->dev, "[%s]: Cryp still in use!"
				"Shutting down anyway...", __func__);
		/*
		 * Claim the device with a dummy non-NULL context pointer so
		 * nobody else can allocate it while we shut down.
		 */
		device_data->current_ctx++;
	}
	spin_unlock(&device_data->ctx_lock);

	/* Remove the device from the list */
	if (klist_node_attached(&device_data->list_node))
		klist_remove(&device_data->list_node);

	/* If this was the last device, remove the services */
	if (list_empty(&driver_data.device_list.k_list))
		cryp_algs_unregister_all();

	if (cryp_disable_power(&pdev->dev, device_data, false))
		dev_err(&pdev->dev, "[%s]: cryp_disable_power() failed",
			__func__);
}

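/*
 * System PM: mark the device busy across suspend (taking the allocation
 * semaphore if it was idle), save/restore the hardware context for an active
 * user, and gate the IRQ, clock and regulator.
 */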
#ifdef CONFIG_PM_SLEEP
static int ux500_cryp_suspend(struct device *dev)
{
	int ret;
	struct platform_device *pdev = to_platform_device(dev);
	struct cryp_device_data *device_data;
	struct resource *res_irq;
	struct cryp_ctx *temp_ctx = NULL;

	dev_dbg(dev, "[%s]", __func__);

	device_data = platform_get_drvdata(pdev);
	if (!device_data) {
		dev_err(dev, "[%s]: platform_get_drvdata() failed!", __func__);
		return -ENOMEM;
	}

	res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
	if (!res_irq)
		dev_err(dev, "[%s]: IORESOURCE_IRQ, unavailable", __func__);
	else
		disable_irq(res_irq->start);

	spin_lock(&device_data->ctx_lock);
	/* Claim an idle device with a dummy (non-NULL) context pointer. */
	if (!device_data->current_ctx)
		device_data->current_ctx++;
	spin_unlock(&device_data->ctx_lock);

	/* ++temp_ctx yields the same dummy pointer value as used above. */
	if (device_data->current_ctx == ++temp_ctx) {
		if (down_interruptible(&driver_data.device_allocation))
			dev_dbg(dev, "[%s]: down_interruptible() failed",
				__func__);
		ret = cryp_disable_power(dev, device_data, false);

	} else
		ret = cryp_disable_power(dev, device_data, true);

	if (ret)
		dev_err(dev, "[%s]: cryp_disable_power()", __func__);

	return ret;
}

static int ux500_cryp_resume(struct device *dev)
{
	int ret = 0;
	struct platform_device *pdev = to_platform_device(dev);
	struct cryp_device_data *device_data;
	struct resource *res_irq;
	struct cryp_ctx *temp_ctx = NULL;

	dev_dbg(dev, "[%s]", __func__);

	device_data = platform_get_drvdata(pdev);
	if (!device_data) {
		dev_err(dev, "[%s]: platform_get_drvdata() failed!", __func__);
		return -ENOMEM;
	}

	spin_lock(&device_data->ctx_lock);
	/* A dummy context pointer means the device was idle when suspended. */
	if (device_data->current_ctx == ++temp_ctx)
		device_data->current_ctx = NULL;
	spin_unlock(&device_data->ctx_lock);

	if (!device_data->current_ctx)
		up(&driver_data.device_allocation);
	else
		ret = cryp_enable_power(dev, device_data, true);

	if (ret)
		dev_err(dev, "[%s]: cryp_enable_power() failed!", __func__);
	else {
		res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
		if (res_irq)
			enable_irq(res_irq->start);
	}

	return ret;
}
#endif

static SIMPLE_DEV_PM_OPS(ux500_cryp_pm, ux500_cryp_suspend, ux500_cryp_resume);

static const struct of_device_id ux500_cryp_match[] = {
	{ .compatible = "stericsson,ux500-cryp" },
	{ },
};
MODULE_DEVICE_TABLE(of, ux500_cryp_match);

static struct platform_driver cryp_driver = {
	.probe = ux500_cryp_probe,
	.remove = ux500_cryp_remove,
	.shutdown = ux500_cryp_shutdown,
	.driver = {
		.name = "cryp1",
		.of_match_table = ux500_cryp_match,
		.pm = &ux500_cryp_pm,
	}
};

static int __init ux500_cryp_mod_init(void)
{
	pr_debug("[%s] is called!", __func__);
	klist_init(&driver_data.device_list, NULL, NULL);
	/* Initialize the semaphore to 0 devices (locked state) */
	sema_init(&driver_data.device_allocation, 0);
	return platform_driver_register(&cryp_driver);
}

static void __exit ux500_cryp_mod_fini(void)
{
	pr_debug("[%s] is called!", __func__);
	platform_driver_unregister(&cryp_driver);
}

module_init(ux500_cryp_mod_init);
module_exit(ux500_cryp_mod_fini);

/* Transfer mode: polling, interrupt or DMA (one of the CRYP_MODE_* values) */
module_param(cryp_mode, int, 0);

MODULE_DESCRIPTION("Driver for ST-Ericsson UX500 CRYP crypto engine.");
MODULE_ALIAS_CRYPTO("aes-all");
MODULE_ALIAS_CRYPTO("des-all");

MODULE_LICENSE("GPL");