Lines matching refs: dd (references to the struct omap_aes_dev *dd device-context pointer)

46 #define omap_aes_read(dd, offset)				\  argument
49 _read_ret = __raw_readl(dd->io_base + offset); \
55 inline u32 omap_aes_read(struct omap_aes_dev *dd, u32 offset) in omap_aes_read() argument
57 return __raw_readl(dd->io_base + offset); in omap_aes_read()
62 #define omap_aes_write(dd, offset, value) \ argument
66 __raw_writel(value, dd->io_base + offset); \
69 inline void omap_aes_write(struct omap_aes_dev *dd, u32 offset, in omap_aes_write() argument
72 __raw_writel(value, dd->io_base + offset); in omap_aes_write()
76 static inline void omap_aes_write_mask(struct omap_aes_dev *dd, u32 offset, in omap_aes_write_mask() argument
81 val = omap_aes_read(dd, offset); in omap_aes_write_mask()
84 omap_aes_write(dd, offset, val); in omap_aes_write_mask()
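The three accessors above wrap MMIO access to the AES module: omap_aes_read()/omap_aes_write() are thin __raw_readl()/__raw_writel() wrappers around dd->io_base, and omap_aes_write_mask() does a read-modify-write so only the bits selected by mask change. The modify step itself falls between the listed lines; the self-contained sketch below fills it in with the conventional form, using a plain array in place of the device's register window (names are illustrative, not from the driver).

#include <stdint.h>

/* Stand-in for a memory-mapped register bank (illustration only). */
static volatile uint32_t regs[0x100];

static inline uint32_t reg_read(uint32_t offset)
{
        return regs[offset / 4];
}

static inline void reg_write(uint32_t offset, uint32_t value)
{
        regs[offset / 4] = value;
}

/* Update only the bits covered by 'mask', as omap_aes_write_mask() does. */
static inline void reg_write_mask(uint32_t offset, uint32_t value, uint32_t mask)
{
        uint32_t val = reg_read(offset);

        val &= ~mask;
        val |= (value & mask);
        reg_write(offset, val);
}

int main(void)
{
        reg_write(0x40, 0xffffffffu);
        reg_write_mask(0x40, 0x0, 0x000000ffu);  /* clear only the low byte */
        return reg_read(0x40) == 0xffffff00u ? 0 : 1;
}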
87 static void omap_aes_write_n(struct omap_aes_dev *dd, u32 offset, in omap_aes_write_n() argument
91 omap_aes_write(dd, offset, *value); in omap_aes_write_n()
94 static int omap_aes_hw_init(struct omap_aes_dev *dd) in omap_aes_hw_init() argument
98 if (!(dd->flags & FLAGS_INIT)) { in omap_aes_hw_init()
99 dd->flags |= FLAGS_INIT; in omap_aes_hw_init()
100 dd->err = 0; in omap_aes_hw_init()
103 err = pm_runtime_resume_and_get(dd->dev); in omap_aes_hw_init()
105 dev_err(dd->dev, "failed to get sync: %d\n", err); in omap_aes_hw_init()
112 void omap_aes_clear_copy_flags(struct omap_aes_dev *dd) in omap_aes_clear_copy_flags() argument
114 dd->flags &= ~(OMAP_CRYPTO_COPY_MASK << FLAGS_IN_DATA_ST_SHIFT); in omap_aes_clear_copy_flags()
115 dd->flags &= ~(OMAP_CRYPTO_COPY_MASK << FLAGS_OUT_DATA_ST_SHIFT); in omap_aes_clear_copy_flags()
116 dd->flags &= ~(OMAP_CRYPTO_COPY_MASK << FLAGS_ASSOC_DATA_ST_SHIFT); in omap_aes_clear_copy_flags()
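omap_aes_clear_copy_flags() drops the per-buffer copy markers that the omap-crypto helpers pack into dd->flags at three different bit offsets; the idiom is one flags &= ~(mask << shift) per field. A small standalone illustration with made-up shift values (the real constants live in the driver headers):

#include <stdint.h>
#include <assert.h>

/* Illustrative values only; not the driver's real constants. */
#define COPY_MASK               0x3u
#define IN_DATA_ST_SHIFT        4
#define OUT_DATA_ST_SHIFT       6
#define ASSOC_DATA_ST_SHIFT     8

static uint32_t clear_copy_flags(uint32_t flags)
{
        flags &= ~(COPY_MASK << IN_DATA_ST_SHIFT);
        flags &= ~(COPY_MASK << OUT_DATA_ST_SHIFT);
        flags &= ~(COPY_MASK << ASSOC_DATA_ST_SHIFT);
        return flags;
}

int main(void)
{
        /* Bits inside the three fields are cleared, the rest survive. */
        assert(clear_copy_flags(0xffffffffu) == 0xfffffc0fu);
        return 0;
}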
119 int omap_aes_write_ctrl(struct omap_aes_dev *dd) in omap_aes_write_ctrl() argument
126 err = omap_aes_hw_init(dd); in omap_aes_write_ctrl()
130 key32 = dd->ctx->keylen / sizeof(u32); in omap_aes_write_ctrl()
133 if (dd->flags & FLAGS_GCM) in omap_aes_write_ctrl()
135 omap_aes_write(dd, i, 0x0); in omap_aes_write_ctrl()
138 omap_aes_write(dd, AES_REG_KEY(dd, i), in omap_aes_write_ctrl()
139 (__force u32)cpu_to_le32(dd->ctx->key[i])); in omap_aes_write_ctrl()
142 if ((dd->flags & (FLAGS_CBC | FLAGS_CTR)) && dd->req->iv) in omap_aes_write_ctrl()
143 omap_aes_write_n(dd, AES_REG_IV(dd, 0), (void *)dd->req->iv, 4); in omap_aes_write_ctrl()
145 if ((dd->flags & (FLAGS_GCM)) && dd->aead_req->iv) { in omap_aes_write_ctrl()
146 rctx = aead_request_ctx(dd->aead_req); in omap_aes_write_ctrl()
147 omap_aes_write_n(dd, AES_REG_IV(dd, 0), (u32 *)rctx->iv, 4); in omap_aes_write_ctrl()
150 val = FLD_VAL(((dd->ctx->keylen >> 3) - 1), 4, 3); in omap_aes_write_ctrl()
151 if (dd->flags & FLAGS_CBC) in omap_aes_write_ctrl()
154 if (dd->flags & (FLAGS_CTR | FLAGS_GCM)) in omap_aes_write_ctrl()
157 if (dd->flags & FLAGS_GCM) in omap_aes_write_ctrl()
160 if (dd->flags & FLAGS_ENCRYPT) in omap_aes_write_ctrl()
163 omap_aes_write_mask(dd, AES_REG_CTRL(dd), val, AES_REG_CTRL_MASK); in omap_aes_write_ctrl()
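omap_aes_write_ctrl() loads the key words and, for CBC/CTR/GCM, the IV, then assembles the CTRL word: a key-size field built with FLD_VAL() from the key length plus one enable bit per active mode, written via omap_aes_write_mask() so unrelated CTRL bits survive. A hedged sketch of just the field-packing step, with an assumed FLD_VAL() that places a value into bits end..start (the driver defines its own macro elsewhere):

#include <stdio.h>

/* Assumed field-packing macros for illustration, not copied from the driver. */
#define FLD_MASK(start, end)     (((1u << ((start) - (end) + 1)) - 1) << (end))
#define FLD_VAL(val, start, end) (((unsigned int)(val) << (end)) & FLD_MASK(start, end))

int main(void)
{
        unsigned int keylen = 32;               /* AES-256, in bytes */
        unsigned int val;

        /* Key-size field: (keylen >> 3) - 1 packed into bits 4..3. */
        val = FLD_VAL((keylen >> 3) - 1, 4, 3);
        printf("ctrl key-size field = 0x%x\n", val);  /* 0x18 for AES-256 */
        return 0;
}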
168 static void omap_aes_dma_trigger_omap2(struct omap_aes_dev *dd, int length) in omap_aes_dma_trigger_omap2() argument
172 val = dd->pdata->dma_start; in omap_aes_dma_trigger_omap2()
174 if (dd->dma_lch_out != NULL) in omap_aes_dma_trigger_omap2()
175 val |= dd->pdata->dma_enable_out; in omap_aes_dma_trigger_omap2()
176 if (dd->dma_lch_in != NULL) in omap_aes_dma_trigger_omap2()
177 val |= dd->pdata->dma_enable_in; in omap_aes_dma_trigger_omap2()
179 mask = dd->pdata->dma_enable_out | dd->pdata->dma_enable_in | in omap_aes_dma_trigger_omap2()
180 dd->pdata->dma_start; in omap_aes_dma_trigger_omap2()
182 omap_aes_write_mask(dd, AES_REG_MASK(dd), val, mask); in omap_aes_dma_trigger_omap2()
186 static void omap_aes_dma_trigger_omap4(struct omap_aes_dev *dd, int length) in omap_aes_dma_trigger_omap4() argument
188 omap_aes_write(dd, AES_REG_LENGTH_N(0), length); in omap_aes_dma_trigger_omap4()
189 omap_aes_write(dd, AES_REG_LENGTH_N(1), 0); in omap_aes_dma_trigger_omap4()
190 if (dd->flags & FLAGS_GCM) in omap_aes_dma_trigger_omap4()
191 omap_aes_write(dd, AES_REG_A_LEN, dd->assoc_len); in omap_aes_dma_trigger_omap4()
193 omap_aes_dma_trigger_omap2(dd, length); in omap_aes_dma_trigger_omap4()
196 static void omap_aes_dma_stop(struct omap_aes_dev *dd) in omap_aes_dma_stop() argument
200 mask = dd->pdata->dma_enable_out | dd->pdata->dma_enable_in | in omap_aes_dma_stop()
201 dd->pdata->dma_start; in omap_aes_dma_stop()
203 omap_aes_write_mask(dd, AES_REG_MASK(dd), 0, mask); in omap_aes_dma_stop()
208 struct omap_aes_dev *dd; in omap_aes_find_dev() local
211 dd = list_first_entry(&dev_list, struct omap_aes_dev, list); in omap_aes_find_dev()
212 list_move_tail(&dd->list, &dev_list); in omap_aes_find_dev()
213 rctx->dd = dd; in omap_aes_find_dev()
216 return dd; in omap_aes_find_dev()
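omap_aes_find_dev() takes the device at the head of the global dev_list, immediately rotates it to the tail with list_move_tail(), and caches it in rctx->dd, so successive requests are spread round-robin over all registered instances. A toy userspace illustration of the same rotate-on-pick idea, using an index instead of a kernel list:

#include <stdio.h>

/* Illustration only: rotate over N "devices" the way list_move_tail() rotates dev_list. */
#define NDEV 3

static int next_dev;                    /* head of the rotation */

static int pick_dev(void)
{
        int dev = next_dev;

        next_dev = (next_dev + 1) % NDEV;  /* "move to tail": the picked device goes last */
        return dev;
}

int main(void)
{
        for (int i = 0; i < 5; i++)
                printf("request %d -> dev %d\n", i, pick_dev());
        return 0;
}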
221 struct omap_aes_dev *dd = data; in omap_aes_dma_out_callback() local
224 tasklet_schedule(&dd->done_task); in omap_aes_dma_out_callback()
227 static int omap_aes_dma_init(struct omap_aes_dev *dd) in omap_aes_dma_init() argument
231 dd->dma_lch_out = NULL; in omap_aes_dma_init()
232 dd->dma_lch_in = NULL; in omap_aes_dma_init()
234 dd->dma_lch_in = dma_request_chan(dd->dev, "rx"); in omap_aes_dma_init()
235 if (IS_ERR(dd->dma_lch_in)) { in omap_aes_dma_init()
236 dev_err(dd->dev, "Unable to request in DMA channel\n"); in omap_aes_dma_init()
237 return PTR_ERR(dd->dma_lch_in); in omap_aes_dma_init()
240 dd->dma_lch_out = dma_request_chan(dd->dev, "tx"); in omap_aes_dma_init()
241 if (IS_ERR(dd->dma_lch_out)) { in omap_aes_dma_init()
242 dev_err(dd->dev, "Unable to request out DMA channel\n"); in omap_aes_dma_init()
243 err = PTR_ERR(dd->dma_lch_out); in omap_aes_dma_init()
250 dma_release_channel(dd->dma_lch_in); in omap_aes_dma_init()
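omap_aes_dma_init() requests the "rx" and "tx" DMA channels with dma_request_chan() and, if the second request fails, releases the first before propagating the error. A generic standalone sketch of that acquire-two, unwind-on-failure shape (the "channel" type and helpers here are placeholders, not kernel APIs):

#include <stdio.h>
#include <stdlib.h>

/* Placeholder "channel" type and helpers, standing in for dma_request_chan()/
 * dma_release_channel() in the listing above. */
struct chan { const char *name; };

static struct chan *request_chan(const char *name)
{
        struct chan *c = malloc(sizeof(*c));

        if (c)
                c->name = name;
        return c;
}

static void release_chan(struct chan *c)
{
        free(c);
}

static int init_channels(struct chan **in, struct chan **out)
{
        *in = request_chan("rx");
        if (!*in)
                return -1;

        *out = request_chan("tx");
        if (!*out) {
                /* Unwind the resource acquired first, as omap_aes_dma_init() does. */
                release_chan(*in);
                *in = NULL;
                return -1;
        }
        return 0;
}

int main(void)
{
        struct chan *in, *out;

        if (!init_channels(&in, &out))
                printf("got %s and %s\n", in->name, out->name);
        return 0;
}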
255 static void omap_aes_dma_cleanup(struct omap_aes_dev *dd) in omap_aes_dma_cleanup() argument
257 if (dd->pio_only) in omap_aes_dma_cleanup()
260 dma_release_channel(dd->dma_lch_out); in omap_aes_dma_cleanup()
261 dma_release_channel(dd->dma_lch_in); in omap_aes_dma_cleanup()
264 static int omap_aes_crypt_dma(struct omap_aes_dev *dd, in omap_aes_crypt_dma() argument
273 if (dd->pio_only) { in omap_aes_crypt_dma()
274 dd->in_sg_offset = 0; in omap_aes_crypt_dma()
276 dd->out_sg_offset = 0; in omap_aes_crypt_dma()
280 omap_aes_write(dd, AES_REG_IRQ_ENABLE(dd), 0x2); in omap_aes_crypt_dma()
284 dma_sync_sg_for_device(dd->dev, dd->in_sg, in_sg_len, DMA_TO_DEVICE); in omap_aes_crypt_dma()
288 cfg.src_addr = dd->phys_base + AES_REG_DATA_N(dd, 0); in omap_aes_crypt_dma()
289 cfg.dst_addr = dd->phys_base + AES_REG_DATA_N(dd, 0); in omap_aes_crypt_dma()
296 ret = dmaengine_slave_config(dd->dma_lch_in, &cfg); in omap_aes_crypt_dma()
298 dev_err(dd->dev, "can't configure IN dmaengine slave: %d\n", in omap_aes_crypt_dma()
303 tx_in = dmaengine_prep_slave_sg(dd->dma_lch_in, in_sg, in_sg_len, in omap_aes_crypt_dma()
307 dev_err(dd->dev, "IN prep_slave_sg() failed\n"); in omap_aes_crypt_dma()
312 tx_in->callback_param = dd; in omap_aes_crypt_dma()
317 ret = dmaengine_slave_config(dd->dma_lch_out, &cfg); in omap_aes_crypt_dma()
319 dev_err(dd->dev, "can't configure OUT dmaengine slave: %d\n", in omap_aes_crypt_dma()
324 tx_out = dmaengine_prep_slave_sg(dd->dma_lch_out, out_sg, in omap_aes_crypt_dma()
329 dev_err(dd->dev, "OUT prep_slave_sg() failed\n"); in omap_aes_crypt_dma()
338 if (dd->flags & FLAGS_GCM) in omap_aes_crypt_dma()
342 cb_desc->callback_param = dd; in omap_aes_crypt_dma()
349 dma_async_issue_pending(dd->dma_lch_in); in omap_aes_crypt_dma()
351 dma_async_issue_pending(dd->dma_lch_out); in omap_aes_crypt_dma()
354 dd->pdata->trigger(dd, dd->total); in omap_aes_crypt_dma()
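omap_aes_crypt_dma() follows the usual dmaengine slave-SG sequence for each direction: dmaengine_slave_config() points the channel at the AES DATA register, dmaengine_prep_slave_sg() builds a descriptor over the scatterlist, a completion callback is attached to the appropriate descriptor, and dma_async_issue_pending() kicks both channels before dd->pdata->trigger() starts the engine. A condensed sketch of that sequence for one memory-to-device channel (kernel context, <linux/dmaengine.h>; error paths and the driver's real addresses and burst sizes are simplified):

/* Minimal dmaengine slave-SG flow, one direction (sketch, kernel context). */
static int start_one_channel(struct dma_chan *chan, struct scatterlist *sg,
                             int sg_len, dma_addr_t fifo, void *cb_param,
                             dma_async_tx_callback cb)
{
        struct dma_slave_config cfg = {
                .direction      = DMA_MEM_TO_DEV,
                .dst_addr       = fifo,         /* device data register */
                .dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
                .dst_maxburst   = 4,            /* illustrative burst size */
        };
        struct dma_async_tx_descriptor *tx;
        int ret;

        ret = dmaengine_slave_config(chan, &cfg);
        if (ret)
                return ret;

        tx = dmaengine_prep_slave_sg(chan, sg, sg_len, DMA_MEM_TO_DEV,
                                     DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!tx)
                return -EINVAL;

        tx->callback = cb;              /* e.g. omap_aes_dma_out_callback */
        tx->callback_param = cb_param;

        dmaengine_submit(tx);
        dma_async_issue_pending(chan);
        return 0;
}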
359 int omap_aes_crypt_dma_start(struct omap_aes_dev *dd) in omap_aes_crypt_dma_start() argument
363 pr_debug("total: %zu\n", dd->total); in omap_aes_crypt_dma_start()
365 if (!dd->pio_only) { in omap_aes_crypt_dma_start()
366 err = dma_map_sg(dd->dev, dd->in_sg, dd->in_sg_len, in omap_aes_crypt_dma_start()
369 dev_err(dd->dev, "dma_map_sg() error\n"); in omap_aes_crypt_dma_start()
373 if (dd->out_sg_len) { in omap_aes_crypt_dma_start()
374 err = dma_map_sg(dd->dev, dd->out_sg, dd->out_sg_len, in omap_aes_crypt_dma_start()
377 dev_err(dd->dev, "dma_map_sg() error\n"); in omap_aes_crypt_dma_start()
383 err = omap_aes_crypt_dma(dd, dd->in_sg, dd->out_sg, dd->in_sg_len, in omap_aes_crypt_dma_start()
384 dd->out_sg_len); in omap_aes_crypt_dma_start()
385 if (err && !dd->pio_only) { in omap_aes_crypt_dma_start()
386 dma_unmap_sg(dd->dev, dd->in_sg, dd->in_sg_len, DMA_TO_DEVICE); in omap_aes_crypt_dma_start()
387 if (dd->out_sg_len) in omap_aes_crypt_dma_start()
388 dma_unmap_sg(dd->dev, dd->out_sg, dd->out_sg_len, in omap_aes_crypt_dma_start()
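omap_aes_crypt_dma_start() maps the input and output scatterlists before calling omap_aes_crypt_dma(), and unmaps them again if that call fails; note that dma_map_sg() reports failure by returning 0 mapped entries rather than a negative errno. A sketch of the map/start/unwind shape (kernel context, <linux/dma-mapping.h>; the kick callback is a hypothetical stand-in for omap_aes_crypt_dma()):

/* Map, start, and unwind on failure -- the shape of omap_aes_crypt_dma_start(). */
static int start_dma(struct device *dev, struct scatterlist *in_sg, int in_nents,
                     struct scatterlist *out_sg, int out_nents,
                     int (*kick)(void))         /* stand-in for omap_aes_crypt_dma() */
{
        int err;

        if (!dma_map_sg(dev, in_sg, in_nents, DMA_TO_DEVICE))
                return -EINVAL;

        if (!dma_map_sg(dev, out_sg, out_nents, DMA_FROM_DEVICE)) {
                dma_unmap_sg(dev, in_sg, in_nents, DMA_TO_DEVICE);
                return -EINVAL;
        }

        err = kick();
        if (err) {
                dma_unmap_sg(dev, in_sg, in_nents, DMA_TO_DEVICE);
                dma_unmap_sg(dev, out_sg, out_nents, DMA_FROM_DEVICE);
        }
        return err;
}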
395 static void omap_aes_finish_req(struct omap_aes_dev *dd, int err) in omap_aes_finish_req() argument
397 struct skcipher_request *req = dd->req; in omap_aes_finish_req()
401 crypto_finalize_skcipher_request(dd->engine, req, err); in omap_aes_finish_req()
403 pm_runtime_mark_last_busy(dd->dev); in omap_aes_finish_req()
404 pm_runtime_put_autosuspend(dd->dev); in omap_aes_finish_req()
407 int omap_aes_crypt_dma_stop(struct omap_aes_dev *dd) in omap_aes_crypt_dma_stop() argument
409 pr_debug("total: %zu\n", dd->total); in omap_aes_crypt_dma_stop()
411 omap_aes_dma_stop(dd); in omap_aes_crypt_dma_stop()
417 static int omap_aes_handle_queue(struct omap_aes_dev *dd, in omap_aes_handle_queue() argument
421 return crypto_transfer_skcipher_request_to_engine(dd->engine, req); in omap_aes_handle_queue()
427 struct omap_aes_dev *dd) in omap_aes_prepare_req() argument
436 dd->req = req; in omap_aes_prepare_req()
437 dd->total = req->cryptlen; in omap_aes_prepare_req()
438 dd->total_save = req->cryptlen; in omap_aes_prepare_req()
439 dd->in_sg = req->src; in omap_aes_prepare_req()
440 dd->out_sg = req->dst; in omap_aes_prepare_req()
441 dd->orig_out = req->dst; in omap_aes_prepare_req()
447 ret = omap_crypto_align_sg(&dd->in_sg, dd->total, AES_BLOCK_SIZE, in omap_aes_prepare_req()
448 dd->in_sgl, flags, in omap_aes_prepare_req()
449 FLAGS_IN_DATA_ST_SHIFT, &dd->flags); in omap_aes_prepare_req()
453 ret = omap_crypto_align_sg(&dd->out_sg, dd->total, AES_BLOCK_SIZE, in omap_aes_prepare_req()
454 &dd->out_sgl, 0, in omap_aes_prepare_req()
455 FLAGS_OUT_DATA_ST_SHIFT, &dd->flags); in omap_aes_prepare_req()
459 dd->in_sg_len = sg_nents_for_len(dd->in_sg, dd->total); in omap_aes_prepare_req()
460 if (dd->in_sg_len < 0) in omap_aes_prepare_req()
461 return dd->in_sg_len; in omap_aes_prepare_req()
463 dd->out_sg_len = sg_nents_for_len(dd->out_sg, dd->total); in omap_aes_prepare_req()
464 if (dd->out_sg_len < 0) in omap_aes_prepare_req()
465 return dd->out_sg_len; in omap_aes_prepare_req()
468 dd->flags = (dd->flags & ~FLAGS_MODE_MASK) | rctx->mode; in omap_aes_prepare_req()
470 dd->ctx = ctx; in omap_aes_prepare_req()
471 rctx->dd = dd; in omap_aes_prepare_req()
473 return omap_aes_write_ctrl(dd); in omap_aes_prepare_req()
481 struct omap_aes_dev *dd = rctx->dd; in omap_aes_crypt_req() local
483 if (!dd) in omap_aes_crypt_req()
486 return omap_aes_prepare_req(req, dd) ?: in omap_aes_crypt_req()
487 omap_aes_crypt_dma_start(dd); in omap_aes_crypt_req()
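omap_aes_crypt_req() chains its two steps with the GNU "?:" extension: omap_aes_prepare_req(req, dd) ?: omap_aes_crypt_dma_start(dd) returns the first call's result when it is non-zero (an error) and only otherwise evaluates and returns the second. A tiny standalone illustration (GCC/Clang extension):

#include <stdio.h>

static int step1(void) { return 0; }    /* 0 == success, as in the driver */
static int step2(void) { return -5; }

int main(void)
{
        /* step2() runs only because step1() returned 0. */
        int ret = step1() ?: step2();

        printf("ret = %d\n", ret);      /* -5 */
        return 0;
}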
490 static void omap_aes_copy_ivout(struct omap_aes_dev *dd, u8 *ivbuf) in omap_aes_copy_ivout() argument
495 ((u32 *)ivbuf)[i] = omap_aes_read(dd, AES_REG_IV(dd, i)); in omap_aes_copy_ivout()
500 struct omap_aes_dev *dd = (struct omap_aes_dev *)data; in omap_aes_done_task() local
504 if (!dd->pio_only) { in omap_aes_done_task()
505 dma_sync_sg_for_device(dd->dev, dd->out_sg, dd->out_sg_len, in omap_aes_done_task()
507 dma_unmap_sg(dd->dev, dd->in_sg, dd->in_sg_len, DMA_TO_DEVICE); in omap_aes_done_task()
508 dma_unmap_sg(dd->dev, dd->out_sg, dd->out_sg_len, in omap_aes_done_task()
510 omap_aes_crypt_dma_stop(dd); in omap_aes_done_task()
513 omap_crypto_cleanup(dd->in_sg, NULL, 0, dd->total_save, in omap_aes_done_task()
514 FLAGS_IN_DATA_ST_SHIFT, dd->flags); in omap_aes_done_task()
516 omap_crypto_cleanup(dd->out_sg, dd->orig_out, 0, dd->total_save, in omap_aes_done_task()
517 FLAGS_OUT_DATA_ST_SHIFT, dd->flags); in omap_aes_done_task()
520 if (dd->flags & (FLAGS_CBC | FLAGS_CTR)) in omap_aes_done_task()
521 omap_aes_copy_ivout(dd, dd->req->iv); in omap_aes_done_task()
523 omap_aes_finish_req(dd, 0); in omap_aes_done_task()
533 struct omap_aes_dev *dd; in omap_aes_crypt() local
558 dd = omap_aes_find_dev(rctx); in omap_aes_crypt()
559 if (!dd) in omap_aes_crypt()
564 return omap_aes_handle_queue(dd, req); in omap_aes_crypt()
863 struct omap_aes_dev *dd = dev_id; in omap_aes_irq() local
867 status = omap_aes_read(dd, AES_REG_IRQ_STATUS(dd)); in omap_aes_irq()
869 omap_aes_write(dd, AES_REG_IRQ_ENABLE(dd), 0x0); in omap_aes_irq()
871 BUG_ON(!dd->in_sg); in omap_aes_irq()
873 BUG_ON(dd->in_sg_offset > dd->in_sg->length); in omap_aes_irq()
875 src = sg_virt(dd->in_sg) + dd->in_sg_offset; in omap_aes_irq()
878 omap_aes_write(dd, AES_REG_DATA_N(dd, i), *src); in omap_aes_irq()
879 dd->in_sg_offset += 4; in omap_aes_irq()
880 if (dd->in_sg_offset == dd->in_sg->length) { in omap_aes_irq()
881 dd->in_sg = sg_next(dd->in_sg); in omap_aes_irq()
882 if (dd->in_sg) { in omap_aes_irq()
883 dd->in_sg_offset = 0; in omap_aes_irq()
884 src = sg_virt(dd->in_sg); in omap_aes_irq()
893 omap_aes_write(dd, AES_REG_IRQ_STATUS(dd), status); in omap_aes_irq()
896 omap_aes_write(dd, AES_REG_IRQ_ENABLE(dd), 0x4); in omap_aes_irq()
899 omap_aes_write(dd, AES_REG_IRQ_ENABLE(dd), 0x0); in omap_aes_irq()
901 BUG_ON(!dd->out_sg); in omap_aes_irq()
903 BUG_ON(dd->out_sg_offset > dd->out_sg->length); in omap_aes_irq()
905 dst = sg_virt(dd->out_sg) + dd->out_sg_offset; in omap_aes_irq()
908 *dst = omap_aes_read(dd, AES_REG_DATA_N(dd, i)); in omap_aes_irq()
909 dd->out_sg_offset += 4; in omap_aes_irq()
910 if (dd->out_sg_offset == dd->out_sg->length) { in omap_aes_irq()
911 dd->out_sg = sg_next(dd->out_sg); in omap_aes_irq()
912 if (dd->out_sg) { in omap_aes_irq()
913 dd->out_sg_offset = 0; in omap_aes_irq()
914 dst = sg_virt(dd->out_sg); in omap_aes_irq()
921 dd->total -= min_t(size_t, AES_BLOCK_SIZE, dd->total); in omap_aes_irq()
925 omap_aes_write(dd, AES_REG_IRQ_STATUS(dd), status); in omap_aes_irq()
927 if (!dd->total) in omap_aes_irq()
929 tasklet_schedule(&dd->done_task); in omap_aes_irq()
932 omap_aes_write(dd, AES_REG_IRQ_ENABLE(dd), 0x2); in omap_aes_irq()
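In PIO mode the IRQ handler above moves one AES block per interrupt: on the data-in event it writes four 32-bit words from the current input scatterlist position into the DATA registers and arms the data-out interrupt; on data-out it reads four words back, advances the output scatterlist, decrements dd->total, and either re-arms data-in or schedules the done tasklet. A self-contained sketch of the per-block word copy, with an array standing in for the DATA registers:

#include <stdint.h>
#include <string.h>
#include <stdio.h>

/* Illustration of the per-block PIO copy: one AES block moves as four
 * 32-bit words through the four DATA registers. */
static uint32_t data_reg[4];            /* stand-in for AES_REG_DATA_N(dd, 0..3) */

static void pio_block_in(const uint32_t *src)
{
        for (int i = 0; i < 4; i++)
                data_reg[i] = src[i];   /* omap_aes_write(dd, AES_REG_DATA_N(dd, i), *src) */
}

static void pio_block_out(uint32_t *dst)
{
        for (int i = 0; i < 4; i++)
                dst[i] = data_reg[i];   /* omap_aes_read(dd, AES_REG_DATA_N(dd, i)) */
}

int main(void)
{
        uint32_t in[4] = { 1, 2, 3, 4 }, out[4];

        pio_block_in(in);
        pio_block_out(out);
        printf("%s\n", memcmp(in, out, sizeof(in)) ? "mismatch" : "one block copied");
        return 0;
}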
955 static int omap_aes_get_res_of(struct omap_aes_dev *dd, in omap_aes_get_res_of() argument
961 dd->pdata = of_device_get_match_data(dev); in omap_aes_get_res_of()
962 if (!dd->pdata) { in omap_aes_get_res_of()
983 static int omap_aes_get_res_of(struct omap_aes_dev *dd, in omap_aes_get_res_of() argument
990 static int omap_aes_get_res_pdev(struct omap_aes_dev *dd, in omap_aes_get_res_pdev() argument
1007 dd->pdata = &omap_aes_pdata_omap2; in omap_aes_get_res_pdev()
1043 struct omap_aes_dev *dd = dev_get_drvdata(dev); in queue_len_show() local
1045 return sprintf(buf, "%d\n", dd->engine->queue.max_qlen); in queue_len_show()
1052 struct omap_aes_dev *dd; in queue_len_store() local
1070 list_for_each_entry(dd, &dev_list, list) { in queue_len_store()
1071 spin_lock_irqsave(&dd->lock, flags); in queue_len_store()
1072 dd->engine->queue.max_qlen = value; in queue_len_store()
1073 dd->aead_queue.base.max_qlen = value; in queue_len_store()
1074 spin_unlock_irqrestore(&dd->lock, flags); in queue_len_store()
1097 struct omap_aes_dev *dd; in omap_aes_probe() local
1104 dd = devm_kzalloc(dev, sizeof(struct omap_aes_dev), GFP_KERNEL); in omap_aes_probe()
1105 if (dd == NULL) { in omap_aes_probe()
1109 dd->dev = dev; in omap_aes_probe()
1110 platform_set_drvdata(pdev, dd); in omap_aes_probe()
1112 aead_init_queue(&dd->aead_queue, OMAP_AES_QUEUE_LENGTH); in omap_aes_probe()
1114 err = (dev->of_node) ? omap_aes_get_res_of(dd, dev, &res) : in omap_aes_probe()
1115 omap_aes_get_res_pdev(dd, pdev, &res); in omap_aes_probe()
1119 dd->io_base = devm_ioremap_resource(dev, &res); in omap_aes_probe()
1120 if (IS_ERR(dd->io_base)) { in omap_aes_probe()
1121 err = PTR_ERR(dd->io_base); in omap_aes_probe()
1124 dd->phys_base = res.start; in omap_aes_probe()
1137 omap_aes_dma_stop(dd); in omap_aes_probe()
1139 reg = omap_aes_read(dd, AES_REG_REV(dd)); in omap_aes_probe()
1144 (reg & dd->pdata->major_mask) >> dd->pdata->major_shift, in omap_aes_probe()
1145 (reg & dd->pdata->minor_mask) >> dd->pdata->minor_shift); in omap_aes_probe()
1147 tasklet_init(&dd->done_task, omap_aes_done_task, (unsigned long)dd); in omap_aes_probe()
1149 err = omap_aes_dma_init(dd); in omap_aes_probe()
1152 } else if (err && AES_REG_IRQ_STATUS(dd) && AES_REG_IRQ_ENABLE(dd)) { in omap_aes_probe()
1153 dd->pio_only = 1; in omap_aes_probe()
1162 dev_name(dev), dd); in omap_aes_probe()
1169 spin_lock_init(&dd->lock); in omap_aes_probe()
1171 INIT_LIST_HEAD(&dd->list); in omap_aes_probe()
1173 list_add_tail(&dd->list, &dev_list); in omap_aes_probe()
1177 dd->engine = crypto_engine_alloc_init(dev, 1); in omap_aes_probe()
1178 if (!dd->engine) { in omap_aes_probe()
1183 err = crypto_engine_start(dd->engine); in omap_aes_probe()
1187 for (i = 0; i < dd->pdata->algs_info_size; i++) { in omap_aes_probe()
1188 if (!dd->pdata->algs_info[i].registered) { in omap_aes_probe()
1189 for (j = 0; j < dd->pdata->algs_info[i].size; j++) { in omap_aes_probe()
1190 algp = &dd->pdata->algs_info[i].algs_list[j]; in omap_aes_probe()
1198 dd->pdata->algs_info[i].registered++; in omap_aes_probe()
1203 if (dd->pdata->aead_algs_info && in omap_aes_probe()
1204 !dd->pdata->aead_algs_info->registered) { in omap_aes_probe()
1205 for (i = 0; i < dd->pdata->aead_algs_info->size; i++) { in omap_aes_probe()
1206 aalg = &dd->pdata->aead_algs_info->algs_list[i]; in omap_aes_probe()
1214 dd->pdata->aead_algs_info->registered++; in omap_aes_probe()
1226 for (i = dd->pdata->aead_algs_info->registered - 1; i >= 0; i--) { in omap_aes_probe()
1227 aalg = &dd->pdata->aead_algs_info->algs_list[i]; in omap_aes_probe()
1231 for (i = dd->pdata->algs_info_size - 1; i >= 0; i--) in omap_aes_probe()
1232 for (j = dd->pdata->algs_info[i].registered - 1; j >= 0; j--) in omap_aes_probe()
1234 &dd->pdata->algs_info[i].algs_list[j]); in omap_aes_probe()
1237 if (dd->engine) in omap_aes_probe()
1238 crypto_engine_exit(dd->engine); in omap_aes_probe()
1240 omap_aes_dma_cleanup(dd); in omap_aes_probe()
1242 tasklet_kill(&dd->done_task); in omap_aes_probe()
1246 dd = NULL; in omap_aes_probe()
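omap_aes_probe() maps the registers, reads AES_REG_REV, and reports the IP revision by masking and shifting with the per-SoC values from dd->pdata before setting up DMA (or falling back to PIO with the IRQ handler), the crypto engine, and the algorithm registrations. A standalone sketch of the revision decode, with illustrative mask/shift values in place of the pdata fields:

#include <stdio.h>

/* Illustrative mask/shift values; the real ones come from dd->pdata. */
#define MAJOR_MASK      0x0700u
#define MAJOR_SHIFT     8
#define MINOR_MASK      0x003fu
#define MINOR_SHIFT     0

int main(void)
{
        unsigned int reg = 0x0192;      /* pretend AES_REG_REV readout */

        /* Same decode as the probe's version print: (reg & mask) >> shift. */
        printf("OMAP AES hw accel rev: %u.%u\n",
               (reg & MAJOR_MASK) >> MAJOR_SHIFT,
               (reg & MINOR_MASK) >> MINOR_SHIFT);
        return 0;
}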
1254 struct omap_aes_dev *dd = platform_get_drvdata(pdev); in omap_aes_remove() local
1259 list_del(&dd->list); in omap_aes_remove()
1262 for (i = dd->pdata->algs_info_size - 1; i >= 0; i--) in omap_aes_remove()
1263 for (j = dd->pdata->algs_info[i].registered - 1; j >= 0; j--) { in omap_aes_remove()
1265 &dd->pdata->algs_info[i].algs_list[j]); in omap_aes_remove()
1266 dd->pdata->algs_info[i].registered--; in omap_aes_remove()
1269 for (i = dd->pdata->aead_algs_info->registered - 1; i >= 0; i--) { in omap_aes_remove()
1270 aalg = &dd->pdata->aead_algs_info->algs_list[i]; in omap_aes_remove()
1272 dd->pdata->aead_algs_info->registered--; in omap_aes_remove()
1275 crypto_engine_exit(dd->engine); in omap_aes_remove()
1277 tasklet_kill(&dd->done_task); in omap_aes_remove()
1278 omap_aes_dma_cleanup(dd); in omap_aes_remove()
1279 pm_runtime_disable(dd->dev); in omap_aes_remove()
1281 sysfs_remove_group(&dd->dev->kobj, &omap_aes_attr_group); in omap_aes_remove()