Lines Matching defs:hdev

85 	struct img_hash_dev	*hdev;
112 struct img_hash_dev *hdev;
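
These matches appear to come from the Linux img-hash accelerator driver (drivers/crypto/img-hash.c); lines 85 and 112 are hdev back-pointers held in the driver's context structures (cf. ctx->hdev at line 644 and tctx->hdev at line 632). Most of what struct img_hash_dev itself carries can be inferred from the fields the matches below dereference. A partial, hedged reconstruction for orientation only (field order, exact types and any members not referenced below are guesses, not the driver's actual definition):

#include <crypto/algapi.h>
#include <crypto/hash.h>
#include <linux/clk.h>
#include <linux/device.h>
#include <linux/dmaengine.h>
#include <linux/interrupt.h>
#include <linux/spinlock.h>
#include <linux/types.h>

/* Guessed layout, reconstructed from the fields used in the matches below. */
struct img_hash_dev {
	struct device		*dev;		/* dev_dbg()/dev_err() owner, e.g. line 178 */
	void __iomem		*io_base;	/* register window, line 151 */
	void __iomem		*cpu_addr;	/* PIO write port, line 205 */
	dma_addr_t		bus_addr;	/* DMA destination, line 343 */
	struct clk		*hash_clk;	/* line 990 */
	struct clk		*sys_clk;	/* line 997 */
	struct dma_chan		*dma_lch;	/* slave channel, line 337 */
	spinlock_t		lock;		/* guards queue/flags, line 508 */
	struct crypto_queue	queue;		/* request queue, line 959 */
	struct ahash_request	*req;		/* request in flight, line 532 */
	unsigned long		flags;		/* DRIVER_FLAGS_* bits, line 200 */
	int			err;		/* sticky error, lines 231 and 481 */
	struct tasklet_struct	done_task;	/* completion work, line 956 */
	struct tasklet_struct	dma_task;	/* DMA kick-off, line 957 */
	struct list_head	list;		/* img_hash.dev_list link, line 954 */
};
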
149 static inline u32 img_hash_read(struct img_hash_dev *hdev, u32 offset)
151 return readl_relaxed(hdev->io_base + offset);
154 static inline void img_hash_write(struct img_hash_dev *hdev,
157 writel_relaxed(value, hdev->io_base + offset);
160 static inline __be32 img_hash_read_result_queue(struct img_hash_dev *hdev)
162 return cpu_to_be32(img_hash_read(hdev, CR_RESULT_QUEUE));
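
Lines 149-162 are the driver's register accessors. Pieced together from the fragments above (only the img_hash_write parameter order, split across lines that do not mention hdev, is assumed):

#include <linux/io.h>

static inline u32 img_hash_read(struct img_hash_dev *hdev, u32 offset)
{
	/* _relaxed: no implicit barrier; ordering is the caller's concern */
	return readl_relaxed(hdev->io_base + offset);
}

/* assumed parameter order: (hdev, offset, value) */
static inline void img_hash_write(struct img_hash_dev *hdev,
				  u32 offset, u32 value)
{
	writel_relaxed(value, hdev->io_base + offset);
}

static inline __be32 img_hash_read_result_queue(struct img_hash_dev *hdev)
{
	/* digest words come back through CR_RESULT_QUEUE (read at line 290) */
	return cpu_to_be32(img_hash_read(hdev, CR_RESULT_QUEUE));
}
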
165 static void img_hash_start(struct img_hash_dev *hdev, bool dma)
167 struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req);
178 dev_dbg(hdev->dev, "Starting hash process\n");
179 img_hash_write(hdev, CR_CONTROL, cr);
188 img_hash_read(hdev, CR_CONTROL);
191 static int img_hash_xmit_cpu(struct img_hash_dev *hdev, const u8 *buf,
197 dev_dbg(hdev->dev, "xmit_cpu: length: %zu bytes\n", length);
200 hdev->flags |= DRIVER_FLAGS_FINAL;
205 writel_relaxed(buffer[count], hdev->cpu_addr);
212 struct img_hash_dev *hdev = data;
213 struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req);
216 img_hash_xmit_cpu(hdev, ctx->buffer, ctx->bufcnt, 0);
220 tasklet_schedule(&hdev->dma_task);
223 static int img_hash_xmit_dma(struct img_hash_dev *hdev, struct scatterlist *sg)
226 struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req);
228 ctx->dma_ct = dma_map_sg(hdev->dev, sg, 1, DMA_TO_DEVICE);
230 dev_err(hdev->dev, "Invalid DMA sg\n");
231 hdev->err = -EINVAL;
235 desc = dmaengine_prep_slave_sg(hdev->dma_lch,
241 dev_err(hdev->dev, "Null DMA descriptor\n");
242 hdev->err = -EINVAL;
243 dma_unmap_sg(hdev->dev, sg, 1, DMA_TO_DEVICE);
247 desc->callback_param = hdev;
249 dma_async_issue_pending(hdev->dma_lch);
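
Lines 223-249 follow the standard dmaengine slave-sg sequence: map the scatterlist, prepare a mem-to-dev transfer, attach a completion callback, then kick the channel. A self-contained sketch of that sequence; dmaengine_submit() is assumed here (its line would not match hdev and so cannot appear above), and every name is a placeholder:

#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

static int hash_submit_tx_sketch(struct device *dev, struct dma_chan *chan,
				 struct scatterlist *sg,
				 dma_async_tx_callback done_cb, void *cb_data)
{
	struct dma_async_tx_descriptor *desc;
	int count;

	count = dma_map_sg(dev, sg, 1, DMA_TO_DEVICE);
	if (!count)
		return -EINVAL;

	desc = dmaengine_prep_slave_sg(chan, sg, count, DMA_MEM_TO_DEV,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc) {
		dma_unmap_sg(dev, sg, 1, DMA_TO_DEVICE);
		return -EINVAL;
	}

	desc->callback = done_cb;		/* completion handler, cf. line 247 */
	desc->callback_param = cb_data;

	dmaengine_submit(desc);
	dma_async_issue_pending(chan);		/* cf. line 249 */

	/* the matching dma_unmap_sg() runs after completion, cf. line 439 */
	return 0;
}
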
254 static int img_hash_write_via_cpu(struct img_hash_dev *hdev)
256 struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req);
258 ctx->bufcnt = sg_copy_to_buffer(hdev->req->src, sg_nents(ctx->sg),
259 ctx->buffer, hdev->req->nbytes);
261 ctx->total = hdev->req->nbytes;
264 hdev->flags |= (DRIVER_FLAGS_CPU | DRIVER_FLAGS_FINAL);
266 img_hash_start(hdev, false);
268 return img_hash_xmit_cpu(hdev, ctx->buffer, ctx->total, 1);
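
Lines 191-205 and 254-268 make up the PIO fallback: the request is linearised into a driver buffer with sg_copy_to_buffer(), then pushed to the accelerator one 32-bit word at a time through the cpu_addr write port. A hedged sketch of that sequence (names, and the assumption that buf is word-aligned, are mine, not the driver's):

#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/scatterlist.h>

static void hash_pio_write_sketch(void __iomem *cpu_port,
				  struct scatterlist *src,
				  u8 *buf, size_t nbytes)
{
	const u32 *words = (const u32 *)buf;
	size_t copied, i, nwords;

	/* flatten the scatterlist into one contiguous buffer, cf. line 258 */
	copied = sg_copy_to_buffer(src, sg_nents(src), buf, nbytes);

	/* feed the device one 32-bit word at a time, cf. line 205 */
	nwords = DIV_ROUND_UP(copied, sizeof(u32));
	for (i = 0; i < nwords; i++)
		writel_relaxed(words[i], cpu_port);
}
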
290 hash[i] = img_hash_read_result_queue(ctx->hdev);
296 struct img_hash_dev *hdev = ctx->hdev;
300 if (DRIVER_FLAGS_FINAL & hdev->flags)
303 dev_warn(hdev->dev, "Hash failed with error %d\n", err);
307 hdev->flags &= ~(DRIVER_FLAGS_DMA_READY | DRIVER_FLAGS_OUTPUT_READY |
314 static int img_hash_write_via_dma(struct img_hash_dev *hdev)
316 struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req);
318 img_hash_start(hdev, true);
320 dev_dbg(hdev->dev, "xmit dma size: %d\n", ctx->total);
323 hdev->flags |= DRIVER_FLAGS_FINAL;
325 hdev->flags |= DRIVER_FLAGS_DMA_ACTIVE | DRIVER_FLAGS_FINAL;
327 tasklet_schedule(&hdev->dma_task);
332 static int img_hash_dma_init(struct img_hash_dev *hdev)
337 hdev->dma_lch = dma_request_chan(hdev->dev, "tx");
338 if (IS_ERR(hdev->dma_lch)) {
339 dev_err(hdev->dev, "Couldn't acquire a slave DMA channel.\n");
340 return PTR_ERR(hdev->dma_lch);
343 dma_conf.dst_addr = hdev->bus_addr;
348 err = dmaengine_slave_config(hdev->dma_lch, &dma_conf);
350 dev_err(hdev->dev, "Couldn't configure DMA slave.\n");
351 dma_release_channel(hdev->dma_lch);
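
Lines 332-351 acquire and configure the slave DMA channel. A sketch of the same setup; the FIFO address and burst size are parameters here, where the driver takes them from hdev->bus_addr and a driver-defined constant:

#include <linux/dmaengine.h>
#include <linux/err.h>

static struct dma_chan *hash_request_tx_chan_sketch(struct device *dev,
						    dma_addr_t fifo_addr,
						    u32 maxburst)
{
	struct dma_slave_config cfg = {
		.direction	= DMA_MEM_TO_DEV,
		.dst_addr	= fifo_addr,		/* cf. line 343 */
		.dst_addr_width	= DMA_SLAVE_BUSWIDTH_4_BYTES,
		.dst_maxburst	= maxburst,
	};
	struct dma_chan *chan;
	int err;

	chan = dma_request_chan(dev, "tx");		/* named channel, line 337 */
	if (IS_ERR(chan))
		return chan;

	err = dmaengine_slave_config(chan, &cfg);	/* line 348 */
	if (err) {
		dma_release_channel(chan);		/* line 351 */
		return ERR_PTR(err);
	}

	return chan;
}
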
360 struct img_hash_dev *hdev = (struct img_hash_dev *)d;
366 if (!hdev->req)
369 ctx = ahash_request_ctx(hdev->req);
391 if (img_hash_xmit_dma(hdev, &tsg)) {
392 dev_err(hdev->dev, "DMA failed, falling back to CPU");
394 hdev->err = 0;
395 img_hash_xmit_cpu(hdev, addr + ctx->offset,
427 img_hash_dma_callback(hdev);
434 static int img_hash_write_via_dma_stop(struct img_hash_dev *hdev)
436 struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req);
439 dma_unmap_sg(hdev->dev, ctx->sg, ctx->dma_ct, DMA_TO_DEVICE);
444 static int img_hash_process_data(struct img_hash_dev *hdev)
446 struct ahash_request *req = hdev->req;
453 dev_dbg(hdev->dev, "process data request(%d bytes) using DMA\n",
455 err = img_hash_write_via_dma(hdev);
457 dev_dbg(hdev->dev, "process data request(%d bytes) using CPU\n",
459 err = img_hash_write_via_cpu(hdev);
464 static int img_hash_hw_init(struct img_hash_dev *hdev)
469 img_hash_write(hdev, CR_RESET, CR_RESET_SET);
470 img_hash_write(hdev, CR_RESET, CR_RESET_UNSET);
471 img_hash_write(hdev, CR_INTENAB, CR_INT_NEW_RESULTS_SET);
473 nbits = (u64)hdev->req->nbytes << 3;
476 img_hash_write(hdev, CR_MESSAGE_LENGTH_H, u);
477 img_hash_write(hdev, CR_MESSAGE_LENGTH_L, l);
479 if (!(DRIVER_FLAGS_INIT & hdev->flags)) {
480 hdev->flags |= DRIVER_FLAGS_INIT;
481 hdev->err = 0;
483 dev_dbg(hdev->dev, "hw initialized, nbits: %llx\n", nbits);
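
Lines 464-483 reset the engine, unmask the new-results interrupt and program the total message length in bits, split across a high and a low 32-bit register. The split that lines 473-477 imply, written out (the helper name is a placeholder):

static void hash_set_msg_length_sketch(struct img_hash_dev *hdev, u32 nbytes)
{
	u64 nbits = (u64)nbytes << 3;			/* bytes -> bits, line 473 */

	img_hash_write(hdev, CR_MESSAGE_LENGTH_H, (u32)(nbits >> 32));
	img_hash_write(hdev, CR_MESSAGE_LENGTH_L, (u32)nbits);
}
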
500 static int img_hash_handle_queue(struct img_hash_dev *hdev,
508 spin_lock_irqsave(&hdev->lock, flags);
511 res = ahash_enqueue_request(&hdev->queue, req);
513 if (DRIVER_FLAGS_BUSY & hdev->flags) {
514 spin_unlock_irqrestore(&hdev->lock, flags);
518 backlog = crypto_get_backlog(&hdev->queue);
519 async_req = crypto_dequeue_request(&hdev->queue);
521 hdev->flags |= DRIVER_FLAGS_BUSY;
523 spin_unlock_irqrestore(&hdev->lock, flags);
532 hdev->req = req;
536 dev_info(hdev->dev, "processing req, op: %lu, bytes: %d\n",
539 err = img_hash_hw_init(hdev);
542 err = img_hash_process_data(hdev);
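
Lines 500-542 implement the usual crypto_queue handling: enqueue under the spinlock, bail out if the hardware is already busy, otherwise dequeue the next request (notifying any backlogged submitter) and mark the device busy before programming the hardware. A self-contained sketch of that pattern; the struct, the busy flag and the helper name stand in for hdev->lock, hdev->queue, DRIVER_FLAGS_BUSY and img_hash_handle_queue:

#include <crypto/algapi.h>
#include <crypto/internal/hash.h>
#include <linux/spinlock.h>

struct hash_queue_sketch {
	spinlock_t		lock;
	struct crypto_queue	queue;
	bool			busy;
};

static struct ahash_request *hash_dequeue_sketch(struct hash_queue_sketch *q,
						 struct ahash_request *req)
{
	struct crypto_async_request *async_req, *backlog;
	unsigned long flags;

	spin_lock_irqsave(&q->lock, flags);
	if (req)
		ahash_enqueue_request(&q->queue, req);	/* cf. line 511 */

	if (q->busy) {					/* cf. line 513 */
		spin_unlock_irqrestore(&q->lock, flags);
		return NULL;
	}

	backlog = crypto_get_backlog(&q->queue);	/* cf. line 518 */
	async_req = crypto_dequeue_request(&q->queue);	/* cf. line 519 */
	if (async_req)
		q->busy = true;
	spin_unlock_irqrestore(&q->lock, flags);

	if (!async_req)
		return NULL;

	if (backlog)
		crypto_request_complete(backlog, -EINPROGRESS);

	return ahash_request_cast(async_req);
}
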
627 struct img_hash_dev *hdev = NULL;
632 if (!tctx->hdev) {
634 hdev = tmp;
637 tctx->hdev = hdev;
640 hdev = tctx->hdev;
644 ctx->hdev = hdev;
673 err = img_hash_handle_queue(tctx->hdev, req);
725 struct img_hash_dev *hdev = dev_id;
728 reg = img_hash_read(hdev, CR_INTSTAT);
729 img_hash_write(hdev, CR_INTCLEAR, reg);
732 dev_dbg(hdev->dev, "IRQ CR_INT_NEW_RESULTS_SET\n");
733 if (DRIVER_FLAGS_BUSY & hdev->flags) {
734 hdev->flags |= DRIVER_FLAGS_OUTPUT_READY;
735 if (!(DRIVER_FLAGS_CPU & hdev->flags))
736 hdev->flags |= DRIVER_FLAGS_DMA_READY;
737 tasklet_schedule(&hdev->done_task);
739 dev_warn(hdev->dev,
743 dev_warn(hdev->dev,
746 dev_warn(hdev->dev,
749 dev_warn(hdev->dev,
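
Lines 725-749 are the interrupt handler: read CR_INTSTAT, acknowledge it through CR_INTCLEAR, hand completion work to the done tasklet when new results arrived, and warn on anything unexpected. A minimal self-contained sketch of that shape; the offsets, bit mask and struct below are placeholders, not the driver's values:

#include <linux/bits.h>
#include <linux/interrupt.h>
#include <linux/io.h>

#define SK_INTSTAT		0x10	/* placeholder offsets */
#define SK_INTCLEAR		0x14
#define SK_INT_NEW_RESULTS	BIT(0)

struct hash_irq_sketch {
	void __iomem		*io_base;
	struct tasklet_struct	done_task;
};

static irqreturn_t hash_irq_handler_sketch(int irq, void *dev_id)
{
	struct hash_irq_sketch *h = dev_id;
	u32 stat = readl_relaxed(h->io_base + SK_INTSTAT);

	writel_relaxed(stat, h->io_base + SK_INTCLEAR);	/* ack what was read */

	if (stat & SK_INT_NEW_RESULTS)
		tasklet_schedule(&h->done_task);	/* digest handling runs later */

	return IRQ_HANDLED;
}
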
862 static int img_register_algs(struct img_hash_dev *hdev)
880 static int img_unregister_algs(struct img_hash_dev *hdev)
891 struct img_hash_dev *hdev = (struct img_hash_dev *)data;
894 if (hdev->err == -EINVAL) {
895 err = hdev->err;
899 if (!(DRIVER_FLAGS_BUSY & hdev->flags)) {
900 img_hash_handle_queue(hdev, NULL);
904 if (DRIVER_FLAGS_CPU & hdev->flags) {
905 if (DRIVER_FLAGS_OUTPUT_READY & hdev->flags) {
906 hdev->flags &= ~DRIVER_FLAGS_OUTPUT_READY;
909 } else if (DRIVER_FLAGS_DMA_READY & hdev->flags) {
910 if (DRIVER_FLAGS_DMA_ACTIVE & hdev->flags) {
911 hdev->flags &= ~DRIVER_FLAGS_DMA_ACTIVE;
912 img_hash_write_via_dma_stop(hdev);
913 if (hdev->err) {
914 err = hdev->err;
918 if (DRIVER_FLAGS_OUTPUT_READY & hdev->flags) {
919 hdev->flags &= ~(DRIVER_FLAGS_DMA_READY |
927 img_hash_finish_req(hdev->req, err);
938 struct img_hash_dev *hdev;
944 hdev = devm_kzalloc(dev, sizeof(*hdev), GFP_KERNEL);
945 if (hdev == NULL)
948 spin_lock_init(&hdev->lock);
950 hdev->dev = dev;
952 platform_set_drvdata(pdev, hdev);
954 INIT_LIST_HEAD(&hdev->list);
956 tasklet_init(&hdev->done_task, img_hash_done_task, (unsigned long)hdev);
957 tasklet_init(&hdev->dma_task, img_hash_dma_task, (unsigned long)hdev);
959 crypto_init_queue(&hdev->queue, IMG_HASH_QUEUE_LENGTH);
962 hdev->io_base = devm_platform_ioremap_resource(pdev, 0);
963 if (IS_ERR(hdev->io_base)) {
964 err = PTR_ERR(hdev->io_base);
969 hdev->cpu_addr = devm_platform_get_and_ioremap_resource(pdev, 1, &hash_res);
970 if (IS_ERR(hdev->cpu_addr)) {
971 err = PTR_ERR(hdev->cpu_addr);
974 hdev->bus_addr = hash_res->start;
983 dev_name(dev), hdev);
990 hdev->hash_clk = devm_clk_get(&pdev->dev, "hash");
991 if (IS_ERR(hdev->hash_clk)) {
993 err = PTR_ERR(hdev->hash_clk);
997 hdev->sys_clk = devm_clk_get(&pdev->dev, "sys");
998 if (IS_ERR(hdev->sys_clk)) {
1000 err = PTR_ERR(hdev->sys_clk);
1004 err = clk_prepare_enable(hdev->hash_clk);
1008 err = clk_prepare_enable(hdev->sys_clk);
1012 err = img_hash_dma_init(hdev);
1017 dma_chan_name(hdev->dma_lch));
1020 list_add_tail(&hdev->list, &img_hash.dev_list);
1023 err = img_register_algs(hdev);
1032 list_del(&hdev->list);
1034 dma_release_channel(hdev->dma_lch);
1036 clk_disable_unprepare(hdev->sys_clk);
1038 clk_disable_unprepare(hdev->hash_clk);
1040 tasklet_kill(&hdev->done_task);
1041 tasklet_kill(&hdev->dma_task);
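
Lines 938-1041 are the probe path: allocate the device state, map both memory resources (keeping the physical address of the second one as the DMA destination), request the IRQ, get and enable the "hash" and "sys" clocks, initialise DMA and register the algorithms, unwinding in reverse order on failure. A sketch of just the resource-and-clock portion, assuming (as lines 969-974 suggest) that the second platform resource is the DMA-capable write port:

#include <linux/clk.h>
#include <linux/err.h>
#include <linux/io.h>
#include <linux/platform_device.h>

static int hash_probe_resources_sketch(struct platform_device *pdev,
				       void __iomem **io_base,
				       void __iomem **cpu_addr,
				       dma_addr_t *bus_addr)
{
	struct resource *res;
	struct clk *hash_clk, *sys_clk;
	int err;

	*io_base = devm_platform_ioremap_resource(pdev, 0);	/* line 962 */
	if (IS_ERR(*io_base))
		return PTR_ERR(*io_base);

	/* the write port is also the DMA destination, so keep its bus address */
	*cpu_addr = devm_platform_get_and_ioremap_resource(pdev, 1, &res);
	if (IS_ERR(*cpu_addr))
		return PTR_ERR(*cpu_addr);
	*bus_addr = res->start;				/* line 974 */

	hash_clk = devm_clk_get(&pdev->dev, "hash");	/* line 990 */
	if (IS_ERR(hash_clk))
		return PTR_ERR(hash_clk);

	sys_clk = devm_clk_get(&pdev->dev, "sys");	/* line 997 */
	if (IS_ERR(sys_clk))
		return PTR_ERR(sys_clk);

	err = clk_prepare_enable(hash_clk);
	if (err)
		return err;

	err = clk_prepare_enable(sys_clk);
	if (err)
		clk_disable_unprepare(hash_clk);	/* undo on failure */

	return err;
}
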
1048 struct img_hash_dev *hdev;
1050 hdev = platform_get_drvdata(pdev);
1052 list_del(&hdev->list);
1055 img_unregister_algs(hdev);
1057 tasklet_kill(&hdev->done_task);
1058 tasklet_kill(&hdev->dma_task);
1060 dma_release_channel(hdev->dma_lch);
1062 clk_disable_unprepare(hdev->hash_clk);
1063 clk_disable_unprepare(hdev->sys_clk);
1069 struct img_hash_dev *hdev = dev_get_drvdata(dev);
1071 clk_disable_unprepare(hdev->hash_clk);
1072 clk_disable_unprepare(hdev->sys_clk);
1079 struct img_hash_dev *hdev = dev_get_drvdata(dev);
1082 ret = clk_prepare_enable(hdev->hash_clk);
1086 ret = clk_prepare_enable(hdev->sys_clk);
1088 clk_disable_unprepare(hdev->hash_clk);
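
Lines 1069-1088 are the system-sleep hooks: both clocks are gated on suspend and re-enabled on resume, rolling the first one back if the second fails. A condensed sketch of that pair (the clk pointers are placeholders for hdev->hash_clk and hdev->sys_clk):

#include <linux/clk.h>

static int hash_sleep_sketch(struct clk *hash_clk, struct clk *sys_clk,
			     bool suspend)
{
	int ret;

	if (suspend) {
		clk_disable_unprepare(hash_clk);	/* cf. line 1071 */
		clk_disable_unprepare(sys_clk);		/* cf. line 1072 */
		return 0;
	}

	ret = clk_prepare_enable(hash_clk);		/* cf. line 1082 */
	if (ret)
		return ret;

	ret = clk_prepare_enable(sys_clk);		/* cf. line 1086 */
	if (ret)
		clk_disable_unprepare(hash_clk);	/* roll back, cf. line 1088 */

	return ret;
}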