Lines matching defs:rxd (rxd is the per-request DMA context, a struct sa_rx_data *, in the TI SA2UL crypto driver, drivers/crypto/sa2ul.c)

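For orientation, a minimal sketch of the per-request context these matches read and write, reconstructed only from the field accesses listed below; field order, exact types, and the comments are inferences, and fields the matches never touch are omitted.

/* Sketch only: fields inferred from the accesses listed below; anything the
 * matches never touch is left out. Types come from <linux/dmaengine.h>,
 * <linux/dma-mapping.h> and <linux/scatterlist.h>. */
struct sa_mapped_sg {
        enum dma_data_direction dir;            /* DMA_BIDIRECTIONAL for an in-place mapping */
        struct sg_table sgt;                    /* synced/unmapped via the dma_*_sgtable helpers */
};

struct sa_rx_data {
        void *req;                              /* crypto_async_request, recovered with container_of() */
        struct device *ddev;                    /* device the scatterlists were mapped for */
        struct dma_async_tx_descriptor *tx_in;  /* RX descriptor; its metadata carries the results */
        struct sa_mapped_sg mapped_sg[2];       /* [0] = source, [1] = destination */
        u8 enc;                                 /* encrypt vs. decrypt, checked at 1714 */
        u8 iv_idx;                              /* word offset of the IV inside the metadata (1057) */
        u32 enc_iv_size;                        /* IV bytes copied back in the loop at 1056 */
};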
1010 static void sa_sync_from_device(struct sa_rx_data *rxd)
1014 if (rxd->mapped_sg[0].dir == DMA_BIDIRECTIONAL)
1015 sgt = &rxd->mapped_sg[0].sgt;
1017 sgt = &rxd->mapped_sg[1].sgt;
1019 dma_sync_sgtable_for_cpu(rxd->ddev, sgt, DMA_FROM_DEVICE);
1022 static void sa_free_sa_rx_data(struct sa_rx_data *rxd)
1026 for (i = 0; i < ARRAY_SIZE(rxd->mapped_sg); i++) {
1027 struct sa_mapped_sg *mapped_sg = &rxd->mapped_sg[i];
1030 dma_unmap_sgtable(rxd->ddev, &mapped_sg->sgt,
1036 kfree(rxd);
1041 struct sa_rx_data *rxd = data;
1048 sa_sync_from_device(rxd);
1049 req = container_of(rxd->req, struct skcipher_request, base);
1052 mdptr = (__be32 *)dmaengine_desc_get_metadata_ptr(rxd->tx_in, &pl,
1056 for (i = 0; i < (rxd->enc_iv_size / 4); i++)
1057 result[i] = be32_to_cpu(mdptr[i + rxd->iv_idx]);
1060 sa_free_sa_rx_data(rxd);
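The group at 1041-1060 is the skcipher completion callback. A hedged skeleton of that pattern follows; the function name, the req->iv destination, and the final completion call are assumptions, not copied from the driver.

/* Hypothetical skeleton of the skcipher DMA-in callback; shows the ordering
 * sync -> read metadata -> free -> complete seen in the matches above.
 * Needs <linux/dmaengine.h> and <crypto/internal/skcipher.h>. */
static void sa_cipher_done_sketch(void *data)
{
        struct sa_rx_data *rxd = data;
        struct skcipher_request *req;
        __be32 *mdptr;
        u32 *result;
        size_t pl, ml;
        int i;

        sa_sync_from_device(rxd);               /* make the device's writes CPU-visible */
        req = container_of(rxd->req, struct skcipher_request, base);

        /* The updated IV comes back in the descriptor metadata. */
        mdptr = (__be32 *)dmaengine_desc_get_metadata_ptr(rxd->tx_in, &pl, &ml);
        result = (u32 *)req->iv;                /* assumption: IV is written back into req->iv */
        for (i = 0; i < (rxd->enc_iv_size / 4); i++)
                result[i] = be32_to_cpu(mdptr[i + rxd->iv_idx]);

        sa_free_sa_rx_data(rxd);                /* unmap both sg tables, then kfree(rxd) */
        skcipher_request_complete(req, 0);
}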
1082 struct sa_rx_data *rxd;
1102 rxd = kzalloc(sizeof(*rxd), gfp_flags);
1103 if (!rxd)
1128 rxd->ddev = ddev;
1156 mapped_sg = &rxd->mapped_sg[0];
1168 kfree(rxd);
1179 kfree(rxd);
1203 mapped_sg = &rxd->mapped_sg[1];
1244 rxd->tx_in = dmaengine_prep_slave_sg(dma_rx, dst, dst_nents,
1247 if (!rxd->tx_in) {
1253 rxd->req = (void *)req->base;
1254 rxd->enc = req->enc;
1255 rxd->iv_idx = req->ctx->iv_idx;
1256 rxd->enc_iv_size = sa_ctx->cmdl_upd_info.enc_iv.size;
1257 rxd->tx_in->callback = req->callback;
1258 rxd->tx_in->callback_param = rxd;
1284 dmaengine_submit(rxd->tx_in);
1292 sa_free_sa_rx_data(rxd);
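The matches from 1082 to 1292 trace the request-submission path: allocate rxd, map the scatterlists, prepare the RX descriptor, stash what the callback will need, then submit. A condensed, hypothetical sketch of that sequence follows; the signature, DMA direction and flags, and error handling are guesses, and the command-label and metadata programming between prep and submit is omitted entirely.

/* Hypothetical condensed submit path; the signature, the DMA direction and
 * flags, and the error labels are guesses, and all command-label/metadata
 * programming between prep and submit is omitted. */
static int sa_submit_sketch(struct sa_req *req, struct sa_ctx_info *sa_ctx,
                            struct device *ddev, struct dma_chan *dma_rx,
                            struct scatterlist *dst, int dst_nents,
                            gfp_t gfp_flags)
{
        struct sa_rx_data *rxd;

        rxd = kzalloc(sizeof(*rxd), gfp_flags);
        if (!rxd)
                return -ENOMEM;
        rxd->ddev = ddev;

        /* Source mapped into rxd->mapped_sg[0] (around 1156), destination into
         * rxd->mapped_sg[1] (around 1203); mapping failures kfree(rxd) and bail. */

        rxd->tx_in = dmaengine_prep_slave_sg(dma_rx, dst, dst_nents,
                                             DMA_DEV_TO_MEM,
                                             DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!rxd->tx_in)
                goto err_cleanup;

        /* Stash everything the completion callback will need. */
        rxd->req = (void *)req->base;
        rxd->enc = req->enc;
        rxd->iv_idx = req->ctx->iv_idx;
        rxd->enc_iv_size = sa_ctx->cmdl_upd_info.enc_iv.size;
        rxd->tx_in->callback = req->callback;   /* the per-algorithm *_dma_in_callback */
        rxd->tx_in->callback_param = rxd;

        dmaengine_submit(rxd->tx_in);
        /* TX-side descriptor prep/submit and dma_async_issue_pending() follow. */
        return -EINPROGRESS;

err_cleanup:
        sa_free_sa_rx_data(rxd);
        return -EINVAL;
}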
1355 struct sa_rx_data *rxd = data;
1364 sa_sync_from_device(rxd);
1365 req = container_of(rxd->req, struct ahash_request, base);
1369 mdptr = (__be32 *)dmaengine_desc_get_metadata_ptr(rxd->tx_in, &pl, &ml);
1375 sa_free_sa_rx_data(rxd);
1693 struct sa_rx_data *rxd = data;
1704 sa_sync_from_device(rxd);
1705 req = container_of(rxd->req, struct aead_request, base);
1710 mdptr = (u32 *)dmaengine_desc_get_metadata_ptr(rxd->tx_in, &pl, &ml);
1714 if (rxd->enc) {
1725 sa_free_sa_rx_data(rxd);
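The last group (1693-1725) is the AEAD completion callback, where rxd->enc selects between appending the device-computed tag to the output (encrypt) and comparing it against the tag at the end of the input (decrypt). A hedged skeleton of that shape follows; the tag's word offset in the metadata, the fixed-size locals, and the byte-order handling are assumptions.

/* Hypothetical skeleton of the AEAD DMA-in callback; the tag's word offset in
 * the metadata (4), the fixed-size locals and the byte-order handling are
 * assumptions. Needs <crypto/scatterwalk.h>, <crypto/internal/aead.h> and
 * <crypto/algapi.h>. */
static void sa_aead_done_sketch(void *data)
{
        struct sa_rx_data *rxd = data;
        struct aead_request *req;
        struct crypto_aead *tfm;
        unsigned int start, authsize;
        u8 tag[64], expected[64];               /* assumed upper bound on tag size */
        size_t pl, ml;
        u32 *mdptr;
        int err = 0;

        sa_sync_from_device(rxd);
        req = container_of(rxd->req, struct aead_request, base);
        tfm = crypto_aead_reqtfm(req);
        start = req->assoclen + req->cryptlen;
        authsize = crypto_aead_authsize(tfm);

        /* The device-computed tag is read out of the descriptor metadata. */
        mdptr = (u32 *)dmaengine_desc_get_metadata_ptr(rxd->tx_in, &pl, &ml);
        memcpy(tag, &mdptr[4], authsize);       /* offset of 4 words is an assumption */

        if (rxd->enc) {
                /* Encrypt: append the tag to the destination. */
                scatterwalk_map_and_copy(tag, req->dst, start, authsize, 1);
        } else {
                /* Decrypt: compare against the tag at the end of the source. */
                scatterwalk_map_and_copy(expected, req->src, start - authsize,
                                         authsize, 0);
                err = crypto_memneq(tag, expected, authsize) ? -EBADMSG : 0;
        }

        sa_free_sa_rx_data(rxd);
        aead_request_complete(req, err);
}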