Lines Matching defs:qr

776 struct qat_ring *qr = &qb->qb_et_rings[i];
783 qr->qr_inflight = qat_alloc_mem(sizeof(uint32_t));
787 qr->qr_inflight =
811 struct qat_ring *qr;
817 qr = &qb->qb_et_rings[i];
818 qat_free_mem(qr->qr_inflight);
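Lines 776-818 pair the allocation of a per-ring in-flight counter at bank init with its release at bank teardown. Below is a minimal userspace model of that pairing; the struct names, NRINGS, and calloc()/free() are placeholders standing in for the driver's qat_bank/qat_ring types and qat_alloc_mem()/qat_free_mem(), not its real definitions.

#include <stdint.h>
#include <stdlib.h>

#define NRINGS 16				/* hypothetical rings per bank */

struct ring {
	uint32_t *inflight;			/* outstanding-request counter */
};

struct bank {
	struct ring rings[NRINGS];
};

static int
bank_init(struct bank *b)
{
	for (int i = 0; i < NRINGS; i++) {
		struct ring *r = &b->rings[i];

		/* stands in for qat_alloc_mem(sizeof(uint32_t)) */
		r->inflight = calloc(1, sizeof(uint32_t));
		if (r->inflight == NULL)
			return (-1);
	}
	return (0);
}

static void
bank_deinit(struct bank *b)
{
	for (int i = 0; i < NRINGS; i++)
		free(b->rings[i].inflight);	/* stands in for qat_free_mem() */
}

Note that line 787 shows a second, truncated assignment to qr_inflight, hinting that some rings share a counter rather than allocating their own; the sketch above does not model that.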
881 qat_etr_ap_bank_setup_ring(struct qat_softc *sc, struct qat_ring *qr)
889 ap_bank = ETR_RING_AP_BANK_NUMBER(qr->qr_ring);
893 if (qr->qr_cb == NULL) {
894 qat_etr_ap_bank_set_ring_mask(&qab->qab_ne_mask, qr->qr_ring, 1);
897 qr->qr_ring, 1);
902 qat_etr_ap_bank_set_ring_mask(&qab->qab_nf_mask, qr->qr_ring, 1);
905 qr->qr_ring, 1);
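Lines 881-905 route a ring into one of two per-AP-bank masks depending on whether it has a callback. The sketch below only mirrors that branch; reading qab_ne_mask/qab_nf_mask as nearly-empty/nearly-full masks is an assumption, the ring-to-bit mapping is simplified, and the types and helper are stand-ins for the driver's qat_ap_bank and qat_etr_ap_bank_set_ring_mask().

#include <stdbool.h>
#include <stdint.h>

struct ap_bank {
	uint32_t ne_mask;	/* "ne" read as nearly-empty (assumption) */
	uint32_t nf_mask;	/* "nf" read as nearly-full (assumption) */
};

/* set or clear one ring's bit in a mask; the mapping from a global ring
 * number to a per-bank bit position is simplified to ring % 32 here */
static void
set_ring_mask(uint32_t *mask, int ring, bool set)
{
	if (set)
		*mask |= 1u << (ring % 32);
	else
		*mask &= ~(1u << (ring % 32));
}

static void
ap_bank_setup_ring(struct ap_bank *ab, int ring, bool has_callback)
{
	if (!has_callback)
		set_ring_mask(&ab->ne_mask, ring, true);
	else
		set_ring_mask(&ab->nf_mask, ring, true);
}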
930 struct qat_ring *qr = NULL;
946 qr = &qb->qb_et_rings[ring];
950 qr->qr_ring = ring;
951 qr->qr_bank = bank;
952 qr->qr_name = name;
953 qr->qr_ring_id = qr->qr_bank * sc->sc_hw.qhw_num_rings_per_bank + ring;
954 qr->qr_ring_mask = (1 << ring);
955 qr->qr_cb = cb;
956 qr->qr_cb_arg = cb_arg;
959 qr->qr_head = 0;
960 qr->qr_tail = 0;
961 qr->qr_msg_size = QAT_BYTES_TO_MSG_SIZE(msg_size);
962 qr->qr_ring_size = qat_etr_verify_ring_size(msg_size, num_msgs);
968 ring_size_bytes = QAT_SIZE_TO_RING_SIZE_IN_BYTES(qr->qr_ring_size);
970 error = qat_alloc_dmamem(sc, &qr->qr_dma, 1, ring_size_bytes,
975 qr->qr_ring_vaddr = qr->qr_dma.qdm_dma_vaddr;
976 qr->qr_ring_paddr = qr->qr_dma.qdm_dma_seg.ds_addr;
978 memset(qr->qr_ring_vaddr, QAT_RING_PATTERN,
979 qr->qr_dma.qdm_dma_seg.ds_len);
981 bus_dmamap_sync(qr->qr_dma.qdm_dma_tag, qr->qr_dma.qdm_dma_map,
985 ring_config = ETR_RING_CONFIG_BUILD(qr->qr_ring_size);
988 ETR_RING_CONFIG_BUILD_RESP(qr->qr_ring_size, wm_nf, wm_ne);
992 ring_base = ETR_RING_BASE_BUILD(qr->qr_ring_paddr, qr->qr_ring_size);
998 mtx_init(&qr->qr_ring_mtx, "qr ring", NULL, MTX_DEF);
1000 qat_etr_ap_bank_setup_ring(sc, qr);
1005 qb->qb_intr_mask |= qr->qr_ring_mask;
1013 *rqr = qr;
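Lines 930-1013 show per-ring setup: the ring's id and mask are derived from its bank and index, the message and ring sizes are converted and validated, DMA memory is allocated and pre-filled with a pattern, the config/base register values are built, and the ring mutex and the bank's interrupt mask are initialized. The condensed userspace sketch below covers only the bookkeeping part of that sequence; RINGS_PER_BANK and RING_PATTERN are hypothetical values, plain malloc() stands in for qat_alloc_dmamem(), and the ETR_RING_*_BUILD register encodings are left out entirely.

#include <stdint.h>
#include <stdlib.h>
#include <string.h>

#define RINGS_PER_BANK	16		/* hypothetical */
#define RING_PATTERN	0x7f		/* fill byte, standing in for QAT_RING_PATTERN */

struct ring {
	int		 bank, ring;	/* bank number, index within the bank */
	int		 ring_id;	/* global ring number */
	uint32_t	 ring_mask;
	uint32_t	 head, tail;	/* byte offsets into the buffer */
	size_t		 msg_bytes;	/* bytes per message */
	size_t		 ring_bytes;	/* total buffer size */
	uint8_t		*vaddr;
};

static int
ring_setup(struct ring *r, int bank, int ring, size_t msg_bytes, size_t num_msgs)
{
	r->bank = bank;
	r->ring = ring;
	r->ring_id = bank * RINGS_PER_BANK + ring;
	r->ring_mask = 1u << ring;
	r->head = r->tail = 0;
	r->msg_bytes = msg_bytes;
	r->ring_bytes = msg_bytes * num_msgs;	/* the driver validates and encodes this */

	/* the driver allocates DMA-able memory and records both the virtual
	 * and the physical address; ordinary memory is enough for the sketch */
	r->vaddr = malloc(r->ring_bytes);
	if (r->vaddr == NULL)
		return (-1);

	/* pre-fill so never-written entries are recognizable */
	memset(r->vaddr, RING_PATTERN, r->ring_bytes);
	return (0);
}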
1027 qat_etr_put_msg(struct qat_softc *sc, struct qat_ring *qr, uint32_t *msg)
1032 mtx_lock(&qr->qr_ring_mtx);
1034 inflight = atomic_fetchadd_32(qr->qr_inflight, 1) + 1;
1035 if (inflight > QAT_MAX_INFLIGHTS(qr->qr_ring_size, qr->qr_msg_size)) {
1036 atomic_subtract_32(qr->qr_inflight, 1);
1037 qr->qr_need_wakeup = true;
1038 mtx_unlock(&qr->qr_ring_mtx);
1043 addr = (uint32_t *)((uintptr_t)qr->qr_ring_vaddr + qr->qr_tail);
1045 memcpy(addr, msg, QAT_MSG_SIZE_TO_BYTES(qr->qr_msg_size));
1047 bus_dmamap_sync(qr->qr_dma.qdm_dma_tag, qr->qr_dma.qdm_dma_map,
1050 qr->qr_tail = qat_modulo(qr->qr_tail +
1051 QAT_MSG_SIZE_TO_BYTES(qr->qr_msg_size),
1052 QAT_RING_SIZE_MODULO(qr->qr_ring_size));
1054 qat_etr_bank_ring_write_4(sc, qr->qr_bank, qr->qr_ring,
1055 ETR_RING_TAIL_OFFSET, qr->qr_tail);
1057 mtx_unlock(&qr->qr_ring_mtx);
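Lines 1027-1057 are the submit path: under the ring lock, the shared in-flight counter is bumped and the submission is backed out if the ring would overflow; otherwise the message is copied at the tail, the tail advances modulo the ring size, and the new tail is written to the ring's tail register. The sketch below follows that sequence with pthread_mutex and C11 atomics standing in for the kernel mutex and atomic_fetchadd_32()/atomic_subtract_32(); the DMA sync and the CSR write are reduced to comments, and ring_bytes being a power of two is an assumption of the sketch.

#include <pthread.h>
#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>
#include <string.h>

struct ring {
	pthread_mutex_t	 mtx;
	atomic_uint	*inflight;	/* may be shared with a paired ring */
	uint32_t	 max_inflight;	/* limit derived from ring and message size */
	uint32_t	 tail;		/* byte offset of the next free slot */
	uint32_t	 ring_bytes;	/* total buffer size, a power of two here */
	uint32_t	 msg_bytes;	/* bytes per message */
	uint8_t		*vaddr;
	bool		 need_wakeup;
};

static int
ring_put_msg(struct ring *r, const void *msg)
{
	pthread_mutex_lock(&r->mtx);

	/* reserve a slot; undo and request a wakeup if the ring is full */
	if (atomic_fetch_add(r->inflight, 1) + 1 > r->max_inflight) {
		atomic_fetch_sub(r->inflight, 1);
		r->need_wakeup = true;
		pthread_mutex_unlock(&r->mtx);
		return (-1);		/* caller retries after completions drain */
	}

	memcpy(r->vaddr + r->tail, msg, r->msg_bytes);

	/* advance and wrap the tail, as qat_modulo() does with the ring size */
	r->tail = (r->tail + r->msg_bytes) & (r->ring_bytes - 1);

	/* the driver syncs the DMA map and writes r->tail to the tail CSR here */
	pthread_mutex_unlock(&r->mtx);
	return (0);
}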
1064 struct qat_ring *qr)
1070 mtx_lock(&qr->qr_ring_mtx);
1072 msg = (uint32_t *)((uintptr_t)qr->qr_ring_vaddr + qr->qr_head);
1074 bus_dmamap_sync(qr->qr_dma.qdm_dma_tag, qr->qr_dma.qdm_dma_map,
1078 atomic_subtract_32(qr->qr_inflight, 1);
1080 if (qr->qr_cb != NULL) {
1081 mtx_unlock(&qr->qr_ring_mtx);
1082 handled |= qr->qr_cb(sc, qr->qr_cb_arg, msg);
1083 mtx_lock(&qr->qr_ring_mtx);
1088 qr->qr_head = qat_modulo(qr->qr_head +
1089 QAT_MSG_SIZE_TO_BYTES(qr->qr_msg_size),
1090 QAT_RING_SIZE_MODULO(qr->qr_ring_size));
1093 msg = (uint32_t *)((uintptr_t)qr->qr_ring_vaddr + qr->qr_head);
1096 bus_dmamap_sync(qr->qr_dma.qdm_dma_tag, qr->qr_dma.qdm_dma_map,
1100 qat_etr_bank_ring_write_4(sc, qr->qr_bank, qr->qr_ring,
1101 ETR_RING_HEAD_OFFSET, qr->qr_head);
1102 if (qr->qr_need_wakeup) {
1104 qr->qr_need_wakeup = false;
1108 mtx_unlock(&qr->qr_ring_mtx);
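Lines 1064-1108 are the completion path: under the ring lock, the message at the head is taken, the in-flight count drops, the completion callback runs with the lock released, and the head advances and is written back to the head register, with a wakeup for any stalled submitter. The sketch below follows that shape; the bus_dmamap_sync() calls are omitted, EMPTY_SIG and the re-marking of consumed slots are assumptions of the sketch (the lines above do not show how the real driver detects empty entries), and the types are the same simplified stand-ins as before.

#include <pthread.h>
#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>

/* an entry whose first word still holds this value is treated as empty;
 * the value and the check are assumptions of the sketch */
#define EMPTY_SIG	0x7f7f7f7fu

struct ring {
	pthread_mutex_t	 mtx;
	atomic_uint	*inflight;
	uint32_t	 head;		/* byte offset of the next unread entry */
	uint32_t	 ring_bytes;	/* power of two */
	uint32_t	 msg_bytes;	/* multiple of 4 assumed */
	uint8_t		*vaddr;
	bool		 need_wakeup;
	int		(*cb)(void *arg, uint32_t *msg);
	void		*cb_arg;
};

static int
ring_intr(struct ring *r)
{
	uint32_t *msg;
	int handled = 0;

	pthread_mutex_lock(&r->mtx);
	msg = (uint32_t *)(r->vaddr + r->head);

	while (*msg != EMPTY_SIG) {
		atomic_fetch_sub(r->inflight, 1);

		if (r->cb != NULL) {
			/* run the completion handler without holding the lock */
			pthread_mutex_unlock(&r->mtx);
			handled |= r->cb(r->cb_arg, msg);
			pthread_mutex_lock(&r->mtx);
		}

		*msg = EMPTY_SIG;	/* mark the slot free so the loop terminates */
		r->head = (r->head + r->msg_bytes) & (r->ring_bytes - 1);
		msg = (uint32_t *)(r->vaddr + r->head);
	}

	/* the driver writes r->head to the head CSR here and, if need_wakeup
	 * was set by a stalled submitter, wakes it up before clearing it */
	if (r->need_wakeup)
		r->need_wakeup = false;

	pthread_mutex_unlock(&r->mtx);
	return (handled);
}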
1138 struct qat_ring *qr = &qb->qb_et_rings[--i];
1140 handled |= qat_etr_ring_intr(sc, qb, qr);
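The last two lines come from the bank-level handler, which picks each ring that raised the interrupt and calls qat_etr_ring_intr() on it. The pre-decrement suggests iteration over a status mask with a 1-based ffs(); that surrounding context is a guess rather than the driver's code, and the sketch below only demonstrates that dispatch pattern with illustrative names.

#include <stdint.h>
#include <stdio.h>
#include <strings.h>		/* ffs() */

static int
ring_intr(int ring)
{
	printf("servicing ring %d\n", ring);
	return (1);
}

static int
bank_intr(uint32_t status)
{
	int i, handled = 0;

	/* ffs() returns the 1-based index of the lowest set bit, or 0 */
	while ((i = ffs(status)) != 0) {
		--i;			/* convert to a 0-based ring index */
		status &= ~(1u << i);
		handled |= ring_intr(i);
	}
	return (handled);
}

int
main(void)
{
	/* example: rings 1 and 5 raised the interrupt */
	return (bank_intr((1u << 1) | (1u << 5)) ? 0 : 1);
}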