Searched refs:mq_hctx (Results 1 - 14 of 14) sorted by relevance

/linux-master/block/
blk-mq-sched.c
    53: return rqa->mq_hctx > rqb->mq_hctx;
    59: list_first_entry(rq_list, struct request, queuelist)->mq_hctx;
    65: if (rq->mq_hctx != hctx) {
    141: if (rq->mq_hctx != hctx)
    264: } while (blk_mq_dispatch_rq_list(rq->mq_hctx, &rq_list, 1));
blk-flush.c
    324: flush_rq->mq_hctx = first_rq->mq_hctx;
    355: struct blk_mq_hw_ctx *hctx = rq->mq_hctx;
blk-mq.c
    356: rq->mq_hctx = hctx;
    566: if (blk_mq_get_hctx_type(opf) != rq->mq_hctx->type)
    708: struct blk_mq_hw_ctx *hctx = rq->mq_hctx;
    713: rq->mq_hctx = NULL;
    1109: if (nr_tags == TAG_COMP_BATCH || cur_hctx != rq->mq_hctx) {
    1113: cur_hctx = rq->mq_hctx;
    1204: if ((rq->mq_hctx->nr_ctx == 1 &&
    1262: rq->mq_hctx->tags->rqs[rq->tag] = rq;
    1269: WRITE_ONCE(rq->bio->bi_cookie, rq->mq_hctx->queue_num);
    1326: struct blk_mq_hw_ctx *hctx = rq->mq_hctx;
    [all...]
blk-mq-tag.c
    291: if (rq->q == q && (!hctx || rq->mq_hctx == hctx))
    680: return (rq->mq_hctx->queue_num << BLK_MQ_UNIQUE_TAG_BITS) |
blk-mq.h
    347: __blk_mq_put_driver_tag(rq->mq_hctx, rq);
mq-deadline.c
    943: if (dd_has_write_work(rq->mq_hctx))
    944: blk_mq_sched_mark_restart_hctx(rq->mq_hctx);
blk-mq-debugfs.c
    357: if (rq->mq_hctx == params->hctx)
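The block-layer hits above share one idiom: rq->mq_hctx records which hardware queue a request was allocated on, so core code can sort or batch requests by that pointer before dispatching them (blk-mq-sched.c:53, blk-mq.c:1109). A minimal sketch of that grouping idiom, not kernel source; my_hctx_cmp() and my_sort_by_hctx() are hypothetical names:

#include <linux/blk-mq.h>
#include <linux/list_sort.h>

/* Compare two requests by the hardware queue they were allocated on, so
 * that list_sort() clusters requests destined for the same hctx together. */
static int my_hctx_cmp(void *priv, const struct list_head *a,
		       const struct list_head *b)
{
	struct request *rqa = container_of(a, struct request, queuelist);
	struct request *rqb = container_of(b, struct request, queuelist);

	return rqa->mq_hctx > rqb->mq_hctx;
}

static void my_sort_by_hctx(struct list_head *rq_list)
{
	list_sort(NULL, rq_list, my_hctx_cmp);
}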
/linux-master/drivers/block/
virtio_blk.c
    339: struct virtio_blk *vblk = req->mq_hctx->queue->queuedata;
    468: struct virtio_blk *vblk = req->mq_hctx->queue->queuedata;
    471: return virtblk_prep_rq(req->mq_hctx, vblk, req, vbr) == BLK_STS_OK;
    507: struct virtio_blk_vq *vq = get_virtio_blk_vq(req->mq_hctx);
    517: if (!next || req->mq_hctx != next->mq_hctx) {
ublk_drv.c
    971: struct ublk_queue *ubq = req->mq_hctx->driver_data;
    1081: struct ublk_queue *ubq = req->mq_hctx->driver_data;
    1189: struct ublk_queue *ubq = rq->mq_hctx->driver_data;
    1911: if (!req->mq_hctx || !req->mq_hctx->driver_data)
    1936: ubq = req->mq_hctx->driver_data;
    1954: ubq = req->mq_hctx->driver_data;
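Both drivers above recover their per-hardware-queue context from a request via rq->mq_hctx->driver_data (ublk) or via rq->mq_hctx->queue->queuedata (virtio_blk). A sketch of the driver_data pattern; struct my_dev, struct my_hw_queue, my_init_hctx() and my_queue_rq() are hypothetical stand-ins for a driver's own types and blk_mq_ops callbacks, not kernel APIs:

#include <linux/blk-mq.h>

struct my_hw_queue {
	unsigned int qid;
	/* ...per-hardware-queue submission state... */
};

struct my_dev {
	struct my_hw_queue *queues;	/* one entry per hardware queue */
};

static int my_init_hctx(struct blk_mq_hw_ctx *hctx, void *tagset_data,
			unsigned int hctx_idx)
{
	struct my_dev *dev = tagset_data;	/* from tag_set->driver_data */

	/* Stash the per-queue state; later read back through rq->mq_hctx. */
	hctx->driver_data = &dev->queues[hctx_idx];
	return 0;
}

static blk_status_t my_queue_rq(struct blk_mq_hw_ctx *hctx,
				const struct blk_mq_queue_data *bd)
{
	struct request *rq = bd->rq;
	/* Same object the driver stored in my_init_hctx(). */
	struct my_hw_queue *hwq = rq->mq_hctx->driver_data;

	blk_mq_start_request(rq);
	/* ...submit rq on hardware queue hwq->qid... */
	return BLK_STS_OK;
}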
/linux-master/drivers/nvme/host/
pci.c
    510: struct nvme_queue *nvmeq = req->mq_hctx->driver_data;
    777: struct nvme_queue *nvmeq = req->mq_hctx->driver_data;
    936: struct nvme_queue *nvmeq = req->mq_hctx->driver_data;
    947: if (!next || req->mq_hctx != next->mq_hctx) {
    962: struct nvme_queue *nvmeq = req->mq_hctx->driver_data;
    1219: struct nvme_queue *nvmeq = req->mq_hctx->driver_data;
    1282: struct nvme_queue *nvmeq = req->mq_hctx->driver_data;
    1311: nvme_poll(req->mq_hctx, NULL);
nvme.h
    205: return req->mq_hctx->queue_num + 1;
core.c
    718: if (req->mq_hctx->type == HCTX_TYPE_POLL)
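Besides the driver_data lookups, the NVMe hits use rq->mq_hctx to derive queue properties: the hardware queue index (nvme.h:205) and the hctx type (core.c:718). Two small sketches of those derivations; my_req_qid() and my_req_is_polled() are hypothetical helper names:

#include <linux/blk-mq.h>

/* 0-based hardware context index mapped to a 1-based NVMe I/O queue id
 * (qid 0 is the admin queue), mirroring the nvme.h hit above. */
static inline u16 my_req_qid(struct request *req)
{
	return req->mq_hctx->queue_num + 1;
}

/* Polled I/O is served by a dedicated hctx type, as in the core.c hit. */
static inline bool my_req_is_polled(struct request *req)
{
	return req->mq_hctx->type == HCTX_TYPE_POLL;
}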
/linux-master/include/linux/
blk-mq.h
    82: struct blk_mq_hw_ctx *mq_hctx;    (member of struct request)
/linux-master/drivers/block/null_blk/
main.c
    1518: struct blk_mq_hw_ctx *hctx = rq->mq_hctx;
    1615: ret = null_queue_rq(rq->mq_hctx, &bd);
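null_blk reads rq->mq_hctx in its timeout path to get back to the hardware queue of a stuck request. A sketch of a timeout handler in that style, assuming the current one-argument blk_mq_ops->timeout signature; my_timeout_rq() is a hypothetical name:

#include <linux/blk-mq.h>
#include <linux/printk.h>

static enum blk_eh_timer_return my_timeout_rq(struct request *rq)
{
	struct blk_mq_hw_ctx *hctx = rq->mq_hctx;

	pr_warn("tag %d timed out on hw queue %u\n", rq->tag, hctx->queue_num);

	/* Kick the hardware queue and give the request more time. */
	blk_mq_run_hw_queue(hctx, true);
	return BLK_EH_RESET_TIMER;
}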

Completed in 244 milliseconds