Searched refs:rq (Results 201 - 225 of 636) sorted by relevance

/linux-master/drivers/gpu/drm/i915/gt/
intel_engine_cs.c
1296 struct i915_request rq; member in struct:measure_breadcrumb
1313 frame->rq.i915 = engine->i915;
1314 frame->rq.engine = engine;
1315 frame->rq.context = ce;
1316 rcu_assign_pointer(frame->rq.timeline, ce->timeline);
1317 frame->rq.hwsp_seqno = ce->timeline->hwsp_seqno;
1325 frame->rq.ring = &frame->ring;
1330 dw = engine->emit_fini_breadcrumb(&frame->rq, frame->cs) - frame->cs;
1986 static struct intel_timeline *get_timeline(struct i915_request *rq) argument
1999 tl = rcu_dereference(rq
2007 print_ring(char *buf, int sz, struct i915_request *rq) argument
2135 struct i915_request * const *port, *rq; local
2210 print_request_ring(struct drm_printer *m, struct i915_request *rq) argument
2279 engine_dump_request(struct i915_request *rq, struct drm_printer *m, const char *msg) argument
2314 struct i915_request *rq; local
2371 struct i915_request *rq; local
2508 intel_engine_get_hung_entity(struct intel_engine_cs *engine, struct intel_context **ce, struct i915_request **rq) argument
[all...]
intel_renderstate.h
48 struct i915_request *rq);
selftest_engine_heartbeat.c
207 struct i915_request *rq; local
240 rq = READ_ONCE(engine->heartbeat.systole);
241 if (rq)
242 rq = i915_request_get_rcu(rq);
244 } while (!rq);
247 while (rq == READ_ONCE(engine->heartbeat.systole))
251 i915_request_put(rq);
intel_engine_types.h
526 int (*request_alloc)(struct i915_request *rq);
532 int (*emit_bb_start)(struct i915_request *rq,
537 int (*emit_init_breadcrumb)(struct i915_request *rq);
538 u32 *(*emit_fini_breadcrumb)(struct i915_request *rq,
548 void (*submit_request)(struct i915_request *rq);
555 void (*add_active_request)(struct i915_request *rq);
556 void (*remove_active_request)(struct i915_request *rq);
intel_gt.c
546 struct i915_request *rq; local
561 rq = i915_request_create(ce);
562 if (IS_ERR(rq)) {
563 err = PTR_ERR(rq);
567 err = intel_engine_emit_ctx_wa(rq);
571 err = intel_renderstate_emit(&so, rq);
576 requests[id] = i915_request_get(rq);
577 i915_request_add(rq);
594 struct i915_request *rq; local
597 rq
630 struct i915_request *rq; local
[all...]
/linux-master/drivers/net/ethernet/intel/igb/
igb_ptp.c
490 struct ptp_clock_request *rq, int on)
503 switch (rq->type) {
506 if (rq->extts.flags & ~(PTP_ENABLE_FEATURE |
514 rq->extts.index);
518 if (rq->extts.index == 1) {
529 igb_pin_extts(igb, rq->extts.index, pin);
543 if (rq->perout.flags)
548 rq->perout.index);
552 ts.tv_sec = rq->perout.period.sec;
553 ts.tv_nsec = rq
489 igb_ptp_feature_enable_82580(struct ptp_clock_info *ptp, struct ptp_clock_request *rq, int on) argument
641 igb_ptp_feature_enable_i210(struct ptp_clock_info *ptp, struct ptp_clock_request *rq, int on) argument
787 igb_ptp_feature_enable(struct ptp_clock_info *ptp, struct ptp_clock_request *rq, int on) argument
[all...]
/linux-master/arch/alpha/include/asm/
agp_backend.h
15 u32 rq : 8; member in struct:_alpha_agp_mode::__anon3
/linux-master/drivers/net/ethernet/cisco/enic/
enic_clsf.h
10 int enic_addfltr_5t(struct enic *enic, struct flow_keys *keys, u16 rq);
/linux-master/drivers/net/ethernet/mellanox/mlx5/core/fpga/
conn.h
82 } rq; member in struct:mlx5_fpga_conn::__anon1228
conn.c
110 if (unlikely(conn->qp.rq.pc - conn->qp.rq.cc >= conn->qp.rq.size)) {
115 ix = conn->qp.rq.pc & (conn->qp.rq.size - 1);
116 data = mlx5_wq_cyc_get_wqe(&conn->qp.wq.rq, ix);
121 conn->qp.rq.pc++;
122 conn->qp.rq.bufs[ix] = buf;
126 *conn->qp.wq.rq.db = cpu_to_be32(conn->qp.rq
[all...]
/linux-master/drivers/gpu/drm/i915/gem/
i915_gem_wait.c
95 struct i915_request *rq; local
101 rq = to_request(fence);
102 engine = rq->engine;
106 engine->sched_engine->schedule(rq, attr);
i915_gem_execbuffer.c
2043 struct i915_request *rq = eb->requests[j]; local
2045 if (!rq)
2048 rq->capture_list = eb->capture_lists[j];
2221 static int i915_reset_gen7_sol_offsets(struct i915_request *rq) argument
2226 if (GRAPHICS_VER(rq->i915) != 7 || rq->engine->id != RCS0) {
2227 drm_dbg(&rq->i915->drm, "sol reset is gen7/rcs only\n");
2231 cs = intel_ring_begin(rq, 4 * 2 + 2);
2241 intel_ring_advance(rq, cs);
2385 struct i915_request *rq,
2384 eb_request_submit(struct i915_execbuffer *eb, struct i915_request *rq, struct i915_vma *batch, u64 batch_len) argument
2484 struct i915_request *rq; local
2518 struct i915_request *rq = NULL; local
2998 await_fence_array(struct i915_execbuffer *eb, struct i915_request *rq) argument
3059 struct i915_request *rq, *rn; local
3066 eb_request_add(struct i915_execbuffer *eb, struct i915_request *rq, int err, bool last_parallel) argument
3121 struct i915_request *rq = eb->requests[i]; local
3229 eb_fences_add(struct i915_execbuffer *eb, struct i915_request *rq, struct dma_fence *in_fence, int out_fence_fd) argument
[all...]
/linux-master/drivers/infiniband/sw/rdmavt/
qp.h
28 int rvt_alloc_rq(struct rvt_rq *rq, u32 size, int node,
/linux-master/drivers/net/ethernet/mellanox/mlx5/core/en/
xdp.h
51 struct mlx5e_rq *rq; member in struct:mlx5e_xdp_buff
97 struct mlx5e_rq *rq; member in union:mlx5e_xdp_info::__anon1457
106 bool mlx5e_xdp_handle(struct mlx5e_rq *rq,
112 void mlx5e_xdp_rx_poll_complete(struct mlx5e_rq *rq);
/linux-master/drivers/mmc/core/
queue.h
25 static inline struct mmc_queue_req *req_to_mmc_queue_req(struct request *rq) argument
27 return blk_mq_rq_to_pdu(rq);
/linux-master/drivers/infiniband/hw/mlx5/
qp.h
40 struct mlx5_core_qp *rq);
47 struct mlx5_core_qp *rq);
/linux-master/drivers/gpu/drm/i915/selftests/
intel_scheduler_helpers.c
92 int intel_selftest_wait_for_rq(struct i915_request *rq) argument
96 ret = i915_request_wait(rq, 0, msecs_to_jiffies(WAIT_FOR_RESET_TIME_MS));
/linux-master/drivers/infiniband/hw/qedr/
qedr_roce_cm.c
106 qp->rqe_wr_id[qp->rq.gsi_cons].rc = data->u.data_length_error ?
108 qp->rqe_wr_id[qp->rq.gsi_cons].vlan = data->vlan;
110 qp->rqe_wr_id[qp->rq.gsi_cons].sg_list[0].length =
112 *((u32 *)&qp->rqe_wr_id[qp->rq.gsi_cons].smac[0]) =
114 *((u16 *)&qp->rqe_wr_id[qp->rq.gsi_cons].smac[4]) =
117 qedr_inc_sw_gsi_cons(&qp->rq);
339 qp->rq.max_wr = attrs->cap.max_recv_wr;
342 qp->rqe_wr_id = kcalloc(qp->rq.max_wr, sizeof(*qp->rqe_wr_id),
651 memset(&qp->rqe_wr_id[qp->rq.prod], 0,
652 sizeof(qp->rqe_wr_id[qp->rq
[all...]
/linux-master/drivers/scsi/
sr.c
301 struct request *rq = scsi_cmd_to_rq(SCpnt); local
302 struct scsi_cd *cd = scsi_cd(rq->q->disk);
324 if (rq->bio != NULL)
325 block_sectors = bio_sectors(rq->bio);
331 good_bytes = (error_sector - blk_rq_pos(rq)) << 9;
363 struct request *rq = scsi_cmd_to_rq(SCpnt); local
369 cd = scsi_cd(rq->q->disk);
376 "Finishing %u sectors\n", blk_rq_sectors(rq)));
396 switch (req_op(rq)) {
407 blk_dump_rq_flags(rq, "Unknow
932 struct request *rq; local
[all...]
sd_zbc.c
299 struct request *rq = scsi_cmd_to_rq(cmd); local
300 struct scsi_disk *sdkp = scsi_disk(rq->q->disk);
301 sector_t sector = blk_rq_pos(rq);
330 struct request *rq = scsi_cmd_to_rq(cmd); local
331 sector_t sector = blk_rq_pos(rq);
332 struct scsi_disk *sdkp = scsi_disk(rq->q->disk);
349 rq->timeout = SD_TIMEOUT;
370 struct request *rq = scsi_cmd_to_rq(cmd); local
372 if (op_is_zone_mgmt(req_op(rq)) &&
381 rq
[all...]
/linux-master/drivers/infiniband/sw/rxe/
rxe_qp.c
176 spin_lock_init(&qp->rq.producer_lock);
177 spin_lock_init(&qp->rq.consumer_lock);
286 qp->rq.max_wr = init->cap.max_recv_wr;
287 qp->rq.max_sge = init->cap.max_recv_sge;
289 qp->rq.max_sge*sizeof(struct ib_sge);
291 qp->rq.queue = rxe_queue_init(rxe, &qp->rq.max_wr, wqe_size,
293 if (!qp->rq.queue) {
301 qp->rq.queue->buf, qp->rq
[all...]
/linux-master/drivers/infiniband/hw/cxgb4/
restrack.c
70 if (rdma_nl_put_driver_u32(msg, "rqid", wq->rq.qid))
72 if (rdma_nl_put_driver_u32(msg, "memsize", wq->rq.memsize))
74 if (rdma_nl_put_driver_u32(msg, "cidx", wq->rq.cidx))
76 if (rdma_nl_put_driver_u32(msg, "pidx", wq->rq.pidx))
78 if (rdma_nl_put_driver_u32(msg, "wq_pidx", wq->rq.wq_pidx))
80 if (rdma_nl_put_driver_u32(msg, "msn", wq->rq.msn))
82 if (rdma_nl_put_driver_u32_hex(msg, "rqt_hwaddr", wq->rq.rqt_hwaddr))
84 if (rdma_nl_put_driver_u32(msg, "rqt_size", wq->rq.rqt_size))
86 if (rdma_nl_put_driver_u32(msg, "in_use", wq->rq.in_use))
88 if (rdma_nl_put_driver_u32(msg, "size", wq->rq
[all...]
/linux-master/drivers/net/dsa/mv88e6xxx/
ptp.c
274 struct ptp_clock_request *rq, int on)
276 int rising = (rq->extts.flags & PTP_RISING_EDGE);
282 if (rq->extts.flags & ~(PTP_ENABLE_FEATURE |
289 if ((rq->extts.flags & PTP_STRICT_FLAGS) &&
290 (rq->extts.flags & PTP_ENABLE_FEATURE) &&
291 (rq->extts.flags & PTP_EXTTS_EDGES) == PTP_EXTTS_EDGES)
294 pin = ptp_find_pin(chip->ptp_clock, PTP_PF_EXTTS, rq->extts.index);
327 struct ptp_clock_request *rq, int on)
331 switch (rq->type) {
333 return mv88e6352_ptp_enable_extts(chip, rq, o
273 mv88e6352_ptp_enable_extts(struct mv88e6xxx_chip *chip, struct ptp_clock_request *rq, int on) argument
326 mv88e6352_ptp_enable(struct ptp_clock_info *ptp, struct ptp_clock_request *rq, int on) argument
[all...]
/linux-master/kernel/trace/
blktrace.c
802 blk_trace_request_get_cgid(struct request *rq) argument
804 if (!rq->bio)
807 return blk_trace_bio_get_cgid(rq->q, rq->bio);
816 * @rq: the source request
826 static void blk_add_trace_rq(struct request *rq, blk_status_t error, argument
832 bt = rcu_dereference(rq->q->blk_trace);
838 if (blk_rq_is_passthrough(rq))
843 __blk_add_trace(bt, blk_rq_trace_sector(rq), nr_bytes, rq
848 blk_add_trace_rq_insert(void *ignore, struct request *rq) argument
854 blk_add_trace_rq_issue(void *ignore, struct request *rq) argument
860 blk_add_trace_rq_merge(void *ignore, struct request *rq) argument
866 blk_add_trace_rq_requeue(void *ignore, struct request *rq) argument
872 blk_add_trace_rq_complete(void *ignore, struct request *rq, blk_status_t error, unsigned int nr_bytes) argument
1039 blk_add_trace_rq_remap(void *ignore, struct request *rq, dev_t dev, sector_t from) argument
1072 blk_add_driver_data(struct request *rq, void *data, size_t len) argument
[all...]
/linux-master/drivers/net/ethernet/microchip/lan966x/
lan966x_ptp.c
808 struct ptp_clock_request *rq, int on)
818 if (rq->perout.flags & ~(PTP_PEROUT_DUTY_CYCLE |
822 pin = ptp_find_pin(phc->clock, PTP_PF_PEROUT, rq->perout.index);
839 if (rq->perout.period.sec == 1 &&
840 rq->perout.period.nsec == 0)
843 if (rq->perout.flags & PTP_PEROUT_PHASE) {
844 ts_phase.tv_sec = rq->perout.phase.sec;
845 ts_phase.tv_nsec = rq->perout.phase.nsec;
847 ts_phase.tv_sec = rq->perout.start.sec;
848 ts_phase.tv_nsec = rq
807 lan966x_ptp_perout(struct ptp_clock_info *ptp, struct ptp_clock_request *rq, int on) argument
908 lan966x_ptp_extts(struct ptp_clock_info *ptp, struct ptp_clock_request *rq, int on) argument
953 lan966x_ptp_enable(struct ptp_clock_info *ptp, struct ptp_clock_request *rq, int on) argument
[all...]

Completed in 429 milliseconds
