Lines Matching refs:wqe

Cross-reference hits for the work queue entry (WQE) handling in the mlx4 InfiniBand driver (drivers/infiniband/hw/mlx4/qp.c). The clusters below follow a send WQE through its life: stamping (218-241), NOP posting (250-266), the MLX and tunnel header builders (2299-2927), and the main mlx4_ib_post_send() walk (2935-3226).

218 __be32 *wqe;
233 wqe = buf + (i & ((1 << qp->sq.wqe_shift) - 1));
234 *wqe = stamp;
240 wqe = buf + i;
241 *wqe = cpu_to_be32(0xffffffff);
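
The first cluster (218-241) is the stamping path: the driver marks a dword in every 64-byte chunk of a send WQE with 0xffffffff so a stale entry is invalid if the HCA prefetches it, and all slot/offset arithmetic is power-of-two masking. Below is a minimal userspace sketch of that indexing; WQE_SHIFT, SQ_WQE_CNT and STAMP are illustrative values, and the driver's alternating 0x7fffffff/0xffffffff stamp (the stamp variable at 233-234) is simplified to a single marker:

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    #define WQE_SHIFT  6                       /* log2 of the slot stride; 64 B assumed */
    #define WQE_SIZE   (1u << WQE_SHIFT)
    #define SQ_WQE_CNT 8u                      /* power of two, as the driver requires */
    #define STAMP      0xffffffffu

    static unsigned char sq_buf[SQ_WQE_CNT * WQE_SIZE];

    static void *get_send_wqe(unsigned n)
    {
        /* Wrap with a mask, never %: mirrors "n & (qp->sq.wqe_cnt - 1)". */
        return sq_buf + (n & (SQ_WQE_CNT - 1)) * WQE_SIZE;
    }

    static void stamp_send_wqe(unsigned n, unsigned size)
    {
        uint32_t stamp = STAMP;
        unsigned i;

        /* One marker dword per 64-byte chunk; the offset mask mirrors
         * "i & ((1 << qp->sq.wqe_shift) - 1)" at line 233. */
        for (i = 0; i < size; i += 64) {
            unsigned char *buf = get_send_wqe(n + (i >> WQE_SHIFT));
            memcpy(buf + (i & (WQE_SIZE - 1)), &stamp, sizeof(stamp));
        }
    }

    int main(void)
    {
        uint32_t v;

        stamp_send_wqe(3, 2 * WQE_SIZE);       /* a WQE spanning two slots */
        memcpy(&v, get_send_wqe(4), sizeof(v));
        printf("slot 4 marker: 0x%08x\n", v);
        return 0;
    }
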
250 void *wqe;
253 ctrl = wqe = get_send_wqe(qp, n & (qp->sq.wqe_cnt - 1));
257 struct mlx4_wqe_datagram_seg *dgram = wqe + sizeof *ctrl;
266 inl = wqe + s;
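
Lines 250-266 come from the NOP-posting helper: one pointer returned by get_send_wqe() is read through several typed views, with the control segment at the head of the slot, the datagram segment right behind it, and an inline segment further in at a computed offset. A sketch of that cursor-and-overlay layout, with placeholder fields standing in for the hardware-defined mlx4_wqe_* layouts:

    #include <stdint.h>
    #include <stdio.h>

    /* Placeholder layouts: the real mlx4_wqe_* structs are the device's
     * wire format; these only model their sizes and ordering. */
    struct ctrl_seg     { uint32_t owner_opcode, fence_size, srcrb, imm; };
    struct datagram_seg { uint32_t av[8]; uint32_t dqpn, qkey, rsvd[2]; };
    struct inline_seg   { uint32_t byte_count; };

    int main(void)
    {
        uint32_t slot[32] = { 0 };             /* one 128-byte send-queue slot */
        void *wqe = slot;

        /* "ctrl = wqe = get_send_wqe(...)": two names for the slot head. */
        struct ctrl_seg *ctrl = wqe;
        struct datagram_seg *dgram = (void *)((char *)wqe + sizeof *ctrl);
        struct inline_seg *inl =
            (void *)((char *)wqe + sizeof *ctrl + sizeof *dgram);

        ctrl->owner_opcode = 0;                /* a NOP opcode would go here */
        inl->byte_count = 0;
        printf("dgram at +%td, inl at +%td\n",
               (char *)dgram - (char *)wqe, (char *)inl - (char *)wqe);
        return 0;
    }
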
2299 void *wqe, unsigned *mlx_seg_len)
2303 struct mlx4_wqe_mlx_seg *mlx = wqe;
2304 struct mlx4_wqe_inline_seg *inl = wqe + sizeof *mlx;
2428 void *wqe, unsigned *mlx_seg_len)
2431 struct mlx4_wqe_mlx_seg *mlx = wqe;
2432 struct mlx4_wqe_ctrl_seg *ctrl = wqe;
2433 struct mlx4_wqe_inline_seg *inl = wqe + sizeof *mlx;
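
Both header-builder clusters (2299-2304 and 2428-2433) lean on the same aliasing: mlx and ctrl are two 16-byte typed views of the same WQE head, and the inline segment carrying the raw packet headers starts immediately after. A hedged sketch of the pattern; the field names are placeholders, and the driver's inline-flag OR and byte-swapping of byte_count are omitted:

    #include <stdint.h>
    #include <string.h>

    struct mlx_seg    { uint32_t owner_opcode, flags, flags2, rlid; };   /* 16 B view */
    struct ctrl_seg   { uint32_t owner_opcode, vlan_cv, srcrb, imm; };   /* 16 B view */
    struct inline_seg { uint32_t byte_count; };

    /* mlx and ctrl name the same 16 bytes at the slot head; the inline
     * segment with the raw header bytes starts right behind them. */
    static void build_header(void *wqe, const void *hdr, uint32_t hlen)
    {
        struct mlx_seg *mlx = wqe;
        struct ctrl_seg *ctrl = wqe;           /* second view of the same bytes */
        struct inline_seg *inl = (void *)((char *)wqe + sizeof *mlx);

        ctrl->srcrb = 0;                       /* touched through the ctrl view */
        mlx->rlid = 0;                         /* touched through the mlx view */
        inl->byte_count = hlen;
        memcpy(inl + 1, hdr, hlen);            /* header bytes follow the count */
    }

    int main(void)
    {
        uint32_t slot[32] = { 0 };
        const unsigned char hdr[40] = { 0 };   /* e.g. LRH/BTH bytes */

        build_header(slot, hdr, sizeof hdr);
        return 0;
    }
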
2806 static void build_tunnel_header(const struct ib_ud_wr *wr, void *wqe, unsigned *mlx_seg_len)
2808 struct mlx4_wqe_inline_seg *inl = wqe;
2889 static int build_lso_seg(struct mlx4_wqe_lso_seg *wqe, const struct ib_ud_wr *wr,
2893 unsigned halign = ALIGN(sizeof *wqe + wr->hlen, 16);
2902 memcpy(wqe->header, wr->header, wr->hlen);
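
build_lso_seg() (2889-2902) copies the TSO packet headers straight into the WQE and rounds the consumed size up to the queue's 16-byte basic blocks, which is what the ALIGN at 2893 computes. A small sketch of that rounding, using the usual kernel-style ALIGN round-up and a C99 flexible array member in place of the driver's header[0]:

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* The usual power-of-two round-up, as in the kernel's ALIGN(). */
    #define ALIGN(x, a) (((x) + (a) - 1) & ~((unsigned)(a) - 1))

    struct lso_seg {
        uint32_t mss_hdr_size;                 /* MSS + header size, device format */
        uint32_t header[];                     /* headers copied in-line */
    };

    int main(void)
    {
        uint32_t wqe_buf[16];
        struct lso_seg *wqe = (struct lso_seg *)wqe_buf;
        const unsigned char hdr[14] = { 0 };   /* e.g. an Ethernet header */
        unsigned hlen = sizeof hdr;

        /* Segments sit on 16-byte basic blocks, so round the consumed
         * size up exactly as line 2893 does. */
        unsigned halign = ALIGN(sizeof *wqe + hlen, 16);

        memcpy(wqe->header, hdr, hlen);
        printf("LSO segment consumes %u queue bytes for a %u-byte header\n",
               halign, hlen);
        return 0;
    }
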
2924 static void add_zero_len_inline(void *wqe)
2926 struct mlx4_wqe_inline_seg *inl = wqe;
2927 memset(wqe, 0, 16);
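
add_zero_len_inline() (2924-2927) clears one 16-byte basic block and tags it as an inline segment of length zero; in the mlx4 inline segment the high bit of byte_count is the inline flag. A sketch, with htonl() standing in for cpu_to_be32():

    #include <arpa/inet.h>                     /* htonl() in place of cpu_to_be32() */
    #include <stdint.h>
    #include <string.h>

    struct inline_seg { uint32_t byte_count; };

    static void add_zero_len_inline(void *wqe)
    {
        struct inline_seg *inl = wqe;

        memset(wqe, 0, 16);                    /* clear one 16-byte basic block */
        inl->byte_count = htonl(1u << 31);     /* inline flag set, length zero */
    }

    int main(void)
    {
        uint32_t block[4];

        add_zero_len_inline(block);
        return 0;
    }
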
2935 void *wqe;
3001 ctrl = wqe = get_send_wqe(qp, ind & (qp->sq.wqe_cnt - 1));
3016 wqe += sizeof *ctrl;
3026 set_raddr_seg(wqe, atomic_wr(wr)->remote_addr,
3028 wqe += sizeof (struct mlx4_wqe_raddr_seg);
3030 set_atomic_seg(wqe, atomic_wr(wr));
3031 wqe += sizeof (struct mlx4_wqe_atomic_seg);
3039 set_raddr_seg(wqe, atomic_wr(wr)->remote_addr,
3041 wqe += sizeof (struct mlx4_wqe_raddr_seg);
3043 set_masked_atomic_seg(wqe, atomic_wr(wr));
3044 wqe += sizeof (struct mlx4_wqe_masked_atomic_seg);
3054 set_raddr_seg(wqe, rdma_wr(wr)->remote_addr,
3056 wqe += sizeof (struct mlx4_wqe_raddr_seg);
3063 set_local_inv_seg(wqe, wr->ex.invalidate_rkey);
3064 wqe += sizeof (struct mlx4_wqe_local_inval_seg);
3071 set_reg_seg(wqe, reg_wr(wr));
3072 wqe += sizeof(struct mlx4_wqe_fmr_seg);
3089 wqe += seglen;
3095 set_datagram_seg(wqe, ud_wr(wr));
3097 *(__be32 *) wqe |= cpu_to_be32(0x80000000);
3098 wqe += sizeof (struct mlx4_wqe_datagram_seg);
3102 set_datagram_seg(wqe, ud_wr(wr));
3103 wqe += sizeof (struct mlx4_wqe_datagram_seg);
3107 err = build_lso_seg(wqe, ud_wr(wr), qp, &seglen,
3113 lso_wqe = (__be32 *) wqe;
3114 wqe += seglen;
3126 wqe += seglen;
3129 add_zero_len_inline(wqe);
3130 wqe += 16;
3132 build_tunnel_header(ud_wr(wr), wqe, &seglen);
3133 wqe += seglen;
3142 set_tunnel_datagram_seg(to_mdev(ibqp->device), wqe,
3145 wqe += sizeof (struct mlx4_wqe_datagram_seg);
3147 build_tunnel_header(ud_wr(wr), wqe, &seglen);
3148 wqe += seglen;
3160 wqe += seglen;
3175 dseg = wqe;
3226 * Same optimization applies to padding with NOP wqe
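
The final hits (3175 and the comment at 3226) are the tail of mlx4_ib_post_send(): dseg points at the scatter/gather area, and the driver fills it back to front so that the dword overwriting the stamp is written last, behind a write barrier, keeping a half-built WQE invalid to the hardware; the 3226 comment notes that the same defer-the-stamp trick applies to NOP padding. A simplified sketch of the reverse walk, with stand-in types and the barrier elided:

    #include <stdint.h>

    /* Simplified stand-ins for mlx4_wqe_data_seg and ib_sge. */
    struct data_seg { uint32_t byte_count, lkey; uint64_t addr; };
    struct sge      { uint64_t addr; uint32_t length, lkey; };

    static void set_data_seg(struct data_seg *dseg, const struct sge *sg)
    {
        dseg->lkey = sg->lkey;
        dseg->addr = sg->addr;
        /* The driver issues a write barrier here, then writes byte_count
         * last so a half-written entry never looks valid. */
        dseg->byte_count = sg->length;
    }

    static void write_data_segs(void *wqe, const struct sge *sg_list, int num_sge)
    {
        struct data_seg *dseg = (struct data_seg *)wqe + num_sge - 1;
        int i;

        /* Back to front, so the dword overwriting the stamp lands last. */
        for (i = num_sge - 1; i >= 0; --i, --dseg)
            set_data_seg(dseg, sg_list + i);
    }

    int main(void)
    {
        struct data_seg segs[2];
        struct sge sg_list[2] = {
            { 0x1000, 64, 1 },
            { 0x2000, 128, 1 },
        };

        write_data_segs(segs, sg_list, 2);
        return 0;
    }
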