Lines Matching defs:rxq
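These matches appear to come from the RX path of the FreeBSD qlnx(4) Ethernet driver (the qlnx_*/ecore_* identifiers, the mbuf calls, and the tcp_lro hooks all point there). The C sketches interleaved below reconstruct the recurring patterns from the matched fragments alone; they are illustrative stand-alone models, not driver code.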

132 static int qlnx_alloc_rx_buffer(qlnx_host_t *ha, struct qlnx_rx_queue *rxq);
133 static void qlnx_reuse_rx_data(struct qlnx_rx_queue *rxq);
135 struct qlnx_rx_queue *rxq);
1530 if (fp->rxq->handle != NULL) {
1532 0, fp->rxq->handle);
3989 struct qlnx_rx_queue *rxq;
3992 rxq = fp->rxq;
3997 rxq->sw_rx_cons = (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1);
3999 sw_rx_data = &rxq->sw_rx_ring[rxq->sw_rx_cons];
4005 rxq->sw_rx_cons =
4006 (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1);
4016 if (qlnx_alloc_rx_buffer(ha, rxq) != 0) {
4021 qlnx_reuse_rx_data(rxq);
4029 ecore_chain_consume(&rxq->rx_bd_ring);
4031 if (len > rxq->rx_buf_size)
4032 len_in_buffer = rxq->rx_buf_size;
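One idiom recurs throughout the matches: software ring indices advance as (idx + 1) & (RX_RING_SIZE - 1), which equals (idx + 1) % RX_RING_SIZE only when RX_RING_SIZE is a power of two. A minimal sketch of that masked-wrap assumption (the RX_RING_SIZE value below is invented; only the name appears in the matches):

    #include <stdint.h>

    #define RX_RING_SIZE 8192   /* assumed value; must be a power of two */
    _Static_assert((RX_RING_SIZE & (RX_RING_SIZE - 1)) == 0,
        "mask-wrap only works for power-of-two ring sizes");

    /* Advance a software ring index, wrapping at RX_RING_SIZE. */
    static inline uint16_t
    ring_next(uint16_t idx)
    {
            return ((idx + 1) & (RX_RING_SIZE - 1));
    }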
4059 struct qlnx_rx_queue *rxq,
4108 sw_rx_data = &rxq->sw_rx_ring[rxq->sw_rx_cons];
4117 rxq->sw_rx_cons = (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1);
4126 rxq->sw_rx_cons, le16toh(cqe->pars_flags.flags));
4130 qlnx_reuse_rx_data(rxq);
4137 if (qlnx_alloc_rx_buffer(ha, rxq) != 0) {
4154 sw_rx_data = &rxq->sw_rx_ring[rxq->sw_rx_prod];
4156 sw_rx_data->data = rxq->tpa_info[agg_index].rx_buf.data;
4157 sw_rx_data->dma_addr = rxq->tpa_info[agg_index].rx_buf.dma_addr;
4158 sw_rx_data->map = rxq->tpa_info[agg_index].rx_buf.map;
4160 rxq->tpa_info[agg_index].rx_buf.data = mp;
4161 rxq->tpa_info[agg_index].rx_buf.dma_addr = addr;
4162 rxq->tpa_info[agg_index].rx_buf.map = map;
4165 ecore_chain_produce(&rxq->rx_bd_ring);
4173 rxq->sw_rx_prod = (rxq->sw_rx_prod + 1) & (RX_RING_SIZE - 1);
4174 rxq->sw_rx_cons = (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1);
4176 ecore_chain_consume(&rxq->rx_bd_ring);
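Lines 4154-4162 are a three-field buffer swap at TPA start: the aggregation slot's stored buffer is posted into the producer position of sw_rx_ring, and the slot takes ownership of a replacement buffer (mp/addr/map, presumably obtained earlier in the function). A stand-alone model of the swap, with the kernel types replaced by stand-ins inferred from the field names:

    #include <stdint.h>

    /* Stand-in for a sw_rx_ring / tpa_info[].rx_buf element. */
    struct rx_buf {
            void            *data;          /* struct mbuf * in the driver */
            uint64_t        dma_addr;
            void            *map;           /* bus_dmamap_t in the driver */
    };

    /*
     * Mirror of lines 4154-4162: repost the aggregation slot's buffer
     * at the ring's producer slot, then hand the slot a replacement.
     */
    static void
    tpa_swap_buffer(struct rx_buf *prod_slot, struct rx_buf *agg_buf,
        void *new_data, uint64_t new_addr, void *new_map)
    {
            *prod_slot = *agg_buf;
            agg_buf->data = new_data;
            agg_buf->dma_addr = new_addr;
            agg_buf->map = new_map;
    }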
4184 qlnx_reuse_rx_data(rxq);
4187 rxq->tpa_info[agg_index].agg_state = QLNX_AGG_STATE_ERROR;
4191 if (rxq->tpa_info[agg_index].agg_state != QLNX_AGG_STATE_NONE) {
4200 if (rxq->tpa_info[agg_index].mpf) {
4201 m_freem(rxq->tpa_info[agg_index].mpf);
4202 rxq->tpa_info[agg_index].mpl = NULL;
4204 rxq->tpa_info[agg_index].mpf = mp;
4205 rxq->tpa_info[agg_index].mpl = NULL;
4207 rxq->sw_rx_cons = (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1);
4208 ecore_chain_consume(&rxq->rx_bd_ring);
4216 qlnx_reuse_rx_data(rxq);
4218 rxq->tpa_info[agg_index].agg_state = QLNX_AGG_STATE_ERROR;
4227 ecore_chain_consume(&rxq->rx_bd_ring);
4228 rxq->sw_rx_cons = (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1);
4237 sw_rx_data = &rxq->sw_rx_ring[rxq->sw_rx_cons];
4249 rxq->tpa_info[agg_index].agg_state =
4251 ecore_chain_consume(&rxq->rx_bd_ring);
4252 rxq->sw_rx_cons =
4253 (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1);
4257 if (qlnx_alloc_rx_buffer(ha, rxq) != 0) {
4262 qlnx_reuse_rx_data(rxq);
4268 rxq->tpa_info[agg_index].agg_state =
4271 ecore_chain_consume(&rxq->rx_bd_ring);
4272 rxq->sw_rx_cons =
4273 (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1);
4291 ecore_chain_consume(&rxq->rx_bd_ring);
4292 rxq->sw_rx_cons =
4293 (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1);
4296 if (rxq->tpa_info[agg_index].agg_state != QLNX_AGG_STATE_NONE) {
4304 rxq->tpa_info[agg_index].mpf = mp;
4305 rxq->tpa_info[agg_index].mpl = NULL;
4310 rxq->tpa_info[agg_index].placement_offset = cqe->placement_offset;
4315 rxq->tpa_info[agg_index].mpf = mp;
4316 rxq->tpa_info[agg_index].mpl = mpl;
4319 rxq->tpa_info[agg_index].mpf = mp;
4320 rxq->tpa_info[agg_index].mpl = mp;
4378 rxq->tpa_info[agg_index].agg_state = QLNX_AGG_STATE_START;
4381 fp->rss_id, rxq->tpa_info[agg_index].agg_state,
4382 rxq->tpa_info[agg_index].mpf, rxq->tpa_info[agg_index].mpl);
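Three agg_state values appear across the TPA matches: QLNX_AGG_STATE_NONE, _START, and _ERROR. The transitions visible in the listing imply a small per-slot state machine; the enum below is an assumed reconstruction (the driver may define these as macros, and the numeric values are invented):

    /*
     * Implied life cycle: a slot idles in NONE, moves to START on a
     * TPA-start CQE (line 4378), drops to ERROR on any allocation or
     * validation failure (e.g. lines 4187, 4218), and returns to NONE
     * once the completed chain is shipped or freed (line 4688).
     */
    enum qlnx_agg_state {
            QLNX_AGG_STATE_NONE = 0,
            QLNX_AGG_STATE_START,
            QLNX_AGG_STATE_ERROR,
    };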
4389 struct qlnx_rx_queue *rxq,
4430 if (rxq->tpa_info[agg_index].agg_state !=
4432 qlnx_reuse_rx_data(rxq);
4436 sw_rx_data = &rxq->sw_rx_ring[rxq->sw_rx_cons];
4450 rxq->tpa_info[agg_index].agg_state =
4452 ecore_chain_consume(&rxq->rx_bd_ring);
4453 rxq->sw_rx_cons =
4454 (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1);
4458 if (qlnx_alloc_rx_buffer(ha, rxq) != 0) {
4464 qlnx_reuse_rx_data(rxq);
4470 rxq->tpa_info[agg_index].agg_state =
4473 ecore_chain_consume(&rxq->rx_bd_ring);
4474 rxq->sw_rx_cons =
4475 (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1);
4493 ecore_chain_consume(&rxq->rx_bd_ring);
4494 rxq->sw_rx_cons =
4495 (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1);
4502 mp = rxq->tpa_info[agg_index].mpl;
4505 rxq->tpa_info[agg_index].mpl = mpl;
4513 struct qlnx_rx_queue *rxq,
4563 if (rxq->tpa_info[agg_index].agg_state !=
4568 qlnx_reuse_rx_data(rxq);
4572 sw_rx_data = &rxq->sw_rx_ring[rxq->sw_rx_cons];
4586 rxq->tpa_info[agg_index].agg_state =
4588 ecore_chain_consume(&rxq->rx_bd_ring);
4589 rxq->sw_rx_cons =
4590 (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1);
4594 if (qlnx_alloc_rx_buffer(ha, rxq) != 0) {
4599 qlnx_reuse_rx_data(rxq);
4605 rxq->tpa_info[agg_index].agg_state =
4608 ecore_chain_consume(&rxq->rx_bd_ring);
4609 rxq->sw_rx_cons =
4610 (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1);
4628 ecore_chain_consume(&rxq->rx_bd_ring);
4629 rxq->sw_rx_cons =
4630 (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1);
4639 mp = rxq->tpa_info[agg_index].mpl;
4644 if (rxq->tpa_info[agg_index].agg_state != QLNX_AGG_STATE_START) {
4648 if (rxq->tpa_info[agg_index].mpf != NULL)
4649 m_freem(rxq->tpa_info[agg_index].mpf);
4650 rxq->tpa_info[agg_index].mpf = NULL;
4651 rxq->tpa_info[agg_index].mpl = NULL;
4652 rxq->tpa_info[agg_index].agg_state = QLNX_AGG_STATE_NONE;
4656 mp = rxq->tpa_info[agg_index].mpf;
4657 m_adj(mp, rxq->tpa_info[agg_index].placement_offset);
4671 mpl = rxq->tpa_info[agg_index].mpl;
4686 rxq->tpa_info[agg_index].mpf = NULL;
4687 rxq->tpa_info[agg_index].mpl = NULL;
4688 rxq->tpa_info[agg_index].agg_state = QLNX_AGG_STATE_NONE;
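The mpf/mpl pair cleared at lines 4686-4687 is head/tail bookkeeping for the aggregated mbuf chain: mpf is the first segment (the one m_adj() trims by placement_offset at line 4657) and mpl the last, so continuation segments append without walking the chain. A stand-alone model of that O(1) append:

    #include <stddef.h>

    struct seg {                    /* stand-in for struct mbuf */
            struct seg *next;
    };

    struct agg {
            struct seg *mpf;        /* first segment of the aggregation */
            struct seg *mpl;        /* last segment, for O(1) append */
    };

    static void
    agg_append(struct agg *a, struct seg *s)
    {
            if (a->mpf == NULL)
                    a->mpf = s;
            else
                    a->mpl->next = s;
            a->mpl = s;
            while (a->mpl->next != NULL)    /* s may itself be a chain */
                    a->mpl = a->mpl->next;
    }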
4699 struct qlnx_rx_queue *rxq = fp->rxq;
4707 lro = &rxq->lro;
4710 hw_comp_cons = le16toh(*rxq->hw_cons_ptr);
4711 sw_comp_cons = ecore_chain_get_cons_idx(&rxq->rx_comp_ring);
4736 ecore_chain_consume(&rxq->rx_comp_ring);
4752 qlnx_tpa_start(ha, fp, rxq,
4758 qlnx_tpa_cont(ha, fp, rxq,
4764 rx_pkt += qlnx_tpa_end(ha, fp, rxq,
4777 sw_rx_data = &rxq->sw_rx_ring[rxq->sw_rx_cons];
4783 rxq->sw_rx_cons =
4784 (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1);
4822 qlnx_reuse_rx_data(rxq);
4829 if (qlnx_alloc_rx_buffer(ha, rxq) != 0) {
4833 qlnx_reuse_rx_data(rxq);
4842 ecore_chain_consume(&rxq->rx_bd_ring);
4959 rxq->sw_rx_cons = (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1);
4962 ecore_chain_recycle_consumed(&rxq->rx_comp_ring);
4963 sw_comp_cons = ecore_chain_get_cons_idx(&rxq->rx_comp_ring);
4972 qlnx_update_rx_prod(p_hwfn, rxq);
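Lines 4710-4972 sketch the completion loop: snapshot the hardware consumer index, drain CQEs until the software index catches up, then publish fresh producers via qlnx_update_rx_prod(). A stand-alone model of that index handshake (the byte swap via le16toh() and the read barrier the real driver would need are noted in a comment but omitted here):

    #include <stdint.h>

    /* One poll pass over a completion ring; returns entries drained. */
    static int
    rx_poll(volatile uint16_t *hw_cons_ptr, uint16_t *sw_cons)
    {
            uint16_t hw = *hw_cons_ptr;     /* driver: le16toh() + read barrier */
            int work = 0;

            while (*sw_cons != hw) {
                    /*
                     * Classify and process one CQE here: regular RX,
                     * or TPA start / continuation / end.
                     */
                    *sw_cons = (uint16_t)(*sw_cons + 1);
                    work++;
            }
            return (work);
    }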
5065 lro = &fp->rxq->lro;
6071 fp->rxq = &ha->rxq_array[rss_id];
6072 fp->rxq->rxq_id = rss_id;
6187 qlnx_free_rx_buffers(qlnx_host_t *ha, struct qlnx_rx_queue *rxq)
6192 for (i = 0; i < rxq->num_rx_buffers; i++) {
6194 rx_buf = &rxq->sw_rx_ring[i];
6210 qlnx_free_mem_rxq(qlnx_host_t *ha, struct qlnx_rx_queue *rxq)
6217 qlnx_free_rx_buffers(ha, rxq);
6220 qlnx_free_tpa_mbuf(ha, &rxq->tpa_info[i]);
6221 if (rxq->tpa_info[i].mpf != NULL)
6222 m_freem(rxq->tpa_info[i].mpf);
6225 bzero((void *)&rxq->sw_rx_ring[0],
6229 if (rxq->rx_bd_ring.p_virt_addr) {
6230 ecore_chain_free(cdev, &rxq->rx_bd_ring);
6231 rxq->rx_bd_ring.p_virt_addr = NULL;
6235 if (rxq->rx_comp_ring.p_virt_addr &&
6236 rxq->rx_comp_ring.pbl_sp.p_virt_table) {
6237 ecore_chain_free(cdev, &rxq->rx_comp_ring);
6238 rxq->rx_comp_ring.p_virt_addr = NULL;
6239 rxq->rx_comp_ring.pbl_sp.p_virt_table = NULL;
6246 lro = &rxq->lro;
6255 qlnx_alloc_rx_buffer(qlnx_host_t *ha, struct qlnx_rx_queue *rxq)
6270 rx_buf_size = rxq->rx_buf_size;
6294 sw_rx_data = &rxq->sw_rx_ring[rxq->sw_rx_prod];
6300 rx_bd = (struct eth_rx_bd *)ecore_chain_produce(&rxq->rx_bd_ring);
6305 rxq->sw_rx_prod = (rxq->sw_rx_prod + 1) & (RX_RING_SIZE - 1);
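qlnx_alloc_rx_buffer() (lines 6255-6305) is the repost path: obtain a buffer of rxq->rx_buf_size, record it in sw_rx_ring at sw_rx_prod, write its address into a BD produced from rx_bd_ring, and advance the masked producer index. A stand-alone model of the bookkeeping, reusing struct rx_buf and ring_next() from the sketches above (the mbuf allocation and DMA mapping are reduced to a malloc; the BD and software rings advance in lockstep, so one index serves both here):

    #include <stdint.h>
    #include <stdlib.h>

    struct rx_bd {                  /* stand-in for struct eth_rx_bd */
            uint64_t addr;
    };

    static int
    alloc_rx_buffer(struct rx_buf *sw_rx_ring, struct rx_bd *bd_ring,
        uint16_t *sw_rx_prod, uint16_t rx_buf_size)
    {
            void *data = malloc(rx_buf_size); /* m_getjcl() + DMA map in the driver */

            if (data == NULL)
                    return (-1);    /* caller falls back to the reuse path */

            sw_rx_ring[*sw_rx_prod].data = data;
            sw_rx_ring[*sw_rx_prod].dma_addr = (uint64_t)(uintptr_t)data;
            bd_ring[*sw_rx_prod].addr = sw_rx_ring[*sw_rx_prod].dma_addr;
            *sw_rx_prod = ring_next(*sw_rx_prod);
            return (0);
    }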
6378 qlnx_alloc_mem_rxq(qlnx_host_t *ha, struct qlnx_rx_queue *rxq)
6387 rxq->num_rx_buffers = RX_RING_SIZE;
6389 rxq->rx_buf_size = ha->rx_buf_size;
6392 bzero((void *)&rxq->sw_rx_ring[0],
6403 &rxq->rx_bd_ring, NULL);
6415 &rxq->rx_comp_ring, NULL);
6423 rc = qlnx_alloc_tpa_mbuf(ha, rxq->rx_buf_size,
6424 &rxq->tpa_info[i]);
6430 for (i = 0; i < rxq->num_rx_buffers; i++) {
6431 rc = qlnx_alloc_rx_buffer(ha, rxq);
6439 } else if (num_allocated < rxq->num_rx_buffers) {
6449 lro = &rxq->lro;
6452 if (tcp_lro_init_args(lro, ifp, 0, rxq->num_rx_buffers)) {
6454 rxq->rxq_id);
6460 rxq->rxq_id);
6471 qlnx_free_mem_rxq(ha, rxq);
6565 qlnx_free_mem_rxq(ha, fp->rxq);
6616 rc = qlnx_alloc_mem_rxq(ha, fp->rxq);
6771 rss->rss_ind_table[j] = fp->rxq->handle;
6811 qlnx_reuse_rx_data(struct qlnx_rx_queue *rxq)
6814 ecore_chain_consume(&rxq->rx_bd_ring);
6816 ecore_chain_produce(&rxq->rx_bd_ring);
6818 &rxq->sw_rx_ring[rxq->sw_rx_cons];
6820 &rxq->sw_rx_ring[rxq->sw_rx_prod];
6825 rxq->sw_rx_cons = (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1);
6826 rxq->sw_rx_prod = (rxq->sw_rx_prod + 1) & (RX_RING_SIZE - 1);
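qlnx_reuse_rx_data() (lines 6811-6826) is the allocation-failure fallback: instead of leaking a ring slot, it copies the consumer slot's buffer to the producer slot, re-produces the same BD, and advances both masked indices; the packet is dropped but the ring stays fully stocked. A stand-alone model, with the same stand-ins as above:

    static void
    reuse_rx_data(struct rx_buf *sw_rx_ring, struct rx_bd *bd_ring,
        uint16_t *sw_rx_cons, uint16_t *sw_rx_prod)
    {
            /* Recycle the just-consumed buffer at the producer slot. */
            sw_rx_ring[*sw_rx_prod] = sw_rx_ring[*sw_rx_cons];
            bd_ring[*sw_rx_prod].addr = sw_rx_ring[*sw_rx_prod].dma_addr;
            *sw_rx_cons = ring_next(*sw_rx_cons);
            *sw_rx_prod = ring_next(*sw_rx_prod);
    }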
6832 qlnx_update_rx_prod(struct ecore_hwfn *p_hwfn, struct qlnx_rx_queue *rxq)
6842 bd_prod = ecore_chain_get_prod_idx(&rxq->rx_bd_ring);
6843 cqe_prod = ecore_chain_get_prod_idx(&rxq->rx_comp_ring);
6855 internal_ram_wr(p_hwfn, rxq->hw_rxq_prod_addr,
6943 fp->rxq->rx_buf_size, /* bd_max_bytes */
6945 fp->rxq->rx_bd_ring.p_phys_addr,
6947 ecore_chain_get_pbl_phys(&fp->rxq->rx_comp_ring),
6949 ecore_chain_get_page_cnt(&fp->rxq->rx_comp_ring),
6957 fp->rxq->hw_rxq_prod_addr = rx_ret_params.p_prod;
6958 fp->rxq->handle = rx_ret_params.p_handle;
6959 fp->rxq->hw_cons_ptr =
6962 qlnx_update_rx_prod(p_hwfn, fp->rxq);
7023 rss_params->rss_ind_table[i] = fp->rxq->handle;
7174 rc = ecore_eth_rx_queue_stop(p_hwfn, fp->rxq->handle, false,
7572 lro = &fp->rxq->lro;