Lines Matching refs:ring

101  * @ringh: Handle to the ring
109 __hal_ring_t *ring = (__hal_ring_t *) ringh;
112 dma_object = __hal_mempool_memblock_dma(ring->mempool, 0);
123 * @mempoolh: Handle to the memory pool of the ring
165 * @mempoolh: Handle to the memory pool of the ring
222 * @mempoolh: Handle to the memory pool of the ring
223 * @ring: Ring to operate on
232 __hal_ring_t *ring,
241 vxge_assert((mempoolh != NULL) && (ring != NULL));
243 hldev = (__hal_device_t *) ring->channel.devh;
249 "mempoolh = 0x"VXGE_OS_STXFMT", ring = 0x"VXGE_OS_STXFMT", "
250 "from = %d, to = %d", (ptr_t) mempoolh, (ptr_t) ring, from, to);
277 vxge_os_dma_sync(ring->channel.pdev,
321 __hal_ring_t *ring = (__hal_ring_t *) userdata;
324 vxge_assert((item != NULL) && (ring != NULL));
326 hldev = (__hal_device_t *) ring->channel.devh;
340 for (i = 0; i < ring->rxds_per_block; i++) {
347 u32 dtr_index = item_index * ring->rxds_per_block + i;
349 ring->channel.dtr_arr[dtr_index].dtr =
350 ((u8 *) item) + i * ring->rxd_size;
364 ring->channel.dtr_arr[dtr_index].dtr;
366 uld_priv = ((u8 *) rxdblock_priv + ring->rxd_priv_size * i);
369 ring->per_rxd_space));
373 ring->channel.dtr_arr[dtr_index].uld_priv = (void *)uld_priv;
374 ring->channel.dtr_arr[dtr_index].hal_priv = (void *)rxd_priv;
384 rxd_priv->db_bytes = ring->rxd_size;
386 if (i == (ring->rxds_per_block - 1)) {
389 (ring->rxds_per_block * ring->rxd_size));
396 __hal_ring_rxdblock_link(mempoolh, ring, item_index, 0);
401 __hal_ring_rxdblock_link(mempoolh, ring, item_index - 1, item_index);
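
The fragments above carve each mempool memblock into RxDs: descriptor i of block item_index lands at global index item_index * rxds_per_block + i, with the descriptor and its per-RxD private area laid out back to back. A minimal standalone sketch of that carving math, using simplified stand-in types rather than the real __hal_ring_t bookkeeping:

    #include <stddef.h>

    struct dtr_entry {
        void *dtr;        /* pointer to the RxD inside the memblock */
        void *uld_priv;   /* upper-layer-driver private area for that RxD */
    };

    static void
    carve_rxd_block(struct dtr_entry *dtr_arr, unsigned int item_index,
        unsigned char *item, unsigned char *block_priv,
        unsigned int rxds_per_block, size_t rxd_size, size_t rxd_priv_size)
    {
        unsigned int i;

        for (i = 0; i < rxds_per_block; i++) {
            /* Blocks are laid out back to back in descriptor-index space. */
            unsigned int dtr_index = item_index * rxds_per_block + i;

            dtr_arr[dtr_index].dtr = item + i * rxd_size;
            dtr_arr[dtr_index].uld_priv = block_priv + i * rxd_priv_size;
        }
    }
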
435 __hal_ring_t *ring = (__hal_ring_t *) userdata;
438 vxge_assert((item != NULL) && (ring != NULL));
440 hldev = (__hal_device_t *) ring->channel.devh;
461 * @ring: Ring to operate on
468 __hal_ring_t *ring,
476 vxge_assert(ring != NULL);
478 hldev = (__hal_device_t *) ring->channel.devh;
483 vxge_hal_trace_log_ring("ring = 0x"VXGE_OS_STXFMT", reopen = %d",
484 (ptr_t) ring, reopen);
486 while (vxge_hal_ring_rxd_reserve(ring->channel.vph, &rxd, &uld_priv) ==
489 if (ring->rxd_init) {
490 status = ring->rxd_init(ring->channel.vph,
494 ring->channel.userdata,
497 vxge_hal_ring_rxd_free(ring->channel.vph, rxd);
505 vxge_hal_ring_rxd_post(ring->channel.vph, rxd);
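
The loop above implements the initial replenish: keep reserving descriptors until the ring is full, let the ULD's rxd_init callback attach a buffer to each one, and either post it or hand it back on failure. A hedged sketch of that control flow; the four functions are declarations standing in for the vxge_hal_ring_rxd_* entry points, with their real argument lists trimmed:

    #include <stdbool.h>

    enum hal_status { HAL_OK, HAL_FAIL };

    /* Stand-ins for the vxge_hal_ring_rxd_* calls, arguments trimmed. */
    enum hal_status rxd_reserve(void *vph, void **rxd, void **uld_priv);
    enum hal_status rxd_init(void *vph, void *rxd, void *uld_priv, void *ud);
    void rxd_post(void *vph, void *rxd);
    void rxd_free(void *vph, void *rxd);

    /* Replenish until the ring runs out of free descriptors. */
    static void
    initial_replenish(void *vph, void *userdata, bool have_init_cb)
    {
        void *rxd, *uld_priv;

        while (rxd_reserve(vph, &rxd, &uld_priv) == HAL_OK) {
            if (have_init_cb &&
                rxd_init(vph, rxd, uld_priv, userdata) != HAL_OK) {
                /* ULD could not attach a buffer: return the RxD and stop. */
                rxd_free(vph, rxd);
                break;
            }
            rxd_post(vph, rxd);
        }
    }
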
527 __hal_ring_t *ring;
552 &vp->vpath->hldev->header.config.vp_config[vp->vpath->vp_id].ring;
559 ring = (__hal_ring_t *) vxge_hal_channel_allocate(
567 if (ring == NULL) {
576 vp->vpath->ringh = (vxge_hal_ring_h) ring;
578 ring->stats = &vp->vpath->sw_stats->ring_stats;
580 ring->config = config;
581 ring->callback = attr->callback;
582 ring->rxd_init = attr->rxd_init;
583 ring->rxd_term = attr->rxd_term;
585 ring->indicate_max_pkts = config->indicate_max_pkts;
586 ring->buffer_mode = config->buffer_mode;
589 vxge_os_spin_lock_init(&ring->channel.post_lock, hldev->pdev);
591 vxge_os_spin_lock_init_irq(&ring->channel.post_lock, hldev->irqh);
594 ring->rxd_size = vxge_hal_ring_rxd_size_get(config->buffer_mode);
595 ring->rxd_priv_size =
597 ring->per_rxd_space = attr->per_rxd_space;
599 ring->rxd_priv_size =
600 ((ring->rxd_priv_size + __vxge_os_cacheline_size - 1) /
607 ring->rxds_per_block =
611 ring->rxdblock_priv_size = ring->rxd_priv_size * ring->rxds_per_block;
613 ring->rxd_mem_avail =
614 ((__hal_vpath_handle_t *) ring->channel.vph)->vpath->rxd_mem_size;
616 ring->db_byte_count = 0;
618 ring->mempool = vxge_hal_mempool_create(
622 ring->rxdblock_priv_size,
623 ring->config->ring_length / ring->rxds_per_block,
624 ring->config->ring_length / ring->rxds_per_block,
627 ring);
629 if (ring->mempool == NULL) {
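
The create path above sizes everything from the buffer mode: the per-RxD private area is rounded up to whole cache lines, a fixed-size memblock is divided into rxds_per_block descriptors, and the mempool gets ring_length / rxds_per_block items. A self-contained sketch of that arithmetic; the 64-byte cache line and 4 KB block size are illustrative assumptions, not values taken from this file:

    #include <stddef.h>

    #define CACHELINE_SIZE 64u      /* stand-in for __vxge_os_cacheline_size */
    #define RXD_BLOCK_SIZE 4096u    /* assumed memblock size */

    struct ring_sizing {
        size_t rxd_size;            /* one descriptor, from the buffer mode */
        size_t rxd_priv_size;       /* HAL-private bytes per RxD, aligned */
        unsigned int rxds_per_block;
        size_t rxdblock_priv_size;  /* private bytes per memblock */
        unsigned int mempool_items; /* one memblock per rxds_per_block RxDs */
    };

    static struct ring_sizing
    compute_sizing(size_t rxd_size, size_t raw_priv_size,
        unsigned int ring_length)
    {
        struct ring_sizing s;

        s.rxd_size = rxd_size;
        /* Round the private area up to a whole number of cache lines. */
        s.rxd_priv_size = ((raw_priv_size + CACHELINE_SIZE - 1) /
            CACHELINE_SIZE) * CACHELINE_SIZE;
        s.rxds_per_block = (unsigned int)(RXD_BLOCK_SIZE / rxd_size);
        s.rxdblock_priv_size = s.rxd_priv_size * s.rxds_per_block;
        s.mempool_items = ring_length / s.rxds_per_block;
        return (s);
    }
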
636 status = vxge_hal_channel_initialize(&ring->channel);
653 if (ring->rxd_init) {
655 ring,
669 ring->stats->common_stats.usage_cnt = 0;
681  * This function terminates the RxDs of the ring
692 __hal_ring_t *ring = (__hal_ring_t *) ringh;
696 hldev = (__hal_device_t *) ring->channel.devh;
701 vxge_hal_trace_log_ring("ring = 0x"VXGE_OS_STXFMT", reopen = %d",
704 if (ring->rxd_term) {
705 __hal_channel_for_each_dtr(&ring->channel, rxdh, i) {
706 if (!__hal_channel_is_posted_dtr(&ring->channel, i)) {
707 ring->rxd_term(ring->channel.vph, rxdh,
708 VXGE_HAL_RING_ULD_PRIV(ring, rxdh),
710 ring->channel.userdata,
717 __hal_channel_dtr_try_complete(&ring->channel, &rxdh);
721 __hal_channel_dtr_complete(&ring->channel);
722 if (ring->rxd_term) {
723 ring->rxd_term(ring->channel.vph, rxdh,
724 VXGE_HAL_RING_ULD_PRIV(ring, rxdh),
726 ring->channel.userdata,
729 __hal_channel_dtr_free(&ring->channel,
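
The terminate path above walks every descriptor: RxDs that were reserved but never posted get the ULD's rxd_term callback directly, then the posted ones are drained via dtr_try_complete/dtr_complete, terminated, and freed. A loose model of those two passes, with a flat state array in place of the channel's dtr bookkeeping:

    #include <stddef.h>

    enum rxd_state { RXD_FREE, RXD_RESERVED, RXD_POSTED };

    static void
    terminate_rxds(enum rxd_state *state, size_t n,
        void (*rxd_term)(size_t index))
    {
        size_t i;

        /* Pass 1: reserved-but-unposted RxDs still belong to the ULD. */
        for (i = 0; i < n; i++)
            if (state[i] == RXD_RESERVED && rxd_term != NULL)
                rxd_term(i);

        /* Pass 2: drain posted RxDs as if completed, then free them. */
        for (i = 0; i < n; i++) {
            if (state[i] == RXD_POSTED) {
                if (rxd_term != NULL)
                    rxd_term(i);
                state[i] = RXD_FREE;
            }
        }
    }
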
738 * __hal_ring_reset - Resets the ring
741 * This function resets the ring during the vpath reset operation
747 __hal_ring_t *ring = (__hal_ring_t *) ringh;
750 __hal_vpath_handle_t *vph = (__hal_vpath_handle_t *) ring->channel.vph;
754 hldev = (__hal_device_t *) ring->channel.devh;
759 vxge_hal_trace_log_ring("ring = 0x"VXGE_OS_STXFMT,
764 status = __hal_channel_reset(&ring->channel);
773 ring->rxd_mem_avail = vph->vpath->rxd_mem_size;
774 ring->db_byte_count = 0;
777 if (ring->rxd_init) {
779 ring,
795 * __hal_ring_delete - Removes the ring
798  * This function frees up the memory pool and removes the ring
806 __hal_ring_t *ring;
818 ring = (__hal_ring_t *) vp->vpath->ringh;
820 vxge_assert(ring != NULL);
822 vxge_assert(ring->channel.pdev);
827 if (ring->mempool) {
828 vxge_hal_mempool_destroy(ring->mempool);
831 vxge_hal_channel_terminate(&ring->channel);
834 vxge_os_spin_lock_destroy(&ring->channel.post_lock, hldev->pdev);
836 vxge_os_spin_lock_destroy_irq(&ring->channel.post_lock, hldev->pdev);
839 vxge_hal_channel_free(&ring->channel);
891 if (vpath->vp_config->ring.max_frm_len !=
895 vpath->vp_config->ring.max_frm_len +
917 * vxge_hal_ring_rxd_reserve - Reserve ring descriptor.
943 __hal_ring_t *ring;
958 ring = (__hal_ring_t *) vp->vpath->ringh;
960 vxge_assert(ring != NULL);
963 vxge_os_spin_lock(&ring->channel.post_lock);
965 vxge_os_spin_lock_irq(&ring->channel.post_lock, flags);
968 status = __hal_channel_dtr_reserve(&ring->channel, rxdh);
971 vxge_os_spin_unlock(&ring->channel.post_lock);
973 vxge_os_spin_unlock_irq(&ring->channel.post_lock, flags);
982 *rxd_priv = VXGE_HAL_RING_ULD_PRIV(ring, rxdp);
985 VXGE_HAL_RING_HAL_PRIV(ring, rxdp)->allocated = 1;
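
Reservation above happens under the channel post lock (the _irq variants in the paired branches), and the reserved RxD's HAL-private area is flagged allocated. A rough model, using a pthread mutex in place of the vxge spinlock and a bump index in place of __hal_channel_dtr_reserve():

    #include <pthread.h>

    struct channel {
        pthread_mutex_t post_lock;  /* stands in for the vxge post_lock */
        unsigned int free_head;     /* next free descriptor index */
        unsigned int length;        /* total descriptors in the ring */
        unsigned char *allocated;   /* per-RxD 'allocated' flag */
    };

    /* Returns the reserved index, or -1 when the ring is exhausted. */
    static int
    rxd_reserve(struct channel *ch)
    {
        int idx = -1;

        pthread_mutex_lock(&ch->post_lock);
        if (ch->free_head < ch->length) {
            idx = (int)ch->free_head++;
            ch->allocated[idx] = 1;
        }
        pthread_mutex_unlock(&ch->post_lock);
        return (idx);
    }
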
1018 __hal_ring_t *ring;
1031 ring = (__hal_ring_t *) vp->vpath->ringh;
1033 vxge_assert(ring != NULL);
1042 vxge_os_spin_lock(&ring->channel.post_lock);
1044 vxge_os_spin_lock_irq(&ring->channel.post_lock, flags);
1054 rxdp_priv = VXGE_HAL_RING_HAL_PRIV(ring, rxdp);
1057 index = ring->channel.length;
1061 prev_rxdh = ring->channel.dtr_arr[index].dtr;
1067 ring->rxd_size == rxdh);
1073 __hal_channel_dtr_post(&ring->channel, VXGE_HAL_RING_RXD_INDEX(rxdh));
1075 ring->db_byte_count +=
1076 VXGE_HAL_RING_HAL_PRIV(ring, rxdh)->db_bytes;
1079 vxge_os_spin_unlock(&ring->channel.post_lock);
1081 vxge_os_spin_unlock_irq(&ring->channel.post_lock, flags);
1108 __hal_ring_t *ring;
1121 ring = (__hal_ring_t *) vp->vpath->ringh;
1123 vxge_assert(ring != NULL);
1131 priv = __hal_ring_rxd_priv(ring, rxdp);
1132 vxge_os_dma_sync(ring->channel.pdev,
1136 ring->rxd_size,
1139 if (ring->stats->common_stats.usage_cnt > 0)
1140 ring->stats->common_stats.usage_cnt--;
1147 * vxge_hal_ring_rxd_post - Post descriptor on the ring.
1151 * Post descriptor on the ring.
1169 __hal_ring_t *ring;
1182 ring = (__hal_ring_t *) vp->vpath->ringh;
1184 vxge_assert(ring != NULL);
1201 rxdp_temp1 = VXGE_HAL_RING_HAL_PRIV(ring, rxdp);
1202 vxge_os_dma_sync(ring->channel.pdev,
1206 ring->rxd_size,
1212 vxge_os_spin_lock(&ring->channel.post_lock);
1214 vxge_os_spin_lock_irq(&ring->channel.post_lock, flags);
1224 rxdp_temp2 = VXGE_HAL_RING_HAL_PRIV(ring, rxdp);
1226 ring->channel.dtr_arr[VXGE_HAL_RING_RXD_INDEX(rxdp) - 1].dtr;
1230 vxge_assert((char *) prev_rxdh + ring->rxd_size == rxdh);
1235 __hal_channel_dtr_post(&ring->channel, VXGE_HAL_RING_RXD_INDEX(rxdh));
1237 ring->db_byte_count +=
1238 VXGE_HAL_RING_HAL_PRIV(ring, rxdp)->db_bytes;
1241 vxge_os_spin_unlock(&ring->channel.post_lock);
1243 vxge_os_spin_unlock_irq(&ring->channel.post_lock, flags);
1246 if (ring->stats->common_stats.usage_cnt > 0)
1247 ring->stats->common_stats.usage_cnt--;
1300 __hal_ring_t *ring;
1306 ring = (__hal_ring_t *) vp->vpath->ringh;
1315 vxge_os_spin_lock(&ring->channel.post_lock);
1317 vxge_os_spin_lock_irq(&ring->channel.post_lock, flags);
1320 if (ring->db_byte_count <= ring->rxd_mem_avail) {
1321 __hal_rxd_db_post(vpath_handle, ring->db_byte_count);
1322 ring->rxd_mem_avail -= ring->db_byte_count;
1323 ring->db_byte_count = 0;
1325 __hal_rxd_db_post(vpath_handle, ring->rxd_mem_avail);
1326 ring->db_byte_count -= ring->rxd_mem_avail;
1327 ring->rxd_mem_avail = 0;
1331 vxge_os_spin_unlock(&ring->channel.post_lock);
1333 vxge_os_spin_unlock_irq(&ring->channel.post_lock, flags);
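
The doorbell logic above effectively posts min(db_byte_count, rxd_mem_avail): db_byte_count accumulates the db_bytes of posted RxDs, rxd_mem_avail is how much RxD memory the device can still absorb, and whichever side is smaller bounds the doorbell write. A self-contained model; hw_db_post() is a stand-in for __hal_rxd_db_post():

    #include <stdint.h>

    struct ring_db {
        uint32_t db_byte_count;  /* bytes accumulated since the last doorbell */
        uint32_t rxd_mem_avail;  /* RxD memory the device can still accept */
    };

    void hw_db_post(uint32_t bytes);  /* stand-in for __hal_rxd_db_post() */

    static void
    post_db(struct ring_db *r)
    {
        if (r->db_byte_count <= r->rxd_mem_avail) {
            /* Everything fits: post it all and charge the budget. */
            hw_db_post(r->db_byte_count);
            r->rxd_mem_avail -= r->db_byte_count;
            r->db_byte_count = 0;
        } else {
            /* Post only what the device can take; carry the rest forward. */
            hw_db_post(r->rxd_mem_avail);
            r->db_byte_count -= r->rxd_mem_avail;
            r->rxd_mem_avail = 0;
        }
    }
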
1354 __hal_ring_t *ring;
1371 ring = (__hal_ring_t *) vp->vpath->ringh;
1373 vxge_assert(ring != NULL);
1376 vxge_os_spin_lock(&ring->channel.post_lock);
1378 vxge_os_spin_lock_irq(&ring->channel.post_lock, flags);
1381 __hal_channel_dtr_try_complete(&ring->channel, &rxdh);
1397 vxge_os_spin_unlock(&ring->channel.post_lock);
1399 vxge_os_spin_unlock_irq(&ring->channel.post_lock, flags);
1416 * HAL uses ring callback (*vxge_hal_ring_callback_f) to notify
1424 * ring callback, or in a deferred fashion and separate (from HAL)
1448 __hal_ring_t *ring;
1472 ring = (__hal_ring_t *) vp->vpath->ringh;
1474 vxge_assert(ring != NULL);
1480 vxge_os_spin_lock(&ring->channel.post_lock);
1482 vxge_os_spin_lock_irq(&ring->channel.post_lock, flags);
1485 __hal_channel_dtr_try_complete(&ring->channel, rxdh);
1499 priv = __hal_ring_rxd_priv(ring, rxdp);
1500 vxge_os_dma_sync(ring->channel.pdev,
1518 if (++ring->cmpl_cnt > ring->indicate_max_pkts) {
1527 __hal_channel_dtr_complete(&ring->channel);
1529 *rxd_priv = VXGE_HAL_RING_ULD_PRIV(ring, rxdp);
1531 ring->rxd_mem_avail +=
1532 (VXGE_HAL_RING_HAL_PRIV(ring, rxdp))->db_bytes;
1534 ring->stats->common_stats.usage_cnt++;
1535 if (ring->stats->common_stats.usage_max <
1536 ring->stats->common_stats.usage_cnt)
1537 ring->stats->common_stats.usage_max =
1538 ring->stats->common_stats.usage_cnt;
1540 switch (ring->buffer_mode) {
1542 ring->channel.poll_bytes +=
1547 ring->channel.poll_bytes +=
1556 ring->channel.poll_bytes +=
1578 vxge_os_spin_unlock(&ring->channel.post_lock);
1580 vxge_os_spin_unlock_irq(&ring->channel.post_lock, flags);
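
The completion poll above pops finished RxDs until none remain or indicate_max_pkts have been handed up in one burst, returns each RxD's db_bytes to the doorbell budget, and tracks a usage high-water mark. A simplified per-descriptor step, with try_complete() standing in for the channel's __hal_channel_dtr_try_complete():

    #include <stdint.h>

    struct cmpl_ring {
        uint32_t cmpl_cnt;           /* completions handed up this poll */
        uint32_t indicate_max_pkts;  /* burst limit per poll */
        uint32_t rxd_mem_avail;      /* doorbell budget, see post_db() above */
        uint32_t usage_cnt, usage_max;
    };

    /* Stand-in: next hardware-completed RxD, or NULL when none remain. */
    void *try_complete(void);

    static void *
    next_completed(struct cmpl_ring *r, uint32_t db_bytes)
    {
        void *rxd;

        /* Stop the burst once indicate_max_pkts completions were handed up. */
        if (r->cmpl_cnt >= r->indicate_max_pkts)
            return (NULL);

        rxd = try_complete();
        if (rxd == NULL)
            return (NULL);

        r->cmpl_cnt++;
        r->rxd_mem_avail += db_bytes;  /* memory returns to the db budget */

        /* Track how many RxDs the ULD holds, and the high-water mark. */
        r->usage_cnt++;
        if (r->usage_max < r->usage_cnt)
            r->usage_max = r->usage_cnt;
        return (rxd);
    }
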
1777 __hal_ring_t *ring;
1792 ring = (__hal_ring_t *) vp->vpath->ringh;
1794 vxge_assert(ring != NULL);
1797 vxge_os_spin_lock(&ring->channel.post_lock);
1799 vxge_os_spin_lock_irq(&ring->channel.post_lock, flags);
1802 __hal_channel_dtr_free(&ring->channel, VXGE_HAL_RING_RXD_INDEX(rxdh));
1804 VXGE_HAL_RING_HAL_PRIV(ring, rxdh)->allocated = 0;
1808 vxge_os_spin_unlock(&ring->channel.post_lock);
1810 vxge_os_spin_unlock_irq(&ring->channel.post_lock, flags);