/*-
 * Copyright (c) 2002-2007 Neterion, Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */

#ifdef XGE_DEBUG_FP
#include <dev/nxge/include/xgehal-ring.h>
#endif

__HAL_STATIC_RING __HAL_INLINE_RING xge_hal_ring_rxd_priv_t*
__hal_ring_rxd_priv(xge_hal_ring_t *ring, xge_hal_dtr_h dtrh)
{

	xge_hal_ring_rxd_1_t *rxdp = (xge_hal_ring_rxd_1_t *)dtrh;
	xge_hal_ring_rxd_priv_t *rxd_priv;

	xge_assert(rxdp);

#if defined(XGE_HAL_USE_5B_MODE)
	xge_assert(ring);
	if (ring->buffer_mode == XGE_HAL_RING_QUEUE_BUFFER_MODE_5) {
	    xge_hal_ring_rxd_5_t *rxdp_5 = (xge_hal_ring_rxd_5_t *)dtrh;
#if defined (XGE_OS_PLATFORM_64BIT)
	    int memblock_idx = rxdp_5->host_control >> 16;
	    int i = rxdp_5->host_control & 0xFFFF;
	    rxd_priv = (xge_hal_ring_rxd_priv_t *)
	        ((char*)ring->mempool->memblocks_priv_arr[memblock_idx] + ring->rxd_priv_size * i);
#else
	    /* 32-bit case */
	    rxd_priv = (xge_hal_ring_rxd_priv_t *)rxdp_5->host_control;
#endif
	} else
#endif
	{
	    rxd_priv = (xge_hal_ring_rxd_priv_t *)
	            (ulong_t)rxdp->host_control;
	}

	xge_assert(rxd_priv);
	xge_assert(rxd_priv->dma_object);

	xge_assert(rxd_priv->dma_object->handle == rxd_priv->dma_handle);

	xge_assert(rxd_priv->dma_object->addr + rxd_priv->dma_offset ==
	                        rxd_priv->dma_addr);

	return rxd_priv;
}

__HAL_STATIC_RING __HAL_INLINE_RING int
__hal_ring_block_memblock_idx(xge_hal_ring_block_t *block)
{
	   return (int)*((u64 *)(void *)((char *)block +
	                           XGE_HAL_RING_MEMBLOCK_IDX_OFFSET));
}

__HAL_STATIC_RING __HAL_INLINE_RING void
__hal_ring_block_memblock_idx_set(xge_hal_ring_block_t*block, int memblock_idx)
{
	   *((u64 *)(void *)((char *)block +
	                   XGE_HAL_RING_MEMBLOCK_IDX_OFFSET)) =
	                   memblock_idx;
}


__HAL_STATIC_RING __HAL_INLINE_RING dma_addr_t
__hal_ring_block_next_pointer(xge_hal_ring_block_t *block)
{
	return (dma_addr_t)*((u64 *)(void *)((char *)block +
	        XGE_HAL_RING_NEXT_BLOCK_POINTER_OFFSET));
}

__HAL_STATIC_RING __HAL_INLINE_RING void
__hal_ring_block_next_pointer_set(xge_hal_ring_block_t *block,
	        dma_addr_t dma_next)
{
	*((u64 *)(void *)((char *)block +
	          XGE_HAL_RING_NEXT_BLOCK_POINTER_OFFSET)) = dma_next;
}

/**
 * xge_hal_ring_dtr_private - Get ULD private per-descriptor data.
 * @channelh: Channel handle.
 * @dtrh: Descriptor handle.
 *
 * Returns: private ULD info associated with the descriptor.
 * ULD requests per-descriptor space via xge_hal_channel_open().
 *
 * See also: xge_hal_fifo_dtr_private().
 * Usage: See ex_rx_compl{}.
 */
__HAL_STATIC_RING __HAL_INLINE_RING void*
xge_hal_ring_dtr_private(xge_hal_channel_h channelh, xge_hal_dtr_h dtrh)
{
	return (char *)__hal_ring_rxd_priv((xge_hal_ring_t *) channelh, dtrh) +
	                sizeof(xge_hal_ring_rxd_priv_t);
}
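
/*
 * Illustrative sketch (not part of the HAL): a ULD that requested
 * per-descriptor space when opening the channel (see
 * xge_hal_channel_open()) can keep its own receive context there, for
 * example the virtual address and bus address of the posted buffer, and
 * recover it later from a descriptor handle.  my_rxd_ctx_t is purely
 * hypothetical.
 *
 *	typedef struct my_rxd_ctx_t {
 *	    void       *buffer;
 *	    dma_addr_t dma;
 *	} my_rxd_ctx_t;
 *
 *	my_rxd_ctx_t *ctx = (my_rxd_ctx_t *)
 *	    xge_hal_ring_dtr_private(channelh, dtrh);
 */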

/**
 * xge_hal_ring_dtr_reserve - Reserve ring descriptor.
 * @channelh: Channel handle.
 * @dtrh: Reserved descriptor. On success HAL fills this "out" parameter
 *        with a valid handle.
 *
 * Reserve Rx descriptor for the subsequent filling-in (by upper layer
 * driver (ULD)) and posting on the corresponding channel (@channelh)
 * via xge_hal_ring_dtr_post().
 *
 * Returns: XGE_HAL_OK - success.
 * XGE_HAL_INF_OUT_OF_DESCRIPTORS - Currently no descriptors available.
 *
 * See also: xge_hal_fifo_dtr_reserve(), xge_hal_ring_dtr_free(),
 * xge_hal_fifo_dtr_reserve_sp(), xge_hal_status_e{}.
 * Usage: See ex_post_all_rx{}.
 */
__HAL_STATIC_RING __HAL_INLINE_RING xge_hal_status_e
xge_hal_ring_dtr_reserve(xge_hal_channel_h channelh, xge_hal_dtr_h *dtrh)
{
	xge_hal_status_e status;
#if defined(XGE_HAL_RX_MULTI_RESERVE_IRQ)
	unsigned long flags;
#endif

#if defined(XGE_HAL_RX_MULTI_RESERVE)
	xge_os_spin_lock(&((xge_hal_channel_t*)channelh)->reserve_lock);
#elif defined(XGE_HAL_RX_MULTI_RESERVE_IRQ)
	xge_os_spin_lock_irq(&((xge_hal_channel_t*)channelh)->reserve_lock,
	flags);
#endif

	status = __hal_channel_dtr_alloc(channelh, dtrh);

#if defined(XGE_HAL_RX_MULTI_RESERVE)
	xge_os_spin_unlock(&((xge_hal_channel_t*)channelh)->reserve_lock);
#elif defined(XGE_HAL_RX_MULTI_RESERVE_IRQ)
	xge_os_spin_unlock_irq(&((xge_hal_channel_t*)channelh)->reserve_lock,
	             flags);
#endif

	if (status == XGE_HAL_OK) {
	    xge_hal_ring_rxd_1_t *rxdp = (xge_hal_ring_rxd_1_t *)*dtrh;

	    /* instead of memset: reset this RxD */
	    rxdp->control_1 = rxdp->control_2 = 0;

#if defined(XGE_OS_MEMORY_CHECK)
	    __hal_ring_rxd_priv((xge_hal_ring_t *) channelh, rxdp)->allocated = 1;
#endif
	}

	return status;
}
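
/*
 * Illustrative sketch (not part of the HAL), in the spirit of the
 * ex_post_all_rx{} example referenced above: reserve descriptors until
 * the ring is exhausted, attach a pre-mapped receive buffer to each one
 * (1-buffer mode) and post it.  get_rx_buffer() stands for whatever the
 * ULD uses to obtain a DMA-mapped buffer and is purely hypothetical.
 *
 *	xge_hal_dtr_h dtr;
 *	dma_addr_t dma;
 *	int size;
 *
 *	while (xge_hal_ring_dtr_reserve(channelh, &dtr) == XGE_HAL_OK) {
 *	    if (get_rx_buffer(&dma, &size) != 0) {
 *	        xge_hal_ring_dtr_free(channelh, dtr);
 *	        break;
 *	    }
 *	    xge_hal_ring_dtr_1b_set(dtr, dma, size);
 *	    xge_hal_ring_dtr_post(channelh, dtr);
 *	}
 */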

/**
 * xge_hal_ring_dtr_info_get - Get extended information associated with
 * a completed receive descriptor for 1b mode.
 * @channelh: Channel handle.
 * @dtrh: Descriptor handle.
 * @ext_info: See xge_hal_dtr_info_t{}. Returned by HAL.
 *
 * Retrieve extended information associated with a completed receive descriptor.
 *
 * See also: xge_hal_dtr_info_t{}, xge_hal_ring_dtr_1b_get(),
 * xge_hal_ring_dtr_5b_get().
 */
__HAL_STATIC_RING __HAL_INLINE_RING void
xge_hal_ring_dtr_info_get(xge_hal_channel_h channelh, xge_hal_dtr_h dtrh,
	        xge_hal_dtr_info_t *ext_info)
{
	/* cast to 1-buffer mode RxD: the code below relies on the fact
	 * that control_1 and control_2 are formatted the same way.. */
	xge_hal_ring_rxd_1_t *rxdp = (xge_hal_ring_rxd_1_t *)dtrh;

	ext_info->l3_cksum = XGE_HAL_RXD_GET_L3_CKSUM(rxdp->control_1);
	ext_info->l4_cksum = XGE_HAL_RXD_GET_L4_CKSUM(rxdp->control_1);
	    ext_info->frame = XGE_HAL_RXD_GET_FRAME_TYPE(rxdp->control_1);
	    ext_info->proto = XGE_HAL_RXD_GET_FRAME_PROTO(rxdp->control_1);
	ext_info->vlan = XGE_HAL_RXD_GET_VLAN_TAG(rxdp->control_2);

	/* Herc only, a few extra cycles imposed on Xena and/or
	 * when RTH is not enabled.
	 * Alternatively, could check
	 * xge_hal_device_check_id(), hldev->config.rth_en, queue->rth_en */
	ext_info->rth_it_hit = XGE_HAL_RXD_GET_RTH_IT_HIT(rxdp->control_1);
	ext_info->rth_spdm_hit =
	XGE_HAL_RXD_GET_RTH_SPDM_HIT(rxdp->control_1);
	ext_info->rth_hash_type =
	XGE_HAL_RXD_GET_RTH_HASH_TYPE(rxdp->control_1);
	ext_info->rth_value = XGE_HAL_RXD_1_GET_RTH_VALUE(rxdp->control_2);
}
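
/*
 * Illustrative sketch (not part of the HAL): a ULD completion handler
 * would typically consult the extended information to decide whether the
 * hardware checksums can be trusted.  The XGE_HAL_FRAME_PROTO_TCP_OR_UDP,
 * XGE_HAL_L3_CKSUM_OK and XGE_HAL_L4_CKSUM_OK definitions are assumed to
 * come from the HAL headers.
 *
 *	xge_hal_dtr_info_t ext_info;
 *	int csum_ok;
 *
 *	xge_hal_ring_dtr_info_get(channelh, dtrh, &ext_info);
 *	csum_ok = (ext_info.proto & XGE_HAL_FRAME_PROTO_TCP_OR_UDP) &&
 *	    ext_info.l3_cksum == XGE_HAL_L3_CKSUM_OK &&
 *	    ext_info.l4_cksum == XGE_HAL_L4_CKSUM_OK;
 */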

/**
 * xge_hal_ring_dtr_info_nb_get - Get extended information associated
 * with a completed receive descriptor for 3b or 5b
 * modes.
 * @channelh: Channel handle.
 * @dtrh: Descriptor handle.
 * @ext_info: See xge_hal_dtr_info_t{}. Returned by HAL.
 *
 * Retrieve extended information associated with a completed receive descriptor.
 *
 * See also: xge_hal_dtr_info_t{}, xge_hal_ring_dtr_1b_get(),
 *           xge_hal_ring_dtr_5b_get().
 */
__HAL_STATIC_RING __HAL_INLINE_RING void
xge_hal_ring_dtr_info_nb_get(xge_hal_channel_h channelh, xge_hal_dtr_h dtrh,
	        xge_hal_dtr_info_t *ext_info)
{
	/* cast to 1-buffer mode RxD: the code below relies on the fact
	 * that control_1 and control_2 are formatted the same way.. */
	xge_hal_ring_rxd_1_t *rxdp = (xge_hal_ring_rxd_1_t *)dtrh;

	ext_info->l3_cksum = XGE_HAL_RXD_GET_L3_CKSUM(rxdp->control_1);
	ext_info->l4_cksum = XGE_HAL_RXD_GET_L4_CKSUM(rxdp->control_1);
	    ext_info->frame = XGE_HAL_RXD_GET_FRAME_TYPE(rxdp->control_1);
	    ext_info->proto = XGE_HAL_RXD_GET_FRAME_PROTO(rxdp->control_1);
	    ext_info->vlan = XGE_HAL_RXD_GET_VLAN_TAG(rxdp->control_2);
	/* Herc only, a few extra cycles imposed on Xena and/or
	 * when RTH is not enabled. Same comment as above. */
	ext_info->rth_it_hit = XGE_HAL_RXD_GET_RTH_IT_HIT(rxdp->control_1);
	ext_info->rth_spdm_hit =
	XGE_HAL_RXD_GET_RTH_SPDM_HIT(rxdp->control_1);
	ext_info->rth_hash_type =
	XGE_HAL_RXD_GET_RTH_HASH_TYPE(rxdp->control_1);
	ext_info->rth_value = (u32)rxdp->buffer0_ptr;
}

/**
 * xge_hal_ring_dtr_1b_set - Prepare 1-buffer-mode descriptor.
 * @dtrh: Descriptor handle.
 * @dma_pointer: DMA address of a single receive buffer this descriptor
 *               should carry. Note that by the time
 *               xge_hal_ring_dtr_1b_set
 *               is called, the receive buffer should be already mapped
 *               to the corresponding Xframe device.
 * @size: Size of the receive @dma_pointer buffer.
 *
 * Prepare 1-buffer-mode Rx descriptor for posting
 * (via xge_hal_ring_dtr_post()).
 *
 * This inline helper-function does not return any parameters and always
 * succeeds.
 *
 * See also: xge_hal_ring_dtr_3b_set(), xge_hal_ring_dtr_5b_set().
 * Usage: See ex_post_all_rx{}.
 */
__HAL_STATIC_RING __HAL_INLINE_RING void
xge_hal_ring_dtr_1b_set(xge_hal_dtr_h dtrh, dma_addr_t dma_pointer, int size)
{
	xge_hal_ring_rxd_1_t *rxdp = (xge_hal_ring_rxd_1_t *)dtrh;
	rxdp->buffer0_ptr = dma_pointer;
	rxdp->control_2 &= (~XGE_HAL_RXD_1_MASK_BUFFER0_SIZE);
	rxdp->control_2 |= XGE_HAL_RXD_1_SET_BUFFER0_SIZE(size);

	xge_debug_ring(XGE_TRACE, "xge_hal_ring_dtr_1b_set: rxdp %p control_2 %p buffer0_ptr %p",
	            (xge_hal_ring_rxd_1_t *)dtrh,
	            rxdp->control_2,
	            rxdp->buffer0_ptr);
}

/**
 * xge_hal_ring_dtr_1b_get - Get data from the completed 1-buf
 * descriptor.
 * @channelh: Channel handle.
 * @dtrh: Descriptor handle.
 * @dma_pointer: DMA address of a single receive buffer _this_ descriptor
 *               carries. Returned by HAL.
 * @pkt_length: Length (in bytes) of the data in the buffer pointed to by
 *              @dma_pointer. Returned by HAL.
 *
 * Retrieve protocol data from the completed 1-buffer-mode Rx descriptor.
 * This inline helper-function uses the completed descriptor to populate
 * the receive buffer pointer and other "out" parameters. The function
 * always succeeds.
 *
 * See also: xge_hal_ring_dtr_3b_get(), xge_hal_ring_dtr_5b_get().
 * Usage: See ex_rx_compl{}.
 */
__HAL_STATIC_RING __HAL_INLINE_RING void
xge_hal_ring_dtr_1b_get(xge_hal_channel_h channelh, xge_hal_dtr_h dtrh,
	    dma_addr_t *dma_pointer, int *pkt_length)
{
	xge_hal_ring_rxd_1_t *rxdp = (xge_hal_ring_rxd_1_t *)dtrh;

	*pkt_length = XGE_HAL_RXD_1_GET_BUFFER0_SIZE(rxdp->control_2);
	*dma_pointer = rxdp->buffer0_ptr;

	((xge_hal_channel_t *)channelh)->poll_bytes += *pkt_length;
}

/**
 * xge_hal_ring_dtr_3b_set - Prepare 3-buffer-mode descriptor.
 * @dtrh: Descriptor handle.
 * @dma_pointers: Array of DMA addresses. Contains exactly 3 receive buffers
 *               _this_ descriptor should carry.
 *               Note that by the time xge_hal_ring_dtr_3b_set
 *               is called, the receive buffers should be mapped
 *               to the corresponding Xframe device.
 * @sizes: Array of receive buffer sizes. Contains 3 sizes: one size per
 *         buffer from @dma_pointers.
 *
 * Prepare 3-buffer-mode Rx descriptor for posting (via
 * xge_hal_ring_dtr_post()).
 * This inline helper-function does not return any parameters and always
 * succeeds.
 *
 * See also: xge_hal_ring_dtr_1b_set(), xge_hal_ring_dtr_5b_set().
 */
__HAL_STATIC_RING __HAL_INLINE_RING void
xge_hal_ring_dtr_3b_set(xge_hal_dtr_h dtrh, dma_addr_t dma_pointers[],
	        int sizes[])
{
	xge_hal_ring_rxd_3_t *rxdp = (xge_hal_ring_rxd_3_t *)dtrh;
	rxdp->buffer0_ptr = dma_pointers[0];
	rxdp->control_2 &= (~XGE_HAL_RXD_3_MASK_BUFFER0_SIZE);
	rxdp->control_2 |= XGE_HAL_RXD_3_SET_BUFFER0_SIZE(sizes[0]);
	rxdp->buffer1_ptr = dma_pointers[1];
	rxdp->control_2 &= (~XGE_HAL_RXD_3_MASK_BUFFER1_SIZE);
	rxdp->control_2 |= XGE_HAL_RXD_3_SET_BUFFER1_SIZE(sizes[1]);
	rxdp->buffer2_ptr = dma_pointers[2];
	rxdp->control_2 &= (~XGE_HAL_RXD_3_MASK_BUFFER2_SIZE);
	rxdp->control_2 |= XGE_HAL_RXD_3_SET_BUFFER2_SIZE(sizes[2]);
}

/**
 * xge_hal_ring_dtr_3b_get - Get data from the completed 3-buf
 * descriptor.
 * @channelh: Channel handle.
 * @dtrh: Descriptor handle.
 * @dma_pointers: DMA addresses of the 3 receive buffers _this_ descriptor
 *                carries. The first two buffers contain ethernet and
 *                (IP + transport) headers. The 3rd buffer contains packet
 *                data.
 *                Returned by HAL.
 * @sizes: Array of receive buffer sizes. Contains 3 sizes: one size per
 * buffer from @dma_pointers. Returned by HAL.
 *
 * Retrieve protocol data from the completed 3-buffer-mode Rx descriptor.
 * This inline helper-function uses the completed descriptor to populate
 * the receive buffer pointers and other "out" parameters. The function
 * always succeeds.
 *
 * See also: xge_hal_ring_dtr_1b_get(), xge_hal_ring_dtr_5b_get().
 */
__HAL_STATIC_RING __HAL_INLINE_RING void
xge_hal_ring_dtr_3b_get(xge_hal_channel_h channelh, xge_hal_dtr_h dtrh,
	    dma_addr_t dma_pointers[], int sizes[])
{
	xge_hal_ring_rxd_3_t *rxdp = (xge_hal_ring_rxd_3_t *)dtrh;

	dma_pointers[0] = rxdp->buffer0_ptr;
	sizes[0] = XGE_HAL_RXD_3_GET_BUFFER0_SIZE(rxdp->control_2);

	dma_pointers[1] = rxdp->buffer1_ptr;
	sizes[1] = XGE_HAL_RXD_3_GET_BUFFER1_SIZE(rxdp->control_2);

	dma_pointers[2] = rxdp->buffer2_ptr;
	sizes[2] = XGE_HAL_RXD_3_GET_BUFFER2_SIZE(rxdp->control_2);

	((xge_hal_channel_t *)channelh)->poll_bytes += sizes[0] + sizes[1] +
	    sizes[2];
}

/**
 * xge_hal_ring_dtr_5b_set - Prepare 5-buffer-mode descriptor.
 * @dtrh: Descriptor handle.
 * @dma_pointers: Array of DMA addresses. Contains exactly 5 receive buffers
 *               _this_ descriptor should carry.
 *               Note that by the time xge_hal_ring_dtr_5b_set
 *               is called, the receive buffers should be mapped
 *               to the corresponding Xframe device.
 * @sizes: Array of receive buffer sizes. Contains 5 sizes: one size per
 *         buffer from @dma_pointers.
 *
 * Prepare 5-buffer-mode Rx descriptor for posting (via
 * xge_hal_ring_dtr_post()).
 * This inline helper-function does not return any parameters and always
 * succeeds.
 *
 * See also: xge_hal_ring_dtr_1b_set(), xge_hal_ring_dtr_3b_set().
 */
__HAL_STATIC_RING __HAL_INLINE_RING void
xge_hal_ring_dtr_5b_set(xge_hal_dtr_h dtrh, dma_addr_t dma_pointers[],
	        int sizes[])
{
	xge_hal_ring_rxd_5_t *rxdp = (xge_hal_ring_rxd_5_t *)dtrh;
	rxdp->buffer0_ptr = dma_pointers[0];
	rxdp->control_2 &= (~XGE_HAL_RXD_5_MASK_BUFFER0_SIZE);
	rxdp->control_2 |= XGE_HAL_RXD_5_SET_BUFFER0_SIZE(sizes[0]);
	rxdp->buffer1_ptr = dma_pointers[1];
	rxdp->control_2 &= (~XGE_HAL_RXD_5_MASK_BUFFER1_SIZE);
	rxdp->control_2 |= XGE_HAL_RXD_5_SET_BUFFER1_SIZE(sizes[1]);
	rxdp->buffer2_ptr = dma_pointers[2];
	rxdp->control_2 &= (~XGE_HAL_RXD_5_MASK_BUFFER2_SIZE);
	rxdp->control_2 |= XGE_HAL_RXD_5_SET_BUFFER2_SIZE(sizes[2]);
	rxdp->buffer3_ptr = dma_pointers[3];
	rxdp->control_3 &= (~XGE_HAL_RXD_5_MASK_BUFFER3_SIZE);
	rxdp->control_3 |= XGE_HAL_RXD_5_SET_BUFFER3_SIZE(sizes[3]);
	rxdp->buffer4_ptr = dma_pointers[4];
	rxdp->control_3 &= (~XGE_HAL_RXD_5_MASK_BUFFER4_SIZE);
	rxdp->control_3 |= XGE_HAL_RXD_5_SET_BUFFER4_SIZE(sizes[4]);
}

/**
 * xge_hal_ring_dtr_5b_get - Get data from the completed 5-buf
 * descriptor.
 * @channelh: Channel handle.
 * @dtrh: Descriptor handle.
 * @dma_pointers: DMA addresses of the 5 receive buffers _this_ descriptor
 *                carries. The first 4 buffers contain L2 (ethernet) through
 *                L5 headers. The 5th buffer contains the received
 *                (application) data. Returned by HAL.
 * @sizes: Array of receive buffer sizes. Contains 5 sizes: one size per
 * buffer from @dma_pointers. Returned by HAL.
 *
 * Retrieve protocol data from the completed 5-buffer-mode Rx descriptor.
 * This inline helper-function uses the completed descriptor to populate
 * the receive buffer pointers and other "out" parameters. The function
 * always succeeds.
 *
 * See also: xge_hal_ring_dtr_1b_get(), xge_hal_ring_dtr_3b_get().
 */
__HAL_STATIC_RING __HAL_INLINE_RING void
xge_hal_ring_dtr_5b_get(xge_hal_channel_h channelh, xge_hal_dtr_h dtrh,
	    dma_addr_t dma_pointers[], int sizes[])
{
	xge_hal_ring_rxd_5_t *rxdp = (xge_hal_ring_rxd_5_t *)dtrh;

	dma_pointers[0] = rxdp->buffer0_ptr;
	sizes[0] = XGE_HAL_RXD_5_GET_BUFFER0_SIZE(rxdp->control_2);

	dma_pointers[1] = rxdp->buffer1_ptr;
	sizes[1] = XGE_HAL_RXD_5_GET_BUFFER1_SIZE(rxdp->control_2);

	dma_pointers[2] = rxdp->buffer2_ptr;
	sizes[2] = XGE_HAL_RXD_5_GET_BUFFER2_SIZE(rxdp->control_2);

	dma_pointers[3] = rxdp->buffer3_ptr;
	sizes[3] = XGE_HAL_RXD_5_GET_BUFFER3_SIZE(rxdp->control_3);

	dma_pointers[4] = rxdp->buffer4_ptr;
	sizes[4] = XGE_HAL_RXD_5_GET_BUFFER4_SIZE(rxdp->control_3);

	((xge_hal_channel_t *)channelh)->poll_bytes += sizes[0] + sizes[1] +
	    sizes[2] + sizes[3] + sizes[4];
}


/**
 * xge_hal_ring_dtr_pre_post - Prepare descriptor for posting.
 * @channelh: Channel handle.
 * @dtrh: Descriptor handle.
 *
 * First half of descriptor posting: mark the descriptor as not yet
 * completed and queue it on the channel. Ownership is handed to the
 * device by the subsequent xge_hal_ring_dtr_post_post() or
 * xge_hal_ring_dtr_post_post_wmb().
 */
__HAL_STATIC_RING __HAL_INLINE_RING void
xge_hal_ring_dtr_pre_post(xge_hal_channel_h channelh, xge_hal_dtr_h dtrh)
{
	xge_hal_ring_rxd_1_t *rxdp = (xge_hal_ring_rxd_1_t *)dtrh;
#if defined(XGE_OS_DMA_REQUIRES_SYNC) && defined(XGE_HAL_DMA_DTR_STREAMING)
	xge_hal_ring_rxd_priv_t *priv;
	xge_hal_ring_t *ring = (xge_hal_ring_t *)channelh;
#endif
#if defined(XGE_HAL_RX_MULTI_POST_IRQ)
	unsigned long flags;
#endif

	rxdp->control_2 |= XGE_HAL_RXD_NOT_COMPLETED;

#ifdef XGE_DEBUG_ASSERT
	    /* make sure Xena overwrites the (illegal) t_code on completion */
	    XGE_HAL_RXD_SET_T_CODE(rxdp->control_1, XGE_HAL_RXD_T_CODE_UNUSED_C);
#endif

	xge_debug_ring(XGE_TRACE, "xge_hal_ring_dtr_pre_post: rxd 0x"XGE_OS_LLXFMT" posted %d  post_qid %d",
	        (unsigned long long)(ulong_t)dtrh,
	        ((xge_hal_ring_t *)channelh)->channel.post_index,
	        ((xge_hal_ring_t *)channelh)->channel.post_qid);

#if defined(XGE_HAL_RX_MULTI_POST)
	xge_os_spin_lock(&((xge_hal_channel_t*)channelh)->post_lock);
#elif defined(XGE_HAL_RX_MULTI_POST_IRQ)
	xge_os_spin_lock_irq(&((xge_hal_channel_t*)channelh)->post_lock,
	flags);
#endif

#if defined(XGE_DEBUG_ASSERT) && defined(XGE_HAL_RING_ENFORCE_ORDER)
	{
	    xge_hal_channel_t *channel = (xge_hal_channel_t *)channelh;

	    if (channel->post_index != 0) {
	        xge_hal_dtr_h prev_dtrh;
	        xge_hal_ring_rxd_priv_t *rxdp_priv;

	        rxdp_priv = __hal_ring_rxd_priv((xge_hal_ring_t*)channel, rxdp);
	        prev_dtrh = channel->work_arr[channel->post_index - 1];

	        if (prev_dtrh != NULL &&
	            (rxdp_priv->dma_offset & (~0xFFF)) !=
	                    rxdp_priv->dma_offset) {
	            xge_assert((char *)prev_dtrh +
	                ((xge_hal_ring_t*)channel)->rxd_size == dtrh);
	        }
	    }
	}
#endif

	__hal_channel_dtr_post(channelh, dtrh);

#if defined(XGE_HAL_RX_MULTI_POST)
	xge_os_spin_unlock(&((xge_hal_channel_t*)channelh)->post_lock);
#elif defined(XGE_HAL_RX_MULTI_POST_IRQ)
	xge_os_spin_unlock_irq(&((xge_hal_channel_t*)channelh)->post_lock,
	               flags);
#endif
}


/**
 * xge_hal_ring_dtr_post_post - Transfer descriptor ownership to the device.
 * @channelh: Channel handle.
 * @dtrh: Descriptor handle.
 *
 * Second half of descriptor posting: mark the previously pre-posted
 * descriptor (see xge_hal_ring_dtr_pre_post()) as owned by the Xframe
 * device and, if required, sync it for the device.
 */
__HAL_STATIC_RING __HAL_INLINE_RING void
xge_hal_ring_dtr_post_post(xge_hal_channel_h channelh, xge_hal_dtr_h dtrh)
{
	xge_hal_ring_rxd_1_t *rxdp = (xge_hal_ring_rxd_1_t *)dtrh;
	xge_hal_ring_t *ring = (xge_hal_ring_t *)channelh;
#if defined(XGE_OS_DMA_REQUIRES_SYNC) && defined(XGE_HAL_DMA_DTR_STREAMING)
	xge_hal_ring_rxd_priv_t *priv;
#endif
	/* do POST */
	rxdp->control_1 |= XGE_HAL_RXD_POSTED_4_XFRAME;

#if defined(XGE_OS_DMA_REQUIRES_SYNC) && defined(XGE_HAL_DMA_DTR_STREAMING)
	priv = __hal_ring_rxd_priv(ring, rxdp);
	xge_os_dma_sync(ring->channel.pdev,
	              priv->dma_handle, priv->dma_addr,
	          priv->dma_offset, ring->rxd_size,
	          XGE_OS_DMA_DIR_TODEVICE);
#endif

	xge_debug_ring(XGE_TRACE, "xge_hal_ring_dtr_post_post: rxdp %p control_1 %p",
	              (xge_hal_ring_rxd_1_t *)dtrh,
	              rxdp->control_1);

	if (ring->channel.usage_cnt > 0)
	    ring->channel.usage_cnt--;
}

/**
 * xge_hal_ring_dtr_post_post_wmb - Transfer descriptor ownership after a
 * write barrier.
 * @channelh: Channel handle.
 * @dtrh: Descriptor handle.
 *
 * Similar to xge_hal_ring_dtr_post_post(), but in addition it issues a
 * memory barrier before changing the ownership.
 */
__HAL_STATIC_RING __HAL_INLINE_RING void
xge_hal_ring_dtr_post_post_wmb(xge_hal_channel_h channelh, xge_hal_dtr_h dtrh)
{
	xge_hal_ring_rxd_1_t *rxdp = (xge_hal_ring_rxd_1_t *)dtrh;
	xge_hal_ring_t *ring = (xge_hal_ring_t *)channelh;
#if defined(XGE_OS_DMA_REQUIRES_SYNC) && defined(XGE_HAL_DMA_DTR_STREAMING)
	xge_hal_ring_rxd_priv_t *priv;
#endif
	/* Do memory barrier before changing the ownership */
	xge_os_wmb();

	/* do POST */
	rxdp->control_1 |= XGE_HAL_RXD_POSTED_4_XFRAME;

#if defined(XGE_OS_DMA_REQUIRES_SYNC) && defined(XGE_HAL_DMA_DTR_STREAMING)
	priv = __hal_ring_rxd_priv(ring, rxdp);
	xge_os_dma_sync(ring->channel.pdev,
	              priv->dma_handle, priv->dma_addr,
	          priv->dma_offset, ring->rxd_size,
	          XGE_OS_DMA_DIR_TODEVICE);
#endif

	if (ring->channel.usage_cnt > 0)
	    ring->channel.usage_cnt--;

	xge_debug_ring(XGE_TRACE, "xge_hal_ring_dtr_post_post_wmb: rxdp %p control_1 %p rxds_with_host %d",
	              (xge_hal_ring_rxd_1_t *)dtrh,
	              rxdp->control_1, ring->channel.usage_cnt);

}
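
/*
 * Illustrative sketch (not part of the HAL): one possible use of the
 * split pre_post/post_post pair is to queue the descriptor first and
 * hand ownership to the device only after every prior store to the
 * descriptor and its buffer is visible, which is what the _wmb variant
 * guarantees:
 *
 *	xge_hal_ring_dtr_1b_set(dtr, dma, size);
 *	xge_hal_ring_dtr_pre_post(channelh, dtr);
 *	xge_hal_ring_dtr_post_post_wmb(channelh, dtr);
 */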

/**
 * xge_hal_ring_dtr_post - Post descriptor on the ring channel.
 * @channelh: Channel handle.
 * @dtrh: Descriptor obtained via xge_hal_ring_dtr_reserve().
 *
 * Post descriptor on the 'ring' type channel.
 * Prior to posting, the descriptor should be filled in accordance with the
 * Host/Xframe interface specification for a given service (LL, etc.).
 *
 * See also: xge_hal_fifo_dtr_post_many(), xge_hal_fifo_dtr_post().
 * Usage: See ex_post_all_rx{}.
 */
__HAL_STATIC_RING __HAL_INLINE_RING void
xge_hal_ring_dtr_post(xge_hal_channel_h channelh, xge_hal_dtr_h dtrh)
{
	xge_hal_ring_dtr_pre_post(channelh, dtrh);
	xge_hal_ring_dtr_post_post(channelh, dtrh);
}

/**
 * xge_hal_ring_dtr_next_completed - Get the _next_ completed
 * descriptor.
 * @channelh: Channel handle.
 * @dtrh: Descriptor handle. Returned by HAL.
 * @t_code: Transfer code, as per Xframe User Guide,
 *          Receive Descriptor Format. Returned by HAL.
 *
 * Retrieve the _next_ completed descriptor.
 * HAL uses the channel callback (*xge_hal_channel_callback_f) to notify
 * the upper-layer driver (ULD) of new completed descriptors. After that
 * the ULD can use xge_hal_ring_dtr_next_completed to retrieve the rest
 * of the completions (the very first completion is passed by HAL via
 * xge_hal_channel_callback_f).
 *
 * Implementation-wise, the upper-layer driver is free to call
 * xge_hal_ring_dtr_next_completed either immediately from inside the
 * channel callback, or in a deferred fashion and separate (from HAL)
 * context.
 *
 * Non-zero @t_code means failure to fill-in receive buffer(s)
 * of the descriptor.
 * For instance, parity error detected during the data transfer.
 * In this case Xframe will complete the descriptor and indicate
 * to the host that the received data is not to be used.
 * For details please refer to Xframe User Guide.
 *
 * Returns: XGE_HAL_OK - success.
 * XGE_HAL_INF_NO_MORE_COMPLETED_DESCRIPTORS - No completed descriptors
 * are currently available for processing.
 *
 * See also: xge_hal_channel_callback_f{},
 * xge_hal_fifo_dtr_next_completed(), xge_hal_status_e{}.
 * Usage: See ex_rx_compl{}.
 */
__HAL_STATIC_RING __HAL_INLINE_RING xge_hal_status_e
xge_hal_ring_dtr_next_completed(xge_hal_channel_h channelh, xge_hal_dtr_h *dtrh,
	            u8 *t_code)
{
	xge_hal_ring_rxd_1_t *rxdp; /* doesn't matter 1, 3 or 5... */
	xge_hal_ring_t *ring = (xge_hal_ring_t *)channelh;
#if defined(XGE_OS_DMA_REQUIRES_SYNC) && defined(XGE_HAL_DMA_DTR_STREAMING)
	xge_hal_ring_rxd_priv_t *priv;
#endif

	__hal_channel_dtr_try_complete(ring, dtrh);
	rxdp = (xge_hal_ring_rxd_1_t *)*dtrh;
	if (rxdp == NULL) {
	    return XGE_HAL_INF_NO_MORE_COMPLETED_DESCRIPTORS;
	}

#if defined(XGE_OS_DMA_REQUIRES_SYNC) && defined(XGE_HAL_DMA_DTR_STREAMING)
	/* Note: 24 bytes at most means:
	 *  - Control_3 in case of 5-buffer mode
	 *  - Control_1 and Control_2
	 *
	 * This is the only length that needs to be invalidated
	 * for this type of channel. */
	priv = __hal_ring_rxd_priv(ring, rxdp);
	xge_os_dma_sync(ring->channel.pdev,
	              priv->dma_handle, priv->dma_addr,
	          priv->dma_offset, 24,
	          XGE_OS_DMA_DIR_FROMDEVICE);
#endif

	/* check whether it is not the end */
	if (!(rxdp->control_2 & XGE_HAL_RXD_NOT_COMPLETED) &&
	    !(rxdp->control_1 & XGE_HAL_RXD_POSTED_4_XFRAME)) {
#ifndef XGE_HAL_IRQ_POLLING
	    if (++ring->cmpl_cnt > ring->indicate_max_pkts) {
	        /* reset it. since we don't want to return
	         * garbage to the ULD */
	        *dtrh = 0;
	        return XGE_HAL_COMPLETIONS_REMAIN;
	    }
#endif

#ifdef XGE_DEBUG_ASSERT
#if defined(XGE_HAL_USE_5B_MODE)
#if !defined(XGE_OS_PLATFORM_64BIT)
	    if (ring->buffer_mode == XGE_HAL_RING_QUEUE_BUFFER_MODE_5) {
	        xge_assert(((xge_hal_ring_rxd_5_t *)
	                rxdp)->host_control!=0);
	    }
#endif

#else
	    xge_assert(rxdp->host_control!=0);
#endif
#endif

	    __hal_channel_dtr_complete(ring);

	    *t_code = (u8)XGE_HAL_RXD_GET_T_CODE(rxdp->control_1);

	            /* see XGE_HAL_SET_RXD_T_CODE() above.. */
	    xge_assert(*t_code != XGE_HAL_RXD_T_CODE_UNUSED_C);

	    xge_debug_ring(XGE_TRACE,
	        "compl_index %d post_qid %d t_code %d rxd 0x"XGE_OS_LLXFMT,
	        ((xge_hal_channel_t*)ring)->compl_index,
	        ((xge_hal_channel_t*)ring)->post_qid, *t_code,
	        (unsigned long long)(ulong_t)rxdp);

	    ring->channel.usage_cnt++;
	    if (ring->channel.stats.usage_max < ring->channel.usage_cnt)
	        ring->channel.stats.usage_max = ring->channel.usage_cnt;

	    return XGE_HAL_OK;
	}

	/* reset it. since we don't want to return
	 * garbage to the ULD */
	*dtrh = 0;
	return XGE_HAL_INF_NO_MORE_COMPLETED_DESCRIPTORS;
}
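
/*
 * Illustrative sketch (not part of the HAL), in the spirit of the
 * ex_rx_compl{} example referenced above: drain completed descriptors
 * (1-buffer mode) from inside the channel callback.  deliver_packet()
 * and drop_packet() are hypothetical ULD routines.
 *
 *	xge_hal_dtr_h dtr;
 *	dma_addr_t dma;
 *	int len;
 *	u8 t_code;
 *
 *	while (xge_hal_ring_dtr_next_completed(channelh, &dtr, &t_code) ==
 *	       XGE_HAL_OK) {
 *	    xge_hal_ring_dtr_1b_get(channelh, dtr, &dma, &len);
 *	    if (t_code != 0)
 *	        drop_packet(dma, len);
 *	    else
 *	        deliver_packet(dma, len);
 *	    xge_hal_ring_dtr_free(channelh, dtr);
 *	}
 */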

/**
 * xge_hal_ring_dtr_free - Free descriptor.
 * @channelh: Channel handle.
 * @dtrh: Descriptor handle.
 *
 * Free the reserved descriptor. This operation is "symmetrical" to
 * xge_hal_ring_dtr_reserve. The "free-ing" completes the descriptor's
 * lifecycle.
 *
 * After free-ing (see xge_hal_ring_dtr_free()) the descriptor again can
 * be:
 *
 * - reserved (xge_hal_ring_dtr_reserve);
 *
 * - posted (xge_hal_ring_dtr_post);
 *
 * - completed (xge_hal_ring_dtr_next_completed);
 *
 * - and recycled again (xge_hal_ring_dtr_free).
 *
 * For alternative state transitions and more details please refer to
 * the design doc.
 *
 * See also: xge_hal_ring_dtr_reserve(), xge_hal_fifo_dtr_free().
 * Usage: See ex_rx_compl{}.
 */
__HAL_STATIC_RING __HAL_INLINE_RING void
xge_hal_ring_dtr_free(xge_hal_channel_h channelh, xge_hal_dtr_h dtrh)
{
#if defined(XGE_HAL_RX_MULTI_FREE_IRQ)
	unsigned long flags;
#endif

#if defined(XGE_HAL_RX_MULTI_FREE)
	xge_os_spin_lock(&((xge_hal_channel_t*)channelh)->free_lock);
#elif defined(XGE_HAL_RX_MULTI_FREE_IRQ)
	xge_os_spin_lock_irq(&((xge_hal_channel_t*)channelh)->free_lock,
	flags);
#endif

	__hal_channel_dtr_free(channelh, dtrh);
#if defined(XGE_OS_MEMORY_CHECK)
	__hal_ring_rxd_priv((xge_hal_ring_t * ) channelh, dtrh)->allocated = 0;
#endif

#if defined(XGE_HAL_RX_MULTI_FREE)
	xge_os_spin_unlock(&((xge_hal_channel_t*)channelh)->free_lock);
#elif defined(XGE_HAL_RX_MULTI_FREE_IRQ)
	xge_os_spin_unlock_irq(&((xge_hal_channel_t*)channelh)->free_lock,
	flags);
#endif
}

/**
 * xge_hal_ring_is_next_dtr_completed - Check if the next dtr is completed
 * @channelh: Channel handle.
 *
 * Checks if the _next_ completed descriptor is in host memory.
 *
 * Returns: XGE_HAL_OK - success.
 * XGE_HAL_INF_NO_MORE_COMPLETED_DESCRIPTORS - No completed descriptors
 * are currently available for processing.
 */
__HAL_STATIC_RING __HAL_INLINE_RING xge_hal_status_e
xge_hal_ring_is_next_dtr_completed(xge_hal_channel_h channelh)
{
	xge_hal_ring_rxd_1_t *rxdp; /* doesn't matter 1, 3 or 5... */
	xge_hal_ring_t *ring = (xge_hal_ring_t *)channelh;
	xge_hal_dtr_h dtrh;

	__hal_channel_dtr_try_complete(ring, &dtrh);
	rxdp = (xge_hal_ring_rxd_1_t *)dtrh;
	if (rxdp == NULL) {
	    return XGE_HAL_INF_NO_MORE_COMPLETED_DESCRIPTORS;
	}

	/* check whether it is not the end */
	if (!(rxdp->control_2 & XGE_HAL_RXD_NOT_COMPLETED) &&
	    !(rxdp->control_1 & XGE_HAL_RXD_POSTED_4_XFRAME)) {

#ifdef XGE_DEBUG_ASSERT
#if defined(XGE_HAL_USE_5B_MODE)
#if !defined(XGE_OS_PLATFORM_64BIT)
	    if (ring->buffer_mode == XGE_HAL_RING_QUEUE_BUFFER_MODE_5) {
	        xge_assert(((xge_hal_ring_rxd_5_t *)
	                rxdp)->host_control!=0);
	    }
#endif

#else
	    xge_assert(rxdp->host_control!=0);
#endif
#endif
	    return XGE_HAL_OK;
	}

	return XGE_HAL_INF_NO_MORE_COMPLETED_DESCRIPTORS;
}
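
/*
 * Illustrative sketch (not part of the HAL): a polling ULD can use the
 * check above to decide whether running its completion loop is
 * worthwhile.  process_rx_completions() is a hypothetical ULD routine.
 *
 *	if (xge_hal_ring_is_next_dtr_completed(channelh) == XGE_HAL_OK)
 *	    process_rx_completions(channelh);
 */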