Searched refs:pg_chunk (Results 1 - 8 of 8) sorted by relevance

/asuswrt-rt-n18u-9.0.0.4.380.2695/release/src-rt-6.x.4708/linux/linux-2.6/drivers/net/cxgb3/
sge.c 119 struct fl_pg_chunk pg_chunk; member in union:rx_sw_desc::__anon16453
352 if (q->use_pages && d->pg_chunk.page) {
353 (*d->pg_chunk.p_cnt)--;
354 if (!*d->pg_chunk.p_cnt)
356 d->pg_chunk.mapping,
359 put_page(d->pg_chunk.page);
360 d->pg_chunk.page = NULL;
390 if (q->pg_chunk.page) {
391 __free_pages(q->pg_chunk.page, q->order);
392 q->pg_chunk
[all...]
adapter.h 119 struct fl_pg_chunk pg_chunk; /* page chunk cache */ member in struct:sge_fl
/asuswrt-rt-n18u-9.0.0.4.380.2695/release/src-rt-6.x.4708/linux/linux-2.6.36/drivers/net/cxgb3/
sge.c 119 struct fl_pg_chunk pg_chunk; member in union:rx_sw_desc::__anon28146
352 if (q->use_pages && d->pg_chunk.page) {
353 (*d->pg_chunk.p_cnt)--;
354 if (!*d->pg_chunk.p_cnt)
356 d->pg_chunk.mapping,
359 put_page(d->pg_chunk.page);
360 d->pg_chunk.page = NULL;
390 if (q->pg_chunk.page) {
391 __free_pages(q->pg_chunk.page, q->order);
392 q->pg_chunk
[all...]
adapter.h 119 struct fl_pg_chunk pg_chunk; /* page chunk cache */ member in struct:sge_fl
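
The cxgb3 hits above are the free-list teardown path in sge.c: each RX descriptor in page mode carries a struct fl_pg_chunk whose p_cnt pointer is a count shared by every chunk carved from the same page. The DMA unmap runs only when that shared count drops to zero, and each descriptor then releases its own page reference with put_page(). Below is a minimal userspace sketch of that shared-count idea; it is a model for illustration, not the driver code, all names in it are made up, and free() stands in for the kernel's pci_unmap_page()/put_page() calls.

    #include <stdio.h>
    #include <stdlib.h>

    /* Model of fl_pg_chunk: several chunks share one backing block and one count. */
    struct pg_chunk_model {
        void *page;              /* backing block shared by several chunks   */
        unsigned int offset;     /* this chunk's offset within the block     */
        unsigned int *p_cnt;     /* shared count of chunks still outstanding */
    };

    /* Return one chunk; release the backing block when the last chunk comes back.
     * In the driver this is where pci_unmap_page() runs, and each chunk also
     * drops its own page reference with put_page(). */
    static void put_chunk(struct pg_chunk_model *c)
    {
        if (!c->page)
            return;
        if (--(*c->p_cnt) == 0) {
            free(c->page);
            free(c->p_cnt);
        }
        c->page = NULL;
    }

    int main(void)
    {
        unsigned int *cnt = malloc(sizeof(*cnt));
        void *page = malloc(4096);

        *cnt = 2;                                    /* two chunks from one block */
        struct pg_chunk_model a = { page, 0,    cnt };
        struct pg_chunk_model b = { page, 2048, cnt };

        put_chunk(&a);                               /* block still live          */
        put_chunk(&b);                               /* last chunk: block freed   */
        puts("both chunks returned, backing block released");
        return 0;
    }

The point of the shared count is that one DMA mapping can back several RX buffers, so the unmap cost is paid once per page rather than once per buffer.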
/asuswrt-rt-n18u-9.0.0.4.380.2695/release/src-rt-6.x.4708/linux/linux-2.6/drivers/net/qlge/
qlge_main.c 1083 if ((lbq_desc->p.pg_chunk.offset + rx_ring->lbq_buf_size)
1086 lbq_desc->p.pg_chunk.map,
1122 if (!rx_ring->pg_chunk.page) {
1124 rx_ring->pg_chunk.page = alloc_pages(__GFP_COLD | __GFP_COMP |
1127 if (unlikely(!rx_ring->pg_chunk.page)) {
1132 rx_ring->pg_chunk.offset = 0;
1133 map = pci_map_page(qdev->pdev, rx_ring->pg_chunk.page,
1137 __free_pages(rx_ring->pg_chunk.page,
1143 rx_ring->pg_chunk.map = map;
1144 rx_ring->pg_chunk
[all...]
qlge.h 1369 struct page_chunk pg_chunk; member in union:bq_desc::__anon16986
1441 struct page_chunk pg_chunk; /* current page for chunks */ member in struct:rx_ring
/asuswrt-rt-n18u-9.0.0.4.380.2695/release/src-rt-6.x.4708/linux/linux-2.6.36/drivers/net/qlge/
qlge_main.c 1083 if ((lbq_desc->p.pg_chunk.offset + rx_ring->lbq_buf_size)
1086 lbq_desc->p.pg_chunk.map,
1122 if (!rx_ring->pg_chunk.page) {
1124 rx_ring->pg_chunk.page = alloc_pages(__GFP_COLD | __GFP_COMP |
1127 if (unlikely(!rx_ring->pg_chunk.page)) {
1132 rx_ring->pg_chunk.offset = 0;
1133 map = pci_map_page(qdev->pdev, rx_ring->pg_chunk.page,
1137 __free_pages(rx_ring->pg_chunk.page,
1143 rx_ring->pg_chunk.map = map;
1144 rx_ring->pg_chunk
[all...]
qlge.h 1369 struct page_chunk pg_chunk; member in union:bq_desc::__anon28679
1441 struct page_chunk pg_chunk; /* current page for chunks */ member in struct:rx_ring
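
The qlge hits are the large-buffer refill path in qlge_main.c: rx_ring caches one compound page in rx_ring->pg_chunk, maps it once with pci_map_page(), and hands out lbq_buf_size-sized slices by advancing pg_chunk.offset. A new page is allocated and mapped only when the cached one is empty (lines 1122-1144), and the offset test at line 1083 detects the last slice of a page so its mapping can be torn down (line 1086). The sketch below models that carve-by-offset pattern in userspace; the names, sizes, and the malloc() stand-in for alloc_pages()/pci_map_page() are illustrative assumptions, and it deliberately skips the page reference counting the driver relies on to free exhausted pages.

    #include <stdio.h>
    #include <stdlib.h>

    #define BLOCK_SIZE 8192u     /* stands in for the order-N compound page   */
    #define CHUNK_SIZE 2048u     /* stands in for rx_ring->lbq_buf_size       */

    /* Model of rx_ring->pg_chunk: the currently cached block and next offset. */
    struct page_chunk_model {
        unsigned char *page;     /* current backing block, NULL when exhausted */
        unsigned int offset;     /* next free offset within the block          */
    };

    /* Hand out the next chunk, allocating a fresh block when the cache is empty.
     * In the driver the allocation is alloc_pages() followed by pci_map_page(). */
    static unsigned char *get_chunk(struct page_chunk_model *pc)
    {
        if (!pc->page) {
            pc->page = malloc(BLOCK_SIZE);
            if (!pc->page)
                return NULL;
            pc->offset = 0;
        }

        unsigned char *buf = pc->page + pc->offset;
        pc->offset += CHUNK_SIZE;

        /* Block fully handed out: drop it from the cache so the next call
         * allocates a new one.  (Simplification: the model never frees blocks;
         * in the driver the RX descriptors hold page references and release
         * them once the buffers are consumed.) */
        if (pc->offset + CHUNK_SIZE > BLOCK_SIZE)
            pc->page = NULL;

        return buf;
    }

    int main(void)
    {
        struct page_chunk_model pc = { NULL, 0 };

        for (int i = 0; i < 6; i++)
            printf("chunk %d at %p\n", i, (void *)get_chunk(&pc));
        return 0;
    }

Carving one mapped compound page into fixed-size slices amortizes both the page allocation and the pci_map_page() call across several large-buffer queue entries.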

Completed in 428 milliseconds