Searched refs:tq (Results 1 - 25 of 26) sorted by relevance


/linux-master/drivers/net/vmxnet3/
vmxnet3_xdp.c
22 struct vmxnet3_tx_queue *tq; local
29 tq = &adapter->tx_queue[cpu];
31 tq = &adapter->tx_queue[reciprocal_scale(cpu, tq_number)];
33 return tq;
117 struct vmxnet3_tx_queue *tq, bool dma_map)
127 dw2 = (tq->tx_ring.gen ^ 0x1) << VMXNET3_TXD_GEN_SHIFT;
129 ctx.sop_txd = tq->tx_ring.base + tq->tx_ring.next2fill;
133 tbi = tq->buf_info + tq
115 vmxnet3_xdp_xmit_frame(struct vmxnet3_adapter *adapter, struct xdp_frame *xdpf, struct vmxnet3_tx_queue *tq, bool dma_map) argument
204 struct vmxnet3_tx_queue *tq; local
227 struct vmxnet3_tx_queue *tq; local
[all...]
vmxnet3_drv.c
116 vmxnet3_tq_stopped(struct vmxnet3_tx_queue *tq, struct vmxnet3_adapter *adapter) argument
118 return tq->stopped;
123 vmxnet3_tq_start(struct vmxnet3_tx_queue *tq, struct vmxnet3_adapter *adapter) argument
125 tq->stopped = false;
126 netif_start_subqueue(adapter->netdev, tq - adapter->tx_queue);
131 vmxnet3_tq_wake(struct vmxnet3_tx_queue *tq, struct vmxnet3_adapter *adapter) argument
133 tq->stopped = false;
134 netif_wake_subqueue(adapter->netdev, (tq - adapter->tx_queue));
139 vmxnet3_tq_stop(struct vmxnet3_tx_queue *tq, struct vmxnet3_adapter *adapter) argument
141 tq
358 vmxnet3_unmap_pkt(u32 eop_idx, struct vmxnet3_tx_queue *tq, struct pci_dev *pdev, struct vmxnet3_adapter *adapter, struct xdp_frame_bulk *bq) argument
401 vmxnet3_tq_tx_complete(struct vmxnet3_tx_queue *tq, struct vmxnet3_adapter *adapter) argument
443 vmxnet3_tq_cleanup(struct vmxnet3_tx_queue *tq, struct vmxnet3_adapter *adapter) argument
486 vmxnet3_tq_destroy(struct vmxnet3_tx_queue *tq, struct vmxnet3_adapter *adapter) argument
524 vmxnet3_tq_init(struct vmxnet3_tx_queue *tq, struct vmxnet3_adapter *adapter) argument
554 vmxnet3_tq_create(struct vmxnet3_tx_queue *tq, struct vmxnet3_adapter *adapter) argument
734 vmxnet3_map_pkt(struct sk_buff *skb, struct vmxnet3_tx_ctx *ctx, struct vmxnet3_tx_queue *tq, struct pci_dev *pdev, struct vmxnet3_adapter *adapter) argument
898 vmxnet3_parse_hdr(struct sk_buff *skb, struct vmxnet3_tx_queue *tq, struct vmxnet3_tx_ctx *ctx, struct vmxnet3_adapter *adapter) argument
1001 vmxnet3_copy_hdr(struct sk_buff *skb, struct vmxnet3_tx_queue *tq, struct vmxnet3_tx_ctx *ctx, struct vmxnet3_adapter *adapter) argument
1081 vmxnet3_tq_xmit(struct sk_buff *skb, struct vmxnet3_tx_queue *tq, struct vmxnet3_adapter *adapter, struct net_device *netdev) argument
2267 struct vmxnet3_tx_queue *tq = local
2292 struct vmxnet3_tx_queue *tq = data; local
2805 struct vmxnet3_tx_queue *tq = &adapter->tx_queue[i]; local
3293 struct vmxnet3_tx_queue *tq = &adapter->tx_queue[i]; local
[all...]
vmxnet3_ethtool.c
41 /* per tq stats maintained by the device */
58 /* per tq stats maintained by the driver */
550 struct vmxnet3_tx_queue *tq = &adapter->tx_queue[i]; local
555 buf[j++] = VMXNET3_GET_ADDR_LO(tq->tx_ring.basePA);
556 buf[j++] = VMXNET3_GET_ADDR_HI(tq->tx_ring.basePA);
557 buf[j++] = tq->tx_ring.size;
558 buf[j++] = tq->tx_ring.next2fill;
559 buf[j++] = tq->tx_ring.next2comp;
560 buf[j++] = tq->tx_ring.gen;
562 buf[j++] = VMXNET3_GET_ADDR_LO(tq
[all...]
vmxnet3_int.h
447 #define VMXNET3_WAKE_QUEUE_THRESHOLD(tq) (5)
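In the vmxnet3 hits above, the start/wake helpers recover the netdev subqueue index from pointer arithmetic (tq - adapter->tx_queue) before calling netif_start_subqueue()/netif_wake_subqueue(); the driver wakes a stopped queue once at least VMXNET3_WAKE_QUEUE_THRESHOLD descriptors are free again. A minimal userspace sketch of just the index arithmetic; the struct layout and names here are illustrative, not the driver's real definitions:

#include <stdbool.h>
#include <stdio.h>

#define NUM_TXQ 4

struct tx_queue {
        bool stopped;
};

struct adapter {
        struct tx_queue tx_queue[NUM_TXQ];
};

/* Recover the subqueue index the same way the driver does:
 * pointer difference within the adapter's tx_queue[] array. */
static unsigned int txq_index(const struct adapter *a, const struct tx_queue *tq)
{
        return (unsigned int)(tq - a->tx_queue);
}

int main(void)
{
        struct adapter a = { 0 };
        struct tx_queue *tq = &a.tx_queue[2];

        tq->stopped = false;
        printf("waking subqueue %u\n", txq_index(&a, tq)); /* prints 2 */
        return 0;
}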
/linux-master/drivers/net/wireless/ath/ath5k/
qcu.c
286 struct ath5k_txq_info *tq = &ah->ah_txq[queue]; local
292 (tq->tqi_cw_min << AR5K_NODCU_RETRY_LMT_CW_MIN_S)
326 struct ath5k_txq_info *tq = &ah->ah_txq[queue]; local
333 (tq->tqi_type == AR5K_TX_QUEUE_INACTIVE))
341 AR5K_REG_SM(tq->tqi_cw_min, AR5K_DCU_LCL_IFS_CW_MIN) |
342 AR5K_REG_SM(tq->tqi_cw_max, AR5K_DCU_LCL_IFS_CW_MAX) |
343 AR5K_REG_SM(tq->tqi_aifs, AR5K_DCU_LCL_IFS_AIFS),
366 if (tq->tqi_cbr_period) {
367 ath5k_hw_reg_write(ah, AR5K_REG_SM(tq->tqi_cbr_period,
369 AR5K_REG_SM(tq
[all...]
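The ath5k hits pack the queue's contention parameters (tqi_cw_min, tqi_cw_max, tqi_aifs) into one register with AR5K_REG_SM-style shift-and-mask helpers. A self-contained sketch of that packing idea; the field masks and shifts below are illustrative, not the real AR5K register layout:

#include <stdint.h>
#include <stdio.h>

/* Hypothetical field layout: bits 0-9 CW_MIN, 10-19 CW_MAX, 20-23 AIFS. */
#define CW_MIN_MASK  0x000003ffu
#define CW_MIN_SHIFT 0
#define CW_MAX_MASK  0x000ffc00u
#define CW_MAX_SHIFT 10
#define AIFS_MASK    0x00f00000u
#define AIFS_SHIFT   20

/* Shift a value into position and mask it to its field. */
static uint32_t reg_sm(uint32_t val, uint32_t mask, unsigned int shift)
{
        return (val << shift) & mask;
}

int main(void)
{
        uint32_t lcl_ifs = reg_sm(15, CW_MIN_MASK, CW_MIN_SHIFT) |
                           reg_sm(1023, CW_MAX_MASK, CW_MAX_SHIFT) |
                           reg_sm(2, AIFS_MASK, AIFS_SHIFT);

        printf("local IFS register: 0x%08x\n", lcl_ifs);
        return 0;
}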
/linux-master/drivers/net/ethernet/chelsio/cxgb4vf/
sge.c
229 * @tq: the TX queue
233 static inline unsigned int txq_avail(const struct sge_txq *tq) argument
235 return tq->size - 1 - tq->in_use;
307 const struct ulptx_sgl *sgl, const struct sge_txq *tq)
326 if (likely((u8 *)(p + 1) <= (u8 *)tq->stat)) {
333 } else if ((u8 *)p == (u8 *)tq->stat) {
334 p = (const struct ulptx_sge_pair *)tq->desc;
336 } else if ((u8 *)p + 8 == (u8 *)tq->stat) {
337 const __be64 *addr = (const __be64 *)tq
306 unmap_sgl(struct device *dev, const struct sk_buff *skb, const struct ulptx_sgl *sgl, const struct sge_txq *tq) argument
377 free_tx_desc(struct adapter *adapter, struct sge_txq *tq, unsigned int n, bool unmap) argument
411 reclaimable(const struct sge_txq *tq) argument
430 reclaim_completed_tx(struct adapter *adapter, struct sge_txq *tq, bool unmap) argument
901 write_sgl(const struct sk_buff *skb, struct sge_txq *tq, struct ulptx_sgl *sgl, u64 *end, unsigned int start, const dma_addr_t *addr) argument
964 ring_tx_db(struct adapter *adapter, struct sge_txq *tq, int n) argument
1052 inline_tx_skb(const struct sk_buff *skb, const struct sge_txq *tq, void *pos) argument
1142 txq_advance(struct sge_txq *tq, unsigned int n) argument
1417 struct sge_txq *tq = &txq->q; local
2511 free_txq(struct adapter *adapter, struct sge_txq *tq) argument
[all...]
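txq_avail() above reports free descriptors as tq->size - 1 - tq->in_use: one slot is deliberately kept back so a completely full ring can be told apart from an empty one. A small self-contained sketch of that accounting (struct and field names are illustrative):

#include <stdio.h>

struct txq {
        unsigned int size;    /* total descriptors in the ring */
        unsigned int in_use;  /* descriptors currently holding pending work */
};

/* Free descriptors, keeping one slot unused so full != empty. */
static unsigned int txq_avail(const struct txq *q)
{
        return q->size - 1 - q->in_use;
}

int main(void)
{
        struct txq q = { .size = 1024, .in_use = 1000 };

        printf("%u descriptors available\n", txq_avail(&q)); /* prints 23 */
        return 0;
}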
cxgb4vf_main.c
535 struct sge_txq *tq; local
552 tq = s->egr_map[eq_idx];
553 if (unlikely(tq == NULL)) {
558 txq = container_of(tq, struct sge_eth_txq, q);
559 if (unlikely(tq->abs_id != qid)) {
562 qid, tq->abs_id);
/linux-master/drivers/net/can/dev/
bittiming.c
85 brp64 = (u64)priv->clock.freq * (u64)bt->tq;
107 bt->tq = DIV_U64_ROUND_CLOSEST(mul_u32_u32(bt->brp, NSEC_PER_SEC),
141 * alternatively the CAN timing parameters (tq, prop_seg, etc.) are
144 if (!bt->tq && bt->bitrate && btc)
146 if (bt->tq && !bt->bitrate && btc)
148 if (!bt->tq && bt->bitrate && bitrate_const)
calc_bittiming.c
154 bt->tq = (u32)v64;
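The CAN bittiming hits relate the time quantum to the prescaler and controller clock: tq (ns) = brp * NSEC_PER_SEC / clock.freq, rounded to nearest as in the DIV_U64_ROUND_CLOSEST() line above. A minimal sketch of that arithmetic with example numbers (helper name is mine, not the kernel's):

#include <stdint.h>
#include <stdio.h>

#define NSEC_PER_SEC 1000000000ULL

/* Time quantum in nanoseconds from the bitrate prescaler and CAN clock,
 * rounded to the nearest integer. */
static uint32_t can_tq_ns(uint32_t brp, uint32_t clock_freq_hz)
{
        uint64_t num = (uint64_t)brp * NSEC_PER_SEC;

        return (uint32_t)((num + clock_freq_hz / 2) / clock_freq_hz);
}

int main(void)
{
        /* e.g. 8 MHz CAN clock with prescaler 4 -> 500 ns per time quantum */
        printf("tq = %u ns\n", can_tq_ns(4, 8000000));
        return 0;
}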
/linux-master/drivers/media/v4l2-core/
v4l2-jpeg.c
212 int c, h_v, tq; local
233 tq = jpeg_get_byte(stream);
234 if (tq < 0)
235 return tq;
242 component->quantization_table_selector = tq;
318 u8 pq, tq, *qk; local
336 tq = pq_tq & 0xf;
337 if (tq > 3)
347 tables[tq].start = qk;
348 tables[tq]
[all...]
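In the v4l2-jpeg hits, tq is the quantization table selector parsed from a JPEG DQT segment: the precision (Pq) sits in the high nibble and the table id (Tq) in the low nibble, and Tq must be 0..3. A small sketch of that byte split (function name is illustrative):

#include <stdint.h>
#include <stdio.h>

/* Split the DQT precision/destination byte: high nibble = precision (Pq),
 * low nibble = table id (Tq), valid range 0..3. */
static int parse_pq_tq(uint8_t pq_tq, uint8_t *pq, uint8_t *tq)
{
        *pq = pq_tq >> 4;
        *tq = pq_tq & 0xf;
        if (*tq > 3)
                return -1; /* invalid table selector */
        return 0;
}

int main(void)
{
        uint8_t pq, tq;

        if (parse_pq_tq(0x01, &pq, &tq) == 0)
                printf("Pq=%u, Tq=%u\n", pq, tq); /* Pq=0, Tq=1 */
        return 0;
}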
/linux-master/net/tipc/
trace.h
284 TP_PROTO(struct tipc_link *r, u16 f, u16 t, struct sk_buff_head *tq),
286 TP_ARGS(r, f, t, tq),
301 __entry->len = skb_queue_len(tq);
303 msg_seqno(buf_msg(skb_peek(tq))) : 0;
305 msg_seqno(buf_msg(skb_peek_tail(tq))) : 0;
314 TP_PROTO(struct tipc_link *r, u16 f, u16 t, struct sk_buff_head *tq),
315 TP_ARGS(r, f, t, tq),
320 TP_PROTO(struct tipc_link *r, u16 f, u16 t, struct sk_buff_head *tq),
321 TP_ARGS(r, f, t, tq),
/linux-master/drivers/net/
ifb.c
53 struct sk_buff_head tq; member in struct:ifb_q_private
96 skb = skb_peek(&txp->tq);
100 skb_queue_splice_tail_init(&txp->rq, &txp->tq);
104 while ((skb = __skb_dequeue(&txp->tq)) != NULL) {
120 if (skb_queue_len(&txp->tq) != 0)
198 __skb_queue_head_init(&txp->tq);
305 __skb_queue_purge(&txp->tq);
/linux-master/drivers/input/keyboard/
sunkbd.c
60 struct work_struct tq; member in struct:sunkbd
100 schedule_work(&sunkbd->tq);
227 struct sunkbd *sunkbd = container_of(work, struct sunkbd, tq);
250 cancel_work_sync(&sunkbd->tq);
274 INIT_WORK(&sunkbd->tq, sunkbd_reinit);
lkkbd.c
267 struct work_struct tq; member in struct:lkkbd
454 schedule_work(&lk->tq);
565 struct lkkbd *lk = container_of(work, struct lkkbd, tq);
620 INIT_WORK(&lk->tq, lkkbd_reinit);
/linux-master/drivers/input/serio/
hp_sdc.c
190 curr = hp_sdc.tq[hp_sdc.rcurr];
316 curr = hp_sdc.tq[hp_sdc.rcurr];
378 if (hp_sdc.tq[curridx] != NULL)
392 if (hp_sdc.tq[curridx] != NULL)
412 curr = hp_sdc.tq[curridx];
416 hp_sdc.tq[curridx] = NULL;
430 hp_sdc.tq[curridx] = NULL;
573 hp_sdc.tq[curridx] = NULL;
609 if (hp_sdc.tq[i] == this)
617 if (hp_sdc.tq[
[all...]
/linux-master/include/uapi/linux/can/
netlink.h
34 __u32 tq; /* Time quanta (TQ) in nanoseconds */ member in struct:can_bittiming
/linux-master/drivers/gpu/drm/i915/gem/selftests/
i915_gem_context.c
677 struct i915_request *tq[5] = {}; local
735 err = throttle(ce, tq, ARRAY_SIZE(tq));
770 throttle_release(tq, ARRAY_SIZE(tq));
787 struct i915_request *tq[5] = {}; local
869 err = throttle(ce, tq, ARRAY_SIZE(tq));
905 throttle_release(tq, ARRAY_SIZE(tq));
1371 struct i915_request *tq[5] = {}; local
[all...]
/linux-master/net/batman-adv/
bat_iv_ogm.c
218 batadv_ogm_packet->tq = BATADV_TQ_MAX_VALUE;
294 static u8 batadv_hop_penalty(u8 tq, const struct batadv_priv *bat_priv) argument
299 new_tq = tq * (BATADV_TQ_MAX_VALUE - hop_penalty);
373 batadv_ogm_packet->tq, batadv_ogm_packet->ttl,
716 batadv_ogm_packet->tq = batadv_hop_penalty(batadv_ogm_packet->tq,
720 "Forwarding packet: tq: %i, ttl: %i\n",
721 batadv_ogm_packet->tq, batadv_ogm_packet->ttl);
998 batadv_ogm_packet->tq);
1053 * batadv_iv_ogm_calc_tq() - calculate tq fo
[all...]
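batadv_hop_penalty() above degrades the advertised transmission quality at each forwarding hop; the hit shows the multiply new_tq = tq * (BATADV_TQ_MAX_VALUE - hop_penalty), and the result is then scaled back down by BATADV_TQ_MAX_VALUE (255). A minimal sketch of that scaling, assuming a hop penalty of 30:

#include <stdint.h>
#include <stdio.h>

#define TQ_MAX_VALUE 255 /* BATADV_TQ_MAX_VALUE */

/* Apply the per-hop penalty: scale tq by (TQ_MAX - penalty) / TQ_MAX. */
static uint8_t hop_penalty(uint8_t tq, uint8_t penalty)
{
        return (uint8_t)((tq * (TQ_MAX_VALUE - penalty)) / TQ_MAX_VALUE);
}

int main(void)
{
        /* a perfect link (tq 255) is advertised one hop further away as 225 */
        printf("tq after one hop: %u\n", hop_penalty(255, 30));
        return 0;
}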
network-coding.c
779 if (ogm_packet->tq < bat_priv->nc.min_tq)
1005 * @tq: to be weighted tq value
1007 * Return: scaled tq value
1009 static u8 batadv_nc_random_weight_tq(u8 tq) argument
1012 u8 rand_tq = get_random_u32_below(BATADV_TQ_MAX_VALUE + 1 - tq);
1014 /* convert to (randomized) estimated tq again */
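batadv_nc_random_weight_tq() above draws a random value in [0, 255 - tq], i.e. a randomized estimate of the packet loss, and converts it back into a TQ, so better links keep a higher expected weight. A userspace approximation using rand() in place of the kernel's get_random_u32_below():

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <time.h>

#define TQ_MAX_VALUE 255

/* Weight a TQ value by randomizing its loss component. */
static uint8_t random_weight_tq(uint8_t tq)
{
        uint8_t rand_loss = (uint8_t)(rand() % (TQ_MAX_VALUE + 1 - tq));

        return (uint8_t)(TQ_MAX_VALUE - rand_loss);
}

int main(void)
{
        srand((unsigned int)time(NULL));
        printf("weighted tq: %u\n", random_weight_tq(200)); /* 200..255 */
        return 0;
}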
/linux-master/include/linux/
hp_sdc.h
281 hp_sdc_transaction *tq[HP_SDC_QUEUE_LEN]; /* All pending read/writes */ member in struct:__anon3
/linux-master/include/uapi/linux/
batadv_packet.h
213 * @tq: transmission quality
225 __u8 tq; member in struct:batadv_ogm_packet
/linux-master/ipc/
sem.c
285 struct sem_queue *q, *tq; local
295 list_for_each_entry_safe(q, tq, &sma->pending_alter, list) {
1146 struct sem_queue *q, *tq; local
1163 list_for_each_entry_safe(q, tq, &sma->pending_const, list) {
1168 list_for_each_entry_safe(q, tq, &sma->pending_alter, list) {
1174 list_for_each_entry_safe(q, tq, &sem->pending_const, list) {
1178 list_for_each_entry_safe(q, tq, &sem->pending_alter, list) {
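In the ipc/sem.c hits, tq is just the scratch cursor that list_for_each_entry_safe() requires: the next element is remembered before the current one is processed, so the loop body may unlink or free the current entry. The same save-next-first idea on a plain singly linked list (standalone sketch, not the kernel list API):

#include <stdio.h>
#include <stdlib.h>

struct node {
        int val;
        struct node *next;
};

int main(void)
{
        /* build a small list: 1 -> 2 -> 3 */
        struct node *head = NULL;
        for (int v = 3; v >= 1; v--) {
                struct node *n = malloc(sizeof(*n));
                n->val = v;
                n->next = head;
                head = n;
        }

        /* "safe" traversal: save the next node (the role tq plays above)
         * before freeing the current one */
        struct node *q, *tq;
        for (q = head; q != NULL; q = tq) {
                tq = q->next;
                printf("freeing %d\n", q->val);
                free(q);
        }
        return 0;
}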
/linux-master/drivers/net/can/usb/
ucan.c
141 __le32 tq; /* Time quanta (TQ) in nanoseconds */ member in struct:ucan_ctl_cmd_set_bittiming
151 __le32 freq; /* Clock Frequency for tq generation */
1253 cmd_set_bittiming->tq = cpu_to_le32(up->can.bittiming.tq);
/linux-master/drivers/tty/ipwireless/
hardware.c
1735 struct ipw_tx_packet *tp, *tq; local
1745 list_for_each_entry_safe(tp, tq, &hw->tx_queue[i], queue) {
/linux-master/drivers/perf/
xgene_pmu.c
295 XGENE_PMU_EVENT_ATTR(tq-full, 0x08),
398 XGENE_PMU_EVENT_ATTR(tq-bank-conflict-issue-stall, 0x0b),
399 XGENE_PMU_EVENT_ATTR(tq-full, 0x0c),

Completed in 522 milliseconds
