Lines Matching refs:trans

27 void iwl_txq_gen2_unmap(struct iwl_trans *trans, int txq_id);
29 static inline void iwl_wake_queue(struct iwl_trans *trans,
32 if (test_and_clear_bit(txq->id, trans->txqs.queue_stopped)) {
33 IWL_DEBUG_TX_QUEUES(trans, "Wake hwq %d\n", txq->id);
34 iwl_op_mode_queue_not_full(trans->op_mode, txq->id);
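
The first group of matches is the inline queue-wake helper. Reassembled (the second parameter line and the braces are inferred, since they do not contain "trans" and therefore are not among the matches), it reads roughly:

    static inline void iwl_wake_queue(struct iwl_trans *trans,
                                      struct iwl_txq *txq)
    {
            /* Only notify the op mode if this queue was actually marked
             * stopped; test_and_clear_bit() makes the wake-up one-shot. */
            if (test_and_clear_bit(txq->id, trans->txqs.queue_stopped)) {
                    IWL_DEBUG_TX_QUEUES(trans, "Wake hwq %d\n", txq->id);
                    iwl_op_mode_queue_not_full(trans->op_mode, txq->id);
            }
    }

The queue_stopped bitmap in trans->txqs is the single source of truth here, so a queue that was never stopped produces no spurious iwl_op_mode_queue_not_full() call.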
38 static inline void *iwl_txq_get_tfd(struct iwl_trans *trans,
41 if (trans->trans_cfg->gen2)
44 return (u8 *)txq->tfds + trans->txqs.tfd.size * idx;
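
iwl_txq_get_tfd() turns a descriptor index into a pointer inside the queue's TFD array. A sketch of the full helper, assuming the gen2 branch remaps the index through iwl_txq_get_cmd_index() as in the upstream driver (that line is not among the matches):

    static inline void *iwl_txq_get_tfd(struct iwl_trans *trans,
                                        struct iwl_txq *txq, int idx)
    {
            /* Assumed: gen2 hardware uses the command-window index. */
            if (trans->trans_cfg->gen2)
                    idx = iwl_txq_get_cmd_index(txq, idx);

            /* tfds is indexed as raw bytes because the per-descriptor size
             * (trans->txqs.tfd.size) differs between gen1 and gen2 TFDs. */
            return (u8 *)txq->tfds + trans->txqs.tfd.size * idx;
    }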
47 int iwl_txq_alloc(struct iwl_trans *trans, struct iwl_txq *txq, int slots_num,
59 int iwl_txq_space(struct iwl_trans *trans, const struct iwl_txq *q);
61 static inline void iwl_txq_stop(struct iwl_trans *trans, struct iwl_txq *txq)
63 if (!test_and_set_bit(txq->id, trans->txqs.queue_stopped)) {
64 iwl_op_mode_queue_full(trans->op_mode, txq->id);
65 IWL_DEBUG_TX_QUEUES(trans, "Stop hwq %d\n", txq->id);
67 IWL_DEBUG_TX_QUEUES(trans, "hwq %d already stopped\n",
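
iwl_txq_stop() is the mirror image of iwl_wake_queue() above; the matches at lines 63-67 cover almost the whole body, with only the braces and the else keyword inferred:

    static inline void iwl_txq_stop(struct iwl_trans *trans, struct iwl_txq *txq)
    {
            if (!test_and_set_bit(txq->id, trans->txqs.queue_stopped)) {
                    /* First time this queue fills up: tell the op mode. */
                    iwl_op_mode_queue_full(trans->op_mode, txq->id);
                    IWL_DEBUG_TX_QUEUES(trans, "Stop hwq %d\n", txq->id);
            } else {
                    IWL_DEBUG_TX_QUEUES(trans, "hwq %d already stopped\n",
                                        txq->id);
            }
    }

Because both helpers use atomic test-and-modify operations on the same queue_stopped bitmap, the op-mode callbacks fire exactly once per stop/wake transition even if several CPUs hit the queue concurrently.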
74 * @trans: the transport (for configuration data)
77 static inline int iwl_txq_inc_wrap(struct iwl_trans *trans, int index)
80 (trans->trans_cfg->base_params->max_tfd_queue_size - 1);
85 * @trans: the transport (for configuration data)
88 static inline int iwl_txq_dec_wrap(struct iwl_trans *trans, int index)
91 (trans->trans_cfg->base_params->max_tfd_queue_size - 1);
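
The two wrap helpers implement the circular TFD index arithmetic. Only the masking lines are matched; the ++/-- lines below are inferred from the upstream code:

    static inline int iwl_txq_inc_wrap(struct iwl_trans *trans, int index)
    {
            return ++index &
                    (trans->trans_cfg->base_params->max_tfd_queue_size - 1);
    }

    static inline int iwl_txq_dec_wrap(struct iwl_trans *trans, int index)
    {
            return --index &
                    (trans->trans_cfg->base_params->max_tfd_queue_size - 1);
    }

The mask only works because max_tfd_queue_size is a power of two; it also makes the decrement safe at index 0, since -1 & (size - 1) wraps around to size - 1.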
105 void iwl_txq_free_tso_page(struct iwl_trans *trans, struct sk_buff *skb);
107 void iwl_txq_log_scd_error(struct iwl_trans *trans, struct iwl_txq *txq);
109 int iwl_txq_gen2_set_tb(struct iwl_trans *trans,
113 void iwl_txq_gen2_tfd_unmap(struct iwl_trans *trans,
117 int iwl_txq_dyn_alloc(struct iwl_trans *trans, u32 flags,
121 int iwl_txq_gen2_tx(struct iwl_trans *trans, struct sk_buff *skb,
124 void iwl_txq_dyn_free(struct iwl_trans *trans, int queue);
125 void iwl_txq_gen2_free_tfd(struct iwl_trans *trans, struct iwl_txq *txq);
126 void iwl_txq_inc_wr_ptr(struct iwl_trans *trans, struct iwl_txq *txq);
127 void iwl_txq_gen2_tx_free(struct iwl_trans *trans);
128 int iwl_txq_init(struct iwl_trans *trans, struct iwl_txq *txq, int slots_num,
130 int iwl_txq_gen2_init(struct iwl_trans *trans, int txq_id, int queue_size);
132 struct iwl_tso_hdr_page *get_page_hdr(struct iwl_trans *trans, size_t len,
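
Several of the prototypes above make up the gen2 queue lifecycle. The wrapper below is purely hypothetical and only strings together the entry points whose signatures are fully visible in the matches (iwl_txq_gen2_init(), iwl_txq_gen2_unmap(), iwl_txq_gen2_tx_free()); the queue id 0 and queue size 32 are made-up values, and the exact teardown ordering in the real transport may differ:

    /* Hypothetical helper, not part of the driver. */
    static int example_gen2_queue_cycle(struct iwl_trans *trans)
    {
            int ret;

            /* Bring up one gen2 queue (id and size are illustrative). */
            ret = iwl_txq_gen2_init(trans, 0, 32);
            if (ret)
                    return ret;

            /* ... frames would be queued via iwl_txq_gen2_tx() here ... */

            /* Teardown: drop any still-mapped buffers on the queue, then
             * release all gen2 TX resources held by the transport. */
            iwl_txq_gen2_unmap(trans, 0);
            iwl_txq_gen2_tx_free(trans);

            return 0;
    }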
135 static inline u8 iwl_txq_gen1_tfd_get_num_tbs(struct iwl_trans *trans,
141 static inline u16 iwl_txq_gen1_tfd_tb_get_len(struct iwl_trans *trans,
147 if (trans->trans_cfg->gen2) {
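
The gen1 TFD accessors also have to cope with being called on gen2 hardware, which is what the gen2 check at line 147 is for. A sketch of iwl_txq_gen1_tfd_tb_get_len(), with the struct iwl_tfd / iwl_tfh_tfd field names taken from the upstream layouts and therefore assumed here:

    static inline u16 iwl_txq_gen1_tfd_tb_get_len(struct iwl_trans *trans,
                                                  void *_tfd, u8 idx)
    {
            struct iwl_tfd *tfd;
            struct iwl_tfd_tb *tb;

            if (trans->trans_cfg->gen2) {
                    /* Extended (gen2) TFDs store the length directly. */
                    struct iwl_tfh_tfd *tfh_tfd = _tfd;
                    struct iwl_tfh_tb *tfh_tb = &tfh_tfd->tbs[idx];

                    return le16_to_cpu(tfh_tb->tb_len);
            }

            /* Legacy TFDs pack the high 4 address bits and the 12-bit
             * length into hi_n_len, hence the shift. */
            tfd = (struct iwl_tfd *)_tfd;
            tb = &tfd->tbs[idx];

            return le16_to_cpu(tb->hi_n_len) >> 4;
    }

iwl_pcie_gen1_tfd_set_tb() at line 160 appears to be the write-side counterpart that packs the same hi_n_len field.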
160 static inline void iwl_pcie_gen1_tfd_set_tb(struct iwl_trans *trans,
175 void iwl_txq_gen1_tfd_unmap(struct iwl_trans *trans,
178 void iwl_txq_gen1_inval_byte_cnt_tbl(struct iwl_trans *trans,
180 void iwl_txq_gen1_update_byte_cnt_tbl(struct iwl_trans *trans,
183 void iwl_txq_reclaim(struct iwl_trans *trans, int txq_id, int ssn,
185 void iwl_txq_set_q_ptrs(struct iwl_trans *trans, int txq_id, int ptr);
186 void iwl_trans_txq_freeze_timer(struct iwl_trans *trans, unsigned long txqs,
189 void iwl_txq_free_tfd(struct iwl_trans *trans, struct iwl_txq *txq);
190 int iwl_trans_txq_send_hcmd(struct iwl_trans *trans, struct iwl_host_cmd *cmd);
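
iwl_trans_txq_send_hcmd() is the transport-level host-command entry point; callers in the driver normally reach it through the higher-level iwl_trans_send_cmd() wrapper. A minimal, hypothetical usage sketch (ECHO_CMD is a real command id in iwlwifi, but sending it through this exact path is an assumption):

    /* Hypothetical helper, not taken from the driver. */
    static int example_send_echo(struct iwl_trans *trans)
    {
            struct iwl_host_cmd hcmd = {
                    .id = ECHO_CMD,
                    .flags = 0,     /* synchronous; CMD_ASYNC would return
                                     * without waiting for the response */
            };

            /* Queues the command on the command queue and, for synchronous
             * commands, waits for the firmware response. */
            return iwl_trans_txq_send_hcmd(trans, &hcmd);
    }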