Lines Matching refs:aq (drivers/net/ethernet/intel/iavf/iavf_adminq.c, the Linux iavf VF driver's admin queue implementation)

18 	ret_code = iavf_allocate_dma_mem(hw, &hw->aq.asq.desc_buf,
20 (hw->aq.num_asq_entries *
26 ret_code = iavf_allocate_virt_mem(hw, &hw->aq.asq.cmd_buf,
27 (hw->aq.num_asq_entries *
30 iavf_free_dma_mem(hw, &hw->aq.asq.desc_buf);
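Lines 18-30 are the ASQ ring setup: the descriptor ring goes in DMA-able memory, a parallel array of per-command bookkeeping goes in ordinary memory, and if the second allocation fails the first is freed before returning (line 30). A minimal userspace sketch of that allocate-then-unwind shape; all names below are hypothetical, not the iavf API (which also takes a memory-type tag and an alignment):

    #include <stdlib.h>

    struct ring {
        void *descs;   /* would be DMA-coherent memory in the driver */
        void *details; /* per-command bookkeeping, plain memory */
    };

    static int ring_alloc(struct ring *r, size_t n, size_t desc_sz, size_t det_sz)
    {
        r->descs = calloc(n, desc_sz);
        if (!r->descs)
            return -1;

        r->details = calloc(n, det_sz);
        if (!r->details) {
            free(r->descs);   /* unwind the first allocation, as line 30 does */
            r->descs = NULL;
            return -1;
        }
        return 0;
    }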
45 ret_code = iavf_allocate_dma_mem(hw, &hw->aq.arq.desc_buf,
47 (hw->aq.num_arq_entries *
63 iavf_free_dma_mem(hw, &hw->aq.asq.desc_buf);
75 iavf_free_dma_mem(hw, &hw->aq.arq.desc_buf);
94 ret_code = iavf_allocate_virt_mem(hw, &hw->aq.arq.dma_head,
95 (hw->aq.num_arq_entries *
99 hw->aq.arq.r.arq_bi = (struct iavf_dma_mem *)hw->aq.arq.dma_head.va;
102 for (i = 0; i < hw->aq.num_arq_entries; i++) {
103 bi = &hw->aq.arq.r.arq_bi[i];
106 hw->aq.arq_buf_size,
112 desc = IAVF_ADMINQ_DESC(hw->aq.arq, i);
115 if (hw->aq.arq_buf_size > IAVF_AQ_LARGE_BUF)
140 iavf_free_dma_mem(hw, &hw->aq.arq.r.arq_bi[i]);
141 iavf_free_virt_mem(hw, &hw->aq.arq.dma_head);
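Lines 94-141 allocate one receive buffer per ARQ entry and pre-fill each descriptor with the buffer's length and split 64-bit address, setting an extra flag when the buffer exceeds IAVF_AQ_LARGE_BUF (line 115) so firmware knows it may write a large event there. A sketch of that pre-posting step; the descriptor layout and flag values are illustrative stand-ins, not the exact iavf_aq_desc layout:

    #include <stdint.h>

    #define AQ_LARGE_BUF  512        /* stand-in for IAVF_AQ_LARGE_BUF */
    #define AQ_FLAG_BUF   0x1000     /* "descriptor has an attached buffer" */
    #define AQ_FLAG_LB    0x0200     /* "buffer is larger than AQ_LARGE_BUF" */

    struct aq_desc {
        uint16_t flags;
        uint16_t datalen;
        uint32_t addr_high;
        uint32_t addr_low;
    };

    static void arq_post_buffer(struct aq_desc *d, uint64_t pa, uint16_t size)
    {
        d->flags = AQ_FLAG_BUF;
        if (size > AQ_LARGE_BUF)
            d->flags |= AQ_FLAG_LB;            /* mirrors the check at line 115 */
        d->datalen = size;
        d->addr_high = (uint32_t)(pa >> 32);   /* split 64-bit DMA address */
        d->addr_low = (uint32_t)(pa & 0xffffffff);
    }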
157 ret_code = iavf_allocate_virt_mem(hw, &hw->aq.asq.dma_head,
158 (hw->aq.num_asq_entries *
162 hw->aq.asq.r.asq_bi = (struct iavf_dma_mem *)hw->aq.asq.dma_head.va;
165 for (i = 0; i < hw->aq.num_asq_entries; i++) {
166 bi = &hw->aq.asq.r.asq_bi[i];
169 hw->aq.asq_buf_size,
181 iavf_free_dma_mem(hw, &hw->aq.asq.r.asq_bi[i]);
182 iavf_free_virt_mem(hw, &hw->aq.asq.dma_head);
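Lines 99 and 162 reuse one virt_mem allocation as the array of per-entry iavf_dma_mem trackers, which is what the casts from dma_head.va express. A sketch of that pattern together with the per-entry allocation loop and its unwind (lines 181-182); names are hypothetical and malloc stands in for DMA mapping:

    #include <stdlib.h>

    struct dma_mem { void *va; unsigned long long pa; size_t size; };

    static struct dma_mem *bufs_alloc(size_t n, size_t bufsize)
    {
        /* one backing allocation reinterpreted as the tracking array,
         * as the casts at lines 99 and 162 do */
        struct dma_mem *bufs = calloc(n, sizeof(*bufs));
        if (!bufs)
            return NULL;

        for (size_t i = 0; i < n; i++) {
            bufs[i].va = malloc(bufsize);     /* per-entry buffer */
            if (!bufs[i].va) {
                while (i--)                   /* unwind what was allocated */
                    free(bufs[i].va);
                free(bufs);
                return NULL;
            }
            bufs[i].size = bufsize;
        }
        return bufs;
    }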
196 for (i = 0; i < hw->aq.num_arq_entries; i++)
197 iavf_free_dma_mem(hw, &hw->aq.arq.r.arq_bi[i]);
200 iavf_free_dma_mem(hw, &hw->aq.arq.desc_buf);
203 iavf_free_virt_mem(hw, &hw->aq.arq.dma_head);
215 for (i = 0; i < hw->aq.num_asq_entries; i++)
216 if (hw->aq.asq.r.asq_bi[i].pa)
217 iavf_free_dma_mem(hw, &hw->aq.asq.r.asq_bi[i]);
220 iavf_free_virt_mem(hw, &hw->aq.asq.cmd_buf);
223 iavf_free_dma_mem(hw, &hw->aq.asq.desc_buf);
226 iavf_free_virt_mem(hw, &hw->aq.asq.dma_head);
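Lines 196-203 and 215-226 are the mirror-image teardown: per-entry DMA buffers first, then the command bookkeeping, the descriptor ring, and finally the tracking array itself. The ASQ path additionally checks pa before freeing (line 216) because a failed init can leave the array only partially populated. A sketch under those assumptions, with free() standing in for the driver's DMA unmap and all types hypothetical:

    #include <stdlib.h>

    struct dma_buf { void *va; unsigned long long pa; };

    struct asq_mem {
        struct dma_buf *bufs;   /* per-entry buffers (dma_head array) */
        void *details;          /* command bookkeeping (cmd_buf) */
        void *descs;            /* descriptor ring (desc_buf) */
    };

    static void dma_buf_free(struct dma_buf *b)
    {
        free(b->va);            /* a real driver would unmap DMA here */
        b->va = NULL;
        b->pa = 0;
    }

    static void asq_free_all(struct asq_mem *q, size_t n)
    {
        for (size_t i = 0; i < n; i++)
            if (q->bufs[i].pa)          /* skip never-mapped entries (line 216) */
                dma_buf_free(&q->bufs[i]);
        free(q->details);               /* cmd_buf, line 220 */
        free(q->descs);                 /* desc_buf, line 223 */
        free(q->bufs);                  /* dma_head, line 226 */
    }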
245 wr32(hw, IAVF_VF_ATQLEN1, (hw->aq.num_asq_entries |
247 wr32(hw, IAVF_VF_ATQBAL1, lower_32_bits(hw->aq.asq.desc_buf.pa));
248 wr32(hw, IAVF_VF_ATQBAH1, upper_32_bits(hw->aq.asq.desc_buf.pa));
252 if (reg != lower_32_bits(hw->aq.asq.desc_buf.pa))
274 wr32(hw, IAVF_VF_ARQLEN1, (hw->aq.num_arq_entries |
276 wr32(hw, IAVF_VF_ARQBAL1, lower_32_bits(hw->aq.arq.desc_buf.pa));
277 wr32(hw, IAVF_VF_ARQBAH1, upper_32_bits(hw->aq.arq.desc_buf.pa));
280 wr32(hw, IAVF_VF_ARQT1, hw->aq.num_arq_entries - 1);
284 if (reg != lower_32_bits(hw->aq.arq.desc_buf.pa))
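Lines 245-252 and 274-284 program each queue's length register (including an enable bit), write the ring base address in two 32-bit halves, then read the low base address back: a mismatch means the device never accepted the programming, so init fails. The receive queue also bumps its tail to the last entry (line 280) so the device sees every pre-posted buffer as available. A sketch with the MMIO window modeled as a plain array; the register names and layout are stand-ins, not the real IAVF_VF_ATQ*/ARQ* map:

    #include <stdint.h>

    static uint32_t regs[8];                   /* fake MMIO window for the sketch */
    enum { REG_LEN, REG_BAL, REG_BAH, REG_TAIL };

    static void     wr32(int r, uint32_t v) { regs[r] = v; }
    static uint32_t rd32(int r)             { return regs[r]; }

    static int config_rxq(uint64_t ring_pa, uint32_t nentries, uint32_t enable_bit)
    {
        wr32(REG_LEN, nentries | enable_bit);
        wr32(REG_BAL, (uint32_t)ring_pa);          /* low 32 bits of base */
        wr32(REG_BAH, (uint32_t)(ring_pa >> 32));  /* high 32 bits of base */

        /* expose every pre-posted buffer to the device (line 280) */
        wr32(REG_TAIL, nentries - 1);

        /* read back the low base address; a mismatch means the device
         * did not accept the write (lines 252 and 284) */
        return rd32(REG_BAL) == (uint32_t)ring_pa ? 0 : -1;
    }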
296 * in the hw->aq structure:
297 * - hw->aq.num_asq_entries
298 * - hw->aq.asq_buf_size
308 if (hw->aq.asq.count > 0) {
315 if ((hw->aq.num_asq_entries == 0) ||
316 (hw->aq.asq_buf_size == 0)) {
321 hw->aq.asq.next_to_use = 0;
322 hw->aq.asq.next_to_clean = 0;
340 hw->aq.asq.count = hw->aq.num_asq_entries;
344 for (i = 0; i < hw->aq.num_asq_entries; i++)
345 iavf_free_dma_mem(hw, &hw->aq.asq.r.asq_bi[i]);
346 iavf_free_virt_mem(hw, &hw->aq.asq.dma_head);
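Lines 296-346 (iavf_init_asq) establish the init ordering: refuse to run twice (count > 0, line 308), validate the caller-supplied sizing (315-316), reset the ring indices, allocate and program the hardware, and only then publish the queue by setting count (line 340); on failure the buffers are released in reverse order (344-346). The ARQ path at lines 361-411 follows the same sequence. A compact sketch of that ordering, with hypothetical types and illustrative error codes:

    #include <stddef.h>

    struct queue {
        size_t count;          /* nonzero only once the queue is live */
        size_t next_to_use;
        size_t next_to_clean;
    };

    static int queue_init(struct queue *q, size_t nentries, size_t bufsize)
    {
        if (q->count > 0)                    /* already initialized (line 308) */
            return -1;
        if (nentries == 0 || bufsize == 0)   /* caller must set both (315-316) */
            return -2;

        q->next_to_use = 0;
        q->next_to_clean = 0;

        /* ... allocate rings and buffers, program registers; on failure
         * free in reverse order as lines 344-346 do ... */

        q->count = nentries;   /* publish the queue only at the very end */
        return 0;
    }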
361 * in the hw->aq structure:
362 * - hw->aq.num_arq_entries
363 * - hw->aq.arq_buf_size
373 if (hw->aq.arq.count > 0) {
380 if ((hw->aq.num_arq_entries == 0) ||
381 (hw->aq.arq_buf_size == 0)) {
386 hw->aq.arq.next_to_use = 0;
387 hw->aq.arq.next_to_clean = 0;
405 hw->aq.arq.count = hw->aq.num_arq_entries;
409 for (i = 0; i < hw->aq.num_arq_entries; i++)
410 iavf_free_dma_mem(hw, &hw->aq.arq.r.arq_bi[i]);
411 iavf_free_virt_mem(hw, &hw->aq.arq.dma_head);
429 mutex_lock(&hw->aq.asq_mutex);
431 if (hw->aq.asq.count == 0) {
443 hw->aq.asq.count = 0; /* to indicate uninitialized queue */
449 mutex_unlock(&hw->aq.asq_mutex);
463 mutex_lock(&hw->aq.arq_mutex);
465 if (hw->aq.arq.count == 0) {
477 hw->aq.arq.count = 0; /* to indicate uninitialized queue */
483 mutex_unlock(&hw->aq.arq_mutex);
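Lines 429-483 shut the queues down under their mutexes: if count is already zero the queue was never initialized, otherwise count is zeroed first so the queue reads as uninitialized (lines 443 and 477), and the lock is held across the whole teardown so a concurrent sender cannot race it. A sketch using pthreads in place of the kernel mutex; types and names are stand-ins:

    #include <pthread.h>
    #include <stddef.h>

    struct lockedq {
        pthread_mutex_t lock;   /* init with PTHREAD_MUTEX_INITIALIZER */
        size_t count;
    };

    static int queue_shutdown(struct lockedq *q)
    {
        int ret = 0;

        pthread_mutex_lock(&q->lock);
        if (q->count == 0) {
            ret = -1;              /* not initialized (lines 431 and 465) */
            goto out;
        }
        q->count = 0;              /* senders now see an uninitialized queue */
        /* ... disable the queue registers and free ring memory here ... */
    out:
        pthread_mutex_unlock(&q->lock);
        return ret;
    }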
492 * in the hw->aq structure:
493 * - hw->aq.num_asq_entries
494 * - hw->aq.num_arq_entries
495 * - hw->aq.arq_buf_size
496 * - hw->aq.asq_buf_size
503 if ((hw->aq.num_arq_entries == 0) ||
504 (hw->aq.num_asq_entries == 0) ||
505 (hw->aq.arq_buf_size == 0) ||
506 (hw->aq.asq_buf_size == 0)) {
512 hw->aq.asq_cmd_timeout = IAVF_ASQ_CMD_TIMEOUT;
558 struct iavf_adminq_ring *asq = &hw->aq.asq;
603 return rd32(hw, IAVF_VF_ATQH1) == hw->aq.asq.next_to_use;
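Line 603 is the whole completion test: firmware advances the hardware head register as it consumes descriptors, so the send queue is drained exactly when head equals the driver's producer index. Modeled with a plain variable in place of rd32(hw, IAVF_VF_ATQH1):

    #include <stdint.h>
    #include <stdbool.h>

    static uint32_t hw_head;   /* stand-in for the ATQH register */

    static bool sendq_done(uint32_t next_to_use)
    {
        return hw_head == next_to_use;
    }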
631 mutex_lock(&hw->aq.asq_mutex);
633 if (hw->aq.asq.count == 0) {
640 hw->aq.asq_last_status = IAVF_AQ_RC_OK;
643 if (val >= hw->aq.num_asq_entries) {
650 details = IAVF_ADMINQ_DETAILS(hw->aq.asq, hw->aq.asq.next_to_use);
672 if (buff_size > hw->aq.asq_buf_size) {
705 desc_on_ring = IAVF_ADMINQ_DESC(hw->aq.asq, hw->aq.asq.next_to_use);
712 dma_buff = &hw->aq.asq.r.asq_bi[hw->aq.asq.next_to_use];
730 (hw->aq.asq.next_to_use)++;
731 if (hw->aq.asq.next_to_use == hw->aq.asq.count)
732 hw->aq.asq.next_to_use = 0;
734 wr32(hw, IAVF_VF_ATQT1, hw->aq.asq.next_to_use);
750 } while (total_delay < hw->aq.asq_cmd_timeout);
775 hw->aq.asq_last_status = (enum iavf_admin_queue_err)retval;
782 /* save writeback aq if requested */
801 mutex_unlock(&hw->aq.asq_mutex);
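Lines 631-801 are the send path: take the ASQ mutex, validate the queue and buffer size, fill the next descriptor on the ring, advance next_to_use with wrap (730-732), ring the tail doorbell (734), then poll until the command completes or asq_cmd_timeout expires (750), recording the firmware's return code as asq_last_status. A self-contained sketch of the submit-and-poll core; the timeout constant and the omitted delay are stand-ins:

    #include <stdint.h>
    #include <stdbool.h>

    #define CMD_TIMEOUT_STEPS 5000   /* stand-in for hw->aq.asq_cmd_timeout */

    struct sendq {
        uint32_t next_to_use;
        uint32_t count;
        uint32_t hw_head;            /* models the ATQH register */
    };

    static bool sendq_submit_and_wait(struct sendq *q)
    {
        uint32_t waited = 0;

        /* advance the producer index with wrap, as lines 730-732 do */
        if (++q->next_to_use == q->count)
            q->next_to_use = 0;
        /* a real driver writes the tail doorbell here (line 734) */

        /* poll until the hardware head catches up or we time out (line 750) */
        while (waited++ < CMD_TIMEOUT_STEPS) {
            if (q->hw_head == q->next_to_use)
                return true;
            /* the driver delays between polls */
        }
        return false;                /* timed out */
    }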
834 u16 ntc = hw->aq.arq.next_to_clean;
847 mutex_lock(&hw->aq.arq_mutex);
849 if (hw->aq.arq.count == 0) {
865 desc = IAVF_ADMINQ_DESC(hw->aq.arq, ntc);
868 hw->aq.arq_last_status =
876 hw->aq.arq_last_status);
883 memcpy(e->msg_buf, hw->aq.arq.r.arq_bi[desc_idx].va,
888 hw->aq.arq_buf_size);
894 bi = &hw->aq.arq.r.arq_bi[ntc];
898 if (hw->aq.arq_buf_size > IAVF_AQ_LARGE_BUF)
908 if (ntc == hw->aq.num_arq_entries)
910 hw->aq.arq.next_to_clean = ntc;
911 hw->aq.arq.next_to_use = ntu;
916 *pending = (ntc > ntu ? hw->aq.arq.count : 0) + (ntu - ntc);
919 mutex_unlock(&hw->aq.arq_mutex);
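Lines 834-919 (iavf_clean_arq_element) consume one event under the ARQ mutex: copy the payload out, capped at arq_buf_size (883-888), re-post the buffer into its descriptor (894-898), and advance next_to_clean with wrap (908-910). The pending count at line 916 must cope with the producer index having wrapped past the consumer. Both helpers below are hypothetical names but use the listing's exact arithmetic:

    #include <stdint.h>

    /* consumer index advance with wrap, as lines 908-910 do */
    static uint32_t arq_advance(uint32_t ntc, uint32_t count)
    {
        if (++ntc == count)
            ntc = 0;
        return ntc;
    }

    /* the formula at line 916: if the consumer index (ntc) is numerically
     * ahead of the producer index (ntu), the producer has wrapped, so add
     * the ring size back in */
    static uint32_t arq_pending(uint32_t ntc, uint32_t ntu, uint32_t count)
    {
        return (ntc > ntu ? count : 0) + (ntu - ntc);
    }

For example, with count = 16, ntu = 3 and ntc = 10, arq_pending() returns 16 + (3 - 10) = 9 entries still to process; unsigned wraparound makes the subtraction come out right.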