Results restricted to /barrelfish-2018-10-04/lib/devif/backends/net/mlx4/drivers/infiniband/hw/mthca/

Lines Matching defs:eq

172 static inline void tavor_set_eq_ci(struct mthca_dev *dev, struct mthca_eq *eq, u32 ci)
183 mthca_write64(MTHCA_EQ_DB_SET_CI | eq->eqn, ci & (eq->nent - 1),
188 static inline void arbel_set_eq_ci(struct mthca_dev *dev, struct mthca_eq *eq, u32 ci)
193 dev->eq_regs.arbel.eq_set_ci_base + eq->eqn * 8);
198 static inline void set_eq_ci(struct mthca_dev *dev, struct mthca_eq *eq, u32 ci)
201 arbel_set_eq_ci(dev, eq, ci);
203 tavor_set_eq_ci(dev, eq, ci);
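
The two consumer-index paths above differ only in how the index reaches the hardware: Tavor rings a shared 64-bit doorbell carrying the EQ number, while Arbel writes into a per-EQ register at eq_set_ci_base + eqn * 8. Below is a minimal user-space sketch of that arithmetic; the struct layout and the MTHCA_EQ_DB_SET_CI value are placeholders for illustration only, and the real driver writes to memory-mapped registers instead of printing.

#include <stdint.h>
#include <stdio.h>

#define MTHCA_EQ_DB_SET_CI  (1u << 24)   /* placeholder command bit, not the real encoding */

struct fake_eq {
    int      eqn;    /* EQ number */
    uint32_t nent;   /* number of entries, a power of two */
};

/* Tavor: one shared 64-bit doorbell; the command and EQN go in the
 * high word, the masked consumer index in the low word. */
static void tavor_set_eq_ci(const struct fake_eq *eq, uint32_t ci)
{
    uint32_t hi = MTHCA_EQ_DB_SET_CI | (uint32_t)eq->eqn;
    uint32_t lo = ci & (eq->nent - 1);
    printf("tavor doorbell: hi=0x%08x lo=0x%08x\n", (unsigned)hi, (unsigned)lo);
}

/* Arbel: each EQ has its own set_ci register, spaced 8 bytes apart
 * from eq_set_ci_base. */
static void arbel_set_eq_ci(const struct fake_eq *eq, uint32_t ci)
{
    unsigned long off = (unsigned long)eq->eqn * 8;
    printf("arbel: write ci=%u at eq_set_ci_base + %lu\n", (unsigned)ci, off);
}

int main(void)
{
    struct fake_eq eq = { .eqn = 3, .nent = 256 };
    tavor_set_eq_ci(&eq, 300);   /* 300 & 255 == 44: the index wraps */
    arbel_set_eq_ci(&eq, 300);
    return 0;
}
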
227 static inline struct mthca_eqe *get_eqe(struct mthca_eq *eq, u32 entry)
229 unsigned long off = (entry & (eq->nent - 1)) * MTHCA_EQ_ENTRY_SIZE;
230 return eq->page_list[off / PAGE_SIZE].buf + off % PAGE_SIZE;
233 static inline struct mthca_eqe *next_eqe_sw(struct mthca_eq *eq)
236 eqe = get_eqe(eq, eq->cons_index);
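
get_eqe() treats the EQ as a power-of-two ring spread across a list of pages: the entry index is wrapped with (entry & (nent - 1)), then split into a page number and an in-page offset. A self-contained sketch of that lookup follows, with illustrative page and entry sizes (the real MTHCA_EQ_ENTRY_SIZE is not shown in this listing). Backing the ring with a page list rather than one buffer presumably avoids needing a single large contiguous DMA allocation, which matches the per-page dma_alloc_coherent() at line 500.

#include <stdio.h>
#include <stdlib.h>

#define FAKE_PAGE_SIZE  4096u
#define FAKE_ENTRY_SIZE 32u              /* assumed EQE size */

struct fake_eq {
    unsigned       nent;                 /* power of two */
    unsigned char **page_list;           /* one buffer per page */
};

static void *get_eqe(struct fake_eq *eq, unsigned entry)
{
    unsigned long off = (entry & (eq->nent - 1)) * FAKE_ENTRY_SIZE;
    return eq->page_list[off / FAKE_PAGE_SIZE] + off % FAKE_PAGE_SIZE;
}

int main(void)
{
    struct fake_eq eq = { .nent = 256 };
    unsigned npages = (eq.nent * FAKE_ENTRY_SIZE + FAKE_PAGE_SIZE - 1) / FAKE_PAGE_SIZE;

    eq.page_list = calloc(npages, sizeof *eq.page_list);
    for (unsigned i = 0; i < npages; ++i)
        eq.page_list[i] = calloc(1, FAKE_PAGE_SIZE);

    /* Entry 200 lands on page 1; entry 256 wraps back to entry 0. */
    printf("entry 0   -> %p\n", get_eqe(&eq, 0));
    printf("entry 200 -> %p\n", get_eqe(&eq, 200));
    printf("entry 256 -> %p (same as entry 0)\n", get_eqe(&eq, 256));

    for (unsigned i = 0; i < npages; ++i)
        free(eq.page_list[i]);
    free(eq.page_list);
    return 0;
}
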
259 static int mthca_eq_int(struct mthca_dev *dev, struct mthca_eq *eq)
266 while ((eqe = next_eqe_sw(eq))) {
276 disarm_cq(dev, eq->eqn, disarm_cqn);
348 mthca_warn(dev, "EQ overrun on EQN %d\n", eq->eqn);
357 eqe->type, eqe->subtype, eq->eqn);
362 ++eq->cons_index;
378 set_eq_ci(dev, eq, eq->cons_index);
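
mthca_eq_int() drains every entry that hardware has handed to software, bumping cons_index per entry, and publishes the new consumer index once afterwards (line 378), so the doorbell write is amortised over the whole batch. A simplified model of that loop is sketched below; the single hw_owned flag stands in for the real EQE ownership marking (set back to hardware much like set_eqe_hw() at line 512), whose exact bit encoding is not visible in this listing.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define NENT 8u                            /* power of two */

struct fake_eqe { bool hw_owned; int type; };

struct fake_eq {
    uint32_t        cons_index;
    struct fake_eqe ring[NENT];
};

static struct fake_eqe *next_eqe_sw(struct fake_eq *eq)
{
    struct fake_eqe *eqe = &eq->ring[eq->cons_index & (NENT - 1)];
    return eqe->hw_owned ? NULL : eqe;     /* stop at the first hw-owned slot */
}

static int eq_int(struct fake_eq *eq)
{
    struct fake_eqe *eqe;
    int handled = 0;

    while ((eqe = next_eqe_sw(eq))) {
        printf("event type %d at index %u\n", eqe->type,
               (unsigned)(eq->cons_index & (NENT - 1)));
        eqe->hw_owned = true;              /* hand the slot back, like set_eqe_hw() */
        ++eq->cons_index;
        ++handled;
    }
    /* one consumer-index update for the whole batch, like set_eq_ci() */
    printf("publish cons_index = %u\n", (unsigned)eq->cons_index);
    return handled;
}

int main(void)
{
    struct fake_eq eq = { 0 };
    for (unsigned i = 0; i < NENT; ++i)
        eq.ring[i].hw_owned = true;        /* empty ring: all slots hw-owned */

    for (unsigned i = 0; i < 3; ++i) {     /* hardware posts three events */
        eq.ring[i].hw_owned = false;
        eq.ring[i].type = (int)i;
    }
    eq_int(&eq);
    return 0;
}
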
407 if (ecr & dev->eq_table.eq[i].eqn_mask) {
408 if (mthca_eq_int(dev, &dev->eq_table.eq[i]))
409 tavor_set_eq_ci(dev, &dev->eq_table.eq[i],
410 dev->eq_table.eq[i].cons_index);
411 tavor_eq_req_not(dev, dev->eq_table.eq[i].eqn);
419 struct mthca_eq *eq = eq_ptr;
420 struct mthca_dev *dev = eq->dev;
422 mthca_eq_int(dev, eq);
423 tavor_set_eq_ci(dev, eq, eq->cons_index);
424 tavor_eq_req_not(dev, eq->eqn);
440 if (mthca_eq_int(dev, &dev->eq_table.eq[i])) {
442 arbel_set_eq_ci(dev, &dev->eq_table.eq[i],
443 dev->eq_table.eq[i].cons_index);
453 struct mthca_eq *eq = eq_ptr;
454 struct mthca_dev *dev = eq->dev;
456 mthca_eq_int(dev, eq);
457 arbel_set_eq_ci(dev, eq, eq->cons_index);
458 arbel_eq_req_not(dev, eq->eqn_mask);
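
The Tavor and Arbel interrupt paths differ in how events reach an EQ: the shared Tavor handler scans an event-cause register (ECR) against each EQ's eqn_mask (line 407), whereas the MSI-X handlers receive their EQ directly through eq_ptr and skip the scan. The sketch below models only the ECR-scan dispatch, using plain bitmasks; the driver itself stores eqn_mask byte-swapped via swab32 (line 560) to match the register's endianness and follows up with doorbell writes.

#include <stdint.h>
#include <stdio.h>

#define NUM_EQS 3

struct fake_eq {
    int      eqn;
    uint32_t eqn_mask;
    uint32_t cons_index;
};

static int eq_int(struct fake_eq *eq)
{
    ++eq->cons_index;                      /* pretend one event was consumed */
    return 1;
}

static void dispatch(struct fake_eq *eqs, uint32_t ecr)
{
    for (int i = 0; i < NUM_EQS; ++i) {
        if (ecr & eqs[i].eqn_mask) {
            if (eq_int(&eqs[i]))
                printf("EQ %d: set consumer index to %u\n",
                       eqs[i].eqn, (unsigned)eqs[i].cons_index);
            printf("EQ %d: request next event notification\n", eqs[i].eqn);
        }
    }
}

int main(void)
{
    struct fake_eq eqs[NUM_EQS];
    for (int i = 0; i < NUM_EQS; ++i)
        eqs[i] = (struct fake_eq){ .eqn = i, .eqn_mask = 1u << i };

    dispatch(eqs, 0x5);   /* bits 0 and 2 set: EQs 0 and 2 are serviced */
    return 0;
}
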
467 struct mthca_eq *eq)
478 eq->dev = dev;
479 eq->nent = roundup_pow_of_two(max(nent, 2));
480 npages = ALIGN(eq->nent * MTHCA_EQ_ENTRY_SIZE, PAGE_SIZE) / PAGE_SIZE;
482 eq->page_list = kmalloc(npages * sizeof *eq->page_list,
484 if (!eq->page_list)
488 eq->page_list[i].buf = NULL;
500 eq->page_list[i].buf = dma_alloc_coherent(&dev->pdev->dev,
502 if (!eq->page_list[i].buf)
506 pci_unmap_addr_set(&eq->page_list[i], mapping, t);
508 clear_page(eq->page_list[i].buf);
511 for (i = 0; i < eq->nent; ++i)
512 set_eqe_hw(get_eqe(eq, i));
514 eq->eqn = mthca_alloc(&dev->eq_table.alloc);
515 if (eq->eqn == -1)
523 &eq->mr);
535 eq_context->logsize_usrpage = cpu_to_be32((ffs(eq->nent) - 1) << 24);
543 eq_context->lkey = cpu_to_be32(eq->mr.ibmr.lkey);
545 err = mthca_SW2HW_EQ(dev, mailbox, eq->eqn, &status);
560 eq->eqn_mask = swab32(1 << eq->eqn);
561 eq->cons_index = 0;
563 dev->eq_table.arm_mask |= eq->eqn_mask;
566 eq->eqn, eq->nent);
571 mthca_free_mr(dev, &eq->mr);
574 mthca_free(&dev->eq_table.alloc, eq->eqn);
578 if (eq->page_list[i].buf)
580 eq->page_list[i].buf,
581 pci_unmap_addr(&eq->page_list[i],
587 kfree(eq->page_list);
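
mthca_create_eq() rounds the requested entry count up to a power of two (with a minimum of 2), backs the ring with whole pages, and encodes log2(nent) into the EQ context via ffs(nent) - 1 (line 535). The following sketch covers just that sizing arithmetic, with illustrative page and entry sizes standing in for PAGE_SIZE and MTHCA_EQ_ENTRY_SIZE.

#include <stdio.h>

#define FAKE_PAGE_SIZE  4096u
#define FAKE_ENTRY_SIZE 32u

static unsigned roundup_pow_of_two(unsigned x)
{
    unsigned r = 1;
    while (r < x)
        r <<= 1;
    return r;
}

static unsigned ilog2u(unsigned x)         /* x is a power of two */
{
    unsigned l = 0;
    while (x >>= 1)
        ++l;
    return l;
}

int main(void)
{
    unsigned requested = 1000;

    unsigned nent   = roundup_pow_of_two(requested < 2 ? 2 : requested);
    unsigned bytes  = nent * FAKE_ENTRY_SIZE;
    unsigned npages = (bytes + FAKE_PAGE_SIZE - 1) / FAKE_PAGE_SIZE;
    unsigned logsz  = ilog2u(nent);        /* equals ffs(nent) - 1 for a power of two */

    printf("requested=%u nent=%u npages=%u log2(nent)=%u\n",
           requested, nent, npages, logsz);
    return 0;
}
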
595 struct mthca_eq *eq)
600 int npages = (eq->nent * MTHCA_EQ_ENTRY_SIZE + PAGE_SIZE - 1) /
608 err = mthca_HW2SW_EQ(dev, mailbox, eq->eqn, &status);
614 dev->eq_table.arm_mask &= ~eq->eqn_mask;
617 mthca_dbg(dev, "Dumping EQ context %02x:\n", eq->eqn);
627 mthca_free_mr(dev, &eq->mr);
630 eq->page_list[i].buf,
631 pci_unmap_addr(&eq->page_list[i], mapping));
633 kfree(eq->page_list);
644 if (dev->eq_table.eq[i].have_irq) {
645 free_irq(dev->eq_table.eq[i].msi_x_vector,
646 dev->eq_table.eq + i);
647 dev->eq_table.eq[i].have_irq = 0;
814 &dev->eq_table.eq[MTHCA_EQ_COMP]);
820 &dev->eq_table.eq[MTHCA_EQ_ASYNC]);
826 &dev->eq_table.eq[MTHCA_EQ_CMD]);
838 err = request_irq(dev->eq_table.eq[i].msi_x_vector,
842 0, eq_name[i], dev->eq_table.eq + i);
845 dev->eq_table.eq[i].have_irq = 1;
859 0, dev->eq_table.eq[MTHCA_EQ_ASYNC].eqn, &status);
862 dev->eq_table.eq[MTHCA_EQ_ASYNC].eqn, err);
865 dev->eq_table.eq[MTHCA_EQ_ASYNC].eqn, status);
868 0, dev->eq_table.eq[MTHCA_EQ_CMD].eqn, &status);
871 dev->eq_table.eq[MTHCA_EQ_CMD].eqn, err);
874 dev->eq_table.eq[MTHCA_EQ_CMD].eqn, status);
878 arbel_eq_req_not(dev, dev->eq_table.eq[i].eqn_mask);
880 tavor_eq_req_not(dev, dev->eq_table.eq[i].eqn);
886 mthca_free_eq(dev, &dev->eq_table.eq[MTHCA_EQ_CMD]);
889 mthca_free_eq(dev, &dev->eq_table.eq[MTHCA_EQ_ASYNC]);
892 mthca_free_eq(dev, &dev->eq_table.eq[MTHCA_EQ_COMP]);
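
With MSI-X, mthca_init_eq_table() gives each of the three EQs (completion, async, command) its own vector and registers a separate handler with the EQ itself as the context pointer (lines 838-845), setting have_irq so the teardown loop at line 644 knows what to release. The rough user-space model below replaces request_irq() with a plain function-pointer table; handler names and vector numbers are illustrative only.

#include <stdio.h>

enum { EQ_COMP, EQ_ASYNC, EQ_CMD, NUM_EQS };

struct fake_eq {
    int  eqn;
    int  msi_x_vector;
    int  have_irq;
    void (*handler)(struct fake_eq *eq);
};

static void comp_handler(struct fake_eq *eq)  { printf("comp  irq, eqn %d\n", eq->eqn); }
static void async_handler(struct fake_eq *eq) { printf("async irq, eqn %d\n", eq->eqn); }
static void cmd_handler(struct fake_eq *eq)   { printf("cmd   irq, eqn %d\n", eq->eqn); }

int main(void)
{
    static void (*const handlers[NUM_EQS])(struct fake_eq *) = {
        comp_handler, async_handler, cmd_handler
    };
    struct fake_eq eq_table[NUM_EQS];

    /* "request_irq": bind vector i to EQ i, passing the EQ as the cookie */
    for (int i = 0; i < NUM_EQS; ++i) {
        eq_table[i] = (struct fake_eq){ .eqn = i, .msi_x_vector = 32 + i };
        eq_table[i].handler  = handlers[i];
        eq_table[i].have_irq = 1;
    }

    /* simulate each vector firing once */
    for (int i = 0; i < NUM_EQS; ++i)
        eq_table[i].handler(&eq_table[i]);

    /* teardown: only registered IRQs are freed, as in the have_irq check above */
    for (int i = 0; i < NUM_EQS; ++i)
        if (eq_table[i].have_irq) {
            printf("free_irq vector %d\n", eq_table[i].msi_x_vector);
            eq_table[i].have_irq = 0;
        }
    return 0;
}
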
910 1, dev->eq_table.eq[MTHCA_EQ_ASYNC].eqn, &status);
912 1, dev->eq_table.eq[MTHCA_EQ_CMD].eqn, &status);
915 mthca_free_eq(dev, &dev->eq_table.eq[i]);