Lines Matching defs:epkt

40     px_rc_err_t *epkt);
41 static uint_t px_err_intr(px_fault_t *fault_p, px_rc_err_t *epkt);
43 px_rc_err_t *epkt, pf_data_t *pfd_p);
45 static void px_err_log_handle(dev_info_t *dip, px_rc_err_t *epkt,
47 static void px_err_send_epkt_erpt(dev_info_t *dip, px_rc_err_t *epkt,
51 px_rc_err_t *epkt, pf_data_t *pfd_p);
53 px_rc_err_t *epkt, pf_data_t *pfd_p);
55 px_rc_err_t *epkt, pf_data_t *pfd_p);
57 px_rc_err_t *epkt, pf_data_t *pfd_p);
59 px_rc_err_t *epkt, pf_data_t *pfd_p);
61 px_rc_err_t *epkt, pf_data_t *pfd_p);
63 px_rc_err_t *epkt, pf_data_t *pfd_p);
65 px_rc_err_t *epkt);
67 px_rc_err_t *epkt);
69 /* Include the code-generated sun4v epkt checking code */
87 px_rc_err_t *epkt = (px_rc_err_t *)fault_p->px_intr_payload;
89 if (epkt != NULL) {
90 return (px_err_intr(fault_p, epkt));
104 px_rc_err_t *epkt = (px_rc_err_t *)fault_p->px_intr_payload;
106 if (epkt != NULL) {
107 return (px_err_intr(fault_p, epkt));
141 px_err_fill_pfd(dev_info_t *dip, pf_data_t *pfd_p, px_rc_err_t *epkt) {
150 switch (epkt->rc_descr.block) {
156 if (epkt->rc_descr.H) {
157 fault_bdf = (pcie_req_id_t)(epkt->hdr[0] >> 16);
165 pec_p = (px_pec_err_t *)epkt;
257 * o px_err_check_severity(epkt)
264 px_err_intr(px_fault_t *fault_p, px_rc_err_t *epkt)
277 PCIE_ROOT_EH_SRC(pfd_p)->intr_data = epkt;
282 derr.fme_ena = fm_ena_generate(epkt->stick, FM_ENA_FMT1);
289 rc_err = px_err_epkt_severity(px_p, &derr, epkt, pfd_p);
311 switch (epkt->rc_descr.block) {
335 * Check the severity of the fire error based on the epkt received
339 * @param epkt epkt received from HV
342 px_err_epkt_severity(px_t *px_p, ddi_fm_error_t *derr, px_rc_err_t *epkt,
379 px_fix_legacy_epkt(dip, derr, epkt);
387 switch (epkt->rc_descr.block) {
389 err = px_cb_epkt_severity(dip, derr, epkt, pfd_p);
392 err = px_mmu_epkt_severity(dip, derr, epkt, pfd_p);
395 err = px_intr_epkt_severity(dip, derr, epkt, pfd_p);
398 err = px_port_epkt_severity(dip, derr, epkt, pfd_p);
402 err = px_pcie_epkt_severity(dip, derr, epkt, pfd_p);
408 px_err_fill_pfd(dip, pfd_p, epkt);
412 px_err_log_handle(dip, epkt, is_block_pci, "PANIC");
416 px_err_log_handle(dip, epkt, is_block_pci, "PROTECTED");
420 px_err_log_handle(dip, epkt, is_block_pci, "NO PANIC");
424 px_err_log_handle(dip, epkt, is_block_pci, "NO ERROR");
427 px_err_log_handle(dip, epkt, is_block_pci, "UNRECOGNIZED");
430 /* Panic on an unrecognized epkt */
434 px_err_send_epkt_erpt(dip, epkt, is_block_pci, err, derr,
445 px_err_send_epkt_erpt(dev_info_t *dip, px_rc_err_t *epkt,
455 px_pec_err_t *pec = (px_pec_err_t *)epkt;
500 epkt->rc_descr.block, epkt->rc_descr.op,
501 epkt->rc_descr.phase, epkt->rc_descr.cond,
502 epkt->rc_descr.dir, epkt->rc_descr.STOP,
503 epkt->rc_descr.H, epkt->rc_descr.R,
504 epkt->rc_descr.D, epkt->rc_descr.M,
505 epkt->rc_descr.S, epkt->size, epkt->addr,
506 epkt->hdr[0], epkt->hdr[1], epkt->reserved,
512 is_valid_epkt ? epkt->sysino : 0,
514 is_valid_epkt ? epkt->ehdl : 0,
516 is_valid_epkt ? epkt->stick : 0,
517 EPKT_DW0, DATA_TYPE_UINT64, ((uint64_t *)epkt)[3],
518 EPKT_DW1, DATA_TYPE_UINT64, ((uint64_t *)epkt)[4],
519 EPKT_DW2, DATA_TYPE_UINT64, ((uint64_t *)epkt)[5],
520 EPKT_DW3, DATA_TYPE_UINT64, ((uint64_t *)epkt)[6],
521 EPKT_DW4, DATA_TYPE_UINT64, ((uint64_t *)epkt)[7],
527 px_err_log_handle(dev_info_t *dip, px_rc_err_t *epkt, boolean_t is_block_pci,
531 px_pec_err_t *pec = (px_pec_err_t *)epkt;
555 msg, epkt->rc_descr.block, epkt->rc_descr.op,
556 epkt->rc_descr.phase, epkt->rc_descr.cond,
557 epkt->rc_descr.dir, epkt->rc_descr.STOP, epkt->rc_descr.H,
558 epkt->rc_descr.R, epkt->rc_descr.D, epkt->rc_descr.M,
559 epkt->rc_descr.S, epkt->size, epkt->addr, epkt->hdr[0],
560 epkt->hdr[1], epkt->reserved);
566 px_fix_legacy_epkt(dev_info_t *dip, ddi_fm_error_t *derr, px_rc_err_t *epkt)
572 switch (epkt->rc_descr.block) {
574 switch (epkt->rc_descr.op) {
576 switch (epkt->rc_descr.phase) {
578 switch (epkt->rc_descr.cond) {
580 switch (epkt->rc_descr.dir) {
582 epkt->rc_descr.dir = DIR_READ;
590 switch (epkt->rc_descr.op) {
592 switch (epkt->rc_descr.phase) {
594 switch (epkt->rc_descr.cond) {
596 switch (epkt->rc_descr.dir) {
598 epkt->rc_descr.dir = DIR_WRITE;
604 switch (epkt->rc_descr.cond) {
606 switch (epkt->rc_descr.dir) {
608 epkt->rc_descr.phase = PH_ADDR;
609 epkt->rc_descr.cond = CND_IRR;
616 switch (epkt->rc_descr.op) {
618 switch (epkt->rc_descr.phase) {
620 switch (epkt->rc_descr.cond) {
622 switch (epkt->rc_descr.dir) {
624 epkt->rc_descr.dir = DIR_IRR;
629 switch (epkt->rc_descr.dir) {
631 epkt->rc_descr.cond = CND_OV;
638 switch (epkt->rc_descr.phase) {
640 switch (epkt->rc_descr.cond) {
642 switch (epkt->rc_descr.dir) {
644 epkt->rc_descr.op = OP_MSI32;
645 epkt->rc_descr.phase = PH_DATA;
651 switch (epkt->rc_descr.cond) {
653 switch (epkt->rc_descr.dir) {
655 epkt->rc_descr.op = OP_MSI32;
666 px_intr_handle_errors(dev_info_t *dip, ddi_fm_error_t *derr, px_rc_err_t *epkt,
674 px_port_handle_errors(dev_info_t *dip, ddi_fm_error_t *derr, px_rc_err_t *epkt,
685 if (!((epkt->rc_descr.op == OP_PIO) &&
686 (epkt->rc_descr.phase == PH_IRR))) {
695 if (!epkt->rc_descr.H) {
700 adv_reg.pcie_ue_hdr[0] = (uint32_t)(epkt->hdr[0] >> 32);
701 adv_reg.pcie_ue_hdr[1] = (uint32_t)(epkt->hdr[0]);
702 adv_reg.pcie_ue_hdr[2] = (uint32_t)(epkt->hdr[1] >> 32);
703 adv_reg.pcie_ue_hdr[3] = (uint32_t)(epkt->hdr[1]);
707 if (epkt->rc_descr.M)
708 adv_reg.pcie_ue_tgt_addr = epkt->addr;
710 if (!((sts == DDI_SUCCESS) || (epkt->rc_descr.M))) {
725 switch (epkt->rc_descr.cond) {
746 px_pcie_epkt_severity(dev_info_t *dip, ddi_fm_error_t *derr, px_rc_err_t *epkt,
749 px_pec_err_t *pec_p = (px_pec_err_t *)epkt;
750 px_err_pcie_t *pcie = (px_err_pcie_t *)epkt;
797 * and find the first error that occurred. Because the sun4v epkt spec
833 px_mmu_handle_lookup(dev_info_t *dip, ddi_fm_error_t *derr, px_rc_err_t *epkt)
835 uint64_t addr = (uint64_t)epkt->addr;
838 if (epkt->rc_descr.H) {
839 bdf = (uint32_t)((epkt->hdr[0] >> 16) & 0xFFFF);
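
For orientation, a minimal stand-alone sketch (assumed function names and arbitrary demo data, not part of px_err.c) of the header-word parsing the listed lines perform: the fault BDF is taken from bits 31:16 of epkt->hdr[0] with a bitwise mask (lines 157 and 839), and the four 32-bit dwords handed to the PCIe fabric code are the high and low halves of hdr[0] and hdr[1] (lines 700-703).

#include <stdint.h>
#include <stdio.h>

/*
 * Stand-alone illustration only: mimic how the listing above pulls fields
 * out of an epkt's two 64-bit TLP header words.
 */
static uint16_t
epkt_hdr_bdf(const uint64_t hdr[2])
{
	/* Fault BDF: bits 31:16 of hdr[0]; bitwise AND, not logical AND. */
	return ((uint16_t)((hdr[0] >> 16) & 0xFFFF));
}

static void
epkt_hdr_to_dwords(const uint64_t hdr[2], uint32_t ue_hdr[4])
{
	/* Split each 64-bit header word into its high and low 32-bit dwords. */
	ue_hdr[0] = (uint32_t)(hdr[0] >> 32);
	ue_hdr[1] = (uint32_t)hdr[0];
	ue_hdr[2] = (uint32_t)(hdr[1] >> 32);
	ue_hdr[3] = (uint32_t)hdr[1];
}

int
main(void)
{
	/* Arbitrary demo header words, not real epkt contents. */
	uint64_t hdr[2] = { 0x4A000001DEAD0000ULL, 0x00000000BEEF0000ULL };
	uint32_t ue_hdr[4];

	epkt_hdr_to_dwords(hdr, ue_hdr);
	(void) printf("bdf=0x%04x dw=%08x %08x %08x %08x\n",
	    epkt_hdr_bdf(hdr), ue_hdr[0], ue_hdr[1], ue_hdr[2], ue_hdr[3]);
	return (0);
}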