Lines matching refs: mci (drivers/edac/dmc520_edac.c, the Arm DMC-520 EDAC driver)

165  * error_lock is to protect concurrent writes to the mci->error_desc through
365 static void dmc520_handle_dram_ecc_errors(struct mem_ctl_info *mci,
368 struct dmc520_edac *pvt = mci->pvt_info;
387 mci, cnt, 0, 0, 0, info.rank, -1, -1,
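
The fragment at file line 387 is the tail of an edac_mc_handle_error() call; the EDAC core signature is (type, mci, error_count, page_frame_number, offset_in_page, syndrome, top_layer, mid_layer, low_layer, msg, other_detail), so the rank lands in the top layer and the other two layers are unused (-1). A minimal sketch of the surrounding handler at lines 365-387; the register-readout helper names, the ecc_error_info field names, and the message buffer size are assumptions taken from the message format, not confirmed by the listing:

static void dmc520_handle_dram_ecc_errors(struct mem_ctl_info *mci,
					  bool is_ce)
{
	struct dmc520_edac *pvt = mci->pvt_info;
	char message[64];	/* size chosen arbitrarily for the sketch */
	struct ecc_error_info info;
	u32 cnt;

	/* Assumed helpers: read the decoded error record and the
	 * error count from the DMC-520 interrupt-info registers. */
	dmc520_get_dram_ecc_error_info(pvt, is_ce, &info);
	cnt = dmc520_get_dram_ecc_error_count(pvt, is_ce);
	if (!cnt)
		return;

	snprintf(message, ARRAY_SIZE(message),
		 "rank:%d bank:%d row:%d col:%d",
		 info.rank, info.bank, info.row, info.col);

	/* Per the comment at file line 165, error_lock serializes
	 * concurrent writers of mci->error_desc. */
	spin_lock(&pvt->error_lock);
	edac_mc_handle_error(is_ce ? HW_EVENT_ERR_CORRECTED
				   : HW_EVENT_ERR_UNCORRECTED,
			     mci, cnt, 0, 0, 0, info.rank, -1, -1,
			     message, "");
	spin_unlock(&pvt->error_lock);
}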
392 static irqreturn_t dmc520_edac_dram_ecc_isr(int irq, struct mem_ctl_info *mci,
395 struct dmc520_edac *pvt = mci->pvt_info;
400 dmc520_handle_dram_ecc_errors(mci, is_ce);
407 static irqreturn_t dmc520_edac_dram_all_isr(int irq, struct mem_ctl_info *mci,
410 struct dmc520_edac *pvt = mci->pvt_info;
418 irq_ret = dmc520_edac_dram_ecc_isr(irq, mci, true);
422 irq_ret = dmc520_edac_dram_ecc_isr(irq, mci, false);
429 struct mem_ctl_info *mci = data;
430 struct dmc520_edac *pvt = mci->pvt_info;
440 return dmc520_edac_dram_all_isr(irq, mci, mask);
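
File lines 429-440 are the handler actually registered with the IRQ core: it maps the firing Linux irq number back to the DMC-520 status-mask bits that line was wired to (pvt->masks[] is assumed to be filled alongside pvt->irqs[] at probe time), then fans out by severity, matching the two calls at lines 418 and 422. A sketch of the dispatch chain; the CE/UE mask macro names are assumptions:

static irqreturn_t dmc520_edac_dram_all_isr(int irq,
					    struct mem_ctl_info *mci,
					    u32 mask)
{
	irqreturn_t irq_ret = IRQ_NONE;

	/* Bit macros assumed; one handler invocation per severity. */
	if (mask & DRAM_ECC_INT_CE_MASK)
		irq_ret = dmc520_edac_dram_ecc_isr(irq, mci, true);

	if (mask & DRAM_ECC_INT_UE_MASK)
		irq_ret = dmc520_edac_dram_ecc_isr(irq, mci, false);

	return irq_ret;
}

static irqreturn_t dmc520_isr(int irq, void *data)
{
	struct mem_ctl_info *mci = data;
	struct dmc520_edac *pvt = mci->pvt_info;
	u32 mask = 0;
	int idx;

	/* Translate the Linux irq number back to the interrupt-status
	 * bits this line was registered for. */
	for (idx = 0; idx < NUMBER_OF_IRQS; idx++) {
		if (pvt->irqs[idx] == irq) {
			mask = pvt->masks[idx];
			break;
		}
	}

	return dmc520_edac_dram_all_isr(irq, mci, mask);
}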
443 static void dmc520_init_csrow(struct mem_ctl_info *mci)
445 struct dmc520_edac *pvt = mci->pvt_info;
459 for (row = 0; row < mci->nr_csrows; row++) {
460 csi = mci->csrows[row];
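
File lines 443-460 populate the static csrow/DIMM layout after allocation. A sketch of the loop, assuming a rank-size helper read from the DMC-520 address-control registers; the per-DIMM fields set here (grain, dtype, mtype, edac_mode, nr_pages) are the standard struct dimm_info members:

static void dmc520_init_csrow(struct mem_ctl_info *mci)
{
	struct dmc520_edac *pvt = mci->pvt_info;
	struct csrow_info *csi;
	struct dimm_info *dimm;
	u32 pages_per_rank;
	int row, ch;

	/* Assumed helper: rank size in bytes, converted to pages. */
	pages_per_rank = dmc520_get_rank_size(pvt) >> PAGE_SHIFT;

	for (row = 0; row < mci->nr_csrows; row++) {
		csi = mci->csrows[row];
		for (ch = 0; ch < csi->nr_channels; ch++) {
			dimm = csi->channels[ch]->dimm;
			dimm->grain	= pvt->mem_width_in_bytes;
			dimm->dtype	= dmc520_get_dtype(pvt);
			dimm->mtype	= dmc520_get_mtype(pvt);
			dimm->edac_mode	= EDAC_FLAG_SECDED;
			dimm->nr_pages	= pages_per_rank / csi->nr_channels;
		}
	}
}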
480 struct mem_ctl_info *mci;
520 mci = edac_mc_alloc(dmc520_mc_idx++, ARRAY_SIZE(layers), layers, sizeof(*pvt));
521 if (!mci) {
528 pvt = mci->pvt_info;
535 platform_set_drvdata(pdev, mci);
537 mci->pdev = dev;
538 mci->mtype_cap = MEM_FLAG_DDR3 | MEM_FLAG_DDR4;
539 mci->edac_ctl_cap = EDAC_FLAG_NONE | EDAC_FLAG_SECDED;
540 mci->edac_cap = EDAC_FLAG_SECDED;
541 mci->scrub_cap = SCRUB_FLAG_HW_SRC;
542 mci->scrub_mode = dmc520_get_scrub_type(pvt);
543 mci->ctl_name = EDAC_CTL_NAME;
544 mci->dev_name = dev_name(mci->pdev);
545 mci->mod_name = EDAC_MOD_NAME;
551 dmc520_init_csrow(mci);
564 dev_name(&pdev->dev), mci);
581 ret = edac_mc_add_mc(mci);
597 devm_free_irq(&pdev->dev, pvt->irqs[idx], mci);
599 if (mci)
600 edac_mc_free(mci);
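
File lines 480-600 are the probe path: allocate the mem_ctl_info with one chip-select layer, fill in the capability fields shown at lines 537-545, register the shared ISR on every interrupt line, and add the controller to the EDAC core; the tail at lines 597-600 is the error unwind. A condensed sketch; the rank-count helper, IRQF_SHARED flag, and indexed IRQ lookup are assumptions (the driver may resolve named interrupt lines instead):

static int dmc520_edac_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct edac_mc_layer layers[1];
	struct mem_ctl_info *mci;
	struct dmc520_edac *pvt;
	void __iomem *reg_base;
	int ret, idx;

	reg_base = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(reg_base))
		return PTR_ERR(reg_base);

	/* One chip-select layer, sized by the rank count read from
	 * the hardware (helper name assumed). */
	layers[0].type = EDAC_MC_LAYER_CHIP_SELECT;
	layers[0].size = dmc520_get_rank_count(reg_base);
	layers[0].is_virt_csrow = true;

	mci = edac_mc_alloc(dmc520_mc_idx++, ARRAY_SIZE(layers),
			    layers, sizeof(*pvt));
	if (!mci)
		return -ENOMEM;

	pvt = mci->pvt_info;
	pvt->reg_base = reg_base;
	platform_set_drvdata(pdev, mci);

	/* Capability fields exactly as in file lines 537-545. */
	mci->pdev	  = dev;
	mci->mtype_cap	  = MEM_FLAG_DDR3 | MEM_FLAG_DDR4;
	mci->edac_ctl_cap = EDAC_FLAG_NONE | EDAC_FLAG_SECDED;
	mci->edac_cap	  = EDAC_FLAG_SECDED;
	mci->scrub_cap	  = SCRUB_FLAG_HW_SRC;
	mci->scrub_mode	  = dmc520_get_scrub_type(pvt);
	mci->ctl_name	  = EDAC_CTL_NAME;
	mci->dev_name	  = dev_name(mci->pdev);
	mci->mod_name	  = EDAC_MOD_NAME;

	dmc520_init_csrow(mci);

	for (idx = 0; idx < NUMBER_OF_IRQS; idx++) {
		pvt->irqs[idx] = platform_get_irq(pdev, idx);
		if (pvt->irqs[idx] < 0) {
			ret = pvt->irqs[idx];
			goto err_free_irq;
		}
		ret = devm_request_irq(dev, pvt->irqs[idx], dmc520_isr,
				       IRQF_SHARED, dev_name(dev), mci);
		if (ret)
			goto err_free_irq;
		pvt->nintr_registered++;
	}

	ret = edac_mc_add_mc(mci);
	if (ret)
		goto err_free_irq;

	return 0;

err_free_irq:
	/* Mirrors file lines 597-600: release only the lines that were
	 * actually requested, then free the mci. */
	for (idx = 0; idx < pvt->nintr_registered; idx++)
		devm_free_irq(dev, pvt->irqs[idx], mci);
	edac_mc_free(mci);
	return ret;
}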
608 struct mem_ctl_info *mci;
611 mci = platform_get_drvdata(pdev);
612 pvt = mci->pvt_info;
623 devm_free_irq(&pdev->dev, pvt->irqs[idx], mci);
628 edac_mc_free(mci);
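
File lines 608-628 are the remove path. The teardown order matters: interrupt sources are silenced and the lines freed before the controller is deleted and the mci released, so no ISR can dereference a dying mci. A sketch under the usual EDAC conventions; the interrupt-mask helper is an assumption, while edac_mc_del_mc() followed by edac_mc_free() is the standard core teardown pair:

static int dmc520_edac_remove(struct platform_device *pdev)
{
	struct mem_ctl_info *mci = platform_get_drvdata(pdev);
	struct dmc520_edac *pvt = mci->pvt_info;
	int idx;

	/* Assumed helper: mask all DMC-520 interrupt sources so none
	 * can fire once the lines are freed below. */
	dmc520_write_interrupt_control(pvt, 0);

	for (idx = 0; idx < NUMBER_OF_IRQS; idx++)
		devm_free_irq(&pdev->dev, pvt->irqs[idx], mci);

	edac_mc_del_mc(&pdev->dev);
	edac_mc_free(mci);

	return 0;
}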