Lines matching refs:adap (every source line that references the adapter handle adap)

112  *	@adap: the adapter
122 static void t3_read_indirect(adapter_t *adap, unsigned int addr_reg,
127 t3_write_reg(adap, addr_reg, start_idx);
128 *vals++ = t3_read_reg(adap, data_reg);
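
The two matches above are the body of the indirect register read helper. A minimal sketch of that loop, reconstructed around the visible lines (the trailing parameters of the signature do not match refs:adap and are assumed from the usual address/data pairing):

    /* Read nregs consecutive values through an indirect address/data
     * register pair, starting at index start_idx. */
    static void t3_read_indirect(adapter_t *adap, unsigned int addr_reg,
                                 unsigned int data_reg, u32 *vals,
                                 unsigned int nregs, unsigned int start_idx)
    {
            while (nregs--) {
                    t3_write_reg(adap, addr_reg, start_idx++);
                    *vals++ = t3_read_reg(adap, data_reg);
            }
    }
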
150 adapter_t *adap = mc7->adapter;
164 t3_write_reg(adap, mc7->offset + A_MC7_BD_ADDR,
166 t3_write_reg(adap, mc7->offset + A_MC7_BD_OP, 0);
167 val = t3_read_reg(adap, mc7->offset + A_MC7_BD_OP);
169 val = t3_read_reg(adap,
174 val = t3_read_reg(adap, mc7->offset + A_MC7_BD_DATA1);
176 val64 = t3_read_reg(adap,
250 static void mi1_init(adapter_t *adap, const struct adapter_info *ai)
252 u32 clkdiv = adap->params.vpd.cclk / (2 * adap->params.vpd.mdc) - 1;
255 t3_write_reg(adap, A_MI1_CFG, val);
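
The clkdiv computed above programs the MDIO (MI1) clock divider from the core clock: clkdiv = cclk / (2 * mdc) - 1, so the hardware generates MDC at cclk / (2 * (clkdiv + 1)). A worked example with assumed VPD values (not taken from this listing):

    /* Assumed example VPD values: 125 MHz core clock, 2.5 MHz target MDC,
     * both expressed in kHz as the driver's vpd fields are. */
    unsigned int cclk = 125000, mdc = 2500;
    unsigned int clkdiv = cclk / (2 * mdc) - 1;      /* = 24 */
    /* resulting MDC: 125000 / (2 * (24 + 1)) = 2500 kHz, as requested */
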
1412 * @adap: the adapter
1419 int t3_cim_ctl_blk_read(adapter_t *adap, unsigned int addr, unsigned int n,
1424 if (t3_read_reg(adap, A_CIM_HOST_ACC_CTRL) & F_HOSTBUSY)
1428 t3_write_reg(adap, A_CIM_HOST_ACC_CTRL, CIM_CTL_BASE + addr);
1429 ret = t3_wait_op_done(adap, A_CIM_HOST_ACC_CTRL, F_HOSTBUSY,
1432 *valp++ = t3_read_reg(adap, A_CIM_HOST_ACC_DATA);
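
These matches are the CIM control-block read path: bail out if the host access interface is already busy, then for each word write the target address, poll F_HOSTBUSY until it clears, and read back the data register. A minimal sketch of the loop, reconstructed around the visible lines (the loop framing and error handling are assumptions, not verbatim source):

    for ( ; !ret && n--; addr += 4) {
            t3_write_reg(adap, A_CIM_HOST_ACC_CTRL, CIM_CTL_BASE + addr);
            ret = t3_wait_op_done(adap, A_CIM_HOST_ACC_CTRL, F_HOSTBUSY,
                                  0, 5, 2);      /* wait for busy to clear */
            if (!ret)
                    *valp++ = t3_read_reg(adap, A_CIM_HOST_ACC_DATA);
    }
    return ret;
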
2146 static int mac_intr_handler(adapter_t *adap, unsigned int idx)
2152 idx = idx == 0 ? 0 : adapter_info(adap)->nports0; /* MAC idx -> port */
2153 pi = adap2pinfo(adap, idx);
2162 cause = (t3_read_reg(adap, A_XGM_INT_CAUSE + mac->offset)
2167 CH_ALERT(adap, "port%d: MAC TX FIFO parity error\n", idx);
2171 CH_ALERT(adap, "port%d: MAC RX FIFO parity error\n", idx);
2184 t3_read_reg(adap, A_XGM_INT_ENABLE + mac->offset)) {
2185 t3_set_reg_field(adap, A_XGM_INT_ENABLE + mac->offset,
2194 t3_fatal_err(adap);
2196 t3_write_reg(adap, A_XGM_INT_CAUSE + mac->offset, cause);
2293 static unsigned int calc_gpio_intr(adapter_t *adap)
2297 for_each_port(adap, i)
2298 if ((adap2pinfo(adap, i)->phy.caps & SUPPORTED_IRQ) &&
2299 adapter_info(adap)->gpio_intr[i])
2300 gpi_intr |= 1 << adapter_info(adap)->gpio_intr[i];
2523 static int clear_sge_ctxt(adapter_t *adap, unsigned int id, unsigned int type)
2525 t3_write_reg(adap, A_SG_CONTEXT_DATA0, 0);
2526 t3_write_reg(adap, A_SG_CONTEXT_DATA1, 0);
2527 t3_write_reg(adap, A_SG_CONTEXT_DATA2, 0);
2528 t3_write_reg(adap, A_SG_CONTEXT_DATA3, 0);
2529 t3_write_reg(adap, A_SG_CONTEXT_MASK0, 0xffffffff);
2530 t3_write_reg(adap, A_SG_CONTEXT_MASK1, 0xffffffff);
2531 t3_write_reg(adap, A_SG_CONTEXT_MASK2, 0xffffffff);
2532 t3_write_reg(adap, A_SG_CONTEXT_MASK3, 0xffffffff);
2533 t3_write_reg(adap, A_SG_CONTEXT_CMD,
2535 return t3_wait_op_done(adap, A_SG_CONTEXT_CMD, F_CONTEXT_CMD_BUSY,
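
clear_sge_ctxt, like several other helpers in this listing (t3_cim_ctl_blk_read, t3_set_proto_sram, tp_init, init_parity), completes by polling a busy bit through t3_wait_op_done. A minimal sketch of that polling helper's semantics, assuming the usual poll-until-polarity behaviour; the real driver routes this through a *_val variant, so treat the exact form and error code as assumptions:

    /* Poll reg until (reg & mask) matches the requested polarity, retrying
     * up to 'attempts' times with 'delay' microseconds between reads. */
    static int t3_wait_op_done(adapter_t *adap, int reg, u32 mask,
                               int polarity, int attempts, int delay)
    {
            while (1) {
                    u32 val = t3_read_reg(adap, reg);

                    if (!!(val & mask) == polarity)
                            return 0;
                    if (--attempts == 0)
                            return -EAGAIN;   /* assumed error code */
                    if (delay)
                            udelay(delay);
            }
    }
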
3014 * @adap: the adapter
3019 void t3_tp_set_offload_mode(adapter_t *adap, int enable)
3021 if (is_offload(adap) || !enable)
3022 t3_set_reg_field(adap, A_TP_IN_CONFIG, F_NICMODE,
3028 * @adap: the adapter
3035 static void tp_wr_bits_indirect(adapter_t *adap, unsigned int addr,
3038 t3_write_reg(adap, A_TP_PIO_ADDR, addr);
3039 val |= t3_read_reg(adap, A_TP_PIO_DATA) & ~mask;
3040 t3_write_reg(adap, A_TP_PIO_DATA, val);
3045 * @adap: the adapter
3049 void t3_enable_filters(adapter_t *adap)
3051 t3_set_reg_field(adap, A_TP_IN_CONFIG, F_NICMODE, 0);
3052 t3_set_reg_field(adap, A_MC5_DB_CONFIG, 0, F_FILTEREN);
3053 t3_set_reg_field(adap, A_TP_GLOBAL_CONFIG, 0, V_FIVETUPLELOOKUP(3));
3054 tp_wr_bits_indirect(adap, A_TP_INGRESS_CONFIG, 0, F_LOOKUPEVERYPKT);
3059 * @adap: the adapter
3063 void t3_disable_filters(adapter_t *adap)
3066 t3_set_reg_field(adap, A_MC5_DB_CONFIG, F_FILTEREN, 0);
3067 t3_set_reg_field(adap, A_TP_GLOBAL_CONFIG,
3069 tp_wr_bits_indirect(adap, A_TP_INGRESS_CONFIG, F_LOOKUPEVERYPKT, 0);
3089 #define mem_region(adap, start, size, reg) \
3090 t3_write_reg((adap), A_ ## reg, (start)); \
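
Only the first statement of the mem_region macro contains adap and therefore shows up here; in the driver the macro also advances start by size, which is what lets partition_mem below chain mem_region calls to lay out CM memory regions back to back. A sketch of the complete macro under that assumption:

    #define mem_region(adap, start, size, reg) \
            t3_write_reg((adap), A_ ## reg, (start)); \
            start += size
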
3095 * @adap: the adapter
3101 static void partition_mem(adapter_t *adap, const struct tp_params *p)
3103 unsigned int m, pstructs, tids = t3_mc5_size(&adap->mc5);
3106 if (adap->params.rev > 0) {
3119 t3_write_reg(adap, A_TP_PMM_SIZE,
3122 t3_write_reg(adap, A_TP_PMM_TX_BASE, 0);
3123 t3_write_reg(adap, A_TP_PMM_TX_PAGE_SIZE, p->tx_pg_size);
3124 t3_write_reg(adap, A_TP_PMM_TX_MAX_PAGE, p->tx_num_pgs);
3125 t3_set_reg_field(adap, A_TP_PARA_REG3, V_TXDATAACKIDX(M_TXDATAACKIDX),
3128 t3_write_reg(adap, A_TP_PMM_RX_BASE, 0);
3129 t3_write_reg(adap, A_TP_PMM_RX_PAGE_SIZE, p->rx_pg_size);
3130 t3_write_reg(adap, A_TP_PMM_RX_MAX_PAGE, p->rx_num_pgs);
3136 t3_write_reg(adap, A_TP_CMM_MM_MAX_PSTRUCT, pstructs);
3139 mem_region(adap, m, (64 << 10) * 64, SG_EGR_CNTX_BADDR);
3140 mem_region(adap, m, (64 << 10) * 64, SG_CQ_CONTEXT_BADDR);
3141 t3_write_reg(adap, A_TP_CMM_TIMER_BASE, V_CMTIMERMAXNUM(timers) | m);
3143 mem_region(adap, m, pstructs * 64, TP_CMM_MM_BASE);
3144 mem_region(adap, m, 64 * (pstructs / 24), TP_CMM_MM_PS_FLST_BASE);
3145 mem_region(adap, m, 64 * (p->rx_num_pgs / 24), TP_CMM_MM_RX_FLST_BASE);
3146 mem_region(adap, m, 64 * (p->tx_num_pgs / 24), TP_CMM_MM_TX_FLST_BASE);
3149 t3_write_reg(adap, A_CIM_SDRAM_BASE_ADDR, m);
3150 t3_write_reg(adap, A_CIM_SDRAM_ADDR_SIZE, p->cm_size - m);
3153 m = t3_mc5_size(&adap->mc5) - adap->params.mc5.nservers -
3154 adap->params.mc5.nfilters - adap->params.mc5.nroutes;
3156 adap->params.mc5.nservers += m - tids;
3159 static inline void tp_wr_indirect(adapter_t *adap, unsigned int addr, u32 val)
3161 t3_write_reg(adap, A_TP_PIO_ADDR, addr);
3162 t3_write_reg(adap, A_TP_PIO_DATA, val);
3165 static inline u32 tp_rd_indirect(adapter_t *adap, unsigned int addr)
3167 t3_write_reg(adap, A_TP_PIO_ADDR, addr);
3168 return t3_read_reg(adap, A_TP_PIO_DATA);
3171 static void tp_config(adapter_t *adap, const struct tp_params *p)
3173 t3_write_reg(adap, A_TP_GLOBAL_CONFIG, F_TXPACINGENABLE | F_PATHMTU |
3176 t3_write_reg(adap, A_TP_TCP_OPTIONS, V_MTUDEFAULT(576) |
3179 t3_write_reg(adap, A_TP_DACK_CONFIG, V_AUTOSTATE3(1) |
3183 t3_set_reg_field(adap, A_TP_IN_CONFIG, F_RXFBARBPRIO | F_TXFBARBPRIO,
3185 t3_write_reg(adap, A_TP_TX_RESOURCE_LIMIT, 0x18141814);
3186 t3_write_reg(adap, A_TP_PARA_REG4, 0x5050105);
3187 t3_set_reg_field(adap, A_TP_PARA_REG6, 0,
3188 adap->params.rev > 0 ? F_ENABLEESND :
3190 t3_set_reg_field(adap, A_TP_PC_CONFIG,
3194 t3_set_reg_field(adap, A_TP_PC_CONFIG2, F_CHDRAFULL,
3197 t3_write_reg(adap, A_TP_PROXY_FLOW_CNTL, 1080);
3198 t3_write_reg(adap, A_TP_PROXY_FLOW_CNTL, 1000);
3200 if (adap->params.rev > 0) {
3201 tp_wr_indirect(adap, A_TP_EGRESS_CONFIG, F_REWRITEFORCETOSIZE);
3202 t3_set_reg_field(adap, A_TP_PARA_REG3, 0,
3204 t3_set_reg_field(adap, A_TP_PC_CONFIG, F_LOCKTID, F_LOCKTID);
3205 tp_wr_indirect(adap, A_TP_VLAN_PRI_MAP, 0xfa50);
3206 tp_wr_indirect(adap, A_TP_MAC_MATCH_MAP0, 0xfac688);
3207 tp_wr_indirect(adap, A_TP_MAC_MATCH_MAP1, 0xfac688);
3209 t3_set_reg_field(adap, A_TP_PARA_REG3, 0, F_TXPACEFIXED);
3211 if (adap->params.rev == T3_REV_C)
3212 t3_set_reg_field(adap, A_TP_PC_CONFIG,
3216 t3_write_reg(adap, A_TP_TX_MOD_QUEUE_WEIGHT1, 0);
3217 t3_write_reg(adap, A_TP_TX_MOD_QUEUE_WEIGHT0, 0);
3218 t3_write_reg(adap, A_TP_MOD_CHANNEL_WEIGHT, 0);
3219 t3_write_reg(adap, A_TP_MOD_RATE_LIMIT, 0xf2200000);
3221 if (adap->params.nports > 2) {
3222 t3_set_reg_field(adap, A_TP_PC_CONFIG2, 0,
3225 tp_wr_bits_indirect(adap, A_TP_QOS_RX_MAP_MODE,
3227 tp_wr_indirect(adap, A_TP_INGRESS_CONFIG, V_BITPOS0(48) |
3231 tp_wr_indirect(adap, A_TP_PREAMBLE_MSB, 0xfb000000);
3232 tp_wr_indirect(adap, A_TP_PREAMBLE_LSB, 0xd5);
3233 tp_wr_indirect(adap, A_TP_INTF_FROM_TX_PKT, F_INTFFROMTXPKT);
3243 * @adap: the adapter to set
3249 static void tp_set_timers(adapter_t *adap, unsigned int core_clk)
3251 unsigned int tre = adap->params.tp.tre;
3252 unsigned int dack_re = adap->params.tp.dack_re;
3256 t3_write_reg(adap, A_TP_TIMER_RESOLUTION, V_TIMERRESOLUTION(tre) |
3259 t3_write_reg(adap, A_TP_DACK_TIMER,
3261 t3_write_reg(adap, A_TP_TCP_BACKOFF_REG0, 0x3020100);
3262 t3_write_reg(adap, A_TP_TCP_BACKOFF_REG1, 0x7060504);
3263 t3_write_reg(adap, A_TP_TCP_BACKOFF_REG2, 0xb0a0908);
3264 t3_write_reg(adap, A_TP_TCP_BACKOFF_REG3, 0xf0e0d0c);
3265 t3_write_reg(adap, A_TP_SHIFT_CNT, V_SYNSHIFTMAX(6) |
3272 t3_write_reg(adap, A_TP_MSL,
3273 adap->params.rev > 0 ? 0 : 2 SECONDS);
3274 t3_write_reg(adap, A_TP_RXT_MIN, tps / (1000 / TP_RTO_MIN));
3275 t3_write_reg(adap, A_TP_RXT_MAX, 64 SECONDS);
3276 t3_write_reg(adap, A_TP_PERS_MIN, 5 SECONDS);
3277 t3_write_reg(adap, A_TP_PERS_MAX, 64 SECONDS);
3278 t3_write_reg(adap, A_TP_KEEP_IDLE, 7200 SECONDS);
3279 t3_write_reg(adap, A_TP_KEEP_INTVL, 75 SECONDS);
3280 t3_write_reg(adap, A_TP_INIT_SRTT, 3 SECONDS);
3281 t3_write_reg(adap, A_TP_FINWAIT2_TIMER, 600 SECONDS);
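
The "N SECONDS" operands above look odd out of context: inside tp_set_timers the driver locally defines SECONDS so that "N SECONDS" expands to a multiplication by the TP timer tick rate derived from the core clock and the tre resolution. A sketch of that idiom (the tick-rate derivation and the #define are reconstructions, not lines from this listing):

    unsigned int tps = core_clk >> tre;    /* TP timer ticks per second */

    #define SECONDS * tps
            /* "75 SECONDS" expands to "75 * tps", i.e. 75 s in timer ticks */
            t3_write_reg(adap, A_TP_KEEP_INTVL, 75 SECONDS);
    #undef SECONDS
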
3288 * @adap: the adapter
3294 int t3_tp_set_coalescing_size(adapter_t *adap, unsigned int size, int psh)
3301 val = t3_read_reg(adap, A_TP_PARA_REG3);
3309 t3_write_reg(adap, A_TP_PARA_REG2, V_RXCOALESCESIZE(size) |
3312 t3_write_reg(adap, A_TP_PARA_REG3, val);
3318 * @adap: the adapter
3324 void t3_tp_set_max_rxsize(adapter_t *adap, unsigned int size)
3326 t3_write_reg(adap, A_TP_PARA_REG7,
3404 * @adap: the adapter
3414 void t3_load_mtus(adapter_t *adap, unsigned short mtus[NMTUS],
3431 t3_write_reg(adap, A_TP_MTU_TABLE,
3440 t3_write_reg(adap, A_TP_CCTRL_TABLE, (i << 21) |
3448 * @adap: the adapter
3453 void t3_read_hw_mtus(adapter_t *adap, unsigned short mtus[NMTUS])
3460 t3_write_reg(adap, A_TP_MTU_TABLE, 0xff000000 | i);
3461 val = t3_read_reg(adap, A_TP_MTU_TABLE);
3468 * @adap: the adapter
3474 void t3_get_cong_cntl_tab(adapter_t *adap,
3481 t3_write_reg(adap, A_TP_CCTRL_TABLE,
3483 incr[mtu][w] = (unsigned short)t3_read_reg(adap,
3490 * @adap: the adapter
3495 void t3_tp_get_mib_stats(adapter_t *adap, struct tp_mib_stats *tps)
3497 t3_read_indirect(adap, A_TP_MIB_INDEX, A_TP_MIB_RDATA, (u32 *)tps,
3503 * @adap: the adapter
3508 void t3_read_pace_tbl(adapter_t *adap, unsigned int pace_vals[NTX_SCHED])
3510 unsigned int i, tick_ns = dack_ticks_to_usec(adap, 1000);
3513 t3_write_reg(adap, A_TP_PACE_TABLE, 0xffff0000 + i);
3514 pace_vals[i] = t3_read_reg(adap, A_TP_PACE_TABLE) * tick_ns;
3520 * @adap: the adapter
3527 void t3_set_pace_tbl(adapter_t *adap, unsigned int *pace_vals,
3530 unsigned int tick_ns = dack_ticks_to_usec(adap, 1000);
3533 t3_write_reg(adap, A_TP_PACE_TABLE, (start << 16) |
3537 #define ulp_region(adap, name, start, len) \
3538 t3_write_reg((adap), A_ULPRX_ ## name ## _LLIMIT, (start)); \
3539 t3_write_reg((adap), A_ULPRX_ ## name ## _ULIMIT, \
3543 #define ulptx_region(adap, name, start, len) \
3544 t3_write_reg((adap), A_ULPTX_ ## name ## _LLIMIT, (start)); \
3545 t3_write_reg((adap), A_ULPTX_ ## name ## _ULIMIT, \
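
As with mem_region, only the register writes of ulp_region and ulptx_region match refs:adap. Each macro programs the low/high limit pair of one ULP memory region and, in the driver, then advances start by len, so ulp_config below can carve consecutive regions out of the per-channel RX memory. A sketch of ulp_region under that assumption (ulptx_region differs only in the A_ULPTX_ register prefix):

    #define ulp_region(adap, name, start, len) \
            t3_write_reg((adap), A_ULPRX_ ## name ## _LLIMIT, (start)); \
            t3_write_reg((adap), A_ULPRX_ ## name ## _ULIMIT, \
                         (start) + (len) - 1); \
            (start) += (len)
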
3548 static void ulp_config(adapter_t *adap, const struct tp_params *p)
3552 ulp_region(adap, ISCSI, m, p->chan_rx_size / 8);
3553 ulp_region(adap, TDDP, m, p->chan_rx_size / 8);
3554 ulptx_region(adap, TPT, m, p->chan_rx_size / 4);
3555 ulp_region(adap, STAG, m, p->chan_rx_size / 4);
3556 ulp_region(adap, RQ, m, p->chan_rx_size / 4);
3557 ulptx_region(adap, PBL, m, p->chan_rx_size / 4);
3558 ulp_region(adap, PBL, m, p->chan_rx_size / 4);
3559 t3_write_reg(adap, A_ULPRX_TDDP_TAGMASK, 0xffffffff);
3570 int t3_set_proto_sram(adapter_t *adap, const u8 *data)
3576 t3_write_reg(adap, A_TP_EMBED_OP_FIELD5, cpu_to_be32(*buf++));
3577 t3_write_reg(adap, A_TP_EMBED_OP_FIELD4, cpu_to_be32(*buf++));
3578 t3_write_reg(adap, A_TP_EMBED_OP_FIELD3, cpu_to_be32(*buf++));
3579 t3_write_reg(adap, A_TP_EMBED_OP_FIELD2, cpu_to_be32(*buf++));
3580 t3_write_reg(adap, A_TP_EMBED_OP_FIELD1, cpu_to_be32(*buf++));
3582 t3_write_reg(adap, A_TP_EMBED_OP_FIELD0, i << 1 | 1 << 31);
3583 if (t3_wait_op_done(adap, A_TP_EMBED_OP_FIELD0, 1, 1, 5, 1))
3678 * @adap: the adapter
3684 int t3_config_sched(adapter_t *adap, unsigned int kbps, int sched)
3687 unsigned int clk = adap->params.vpd.cclk * 1000;
3709 t3_write_reg(adap, A_TP_TM_PIO_ADDR,
3711 v = t3_read_reg(adap, A_TP_TM_PIO_DATA);
3716 t3_write_reg(adap, A_TP_TM_PIO_DATA, v);
3722 * @adap: the adapter
3728 int t3_set_sched_ipg(adapter_t *adap, int sched, unsigned int ipg)
3733 ipg *= core_ticks_per_usec(adap);
3738 t3_write_reg(adap, A_TP_TM_PIO_ADDR, addr);
3739 v = t3_read_reg(adap, A_TP_TM_PIO_DATA);
3744 t3_write_reg(adap, A_TP_TM_PIO_DATA, v);
3745 t3_read_reg(adap, A_TP_TM_PIO_DATA);
3751 * @adap: the adapter
3758 void t3_get_tx_sched(adapter_t *adap, unsigned int sched, unsigned int *kbps,
3765 t3_write_reg(adap, A_TP_TM_PIO_ADDR, addr);
3766 v = t3_read_reg(adap, A_TP_TM_PIO_DATA);
3774 v = (adap->params.vpd.cclk * 1000) / cpt;
3780 t3_write_reg(adap, A_TP_TM_PIO_ADDR, addr);
3781 v = t3_read_reg(adap, A_TP_TM_PIO_DATA);
3785 *ipg = (10000 * v) / core_ticks_per_usec(adap);
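
t3_get_tx_sched converts the hardware's per-scheduler encoding, bytes-per-tick (bpt) and core-clocks-per-tick (cpt), back into a rate in kbps; the lines that extract bpt and cpt do not reference adap and so are not listed. The rate arithmetic, with the /125 step converting bytes per second to kbits per second (the kbps line is a reconstruction from the surrounding code, not verbatim source):

    v = (adap->params.vpd.cclk * 1000) / cpt;   /* scheduler ticks per second */
    *kbps = (v * bpt) / 125;                    /* bytes/s -> kbit/s */
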
3791 * @adap: the adapter
3796 static int tp_init(adapter_t *adap, const struct tp_params *p)
3800 tp_config(adap, p);
3801 t3_set_vlan_accel(adap, 3, 0);
3803 if (is_offload(adap)) {
3804 tp_set_timers(adap, adap->params.vpd.cclk * 1000);
3805 t3_write_reg(adap, A_TP_RESET, F_FLSTINITENABLE);
3806 busy = t3_wait_op_done(adap, A_TP_RESET, F_FLSTINITENABLE,
3809 CH_ERR(adap, "TP initialization timed out\n");
3813 t3_write_reg(adap, A_TP_RESET, F_TPRESET);
3819 * @adap: the adapter
3824 int t3_mps_set_active_ports(adapter_t *adap, unsigned int port_mask)
3826 if (port_mask & ~((1 << adap->params.nports) - 1))
3828 t3_set_reg_field(adap, A_MPS_CFG, F_PORT1ACTIVE | F_PORT0ACTIVE,
3835 * @adap: the adapter
3841 static void chan_init_hw(adapter_t *adap, unsigned int chan_map)
3846 t3_set_reg_field(adap, A_ULPRX_CTL, F_ROUND_ROBIN, 0);
3847 t3_set_reg_field(adap, A_ULPTX_CONFIG, F_CFG_RR_ARB, 0);
3848 t3_write_reg(adap, A_MPS_CFG, F_TPRXPORTEN | F_ENFORCEPKT |
3851 t3_write_reg(adap, A_PM1_TX_CFG,
3854 t3_write_reg(adap, A_TP_TX_MOD_QUEUE_REQ_MAP,
3856 t3_write_reg(adap, A_TP_TX_MOD_QUE_TABLE, (12 << 16) | 0xd9c8);
3857 t3_write_reg(adap, A_TP_TX_MOD_QUE_TABLE, (13 << 16) | 0xfbea);
3859 t3_set_reg_field(adap, A_ULPRX_CTL, 0, F_ROUND_ROBIN);
3860 t3_set_reg_field(adap, A_ULPTX_CONFIG, 0, F_CFG_RR_ARB);
3861 t3_write_reg(adap, A_ULPTX_DMA_WEIGHT,
3863 t3_write_reg(adap, A_MPS_CFG, F_TPTXPORT0EN | F_TPTXPORT1EN |
3866 t3_write_reg(adap, A_PM1_TX_CFG, 0x80008000);
3867 t3_set_reg_field(adap, A_TP_PC_CONFIG, 0, F_TXTOSQUEUEMAPMODE);
3868 t3_write_reg(adap, A_TP_TX_MOD_QUEUE_REQ_MAP,
3871 t3_write_reg(adap, A_TP_TX_MOD_QUE_TABLE,
3873 t3_write_reg(adap, A_TP_TX_MOD_QUE_TABLE, (12 << 16) | 0xba98);
3874 t3_write_reg(adap, A_TP_TX_MOD_QUE_TABLE, (13 << 16) | 0xfedc);
4060 static void config_pcie(adapter_t *adap)
4079 t3_os_pci_read_config_2(adap,
4080 adap->params.pci.pcie_cap_addr + PCI_EXP_DEVCTL,
4088 t3_os_pci_read_config_2(adap, 0x2, &devid);
4090 t3_os_pci_write_config_2(adap,
4091 adap->params.pci.pcie_cap_addr + PCI_EXP_DEVCTL,
4096 t3_os_pci_read_config_2(adap,
4097 adap->params.pci.pcie_cap_addr + PCI_EXP_LNKCTL,
4100 fst_trn_tx = G_NUMFSTTRNSEQ(t3_read_reg(adap, A_PCIE_PEX_CTRL0));
4101 fst_trn_rx = adap->params.rev == 0 ? fst_trn_tx :
4102 G_NUMFSTTRNSEQRX(t3_read_reg(adap, A_PCIE_MODE));
4103 log2_width = fls(adap->params.pci.width) - 1;
4109 if (adap->params.rev == 0)
4110 t3_set_reg_field(adap, A_PCIE_PEX_CTRL1,
4114 t3_set_reg_field(adap, A_PCIE_PEX_CTRL1, V_ACKLAT(M_ACKLAT),
4117 t3_set_reg_field(adap, A_PCIE_PEX_CTRL0, V_REPLAYLMT(M_REPLAYLMT),
4120 t3_write_reg(adap, A_PCIE_PEX_ERR, 0xffffffff);
4121 t3_set_reg_field(adap, A_PCIE_CFG, 0,
4414 static int init_parity(adapter_t *adap)
4418 if (t3_read_reg(adap, A_SG_CONTEXT_CMD) & F_CONTEXT_CMD_BUSY)
4422 err = clear_sge_ctxt(adap, i, F_EGRESS);
4424 err = clear_sge_ctxt(adap, i, F_EGRESS);
4426 err = clear_sge_ctxt(adap, i, F_RESPONSEQ);
4430 t3_write_reg(adap, A_CIM_IBQ_DBG_DATA, 0);
4433 t3_write_reg(adap, A_CIM_IBQ_DBG_CFG, F_IBQDBGEN |
4436 err = t3_wait_op_done(adap, A_CIM_IBQ_DBG_CFG,
4600 int t3_reinit_adapter(adapter_t *adap)
4605 early_hw_init(adap, adap->params.info);
4606 ret = init_parity(adap);
4610 if (adap->params.nports > 2 &&
4611 (ret = t3_vsc7323_init(adap, adap->params.nports)))
4614 for_each_port(adap, i) {
4616 struct port_info *p = adap2pinfo(adap, i);
4619 unsigned port_type = adap->params.vpd.port_type[j];
4628 if (j >= ARRAY_SIZE(adap->params.vpd.port_type))