Lines Matching refs:igu_sb_id

1491 				   u16 igu_sb_id, u32 pi_index,
1501 sb_offset = igu_sb_id * PIS_PER_SB_E4;
1528 _ecore_int_cau_conf_pi(p_hwfn, p_ptt, p_sb->igu_sb_id,
1534 dma_addr_t sb_phys, u16 igu_sb_id,
1548 igu_sb_id * sizeof(u64), 2,
1552 igu_sb_id * sizeof(u64), 2,
1557 CAU_REG_SB_ADDR_MEMORY_RT_OFFSET+igu_sb_id*2,
1561 CAU_REG_SB_VAR_MEMORY_RT_OFFSET+igu_sb_id*2,
1583 _ecore_int_cau_conf_pi(p_hwfn, p_ptt, igu_sb_id, RX_PI,
1596 igu_sb_id, TX_PI(i),
1613 sb_info->igu_sb_id, 0, 0);
1661 u16 igu_sb_id;
1665 igu_sb_id = p_hwfn->hw_info.p_igu_info->igu_dsb_id;
1667 igu_sb_id = ecore_get_pf_igu_sb_id(p_hwfn, sb_id + 1);
1669 igu_sb_id = ecore_vf_get_igu_sb_id(p_hwfn, sb_id);
1671 if (igu_sb_id == ECORE_SB_INVALID_IDX)
1677 "Slowpath SB index in IGU is 0x%04x\n", igu_sb_id);
1680 "SB [%04x] <--> IGU SB [%04x]\n", sb_id, igu_sb_id);
1682 return igu_sb_id;
1695 sb_info->igu_sb_id = ecore_get_igu_sb_id(p_hwfn, sb_id);
1697 if (sb_info->igu_sb_id == ECORE_SB_INVALID_IDX)
1707 p_block = &p_info->entry[sb_info->igu_sb_id];
1728 (sb_info->igu_sb_id << 3);
1734 ((IGU_CMD_INT_ACK_BASE + sb_info->igu_sb_id) << 3);
1764 p_block = &p_info->entry[sb_info->igu_sb_id];
1875 return p_hwfn->p_sp_sb->sb_info.igu_sb_id;
1975 u16 igu_sb_id,
1980 u32 pxp_addr = IGU_CMD_INT_ACK_BASE + igu_sb_id;
2011 sb_bit = 1 << (igu_sb_id % 32);
2012 sb_bit_addr = igu_sb_id / 32 * sizeof(u32);
2027 val, igu_sb_id);
2032 u16 igu_sb_id, u16 opaque, bool b_set)
2037 p_block = &p_hwfn->hw_info.p_igu_info->entry[igu_sb_id];
2040 igu_sb_id, p_block->function_id, p_block->is_pf,
2045 ecore_int_igu_cleanup_sb(p_hwfn, p_ptt, igu_sb_id, 1, opaque);
2048 ecore_int_igu_cleanup_sb(p_hwfn, p_ptt, igu_sb_id, 0, opaque);
2056 ((igu_sb_id / 32) * 4));
2057 if (val & (1 << (igu_sb_id % 32)))
2065 igu_sb_id);
2070 CAU_REG_PI_MEMORY + (igu_sb_id * 12 + pi) * 4, 0);
2080 u16 igu_sb_id = 0;
2090 for (igu_sb_id = 0;
2091 igu_sb_id < ECORE_MAPPING_MEMORY_SIZE(p_hwfn->p_dev);
2092 igu_sb_id++) {
2093 p_block = &p_info->entry[igu_sb_id];
2100 ecore_int_igu_init_pure_rt_single(p_hwfn, p_ptt, igu_sb_id,
2118 u16 igu_sb_id;
2183 for (igu_sb_id = p_info->igu_dsb_id;
2184 igu_sb_id < ECORE_MAPPING_MEMORY_SIZE(p_hwfn->p_dev);
2185 igu_sb_id++) {
2186 p_block = &p_info->entry[igu_sb_id];
2233 sizeof(u32) * igu_sb_id);
2238 sizeof(u32) * igu_sb_id,
2243 igu_sb_id, p_block->function_id,
2274 u16 igu_sb_id)
2277 IGU_REG_MAPPING_MEMORY + sizeof(u32) * igu_sb_id);
2280 p_block = &p_hwfn->hw_info.p_igu_info->entry[igu_sb_id];
2288 p_block->igu_sb_id = igu_sb_id;
2297 u16 igu_sb_id;
2317 for (igu_sb_id = 0;
2318 igu_sb_id < ECORE_MAPPING_MEMORY_SIZE(p_hwfn->p_dev);
2319 igu_sb_id++) {
2321 ecore_int_igu_read_cam_block(p_hwfn, p_ptt, igu_sb_id);
2322 p_block = &p_igu_info->entry[igu_sb_id];
2348 p_igu_info->igu_dsb_id = igu_sb_id;
2360 igu_sb_id, p_block->function_id,
2389 u16 igu_sb_id = 0, vf_num = 0;
2407 igu_sb_id = ecore_get_pf_igu_sb_id(p_hwfn, sb_id + 1);
2408 if (igu_sb_id == ECORE_SB_INVALID_IDX)
2425 for (; igu_sb_id < ECORE_MAPPING_MEMORY_SIZE(p_hwfn->p_dev);
2426 igu_sb_id++) {
2427 p_block = &p_info->entry[igu_sb_id];
2441 if (igu_sb_id == ECORE_MAPPING_MEMORY_SIZE(p_hwfn->p_dev)) {
2501 IGU_REG_MAPPING_MEMORY + sizeof(u32) * igu_sb_id,
2505 igu_sb_id, vf_num,
2510 igu_sb_id, p_block->function_id,
2678 u16 sbid = p_sb->igu_sb_id;
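
The matches above boil down to a handful of address-arithmetic patterns applied to igu_sb_id in the ecore IGU/CAU interrupt code: per-SB PI entries in CAU_REG_PI_MEMORY, 64-bit CAU SB entries, 32-bit IGU CAM (mapping memory) entries, the shifted ack command address, and a packed per-SB bit array. The sketch below is a minimal, standalone restatement of that arithmetic for illustration only: the helper names are invented here, the register base values are placeholders (the real ones live in the driver's register headers), and PIS_PER_SB_E4 = 12 is inferred from the (igu_sb_id * 12 + pi) form at line 2070. It is not the driver's code, just the offsets the matched lines compute.

/*
 * Illustrative sketch only -- not part of the cross-reference output above.
 * Register bases are placeholders; helper names are invented for this example.
 */
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

#define CAU_REG_PI_MEMORY       0x0  /* placeholder base address */
#define CAU_REG_SB_VAR_MEMORY   0x0  /* placeholder base address */
#define IGU_REG_MAPPING_MEMORY  0x0  /* placeholder base address */
#define IGU_CMD_INT_ACK_BASE    0x0  /* placeholder command base */
#define PIS_PER_SB_E4           12   /* assumed; matches (igu_sb_id * 12 + pi) at line 2070 */

/* CAU PI entry: one 32-bit word per PI, PIS_PER_SB_E4 PIs per SB (lines 1501, 2070). */
static uint32_t cau_pi_addr(uint16_t igu_sb_id, uint32_t pi_index)
{
	return CAU_REG_PI_MEMORY +
	       (igu_sb_id * PIS_PER_SB_E4 + pi_index) * sizeof(uint32_t);
}

/* CAU SB variable entry: one 64-bit entry per SB (lines 1548, 1552). */
static uint32_t cau_sb_var_addr(uint16_t igu_sb_id)
{
	return CAU_REG_SB_VAR_MEMORY + igu_sb_id * sizeof(uint64_t);
}

/* IGU CAM (mapping memory) entry: one 32-bit word per SB (lines 2277, 2501). */
static uint32_t igu_cam_addr(uint16_t igu_sb_id)
{
	return IGU_REG_MAPPING_MEMORY + sizeof(uint32_t) * igu_sb_id;
}

/* Ack command address: command index shifted by 3 (lines 1728, 1734, 1980). */
static uint32_t igu_ack_offset(uint16_t igu_sb_id)
{
	return (IGU_CMD_INT_ACK_BASE + igu_sb_id) << 3;
}

/* Per-SB bit in a packed 32-bit status array (lines 2011-2012, 2056-2057). */
static void igu_sb_bit(uint16_t igu_sb_id, uint32_t *bit, uint32_t *word_off)
{
	*bit = 1u << (igu_sb_id % 32);
	*word_off = (igu_sb_id / 32) * sizeof(uint32_t);
}

int main(void)
{
	uint16_t sb = 37;
	uint32_t bit, word_off;

	igu_sb_bit(sb, &bit, &word_off);
	printf("SB %u: PI0 0x%" PRIx32 ", CAU var 0x%" PRIx32 ", CAM 0x%" PRIx32
	       ", ack 0x%" PRIx32 ", bit 0x%" PRIx32 " @ +0x%" PRIx32 "\n",
	       (unsigned)sb, cau_pi_addr(sb, 0), cau_sb_var_addr(sb),
	       igu_cam_addr(sb), igu_ack_offset(sb), bit, word_off);
	return 0;
}

With the placeholder bases the printed values are only the per-SB offsets; the point is the strides (4 bytes per PI, 8 bytes per CAU SB entry, 4 bytes per CAM entry, 32 SBs per status word) that the matched lines rely on.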