Lines matching defs:ufs, a definition search for the identifier `ufs` in the Samsung Exynos UFS host controller driver (ufs-exynos.c). Each entry keeps its line number in that file; gaps are lines that did not contain the bare token.

21 #include <ufs/ufshcd.h>
23 #include <ufs/ufshci.h>
24 #include <ufs/unipro.h>
26 #include "ufs-exynos.h"
155 static void exynos_ufs_auto_ctrl_hcc(struct exynos_ufs *ufs, bool en);
156 static void exynos_ufs_ctrl_clkstop(struct exynos_ufs *ufs, bool en);
158 static inline void exynos_ufs_enable_auto_ctrl_hcc(struct exynos_ufs *ufs)
160 exynos_ufs_auto_ctrl_hcc(ufs, true);
163 static inline void exynos_ufs_disable_auto_ctrl_hcc(struct exynos_ufs *ufs)
165 exynos_ufs_auto_ctrl_hcc(ufs, false);
169 struct exynos_ufs *ufs, u32 *val)
171 *val = hci_readl(ufs, HCI_MISC);
172 exynos_ufs_auto_ctrl_hcc(ufs, false);
176 struct exynos_ufs *ufs, u32 *val)
178 hci_writel(ufs, *val, HCI_MISC);
181 static inline void exynos_ufs_gate_clks(struct exynos_ufs *ufs)
183 exynos_ufs_ctrl_clkstop(ufs, true);
186 static inline void exynos_ufs_ungate_clks(struct exynos_ufs *ufs)
188 exynos_ufs_ctrl_clkstop(ufs, false);
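The four inline wrappers above (lines 158-188) funnel into the two workers forward-declared at 155-156. Their save/restore companions at 168-179 snapshot HCI_MISC before forcing the automatic core-clock control off and write the snapshot back afterwards; only their parameter and body lines matched, because the opening lines carry no bare `ufs` token. A minimal reconstruction of that pair, with the function names inferred from the call sites at lines 715, 725, 1204 and 1217:

static inline void exynos_ufs_disable_auto_ctrl_hcc_save(
                                struct exynos_ufs *ufs, u32 *val)
{
        *val = hci_readl(ufs, HCI_MISC);        /* snapshot HCI_MISC */
        exynos_ufs_auto_ctrl_hcc(ufs, false);   /* force auto-HCC off */
}

static inline void exynos_ufs_auto_ctrl_hcc_restore(
                                struct exynos_ufs *ufs, u32 *val)
{
        hci_writel(ufs, *val, HCI_MISC);        /* put the saved state back */
}

exynos_ufs_config_smu() (line 711) and exynos_ufs_host_reset() (line 1199) use the pair to bracket register sequences that must run with auto clock control disabled.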
191 static int exynos7_ufs_drv_init(struct device *dev, struct exynos_ufs *ufs)
196 static int exynosauto_ufs_drv_init(struct device *dev, struct exynos_ufs *ufs)
198 struct exynos_ufs_uic_attr *attr = ufs->drv_data->uic_attr;
201 if (ufs->sysreg) {
202 return regmap_update_bits(ufs->sysreg,
203 ufs->shareability_reg_offset,
212 static int exynosauto_ufs_post_hce_enable(struct exynos_ufs *ufs)
214 struct ufs_hba *hba = ufs->hba;
219 hci_writel(ufs, ALLOW_TRANS_VH_DEFAULT, HCI_MH_ALLOWABLE_TRAN_OF_VH);
221 hci_writel(ufs, 0x1, HCI_MH_IID_IN_TASK_TAG);
226 static int exynosauto_ufs_pre_link(struct exynos_ufs *ufs)
228 struct ufs_hba *hba = ufs->hba;
232 rx_line_reset_period = (RX_LINE_RESET_TIME * ufs->mclk_rate) / NSEC_PER_MSEC;
233 tx_line_reset_period = (TX_LINE_RESET_TIME * ufs->mclk_rate) / NSEC_PER_MSEC;
236 for_each_ufs_rx_lane(ufs, i) {
238 DIV_ROUND_UP(NSEC_PER_SEC, ufs->mclk_rate));
253 for_each_ufs_tx_lane(ufs, i) {
255 DIV_ROUND_UP(NSEC_PER_SEC, ufs->mclk_rate));
280 static int exynosauto_ufs_pre_pwr_change(struct exynos_ufs *ufs,
283 struct ufs_hba *hba = ufs->hba;
293 static int exynosauto_ufs_post_pwr_change(struct exynos_ufs *ufs,
296 struct ufs_hba *hba = ufs->hba;
307 static int exynos7_ufs_pre_link(struct exynos_ufs *ufs)
309 struct ufs_hba *hba = ufs->hba;
310 u32 val = ufs->drv_data->uic_attr->pa_dbg_option_suite;
314 for_each_ufs_tx_lane(ufs, i)
316 for_each_ufs_rx_lane(ufs, i) {
322 for_each_ufs_tx_lane(ufs, i)
337 static int exynos7_ufs_post_link(struct exynos_ufs *ufs)
339 struct ufs_hba *hba = ufs->hba;
343 for_each_ufs_tx_lane(ufs, i) {
347 TX_LINERESET_N(exynos_ufs_calc_time_cntr(ufs, 200000)));
358 static int exynos7_ufs_pre_pwr_change(struct exynos_ufs *ufs,
361 unipro_writel(ufs, 0x22, UNIPRO_DBG_FORCE_DME_CTRL_STATE);
366 static int exynos7_ufs_post_pwr_change(struct exynos_ufs *ufs,
369 struct ufs_hba *hba = ufs->hba;
389 static void exynos_ufs_auto_ctrl_hcc(struct exynos_ufs *ufs, bool en)
391 u32 misc = hci_readl(ufs, HCI_MISC);
394 hci_writel(ufs, misc | HCI_CORECLK_CTRL_EN, HCI_MISC);
396 hci_writel(ufs, misc & ~HCI_CORECLK_CTRL_EN, HCI_MISC);
399 static void exynos_ufs_ctrl_clkstop(struct exynos_ufs *ufs, bool en)
401 u32 ctrl = hci_readl(ufs, HCI_CLKSTOP_CTRL);
402 u32 misc = hci_readl(ufs, HCI_MISC);
405 hci_writel(ufs, misc | CLK_CTRL_EN_MASK, HCI_MISC);
406 hci_writel(ufs, ctrl | CLK_STOP_MASK, HCI_CLKSTOP_CTRL);
408 hci_writel(ufs, ctrl & ~CLK_STOP_MASK, HCI_CLKSTOP_CTRL);
409 hci_writel(ufs, misc & ~CLK_CTRL_EN_MASK, HCI_MISC);
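Lines 389-409 are the bodies of the two workers. Both are plain read-modify-write helpers, and in exynos_ufs_ctrl_clkstop() the ordering matters: the gating-control bits in HCI_MISC go on before the clocks are stopped, and come off only after the clocks run again. A sketch of the full function, assuming the en/else split the matcher elided:

static void exynos_ufs_ctrl_clkstop(struct exynos_ufs *ufs, bool en)
{
        u32 ctrl = hci_readl(ufs, HCI_CLKSTOP_CTRL);
        u32 misc = hci_readl(ufs, HCI_MISC);

        if (en) {
                /* enable gating control first, then stop the clocks */
                hci_writel(ufs, misc | CLK_CTRL_EN_MASK, HCI_MISC);
                hci_writel(ufs, ctrl | CLK_STOP_MASK, HCI_CLKSTOP_CTRL);
        } else {
                /* restart the clocks first, then drop gating control */
                hci_writel(ufs, ctrl & ~CLK_STOP_MASK, HCI_CLKSTOP_CTRL);
                hci_writel(ufs, misc & ~CLK_CTRL_EN_MASK, HCI_MISC);
        }
}

exynos_ufs_auto_ctrl_hcc() at 389-396 has the same if/else shape around its two HCI_MISC writes.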
413 static int exynos_ufs_get_clk_info(struct exynos_ufs *ufs)
415 struct ufs_hba *hba = ufs->hba;
429 ufs->clk_hci_core = clki->clk;
431 ufs->clk_unipro_main = clki->clk;
435 if (!ufs->clk_hci_core || !ufs->clk_unipro_main) {
441 ufs->mclk_rate = clk_get_rate(ufs->clk_unipro_main);
442 pclk_rate = clk_get_rate(ufs->clk_hci_core);
443 f_min = ufs->pclk_avail_min;
444 f_max = ufs->pclk_avail_max;
446 if (ufs->opts & EXYNOS_UFS_OPT_HAS_APB_CLK_CTRL) {
462 ufs->pclk_rate = pclk_rate;
463 ufs->pclk_div = div;
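exynos_ufs_get_clk_info() (413-463) picks the HCI core clock and the UniPro main clock out of the core's clock list, then derives pclk_rate, dividing it down on parts with an APB clock divider until it lands in [pclk_avail_min, pclk_avail_max]. A sketch of the lookup loop feeding the assignments at 429 and 431, assuming the clock names "core_clk" and "sclk_unipro_main" from the DT binding:

struct ufs_clk_info *clki;
struct list_head *head = &hba->clk_list_head;

list_for_each_entry(clki, head, list) {
        if (IS_ERR(clki->clk))
                continue;
        if (!strcmp(clki->name, "core_clk"))
                ufs->clk_hci_core = clki->clk;          /* line 429 */
        else if (!strcmp(clki->name, "sclk_unipro_main"))
                ufs->clk_unipro_main = clki->clk;       /* line 431 */
}

If either clock is missing, the check at 435 fails the probe.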
469 static void exynos_ufs_set_unipro_pclk_div(struct exynos_ufs *ufs)
471 if (ufs->opts & EXYNOS_UFS_OPT_HAS_APB_CLK_CTRL) {
474 val = hci_readl(ufs, HCI_UNIPRO_APB_CLK_CTRL);
475 hci_writel(ufs, UNIPRO_APB_CLK(val, ufs->pclk_div),
480 static void exynos_ufs_set_pwm_clk_div(struct exynos_ufs *ufs)
482 struct ufs_hba *hba = ufs->hba;
483 struct exynos_ufs_uic_attr *attr = ufs->drv_data->uic_attr;
489 static void exynos_ufs_calc_pwm_clk_div(struct exynos_ufs *ufs)
491 struct ufs_hba *hba = ufs->hba;
492 struct exynos_ufs_uic_attr *attr = ufs->drv_data->uic_attr;
500 clk_period = UNIPRO_PCLK_PERIOD(ufs);
520 long exynos_ufs_calc_time_cntr(struct exynos_ufs *ufs, long period)
523 long pclk_rate = ufs->pclk_rate;
526 clk_period = UNIPRO_PCLK_PERIOD(ufs);
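exynos_ufs_calc_time_cntr() (520-526) converts a nanosecond period into UniPro-PCLK cycles. UNIPRO_PCLK_PERIOD(ufs) is the PCLK period in whole nanoseconds, so the function carries the integer-division remainder in fixed point to avoid drifting on non-integer periods. A sketch of the arithmetic, assuming the fixed-point correction the matcher elided:

long exynos_ufs_calc_time_cntr(struct exynos_ufs *ufs, long period)
{
        const int precise = 10;                 /* one fractional digit */
        long pclk_rate = ufs->pclk_rate;
        long clk_period, fraction;

        clk_period = UNIPRO_PCLK_PERIOD(ufs);   /* ns per PCLK cycle */
        fraction = ((NSEC_PER_SEC % pclk_rate) * precise) / pclk_rate;

        /* cycles = period / (clk_period + fraction/precise) */
        return (period * precise) / ((clk_period * precise) + fraction);
}

exynos_ufs_specify_phy_time_attr() below runs every nanosecond-valued attribute through this conversion to fill ufs->t_cfg.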
532 static void exynos_ufs_specify_phy_time_attr(struct exynos_ufs *ufs)
534 struct exynos_ufs_uic_attr *attr = ufs->drv_data->uic_attr;
535 struct ufs_phy_time_cfg *t_cfg = &ufs->t_cfg;
538 exynos_ufs_calc_time_cntr(ufs, attr->tx_dif_p_nsec);
540 exynos_ufs_calc_time_cntr(ufs, attr->tx_dif_n_nsec);
542 exynos_ufs_calc_time_cntr(ufs, attr->tx_high_z_cnt_nsec);
544 exynos_ufs_calc_time_cntr(ufs, attr->tx_base_unit_nsec);
546 exynos_ufs_calc_time_cntr(ufs, attr->tx_gran_unit_nsec);
548 exynos_ufs_calc_time_cntr(ufs, attr->tx_sleep_cnt);
551 exynos_ufs_calc_time_cntr(ufs, attr->rx_dif_p_nsec);
553 exynos_ufs_calc_time_cntr(ufs, attr->rx_hibern8_wait_nsec);
555 exynos_ufs_calc_time_cntr(ufs, attr->rx_base_unit_nsec);
557 exynos_ufs_calc_time_cntr(ufs, attr->rx_gran_unit_nsec);
559 exynos_ufs_calc_time_cntr(ufs, attr->rx_sleep_cnt);
561 exynos_ufs_calc_time_cntr(ufs, attr->rx_stall_cnt);
564 static void exynos_ufs_config_phy_time_attr(struct exynos_ufs *ufs)
566 struct ufs_hba *hba = ufs->hba;
567 struct ufs_phy_time_cfg *t_cfg = &ufs->t_cfg;
570 exynos_ufs_set_pwm_clk_div(ufs);
574 for_each_ufs_rx_lane(ufs, i) {
576 ufs->drv_data->uic_attr->rx_filler_enable);
593 for_each_ufs_tx_lane(ufs, i) {
612 ufs->drv_data->uic_attr->tx_min_activatetime);
618 static void exynos_ufs_config_phy_cap_attr(struct exynos_ufs *ufs)
620 struct ufs_hba *hba = ufs->hba;
621 struct exynos_ufs_uic_attr *attr = ufs->drv_data->uic_attr;
626 for_each_ufs_rx_lane(ufs, i) {
648 for_each_ufs_rx_lane(ufs, i) {
664 for_each_ufs_rx_lane(ufs, i) {
688 static void exynos_ufs_establish_connt(struct exynos_ufs *ufs)
690 struct ufs_hba *hba = ufs->hba;
711 static void exynos_ufs_config_smu(struct exynos_ufs *ufs)
715 exynos_ufs_disable_auto_ctrl_hcc_save(ufs, &val);
718 reg = ufsp_readl(ufs, UFSPRSECURITY);
719 ufsp_writel(ufs, reg | NSSMU, UFSPRSECURITY);
720 ufsp_writel(ufs, 0x0, UFSPSBEGIN0);
721 ufsp_writel(ufs, 0xffffffff, UFSPSEND0);
722 ufsp_writel(ufs, 0xff, UFSPSLUN0);
723 ufsp_writel(ufs, 0xf1, UFSPSCTRL0);
725 exynos_ufs_auto_ctrl_hcc_restore(ufs, &val);
728 static void exynos_ufs_config_sync_pattern_mask(struct exynos_ufs *ufs,
731 struct ufs_hba *hba = ufs->hba;
750 mask = exynos_ufs_calc_time_cntr(ufs, sync_len);
755 for_each_ufs_rx_lane(ufs, i)
766 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
767 struct phy *generic_phy = ufs->phy;
785 if (ufs->drv_data->pre_pwr_change)
786 ufs->drv_data->pre_pwr_change(ufs, dev_req_params);
789 exynos_ufs_config_sync_pattern_mask(ufs, dev_req_params);
813 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
814 struct phy *generic_phy = ufs->phy;
826 if (ufs->drv_data->post_pwr_change)
827 ufs->drv_data->post_pwr_change(ufs, pwr_req);
853 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
856 type = hci_readl(ufs, HCI_UTRL_NEXUS_TYPE);
859 hci_writel(ufs, type | (1 << tag), HCI_UTRL_NEXUS_TYPE);
861 hci_writel(ufs, type & ~(1 << tag), HCI_UTRL_NEXUS_TYPE);
867 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
870 type = hci_readl(ufs, HCI_UTMRL_NEXUS_TYPE);
875 hci_writel(ufs, type | (1 << tag), HCI_UTMRL_NEXUS_TYPE);
881 hci_writel(ufs, type & ~(1 << tag), HCI_UTMRL_NEXUS_TYPE);
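The pair at 853-881 keeps per-slot bitmaps in the nexus-type registers: one bit per transfer-request slot in HCI_UTRL_NEXUS_TYPE and one per task-management slot in HCI_UTMRL_NEXUS_TYPE, set while a SCSI command owns the tag and cleared otherwise. A sketch of the transfer-request variant, assuming the is_scsi_cmd form of the hook:

static void exynos_ufs_specify_nexus_t_xfer_req(struct ufs_hba *hba,
                                                int tag, bool is_scsi_cmd)
{
        struct exynos_ufs *ufs = ufshcd_get_variant(hba);
        u32 type;

        type = hci_readl(ufs, HCI_UTRL_NEXUS_TYPE);

        if (is_scsi_cmd)        /* SCSI command: mark the slot */
                hci_writel(ufs, type | (1 << tag), HCI_UTRL_NEXUS_TYPE);
        else                    /* device-management command: clear it */
                hci_writel(ufs, type & ~(1 << tag), HCI_UTRL_NEXUS_TYPE);
}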
886 static int exynos_ufs_phy_init(struct exynos_ufs *ufs)
888 struct ufs_hba *hba = ufs->hba;
889 struct phy *generic_phy = ufs->phy;
892 if (ufs->avail_ln_rx == 0 || ufs->avail_ln_tx == 0) {
894 &ufs->avail_ln_rx);
896 &ufs->avail_ln_tx);
897 WARN(ufs->avail_ln_rx != ufs->avail_ln_tx,
899 ufs->avail_ln_rx, ufs->avail_ln_tx);
902 phy_set_bus_width(generic_phy, ufs->avail_ln_rx);
922 static void exynos_ufs_config_unipro(struct exynos_ufs *ufs)
924 struct ufs_hba *hba = ufs->hba;
927 DIV_ROUND_UP(NSEC_PER_SEC, ufs->mclk_rate));
929 ufs->drv_data->uic_attr->tx_trailingclks);
931 ufs->drv_data->uic_attr->pa_dbg_option_suite);
934 static void exynos_ufs_config_intr(struct exynos_ufs *ufs, u32 errs, u8 index)
938 hci_writel(ufs, DFES_ERR_EN | errs, HCI_ERR_EN_PA_LAYER);
941 hci_writel(ufs, DFES_ERR_EN | errs, HCI_ERR_EN_DL_LAYER);
944 hci_writel(ufs, DFES_ERR_EN | errs, HCI_ERR_EN_N_LAYER);
947 hci_writel(ufs, DFES_ERR_EN | errs, HCI_ERR_EN_T_LAYER);
950 hci_writel(ufs, DFES_ERR_EN | errs, HCI_ERR_EN_DME_LAYER);
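exynos_ufs_config_intr() (934-950) routes one DFES error-enable write to the register of the requested UniPro layer; the five matched writes are the case bodies. A sketch of the dispatch, with the layer-index names (UNIPRO_L1_5 through UNIPRO_DME) assumed beyond the three confirmed by the call sites at 981-983:

static void exynos_ufs_config_intr(struct exynos_ufs *ufs, u32 errs, u8 index)
{
        switch (index) {
        case UNIPRO_L1_5:       /* PHY adapter layer */
                hci_writel(ufs, DFES_ERR_EN | errs, HCI_ERR_EN_PA_LAYER);
                break;
        case UNIPRO_L2:         /* data link layer */
                hci_writel(ufs, DFES_ERR_EN | errs, HCI_ERR_EN_DL_LAYER);
                break;
        case UNIPRO_L3:         /* network layer */
                hci_writel(ufs, DFES_ERR_EN | errs, HCI_ERR_EN_N_LAYER);
                break;
        case UNIPRO_L4:         /* transport layer */
                hci_writel(ufs, DFES_ERR_EN | errs, HCI_ERR_EN_T_LAYER);
                break;
        case UNIPRO_DME:        /* device management entity */
                hci_writel(ufs, DFES_ERR_EN | errs, HCI_ERR_EN_DME_LAYER);
                break;
        }
}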
958 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
960 if (!ufs)
964 if (ufs->opts & EXYNOS_UFS_OPT_BROKEN_AUTO_CLK_CTRL)
965 exynos_ufs_disable_auto_ctrl_hcc(ufs);
966 exynos_ufs_ungate_clks(ufs);
968 exynos_ufs_gate_clks(ufs);
969 if (ufs->opts & EXYNOS_UFS_OPT_BROKEN_AUTO_CLK_CTRL)
970 exynos_ufs_enable_auto_ctrl_hcc(ufs);
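The .setup_clocks hook (958-970) keeps the controller usable across gating transitions: ungate (and on broken-auto-clock-control parts, disable the hardware auto control first) before the core starts using the clocks, and the reverse after they go idle. A sketch of the hook's shape, assuming the standard on/PRE_CHANGE plumbing:

static int exynos_ufs_setup_clocks(struct ufs_hba *hba, bool on,
                                   enum ufs_notify_change_status status)
{
        struct exynos_ufs *ufs = ufshcd_get_variant(hba);

        if (!ufs)
                return 0;

        if (on && status == PRE_CHANGE) {
                if (ufs->opts & EXYNOS_UFS_OPT_BROKEN_AUTO_CLK_CTRL)
                        exynos_ufs_disable_auto_ctrl_hcc(ufs);
                exynos_ufs_ungate_clks(ufs);
        } else if (!on && status == POST_CHANGE) {
                exynos_ufs_gate_clks(ufs);
                if (ufs->opts & EXYNOS_UFS_OPT_BROKEN_AUTO_CLK_CTRL)
                        exynos_ufs_enable_auto_ctrl_hcc(ufs);
        }

        return 0;
}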
978 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
981 exynos_ufs_config_intr(ufs, DFES_DEF_L2_ERRS, UNIPRO_L2);
982 exynos_ufs_config_intr(ufs, DFES_DEF_L3_ERRS, UNIPRO_L3);
983 exynos_ufs_config_intr(ufs, DFES_DEF_L4_ERRS, UNIPRO_L4);
984 exynos_ufs_set_unipro_pclk_div(ufs);
987 exynos_ufs_config_unipro(ufs);
990 exynos_ufs_phy_init(ufs);
991 if (!(ufs->opts & EXYNOS_UFS_OPT_SKIP_CONFIG_PHY_ATTR)) {
992 exynos_ufs_config_phy_time_attr(ufs);
993 exynos_ufs_config_phy_cap_attr(ufs);
998 if (ufs->drv_data->pre_link)
999 ufs->drv_data->pre_link(ufs);
1004 static void exynos_ufs_fit_aggr_timeout(struct exynos_ufs *ufs)
1008 val = exynos_ufs_calc_time_cntr(ufs, IATOVAL_NSEC / CNTR_DIV_VAL);
1009 hci_writel(ufs, val & CNT_VAL_1US_MASK, HCI_1US_TO_CNT_VAL);
1014 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
1015 struct phy *generic_phy = ufs->phy;
1016 struct exynos_ufs_uic_attr *attr = ufs->drv_data->uic_attr;
1018 exynos_ufs_establish_connt(ufs);
1019 exynos_ufs_fit_aggr_timeout(ufs);
1021 hci_writel(ufs, 0xa, HCI_DATA_REORDER);
1022 hci_writel(ufs, PRDT_SET_SIZE(12), HCI_TXPRDT_ENTRY_SIZE);
1023 hci_writel(ufs, PRDT_SET_SIZE(12), HCI_RXPRDT_ENTRY_SIZE);
1024 hci_writel(ufs, (1 << hba->nutrs) - 1, HCI_UTRL_NEXUS_TYPE);
1025 hci_writel(ufs, (1 << hba->nutmrs) - 1, HCI_UTMRL_NEXUS_TYPE);
1026 hci_writel(ufs, 0xf, HCI_AXIDMA_RWDATA_BURST_LEN);
1028 if (ufs->opts & EXYNOS_UFS_OPT_SKIP_CONNECTION_ESTAB)
1042 !(ufs->opts & EXYNOS_UFS_OPT_USE_SW_HIBERN8_TIMER))
1047 if (ufs->opts & EXYNOS_UFS_OPT_USE_SW_HIBERN8_TIMER) {
1071 if (ufs->drv_data->post_link)
1072 ufs->drv_data->post_link(ufs);
1077 static int exynos_ufs_parse_dt(struct device *dev, struct exynos_ufs *ufs)
1083 ufs->drv_data = device_get_match_data(dev);
1085 if (ufs->drv_data && ufs->drv_data->uic_attr) {
1086 attr = ufs->drv_data->uic_attr;
1093 ufs->sysreg = syscon_regmap_lookup_by_phandle(np, "samsung,sysreg");
1094 if (IS_ERR(ufs->sysreg))
1095 ufs->sysreg = NULL;
1098 &ufs->shareability_reg_offset)) {
1100 ufs->shareability_reg_offset = UFS_SHAREABILITY_OFFSET;
1104 ufs->pclk_avail_min = PCLK_AVAIL_MIN;
1105 ufs->pclk_avail_max = PCLK_AVAIL_MAX;
1119 struct exynos_ufs *ufs)
1121 ufs->hba = hba;
1122 ufs->opts = ufs->drv_data->opts;
1123 ufs->rx_sel_idx = PA_MAXDATALANES;
1124 if (ufs->opts & EXYNOS_UFS_OPT_BROKEN_RX_SEL_IDX)
1125 ufs->rx_sel_idx = 0;
1126 hba->priv = (void *)ufs;
1127 hba->quirks = ufs->drv_data->quirks;
1134 struct exynos_ufs *ufs;
1137 ufs = devm_kzalloc(dev, sizeof(*ufs), GFP_KERNEL);
1138 if (!ufs)
1142 ufs->reg_hci = devm_platform_ioremap_resource_byname(pdev, "vs_hci");
1143 if (IS_ERR(ufs->reg_hci)) {
1145 return PTR_ERR(ufs->reg_hci);
1149 ufs->reg_unipro = devm_platform_ioremap_resource_byname(pdev, "unipro");
1150 if (IS_ERR(ufs->reg_unipro)) {
1152 return PTR_ERR(ufs->reg_unipro);
1155 /* ufs protector */
1156 ufs->reg_ufsp = devm_platform_ioremap_resource_byname(pdev, "ufsp");
1157 if (IS_ERR(ufs->reg_ufsp)) {
1158 dev_err(dev, "cannot ioremap for ufs protector register\n");
1159 return PTR_ERR(ufs->reg_ufsp);
1162 ret = exynos_ufs_parse_dt(dev, ufs);
1168 ufs->phy = devm_phy_get(dev, "ufs-phy");
1169 if (IS_ERR(ufs->phy)) {
1170 ret = PTR_ERR(ufs->phy);
1171 dev_err(dev, "failed to get ufs-phy\n");
1175 exynos_ufs_priv_init(hba, ufs);
1177 if (ufs->drv_data->drv_init) {
1178 ret = ufs->drv_data->drv_init(dev, ufs);
1185 ret = exynos_ufs_get_clk_info(ufs);
1188 exynos_ufs_specify_phy_time_attr(ufs);
1189 exynos_ufs_config_smu(ufs);
1199 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
1204 exynos_ufs_disable_auto_ctrl_hcc_save(ufs, &val);
1206 hci_writel(ufs, UFS_SW_RST_MASK, HCI_SW_RST);
1209 if (!(hci_readl(ufs, HCI_SW_RST) & UFS_SW_RST_MASK))
1217 exynos_ufs_auto_ctrl_hcc_restore(ufs, &val);
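exynos_ufs_host_reset() (1199-1217) saves the auto-HCC state, asserts UFS_SW_RST_MASK in HCI_SW_RST, and polls until the hardware clears it before restoring the saved state. A sketch of the loop; the 1 ms budget and the error path are assumptions:

static int exynos_ufs_host_reset(struct ufs_hba *hba)
{
        struct exynos_ufs *ufs = ufshcd_get_variant(hba);
        unsigned long timeout = jiffies + msecs_to_jiffies(1);  /* assumed budget */
        u32 val;
        int ret = 0;

        exynos_ufs_disable_auto_ctrl_hcc_save(ufs, &val);

        hci_writel(ufs, UFS_SW_RST_MASK, HCI_SW_RST);           /* line 1206 */

        do {
                if (!(hci_readl(ufs, HCI_SW_RST) & UFS_SW_RST_MASK))
                        goto out;                               /* reset done */
        } while (time_before(jiffies, timeout));

        dev_err(hba->dev, "timeout host sw-reset\n");
        ret = -ETIMEDOUT;

out:
        exynos_ufs_auto_ctrl_hcc_restore(ufs, &val);
        return ret;
}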
1223 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
1225 hci_writel(ufs, 0 << 0, HCI_GPIO_OUT);
1227 hci_writel(ufs, 1 << 0, HCI_GPIO_OUT);
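exynos_ufs_dev_hw_reset() (1223-1227) hardware-resets the attached device through GPIO_OUT bit 0: drive RST_n low, hold, release. A sketch; the hold time is illustrative:

static void exynos_ufs_dev_hw_reset(struct ufs_hba *hba)
{
        struct exynos_ufs *ufs = ufshcd_get_variant(hba);

        /* bit 0 of HCI_GPIO_OUT drives the device RST_n pad */
        hci_writel(ufs, 0 << 0, HCI_GPIO_OUT);  /* assert reset */
        udelay(5);                              /* illustrative hold time */
        hci_writel(ufs, 1 << 0, HCI_GPIO_OUT);  /* release reset */
}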
1232 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
1233 struct exynos_ufs_uic_attr *attr = ufs->drv_data->uic_attr;
1236 if (ufs->opts & EXYNOS_UFS_OPT_BROKEN_AUTO_CLK_CTRL)
1237 exynos_ufs_disable_auto_ctrl_hcc(ufs);
1238 exynos_ufs_ungate_clks(ufs);
1240 if (ufs->opts & EXYNOS_UFS_OPT_USE_SW_HIBERN8_TIMER) {
1251 ufs->entry_hibern8_t);
1265 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
1271 if (ufshcd_is_hs_mode(&ufs->dev_req_params))
1284 if (!(ufs->opts & EXYNOS_UFS_OPT_SKIP_CONNECTION_ESTAB))
1285 exynos_ufs_establish_connt(ufs);
1287 ufs->entry_hibern8_t = ktime_get();
1288 exynos_ufs_gate_clks(ufs);
1289 if (ufs->opts & EXYNOS_UFS_OPT_BROKEN_AUTO_CLK_CTRL)
1290 exynos_ufs_enable_auto_ctrl_hcc(ufs);
1297 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
1310 if (ufs->drv_data->pre_hce_enable) {
1311 ret = ufs->drv_data->pre_hce_enable(ufs);
1322 exynos_ufs_calc_pwm_clk_div(ufs);
1323 if (!(ufs->opts & EXYNOS_UFS_OPT_BROKEN_AUTO_CLK_CTRL))
1324 exynos_ufs_enable_auto_ctrl_hcc(ufs);
1326 if (ufs->drv_data->post_hce_enable)
1327 ret = ufs->drv_data->post_hce_enable(ufs);
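The HCE-enable notifier (1297-1327) gives drv_data a hook on both sides of controller enable: pre_hce_enable runs before the host controller comes up, and once it is up the driver recomputes the PWM clock divider, re-enables the hardware auto clock control where it works, and calls post_hce_enable. A sketch of the dispatch, assuming the usual PRE/POST switch:

static int exynos_ufs_hce_enable_notify(struct ufs_hba *hba,
                                        enum ufs_notify_change_status status)
{
        struct exynos_ufs *ufs = ufshcd_get_variant(hba);
        int ret = 0;

        switch (status) {
        case PRE_CHANGE:
                if (ufs->drv_data->pre_hce_enable) {            /* line 1310 */
                        ret = ufs->drv_data->pre_hce_enable(ufs);
                        if (ret)
                                return ret;
                }
                break;
        case POST_CHANGE:
                exynos_ufs_calc_pwm_clk_div(ufs);               /* line 1322 */
                if (!(ufs->opts & EXYNOS_UFS_OPT_BROKEN_AUTO_CLK_CTRL))
                        exynos_ufs_enable_auto_ctrl_hcc(ufs);

                if (ufs->drv_data->post_hce_enable)             /* line 1326 */
                        ret = ufs->drv_data->post_hce_enable(ufs);
                break;
        }

        return ret;
}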
1389 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
1395 phy_power_off(ufs->phy);
1402 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
1405 phy_power_on(ufs->phy);
1407 exynos_ufs_config_smu(ufs);
1449 struct exynos_ufs *ufs;
1452 ufs = devm_kzalloc(dev, sizeof(*ufs), GFP_KERNEL);
1453 if (!ufs)
1457 ufs->reg_hci = devm_platform_ioremap_resource_byname(pdev, "vs_hci");
1458 if (IS_ERR(ufs->reg_hci)) {
1460 return PTR_ERR(ufs->reg_hci);
1467 ufs->drv_data = device_get_match_data(dev);
1468 if (!ufs->drv_data)
1471 exynos_ufs_priv_init(hba, ufs);
1476 static int fsd_ufs_pre_link(struct exynos_ufs *ufs)
1479 struct ufs_hba *hba = ufs->hba;
1482 DIV_ROUND_UP(NSEC_PER_SEC, ufs->mclk_rate));
1486 for_each_ufs_tx_lane(ufs, i) {
1488 DIV_ROUND_UP(NSEC_PER_SEC, ufs->mclk_rate));
1492 for_each_ufs_rx_lane(ufs, i) {
1494 DIV_ROUND_UP(NSEC_PER_SEC, ufs->mclk_rate));
1508 exynos_ufs_establish_connt(ufs);
1518 static int fsd_ufs_post_link(struct exynos_ufs *ufs)
1521 struct ufs_hba *hba = ufs->hba;
1544 for_each_ufs_rx_lane(ufs, i) {
1556 static int fsd_ufs_pre_pwr_change(struct exynos_ufs *ufs,
1559 struct ufs_hba *hba = ufs->hba;
1567 unipro_writel(ufs, 12000, UNIPRO_DME_POWERMODE_REQ_REMOTEL2TIMER0);
1568 unipro_writel(ufs, 32000, UNIPRO_DME_POWERMODE_REQ_REMOTEL2TIMER1);
1569 unipro_writel(ufs, 16000, UNIPRO_DME_POWERMODE_REQ_REMOTEL2TIMER2);
1616 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
1621 phy_power_off(ufs->phy);
1622 phy_exit(ufs->phy);
1741 { .compatible = "samsung,exynos7-ufs",
1743 { .compatible = "samsung,exynosautov9-ufs",
1745 { .compatible = "samsung,exynosautov9-ufs-vh",
1747 { .compatible = "tesla,fsd-ufs",