Lines Matching refs:dm

164 * **dm**) sits between DRM and DC. It acts as a liaison, converting DRM
220 static void amdgpu_dm_destroy_drm_device(struct amdgpu_display_manager *dm);
222 static int amdgpu_dm_connector_init(struct amdgpu_display_manager *dm,
279 struct dc *dc = adev->dm.dc;
563 adev->dm.freesync_module,
568 adev->dm.dc,
616 dc_stream_fc_disable_writeback(adev->dm.dc,
655 mod_freesync_handle_v_update(adev->dm.freesync_module,
659 dc_stream_adjust_vmin_vmax(adev->dm.dc, acrtc->dm_irq_params.stream,
722 if (adev->dm.dmub_notify)
723 memcpy(adev->dm.dmub_notify, notify, sizeof(struct dmub_notification));
725 complete(&adev->dm.dmub_aux_transfer_done);
755 if (notify->link_index > adev->dm.dc->link_count) {
761 link = adev->dm.dc->links[link_index];
762 dev = adev->dm.ddev;
811 if (callback != NULL && type < ARRAY_SIZE(adev->dm.dmub_thread_offload)) {
812 adev->dm.dmub_callback[type] = callback;
813 adev->dm.dmub_thread_offload[type] = dmub_int_thread_offload;
831 if (dmub_hpd_wrk->dmub_notify->type < ARRAY_SIZE(dmub_hpd_wrk->adev->dm.dmub_callback)) {
832 dmub_hpd_wrk->adev->dm.dmub_callback[dmub_hpd_wrk->dmub_notify->type](dmub_hpd_wrk->adev,
854 struct amdgpu_display_manager *dm = &adev->dm;
860 if (dc_enable_dmub_notifications(adev->dm.dc) &&
864 dc_stat_get_dmub_notification(adev->dm.dc, &notify);
865 if (notify.type >= ARRAY_SIZE(dm->dmub_thread_offload)) {
869 if (!dm->dmub_callback[notify.type]) {
873 if (dm->dmub_thread_offload[notify.type] == true) {
889 plink = adev->dm.dc->links[notify.link_index];
895 queue_work(adev->dm.delayed_hpd_wq, &dmub_hpd_wrk->handle_hpd_work);
897 dm->dmub_callback[notify.type](adev, &notify);
904 if (dc_dmub_srv_get_dmub_outbox0_msg(dm->dc, &entry)) {
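The cluster above (source lines 722-904) is the DMUB outbox notification path: a table of per-type callbacks plus a per-type offload flag (lines 811-813), with deferred delivery through the single-threaded delayed_hpd_wq when a handler may sleep (lines 869-897). A minimal self-contained sketch of that shape follows; the types and names here (notify_table, dispatch, deferred_work) are illustrative stand-ins, not the driver's own:

/* Per-type callback table with optional thread offload, mirroring the
 * dmub_callback/dmub_thread_offload pair above. Simplified types. */
#include <linux/kernel.h>
#include <linux/errno.h>
#include <linux/slab.h>
#include <linux/workqueue.h>

#define NOTIFY_TYPE_MAX 8

struct notify { int type; };
typedef void (*notify_cb)(void *ctx, struct notify *n);

struct notify_table {
	notify_cb cb[NOTIFY_TYPE_MAX];
	bool offload[NOTIFY_TYPE_MAX];	/* mirrors dm.dmub_thread_offload */
	struct workqueue_struct *wq;	/* mirrors dm.delayed_hpd_wq */
};

struct deferred_work {
	struct work_struct work;
	struct notify_table *t;
	void *ctx;
	struct notify n;		/* copied, like dm.dmub_notify */
};

static void deferred_fn(struct work_struct *work)
{
	struct deferred_work *dw = container_of(work, struct deferred_work, work);

	if (dw->n.type < NOTIFY_TYPE_MAX && dw->t->cb[dw->n.type])
		dw->t->cb[dw->n.type](dw->ctx, &dw->n);
	kfree(dw);
}

/* Registration: store callback and offload preference per type
 * (compare lines 811-813 above). */
static int register_cb(struct notify_table *t, int type, notify_cb cb,
		       bool offload)
{
	if (!cb || type >= NOTIFY_TYPE_MAX)
		return -EINVAL;
	t->cb[type] = cb;
	t->offload[type] = offload;
	return 0;
}

/* IRQ-side dispatch: run inline, or defer to the workqueue so handlers
 * that sleep never run in interrupt context (compare lines 865-897). */
static void dispatch(struct notify_table *t, void *ctx, struct notify *n)
{
	if (n->type >= NOTIFY_TYPE_MAX || !t->cb[n->type])
		return;

	if (t->offload[n->type]) {
		struct deferred_work *dw = kzalloc(sizeof(*dw), GFP_ATOMIC);

		if (!dw)
			return;
		INIT_WORK(&dw->work, deferred_fn);
		dw->t = t;
		dw->ctx = ctx;
		dw->n = *n;
		queue_work(t->wq, &dw->work);
	} else {
		t->cb[n->type](ctx, n);
	}
}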
940 struct dm_compressor_info *compressor = &adev->dm.compressor;
945 if (adev->dm.dc->fbc_compressor == NULL)
968 adev->dm.dc->ctx->fbc_gpu_addr = compressor->gpu_addr;
989 mutex_lock(&adev->dm.audio_lock);
1009 mutex_unlock(&adev->dm.audio_lock);
1029 adev->dm.audio_component = acomp;
1042 adev->dm.audio_component = NULL;
1059 adev->mode_info.audio.num_pins = adev->dm.dc->res_pool->audio_count;
1069 adev->dm.dc->res_pool->audios[i]->inst;
1077 adev->dm.audio_registered = true;
1090 if (adev->dm.audio_registered) {
1092 adev->dm.audio_registered = false;
1102 struct drm_audio_component *acomp = adev->dm.audio_component;
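Lines 989-1102 above show the audio component lifetime: the pointer is published at bind (1029), cleared at unbind (1042), and every use is bracketed by audio_lock. A sketch of a guarded notification, assuming the drm_audio_component layout from include/drm/drm_audio_component.h (notify_audio_sketch is a hypothetical helper):

/* The lock makes the "read pointer, call through it" sequence atomic
 * against unbind clearing adev->dm.audio_component. */
static void notify_audio_sketch(struct amdgpu_device *adev, int pin)
{
	struct drm_audio_component *acomp;

	mutex_lock(&adev->dm.audio_lock);
	acomp = adev->dm.audio_component;
	if (acomp && acomp->audio_ops && acomp->audio_ops->pin_eld_notify)
		acomp->audio_ops->pin_eld_notify(acomp->audio_ops->audio_ptr,
						 pin, -1);
	mutex_unlock(&adev->dm.audio_lock);
}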
1115 struct dmub_srv *dmub_srv = adev->dm.dmub_srv;
1116 struct dmub_srv_fb_info *fb_info = adev->dm.dmub_fb_info;
1117 const struct firmware *dmub_fw = adev->dm.dmub_fw;
1118 struct dmcu *dmcu = adev->dm.dc->res_pool->dmcu;
1119 struct abm *abm = adev->dm.dc->res_pool->abm;
1120 struct dc_context *ctx = adev->dm.dc->ctx;
1227 hw_params.disable_dpia = adev->dm.dc->debug.dpia_debug.bits.disable_dpia;
1259 if (!adev->dm.dc->ctx->dmub_srv)
1260 adev->dm.dc->ctx->dmub_srv = dc_dmub_srv_create(adev->dm.dc, dmub_srv);
1261 if (!adev->dm.dc->ctx->dmub_srv) {
1267 adev->dm.dmcub_fw_version);
1274 struct dmub_srv *dmub_srv = adev->dm.dmub_srv;
1433 mutex_lock(&adev->dm.dc_lock);
1477 mutex_unlock(&adev->dm.dc_lock);
1609 static void retrieve_dmi_info(struct amdgpu_display_manager *dm)
1613 dm->aux_hpd_discon_quirk = false;
1617 dm->aux_hpd_discon_quirk = true;
1628 adev->dm.ddev = adev_to_drm(adev);
1629 adev->dm.adev = adev;
1635 mutex_init(&adev->dm.dpia_aux_lock);
1636 mutex_init(&adev->dm.dc_lock);
1637 mutex_init(&adev->dm.audio_lock);
1657 adev->dm.cgs_device = amdgpu_cgs_create_device(adev);
1659 if (!adev->dm.cgs_device) {
1664 init_data.cgs_device = adev->dm.cgs_device;
1670 switch (adev->dm.dmcub_fw_version) {
1747 INIT_LIST_HEAD(&adev->dm.da_list);
1749 retrieve_dmi_info(&adev->dm);
1752 adev->dm.dc = dc_create(&init_data);
1754 if (adev->dm.dc) {
1756 dce_version_to_string(adev->dm.dc->ctx->dce_version));
1763 adev->dm.dc->debug.force_single_disp_pipe_split = false;
1764 adev->dm.dc->debug.pipe_split_policy = MPC_SPLIT_AVOID;
1768 adev->dm.dc->debug.disable_stutter = amdgpu_pp_feature_mask & PP_STUTTER_MODE ? false : true;
1770 adev->dm.dc->debug.disable_stutter = true;
1773 adev->dm.dc->debug.disable_stutter = true;
1776 adev->dm.dc->debug.disable_dsc = true;
1779 adev->dm.dc->debug.disable_clock_gate = true;
1782 adev->dm.dc->debug.force_subvp_mclk_switch = true;
1785 adev->dm.dc->debug.using_dml2 = true;
1787 adev->dm.dc->debug.visual_confirm = amdgpu_dc_visual_confirm;
1790 adev->dm.dc->debug.ignore_cable_id = true;
1792 if (adev->dm.dc->caps.dp_hdmi21_pcon_support)
1801 dc_hardware_init(adev->dm.dc);
1803 adev->dm.hpd_rx_offload_wq = hpd_rx_irq_create_workqueue(adev->dm.dc);
1804 if (!adev->dm.hpd_rx_offload_wq) {
1815 dc_setup_system_context(adev->dm.dc, &pa_config);
1818 adev->dm.freesync_module = mod_freesync_create(adev->dm.dc);
1819 if (!adev->dm.freesync_module) {
1824 adev->dm.freesync_module);
1828 if (adev->dm.dc->caps.max_links > 0) {
1829 adev->dm.vblank_control_workqueue =
1831 if (!adev->dm.vblank_control_workqueue)
1835 if (adev->dm.dc->caps.max_links > 0 && adev->family >= AMDGPU_FAMILY_RV) {
1836 adev->dm.hdcp_workqueue = hdcp_create_workqueue(adev, &init_params.cp_psp, adev->dm.dc);
1838 if (!adev->dm.hdcp_workqueue)
1841 DRM_DEBUG_DRIVER("amdgpu: hdcp_workqueue init done %p.\n", adev->dm.hdcp_workqueue);
1843 dc_init_callbacks(adev->dm.dc, &init_params);
1845 if (dc_is_dmub_outbox_supported(adev->dm.dc)) {
1846 init_completion(&adev->dm.dmub_aux_transfer_done);
1847 adev->dm.dmub_notify = kzalloc(sizeof(struct dmub_notification), GFP_KERNEL);
1848 if (!adev->dm.dmub_notify) {
1849 DRM_INFO("amdgpu: fail to allocate adev->dm.dmub_notify");
1853 adev->dm.delayed_hpd_wq = create_singlethread_workqueue("amdgpu_dm_hpd_wq");
1854 if (!adev->dm.delayed_hpd_wq) {
1871 dc_enable_dmub_outbox(adev->dm.dc);
1875 dc_dmub_srv_enable_dpia_trace(adev->dm.dc);
1890 adev_to_drm(adev)->mode_config.cursor_width = adev->dm.dc->caps.max_cursor_size;
1891 adev_to_drm(adev)->mode_config.cursor_height = adev->dm.dc->caps.max_cursor_size;
1893 if (drm_vblank_init(adev_to_drm(adev), adev->dm.display_indexes_num)) {
1900 adev->dm.secure_display_ctxs = amdgpu_dm_crtc_secure_display_create_contexts(adev);
1901 if (!adev->dm.secure_display_ctxs)
1927 if (adev->dm.vblank_control_workqueue) {
1928 destroy_workqueue(adev->dm.vblank_control_workqueue);
1929 adev->dm.vblank_control_workqueue = NULL;
1932 amdgpu_dm_destroy_drm_device(&adev->dm);
1935 if (adev->dm.secure_display_ctxs) {
1937 if (adev->dm.secure_display_ctxs[i].crtc) {
1938 flush_work(&adev->dm.secure_display_ctxs[i].notify_ta_work);
1939 flush_work(&adev->dm.secure_display_ctxs[i].forward_roi_work);
1942 kfree(adev->dm.secure_display_ctxs);
1943 adev->dm.secure_display_ctxs = NULL;
1946 if (adev->dm.hdcp_workqueue) {
1947 hdcp_destroy(&adev->dev->kobj, adev->dm.hdcp_workqueue);
1948 adev->dm.hdcp_workqueue = NULL;
1951 if (adev->dm.dc) {
1952 dc_deinit_callbacks(adev->dm.dc);
1953 dc_dmub_srv_destroy(&adev->dm.dc->ctx->dmub_srv);
1954 if (dc_enable_dmub_notifications(adev->dm.dc)) {
1955 kfree(adev->dm.dmub_notify);
1956 adev->dm.dmub_notify = NULL;
1957 destroy_workqueue(adev->dm.delayed_hpd_wq);
1958 adev->dm.delayed_hpd_wq = NULL;
1962 if (adev->dm.dmub_bo)
1963 amdgpu_bo_free_kernel(&adev->dm.dmub_bo,
1964 &adev->dm.dmub_bo_gpu_addr,
1965 &adev->dm.dmub_bo_cpu_addr);
1967 if (adev->dm.hpd_rx_offload_wq && adev->dm.dc) {
1968 for (i = 0; i < adev->dm.dc->caps.max_links; i++) {
1969 if (adev->dm.hpd_rx_offload_wq[i].wq) {
1970 destroy_workqueue(adev->dm.hpd_rx_offload_wq[i].wq);
1971 adev->dm.hpd_rx_offload_wq[i].wq = NULL;
1975 kfree(adev->dm.hpd_rx_offload_wq);
1976 adev->dm.hpd_rx_offload_wq = NULL;
1980 if (adev->dm.dc)
1981 dc_destroy(&adev->dm.dc);
1988 if (adev->dm.cgs_device) {
1989 amdgpu_cgs_destroy_device(adev->dm.cgs_device);
1990 adev->dm.cgs_device = NULL;
1992 if (adev->dm.freesync_module) {
1993 mod_freesync_destroy(adev->dm.freesync_module);
1994 adev->dm.freesync_module = NULL;
1997 mutex_destroy(&adev->dm.audio_lock);
1998 mutex_destroy(&adev->dm.dc_lock);
1999 mutex_destroy(&adev->dm.dpia_aux_lock);
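amdgpu_dm_init (1628-1901) acquires mutexes, the CGS device, the DC instance, the freesync module and several workqueues in order; the fini path (1927-1999) releases them in reverse and NULLs each pointer so teardown is safe on a partially-initialized object. A condensed, self-contained sketch of that acquire-in-order / release-in-reverse pairing (dm_sketch and its fields are illustrative, the buffer size is a placeholder):

#include <linux/errno.h>
#include <linux/mutex.h>
#include <linux/slab.h>
#include <linux/workqueue.h>

struct dm_sketch {
	struct mutex dc_lock;
	struct workqueue_struct *hpd_wq;
	void *notify_buf;		/* stands in for dm.dmub_notify */
};

static int dm_sketch_init(struct dm_sketch *dm)
{
	mutex_init(&dm->dc_lock);

	dm->notify_buf = kzalloc(64, GFP_KERNEL);
	if (!dm->notify_buf)
		goto err_notify;

	dm->hpd_wq = create_singlethread_workqueue("dm_sketch_hpd_wq");
	if (!dm->hpd_wq)
		goto err_wq;

	return 0;

err_wq:
	kfree(dm->notify_buf);
	dm->notify_buf = NULL;
err_notify:
	mutex_destroy(&dm->dc_lock);
	return -ENOMEM;
}

static void dm_sketch_fini(struct dm_sketch *dm)
{
	/* Reverse order of init; NULL each pointer like the driver does
	 * so a second fini (or fini after failed init) is harmless. */
	if (dm->hpd_wq) {
		destroy_workqueue(dm->hpd_wq);
		dm->hpd_wq = NULL;
	}
	kfree(dm->notify_buf);
	dm->notify_buf = NULL;
	mutex_destroy(&dm->dc_lock);
}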
2071 DRM_DEBUG_KMS("dm: DMCU firmware not supported on direct or SMU loading\n");
2075 r = amdgpu_ucode_request(adev, &adev->dm.fw_dmcu, fw_name_dmcu);
2078 DRM_DEBUG_KMS("dm: DMCU firmware not found\n");
2079 adev->dm.fw_dmcu = NULL;
2085 amdgpu_ucode_release(&adev->dm.fw_dmcu);
2089 hdr = (const struct dmcu_firmware_header_v1_0 *)adev->dm.fw_dmcu->data;
2091 adev->firmware.ucode[AMDGPU_UCODE_ID_DMCU_ERAM].fw = adev->dm.fw_dmcu;
2096 adev->firmware.ucode[AMDGPU_UCODE_ID_DMCU_INTV].fw = adev->dm.fw_dmcu;
2100 adev->dm.dmcu_fw_version = le32_to_cpu(hdr->header.ucode_version);
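The DMCU firmware load above (2071-2100) treats a missing image as non-fatal: "not found" just leaves fw_dmcu NULL and the feature off, while any other error releases the request and propagates. A sketch of that pattern as a hypothetical helper in driver idiom (request_optional_fw is not a driver function; treating -ENODEV as "not present" is an assumption here):

static int request_optional_fw(struct amdgpu_device *adev,
			       const struct firmware **fw,
			       const char *name)
{
	int r = amdgpu_ucode_request(adev, fw, name);

	if (r == -ENODEV) {
		/* Firmware not present: fine, the feature stays off. */
		DRM_DEBUG_KMS("dm: optional firmware %s not found\n", name);
		*fw = NULL;
		return 0;
	}
	if (r) {
		dev_err(adev->dev, "dm: failed to load %s (%d)\n", name, r);
		amdgpu_ucode_release(fw);
		return r;
	}
	return 0;
}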
2111 return dm_read_reg(adev->dm.dc->ctx, address);
2119 return dm_write_reg(adev->dm.dc->ctx, address, value);
2190 hdr = (const struct dmcub_firmware_header_v1_0 *)adev->dm.dmub_fw->data;
2191 adev->dm.dmcub_fw_version = le32_to_cpu(hdr->header.ucode_version);
2197 adev->dm.dmub_fw;
2202 adev->dm.dmcub_fw_version);
2206 adev->dm.dmub_srv = kzalloc(sizeof(*adev->dm.dmub_srv), GFP_KERNEL);
2207 dmub_srv = adev->dm.dmub_srv;
2235 adev->dm.dmub_fw->data +
2239 adev->dm.dmub_fw->data +
2259 &adev->dm.dmub_bo,
2260 &adev->dm.dmub_bo_gpu_addr,
2261 &adev->dm.dmub_bo_cpu_addr);
2267 memory_params.cpu_fb_addr = adev->dm.dmub_bo_cpu_addr;
2268 memory_params.gpu_fb_addr = adev->dm.dmub_bo_gpu_addr;
2272 adev->dm.dmub_fb_info =
2273 kzalloc(sizeof(*adev->dm.dmub_fb_info), GFP_KERNEL);
2274 fb_info = adev->dm.dmub_fb_info;
2307 kfree(adev->dm.dmub_fb_info);
2308 adev->dm.dmub_fb_info = NULL;
2310 if (adev->dm.dmub_srv) {
2311 dmub_srv_destroy(adev->dm.dmub_srv);
2312 kfree(adev->dm.dmub_srv);
2313 adev->dm.dmub_srv = NULL;
2316 amdgpu_ucode_release(&adev->dm.dmub_fw);
2317 amdgpu_ucode_release(&adev->dm.fw_dmcu);
2367 dmcu = adev->dm.dc->res_pool->dmcu;
2390 } else if (adev->dm.dc->ctx->dmub_srv) {
2394 dc_get_edp_links(adev->dm.dc, edp_links, &edp_num);
2396 if (!dmub_init_abm_config(adev->dm.dc->res_pool, params, i))
2611 rc = dc_interrupt_set(adev->dm.dc, irq_source, enable) ? 0 : -EBUSY;
2629 if (!dc_interrupt_set(adev->dm.dc, irq_source, enable))
2681 static void hpd_rx_irq_work_suspend(struct amdgpu_display_manager *dm)
2685 if (dm->hpd_rx_offload_wq) {
2686 for (i = 0; i < dm->dc->caps.max_links; i++)
2687 flush_workqueue(dm->hpd_rx_offload_wq[i].wq);
2694 struct amdgpu_display_manager *dm = &adev->dm;
2698 mutex_lock(&dm->dc_lock);
2700 dc_allow_idle_optimizations(adev->dm.dc, false);
2702 dm->cached_dc_state = dc_state_create_copy(dm->dc->current_state);
2704 dm_gpureset_toggle_interrupts(adev, dm->cached_dc_state, false);
2706 amdgpu_dm_commit_zero_streams(dm->dc);
2710 hpd_rx_irq_work_suspend(dm);
2715 WARN_ON(adev->dm.cached_state);
2716 adev->dm.cached_state = drm_atomic_helper_suspend(adev_to_drm(adev));
2717 if (IS_ERR(adev->dm.cached_state))
2718 return PTR_ERR(adev->dm.cached_state);
2724 hpd_rx_irq_work_suspend(dm);
2726 dc_set_power_state(dm->dc, DC_ACPI_CM_POWER_STATE_D3);
2727 dc_dmub_srv_set_power_state(dm->dc->ctx->dmub_srv, DC_ACPI_CM_POWER_STATE_D3);
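The suspend path above (2694-2727) has two halves: under GPU reset it takes dc_lock, disables idle optimizations, copies dc->current_state into cached_dc_state and zeroes the streams, deliberately returning with dc_lock still held for the resume half; an ordinary suspend instead snapshots the DRM atomic state and drops DC into D3. A condensed sketch in driver idiom (not standalone-compilable):

static int dm_suspend_sketch(struct amdgpu_device *adev, bool gpu_reset)
{
	struct amdgpu_display_manager *dm = &adev->dm;

	if (gpu_reset) {
		mutex_lock(&dm->dc_lock);
		dc_allow_idle_optimizations(dm->dc, false);
		/* Copy of dc->current_state, replayed on resume. */
		dm->cached_dc_state = dc_state_create_copy(dm->dc->current_state);
		dm_gpureset_toggle_interrupts(adev, dm->cached_dc_state, false);
		amdgpu_dm_commit_zero_streams(dm->dc);
		hpd_rx_irq_work_suspend(dm);
		return 0;		/* dc_lock is released in resume */
	}

	WARN_ON(dm->cached_state);
	dm->cached_state = drm_atomic_helper_suspend(adev_to_drm(adev));
	if (IS_ERR(dm->cached_state))
		return PTR_ERR(dm->cached_state);

	hpd_rx_irq_work_suspend(dm);

	dc_set_power_state(dm->dc, DC_ACPI_CM_POWER_STATE_D3);
	dc_dmub_srv_set_power_state(dm->dc->ctx->dmub_srv,
				    DC_ACPI_CM_POWER_STATE_D3);
	return 0;
}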
2835 struct amdgpu_display_manager *dm)
2849 drm_err(dm->ddev, "Failed to allocate update bundle\n");
2863 update_planes_and_stream_adapter(dm->dc,
2879 struct amdgpu_display_manager *dm = &adev->dm;
2889 struct dm_atomic_state *dm_state = to_dm_atomic_state(dm->atomic_obj.state);
2896 if (dm->dc->caps.ips_support) {
2897 dc_dmub_srv_apply_idle_power_optimizations(dm->dc, false);
2901 dc_state = dm->cached_dc_state;
2904 * The dc->current_state is backed up into dm->cached_dc_state
2918 link_enc_cfg_copy(adev->dm.dc->current_state, dc_state);
2924 dc_dmub_srv_set_power_state(dm->dc->ctx->dmub_srv, DC_ACPI_CM_POWER_STATE_D0);
2925 dc_set_power_state(dm->dc, DC_ACPI_CM_POWER_STATE_D0);
2927 dc_resume(dm->dc);
2939 if (dc_is_dmub_outbox_supported(adev->dm.dc)) {
2941 dc_enable_dmub_outbox(adev->dm.dc);
2946 WARN_ON(!dc_commit_streams(dm->dc, &commit_params));
2948 dm_gpureset_commit_state(dm->cached_dc_state, dm);
2950 dm_gpureset_toggle_interrupts(adev, dm->cached_dc_state, true);
2952 dc_state_release(dm->cached_dc_state);
2953 dm->cached_dc_state = NULL;
2957 mutex_unlock(&dm->dc_lock);
2963 dm_state->context = dc_state_create(dm->dc, NULL);
2970 if (dc_is_dmub_outbox_supported(adev->dm.dc)) {
2972 dc_enable_dmub_outbox(adev->dm.dc);
2976 dc_dmub_srv_set_power_state(dm->dc->ctx->dmub_srv, DC_ACPI_CM_POWER_STATE_D0);
2977 dc_set_power_state(dm->dc, DC_ACPI_CM_POWER_STATE_D0);
2980 dc_resume(dm->dc);
3017 mutex_lock(&dm->dc_lock);
3019 mutex_unlock(&dm->dc_lock);
3034 for_each_new_crtc_in_state(dm->cached_state, crtc, new_crtc_state, i)
3042 for_each_new_crtc_in_state(dm->cached_state, crtc, new_crtc_state, i) {
3052 for_each_new_plane_in_state(dm->cached_state, plane, new_plane_state, i) {
3061 drm_atomic_helper_resume(ddev, dm->cached_state);
3063 dm->cached_state = NULL;
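The resume lines above (2835-3063) mirror that split: power DC back to D0, re-enable the DMUB outbox when supported, then either replay the cached DC state (GPU reset) or hand the cached atomic state back to DRM. A condensed sketch; the commit_params type and fields follow current DC and are elided here:

static void dm_resume_sketch(struct amdgpu_device *adev, bool gpu_reset)
{
	struct amdgpu_display_manager *dm = &adev->dm;
	struct dc_commit_streams_params commit_params = {};

	if (dc_is_dmub_outbox_supported(dm->dc))
		dc_enable_dmub_outbox(dm->dc);

	dc_dmub_srv_set_power_state(dm->dc->ctx->dmub_srv,
				    DC_ACPI_CM_POWER_STATE_D0);
	dc_set_power_state(dm->dc, DC_ACPI_CM_POWER_STATE_D0);
	dc_resume(dm->dc);

	if (gpu_reset) {
		/* Replay the state snapshotted at suspend time. */
		WARN_ON(!dc_commit_streams(dm->dc, &commit_params));
		dm_gpureset_commit_state(dm->cached_dc_state, dm);
		dm_gpureset_toggle_interrupts(adev, dm->cached_dc_state, true);
		dc_state_release(dm->cached_dc_state);
		dm->cached_dc_state = NULL;
		mutex_unlock(&dm->dc_lock);	/* taken in the suspend half */
		return;
	}

	drm_atomic_helper_resume(adev_to_drm(adev), dm->cached_state);
	dm->cached_state = NULL;
}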
3108 .name = "dm",
3169 caps = &adev->dm.backlight_caps[aconnector->bl_idx];
3356 if (adev->dm.disable_hpd_irq)
3365 if (adev->dm.hdcp_workqueue) {
3366 hdcp_reset_display(adev->dm.hdcp_workqueue, aconnector->dc_link->link_index);
3387 mutex_lock(&adev->dm.dc_lock);
3389 mutex_unlock(&adev->dm.dc_lock);
3446 struct hpd_rx_irq_offload_work_queue *offload_wq = &adev->dm.hpd_rx_offload_wq[idx];
3450 if (adev->dm.disable_hpd_irq)
3535 mutex_lock(&adev->dm.dc_lock);
3537 mutex_unlock(&adev->dm.dc_lock);
3554 if (adev->dm.hdcp_workqueue)
3555 hdcp_handle_cpirq(adev->dm.hdcp_workqueue, aconnector->base.index);
3575 if (dc_is_dmub_outbox_supported(adev->dm.dc)) {
3618 struct dc *dc = adev->dm.dc;
3652 c_irq_params = &adev->dm.vblank_params[int_params.irq_source - DC_IRQ_SOURCE_VBLANK1];
3674 c_irq_params = &adev->dm.pflip_params[int_params.irq_source - DC_IRQ_SOURCE_PFLIP_FIRST];
3701 struct dc *dc = adev->dm.dc;
3738 c_irq_params = &adev->dm.vblank_params[int_params.irq_source - DC_IRQ_SOURCE_VBLANK1];
3759 c_irq_params = &adev->dm.vupdate_params[int_params.irq_source - DC_IRQ_SOURCE_VUPDATE1];
3781 c_irq_params = &adev->dm.pflip_params[int_params.irq_source - DC_IRQ_SOURCE_PFLIP_FIRST];
3807 struct dc *dc = adev->dm.dc;
3853 c_irq_params = &adev->dm.vblank_params[int_params.irq_source - DC_IRQ_SOURCE_VBLANK1];
3882 c_irq_params = &adev->dm.vline0_params[int_params.irq_source
3912 c_irq_params = &adev->dm.vupdate_params[int_params.irq_source - DC_IRQ_SOURCE_VUPDATE1];
3935 c_irq_params = &adev->dm.pflip_params[int_params.irq_source - DC_IRQ_SOURCE_PFLIP_FIRST];
3960 struct dc *dc = adev->dm.dc;
3981 c_irq_params = &adev->dm.dmub_outbox_params[0];
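The register_irq_handlers variants above (3618-3981, one per DCE/DCN generation) all follow the same pattern: translate an ASIC interrupt source id to a DC irq source, index a per-source common_irq_params slot by its offset from the first source of that kind (e.g. DC_IRQ_SOURCE_VBLANK1), and register a high-IRQ-context handler with that slot as its context. A sketch of one vblank loop; the srcid range is ASIC-specific and simplified here to a bare CRTC index, and error handling is condensed:

static int register_vblank_irqs_sketch(struct amdgpu_device *adev)
{
	struct dc *dc = adev->dm.dc;
	struct common_irq_params *c_irq_params;
	struct dc_interrupt_params int_params = {};
	int i;

	int_params.requested_polarity = INTERRUPT_POLARITY_DEFAULT;
	int_params.int_context = INTERRUPT_HIGH_IRQ_CONTEXT;

	for (i = 1; i <= adev->mode_info.num_crtc; i++) {
		int_params.irq_source = dc_interrupt_to_irq_source(dc, i, 0);

		/* One params slot per source, offset from the first
		 * vblank source (compare lines 3652/3738/3853 above). */
		c_irq_params = &adev->dm.vblank_params[int_params.irq_source
						       - DC_IRQ_SOURCE_VBLANK1];
		c_irq_params->adev = adev;
		c_irq_params->irq_src = int_params.irq_source;

		if (!amdgpu_dm_irq_register_interrupt(adev, &int_params,
						      dm_crtc_high_irq,
						      c_irq_params))
			return -ENOMEM;
	}
	return 0;
}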
4004 struct amdgpu_display_manager *dm = &adev->dm;
4010 priv_state = drm_atomic_get_private_obj_state(state, &dm->atomic_obj);
4024 struct amdgpu_display_manager *dm = &adev->dm;
4030 if (obj->funcs == dm->atomic_obj.funcs)
4103 state->context = dc_state_create_current_copy(adev->dm.dc);
4110 &adev->dm.atomic_obj,
4140 static void amdgpu_dm_update_backlight_caps(struct amdgpu_display_manager *dm,
4148 if (dm->backlight_caps[bl_idx].caps_valid)
4153 dm->backlight_caps[bl_idx].caps_valid = true;
4156 dm->backlight_caps[bl_idx].min_input_signal = caps.min_input_signal;
4157 dm->backlight_caps[bl_idx].max_input_signal = caps.max_input_signal;
4159 dm->backlight_caps[bl_idx].min_input_signal =
4161 dm->backlight_caps[bl_idx].max_input_signal =
4165 if (dm->backlight_caps[bl_idx].aux_support)
4168 dm->backlight_caps[bl_idx].min_input_signal = AMDGPU_DM_DEFAULT_MIN_BACKLIGHT;
4169 dm->backlight_caps[bl_idx].max_input_signal = AMDGPU_DM_DEFAULT_MAX_BACKLIGHT;
4219 static void amdgpu_dm_backlight_set_level(struct amdgpu_display_manager *dm,
4228 amdgpu_dm_update_backlight_caps(dm, bl_idx);
4229 caps = dm->backlight_caps[bl_idx];
4231 dm->brightness[bl_idx] = user_brightness;
4234 amdgpu_atombios_scratch_regs_set_backlight_level(dm->adev, dm->brightness[bl_idx]);
4235 brightness = convert_brightness_from_user(&caps, dm->brightness[bl_idx]);
4236 link = (struct dc_link *)dm->backlight_link[bl_idx];
4251 dm->actual_brightness[bl_idx] = user_brightness;
4256 struct amdgpu_display_manager *dm = bl_get_data(bd);
4259 for (i = 0; i < dm->num_of_edps; i++) {
4260 if (bd == dm->backlight_dev[i])
4265 amdgpu_dm_backlight_set_level(dm, i, bd->props.brightness);
4270 static u32 amdgpu_dm_backlight_get_level(struct amdgpu_display_manager *dm,
4275 struct dc_link *link = (struct dc_link *)dm->backlight_link[bl_idx];
4277 amdgpu_dm_update_backlight_caps(dm, bl_idx);
4278 caps = dm->backlight_caps[bl_idx];
4286 return dm->brightness[bl_idx];
4293 return dm->brightness[bl_idx];
4300 struct amdgpu_display_manager *dm = bl_get_data(bd);
4303 for (i = 0; i < dm->num_of_edps; i++) {
4304 if (bd == dm->backlight_dev[i])
4309 return amdgpu_dm_backlight_get_level(dm, i);
4322 struct amdgpu_display_manager *dm = &drm_to_adev(drm)->dm;
4343 dm->backlight_dev[aconnector->bl_idx] =
4344 backlight_device_register(bl_name, aconnector->base.kdev, dm,
4347 if (IS_ERR(dm->backlight_dev[aconnector->bl_idx])) {
4349 dm->backlight_dev[aconnector->bl_idx] = NULL;
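The backlight lines above (4140-4349) hook the standard kernel backlight class to the display manager: the device is registered with the dm as devdata, and the ops recover it via bl_get_data() and search backlight_dev[] to find which eDP a request targets. A sketch with the real backlight API; names, the index-fallback policy, and the props values are illustrative:

#include <linux/backlight.h>

static int sketch_update_status(struct backlight_device *bd)
{
	struct amdgpu_display_manager *dm = bl_get_data(bd);
	int i;

	/* Find which registered eDP this backlight device belongs to
	 * (compare lines 4259-4265 above). */
	for (i = 0; i < dm->num_of_edps; i++)
		if (bd == dm->backlight_dev[i])
			break;
	if (i >= dm->num_of_edps)
		i = 0;
	amdgpu_dm_backlight_set_level(dm, i, bd->props.brightness);
	return 0;
}

static int sketch_get_brightness(struct backlight_device *bd)
{
	struct amdgpu_display_manager *dm = bl_get_data(bd);

	/* ... same index search as above, elided ... */
	return amdgpu_dm_backlight_get_level(dm, 0);
}

static const struct backlight_ops sketch_ops = {
	.update_status	= sketch_update_status,
	.get_brightness	= sketch_get_brightness,
};

/* Registration (compare lines 4343-4349): devdata is the dm itself,
 * so every eDP's backlight device shares one backing object. */
static void sketch_register(struct amdgpu_display_manager *dm,
			    struct device *kdev, int bl_idx)
{
	struct backlight_properties props = {
		.type = BACKLIGHT_RAW,
		.max_brightness = AMDGPU_MAX_BL_LEVEL,
		.brightness = AMDGPU_MAX_BL_LEVEL,
	};

	dm->backlight_dev[bl_idx] =
		backlight_device_register("amdgpu_bl_sketch", kdev, dm,
					  &sketch_ops, &props);
	if (IS_ERR(dm->backlight_dev[bl_idx]))
		dm->backlight_dev[bl_idx] = NULL;
}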
4354 static int initialize_plane(struct amdgpu_display_manager *dm,
4377 if (plane_id >= dm->dc->caps.max_streams)
4380 ret = amdgpu_dm_plane_init(dm, plane, possible_crtcs, plane_cap);
4395 static void setup_backlight_device(struct amdgpu_display_manager *dm,
4399 int bl_idx = dm->num_of_edps;
4405 if (dm->num_of_edps >= AMDGPU_DM_MAX_NUM_EDP) {
4406 drm_warn(adev_to_drm(dm->adev), "Too much eDP connections, skipping backlight setup for additional eDPs\n");
4412 amdgpu_dm_update_backlight_caps(dm, bl_idx);
4413 dm->brightness[bl_idx] = AMDGPU_MAX_BL_LEVEL;
4414 dm->backlight_link[bl_idx] = link;
4415 dm->num_of_edps++;
4432 struct amdgpu_display_manager *dm = &adev->dm;
4443 int max_overlay = dm->dc->caps.max_slave_planes;
4445 dm->display_indexes_num = dm->dc->caps.max_streams;
4447 adev->mode_info.num_crtc = adev->dm.display_indexes_num;
4451 link_cnt = dm->dc->caps.max_links;
4452 if (amdgpu_dm_mode_config_init(dm->adev)) {
4458 primary_planes = dm->dc->caps.max_streams;
4466 plane = &dm->dc->caps.planes[i];
4468 if (initialize_plane(dm, mode_info, i,
4484 for (i = 0; i < dm->dc->caps.max_planes; ++i) {
4485 struct dc_plane_cap *plane = &dm->dc->caps.planes[i];
4500 if (initialize_plane(dm, NULL, primary_planes + i,
4507 for (i = 0; i < dm->dc->caps.max_streams; i++)
4508 if (amdgpu_dm_crtc_init(dm, mode_info->planes[i], i)) {
4526 if (register_outbox_irq_handlers(dm->adev)) {
4588 link = dc_get_link_at_index(dm->dc, i);
4598 if (amdgpu_dm_wb_connector_init(dm, wbcon, i)) {
4618 if (amdgpu_dm_encoder_init(dm->ddev, aencoder, i)) {
4623 if (amdgpu_dm_connector_init(dm, aconnector, i, aencoder)) {
4628 if (dm->hpd_rx_offload_wq)
4629 dm->hpd_rx_offload_wq[aconnector->base.index].aconnector =
4641 mutex_lock(&dm->dc_lock);
4643 mutex_unlock(&dm->dc_lock);
4647 setup_backlight_device(dm, aconnector);
4674 if (dce60_register_irq_handlers(dm->adev)) {
4696 if (dce110_register_irq_handlers(dm->adev)) {
4722 if (dcn10_register_irq_handlers(dm->adev)) {
4743 static void amdgpu_dm_destroy_drm_device(struct amdgpu_display_manager *dm)
4745 drm_atomic_private_obj_fini(&dm->atomic_obj);
4862 r = amdgpu_ucode_request(adev, &adev->dm.dmub_fw, fw_name_dmub);
6569 struct amdgpu_display_manager *dm = &adev->dm;
6579 backlight_device_unregister(dm->backlight_dev[aconnector->bl_idx]);
6580 dm->backlight_dev[aconnector->bl_idx] = NULL;
6899 dc_result = dc_validate_stream(adev->dm.dc, stream);
6904 dc_result = dm_validate_stream_and_context(adev->dm.dc, stream);
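Lines 6899/6904 show the two-stage stream validation: the stream is first checked standalone, then against the current context; callers retry with relaxed parameters (color depth, encoding) on failure. A condensed sketch of just that stage pairing:

static enum dc_status validate_stream_sketch(struct amdgpu_device *adev,
					     struct dc_stream_state *stream)
{
	enum dc_status dc_result = dc_validate_stream(adev->dm.dc, stream);

	/* Only a stream that is valid in isolation is worth checking
	 * against the committed context. */
	if (dc_result == DC_OK)
		dc_result = dm_validate_stream_and_context(adev->dm.dc,
							   stream);
	return dc_result;
}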
7632 void amdgpu_dm_connector_init_helper(struct amdgpu_display_manager *dm,
7638 struct amdgpu_device *adev = drm_to_adev(dm->ddev);
7688 dm->ddev->mode_config.scaling_mode_property,
7729 if (adev->dm.hdcp_workqueue)
7808 static int amdgpu_dm_connector_init(struct amdgpu_display_manager *dm,
7815 struct dc *dc = dm->dc;
7840 dm->ddev,
7857 dm,
7868 amdgpu_dm_initialize_dp_connector(dm, aconnector, link->link_index);
8144 struct amdgpu_display_manager *dm,
8152 struct amdgpu_device *adev = dm->adev;
8175 dm->freesync_module,
8183 mod_freesync_handle_v_update(dm->freesync_module,
8187 dc_stream_adjust_vmin_vmax(dm->dc,
8210 dm->freesync_module,
8239 struct amdgpu_display_manager *dm,
8245 struct amdgpu_device *adev = dm->adev;
8285 mod_freesync_build_vrr_params(dm->freesync_module,
8351 struct amdgpu_display_manager *dm,
8436 amdgpu_dm_plane_fill_dc_scaling_info(dm->adev, new_plane_state,
8452 dm->adev, new_plane_state,
8486 mutex_lock(&dm->dc_lock);
8491 mutex_unlock(&dm->dc_lock);
8528 dm,
8573 (amdgpu_display_get_crtc_scanoutpos(dm->ddev, acrtc_attach->crtc_id,
8627 if (dm->vblank_control_workqueue)
8628 flush_workqueue(dm->vblank_control_workqueue);
8657 mutex_lock(&dm->dc_lock);
8661 mutex_unlock(&dm->dc_lock);
8671 dm->dc, acrtc_state->stream,
8675 mutex_lock(&dm->dc_lock);
8676 update_planes_and_stream_adapter(dm->dc,
8751 mutex_unlock(&dm->dc_lock);
8803 mutex_lock(&adev->dm.audio_lock);
8806 mutex_unlock(&adev->dm.audio_lock);
8838 mutex_lock(&adev->dm.audio_lock);
8841 mutex_unlock(&adev->dm.audio_lock);
8861 static void dm_clear_writeback(struct amdgpu_display_manager *dm,
8864 dc_stream_remove_writeback(dm->dc, crtc_state->stream, 0);
8872 struct amdgpu_display_manager *dm = &adev->dm;
8905 dm_clear_writeback(dm, dm_old_crtc_state);
8947 mutex_lock(&dm->dc_lock);
8949 mutex_unlock(&dm->dc_lock);
9008 if (dm->vblank_control_workqueue)
9009 flush_workqueue(dm->vblank_control_workqueue);
9011 amdgpu_dm_replay_disable_all(dm);
9012 amdgpu_dm_psr_disable_all(dm);
9016 mutex_lock(&dm->dc_lock);
9017 WARN_ON(!dc_commit_streams(dm->dc, &params));
9020 if (dm->active_vblank_irq_count == 0)
9021 dc_allow_idle_optimizations(dm->dc, true);
9022 mutex_unlock(&dm->dc_lock);
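Lines 9008-9022 show the commit-tail sequencing: drain deferred vblank work and disable replay/PSR before taking dc_lock, commit, and only re-allow idle optimizations when no CRTC still holds a vblank reference. A condensed sketch in driver idiom:

static void commit_streams_sketch(struct amdgpu_display_manager *dm,
				  struct dc_commit_streams_params *params)
{
	/* Nothing queued may race the commit below. */
	if (dm->vblank_control_workqueue)
		flush_workqueue(dm->vblank_control_workqueue);

	amdgpu_dm_replay_disable_all(dm);
	amdgpu_dm_psr_disable_all(dm);

	mutex_lock(&dm->dc_lock);
	WARN_ON(!dc_commit_streams(dm->dc, params));
	/* Idle optimizations stay off while any vblank IRQ is live. */
	if (dm->active_vblank_irq_count == 0)
		dc_allow_idle_optimizations(dm->dc, true);
	mutex_unlock(&dm->dc_lock);
}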
9046 static void dm_set_writeback(struct amdgpu_display_manager *dm,
9052 struct amdgpu_device *adev = dm->adev;
9080 if (dm->dc->current_state->res_ctx.pipe_ctx[i].stream == crtc_state->stream) {
9081 pipe = &dm->dc->current_state->res_ctx.pipe_ctx[i];
9138 dc_stream_add_writeback(dm->dc, crtc_state->stream, wb_info);
9157 struct amdgpu_display_manager *dm = &adev->dm;
9172 if (dm->dc->caps.ips_support && dm->dc->idle_optimizations_allowed)
9173 dc_allow_idle_optimizations(dm->dc, false);
9194 if (!adev->dm.hdcp_workqueue)
9245 if (!adev->dm.hdcp_workqueue)
9260 hdcp_reset_display(adev->dm.hdcp_workqueue, aconnector->dc_link->link_index);
9267 old_con_state, connector, adev->dm.hdcp_workqueue)) {
9284 struct hdcp_workqueue *hdcp_work = adev->dm.hdcp_workqueue;
9301 adev->dm.hdcp_workqueue, aconnector->dc_link->link_index, aconnector,
9384 mutex_lock(&dm->dc_lock);
9385 dc_update_planes_and_stream(dm->dc,
9390 mutex_unlock(&dm->dc_lock);
9413 update_stream_irq_parameters(dm, dm_new_crtc_state);
9470 amdgpu_dm_commit_planes(state, dev, dm, crtc, wait_for_vblank);
9494 dm_set_writeback(dm, dm_new_crtc_state, connector, new_con_state);
9502 for (i = 0; i < dm->num_of_edps; i++) {
9503 if (dm->backlight_dev[i] &&
9504 (dm->actual_brightness[i] != dm->brightness[i]))
9505 amdgpu_dm_backlight_set_level(dm, i, dm->brightness[i]);
9791 static int dm_update_crtc_state(struct amdgpu_display_manager *dm,
9866 dm->force_timing_sync;
9957 dm->dc,
10000 dm->dc,
10607 struct dc *dc = adev->dm.dc;
10767 ret = dm_update_crtc_state(&adev->dm, state, crtc,
10780 ret = dm_update_crtc_state(&adev->dm, state, crtc,
10972 if (obj->funcs == adev->dm.atomic_obj.funcs) {
11057 static bool dm_edid_parser_send_cea(struct amdgpu_display_manager *dm,
11085 res = dc_wake_and_execute_dmub_cmd(dm->dc->ctx, &cmd, DM_DMUB_WAIT_TYPE_WAIT_WITH_REPLY);
11114 static bool parse_edid_cea_dmcu(struct amdgpu_display_manager *dm,
11126 if (!dc_edid_parser_send_cea(dm->dc, i, len, &edid_ext[i], 8))
11133 res = dc_edid_parser_recv_amd_vsdb(dm->dc, &version, &min_rate, &max_rate);
11147 res = dc_edid_parser_recv_cea_ack(dm->dc, &offset);
11155 static bool parse_edid_cea_dmub(struct amdgpu_display_manager *dm,
11164 if (!dm_edid_parser_send_cea(dm, i, len, &edid_ext[i], 8, vsdb_info))
11178 mutex_lock(&adev->dm.dc_lock);
11179 if (adev->dm.dmub_srv)
11180 ret = parse_edid_cea_dmub(&adev->dm, edid_ext, len, vsdb_info);
11182 ret = parse_edid_cea_dmcu(&adev->dm, edid_ext, len, vsdb_info);
11183 mutex_unlock(&adev->dm.dc_lock);
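Lines 11057-11183 show the two EDID CEA parser backends: the DMUB path is preferred whenever a DMUB service exists, the legacy DMCU path is the fallback, and both run under dc_lock. A sketch of just the dispatch (the surrounding connector plumbing is elided):

static bool parse_edid_cea_sketch(struct amdgpu_device *adev,
				  u8 *edid_ext, int len,
				  struct amdgpu_hdmi_vsdb_info *vsdb_info)
{
	bool ret;

	mutex_lock(&adev->dm.dc_lock);
	if (adev->dm.dmub_srv)
		ret = parse_edid_cea_dmub(&adev->dm, edid_ext, len, vsdb_info);
	else
		ret = parse_edid_cea_dmcu(&adev->dm, edid_ext, len, vsdb_info);
	mutex_unlock(&adev->dm.dc_lock);

	return ret;
}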
11305 if (!adev->dm.freesync_module)
11312 if (is_dp_capable_without_timing_msa(adev->dm.dc,
11427 struct dc *dc = adev->dm.dc;
11430 mutex_lock(&adev->dm.dc_lock);
11435 adev->dm.force_timing_sync;
11440 mutex_unlock(&adev->dm.dc_lock);
11490 struct dmub_notification *p_notify = adev->dm.dmub_notify;
11493 mutex_lock(&adev->dm.dpia_aux_lock);
11499 if (!wait_for_completion_timeout(&adev->dm.dmub_aux_transfer_done, 10 * HZ)) {
11520 payload->reply[0] = adev->dm.dmub_notify->aux_reply.command;
11540 reinit_completion(&adev->dm.dmub_aux_transfer_done);
11541 mutex_unlock(&adev->dm.dpia_aux_lock);
11555 mutex_lock(&adev->dm.dpia_aux_lock);
11557 link_index, payload, adev->dm.dmub_notify);
11559 if (is_cmd_complete || wait_for_completion_timeout(&adev->dm.dmub_aux_transfer_done, 10 * HZ)) {
11561 *operation_result = adev->dm.dmub_notify->sc_status;
11569 reinit_completion(&adev->dm.dmub_aux_transfer_done);
11570 mutex_unlock(&adev->dm.dpia_aux_lock);
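The final cluster (11490-11570) is the synchronous-over-asynchronous DPIA AUX pattern: dpia_aux_lock serializes transfers, the DMUB reply is copied into dm.dmub_notify by the outbox handler (compare lines 722-725), and the caller blocks on a completion with a 10-second timeout that is re-armed before the lock drops. A condensed sketch; the request submission and full reply unpacking are elided, and the error-code choice is illustrative:

static int dpia_aux_sketch(struct amdgpu_device *adev, int link_index,
			   struct aux_payload *payload)
{
	int ret = 0;

	mutex_lock(&adev->dm.dpia_aux_lock);

	/* ... submit the AUX request to DMUB here (elided) ... */

	if (!wait_for_completion_timeout(&adev->dm.dmub_aux_transfer_done,
					 10 * HZ)) {
		ret = -ETIMEDOUT;
		goto out;
	}

	/* Reply was copied into adev->dm.dmub_notify by the outbox ISR. */
	payload->reply[0] = adev->dm.dmub_notify->aux_reply.command;

out:
	/* Re-arm for the next caller before releasing the lock. */
	reinit_completion(&adev->dm.dmub_aux_transfer_done);
	mutex_unlock(&adev->dm.dpia_aux_lock);
	return ret;
}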