switch (clocks_type) {
case DM_PP_CLOCK_TYPE_DISPLAY_CLK:
- /*if (clocks_in_khz > dc->dcn_soc->max_dispclk_vmax0p9*1000) {
+ if (clocks_in_khz > dc->dcn_soc->max_dispclk_vmax0p9*1000) {
vdd_level = dcn_bw_v_max0p91;
- //BREAK_TO_DEBUGGER();
- } else*/ if (clocks_in_khz > dc->dcn_soc->max_dispclk_vnom0p8*1000) {
+ BREAK_TO_DEBUGGER();
+ } else if (clocks_in_khz > dc->dcn_soc->max_dispclk_vnom0p8*1000) {
vdd_level = dcn_bw_v_max0p9;
} else if (clocks_in_khz > dc->dcn_soc->max_dispclk_vmid0p72*1000) {
vdd_level = dcn_bw_v_nom0p8;
vdd_level = dcn_bw_v_min0p65;
break;
case DM_PP_CLOCK_TYPE_DISPLAYPHYCLK:
- /*if (clocks_in_khz > dc->dcn_soc->phyclkv_max0p9*1000) {
+ if (clocks_in_khz > dc->dcn_soc->phyclkv_max0p9*1000) {
vdd_level = dcn_bw_v_max0p91;
BREAK_TO_DEBUGGER();
- } else*/ if (clocks_in_khz > dc->dcn_soc->phyclkv_nom0p8*1000) {
+ } else if (clocks_in_khz > dc->dcn_soc->phyclkv_nom0p8*1000) {
vdd_level = dcn_bw_v_max0p9;
} else if (clocks_in_khz > dc->dcn_soc->phyclkv_mid0p72*1000) {
vdd_level = dcn_bw_v_nom0p8;
break;
case DM_PP_CLOCK_TYPE_DPPCLK:
- /*if (clocks_in_khz > dc->dcn_soc->max_dppclk_vmax0p9*1000) {
+ if (clocks_in_khz > dc->dcn_soc->max_dppclk_vmax0p9*1000) {
vdd_level = dcn_bw_v_max0p91;
BREAK_TO_DEBUGGER();
- } else*/ if (clocks_in_khz > dc->dcn_soc->max_dppclk_vnom0p8*1000) {
+ } else if (clocks_in_khz > dc->dcn_soc->max_dppclk_vnom0p8*1000) {
vdd_level = dcn_bw_v_max0p9;
} else if (clocks_in_khz > dc->dcn_soc->max_dppclk_vmid0p72*1000) {
vdd_level = dcn_bw_v_nom0p8;
{
unsigned factor = (ddr4_dram_factor_single_Channel * dc->dcn_soc->number_of_channels);
- /*if (clocks_in_khz > dc->dcn_soc->fabric_and_dram_bandwidth_vmax0p9*1000000/factor) {
+ if (clocks_in_khz > dc->dcn_soc->fabric_and_dram_bandwidth_vmax0p9*1000000/factor) {
vdd_level = dcn_bw_v_max0p91;
BREAK_TO_DEBUGGER();
- } else */if (clocks_in_khz > dc->dcn_soc->fabric_and_dram_bandwidth_vnom0p8*1000000/factor) {
+ } else if (clocks_in_khz > dc->dcn_soc->fabric_and_dram_bandwidth_vnom0p8*1000000/factor) {
vdd_level = dcn_bw_v_max0p9;
} else if (clocks_in_khz > dc->dcn_soc->fabric_and_dram_bandwidth_vmid0p72*1000000/factor) {
vdd_level = dcn_bw_v_nom0p8;
break;
case DM_PP_CLOCK_TYPE_DCFCLK:
- /*if (clocks_in_khz > dc->dcn_soc->dcfclkv_max0p9*1000) {
+ if (clocks_in_khz > dc->dcn_soc->dcfclkv_max0p9*1000) {
vdd_level = dcn_bw_v_max0p91;
BREAK_TO_DEBUGGER();
- } else */if (clocks_in_khz > dc->dcn_soc->dcfclkv_nom0p8*1000) {
+ } else if (clocks_in_khz > dc->dcn_soc->dcfclkv_nom0p8*1000) {
vdd_level = dcn_bw_v_max0p9;
} else if (clocks_in_khz > dc->dcn_soc->dcfclkv_mid0p72*1000) {
vdd_level = dcn_bw_v_nom0p8;
max_link_rate = LINK_RATE_HIGH3;
if (link_settings.link_rate == max_link_rate) {
- if (state->dis_clk->funcs->set_min_clocks_state) {
- if (state->dis_clk->cur_min_clks_state < DM_PP_CLOCKS_STATE_NOMINAL)
- state->dis_clk->funcs->set_min_clocks_state(
- state->dis_clk, DM_PP_CLOCKS_STATE_NOMINAL);
- } else {
- uint32_t dp_phyclk_in_khz;
- const struct dc_clocks clocks_value =
- state->dis_clk->clks;
-
- /* 27mhz = 27000000hz= 27000khz */
- dp_phyclk_in_khz = link_settings.link_rate * 27000;
-
- if (dp_phyclk_in_khz > clocks_value.phyclk_khz) {
- state->dis_clk->funcs->apply_clock_voltage_request(
- state->dis_clk,
- DM_PP_CLOCK_TYPE_DISPLAYPHYCLK,
- dp_phyclk_in_khz,
- false,
- true);
- }
- }
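+ /* request clocks for the new link rate through update_clocks */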
+ struct dc_clocks clocks = state->bw.dcn.calc_clk;
+
+ /* dce/dcn compat, do not update dispclk */
+ clocks.dispclk_khz = 0;
+ /* 27 MHz = 27000000 Hz = 27000 kHz */
+ clocks.phyclk_khz = link_settings.link_rate * 27000;
+
+ state->dis_clk->funcs->update_clocks(
+ state->dis_clk, &clocks, false);
}
dp_enable_link_phy(
}
static enum dm_pp_clocks_state dce_get_required_clocks_state(
struct display_clock *clk,
- struct state_dependent_clocks *req_clocks)
+ struct dc_clocks *req_clocks)
{
struct dce_disp_clk *clk_dce = TO_DCE_CLOCKS(clk);
int i;
* all required clocks
*/
for (i = clk->max_clks_state; i >= DM_PP_CLOCKS_STATE_ULTRA_LOW; i--)
- if (req_clocks->display_clk_khz >
+ if (req_clocks->dispclk_khz >
clk_dce->max_clks_by_state[i].display_clk_khz
- || req_clocks->pixel_clk_khz >
+ || req_clocks->phyclk_khz >
clk_dce->max_clks_by_state[i].pixel_clk_khz)
break;
low_req_clk = i + 1;
if (low_req_clk > clk->max_clks_state) {
- DC_LOG_WARNING("%s: clocks unsupported disp_clk %d pix_clk %d",
- __func__,
- req_clocks->display_clk_khz,
- req_clocks->pixel_clk_khz);
- low_req_clk = DM_PP_CLOCKS_STATE_INVALID;
+ /* use the max clock state when only phyclk is too high; an excessive dispclk is still invalid */
+ if (clk_dce->max_clks_by_state[clk->max_clks_state].display_clk_khz
+ < req_clocks->dispclk_khz)
+ low_req_clk = DM_PP_CLOCKS_STATE_INVALID;
+ else
+ low_req_clk = clk->max_clks_state;
}
return low_req_clk;
}
-static bool dce_clock_set_min_clocks_state(
- struct display_clock *clk,
- enum dm_pp_clocks_state clocks_state)
-{
- struct dm_pp_power_level_change_request level_change_req = {
- clocks_state };
-
- if (clocks_state > clk->max_clks_state) {
- /*Requested state exceeds max supported state.*/
- DC_LOG_WARNING("Requested state exceeds max supported state");
- return false;
- } else if (clocks_state == clk->cur_min_clks_state) {
- /*if we're trying to set the same state, we can just return
- * since nothing needs to be done*/
- return true;
- }
-
- /* get max clock state from PPLIB */
- if (dm_pp_apply_power_level_change_request(clk->ctx, &level_change_req))
- clk->cur_min_clks_state = clocks_state;
-
- return true;
-}
-
static int dce_set_clock(
struct display_clock *clk,
int requested_clk_khz)
if (!debug->disable_dfs_bypass && bp->integrated_info)
if (bp->integrated_info->gpu_cap_info & DFS_BYPASS_ENABLE)
clk_dce->dfs_bypass_enabled = true;
-
- clk_dce->use_max_disp_clk = debug->max_disp_clk;
}
static void dce_clock_read_ss_info(struct dce_disp_clk *clk_dce)
}
}
-static bool dce_apply_clock_voltage_request(
- struct display_clock *clk,
- enum dm_pp_clock_type clocks_type,
- int clocks_in_khz,
- bool pre_mode_set,
- bool update_dp_phyclk)
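+/* DCE12: send dispclk and phyclk voltage requests straight to PPLIB;
+ * clocks are only lowered when safe_to_lower is set
+ */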
+static void dce12_update_clocks(struct display_clock *dccg,
+ struct dc_clocks *new_clocks,
+ bool safe_to_lower)
{
- bool send_request = false;
struct dm_pp_clock_for_voltage_req clock_voltage_req = {0};
- switch (clocks_type) {
- case DM_PP_CLOCK_TYPE_DISPLAY_CLK:
- case DM_PP_CLOCK_TYPE_DISPLAYPHYCLK:
- break;
- default:
- return false;
+ if ((new_clocks->dispclk_khz < dccg->clks.dispclk_khz && safe_to_lower)
+ || new_clocks->dispclk_khz > dccg->clks.dispclk_khz) {
+ clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DISPLAY_CLK;
+ clock_voltage_req.clocks_in_khz = new_clocks->dispclk_khz;
+ dccg->funcs->set_dispclk(dccg, new_clocks->dispclk_khz);
+ dccg->clks.dispclk_khz = new_clocks->dispclk_khz;
+
+ dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
}
- clock_voltage_req.clk_type = clocks_type;
- clock_voltage_req.clocks_in_khz = clocks_in_khz;
-
- /* to pplib */
- if (pre_mode_set) {
- switch (clocks_type) {
- case DM_PP_CLOCK_TYPE_DISPLAY_CLK:
- if (clocks_in_khz > clk->clks.dispclk_khz) {
- clk->dispclk_notify_pplib_done = true;
- send_request = true;
- } else
- clk->dispclk_notify_pplib_done = false;
- /* no matter incrase or decrase clock, update current clock value */
- clk->clks.dispclk_khz = clocks_in_khz;
- break;
- case DM_PP_CLOCK_TYPE_DISPLAYPHYCLK:
- if (clocks_in_khz > clk->clks.phyclk_khz) {
- clk->phyclk_notify_pplib_done = true;
- send_request = true;
- } else
- clk->phyclk_notify_pplib_done = false;
- /* no matter incrase or decrase clock, update current clock value */
- clk->clks.phyclk_khz = clocks_in_khz;
- break;
- default:
- ASSERT(0);
- break;
- }
+ if ((new_clocks->phyclk_khz < dccg->clks.phyclk_khz && safe_to_lower)
+ || new_clocks->phyclk_khz > dccg->clks.phyclk_khz) {
+ clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DISPLAYPHYCLK;
+ clock_voltage_req.clocks_in_khz = new_clocks->phyclk_khz;
+ dccg->clks.phyclk_khz = new_clocks->phyclk_khz;
- } else {
- switch (clocks_type) {
- case DM_PP_CLOCK_TYPE_DISPLAY_CLK:
- if (!clk->dispclk_notify_pplib_done)
- send_request = true;
- clk->dispclk_notify_pplib_done = true;
- break;
- case DM_PP_CLOCK_TYPE_DISPLAYPHYCLK:
- if (!clk->phyclk_notify_pplib_done)
- send_request = true;
- clk->phyclk_notify_pplib_done = true;
- break;
- default:
- ASSERT(0);
- break;
- }
+ dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
}
- if (send_request) {
-#if defined(CONFIG_DRM_AMD_DC_DCN1_0)
- if (clk->ctx->dce_version >= DCN_VERSION_1_0
+}
+
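+/* DCN: when any clock increases, raise the dcfclk-based voltage level first;
+ * when clocks only decrease, send the voltage request after lowering them
+ */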
+static void dcn_update_clocks(struct display_clock *dccg,
+ struct dc_clocks *new_clocks,
+ bool safe_to_lower)
+{
+ struct dm_pp_clock_for_voltage_req clock_voltage_req = {0};
+ bool send_request_to_increase = false;
+ bool send_request_to_lower = false;
+
+ if (new_clocks->dispclk_khz > dccg->clks.dispclk_khz
+ || new_clocks->phyclk_khz > dccg->clks.phyclk_khz
+ || new_clocks->fclk_khz > dccg->clks.fclk_khz
+ || new_clocks->dcfclk_khz > dccg->clks.dcfclk_khz)
+ send_request_to_increase = true;
+
+#ifdef CONFIG_DRM_AMD_DC_DCN1_0
+ if (send_request_to_increase
) {
- struct dc *core_dc = clk->ctx->dc;
- /*use dcfclk request voltage*/
- clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DCFCLK;
- clock_voltage_req.clocks_in_khz =
- dcn_find_dcfclk_suits_all(core_dc, &clk->clks);
- }
+ struct dc *core_dc = dccg->ctx->dc;
+
+ /*use dcfclk to request voltage*/
+ clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DCFCLK;
+ clock_voltage_req.clocks_in_khz = dcn_find_dcfclk_suits_all(core_dc, new_clocks);
+ dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
+ }
#endif
- dm_pp_apply_clock_for_voltage_request(
- clk->ctx, &clock_voltage_req);
+
+ if ((new_clocks->dispclk_khz < dccg->clks.dispclk_khz && safe_to_lower)
+ || new_clocks->dispclk_khz > dccg->clks.dispclk_khz) {
+ clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DISPLAY_CLK;
+ clock_voltage_req.clocks_in_khz = new_clocks->dispclk_khz;
+ /* TODO: ramp up - dccg->funcs->set_dispclk(dccg, new_clocks->dispclk_khz);*/
+ dccg->clks.dispclk_khz = new_clocks->dispclk_khz;
+
+ dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
+ send_request_to_lower = true;
}
- if (update_dp_phyclk && (clocks_in_khz >
- clk->clks.phyclk_khz))
- clk->clks.phyclk_khz = clocks_in_khz;
- return true;
+ if ((new_clocks->phyclk_khz < dccg->clks.phyclk_khz && safe_to_lower)
+ || new_clocks->phyclk_khz > dccg->clks.phyclk_khz) {
+ dccg->clks.phyclk_khz = new_clocks->phyclk_khz;
+ clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DISPLAYPHYCLK;
+ clock_voltage_req.clocks_in_khz = new_clocks->phyclk_khz;
+
+ dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
+ send_request_to_lower = true;
+ }
+
+ if ((new_clocks->fclk_khz < dccg->clks.fclk_khz && safe_to_lower)
+ || new_clocks->fclk_khz > dccg->clks.fclk_khz) {
+ dccg->clks.fclk_khz = new_clocks->fclk_khz;
+ clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_FCLK;
+ clock_voltage_req.clocks_in_khz = new_clocks->fclk_khz;
+
+ dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
+ send_request_to_lower = true;
+ }
+
+ if ((new_clocks->dcfclk_khz < dccg->clks.dcfclk_khz && safe_to_lower)
+ || new_clocks->dcfclk_khz > dccg->clks.dcfclk_khz) {
+ dccg->clks.dcfclk_khz = new_clocks->dcfclk_khz;
+ clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DCFCLK;
+ clock_voltage_req.clocks_in_khz = new_clocks->dcfclk_khz;
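+ /* the dcfclk voltage request itself is sent above (increase) or below
+ * (decrease) via dcn_find_dcfclk_suits_all
+ */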
+
+ send_request_to_lower = true;
+ }
+
+#ifdef CONFIG_DRM_AMD_DC_DCN1_0
+ if (!send_request_to_increase && send_request_to_lower) {
+ struct dc *core_dc = dccg->ctx->dc;
+
+ /*use dcfclk to request voltage*/
+ clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DCFCLK;
+ clock_voltage_req.clocks_in_khz = dcn_find_dcfclk_suits_all(core_dc, new_clocks);
+ dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
+ }
+#endif
+}
+
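+/* DCE: translate the requested clocks into a PPLIB clock state and program
+ * dispclk; both are only lowered when safe_to_lower is set
+ */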
+static void dce_update_clocks(struct display_clock *dccg,
+ struct dc_clocks *new_clocks,
+ bool safe_to_lower)
+{
+ struct dm_pp_power_level_change_request level_change_req;
+
+ level_change_req.power_level = dce_get_required_clocks_state(dccg, new_clocks);
+ /* request the new minimum clock state from PPLIB */
+ if ((level_change_req.power_level < dccg->cur_min_clks_state && safe_to_lower)
+ || level_change_req.power_level > dccg->cur_min_clks_state) {
+ if (dm_pp_apply_power_level_change_request(dccg->ctx, &level_change_req))
+ dccg->cur_min_clks_state = level_change_req.power_level;
+ }
+
+ if ((new_clocks->dispclk_khz < dccg->clks.dispclk_khz && safe_to_lower)
+ || new_clocks->dispclk_khz > dccg->clks.dispclk_khz) {
+ dccg->funcs->set_dispclk(dccg, new_clocks->dispclk_khz);
+ dccg->clks.dispclk_khz = new_clocks->dispclk_khz;
+ }
}
+static const struct display_clock_funcs dcn_funcs = {
+ .get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq_wrkaround,
+ .set_dispclk = dce112_set_clock,
+ .update_clocks = dcn_update_clocks
+};
static const struct display_clock_funcs dce120_funcs = {
.get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq_wrkaround,
- .apply_clock_voltage_request = dce_apply_clock_voltage_request,
- .set_clock = dce112_set_clock
+ .set_dispclk = dce112_set_clock,
+ .update_clocks = dce12_update_clocks
};
static const struct display_clock_funcs dce112_funcs = {
.get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq,
- .get_required_clocks_state = dce_get_required_clocks_state,
- .set_min_clocks_state = dce_clock_set_min_clocks_state,
- .set_clock = dce112_set_clock
+ .set_dispclk = dce112_set_clock,
+ .update_clocks = dce_update_clocks
};
static const struct display_clock_funcs dce110_funcs = {
.get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq,
- .get_required_clocks_state = dce_get_required_clocks_state,
- .set_min_clocks_state = dce_clock_set_min_clocks_state,
- .set_clock = dce_psr_set_clock
+ .set_dispclk = dce_psr_set_clock,
+ .update_clocks = dce_update_clocks
};
static const struct display_clock_funcs dce_funcs = {
.get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq,
- .get_required_clocks_state = dce_get_required_clocks_state,
- .set_min_clocks_state = dce_clock_set_min_clocks_state,
- .set_clock = dce_set_clock
+ .set_dispclk = dce_set_clock,
+ .update_clocks = dce_update_clocks
};
static void dce_disp_clk_construct(
struct display_clock *dce120_disp_clk_create(struct dc_context *ctx)
{
struct dce_disp_clk *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL);
- struct dm_pp_clock_levels_with_voltage clk_level_info = {0};
if (clk_dce == NULL) {
BREAK_TO_DEBUGGER();
clk_dce->base.funcs = &dce120_funcs;
- /* new in dce120 */
- if (!ctx->dc->debug.disable_pplib_clock_request &&
- dm_pp_get_clock_levels_by_type_with_voltage(
- ctx, DM_PP_CLOCK_TYPE_DISPLAY_CLK, &clk_level_info)
- && clk_level_info.num_levels)
- clk_dce->max_displ_clk_in_khz =
- clk_level_info.data[clk_level_info.num_levels - 1].clocks_in_khz;
- else
- clk_dce->max_displ_clk_in_khz = 1133000;
+ return &clk_dce->base;
+}
+
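+/* DCN display clock object: reuses the DCE constructor and state but hooks
+ * the DCN update_clocks implementation
+ */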
+struct display_clock *dcn_disp_clk_create(struct dc_context *ctx)
+{
+ struct dce_disp_clk *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL);
+
+ if (clk_dce == NULL) {
+ BREAK_TO_DEBUGGER();
+ return NULL;
+ }
+
+ /* TODO: strip the useful parts out of the dce constructor */
+ dce_disp_clk_construct(
+ clk_dce, ctx, NULL, NULL, NULL);
+
+ clk_dce->base.funcs = &dcn_funcs;
return &clk_dce->base;
}
struct state_dependent_clocks max_clks_by_state[DM_PP_CLOCKS_MAX_STATES];
struct dce_divider_range divider_ranges[DIVIDER_RANGE_MAX];
- bool use_max_disp_clk;
int dentist_vco_freq_khz;
/* Cache the status of DFS-bypass feature*/
int dprefclk_ss_percentage;
/* DPREFCLK SS percentage Divider (100 or 1000) */
int dprefclk_ss_divider;
-
- /* max disp_clk from PPLIB for max validation display clock*/
- int max_displ_clk_in_khz;
};
struct display_clock *dce120_disp_clk_create(struct dc_context *ctx);
+struct display_clock *dcn_disp_clk_create(struct dc_context *ctx);
+
void dce_disp_clk_destroy(struct display_clock **disp_clk);
#endif /* _DCE_CLOCKS_H_ */
dc->prev_display_config = *pp_display_cfg;
}
+/* unit: in_khz before mode set, get pixel clock from context. ASIC register
+ * may not be programmed yet
+ */
+static uint32_t get_max_pixel_clock_for_all_paths(
+ struct dc *dc,
+ struct dc_state *context)
+{
+ uint32_t max_pix_clk = 0;
+ int i;
+
+ for (i = 0; i < MAX_PIPES; i++) {
+ struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
+
+ if (pipe_ctx->stream == NULL)
+ continue;
+
+ /* do not check underlay pipes */
+ if (pipe_ctx->top_pipe)
+ continue;
+
+ if (pipe_ctx->stream_res.pix_clk_params.requested_pix_clk > max_pix_clk)
+ max_pix_clk =
+ pipe_ctx->stream_res.pix_clk_params.requested_pix_clk;
+ }
+
+ ASSERT(max_pix_clk != 0);
+
+ return max_pix_clk;
+}
+
void dce100_set_bandwidth(
struct dc *dc,
struct dc_state *context,
bool decrease_allowed)
{
- if (decrease_allowed || context->bw.dce.dispclk_khz > dc->current_state->bw.dce.dispclk_khz) {
- dc->res_pool->display_clock->funcs->set_clock(
- dc->res_pool->display_clock,
- context->bw.dce.dispclk_khz * 115 / 100);
- dc->current_state->bw.dce.dispclk_khz = context->bw.dce.dispclk_khz;
- }
+ struct dc_clocks req_clks;
+
+ req_clks.dispclk_khz = context->bw.dce.dispclk_khz * 115 / 100;
+ req_clks.phyclk_khz = get_max_pixel_clock_for_all_paths(dc, context);
+
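+ /* program safe display marks before changing the clocks */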
+ dce110_set_safe_displaymarks(&context->res_ctx, dc->res_pool);
+
+ dc->res_pool->display_clock->funcs->update_clocks(
+ dc->res_pool->display_clock,
+ &req_clks,
+ decrease_allowed);
+
dce100_pplib_apply_display_requirements(dc, context);
}
}
}
-static void set_safe_displaymarks(
+void dce110_set_safe_displaymarks(
struct resource_context *res_ctx,
const struct resource_pool *pool)
{
}
/* unit: in_khz before mode set, get pixel clock from context. ASIC register
- * may not be programmed yet.
- * TODO: after mode set, pre_mode_set = false,
- * may read PLL register to get pixel clock
+ * may not be programmed yet
*/
static uint32_t get_max_pixel_clock_for_all_paths(
struct dc *dc,
- struct dc_state *context,
- bool pre_mode_set)
+ struct dc_state *context)
{
uint32_t max_pix_clk = 0;
int i;
- if (!pre_mode_set) {
- /* TODO: read ASIC register to get pixel clock */
- ASSERT(0);
- }
-
for (i = 0; i < MAX_PIPES; i++) {
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
return max_pix_clk;
}
-/*
- * Find clock state based on clock requested. if clock value is 0, simply
- * set clock state as requested without finding clock state by clock value
- */
-
-static void apply_min_clocks(
- struct dc *dc,
- struct dc_state *context,
- enum dm_pp_clocks_state *clocks_state,
- bool pre_mode_set)
-{
- struct state_dependent_clocks req_clocks = {0};
-
- if (!pre_mode_set) {
- /* set clock_state without verification */
- if (context->dis_clk->funcs->set_min_clocks_state) {
- context->dis_clk->funcs->set_min_clocks_state(
- context->dis_clk, *clocks_state);
- return;
- }
-
- /* TODO: This is incorrect. Figure out how to fix. */
- context->dis_clk->funcs->apply_clock_voltage_request(
- context->dis_clk,
- DM_PP_CLOCK_TYPE_DISPLAY_CLK,
- context->dis_clk->clks.dispclk_khz,
- pre_mode_set,
- false);
-
- context->dis_clk->funcs->apply_clock_voltage_request(
- context->dis_clk,
- DM_PP_CLOCK_TYPE_DISPLAYPHYCLK,
- context->dis_clk->clks.phyclk_khz,
- pre_mode_set,
- false);
- return;
- }
-
- /* get the required state based on state dependent clocks:
- * display clock and pixel clock
- */
- req_clocks.display_clk_khz = context->bw.dce.dispclk_khz;
-
- req_clocks.pixel_clk_khz = get_max_pixel_clock_for_all_paths(
- dc, context, true);
-
- if (context->dis_clk->funcs->get_required_clocks_state) {
- *clocks_state = context->dis_clk->funcs->get_required_clocks_state(
- context->dis_clk, &req_clocks);
- context->dis_clk->funcs->set_min_clocks_state(
- context->dis_clk, *clocks_state);
- } else {
- context->dis_clk->funcs->apply_clock_voltage_request(
- context->dis_clk,
- DM_PP_CLOCK_TYPE_DISPLAY_CLK,
- req_clocks.display_clk_khz,
- pre_mode_set,
- false);
-
- context->dis_clk->funcs->apply_clock_voltage_request(
- context->dis_clk,
- DM_PP_CLOCK_TYPE_DISPLAYPHYCLK,
- req_clocks.pixel_clk_khz,
- pre_mode_set,
- false);
- }
-}
-
/*
* Check if FBC can be enabled
*/
struct dc_bios *dcb = dc->ctx->dc_bios;
enum dc_status status;
int i;
- enum dm_pp_clocks_state clocks_state = DM_PP_CLOCKS_STATE_INVALID;
/* Reset old context */
/* look up the targets that have been removed since last commit */
PIPE_GATING_CONTROL_DISABLE);
}
- set_safe_displaymarks(&context->res_ctx, dc->res_pool);
-
if (dc->fbc_compressor)
dc->fbc_compressor->funcs->disable_fbc(dc->fbc_compressor);
- /*TODO: when pplib works*/
- apply_min_clocks(dc, context, &clocks_state, true);
-
-#if defined(CONFIG_DRM_AMD_DC_DCN1_0)
- if (dc->ctx->dce_version >= DCN_VERSION_1_0) {
- if (context->bw.dcn.calc_clk.fclk_khz
- > dc->current_state->bw.dcn.cur_clk.fclk_khz) {
- struct dm_pp_clock_for_voltage_req clock;
-
- clock.clk_type = DM_PP_CLOCK_TYPE_FCLK;
- clock.clocks_in_khz = context->bw.dcn.calc_clk.fclk_khz;
- dm_pp_apply_clock_for_voltage_request(dc->ctx, &clock);
- dc->current_state->bw.dcn.cur_clk.fclk_khz = clock.clocks_in_khz;
- context->bw.dcn.cur_clk.fclk_khz = clock.clocks_in_khz;
- }
- if (context->bw.dcn.calc_clk.dcfclk_khz
- > dc->current_state->bw.dcn.cur_clk.dcfclk_khz) {
- struct dm_pp_clock_for_voltage_req clock;
-
- clock.clk_type = DM_PP_CLOCK_TYPE_DCFCLK;
- clock.clocks_in_khz = context->bw.dcn.calc_clk.dcfclk_khz;
- dm_pp_apply_clock_for_voltage_request(dc->ctx, &clock);
- dc->current_state->bw.dcn.cur_clk.dcfclk_khz = clock.clocks_in_khz;
- context->bw.dcn.cur_clk.dcfclk_khz = clock.clocks_in_khz;
- }
- if (context->bw.dcn.calc_clk.dispclk_khz
- > dc->current_state->bw.dcn.cur_clk.dispclk_khz) {
- dc->res_pool->display_clock->funcs->set_clock(
- dc->res_pool->display_clock,
- context->bw.dcn.calc_clk.dispclk_khz);
- dc->current_state->bw.dcn.cur_clk.dispclk_khz =
- context->bw.dcn.calc_clk.dispclk_khz;
- context->bw.dcn.cur_clk.dispclk_khz =
- context->bw.dcn.calc_clk.dispclk_khz;
- }
- } else
-#endif
- if (context->bw.dce.dispclk_khz
- > dc->current_state->bw.dce.dispclk_khz) {
- dc->res_pool->display_clock->funcs->set_clock(
- dc->res_pool->display_clock,
- context->bw.dce.dispclk_khz * 115 / 100);
- }
+ dc->hwss.set_bandwidth(dc, context, false);
dce110_setup_audio_dto(dc, context);
}
/* to save power */
- apply_min_clocks(dc, context, &clocks_state, false);
+ dc->hwss.set_bandwidth(dc, context, true);
dcb->funcs->set_scratch_critical_state(dcb, false);
struct dc_state *context,
bool decrease_allowed)
{
- dce110_set_displaymarks(dc, context);
+ struct dc_clocks req_clks;
- if (decrease_allowed || context->bw.dce.dispclk_khz > dc->current_state->bw.dce.dispclk_khz) {
- dc->res_pool->display_clock->funcs->set_clock(
- dc->res_pool->display_clock,
- context->bw.dce.dispclk_khz * 115 / 100);
- dc->current_state->bw.dce.dispclk_khz = context->bw.dce.dispclk_khz;
- }
+ req_clks.dispclk_khz = context->bw.dce.dispclk_khz * 115 / 100;
+ req_clks.phyclk_khz = get_max_pixel_clock_for_all_paths(dc, context);
+
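+ /* real display marks only when lowering is allowed, safe marks otherwise */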
+ if (decrease_allowed)
+ dce110_set_displaymarks(dc, context);
+ else
+ dce110_set_safe_displaymarks(&context->res_ctx, dc->res_pool);
+ dc->res_pool->display_clock->funcs->update_clocks(
+ dc->res_pool->display_clock,
+ &req_clks,
+ decrease_allowed);
pplib_apply_display_requirements(dc, context);
}
void dce110_power_down(struct dc *dc);
+void dce110_set_safe_displaymarks(
+ struct resource_context *res_ctx,
+ const struct resource_pool *pool);
+
void dce110_fill_display_configs(
const struct dc_state *context,
struct dm_pp_display_configuration *pp_display_cfg);
int dispclk_to_dpp_threshold = determine_dppclk_threshold(dc, context);
/* set disp clk to dpp clk threshold */
- dc->res_pool->display_clock->funcs->set_clock(
+ dc->res_pool->display_clock->funcs->set_dispclk(
dc->res_pool->display_clock,
dispclk_to_dpp_threshold);
/* If target clk not same as dppclk threshold, set to target clock */
if (dispclk_to_dpp_threshold != context->bw.dcn.calc_clk.dispclk_khz) {
- dc->res_pool->display_clock->funcs->set_clock(
+ dc->res_pool->display_clock->funcs->set_dispclk(
dc->res_pool->display_clock,
context->bw.dcn.calc_clk.dispclk_khz);
}
if (IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment))
return;
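+ /* request the calculated clocks from PPLIB via the display clock object */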
+ dc->res_pool->display_clock->funcs->update_clocks(
+ dc->res_pool->display_clock,
+ &context->bw.dcn.calc_clk,
+ decrease_allowed);
+
if (should_set_clock(
decrease_allowed,
context->bw.dcn.calc_clk.dcfclk_khz,
}
}
- pool->base.display_clock = dce120_disp_clk_create(ctx);
+ pool->base.display_clock = dcn_disp_clk_create(ctx);
if (pool->base.display_clock == NULL) {
dm_error("DC: failed to create display clock!\n");
BREAK_TO_DEBUGGER();
struct dc_context *ctx;
const struct display_clock_funcs *funcs;
- bool dispclk_notify_pplib_done;
- bool phyclk_notify_pplib_done;
enum dm_pp_clocks_state max_clks_state;
enum dm_pp_clocks_state cur_min_clks_state;
struct dc_clocks clks;
};
struct display_clock_funcs {
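+ /* update_clocks only lowers clocks when safe_to_lower is set */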
- int (*set_clock)(struct display_clock *disp_clk,
+ void (*update_clocks)(struct display_clock *dccg,
+ struct dc_clocks *new_clocks,
+ bool safe_to_lower);
+ int (*set_dispclk)(struct display_clock *disp_clk,
int requested_clock_khz);
- enum dm_pp_clocks_state (*get_required_clocks_state)(
- struct display_clock *disp_clk,
- struct state_dependent_clocks *req_clocks);
-
- bool (*set_min_clocks_state)(struct display_clock *disp_clk,
- enum dm_pp_clocks_state dm_pp_clocks_state);
-
int (*get_dp_ref_clk_frequency)(struct display_clock *disp_clk);
-
- bool (*apply_clock_voltage_request)(
- struct display_clock *disp_clk,
- enum dm_pp_clock_type clocks_type,
- int clocks_in_khz,
- bool pre_mode_set,
- bool update_dp_phyclk);
};
#endif /* __DISPLAY_CLOCK_H__ */