From: Dmytro Laktyushkin <Dmytro.Laktyushkin@amd.com>
Date: Wed, 23 May 2018 17:52:04 -0400
Subject: drm/amd/display: move clock programming from set_bandwidth to dccg
Git-commit: e2e0a1dcd3229eec32ded439f69438a25ec817d6
Patch-mainline: v4.19-rc1
References: FATE#326289 FATE#326079 FATE#326049 FATE#322398 FATE#326166

This change moves dcn clock programming (with the exception of dispclk)
into dccg. This should have no functional effect.
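
The repeated "only raise, or lower when safe" check is factored into a
should_set_clock() helper shared by the dce12/dcn/dce update paths. A
minimal standalone sketch of that predicate follows; the main() harness
is illustrative only and is not part of the driver:

  #include <stdbool.h>
  #include <stdio.h>

  /* Reprogram a clock when it must go up, or when it may go down and
   * lowering is currently allowed (safe_to_lower). */
  static inline bool should_set_clock(bool safe_to_lower, int calc_clk, int cur_clk)
  {
  	return ((safe_to_lower && calc_clk < cur_clk) || calc_clk > cur_clk);
  }

  int main(void)
  {
  	printf("%d\n", should_set_clock(false, 600000, 400000)); /* 1: raise always applied */
  	printf("%d\n", should_set_clock(false, 300000, 400000)); /* 0: lower deferred */
  	printf("%d\n", should_set_clock(true,  300000, 400000)); /* 1: lower once safe */
  	return 0;
  }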

Signed-off-by: Dmytro Laktyushkin <Dmytro.Laktyushkin@amd.com>
Reviewed-by: Tony Cheng <Tony.Cheng@amd.com>
Acked-by: Harry Wentland <harry.wentland@amd.com>
Signed-off-by: Alex Deucher <alexander.deucher@amd.com>
Acked-by: Petr Tesarik <ptesarik@suse.com>
---
 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c          |    2 
 drivers/gpu/drm/amd/display/dc/dce/dce_clocks.c           |   57 ++++++++----
 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c |   62 ++------------
 3 files changed, 51 insertions(+), 70 deletions(-)

--- a/drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c
+++ b/drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c
@@ -997,7 +997,7 @@ bool dcn_validate_bandwidth(
 		}
 
 		context->bw.dcn.calc_clk.dppclk_khz = context->bw.dcn.calc_clk.dispclk_khz / v->dispclk_dppclk_ratio;
-
+		context->bw.dcn.calc_clk.phyclk_khz = v->phyclk_per_state[v->voltage_level];
 		switch (v->voltage_level) {
 		case 0:
 			context->bw.dcn.calc_clk.max_supported_dppclk_khz =
--- a/drivers/gpu/drm/amd/display/dc/dce/dce_clocks.c
+++ b/drivers/gpu/drm/amd/display/dc/dce/dce_clocks.c
@@ -523,14 +523,18 @@ static void dce_clock_read_ss_info(struc
 	}
 }
 
+static inline bool should_set_clock(bool safe_to_lower, int calc_clk, int cur_clk)
+{
+	return ((safe_to_lower && calc_clk < cur_clk) || calc_clk > cur_clk);
+}
+
 static void dce12_update_clocks(struct dccg *dccg,
 			struct dc_clocks *new_clocks,
 			bool safe_to_lower)
 {
 	struct dm_pp_clock_for_voltage_req clock_voltage_req = {0};
 
-	if ((new_clocks->dispclk_khz < dccg->clks.dispclk_khz && safe_to_lower)
-			|| new_clocks->dispclk_khz > dccg->clks.dispclk_khz) {
+	if (should_set_clock(safe_to_lower, new_clocks->dispclk_khz, dccg->clks.dispclk_khz)) {
 		clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DISPLAY_CLK;
 		clock_voltage_req.clocks_in_khz = new_clocks->dispclk_khz;
 		dccg->funcs->set_dispclk(dccg, new_clocks->dispclk_khz);
@@ -539,8 +543,7 @@ static void dce12_update_clocks(struct d
 		dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
 	}
 
-	if ((new_clocks->phyclk_khz < dccg->clks.phyclk_khz && safe_to_lower)
-			|| new_clocks->phyclk_khz > dccg->clks.phyclk_khz) {
+	if (should_set_clock(safe_to_lower, new_clocks->phyclk_khz, dccg->clks.phyclk_khz)) {
 		clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DISPLAYPHYCLK;
 		clock_voltage_req.clocks_in_khz = new_clocks->phyclk_khz;
 		dccg->clks.phyclk_khz = new_clocks->phyclk_khz;
@@ -553,6 +556,11 @@ static void dcn_update_clocks(struct dcc
 			struct dc_clocks *new_clocks,
 			bool safe_to_lower)
 {
+	struct dc *dc = dccg->ctx->dc;
+	struct pp_smu_display_requirement_rv *smu_req_cur =
+			&dc->res_pool->pp_smu_req;
+	struct pp_smu_display_requirement_rv smu_req = *smu_req_cur;
+	struct pp_smu_funcs_rv *pp_smu = dc->res_pool->pp_smu;
 	struct dm_pp_clock_for_voltage_req clock_voltage_req = {0};
 	bool send_request_to_increase = false;
 	bool send_request_to_lower = false;
@@ -566,17 +574,14 @@ static void dcn_update_clocks(struct dcc
 #ifdef CONFIG_DRM_AMD_DC_DCN1_0
 	if (send_request_to_increase
 		) {
-		struct dc *core_dc = dccg->ctx->dc;
-
 		/*use dcfclk to request voltage*/
 		clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DCFCLK;
-		clock_voltage_req.clocks_in_khz = dcn_find_dcfclk_suits_all(core_dc, new_clocks);
+		clock_voltage_req.clocks_in_khz = dcn_find_dcfclk_suits_all(dc, new_clocks);
 		dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
 	}
 #endif
 
-	if ((new_clocks->dispclk_khz < dccg->clks.dispclk_khz && safe_to_lower)
-			|| new_clocks->dispclk_khz > dccg->clks.dispclk_khz) {
+	if (should_set_clock(safe_to_lower, new_clocks->dispclk_khz, dccg->clks.dispclk_khz)) {
 		clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DISPLAY_CLK;
 		clock_voltage_req.clocks_in_khz = new_clocks->dispclk_khz;
 		/* TODO: ramp up - dccg->funcs->set_dispclk(dccg, new_clocks->dispclk_khz);*/
@@ -586,8 +591,7 @@ static void dcn_update_clocks(struct dcc
 		send_request_to_lower = true;
 	}
 
-	if ((new_clocks->phyclk_khz < dccg->clks.phyclk_khz && safe_to_lower)
-			|| new_clocks->phyclk_khz > dccg->clks.phyclk_khz) {
+	if (should_set_clock(safe_to_lower, new_clocks->phyclk_khz, dccg->clks.phyclk_khz)) {
 		dccg->clks.phyclk_khz = new_clocks->phyclk_khz;
 		clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DISPLAYPHYCLK;
 		clock_voltage_req.clocks_in_khz = new_clocks->phyclk_khz;
@@ -596,36 +600,50 @@ static void dcn_update_clocks(struct dcc
 		send_request_to_lower = true;
 	}
 
-	if ((new_clocks->fclk_khz < dccg->clks.fclk_khz && safe_to_lower)
-			|| new_clocks->fclk_khz > dccg->clks.fclk_khz) {
+	if (should_set_clock(safe_to_lower, new_clocks->fclk_khz, dccg->clks.fclk_khz)) {
 		dccg->clks.phyclk_khz = new_clocks->fclk_khz;
 		clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_FCLK;
 		clock_voltage_req.clocks_in_khz = new_clocks->fclk_khz;
+		smu_req.hard_min_fclk_khz = new_clocks->fclk_khz;
 
 		dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
 		send_request_to_lower = true;
 	}
 
-	if ((new_clocks->dcfclk_khz < dccg->clks.dcfclk_khz && safe_to_lower)
-			|| new_clocks->dcfclk_khz > dccg->clks.dcfclk_khz) {
+	if (should_set_clock(safe_to_lower, new_clocks->dcfclk_khz, dccg->clks.dcfclk_khz)) {
 		dccg->clks.phyclk_khz = new_clocks->dcfclk_khz;
 		clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DCFCLK;
 		clock_voltage_req.clocks_in_khz = new_clocks->dcfclk_khz;
+		smu_req.hard_min_dcefclk_khz = new_clocks->dcfclk_khz;
 
 		send_request_to_lower = true;
 	}
 
+	if (should_set_clock(safe_to_lower,
+			new_clocks->dcfclk_deep_sleep_khz, dccg->clks.dcfclk_deep_sleep_khz)) {
+		dccg->clks.dcfclk_deep_sleep_khz = new_clocks->dcfclk_deep_sleep_khz;
+		smu_req.min_deep_sleep_dcefclk_mhz = new_clocks->dcfclk_deep_sleep_khz;
+	}
+
 #ifdef CONFIG_DRM_AMD_DC_DCN1_0
 	if (!send_request_to_increase && send_request_to_lower
 		) {
-		struct dc *core_dc = dccg->ctx->dc;
-
 		/*use dcfclk to request voltage*/
 		clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DCFCLK;
-		clock_voltage_req.clocks_in_khz = dcn_find_dcfclk_suits_all(core_dc, new_clocks);
+		clock_voltage_req.clocks_in_khz = dcn_find_dcfclk_suits_all(dc, new_clocks);
 		dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
 	}
 #endif
+
+	if (new_clocks->phyclk_khz)
+		smu_req.display_count = 1;
+	else
+		smu_req.display_count = 0;
+
+	if (pp_smu->set_display_requirement)
+		pp_smu->set_display_requirement(&pp_smu->pp_smu, &smu_req);
+
+	*smu_req_cur = smu_req;
 }
 
 static void dce_update_clocks(struct dccg *dccg,
@@ -642,8 +660,7 @@ static void dce_update_clocks(struct dcc
 			dccg->cur_min_clks_state = level_change_req.power_level;
 	}
 
-	if ((new_clocks->dispclk_khz < dccg->clks.dispclk_khz && safe_to_lower)
-			|| new_clocks->dispclk_khz > dccg->clks.dispclk_khz) {
+	if (should_set_clock(safe_to_lower, new_clocks->dispclk_khz, dccg->clks.dispclk_khz)) {
 		dccg->funcs->set_dispclk(dccg, new_clocks->dispclk_khz);
 		dccg->clks.dispclk_khz = new_clocks->dispclk_khz;
 	}
--- a/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c
+++ b/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c
@@ -2153,11 +2153,11 @@ static void dcn10_pplib_apply_display_re
 {
 	struct dm_pp_display_configuration *pp_display_cfg = &context->pp_display_cfg;
 
-	pp_display_cfg->min_engine_clock_khz = context->bw.dcn.cur_clk.dcfclk_khz;
-	pp_display_cfg->min_memory_clock_khz = context->bw.dcn.cur_clk.fclk_khz;
-	pp_display_cfg->min_engine_clock_deep_sleep_khz = context->bw.dcn.cur_clk.dcfclk_deep_sleep_khz;
-	pp_display_cfg->min_dcfc_deep_sleep_clock_khz = context->bw.dcn.cur_clk.dcfclk_deep_sleep_khz;
-	pp_display_cfg->min_dcfclock_khz = context->bw.dcn.cur_clk.dcfclk_khz;
+	pp_display_cfg->min_engine_clock_khz = dc->res_pool->dccg->clks.dcfclk_khz;
+	pp_display_cfg->min_memory_clock_khz = dc->res_pool->dccg->clks.fclk_khz;
+	pp_display_cfg->min_engine_clock_deep_sleep_khz = dc->res_pool->dccg->clks.dcfclk_deep_sleep_khz;
+	pp_display_cfg->min_dcfc_deep_sleep_clock_khz = dc->res_pool->dccg->clks.dcfclk_deep_sleep_khz;
+	pp_display_cfg->min_dcfclock_khz = dc->res_pool->dccg->clks.dcfclk_khz;
 	pp_display_cfg->disp_clk_khz = context->bw.dcn.cur_clk.dispclk_khz;
 	dce110_fill_display_configs(context, pp_display_cfg);
 
@@ -2361,11 +2361,6 @@ static void dcn10_apply_ctx_for_surface(
 */
 }
 
-static inline bool should_set_clock(bool decrease_allowed, int calc_clk, int cur_clk)
-{
-	return ((decrease_allowed && calc_clk < cur_clk) || calc_clk > cur_clk);
-}
-
 static int determine_dppclk_threshold(struct dc *dc, struct dc_state *context)
 {
 	bool request_dpp_div = context->bw.dcn.calc_clk.dispclk_khz >
@@ -2456,16 +2451,16 @@ static void ramp_up_dispclk_with_dpp(str
 			context->bw.dcn.calc_clk.max_supported_dppclk_khz;
 }
 
+static inline bool should_set_clock(bool safe_to_lower, int calc_clk, int cur_clk)
+{
+	return ((safe_to_lower && calc_clk < cur_clk) || calc_clk > cur_clk);
+}
+
 static void dcn10_set_bandwidth(
 		struct dc *dc,
 		struct dc_state *context,
 		bool decrease_allowed)
 {
-	struct pp_smu_display_requirement_rv *smu_req_cur =
-			&dc->res_pool->pp_smu_req;
-	struct pp_smu_display_requirement_rv smu_req = *smu_req_cur;
-	struct pp_smu_funcs_rv *pp_smu = dc->res_pool->pp_smu;
-
 	if (dc->debug.sanity_checks) {
 		dcn10_verify_allow_pstate_change_high(dc);
 	}
@@ -2473,45 +2468,14 @@ static void dcn10_set_bandwidth(
 	if (IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment))
 		return;
 
+	if (context->stream_count == 0)
+		context->bw.dcn.calc_clk.phyclk_khz = 0;
+
 	dc->res_pool->dccg->funcs->update_clocks(
 			dc->res_pool->dccg,
 			&context->bw.dcn.calc_clk,
 			decrease_allowed);
 
-	if (should_set_clock(
-			decrease_allowed,
-			context->bw.dcn.calc_clk.dcfclk_khz,
-			dc->current_state->bw.dcn.cur_clk.dcfclk_khz)) {
-		context->bw.dcn.cur_clk.dcfclk_khz =
-				context->bw.dcn.calc_clk.dcfclk_khz;
-		smu_req.hard_min_dcefclk_khz =
-				context->bw.dcn.calc_clk.dcfclk_khz;
-	}
-
-	if (should_set_clock(
-			decrease_allowed,
-			context->bw.dcn.calc_clk.dcfclk_deep_sleep_khz,
-			dc->current_state->bw.dcn.cur_clk.dcfclk_deep_sleep_khz)) {
-		context->bw.dcn.cur_clk.dcfclk_deep_sleep_khz =
-				context->bw.dcn.calc_clk.dcfclk_deep_sleep_khz;
-	}
-
-	if (should_set_clock(
-			decrease_allowed,
-			context->bw.dcn.calc_clk.fclk_khz,
-			dc->current_state->bw.dcn.cur_clk.fclk_khz)) {
-		context->bw.dcn.cur_clk.fclk_khz =
-				context->bw.dcn.calc_clk.fclk_khz;
-		smu_req.hard_min_fclk_khz = context->bw.dcn.calc_clk.fclk_khz;
-	}
-
-	smu_req.display_count = context->stream_count;
-
-	if (pp_smu->set_display_requirement)
-		pp_smu->set_display_requirement(&pp_smu->pp_smu, &smu_req);
-
-	*smu_req_cur = smu_req;
-
 	/* make sure dcf clk is before dpp clk to
 	 * make sure we have enough voltage to run dpp clk
 	 */