From: Rodrigo Siqueira <Rodrigo.Siqueira@amd.com>
To: <amd-gfx@lists.freedesktop.org>
Cc: stylon.wang@amd.com, Sunpeng.Li@amd.com, Harry.Wentland@amd.com,
qingqing.zhuo@amd.com, Rodrigo.Siqueira@amd.com,
roman.li@amd.com,
Meenakshikumar Somasundaram <Meenakshikumar.Somasundaram@amd.com>,
solomon.chiu@amd.com, jerry.zuo@amd.com,
Aurabindo.Pillai@amd.com, hamza.mahfooz@amd.com,
wayne.lin@amd.com, Mustapha Ghaddar <mghaddar@amd.com>,
Bhawanpreet.Lakha@amd.com, agustin.gutierrez@amd.com,
pavle.kotarac@amd.com
Subject: [PATCH 01/37] drm/amd/display: Update BW alloc after new DMUB logic
Date: Tue, 10 Jan 2023 11:54:59 -0500 [thread overview]
Message-ID: <20230110165535.3358492-2-Rodrigo.Siqueira@amd.com> (raw)
In-Reply-To: <20230110165535.3358492-1-Rodrigo.Siqueira@amd.com>
From: Mustapha Ghaddar <mghaddar@amd.com>
[WHY]
After introducing new DPIA NOTIFICATION we will need
to update the existing BW allocation logic
[HOW]
Updated the BW alloc source and header files
Reviewed-by: Meenakshikumar Somasundaram <Meenakshikumar.Somasundaram@amd.com>
Acked-by: Rodrigo Siqueira <Rodrigo.Siqueira@amd.com>
Signed-off-by: Mustapha Ghaddar <mghaddar@amd.com>
---
drivers/gpu/drm/amd/display/dc/dc_link.h | 2 +-
.../drm/amd/display/dc/link/link_dp_dpia_bw.h | 47 +++++++++++++++----
.../drm/amd/display/dmub/src/dmub_srv_stat.c | 18 +++----
3 files changed, 49 insertions(+), 18 deletions(-)
diff --git a/drivers/gpu/drm/amd/display/dc/dc_link.h b/drivers/gpu/drm/amd/display/dc/dc_link.h
index 8565bbb75177..3b9315a38b30 100644
--- a/drivers/gpu/drm/amd/display/dc/dc_link.h
+++ b/drivers/gpu/drm/amd/display/dc/dc_link.h
@@ -158,11 +158,11 @@ struct dc_panel_config {
struct dc_dpia_bw_alloc {
int sink_verified_bw; // The Verified BW that sink can allocated and use that has been verified already
int sink_allocated_bw; // The Actual Allocated BW that sink currently allocated
- int padding_bw; // The Padding "Un-used" BW allocated by CM for padding reasons
int sink_max_bw; // The Max BW that sink can require/support
int estimated_bw; // The estimated available BW for this DPIA
int bw_granularity; // BW Granularity
bool bw_alloc_enabled; // The BW Alloc Mode Support is turned ON for all 3: DP-Tx & Dpia & CM
+ bool response_ready; // Response ready from the CM side
};
/*
diff --git a/drivers/gpu/drm/amd/display/dc/link/link_dp_dpia_bw.h b/drivers/gpu/drm/amd/display/dc/link/link_dp_dpia_bw.h
index 669e995f825f..58eb7b581093 100644
--- a/drivers/gpu/drm/amd/display/dc/link/link_dp_dpia_bw.h
+++ b/drivers/gpu/drm/amd/display/dc/link/link_dp_dpia_bw.h
@@ -26,13 +26,13 @@
#ifndef DC_INC_LINK_DP_DPIA_BW_H_
#define DC_INC_LINK_DP_DPIA_BW_H_
-// XXX: TODO: Re-add for Phase 2
-/* Number of Host Routers per motherboard is 2 and 2 DPIA per host router */
-#define MAX_HR_NUM 2
-
-struct dc_host_router_bw_alloc {
- int max_bw[MAX_HR_NUM]; // The Max BW that each Host Router has available to be shared btw DPIAs
- int total_estimated_bw[MAX_HR_NUM]; // The Total Verified and available BW that Host Router has
+/*
+ * Host Router BW type
+ */
+enum bw_type {
+ HOST_ROUTER_BW_ESTIMATED,
+ HOST_ROUTER_BW_ALLOCATED,
+ HOST_ROUTER_BW_INVALID,
};
/*
@@ -61,9 +61,40 @@ void set_usb4_req_bw_req(struct dc_link *link, int req_bw);
* find out the result of allocating on CM and update structs accordingly
*
* @link: pointer to the dc_link struct instance
+ * @bw: Allocated or Estimated BW depending on the result
+ * @result: Response type
+ *
+ * return: none
+ */
+void get_usb4_req_bw_resp(struct dc_link *link, uint8_t bw, uint8_t result);
+
+/*
+ * Return the response_ready flag from dc_link struct
+ *
+ * @link: pointer to the dc_link struct instance
+ *
+ * return: response_ready flag from dc_link struct
+ */
+bool get_cm_response_ready_flag(struct dc_link *link);
+
+/*
+ * Get the Max Available BW or Max Estimated BW for each Host Router
+ *
+ * @link: pointer to the dc_link struct instance
+ * @type: ESTIMATED BW or MAX AVAILABLE BW
+ *
+ * return: total BW (Allocated or Estimated, per @type) for the Host Router
+ */
+int get_host_router_total_bw(struct dc_link *link, uint8_t type);
+
+/*
+ * Cleanup function for when the dpia is unplugged to reset struct
+ * and perform any required clean up
+ *
+ * @link: pointer to the dc_link struct instance
*
* return: none
*/
-void get_usb4_req_bw_resp(struct dc_link *link);
+bool dpia_bw_alloc_unplug(struct dc_link *link);
#endif /* DC_INC_LINK_DP_DPIA_BW_H_ */
diff --git a/drivers/gpu/drm/amd/display/dmub/src/dmub_srv_stat.c b/drivers/gpu/drm/amd/display/dmub/src/dmub_srv_stat.c
index 55a534ec0794..4948f9724db2 100644
--- a/drivers/gpu/drm/amd/display/dmub/src/dmub_srv_stat.c
+++ b/drivers/gpu/drm/amd/display/dmub/src/dmub_srv_stat.c
@@ -98,19 +98,19 @@ enum dmub_status dmub_srv_stat_get_notification(struct dmub_srv *dmub,
if (cmd.dpia_notify.payload.header.type == DPIA_NOTIFY__BW_ALLOCATION) {
- if (cmd.dpia_notify.payload.data.dpia_bw_alloc.bits.bw_request_failed) {
+ notify->bw_alloc_reply.estimated_bw =
+ cmd.dpia_notify.payload.data.dpia_bw_alloc.estimated_bw;
+ notify->bw_alloc_reply.allocated_bw =
+ cmd.dpia_notify.payload.data.dpia_bw_alloc.allocated_bw;
+
+ if (cmd.dpia_notify.payload.data.dpia_bw_alloc.bits.bw_request_failed)
notify->result = DPIA_BW_REQ_FAILED;
- } else if (cmd.dpia_notify.payload.data.dpia_bw_alloc.bits.bw_request_succeeded) {
+ else if (cmd.dpia_notify.payload.data.dpia_bw_alloc.bits.bw_request_succeeded)
notify->result = DPIA_BW_REQ_SUCCESS;
- notify->bw_alloc_reply.allocated_bw =
- cmd.dpia_notify.payload.data.dpia_bw_alloc.allocated_bw;
- } else if (cmd.dpia_notify.payload.data.dpia_bw_alloc.bits.est_bw_changed) {
+ else if (cmd.dpia_notify.payload.data.dpia_bw_alloc.bits.est_bw_changed)
notify->result = DPIA_EST_BW_CHANGED;
- notify->bw_alloc_reply.estimated_bw =
- cmd.dpia_notify.payload.data.dpia_bw_alloc.estimated_bw;
- } else if (cmd.dpia_notify.payload.data.dpia_bw_alloc.bits.bw_alloc_cap_changed) {
+ else if (cmd.dpia_notify.payload.data.dpia_bw_alloc.bits.bw_alloc_cap_changed)
notify->result = DPIA_BW_ALLOC_CAPS_CHANGED;
- }
}
break;
default:
--
2.39.0
next prev parent reply other threads:[~2023-01-10 16:56 UTC|newest]
Thread overview: 42+ messages / expand[flat|nested] mbox.gz Atom feed top
2023-01-10 16:54 [PATCH 00/37] DC Patches Jan 10, 2023 Rodrigo Siqueira
2023-01-10 16:54 ` Rodrigo Siqueira [this message]
2023-01-10 16:55 ` [PATCH 02/37] Revert "drm/amd/display: Speed up DML fast_validate path" Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 03/37] drm/amd/display: fix multi edp panel instancing Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 04/37] drm/amd/display: Fix DPIA link encoder assignment issue Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 05/37] drm/amd/display: Implement FIFO enable sequence on DCN32 Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 06/37] drm/amd/display: refactor hpd logic from dc_link to link_hpd Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 07/37] drm/amd/display: refactor ddc logic from dc_link_ddc to link_ddc Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 08/37] drm/amd/display: move dpcd logic from dc_link_dpcd to link_dpcd Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 09/37] drm/amd/display: move dc_link_dpia logic to link_dp_dpia Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 10/37] drm/amd/display: Update dmub header to match DMUB Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 11/37] drm/amd/display: add hubbub_init related Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 12/37] drm/amd/display: Optimize subvp and drr validation Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 13/37] drm/amd/display: Account for DCC Meta pitch in DML MALL surface calculations Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 14/37] drm/amd/display: Account for Subvp Phantoms " Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 15/37] drm/amd/display: Use DML for MALL SS and Subvp allocation calculations Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 16/37] drm/amd/display: cleanup function args in dml Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 17/37] drm/amd/display: Change i2c speed for hdcp Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 18/37] drm/amd/display: Remove SubVp support if src/dst rect does not equal stream timing Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 19/37] drm/amd/display: move dp link training logic to link_dp_training Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 20/37] drm/amd/display: move dp phy related logic to link_dp_phy Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 21/37] drm/amd/display: move dp capability related logic to link_dp_capability Rodrigo Siqueira
2023-01-19 10:15 ` Conor Dooley
2023-01-19 19:00 ` Alex Deucher
2023-01-19 19:11 ` Conor Dooley
2023-01-10 16:55 ` [PATCH 22/37] Revert "drm/amd/display: Demote Error Level When ODM Transition Supported" Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 23/37] drm/amd/display: fix an error check condition for synced pipes Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 24/37] drm/amd/display: Optimize link power-down when link powered externally Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 25/37] drm/amd/display: Remove unused code Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 26/37] drm/amd/display: set active bit for desktop with VSDBv3 Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 27/37] drm/amd/display: Add extra mblk for DCC Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 28/37] drm/amd/display: Remove DISPCLK dentist programming for dcn32 Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 29/37] drm/amd/display: contional remove disable dig_fifo when blank Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 30/37] drm/amd/display: Skip backlight control delay on external powered links Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 31/37] drm/amd/display: fix mapping to non-allocated address Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 32/37] drm/amd/display: Request min clocks after disabling pipes on init Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 33/37] drm/amd/display: Allow subvp on vactive pipes that are 2560x1440@60 Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 34/37] drm/amd/display: Account for MPO planes in dcn32 mall alloc calculations Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 35/37] drm/amd/display: phase2 enable mst hdcp multiple displays Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 36/37] drm/amd/display: hdcp not enabled on connector 0 Rodrigo Siqueira
2023-01-10 16:55 ` [PATCH 37/37] drm/amd/display: 3.2.218 Rodrigo Siqueira
2023-01-16 15:29 ` [PATCH 00/37] DC Patches Jan 10, 2023 Wheeler, Daniel
Reply instructions:
You may reply publicly to this message via plain-text email
using any one of the following methods:
* Save the following mbox file, import it into your mail client,
and reply-to-all from there: mbox
Avoid top-posting and favor interleaved quoting:
https://en.wikipedia.org/wiki/Posting_style#Interleaved_style
* Reply using the --to, --cc, and --in-reply-to
switches of git-send-email(1):
git send-email \
--in-reply-to=20230110165535.3358492-2-Rodrigo.Siqueira@amd.com \
--to=rodrigo.siqueira@amd.com \
--cc=Aurabindo.Pillai@amd.com \
--cc=Bhawanpreet.Lakha@amd.com \
--cc=Harry.Wentland@amd.com \
--cc=Meenakshikumar.Somasundaram@amd.com \
--cc=Sunpeng.Li@amd.com \
--cc=agustin.gutierrez@amd.com \
--cc=amd-gfx@lists.freedesktop.org \
--cc=hamza.mahfooz@amd.com \
--cc=jerry.zuo@amd.com \
--cc=mghaddar@amd.com \
--cc=pavle.kotarac@amd.com \
--cc=qingqing.zhuo@amd.com \
--cc=roman.li@amd.com \
--cc=solomon.chiu@amd.com \
--cc=stylon.wang@amd.com \
--cc=wayne.lin@amd.com \
/path/to/YOUR_REPLY
https://kernel.org/pub/software/scm/git/docs/git-send-email.html
* If your mail client supports setting the In-Reply-To header
via mailto: links, try the mailto: link
Be sure your reply has a Subject: header at the top and a blank line
before the message body.
This is a public inbox, see mirroring instructions
for how to clone and mirror all data and code used for this inbox