internal_hpd is referenced by the event thread but set from the drm bridge
callback context. Add a mutex to protect internal_hpd and avoid races
between the two threads.

Signed-off-by: Kuogee Hsieh <quic_khsieh@xxxxxxxxxxx>
---
 drivers/gpu/drm/msm/dp/dp_display.c | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/drivers/gpu/drm/msm/dp/dp_display.c b/drivers/gpu/drm/msm/dp/dp_display.c
index 71aa944..b59ea7a 100644
--- a/drivers/gpu/drm/msm/dp/dp_display.c
+++ b/drivers/gpu/drm/msm/dp/dp_display.c
@@ -1792,11 +1792,13 @@ void dp_bridge_hpd_enable(struct drm_bridge *bridge)
 
 	dp = container_of(dp_display, struct dp_display_private, dp_display);
 
+	mutex_lock(&dp->event_mutex);
 	dp_display->internal_hpd = true;
 
 	dp_catalog_hpd_config_intr(dp->catalog,
 				DP_DP_HPD_PLUG_INT_MASK | DP_DP_HPD_UNPLUG_INT_MASK,
 				true);
+	mutex_unlock(&dp->event_mutex);
 }
 
 void dp_bridge_hpd_disable(struct drm_bridge *bridge)
@@ -1807,11 +1809,13 @@ void dp_bridge_hpd_disable(struct drm_bridge *bridge)
 
 	dp = container_of(dp_display, struct dp_display_private, dp_display);
 
+	mutex_lock(&dp->event_mutex);
 	dp_catalog_hpd_config_intr(dp->catalog,
 				DP_DP_HPD_PLUG_INT_MASK | DP_DP_HPD_UNPLUG_INT_MASK,
 				false);
 
 	dp_display->internal_hpd = false;
+	mutex_unlock(&dp->event_mutex);
 }
 
 void dp_bridge_hpd_notify(struct drm_bridge *bridge,
@@ -1822,8 +1826,12 @@ void dp_bridge_hpd_notify(struct drm_bridge *bridge,
 	struct dp_display_private *dp = container_of(dp_display, struct dp_display_private, dp_display);
 
 	/* Without next_bridge interrupts are handled by the DP core directly */
-	if (dp_display->internal_hpd)
+	mutex_lock(&dp->event_mutex);
+	if (dp_display->internal_hpd) {
+		mutex_unlock(&dp->event_mutex);
 		return;
+	}
+	mutex_unlock(&dp->event_mutex);
 
 	if (!dp->core_initialized) {
 		drm_dbg_dp(dp->drm_dev, "not initialized\n");
-- 
The Qualcomm Innovation Center, Inc. is a member of the Code Aurora Forum,
a Linux Foundation Collaborative Project
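
For reference, the locking pattern the patch applies is the usual one of
guarding a shared flag with a single mutex: the writer (the drm bridge
callback) and the reader (the event thread) take the same lock around every
access to the flag. Below is a minimal userspace sketch of that pattern; it
is illustrative only, and all names in it (hpd_lock, hpd_enable, hpd_notify)
are hypothetical rather than driver code.

/* Illustrative sketch only -- not driver code. Shows the general
 * "protect a shared flag with one mutex" pattern that the patch
 * applies to internal_hpd.
 */
#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>

static pthread_mutex_t hpd_lock = PTHREAD_MUTEX_INITIALIZER;
static bool internal_hpd;	/* shared between two threads */

/* Writer side: stands in for the drm bridge callback context. */
static void hpd_enable(void)
{
	pthread_mutex_lock(&hpd_lock);
	internal_hpd = true;	/* flag update is serialized with readers */
	pthread_mutex_unlock(&hpd_lock);
}

/* Reader side: stands in for the event thread checking the flag. */
static void hpd_notify(void)
{
	bool internal;

	pthread_mutex_lock(&hpd_lock);
	internal = internal_hpd;
	pthread_mutex_unlock(&hpd_lock);

	if (internal)
		return;		/* interrupts handled by the core directly */

	printf("forwarding external HPD event\n");
}

int main(void)
{
	hpd_enable();
	hpd_notify();
	return 0;
}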