This is for WAs that need to touch global MMIO registers related to GT.

Suggested-by: Joonas Lahtinen <joonas.lahtinen@xxxxxxxxxxxxxxx>
Signed-off-by: Oscar Mateo <oscar.mateo@xxxxxxxxx>
Cc: Chris Wilson <chris@xxxxxxxxxxxxxxxxxx>
Cc: Mika Kuoppala <mika.kuoppala@xxxxxxxxxxxxxxx>
---
 drivers/gpu/drm/i915/i915_drv.h          |   1 +
 drivers/gpu/drm/i915/intel_workarounds.c | 404 +++++++++++++++++++------------
 drivers/gpu/drm/i915/intel_workarounds.h |   3 +
 3 files changed, 253 insertions(+), 155 deletions(-)

diff --git a/drivers/gpu/drm/i915/i915_drv.h b/drivers/gpu/drm/i915/i915_drv.h
index 1c73fec..72b5d80 100644
--- a/drivers/gpu/drm/i915/i915_drv.h
+++ b/drivers/gpu/drm/i915/i915_drv.h
@@ -2007,6 +2007,7 @@ struct i915_wa_reg_table {
 
 struct i915_workarounds {
 	u32 ctx_count;
+	u32 gt_count;
 
 	u32 hw_whitelist_count[I915_NUM_ENGINES];
 };
diff --git a/drivers/gpu/drm/i915/intel_workarounds.c b/drivers/gpu/drm/i915/intel_workarounds.c
index b00899e..b07fbd0 100644
--- a/drivers/gpu/drm/i915/intel_workarounds.c
+++ b/drivers/gpu/drm/i915/intel_workarounds.c
@@ -29,6 +29,10 @@
 	.name = (wa), \
 	.type = I915_WA_TYPE_CONTEXT
 
+#define WA_GT(wa) \
+	.name = (wa), \
+	.type = I915_WA_TYPE_GT
+
 #define ALL_REVS \
 	.since = 0, \
 	.until = REVID_FOREVER
@@ -40,6 +44,18 @@
 #define REG(a) \
 	.addr = (a)
 
+#define SET_BIT(m) \
+	.mask = (m), \
+	.value = (m)
+
+#define CLEAR_BIT(m) \
+	.mask = (m), \
+	.value = 0
+
+#define SET_FIELD(m, v) \
+	.mask = (m), \
+	.value = (v)
+
 #define MASK(mask, value) ((mask) << 16 | (value))
 #define MASK_ENABLE(x) (MASK((x), (x)))
 #define MASK_DISABLE(x) (MASK((x), 0))
@@ -575,196 +591,274 @@ int intel_ctx_workarounds_emit(struct drm_i915_gem_request *req)
 	return 0;
 }
 
-static void bdw_gt_workarounds_apply(struct drm_i915_private *dev_priv)
+static uint mmio_workarounds_apply(struct drm_i915_private *dev_priv,
+				   const struct i915_wa_reg_table *wa_table,
+				   uint table_count)
 {
-}
+	uint total_count = 0;
+	int i, j;
 
-static void chv_gt_workarounds_apply(struct drm_i915_private *dev_priv)
-{
-}
+	for (i = 0; i < table_count; i++) {
+		struct i915_wa_reg *wa = wa_table[i].table;
 
-static void gen9_gt_workarounds_apply(struct drm_i915_private *dev_priv)
-{
-	if (HAS_LLC(dev_priv)) {
-		/* WaCompressedResourceSamplerPbeMediaNewHashMode:skl,kbl
-		 *
-		 * Must match Display Engine. See
-		 * WaCompressedResourceDisplayNewHashMode.
-		 */
-		I915_WRITE(MMCD_MISC_CTRL,
-			   I915_READ(MMCD_MISC_CTRL) |
-			   MMCD_PCLA |
-			   MMCD_HOTSPOT_EN);
-	}
+		for (j = 0; j < wa_table[i].count; j++) {
+			wa[j].applied =
+				IS_REVID(dev_priv, wa[j].since, wa[j].until);
 
-	/* WaContextSwitchWithConcurrentTLBInvalidate:skl,bxt,kbl,glk,cfl */
-	I915_WRITE(GEN9_CSFE_CHICKEN1_RCS,
-		   _MASKED_BIT_ENABLE(GEN9_PREEMPT_GPGPU_SYNC_SWITCH_DISABLE));
+			if (wa[j].applied && wa[j].pre_hook)
+				wa[j].applied = wa[j].pre_hook(dev_priv, &wa[j]);
 
-	/* WaEnableLbsSlaRetryTimerDecrement:skl,bxt,kbl,glk,cfl */
-	I915_WRITE(BDW_SCRATCH1, I915_READ(BDW_SCRATCH1) |
-		   GEN9_LBS_SLA_RETRY_TIMER_DECREMENT_ENABLE);
+			if (wa[j].applied) {
+				i915_reg_t addr = wa[j].addr;
+				u32 value = wa[j].value;
+				u32 mask = wa[j].mask;
 
-	/* WaDisableKillLogic:bxt,skl,kbl */
-	if (!IS_COFFEELAKE(dev_priv))
-		I915_WRITE(GAM_ECOCHK, I915_READ(GAM_ECOCHK) |
-			   ECOCHK_DIS_TLB);
+				if (wa[j].is_masked_reg) {
+					GEM_BUG_ON(mask & 0xffff0000);
+					I915_WRITE(addr, value);
+				} else {
+					I915_WRITE(addr,
+						   (I915_READ(addr) & ~mask) |
+						   value);
+				}
 
-	/* WaDisableHDCInvalidation:skl,bxt,kbl,cfl */
-	I915_WRITE(GAM_ECOCHK, I915_READ(GAM_ECOCHK) |
-		   BDW_DISABLE_HDC_INVALIDATION);
+				if (wa[j].post_hook)
+					wa[j].post_hook(dev_priv, &wa[j]);
 
-	/* WaOCLCoherentLineFlush:skl,bxt,kbl,cfl */
-	I915_WRITE(GEN8_L3SQCREG4, (I915_READ(GEN8_L3SQCREG4) |
-				    GEN8_LQSC_FLUSH_COHERENT_LINES));
+				total_count++;
+			}
+		}
+	}
 
-	/* WaEnablePreemptionGranularityControlByUMD:skl,bxt,kbl,cfl,[cnl] */
-	I915_WRITE(GEN7_FF_SLICE_CS_CHICKEN1,
-		   _MASKED_BIT_ENABLE(GEN9_FFSC_PERCTX_PREEMPT_CTRL));
+	return total_count;
 }
 
-static void skl_gt_workarounds_apply(struct drm_i915_private *dev_priv)
-{
-	gen9_gt_workarounds_apply(dev_priv);
+static struct i915_wa_reg gen8_gt_was[] = {
+};
 
-	/* WaEnableGapsTsvCreditFix:skl */
-	I915_WRITE(GEN8_GARBCNTL, (I915_READ(GEN8_GARBCNTL) |
-				   GEN9_GAPS_TSV_CREDIT_DISABLE));
+static struct i915_wa_reg bdw_gt_was[] = {
+};
 
-	/* WaDisableGafsUnitClkGating:skl */
-	I915_WRITE(GEN7_UCGCTL4, (I915_READ(GEN7_UCGCTL4) |
-				  GEN8_EU_GAUNIT_CLOCK_GATE_DISABLE));
+static struct i915_wa_reg chv_gt_was[] = {
+};
 
-	/* WaInPlaceDecompressionHang:skl */
-	if (IS_SKL_REVID(dev_priv, SKL_REVID_H0, REVID_FOREVER))
-		I915_WRITE(GEN9_GAMT_ECO_REG_RW_IA,
-			   (I915_READ(GEN9_GAMT_ECO_REG_RW_IA) |
-			    GAMT_ECO_ENABLE_IN_PLACE_DECOMPRESS));
-}
+static struct i915_wa_reg gen9_gt_was[] = {
+	{ WA_GT("WaCompressedResourceSamplerPbeMediaNewHashMode"),
+	  ALL_REVS, REG(MMCD_MISC_CTRL),
+	  SET_BIT(MMCD_PCLA | MMCD_HOTSPOT_EN),
+	  .pre_hook = has_llc },
 
-static void bxt_gt_workarounds_apply(struct drm_i915_private *dev_priv)
-{
-	gen9_gt_workarounds_apply(dev_priv);
+	{ WA_GT("WaContextSwitchWithConcurrentTLBInvalidate"),
+	  ALL_REVS, REG(GEN9_CSFE_CHICKEN1_RCS),
+	  SET_BIT_MASKED(GEN9_PREEMPT_GPGPU_SYNC_SWITCH_DISABLE) },
+
+	{ WA_GT("WaEnableLbsSlaRetryTimerDecrement"),
+	  ALL_REVS, REG(BDW_SCRATCH1),
+	  SET_BIT(GEN9_LBS_SLA_RETRY_TIMER_DECREMENT_ENABLE) },
+
+	{ WA_GT("WaDisableHDCInvalidation"),
+	  ALL_REVS, REG(GAM_ECOCHK),
+	  SET_BIT(BDW_DISABLE_HDC_INVALIDATION) },
+
+	{ WA_GT("WaOCLCoherentLineFlush"),
+	  ALL_REVS, REG(GEN8_L3SQCREG4),
+	  SET_BIT(GEN8_LQSC_FLUSH_COHERENT_LINES) },
+
+	{ WA_GT("WaEnablePreemptionGranularityControlByUMD"),
+	  ALL_REVS, REG(GEN7_FF_SLICE_CS_CHICKEN1),
+	  SET_BIT_MASKED(GEN9_FFSC_PERCTX_PREEMPT_CTRL) },
+};
+
+static struct i915_wa_reg skl_gt_was[] = {
+	{ WA_GT("WaDisableKillLogic"),
+	  ALL_REVS, REG(GAM_ECOCHK),
+	  SET_BIT(ECOCHK_DIS_TLB) },
+
+	{ WA_GT("WaEnableGapsTsvCreditFix"),
+	  ALL_REVS, REG(GEN8_GARBCNTL),
+	  SET_BIT(GEN9_GAPS_TSV_CREDIT_DISABLE) },
+
+	{ WA_GT("WaDisableGafsUnitClkGating"),
+	  ALL_REVS, REG(GEN7_UCGCTL4),
+	  SET_BIT(GEN8_EU_GAUNIT_CLOCK_GATE_DISABLE) },
+
+	{ WA_GT("WaInPlaceDecompressionHang"),
+	  REVS(SKL_REVID_H0, REVID_FOREVER), REG(GEN9_GAMT_ECO_REG_RW_IA),
+	  SET_BIT(GAMT_ECO_ENABLE_IN_PLACE_DECOMPRESS) },
+};
+
+static struct i915_wa_reg bxt_gt_was[] = {
+	{ WA_GT("WaDisableKillLogic"),
+	  ALL_REVS, REG(GAM_ECOCHK),
+	  SET_BIT(ECOCHK_DIS_TLB) },
 
-	/* WaStoreMultiplePTEenable:bxt */
-	/* This is a requirement according to Hardware specification */
-	if (IS_BXT_REVID(dev_priv, 0, BXT_REVID_A1))
-		I915_WRITE(TILECTL, I915_READ(TILECTL) | TILECTL_TLBPF);
+	{ WA_GT("WaStoreMultiplePTEenable"),
+	  REVS(0, BXT_REVID_A1), REG(TILECTL),
+	  SET_BIT(TILECTL_TLBPF) },
+
+	{ WA_GT("WaSetClckGatingDisableMedia"),
+	  REVS(0, BXT_REVID_A1), REG(GEN7_MISCCPCTL),
+	  CLEAR_BIT(GEN8_DOP_CLOCK_GATE_MEDIA_ENABLE) },
+
+	{ WA_GT("WaDisablePooledEuLoadBalancingFix"),
+	  REVS(BXT_REVID_B0, REVID_FOREVER), REG(FF_SLICE_CS_CHICKEN2),
+	  SET_BIT_MASKED(GEN9_POOLED_EU_LOAD_BALANCING_FIX_DISABLE) },
+
+	{ WA_GT("WaProgramL3SqcReg1DefaultForPerf"),
+	  REVS(BXT_REVID_B0, REVID_FOREVER), REG(GEN8_L3SQCREG1),
+	  SET_FIELD(L3_PRIO_CREDITS_MASK, L3_GENERAL_PRIO_CREDITS(62) |
+		    L3_HIGH_PRIO_CREDITS(2)) },
+
+	{ WA_GT("WaInPlaceDecompressionHang"),
+	  REVS(BXT_REVID_C0, REVID_FOREVER), REG(GEN9_GAMT_ECO_REG_RW_IA),
+	  SET_BIT(GAMT_ECO_ENABLE_IN_PLACE_DECOMPRESS) },
+};
 
-	/* WaSetClckGatingDisableMedia:bxt */
-	if (IS_BXT_REVID(dev_priv, 0, BXT_REVID_A1)) {
-		I915_WRITE(GEN7_MISCCPCTL, (I915_READ(GEN7_MISCCPCTL) &
-					    ~GEN8_DOP_CLOCK_GATE_MEDIA_ENABLE));
-	}
+static struct i915_wa_reg kbl_gt_was[] = {
+	{ WA_GT("WaDisableKillLogic"),
+	  ALL_REVS, REG(GAM_ECOCHK),
+	  SET_BIT(ECOCHK_DIS_TLB) },
 
-	/* WaDisablePooledEuLoadBalancingFix:bxt */
-	if (IS_BXT_REVID(dev_priv, BXT_REVID_B0, REVID_FOREVER)) {
-		I915_WRITE(FF_SLICE_CS_CHICKEN2,
-			   _MASKED_BIT_ENABLE(GEN9_POOLED_EU_LOAD_BALANCING_FIX_DISABLE));
-	}
+	{ WA_GT("WaEnableGapsTsvCreditFix"),
+	  ALL_REVS, REG(GEN8_GARBCNTL),
+	  SET_BIT(GEN9_GAPS_TSV_CREDIT_DISABLE) },
 
-	/* WaProgramL3SqcReg1DefaultForPerf:bxt */
-	if (IS_BXT_REVID(dev_priv, BXT_REVID_B0, REVID_FOREVER)) {
-		u32 val = I915_READ(GEN8_L3SQCREG1);
-		val &= ~L3_PRIO_CREDITS_MASK;
-		val |= L3_GENERAL_PRIO_CREDITS(62) | L3_HIGH_PRIO_CREDITS(2);
-		I915_WRITE(GEN8_L3SQCREG1, val);
-	}
+	{ WA_GT("WaDisableDynamicCreditSharing"),
+	  REVS(0, KBL_REVID_B0), REG(GAMT_CHKN_BIT_REG),
+	  SET_BIT(GAMT_CHKN_DISABLE_DYNAMIC_CREDIT_SHARING) },
 
-	/* WaInPlaceDecompressionHang:bxt */
-	if (IS_BXT_REVID(dev_priv, BXT_REVID_C0, REVID_FOREVER))
-		I915_WRITE(GEN9_GAMT_ECO_REG_RW_IA,
-			   (I915_READ(GEN9_GAMT_ECO_REG_RW_IA) |
-			    GAMT_ECO_ENABLE_IN_PLACE_DECOMPRESS));
-}
+	{ WA_GT("WaDisableGafsUnitClkGating"),
+	  ALL_REVS, REG(GEN7_UCGCTL4),
+	  SET_BIT(GEN8_EU_GAUNIT_CLOCK_GATE_DISABLE) },
 
-static void kbl_gt_workarounds_apply(struct drm_i915_private *dev_priv)
-{
-	gen9_gt_workarounds_apply(dev_priv);
-
-	/* WaEnableGapsTsvCreditFix:kbl */
-	I915_WRITE(GEN8_GARBCNTL, (I915_READ(GEN8_GARBCNTL) |
-				   GEN9_GAPS_TSV_CREDIT_DISABLE));
-
-	/* WaDisableDynamicCreditSharing:kbl */
-	if (IS_KBL_REVID(dev_priv, 0, KBL_REVID_B0))
-		I915_WRITE(GAMT_CHKN_BIT_REG,
-			   (I915_READ(GAMT_CHKN_BIT_REG) |
-			    GAMT_CHKN_DISABLE_DYNAMIC_CREDIT_SHARING));
-
-	/* WaDisableGafsUnitClkGating:kbl */
-	I915_WRITE(GEN7_UCGCTL4, (I915_READ(GEN7_UCGCTL4) |
-				  GEN8_EU_GAUNIT_CLOCK_GATE_DISABLE));
-
-	/* WaInPlaceDecompressionHang:kbl */
-	I915_WRITE(GEN9_GAMT_ECO_REG_RW_IA,
-		   (I915_READ(GEN9_GAMT_ECO_REG_RW_IA) |
-		    GAMT_ECO_ENABLE_IN_PLACE_DECOMPRESS));
-}
+	{ WA_GT("WaInPlaceDecompressionHang"),
+	  ALL_REVS, REG(GEN9_GAMT_ECO_REG_RW_IA),
+	  SET_BIT(GAMT_ECO_ENABLE_IN_PLACE_DECOMPRESS) },
+};
 
-static void glk_gt_workarounds_apply(struct drm_i915_private *dev_priv)
-{
-	gen9_gt_workarounds_apply(dev_priv);
-}
+static struct i915_wa_reg glk_gt_was[] = {
+	{ WA_GT("WaDisableKillLogic"),
+	  ALL_REVS, REG(GAM_ECOCHK),
+	  SET_BIT(ECOCHK_DIS_TLB) },
+};
 
-static void cfl_gt_workarounds_apply(struct drm_i915_private *dev_priv)
-{
-	gen9_gt_workarounds_apply(dev_priv);
+static struct i915_wa_reg cfl_gt_was[] = {
+	{ WA_GT("WaEnableGapsTsvCreditFix"),
+	  ALL_REVS, REG(GEN8_GARBCNTL),
+	  SET_BIT(GEN9_GAPS_TSV_CREDIT_DISABLE) },
 
-	/* WaEnableGapsTsvCreditFix:cfl */
-	I915_WRITE(GEN8_GARBCNTL, (I915_READ(GEN8_GARBCNTL) |
-				   GEN9_GAPS_TSV_CREDIT_DISABLE));
+	{ WA_GT("WaDisableGafsUnitClkGating"),
+	  ALL_REVS, REG(GEN7_UCGCTL4),
+	  SET_BIT(GEN8_EU_GAUNIT_CLOCK_GATE_DISABLE) },
 
-	/* WaDisableGafsUnitClkGating:cfl */
-	I915_WRITE(GEN7_UCGCTL4, (I915_READ(GEN7_UCGCTL4) |
-				  GEN8_EU_GAUNIT_CLOCK_GATE_DISABLE));
+	{ WA_GT("WaInPlaceDecompressionHang"),
+	  ALL_REVS, REG(GEN9_GAMT_ECO_REG_RW_IA),
+	  SET_BIT(GAMT_ECO_ENABLE_IN_PLACE_DECOMPRESS) },
+};
 
-	/* WaInPlaceDecompressionHang:cfl */
-	I915_WRITE(GEN9_GAMT_ECO_REG_RW_IA,
-		   (I915_READ(GEN9_GAMT_ECO_REG_RW_IA) |
-		    GAMT_ECO_ENABLE_IN_PLACE_DECOMPRESS));
-}
+static struct i915_wa_reg cnl_gt_was[] = {
+	{ WA_GT("WaDisableI2mCycleOnWRPort"),
+	  REVS(CNL_REVID_B0, CNL_REVID_B0), REG(GAMT_CHKN_BIT_REG),
+	  SET_BIT(GAMT_CHKN_DISABLE_I2M_CYCLE_ON_WR_PORT) },
 
-static void cnl_gt_workarounds_apply(struct drm_i915_private *dev_priv)
-{
-	/* WaDisableI2mCycleOnWRPort:cnl (pre-prod) */
-	if (IS_CNL_REVID(dev_priv, CNL_REVID_B0, CNL_REVID_B0))
-		I915_WRITE(GAMT_CHKN_BIT_REG,
-			   (I915_READ(GAMT_CHKN_BIT_REG) |
-			    GAMT_CHKN_DISABLE_I2M_CYCLE_ON_WR_PORT));
+	{ WA_GT("WaInPlaceDecompressionHang"),
+	  ALL_REVS, REG(GEN9_GAMT_ECO_REG_RW_IA),
+	  SET_BIT(GAMT_ECO_ENABLE_IN_PLACE_DECOMPRESS) },
 
-	/* WaInPlaceDecompressionHang:cnl */
-	I915_WRITE(GEN9_GAMT_ECO_REG_RW_IA,
-		   (I915_READ(GEN9_GAMT_ECO_REG_RW_IA) |
-		    GAMT_ECO_ENABLE_IN_PLACE_DECOMPRESS));
+	{ WA_GT("WaEnablePreemptionGranularityControlByUMD"),
+	  ALL_REVS, REG(GEN7_FF_SLICE_CS_CHICKEN1),
+	  SET_BIT_MASKED(GEN9_FFSC_PERCTX_PREEMPT_CTRL) },
+};
 
-	/* WaEnablePreemptionGranularityControlByUMD:cnl */
-	I915_WRITE(GEN7_FF_SLICE_CS_CHICKEN1,
-		   _MASKED_BIT_ENABLE(GEN9_FFSC_PERCTX_PREEMPT_CTRL));
-}
+static const struct i915_wa_reg_table bdw_gt_wa_tbl[] = {
+	{ gen8_gt_was, ARRAY_SIZE(gen8_gt_was) },
+	{ bdw_gt_was, ARRAY_SIZE(bdw_gt_was) },
+};
 
-void intel_gt_workarounds_apply(struct drm_i915_private *dev_priv)
+static const struct i915_wa_reg_table chv_gt_wa_tbl[] = {
+	{ gen8_gt_was, ARRAY_SIZE(gen8_gt_was) },
+	{ chv_gt_was, ARRAY_SIZE(chv_gt_was) },
+};
+
+static const struct i915_wa_reg_table skl_gt_wa_tbl[] = {
+	{ gen9_gt_was, ARRAY_SIZE(gen9_gt_was) },
+	{ skl_gt_was, ARRAY_SIZE(skl_gt_was) },
+};
+
+static const struct i915_wa_reg_table bxt_gt_wa_tbl[] = {
+	{ gen9_gt_was, ARRAY_SIZE(gen9_gt_was) },
+	{ bxt_gt_was, ARRAY_SIZE(bxt_gt_was) },
+};
+
+static const struct i915_wa_reg_table kbl_gt_wa_tbl[] = {
+	{ gen9_gt_was, ARRAY_SIZE(gen9_gt_was) },
+	{ kbl_gt_was, ARRAY_SIZE(kbl_gt_was) },
+};
+
+static const struct i915_wa_reg_table glk_gt_wa_tbl[] = {
+	{ gen9_gt_was, ARRAY_SIZE(gen9_gt_was) },
+	{ glk_gt_was, ARRAY_SIZE(glk_gt_was) },
+};
+
+static const struct i915_wa_reg_table cfl_gt_wa_tbl[] = {
+	{ gen9_gt_was, ARRAY_SIZE(gen9_gt_was) },
+	{ cfl_gt_was, ARRAY_SIZE(cfl_gt_was) },
+};
+
+static const struct i915_wa_reg_table cnl_gt_wa_tbl[] = {
+	{ cnl_gt_was, ARRAY_SIZE(cnl_gt_was) },
+};
+
+void intel_gt_workarounds_get(struct drm_i915_private *dev_priv,
+			      const struct i915_wa_reg_table **wa_table,
+			      uint *table_count)
 {
+	*wa_table = NULL;
+	*table_count = 0;
+
 	if (INTEL_GEN(dev_priv) < 8)
 		return;
-	else if (IS_BROADWELL(dev_priv))
-		bdw_gt_workarounds_apply(dev_priv);
-	else if (IS_CHERRYVIEW(dev_priv))
-		chv_gt_workarounds_apply(dev_priv);
-	else if (IS_SKYLAKE(dev_priv))
-		skl_gt_workarounds_apply(dev_priv);
-	else if (IS_BROXTON(dev_priv))
-		bxt_gt_workarounds_apply(dev_priv);
-	else if (IS_KABYLAKE(dev_priv))
-		kbl_gt_workarounds_apply(dev_priv);
-	else if (IS_GEMINILAKE(dev_priv))
-		glk_gt_workarounds_apply(dev_priv);
-	else if (IS_COFFEELAKE(dev_priv))
-		cfl_gt_workarounds_apply(dev_priv);
-	else if (IS_CANNONLAKE(dev_priv))
-		cnl_gt_workarounds_apply(dev_priv);
-	else
+	else if (IS_BROADWELL(dev_priv)) {
+		*wa_table = bdw_gt_wa_tbl;
+		*table_count = ARRAY_SIZE(bdw_gt_wa_tbl);
+	} else if (IS_CHERRYVIEW(dev_priv)) {
+		*wa_table = chv_gt_wa_tbl;
+		*table_count = ARRAY_SIZE(chv_gt_wa_tbl);
+	} else if (IS_SKYLAKE(dev_priv)) {
+		*wa_table = skl_gt_wa_tbl;
+		*table_count = ARRAY_SIZE(skl_gt_wa_tbl);
+	} else if (IS_BROXTON(dev_priv)) {
+		*wa_table = bxt_gt_wa_tbl;
+		*table_count = ARRAY_SIZE(bxt_gt_wa_tbl);
+	} else if (IS_KABYLAKE(dev_priv)) {
+		*wa_table = kbl_gt_wa_tbl;
+		*table_count = ARRAY_SIZE(kbl_gt_wa_tbl);
+	} else if (IS_GEMINILAKE(dev_priv)) {
+		*wa_table = glk_gt_wa_tbl;
+		*table_count = ARRAY_SIZE(glk_gt_wa_tbl);
+	} else if (IS_COFFEELAKE(dev_priv)) {
+		*wa_table = cfl_gt_wa_tbl;
+		*table_count = ARRAY_SIZE(cfl_gt_wa_tbl);
+	} else if (IS_CANNONLAKE(dev_priv)) {
+		*wa_table = cnl_gt_wa_tbl;
+		*table_count = ARRAY_SIZE(cnl_gt_wa_tbl);
+	} else {
 		MISSING_CASE(INTEL_GEN(dev_priv));
+		return;
+	}
+}
+
+void intel_gt_workarounds_apply(struct drm_i915_private *dev_priv)
+{
+	const struct i915_wa_reg_table *wa_table;
+	uint table_count, total_count;
+
+	intel_gt_workarounds_get(dev_priv, &wa_table, &table_count);
+	total_count = mmio_workarounds_apply(dev_priv, wa_table, table_count);
+
+	dev_priv->workarounds.gt_count = total_count;
+	DRM_DEBUG_DRIVER("Number of GT specific w/a: %u\n", total_count);
 }
 
 static int wa_ring_whitelist_reg(struct intel_engine_cs *engine,
diff --git a/drivers/gpu/drm/i915/intel_workarounds.h b/drivers/gpu/drm/i915/intel_workarounds.h
index 38763e7..9bb3c48 100644
--- a/drivers/gpu/drm/i915/intel_workarounds.h
+++ b/drivers/gpu/drm/i915/intel_workarounds.h
@@ -30,6 +30,9 @@ void intel_ctx_workarounds_get(struct drm_i915_private *dev_priv,
 			       uint *table_count);
 int intel_ctx_workarounds_emit(struct drm_i915_gem_request *req);
 
+void intel_gt_workarounds_get(struct drm_i915_private *dev_priv,
+			      const struct i915_wa_reg_table **wa_table,
+			      uint *table_count);
 void intel_gt_workarounds_apply(struct drm_i915_private *dev_priv);
 
 int intel_whitelist_workarounds_apply(struct intel_engine_cs *engine);
-- 
1.9.1
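
For readers following along, here is a minimal, self-contained sketch (plain userspace C, not driver code) of the apply step that mmio_workarounds_apply() performs once an entry has passed its revision check and optional pre_hook: masked registers take the value verbatim (the value is expected to already carry its mask in the upper 16 bits, as _MASKED_BIT_ENABLE() produces), while ordinary registers get a read-modify-write of (read & ~mask) | value, which is exactly what the SET_BIT/CLEAR_BIT/SET_FIELD helpers in the patch encode. The structure and helper names below (struct wa_reg_model, apply_wa_model, the fake regs[] array) are invented for illustration only and are not part of i915.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Simplified stand-in for an i915_wa_reg entry (illustrative only). */
struct wa_reg_model {
	const char *name;
	uint32_t addr;		/* offset into the fake register file below */
	uint32_t mask;
	uint32_t value;
	bool is_masked_reg;	/* write carries its own mask in the upper 16 bits */
};

/* Fake MMIO space: an array indexed by (addr / 4). */
static uint32_t regs[16];

static uint32_t fake_read(uint32_t addr)
{
	return regs[addr / 4];
}

static void fake_write(uint32_t addr, uint32_t val)
{
	regs[addr / 4] = val;
}

/* Models the write step of mmio_workarounds_apply() for one entry. */
static void apply_wa_model(const struct wa_reg_model *wa)
{
	if (wa->is_masked_reg) {
		/* Masked register: value already encodes mask<<16 | bits. */
		fake_write(wa->addr, wa->value);
	} else {
		/* Ordinary register: read-modify-write under the mask. */
		fake_write(wa->addr,
			   (fake_read(wa->addr) & ~wa->mask) | wa->value);
	}

	printf("%-16s reg[0x%02x] = 0x%08x\n",
	       wa->name, wa->addr, fake_read(wa->addr));
}

int main(void)
{
	/* SET_BIT-style entry: mask == value == the bit being set. */
	const struct wa_reg_model set_bit = {
		"SetBitExample", 0x04, 0x8, 0x8, false
	};
	/* CLEAR_BIT-style entry: mask == the bit, value == 0. */
	const struct wa_reg_model clear_bit = {
		"ClearBitExample", 0x08, 0x2, 0x0, false
	};

	regs[1] = 0x1;	/* bits outside the mask survive the write */
	regs[2] = 0x3;

	apply_wa_model(&set_bit);	/* prints 0x00000009 */
	apply_wa_model(&clear_bit);	/* prints 0x00000001 */

	return 0;
}

Running the sketch shows the post-write register contents, which is the same masked read-modify-write behaviour that the table entries above describe declaratively instead of as open-coded I915_WRITE() calls.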