Re: [PATCH 3/4] drm/amdgpu/vcn: fix vcn2.5 instance issue


 



On 2020-01-21 4:06 p.m., Leo Liu wrote:

On 2020-01-21 3:55 p.m., James Zhu wrote:
Since SOC15_DPG_MODE_OFFSET is always the same for all instances, we should not put [inst] in the argument list. Keeping it there makes it easy to introduce bugs in the future.

As has been said, we have a consistent format throughout the entire driver for the offset, "adev->reg_offset[ip##_HWIP][inst][reg##_BASE_IDX] + reg", so the format should be kept here as well.

I really don't think it is worth keeping this format; more than one day of debugging effort has already been wasted because of it.

If the format is more important, then let us choose the v2 patch.

James


Leo



James

On 2020-01-21 3:23 p.m., Leo Liu wrote:

On 2020-01-21 12:48 p.m., James Zhu wrote:

On 2020-01-21 12:40 p.m., Leo Liu wrote:

On 2020-01-21 11:19 a.m., James Zhu wrote:
Fix vcn2.5 instance issue: vcn0 and vcn1 have the same register offset.

Signed-off-by: James Zhu <James.Zhu@xxxxxxx>
---
  drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.h |  4 +-
  drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c   | 86 ++++++++++++++++----------------
  drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c   | 88 ++++++++++++++++-----------------
  3 files changed, 89 insertions(+), 89 deletions(-)

diff --git a/drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.h b/drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.h
index c4984c5..60fe3c4 100644
--- a/drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.h
+++ b/drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.h
@@ -86,12 +86,12 @@
              (sram_sel << UVD_DPG_LMA_CTL__SRAM_SEL__SHIFT));         \
      } while (0)
-#define SOC15_DPG_MODE_OFFSET_2_0(ip, inst, reg)                         \
+#define SOC15_DPG_MODE_OFFSET_2_0(ip, reg)                             \
      ({                                            \
          uint32_t internal_reg_offset, addr;                        \
          bool video_range, aon_range;                            \
                                                  \
-        addr = (adev->reg_offset[ip##_HWIP][inst][reg##_BASE_IDX] + reg);        \

This is based on soc15_common.h:

#define SOC15_REG_OFFSET(ip, inst, reg) (adev->reg_offset[ip##_HWIP][inst][reg##_BASE_IDX] + reg)

Are you saying that is not the right offset for the 2nd instance?

Yes, DPG mode is executed by each instance individually, so the register offset is the same for all instances.
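
(For illustration only, a minimal stand-alone sketch of the point above; the table mimics adev->reg_offset[ip][inst][base_idx] from soc15_common.h and all numeric values are invented, so treat it as a toy model rather than driver code:)

#include <stdint.h>
#include <stdio.h>

/* Toy stand-in for adev->reg_offset[ip][inst][base_idx]: each VCN instance
 * has its own MMIO base in the per-device register map (values invented). */
static const uint32_t vcn_base[2] = { 0x07800, 0x48200 };

#define mmUVD_VCPU_CNTL 0x0510   /* invented register offset inside the IP block */

/* Old form: the computed offset depends on which instance index is passed in. */
#define DPG_OFFSET_WITH_INST(inst, reg)  (vcn_base[(inst)] + (reg))

/* New form (this patch): always instance-local, i.e. relative to instance 0,
 * because DPG indirect programming is executed by each instance on itself. */
#define DPG_OFFSET_LOCAL(reg)            (vcn_base[0] + (reg))

int main(void)
{
    /* Both instances want the same (local) offset in DPG mode. */
    printf("inst0: with_inst=0x%x local=0x%x\n",
           DPG_OFFSET_WITH_INST(0, mmUVD_VCPU_CNTL),
           DPG_OFFSET_LOCAL(mmUVD_VCPU_CNTL));
    printf("inst1: with_inst=0x%x local=0x%x\n",
           DPG_OFFSET_WITH_INST(1, mmUVD_VCPU_CNTL),
           DPG_OFFSET_LOCAL(mmUVD_VCPU_CNTL));
    return 0;
}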

Then you should use an inst idx of 0 for the 2nd instance as well, instead of changing the macro.
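
(A sketch of that alternative at one DPG call site, keeping the existing (ip, inst, reg) macro signature and hard-coding 0 only where the offset is looked up; illustrative only, the register chosen is just an example taken from the patch below:)

    /* inst_idx still selects which instance's DPG path performs the write,
     * but the offset lookup always uses instance 0: */
    WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
        UVD, 0, mmUVD_VCPU_CNTL), tmp, 0, indirect);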

Leo




James



Leo


+        addr = (adev->reg_offset[ip##_HWIP][0][reg##_BASE_IDX] + reg);            \
          addr <<= 2; \
          video_range = ((((0xFFFFF & addr) >= (VCN_VID_SOC_ADDRESS_2_0)) &&         \
                  ((0xFFFFF & addr) < ((VCN_VID_SOC_ADDRESS_2_0 + 0x2600)))));    \
diff --git a/drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c b/drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
index e2ad5afe..ad11c8e 100644
--- a/drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
+++ b/drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
@@ -352,88 +352,88 @@ static void vcn_v2_0_mc_resume_dpg_mode(struct amdgpu_device *adev, bool indirec
      if (adev->firmware.load_type == AMDGPU_FW_LOAD_PSP) {
          if (!indirect) {
              WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-                UVD, 0, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW),
+                UVD, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW),
(adev->firmware.ucode[AMDGPU_UCODE_ID_VCN].tmr_mc_addr_lo), 0, indirect);
              WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-                UVD, 0, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH),
+                UVD, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH),
(adev->firmware.ucode[AMDGPU_UCODE_ID_VCN].tmr_mc_addr_hi), 0, indirect);
              WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-                UVD, 0, mmUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect);
+                UVD, mmUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect);
          } else {
              WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-                UVD, 0, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW), 0, 0, indirect);
+                UVD, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW), 0, 0, indirect);
              WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-                UVD, 0, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH), 0, 0, indirect);
+                UVD, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH), 0, 0, indirect);
              WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-                UVD, 0, mmUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect);
+                UVD, mmUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect);
          }
          offset = 0;
      } else {
          WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-            UVD, 0, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW),
+            UVD, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW),
lower_32_bits(adev->vcn.inst->gpu_addr), 0, indirect);
          WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-            UVD, 0, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH),
+            UVD, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH),
upper_32_bits(adev->vcn.inst->gpu_addr), 0, indirect);
          offset = size;
          WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-            UVD, 0, mmUVD_VCPU_CACHE_OFFSET0),
+            UVD, mmUVD_VCPU_CACHE_OFFSET0),
              AMDGPU_UVD_FIRMWARE_OFFSET >> 3, 0, indirect);
      }
        if (!indirect)
          WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-            UVD, 0, mmUVD_VCPU_CACHE_SIZE0), size, 0, indirect);
+            UVD, mmUVD_VCPU_CACHE_SIZE0), size, 0, indirect);
      else
          WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-            UVD, 0, mmUVD_VCPU_CACHE_SIZE0), 0, 0, indirect);
+            UVD, mmUVD_VCPU_CACHE_SIZE0), 0, 0, indirect);
        /* cache window 1: stack */
      if (!indirect) {
          WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-            UVD, 0, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_LOW),
+            UVD, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_LOW),
lower_32_bits(adev->vcn.inst->gpu_addr + offset), 0, indirect);
          WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-            UVD, 0, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_HIGH),
+            UVD, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_HIGH),
upper_32_bits(adev->vcn.inst->gpu_addr + offset), 0, indirect);
          WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-            UVD, 0, mmUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect);
+            UVD, mmUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect);
      } else {
          WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-            UVD, 0, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_LOW), 0, 0, indirect);
+            UVD, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_LOW), 0, 0, indirect);
          WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-            UVD, 0, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_HIGH), 0, 0, indirect);
+            UVD, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_HIGH), 0, 0, indirect);
          WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-            UVD, 0, mmUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect);
+            UVD, mmUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect);
      }
      WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, 0, mmUVD_VCPU_CACHE_SIZE1), AMDGPU_VCN_STACK_SIZE, 0, indirect);
+        UVD, mmUVD_VCPU_CACHE_SIZE1), AMDGPU_VCN_STACK_SIZE, 0, indirect);
        /* cache window 2: context */
      WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, 0, mmUVD_LMI_VCPU_CACHE2_64BIT_BAR_LOW),
+        UVD, mmUVD_LMI_VCPU_CACHE2_64BIT_BAR_LOW),
          lower_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE), 0, indirect);
      WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, 0, mmUVD_LMI_VCPU_CACHE2_64BIT_BAR_HIGH),
+        UVD, mmUVD_LMI_VCPU_CACHE2_64BIT_BAR_HIGH),
          upper_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE), 0, indirect);
      WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, 0, mmUVD_VCPU_CACHE_OFFSET2), 0, 0, indirect);
+        UVD, mmUVD_VCPU_CACHE_OFFSET2), 0, 0, indirect);
      WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, 0, mmUVD_VCPU_CACHE_SIZE2), AMDGPU_VCN_CONTEXT_SIZE, 0, indirect);
+        UVD, mmUVD_VCPU_CACHE_SIZE2), AMDGPU_VCN_CONTEXT_SIZE, 0, indirect);
        /* non-cache window */
      WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, 0, mmUVD_LMI_VCPU_NC0_64BIT_BAR_LOW), 0, 0, indirect);
+        UVD, mmUVD_LMI_VCPU_NC0_64BIT_BAR_LOW), 0, 0, indirect);
      WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, 0, mmUVD_LMI_VCPU_NC0_64BIT_BAR_HIGH), 0, 0, indirect);
+        UVD, mmUVD_LMI_VCPU_NC0_64BIT_BAR_HIGH), 0, 0, indirect);
      WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, 0, mmUVD_VCPU_NONCACHE_OFFSET0), 0, 0, indirect);
+        UVD, mmUVD_VCPU_NONCACHE_OFFSET0), 0, 0, indirect);
      WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, 0, mmUVD_VCPU_NONCACHE_SIZE0), 0, 0, indirect);
+        UVD, mmUVD_VCPU_NONCACHE_SIZE0), 0, 0, indirect);
        /* VCN global tiling registers */
      WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, 0, mmUVD_GFX10_ADDR_CONFIG), adev->gfx.config.gb_addr_config, 0, indirect);
+        UVD, mmUVD_GFX10_ADDR_CONFIG), adev->gfx.config.gb_addr_config, 0, indirect);
  }
    /**
@@ -579,19 +579,19 @@ static void vcn_v2_0_clock_gating_dpg_mode(struct amdgpu_device *adev,
           UVD_CGC_CTRL__VCPU_MODE_MASK |
           UVD_CGC_CTRL__SCPU_MODE_MASK);
      WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, 0, mmUVD_CGC_CTRL), reg_data, sram_sel, indirect);
+        UVD, mmUVD_CGC_CTRL), reg_data, sram_sel, indirect);
        /* turn off clock gating */
      WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, 0, mmUVD_CGC_GATE), 0, sram_sel, indirect);
+        UVD, mmUVD_CGC_GATE), 0, sram_sel, indirect);
        /* turn on SUVD clock gating */
      WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, 0, mmUVD_SUVD_CGC_GATE), 1, sram_sel, indirect);
+        UVD, mmUVD_SUVD_CGC_GATE), 1, sram_sel, indirect);
        /* turn on sw mode in UVD_SUVD_CGC_CTRL */
      WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, 0, mmUVD_SUVD_CGC_CTRL), 0, sram_sel, indirect);
+        UVD, mmUVD_SUVD_CGC_CTRL), 0, sram_sel, indirect);
  }
    /**
@@ -764,11 +764,11 @@ static int vcn_v2_0_start_dpg_mode(struct amdgpu_device *adev, bool indirect)
      tmp |= UVD_VCPU_CNTL__CLK_EN_MASK;
      tmp |= UVD_VCPU_CNTL__MIF_WR_LOW_THRESHOLD_BP_MASK;
      WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, 0, mmUVD_VCPU_CNTL), tmp, 0, indirect);
+        UVD, mmUVD_VCPU_CNTL), tmp, 0, indirect);
        /* disable master interupt */
      WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, 0, mmUVD_MASTINT_EN), 0, 0, indirect);
+        UVD, mmUVD_MASTINT_EN), 0, 0, indirect);
        /* setup mmUVD_LMI_CTRL */
      tmp = (UVD_LMI_CTRL__WRITE_CLEAN_TIMER_EN_MASK |
@@ -780,28 +780,28 @@ static int vcn_v2_0_start_dpg_mode(struct amdgpu_device *adev, bool indirect)
          (8 << UVD_LMI_CTRL__WRITE_CLEAN_TIMER__SHIFT) |
          0x00100000L);
      WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, 0, mmUVD_LMI_CTRL), tmp, 0, indirect);
+        UVD, mmUVD_LMI_CTRL), tmp, 0, indirect);
        WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, 0, mmUVD_MPC_CNTL),
+        UVD, mmUVD_MPC_CNTL),
          0x2 << UVD_MPC_CNTL__REPLACEMENT_MODE__SHIFT, 0, indirect);
        WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, 0, mmUVD_MPC_SET_MUXA0),
+        UVD, mmUVD_MPC_SET_MUXA0),
          ((0x1 << UVD_MPC_SET_MUXA0__VARA_1__SHIFT) |
           (0x2 << UVD_MPC_SET_MUXA0__VARA_2__SHIFT) |
           (0x3 << UVD_MPC_SET_MUXA0__VARA_3__SHIFT) |
           (0x4 << UVD_MPC_SET_MUXA0__VARA_4__SHIFT)), 0, indirect);
        WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, 0, mmUVD_MPC_SET_MUXB0),
+        UVD, mmUVD_MPC_SET_MUXB0),
          ((0x1 << UVD_MPC_SET_MUXB0__VARB_1__SHIFT) |
           (0x2 << UVD_MPC_SET_MUXB0__VARB_2__SHIFT) |
           (0x3 << UVD_MPC_SET_MUXB0__VARB_3__SHIFT) |
           (0x4 << UVD_MPC_SET_MUXB0__VARB_4__SHIFT)), 0, indirect);
        WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, 0, mmUVD_MPC_SET_MUX),
+        UVD, mmUVD_MPC_SET_MUX),
          ((0x0 << UVD_MPC_SET_MUX__SET_0__SHIFT) |
           (0x1 << UVD_MPC_SET_MUX__SET_1__SHIFT) |
           (0x2 << UVD_MPC_SET_MUX__SET_2__SHIFT)), 0, indirect);
@@ -809,22 +809,22 @@ static int vcn_v2_0_start_dpg_mode(struct amdgpu_device *adev, bool indirect)
      vcn_v2_0_mc_resume_dpg_mode(adev, indirect);
        WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, 0, mmUVD_REG_XX_MASK), 0x10, 0, indirect);
+        UVD, mmUVD_REG_XX_MASK), 0x10, 0, indirect);
      WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, 0, mmUVD_RBC_XX_IB_REG_CHECK), 0x3, 0, indirect);
+        UVD, mmUVD_RBC_XX_IB_REG_CHECK), 0x3, 0, indirect);
        /* release VCPU reset to boot */
      WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, 0, mmUVD_SOFT_RESET), 0, 0, indirect);
+        UVD, mmUVD_SOFT_RESET), 0, 0, indirect);
        /* enable LMI MC and UMC channels */
      WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, 0, mmUVD_LMI_CTRL2),
+        UVD, mmUVD_LMI_CTRL2),
          0x1F << UVD_LMI_CTRL2__RE_OFLD_MIF_WR_REQ_NUM__SHIFT, 0, indirect);
        /* enable master interrupt */
      WREG32_SOC15_DPG_MODE_2_0(0, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, 0, mmUVD_MASTINT_EN),
+        UVD, mmUVD_MASTINT_EN),
          UVD_MASTINT_EN__VCPU_EN_MASK, 0, indirect);
        if (indirect)
diff --git a/drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c b/drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
index 740a291..42ca36c 100644
--- a/drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
+++ b/drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
@@ -435,88 +435,88 @@ static void vcn_v2_5_mc_resume_dpg_mode(struct amdgpu_device *adev, int inst_idx
      if (adev->firmware.load_type == AMDGPU_FW_LOAD_PSP) {
          if (!indirect) {
              WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-                UVD, inst_idx, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW),
+                UVD, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW),
(adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx].tmr_mc_addr_lo), 0, indirect);
              WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-                UVD, inst_idx, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH),
+                UVD, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH),
(adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx].tmr_mc_addr_hi), 0, indirect);
              WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-                UVD, inst_idx, mmUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect);
+                UVD, mmUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect);
          } else {
              WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-                UVD, inst_idx, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW), 0, 0, indirect);
+                UVD, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW), 0, 0, indirect);
              WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-                UVD, inst_idx, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH), 0, 0, indirect);
+                UVD, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH), 0, 0, indirect);
              WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-                UVD, inst_idx, mmUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect);
+                UVD, mmUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect);
          }
          offset = 0;
      } else {
          WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-            UVD, inst_idx, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW),
+            UVD, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW),
lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);
          WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-            UVD, inst_idx, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH),
+            UVD, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH),
upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);
          offset = size;
          WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-            UVD, inst_idx, mmUVD_VCPU_CACHE_OFFSET0),
+            UVD, mmUVD_VCPU_CACHE_OFFSET0),
              AMDGPU_UVD_FIRMWARE_OFFSET >> 3, 0, indirect);
      }
        if (!indirect)
          WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-            UVD, inst_idx, mmUVD_VCPU_CACHE_SIZE0), size, 0, indirect);
+            UVD, mmUVD_VCPU_CACHE_SIZE0), size, 0, indirect);
      else
          WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-            UVD, inst_idx, mmUVD_VCPU_CACHE_SIZE0), 0, 0, indirect);
+            UVD, mmUVD_VCPU_CACHE_SIZE0), 0, 0, indirect);
        /* cache window 1: stack */
      if (!indirect) {
          WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-            UVD, inst_idx, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_LOW),
+            UVD, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_LOW),
lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect);
          WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-            UVD, inst_idx, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_HIGH),
+            UVD, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_HIGH),
upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect);
          WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-            UVD, inst_idx, mmUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect);
+            UVD, mmUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect);
      } else {
          WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-            UVD, inst_idx, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_LOW), 0, 0, indirect);
+            UVD, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_LOW), 0, 0, indirect);
          WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-            UVD, inst_idx, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_HIGH), 0, 0, indirect);
+            UVD, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_HIGH), 0, 0, indirect);
          WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-            UVD, inst_idx, mmUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect);
+            UVD, mmUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect);
      }
      WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, inst_idx, mmUVD_VCPU_CACHE_SIZE1), AMDGPU_VCN_STACK_SIZE, 0, indirect);
+        UVD, mmUVD_VCPU_CACHE_SIZE1), AMDGPU_VCN_STACK_SIZE, 0, indirect);
        /* cache window 2: context */
      WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, inst_idx, mmUVD_LMI_VCPU_CACHE2_64BIT_BAR_LOW),
+        UVD, mmUVD_LMI_VCPU_CACHE2_64BIT_BAR_LOW),
lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE), 0, indirect);
      WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, inst_idx, mmUVD_LMI_VCPU_CACHE2_64BIT_BAR_HIGH),
+        UVD, mmUVD_LMI_VCPU_CACHE2_64BIT_BAR_HIGH),
upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE), 0, indirect);
      WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, inst_idx, mmUVD_VCPU_CACHE_OFFSET2), 0, 0, indirect);
+        UVD, mmUVD_VCPU_CACHE_OFFSET2), 0, 0, indirect);
      WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, inst_idx, mmUVD_VCPU_CACHE_SIZE2), AMDGPU_VCN_CONTEXT_SIZE, 0, indirect);
+        UVD, mmUVD_VCPU_CACHE_SIZE2), AMDGPU_VCN_CONTEXT_SIZE, 0, indirect);
        /* non-cache window */
      WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, inst_idx, mmUVD_LMI_VCPU_NC0_64BIT_BAR_LOW), 0, 0, indirect);
+        UVD, mmUVD_LMI_VCPU_NC0_64BIT_BAR_LOW), 0, 0, indirect);
      WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, inst_idx, mmUVD_LMI_VCPU_NC0_64BIT_BAR_HIGH), 0, 0, indirect);
+        UVD, mmUVD_LMI_VCPU_NC0_64BIT_BAR_HIGH), 0, 0, indirect);
      WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, inst_idx, mmUVD_VCPU_NONCACHE_OFFSET0), 0, 0, indirect);
+        UVD, mmUVD_VCPU_NONCACHE_OFFSET0), 0, 0, indirect);
      WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, inst_idx, mmUVD_VCPU_NONCACHE_SIZE0), 0, 0, indirect);
+        UVD, mmUVD_VCPU_NONCACHE_SIZE0), 0, 0, indirect);
        /* VCN global tiling registers */
      WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, inst_idx, mmUVD_GFX8_ADDR_CONFIG), adev->gfx.config.gb_addr_config, 0, indirect);
+        UVD, mmUVD_GFX8_ADDR_CONFIG), adev->gfx.config.gb_addr_config, 0, indirect);
  }
    /**
@@ -670,19 +670,19 @@ static void vcn_v2_5_clock_gating_dpg_mode(struct amdgpu_device *adev,
           UVD_CGC_CTRL__VCPU_MODE_MASK |
           UVD_CGC_CTRL__MMSCH_MODE_MASK);
      WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, inst_idx, mmUVD_CGC_CTRL), reg_data, sram_sel, indirect);
+        UVD, mmUVD_CGC_CTRL), reg_data, sram_sel, indirect);
        /* turn off clock gating */
      WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, inst_idx, mmUVD_CGC_GATE), 0, sram_sel, indirect);
+        UVD, mmUVD_CGC_GATE), 0, sram_sel, indirect);
        /* turn on SUVD clock gating */
      WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, inst_idx, mmUVD_SUVD_CGC_GATE), 1, sram_sel, indirect);
+        UVD, mmUVD_SUVD_CGC_GATE), 1, sram_sel, indirect);
        /* turn on sw mode in UVD_SUVD_CGC_CTRL */
      WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, inst_idx, mmUVD_SUVD_CGC_CTRL), 0, sram_sel, indirect);
+        UVD, mmUVD_SUVD_CGC_CTRL), 0, sram_sel, indirect);
  }
    /**
@@ -772,11 +772,11 @@ static int vcn_v2_5_start_dpg_mode(struct amdgpu_device *adev, int inst_idx, boo
      tmp |= UVD_VCPU_CNTL__CLK_EN_MASK;
      tmp |= UVD_VCPU_CNTL__BLK_RST_MASK;
      WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, inst_idx, mmUVD_VCPU_CNTL), tmp, 0, indirect);
+        UVD, mmUVD_VCPU_CNTL), tmp, 0, indirect);
        /* disable master interupt */
      WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, inst_idx, mmUVD_MASTINT_EN), 0, 0, indirect);
+        UVD, mmUVD_MASTINT_EN), 0, 0, indirect);
        /* setup mmUVD_LMI_CTRL */
      tmp = (0x8 | UVD_LMI_CTRL__WRITE_CLEAN_TIMER_EN_MASK |
@@ -788,28 +788,28 @@ static int vcn_v2_5_start_dpg_mode(struct amdgpu_device *adev, int inst_idx, boo
          (8 << UVD_LMI_CTRL__WRITE_CLEAN_TIMER__SHIFT) |
          0x00100000L);
      WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, inst_idx, mmUVD_LMI_CTRL), tmp, 0, indirect);
+        UVD, mmUVD_LMI_CTRL), tmp, 0, indirect);
        WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, inst_idx, mmUVD_MPC_CNTL),
+        UVD, mmUVD_MPC_CNTL),
          0x2 << UVD_MPC_CNTL__REPLACEMENT_MODE__SHIFT, 0, indirect);
        WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, inst_idx, mmUVD_MPC_SET_MUXA0),
+        UVD, mmUVD_MPC_SET_MUXA0),
          ((0x1 << UVD_MPC_SET_MUXA0__VARA_1__SHIFT) |
           (0x2 << UVD_MPC_SET_MUXA0__VARA_2__SHIFT) |
           (0x3 << UVD_MPC_SET_MUXA0__VARA_3__SHIFT) |
           (0x4 << UVD_MPC_SET_MUXA0__VARA_4__SHIFT)), 0, indirect);
        WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, inst_idx, mmUVD_MPC_SET_MUXB0),
+        UVD, mmUVD_MPC_SET_MUXB0),
          ((0x1 << UVD_MPC_SET_MUXB0__VARB_1__SHIFT) |
           (0x2 << UVD_MPC_SET_MUXB0__VARB_2__SHIFT) |
           (0x3 << UVD_MPC_SET_MUXB0__VARB_3__SHIFT) |
           (0x4 << UVD_MPC_SET_MUXB0__VARB_4__SHIFT)), 0, indirect);
        WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, inst_idx, mmUVD_MPC_SET_MUX),
+        UVD, mmUVD_MPC_SET_MUX),
          ((0x0 << UVD_MPC_SET_MUX__SET_0__SHIFT) |
           (0x1 << UVD_MPC_SET_MUX__SET_1__SHIFT) |
           (0x2 << UVD_MPC_SET_MUX__SET_2__SHIFT)), 0, indirect);
@@ -817,26 +817,26 @@ static int vcn_v2_5_start_dpg_mode(struct amdgpu_device *adev, int inst_idx, boo
      vcn_v2_5_mc_resume_dpg_mode(adev, inst_idx, indirect);
        WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, inst_idx, mmUVD_REG_XX_MASK), 0x10, 0, indirect);
+        UVD, mmUVD_REG_XX_MASK), 0x10, 0, indirect);
      WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, inst_idx, mmUVD_RBC_XX_IB_REG_CHECK), 0x3, 0, indirect);
+        UVD, mmUVD_RBC_XX_IB_REG_CHECK), 0x3, 0, indirect);
        /* enable LMI MC and UMC channels */
      WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, inst_idx, mmUVD_LMI_CTRL2), 0, 0, indirect);
+        UVD, mmUVD_LMI_CTRL2), 0, 0, indirect);
        /* unblock VCPU register access */
      WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, inst_idx, mmUVD_RB_ARB_CTRL), 0, 0, indirect);
+        UVD, mmUVD_RB_ARB_CTRL), 0, 0, indirect);
        tmp = (0xFF << UVD_VCPU_CNTL__PRB_TIMEOUT_VAL__SHIFT);
      tmp |= UVD_VCPU_CNTL__CLK_EN_MASK;
      WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, inst_idx, mmUVD_VCPU_CNTL), tmp, 0, indirect);
+        UVD, mmUVD_VCPU_CNTL), tmp, 0, indirect);
        /* enable master interrupt */
      WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0(
-        UVD, inst_idx, mmUVD_MASTINT_EN),
+        UVD, mmUVD_MASTINT_EN),
          UVD_MASTINT_EN__VCPU_EN_MASK, 0, indirect);
        if (indirect)
_______________________________________________
amd-gfx mailing list
amd-gfx@xxxxxxxxxxxxxxxxxxxxx
https://lists.freedesktop.org/mailman/listinfo/amd-gfx



