Hello Thomas Hellström,

Commit 08a4f00e62bc ("drm/xe/bo: Simplify xe_bo_lock()") from Sep 8,
2023 (linux-next), leads to the following Smatch static checker
warning:

	drivers/gpu/drm/xe/xe_vm.c:2095 vm_bind_ioctl_ops_create()
	error: we previously assumed 'bo' could be null (see line 2067)

drivers/gpu/drm/xe/xe_vm.c
    2061 static struct drm_gpuva_ops *
    2062 vm_bind_ioctl_ops_create(struct xe_vm *vm, struct xe_bo *bo,
    2063                          u64 bo_offset_or_userptr, u64 addr, u64 range,
    2064                          u32 operation, u32 flags,
    2065                          u32 prefetch_region, u16 pat_index)
    2066 {
    2067         struct drm_gem_object *obj = bo ? &bo->ttm.base : NULL;
                                              ^^
So far as I can see bo is always valid.  No need to check here.

    2068         struct drm_gpuva_ops *ops;
    2069         struct drm_gpuva_op *__op;
    2070         struct drm_gpuvm_bo *vm_bo;
    2071         int err;
    2072 
    2073         lockdep_assert_held_write(&vm->lock);
    2074 
    2075         vm_dbg(&vm->xe->drm,
    2076                "op=%d, addr=0x%016llx, range=0x%016llx, bo_offset_or_userptr=0x%016llx",
    2077                operation, (ULL)addr, (ULL)range,
    2078                (ULL)bo_offset_or_userptr);
    2079 
    2080         switch (operation) {
    2081         case DRM_XE_VM_BIND_OP_MAP:
    2082         case DRM_XE_VM_BIND_OP_MAP_USERPTR:
    2083                 ops = drm_gpuvm_sm_map_ops_create(&vm->gpuvm, addr, range,
    2084                                                   obj, bo_offset_or_userptr);
    2085                 break;
    2086         case DRM_XE_VM_BIND_OP_UNMAP:
    2087                 ops = drm_gpuvm_sm_unmap_ops_create(&vm->gpuvm, addr, range);
    2088                 break;
    2089         case DRM_XE_VM_BIND_OP_PREFETCH:
    2090                 ops = drm_gpuvm_prefetch_ops_create(&vm->gpuvm, addr, range);
    2091                 break;
    2092         case DRM_XE_VM_BIND_OP_UNMAP_ALL:
    2093                 xe_assert(vm->xe, bo);
    2094 
--> 2095                 err = xe_bo_lock(bo, true);
                                          ^^
Unchecked dereference here.

    2096                 if (err)
    2097                         return ERR_PTR(err);
    2098 
    2099                 vm_bo = drm_gpuvm_bo_obtain(&vm->gpuvm, obj);
    2100                 if (IS_ERR(vm_bo)) {
    2101                         xe_bo_unlock(bo);
    2102                         return ERR_CAST(vm_bo);
    2103                 }
    2104 
    2105                 ops = drm_gpuvm_bo_unmap_ops_create(vm_bo);
    2106                 drm_gpuvm_bo_put(vm_bo);
    2107                 xe_bo_unlock(bo);
    2108                 break;
    2109         default:
    2110                 drm_warn(&vm->xe->drm, "NOT POSSIBLE");
    2111                 ops = ERR_PTR(-EINVAL);
    2112         }
    2113         if (IS_ERR(ops))
    2114                 return ops;
    2115 
    2116         drm_gpuva_for_each_op(__op, ops) {
    2117                 struct xe_vma_op *op = gpuva_op_to_vma_op(__op);
    2118 
    2119                 if (__op->op == DRM_GPUVA_OP_MAP) {
    2120                         op->map.immediate =
    2121                                 flags & DRM_XE_VM_BIND_FLAG_IMMEDIATE;
    2122                         op->map.read_only =
    2123                                 flags & DRM_XE_VM_BIND_FLAG_READONLY;
    2124                         op->map.is_null = flags & DRM_XE_VM_BIND_FLAG_NULL;
    2125                         op->map.dumpable = flags & DRM_XE_VM_BIND_FLAG_DUMPABLE;
    2126                         op->map.pat_index = pat_index;
    2127                 } else if (__op->op == DRM_GPUVA_OP_PREFETCH) {
    2128                         op->prefetch.region = prefetch_region;
    2129                 }
    2130 
    2131                 print_op(vm->xe, __op);
    2132         }
    2133 
    2134         return ops;
    2135 }

regards,
dan carpenter
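
P.S. For anyone unfamiliar with this class of warning, here is a
minimal standalone sketch of the contradiction Smatch is pointing at
(userspace C with hypothetical names, not code from the driver): the
pointer is NULL-tested at one site and then dereferenced
unconditionally at another, so one of the two sites has to be wrong.

#include <stdio.h>
#include <stddef.h>

struct buf {
	int refcount;
};

/* Hypothetical helper that unconditionally dereferences its argument,
 * the way xe_bo_lock() does with 'bo'. */
static int buf_lock(struct buf *b)
{
	return b->refcount;	/* crashes if b is NULL */
}

static int do_op(struct buf *b, int op)
{
	/* Site 1: the ternary tells the checker "b may be NULL"... */
	int *count = b ? &b->refcount : NULL;

	if (op == 0)
		return count ? *count : 0;

	/* Site 2: ...but this path dereferences b unconditionally,
	 * which is the inconsistency the checker reports. */
	return buf_lock(b);
}

int main(void)
{
	struct buf b = { .refcount = 1 };

	printf("%d\n", do_op(&b, 1));	/* fine: b is valid */
	printf("%d\n", do_op(NULL, 0));	/* fine: guarded path */
	/* do_op(NULL, 1) would dereference NULL */
	return 0;
}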
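
If 'bo' really is non-NULL on every path into
vm_bind_ioctl_ops_create(), as it looks to me, then one possible
cleanup (an untested sketch, not a patch) is to drop the ternary at
line 2067 so the two sites agree:

	-	struct drm_gem_object *obj = bo ? &bo->ttm.base : NULL;
	+	struct drm_gem_object *obj = &bo->ttm.base;

The other direction, if bo can legitimately be NULL for the
non-UNMAP_ALL operations, would be to keep the ternary and add a
check before the xe_bo_lock() call, since xe_assert() at line 2093
doesn't help the checker on non-debug builds.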