bo                882 arch/mips/include/asm/sn/sn0/hubio.h 		bo:	1,  /*	  31: barrier op set in xtalk rqst*/
bo                245 arch/mips/include/asm/xtalk/xwidget.h 	unsigned	bo:1;
bo                 86 arch/powerpc/lib/sstep.c 	unsigned int bo = (instr >> 21) & 0x1f;
bo                 89 arch/powerpc/lib/sstep.c 	if ((bo & 4) == 0) {
bo                 92 arch/powerpc/lib/sstep.c 		if (((bo >> 1) & 1) ^ (regs->ctr == 1))
bo                 95 arch/powerpc/lib/sstep.c 	if ((bo & 0x10) == 0) {
bo                 98 arch/powerpc/lib/sstep.c 		if (((regs->ccr >> (31 - bi)) & 1) != ((bo >> 3) & 1))
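
In the sstep.c hits above, bo is the 5-bit BO (branch options) field of a PowerPC conditional branch, extracted from bits 21-25 of the instruction word. A minimal sketch of the test those lines implement, with regs reduced to the two registers the test reads; in the kernel this is struct pt_regs, and the CTR decrement itself is recorded separately by the emulator, which is why the excerpt compares CTR against 1 rather than 0:

	struct bo_regs { unsigned long ctr, ccr; };

	static int branch_taken(unsigned int instr, const struct bo_regs *regs)
	{
		unsigned int bo = (instr >> 21) & 0x1f;	/* branch options field */
		unsigned int bi = (instr >> 16) & 0x1f;	/* which CR bit to test */

		if ((bo & 4) == 0) {
			/* CTR participates: it will be decremented, and the
			 * branch condition looks at the post-decrement value */
			if (((bo >> 1) & 1) ^ (regs->ctr == 1))
				return 0;
		}
		if ((bo & 0x10) == 0) {
			/* the selected CR bit must match BO bit 3 */
			if (((regs->ccr >> (31 - bi)) & 1) != ((bo >> 3) & 1))
				return 0;
		}
		return 1;	/* branch taken */
	}
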
bo               2354 arch/powerpc/xmon/ppc-opc.c #define EBD8IO(op, bo, bi) (BD8IO ((op)) | ((bo) << 10) | ((bi) << 8))
bo               2367 arch/powerpc/xmon/ppc-opc.c #define EBD15(op, aa, bo, lk) (((op) & 0x3f) << 26) | (((aa) & 0xf) << 22) | (((bo) & 0x3) << 20) | ((lk) & 1)
bo               2371 arch/powerpc/xmon/ppc-opc.c #define EBD15BI(op, aa, bo, bi, lk) (((op) & 0x3f) << 26) \
bo               2373 arch/powerpc/xmon/ppc-opc.c                                     | (((bo) & 0x3) << 20) \
bo               2383 arch/powerpc/xmon/ppc-opc.c #define BBO(op, bo, aa, lk) (B ((op), (aa), (lk)) | ((((unsigned long)(bo)) & 0x1f) << 21))
bo               2397 arch/powerpc/xmon/ppc-opc.c #define BBOCB(op, bo, cb, aa, lk) \
bo               2398 arch/powerpc/xmon/ppc-opc.c   (BBO ((op), (bo), (aa), (lk)) | ((((unsigned long)(cb)) & 0x3) << 16))
bo               2793 arch/powerpc/xmon/ppc-opc.c #define XLO(op, bo, xop, lk) \
bo               2794 arch/powerpc/xmon/ppc-opc.c   (XLLK ((op), (xop), (lk)) | ((((unsigned long)(bo)) & 0x1f) << 21))
bo               2804 arch/powerpc/xmon/ppc-opc.c #define XLOCB(op, bo, cb, xop, lk) \
bo               2805 arch/powerpc/xmon/ppc-opc.c   (XLO ((op), (bo), (xop), (lk)) | ((((unsigned long)(cb)) & 3) << 16))
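
The ppc-opc.c macros above are the complementary encoder: BBO and XLO mask a 5-bit bo value and shift it into bits 21-25 of the opcode word, and BBOCB/XLOCB add the 2-bit CR-field selector at bit 16. A hedged standalone rendering; OPCODE() here is a stand-in for the B()/XLLK() bases, which the listing does not show:

	#include <stdio.h>

	#define OPCODE(op)	((((unsigned long)(op)) & 0x3f) << 26)
	#define BBO(op, bo, aa, lk) \
		(OPCODE(op) | ((((unsigned long)(bo)) & 0x1f) << 21) | \
		 (((aa) & 1) << 1) | ((lk) & 1))

	int main(void)
	{
		/* "branch always" (bc 20,0,...) uses BO = 0x14 */
		printf("bc insn word: 0x%08lx\n", BBO(16, 0x14, 0, 0));
		return 0;
	}
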
bo               1153 crypto/aes_generic.c #define f_rn(bo, bi, n, k)	do {				\
bo               1154 crypto/aes_generic.c 	bo[n] = crypto_ft_tab[0][byte(bi[n], 0)] ^			\
bo               1160 crypto/aes_generic.c #define f_nround(bo, bi, k)	do {\
bo               1161 crypto/aes_generic.c 	f_rn(bo, bi, 0, k);	\
bo               1162 crypto/aes_generic.c 	f_rn(bo, bi, 1, k);	\
bo               1163 crypto/aes_generic.c 	f_rn(bo, bi, 2, k);	\
bo               1164 crypto/aes_generic.c 	f_rn(bo, bi, 3, k);	\
bo               1168 crypto/aes_generic.c #define f_rl(bo, bi, n, k)	do {				\
bo               1169 crypto/aes_generic.c 	bo[n] = crypto_fl_tab[0][byte(bi[n], 0)] ^			\
bo               1175 crypto/aes_generic.c #define f_lround(bo, bi, k)	do {\
bo               1176 crypto/aes_generic.c 	f_rl(bo, bi, 0, k);	\
bo               1177 crypto/aes_generic.c 	f_rl(bo, bi, 1, k);	\
bo               1178 crypto/aes_generic.c 	f_rl(bo, bi, 2, k);	\
bo               1179 crypto/aes_generic.c 	f_rl(bo, bi, 3, k);	\
bo               1223 crypto/aes_generic.c #define i_rn(bo, bi, n, k)	do {				\
bo               1224 crypto/aes_generic.c 	bo[n] = crypto_it_tab[0][byte(bi[n], 0)] ^			\
bo               1230 crypto/aes_generic.c #define i_nround(bo, bi, k)	do {\
bo               1231 crypto/aes_generic.c 	i_rn(bo, bi, 0, k);	\
bo               1232 crypto/aes_generic.c 	i_rn(bo, bi, 1, k);	\
bo               1233 crypto/aes_generic.c 	i_rn(bo, bi, 2, k);	\
bo               1234 crypto/aes_generic.c 	i_rn(bo, bi, 3, k);	\
bo               1238 crypto/aes_generic.c #define i_rl(bo, bi, n, k)	do {			\
bo               1239 crypto/aes_generic.c 	bo[n] = crypto_il_tab[0][byte(bi[n], 0)] ^		\
bo               1245 crypto/aes_generic.c #define i_lround(bo, bi, k)	do {\
bo               1246 crypto/aes_generic.c 	i_rl(bo, bi, 0, k);	\
bo               1247 crypto/aes_generic.c 	i_rl(bo, bi, 1, k);	\
bo               1248 crypto/aes_generic.c 	i_rl(bo, bi, 2, k);	\
bo               1249 crypto/aes_generic.c 	i_rl(bo, bi, 3, k);	\
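
In aes_generic.c, bo and bi name the four 32-bit output and input words of one AES round; the index lines shown above capture only the first T-table lookup of each macro, with the continuation lines elided. A sketch of the full pattern the f_rn/f_nround pair expands to, assuming the kernel's rotated-index table layout and a byte(w, n) helper that extracts byte n of a little-endian word:

	#include <stdint.h>

	#define byte(w, n)	(((w) >> (8 * (n))) & 0xff)

	extern const uint32_t crypto_ft_tab[4][256];	/* forward round T-tables */

	static void f_nround_sketch(uint32_t bo[4], const uint32_t bi[4],
				    const uint32_t *k)
	{
		int n;

		/* each output word mixes one byte from each input word,
		 * rotating the source word index by the byte position */
		for (n = 0; n < 4; n++)
			bo[n] = crypto_ft_tab[0][byte(bi[n], 0)] ^
				crypto_ft_tab[1][byte(bi[(n + 1) & 3], 1)] ^
				crypto_ft_tab[2][byte(bi[(n + 2) & 3], 2)] ^
				crypto_ft_tab[3][byte(bi[(n + 3) & 3], 3)] ^ k[n];
	}

The i_rn/i_nround and the f_rl/i_rl "last round" macros follow the same shape against crypto_it_tab, crypto_fl_tab and crypto_il_tab respectively.
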
bo                355 drivers/gpu/drm/amd/amdgpu/amdgpu.h 	struct amdgpu_bo	*bo;
bo               1255 drivers/gpu/drm/amd/amdgpu/amdgpu.h 			   uint64_t addr, struct amdgpu_bo **bo,
bo                273 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	struct amdgpu_bo *bo = NULL;
bo                289 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	r = amdgpu_bo_create(adev, &bp, &bo);
bo                297 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	r = amdgpu_bo_reserve(bo, true);
bo                303 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	r = amdgpu_bo_pin(bo, AMDGPU_GEM_DOMAIN_GTT);
bo                309 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	r = amdgpu_ttm_alloc_gart(&bo->tbo);
bo                311 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 		dev_err(adev->dev, "%p bind failed\n", bo);
bo                315 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	r = amdgpu_bo_kmap(bo, &cpu_ptr_tmp);
bo                322 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	*mem_obj = bo;
bo                323 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	*gpu_addr = amdgpu_bo_gpu_offset(bo);
bo                326 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	amdgpu_bo_unreserve(bo);
bo                331 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	amdgpu_bo_unpin(bo);
bo                333 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	amdgpu_bo_unreserve(bo);
bo                335 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	amdgpu_bo_unref(&bo);
bo                342 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	struct amdgpu_bo *bo = (struct amdgpu_bo *) mem_obj;
bo                344 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	amdgpu_bo_reserve(bo, true);
bo                345 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	amdgpu_bo_kunmap(bo);
bo                346 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	amdgpu_bo_unpin(bo);
bo                347 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	amdgpu_bo_unreserve(bo);
bo                348 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	amdgpu_bo_unref(&(bo));
bo                355 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	struct amdgpu_bo *bo = NULL;
bo                367 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	r = amdgpu_bo_create(adev, &bp, &bo);
bo                374 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	*mem_obj = bo;
bo                380 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	struct amdgpu_bo *bo = (struct amdgpu_bo *)mem_obj;
bo                382 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	amdgpu_bo_unref(&bo);
bo                511 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	struct amdgpu_bo *bo;
bo                529 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	bo = gem_to_amdgpu_bo(obj);
bo                530 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	if (!(bo->preferred_domains & (AMDGPU_GEM_DOMAIN_VRAM |
bo                539 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 		*bo_size = amdgpu_bo_size(bo);
bo                541 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 		*metadata_size = bo->metadata_size;
bo                543 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 		r = amdgpu_bo_get_metadata(bo, metadata_buffer, buffer_size,
bo                546 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 		*flags = (bo->preferred_domains & AMDGPU_GEM_DOMAIN_VRAM) ?
bo                549 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 		if (bo->flags & AMDGPU_GEM_CREATE_CPU_ACCESS_REQUIRED)
bo                693 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c void amdgpu_amdkfd_unreserve_memory_limit(struct amdgpu_bo *bo)
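
The amdgpu_amdkfd.c hits above (alloc_gtt_mem and its free counterpart) walk the canonical amdgpu buffer-object lifecycle: create, reserve, pin, bind to GART, kmap, unreserve, with the error path unwinding in reverse order. Condensed into one hedged sketch using only the helpers visible in the listing:

	static int alloc_pinned_gtt_bo(struct amdgpu_device *adev,
				       struct amdgpu_bo_param *bp,
				       struct amdgpu_bo **bo_out, void **cpu_ptr)
	{
		struct amdgpu_bo *bo = NULL;
		int r;

		r = amdgpu_bo_create(adev, bp, &bo);		/* allocate the BO  */
		if (r)
			return r;
		r = amdgpu_bo_reserve(bo, true);		/* lock its resv    */
		if (r)
			goto err_unref;
		r = amdgpu_bo_pin(bo, AMDGPU_GEM_DOMAIN_GTT);	/* keep it resident */
		if (r)
			goto err_unreserve;
		r = amdgpu_ttm_alloc_gart(&bo->tbo);		/* bind GART pages  */
		if (r)
			goto err_unpin;
		r = amdgpu_bo_kmap(bo, cpu_ptr);		/* CPU mapping      */
		if (r)
			goto err_unpin;
		amdgpu_bo_unreserve(bo);
		*bo_out = bo;
		return 0;

	err_unpin:
		amdgpu_bo_unpin(bo);				/* unwind in reverse */
	err_unreserve:
		amdgpu_bo_unreserve(bo);
	err_unref:
		amdgpu_bo_unref(&bo);
		return r;
	}
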
bo                 51 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h 	struct amdgpu_bo *bo;
bo                237 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h void amdgpu_amdkfd_unreserve_memory_limit(struct amdgpu_bo *bo);
bo                194 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c void amdgpu_amdkfd_unreserve_memory_limit(struct amdgpu_bo *bo)
bo                196 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
bo                197 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	u32 domain = bo->preferred_domains;
bo                198 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	bool sg = (bo->preferred_domains == AMDGPU_GEM_DOMAIN_CPU);
bo                200 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (bo->flags & AMDGPU_AMDKFD_USERPTR_BO) {
bo                205 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	unreserve_mem_limit(adev, amdgpu_bo_size(bo), domain, sg);
bo                218 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c static int amdgpu_amdkfd_remove_eviction_fence(struct amdgpu_bo *bo,
bo                221 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct dma_resv *resv = bo->tbo.base.resv;
bo                274 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c static int amdgpu_amdkfd_bo_validate(struct amdgpu_bo *bo, uint32_t domain,
bo                280 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (WARN(amdgpu_ttm_tt_get_usermm(bo->tbo.ttm),
bo                284 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	amdgpu_bo_placement_from_domain(bo, domain);
bo                286 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
bo                290 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		amdgpu_bo_sync_wait(bo, AMDGPU_FENCE_OWNER_KFD, false);
bo                296 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c static int amdgpu_amdkfd_validate(void *param, struct amdgpu_bo *bo)
bo                300 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	return amdgpu_amdkfd_bo_validate(bo, p->domain, p->wait);
bo                312 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct amdgpu_bo *pd = vm->root.base.bo;
bo                333 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	vm->pd_phys_addr = amdgpu_gmc_pd_addr(vm->root.base.bo);
bo                348 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct amdgpu_bo *pd = vm->root.base.bo;
bo                377 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct amdgpu_bo *bo = mem->bo;
bo                380 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	unsigned long bo_size = bo->tbo.mem.size;
bo                398 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	bo_va_entry->bo_va = amdgpu_vm_bo_add(adev, vm, bo);
bo                448 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct amdgpu_bo *bo = mem->bo;
bo                452 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	entry->bo = &bo->tbo;
bo                488 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct amdgpu_bo *bo = mem->bo;
bo                494 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ret = amdgpu_ttm_tt_set_userptr(bo->tbo.ttm, user_addr, 0);
bo                500 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ret = amdgpu_mn_register(bo, user_addr);
bo                507 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ret = amdgpu_ttm_tt_get_user_pages(bo, bo->tbo.ttm->pages);
bo                513 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ret = amdgpu_bo_reserve(bo, true);
bo                518 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	amdgpu_bo_placement_from_domain(bo, mem->domain);
bo                519 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
bo                522 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	amdgpu_bo_unreserve(bo);
bo                525 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	amdgpu_ttm_tt_get_user_pages_done(bo->tbo.ttm);
bo                528 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		amdgpu_mn_unregister(bo);
bo                565 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct amdgpu_bo *bo = mem->bo;
bo                582 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ctx->kfd_bo.tv.bo = &bo->tbo;
bo                615 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct amdgpu_bo *bo = mem->bo;
bo                645 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ctx->kfd_bo.tv.bo = &bo->tbo;
bo                747 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 			       amdgpu_bo_size(entry->bo_va->base.bo),
bo                812 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		struct amdgpu_bo *pd = peer_vm->root.base.bo;
bo                878 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ret = amdgpu_bo_reserve(vm->root.base.bo, true);
bo                886 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ret = amdgpu_bo_sync_wait(vm->root.base.bo,
bo                890 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ret = dma_resv_reserve_shared(vm->root.base.bo->tbo.base.resv, 1);
bo                893 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	amdgpu_bo_fence(vm->root.base.bo,
bo                895 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	amdgpu_bo_unreserve(vm->root.base.bo);
bo                909 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	amdgpu_bo_unreserve(vm->root.base.bo);
bo                995 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct amdgpu_bo *pd = vm->root.base.bo;
bo               1062 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct amdgpu_bo *pd = avm->root.base.bo;
bo               1080 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct amdgpu_bo *bo;
bo               1176 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ret = amdgpu_bo_create(adev, &bp, &bo);
bo               1183 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		bo->tbo.sg = sg;
bo               1184 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		bo->tbo.ttm->sg = sg;
bo               1186 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	bo->kfd_bo = *mem;
bo               1187 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	(*mem)->bo = bo;
bo               1189 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		bo->flags |= AMDGPU_AMDKFD_USERPTR_BO;
bo               1204 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		*offset = amdgpu_bo_mmap_offset(bo);
bo               1210 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	amdgpu_bo_unref(&bo);
bo               1230 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	unsigned long bo_size = mem->bo->tbo.mem.size;
bo               1251 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	amdgpu_mn_unregister(mem->bo);
bo               1267 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	amdgpu_amdkfd_remove_eviction_fence(mem->bo,
bo               1285 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (mem->bo->tbo.sg) {
bo               1286 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		sg_free_table(mem->bo->tbo.sg);
bo               1287 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		kfree(mem->bo->tbo.sg);
bo               1291 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	drm_gem_object_put_unlocked(&mem->bo->tbo.base);
bo               1304 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct amdgpu_bo *bo;
bo               1313 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	bo = mem->bo;
bo               1314 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (!bo) {
bo               1329 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm)) {
bo               1338 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	bo_size = bo->tbo.mem.size;
bo               1354 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) &&
bo               1355 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	    bo->tbo.mem.mem_type == TTM_PL_SYSTEM)
bo               1376 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	    !amdgpu_ttm_tt_get_usermm(bo->tbo.ttm)) {
bo               1381 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		ret = amdgpu_amdkfd_bo_validate(bo, domain, true);
bo               1414 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (!amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) && !bo->pin_count)
bo               1415 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		amdgpu_bo_fence(bo,
bo               1442 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	unsigned long bo_size = mem->bo->tbo.mem.size;
bo               1493 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	    !amdgpu_ttm_tt_get_usermm(mem->bo->tbo.ttm) && !mem->bo->pin_count)
bo               1494 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		amdgpu_amdkfd_remove_eviction_fence(mem->bo,
bo               1525 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct amdgpu_bo *bo = mem->bo;
bo               1527 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm)) {
bo               1537 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ret = amdgpu_bo_reserve(bo, true);
bo               1543 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ret = amdgpu_bo_pin(bo, AMDGPU_GEM_DOMAIN_GTT);
bo               1549 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ret = amdgpu_bo_kmap(bo, kptr);
bo               1556 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		bo, mem->process_info->eviction_fence);
bo               1560 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		*size = amdgpu_bo_size(bo);
bo               1562 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	amdgpu_bo_unreserve(bo);
bo               1568 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	amdgpu_bo_unpin(bo);
bo               1570 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	amdgpu_bo_unreserve(bo);
bo               1599 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct amdgpu_bo *bo;
bo               1611 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	bo = gem_to_amdgpu_bo(obj);
bo               1612 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (!(bo->preferred_domains & (AMDGPU_GEM_DOMAIN_VRAM |
bo               1622 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		*size = amdgpu_bo_size(bo);
bo               1625 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		*mmap_offset = amdgpu_bo_mmap_offset(bo);
bo               1633 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	drm_gem_object_get(&bo->tbo.base);
bo               1634 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	(*mem)->bo = bo;
bo               1636 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	(*mem)->domain = (bo->preferred_domains & AMDGPU_GEM_DOMAIN_VRAM) ?
bo               1688 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct amdgpu_bo *bo;
bo               1701 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		bo = mem->bo;
bo               1703 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		if (amdgpu_bo_reserve(bo, true))
bo               1705 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		amdgpu_bo_placement_from_domain(bo, AMDGPU_GEM_DOMAIN_CPU);
bo               1706 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
bo               1707 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		amdgpu_bo_unreserve(bo);
bo               1731 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		bo = mem->bo;
bo               1734 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		ret = amdgpu_ttm_tt_get_user_pages(bo, bo->tbo.ttm->pages);
bo               1743 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		amdgpu_ttm_tt_get_user_pages_done(bo->tbo.ttm);
bo               1770 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct amdgpu_bo *bo;
bo               1796 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		mem->resv_list.bo = mem->validate_list.bo;
bo               1819 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		bo = mem->bo;
bo               1822 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		if (bo->tbo.ttm->pages[0]) {
bo               1823 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 			amdgpu_bo_placement_from_domain(bo, mem->domain);
bo               1824 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 			ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
bo               1995 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		mem->resv_list.bo = mem->validate_list.bo;
bo               2023 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		struct amdgpu_bo *bo = mem->bo;
bo               2027 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		ret = amdgpu_amdkfd_bo_validate(bo, domain, false);
bo               2032 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		ret = amdgpu_sync_fence(NULL, &sync_obj, bo->tbo.moving, false);
bo               2079 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		amdgpu_bo_fence(mem->bo,
bo               2085 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		struct amdgpu_bo *bo = peer_vm->root.base.bo;
bo               2087 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		amdgpu_bo_fence(bo, &process_info->eviction_fence->base, true);
bo               2113 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	(*mem)->bo = amdgpu_bo_ref(gws_bo);
bo               2165 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct amdgpu_bo *gws_bo = kgd_mem->bo;
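
Many of the amdgpu_amdkfd_gpuvm.c hits above belong to the userptr path: a BO wrapping user memory is registered with the MMU notifier, its pages are taken with get_user_pages, and it is validated into its domain. A hedged condensation of that flow, using only helpers that appear in the listing:

	static int init_user_pages_sketch(struct amdgpu_bo *bo, uint64_t user_addr,
					  uint32_t domain)
	{
		struct ttm_operation_ctx ctx = { true, false };
		int ret;

		ret = amdgpu_ttm_tt_set_userptr(bo->tbo.ttm, user_addr, 0);
		if (ret)
			return ret;

		ret = amdgpu_mn_register(bo, user_addr);	/* watch for unmaps */
		if (ret)
			return ret;

		ret = amdgpu_ttm_tt_get_user_pages(bo, bo->tbo.ttm->pages);
		if (ret)
			goto unregister;

		ret = amdgpu_bo_reserve(bo, true);
		if (!ret) {
			amdgpu_bo_placement_from_domain(bo, domain);
			ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
			amdgpu_bo_unreserve(bo);
		}
		amdgpu_ttm_tt_get_user_pages_done(bo->tbo.ttm);
		if (!ret)
			return 0;

	unregister:
		amdgpu_mn_unregister(bo);
		return ret;
	}
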
bo                 54 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 		struct amdgpu_bo *bo = ttm_to_amdgpu_bo(e->tv.bo);
bo                 56 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 		amdgpu_bo_unref(&bo);
bo                 95 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 		struct amdgpu_bo *bo;
bo                104 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 		bo = amdgpu_bo_ref(gem_to_amdgpu_bo(gobj));
bo                107 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 		usermm = amdgpu_ttm_tt_get_usermm(bo->tbo.ttm);
bo                110 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 				amdgpu_bo_unref(&bo);
bo                121 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 		entry->tv.bo = &bo->tbo;
bo                123 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 		if (bo->preferred_domains == AMDGPU_GEM_DOMAIN_GDS)
bo                124 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 			list->gds_obj = bo;
bo                125 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 		if (bo->preferred_domains == AMDGPU_GEM_DOMAIN_GWS)
bo                126 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 			list->gws_obj = bo;
bo                127 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 		if (bo->preferred_domains == AMDGPU_GEM_DOMAIN_OA)
bo                128 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 			list->oa_obj = bo;
bo                130 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 		total_size += amdgpu_bo_size(bo);
bo                131 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 		trace_amdgpu_bo_list_set(list, bo);
bo                144 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 		struct amdgpu_bo *bo = ttm_to_amdgpu_bo(array[i].tv.bo);
bo                146 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 		amdgpu_bo_unref(&bo);
bo                149 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 		struct amdgpu_bo *bo = ttm_to_amdgpu_bo(array[i].tv.bo);
bo                151 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 		amdgpu_bo_unref(&bo);
bo                204 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 		struct amdgpu_bo *bo = ttm_to_amdgpu_bo(e->tv.bo);
bo                207 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 		if (!bo->parent)
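
amdgpu_bo_list.c builds the per-submission validation list: each GEM handle is resolved and referenced, userptr BOs belonging to a foreign mm are rejected, and BOs in the special GDS/GWS/OA domains are remembered on the list. A hedged sketch of that per-entry step, with error handling trimmed:

	static int bo_list_add_entry_sketch(struct amdgpu_bo_list *list,
					    struct amdgpu_bo_list_entry *entry,
					    struct drm_gem_object *gobj,
					    struct mm_struct *caller_mm)
	{
		struct amdgpu_bo *bo = amdgpu_bo_ref(gem_to_amdgpu_bo(gobj));
		struct mm_struct *usermm = amdgpu_ttm_tt_get_usermm(bo->tbo.ttm);

		/* userptr BOs may only be submitted by the mapping process */
		if (usermm && usermm != caller_mm) {
			amdgpu_bo_unref(&bo);
			return -EPERM;
		}

		entry->tv.bo = &bo->tbo;

		/* remember BOs living in the special hardware domains */
		if (bo->preferred_domains == AMDGPU_GEM_DOMAIN_GDS)
			list->gds_obj = bo;
		if (bo->preferred_domains == AMDGPU_GEM_DOMAIN_GWS)
			list->gws_obj = bo;
		if (bo->preferred_domains == AMDGPU_GEM_DOMAIN_OA)
			list->oa_obj = bo;

		return 0;
	}
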
bo                 44 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	struct amdgpu_bo *bo;
bo                 52 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	bo = amdgpu_bo_ref(gem_to_amdgpu_bo(gobj));
bo                 54 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	p->uf_entry.tv.bo = &bo->tbo;
bo                 60 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	size = amdgpu_bo_size(bo);
bo                 66 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm)) {
bo                 76 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	amdgpu_bo_unref(&bo);
bo                238 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	if (p->uf_entry.tv.bo)
bo                399 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 				 struct amdgpu_bo *bo)
bo                401 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
bo                405 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		.resv = bo->tbo.base.resv,
bo                411 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	if (bo->pin_count)
bo                419 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		    (bo->flags & AMDGPU_GEM_CREATE_CPU_ACCESS_REQUIRED)) {
bo                425 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 				domain = bo->preferred_domains;
bo                427 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 				domain = bo->allowed_domains;
bo                429 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 			domain = bo->preferred_domains;
bo                432 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		domain = bo->allowed_domains;
bo                436 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	amdgpu_bo_placement_from_domain(bo, domain);
bo                437 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
bo                441 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	    amdgpu_bo_in_cpu_visible_vram(bo))
bo                444 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	if (unlikely(r == -ENOMEM) && domain != bo->allowed_domains) {
bo                445 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		domain = bo->allowed_domains;
bo                467 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		struct amdgpu_bo *bo = ttm_to_amdgpu_bo(candidate->tv.bo);
bo                468 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
bo                473 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		if (bo == validated)
bo                477 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		if (bo->pin_count)
bo                480 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		other = amdgpu_mem_type_to_domain(bo->tbo.mem.mem_type);
bo                487 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		other = bo->allowed_domains & ~domain;
bo                494 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 				amdgpu_bo_in_cpu_visible_vram(bo);
bo                495 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		amdgpu_bo_placement_from_domain(bo, other);
bo                496 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
bo                513 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c static int amdgpu_cs_validate(void *param, struct amdgpu_bo *bo)
bo                519 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		r = amdgpu_cs_bo_validate(p, bo);
bo                520 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	} while (r == -ENOMEM && amdgpu_cs_try_evict(p, bo));
bo                524 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	if (bo->shadow)
bo                525 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		r = amdgpu_cs_bo_validate(p, bo->shadow);
bo                538 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		struct amdgpu_bo *bo = ttm_to_amdgpu_bo(lobj->tv.bo);
bo                541 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		usermm = amdgpu_ttm_tt_get_usermm(bo->tbo.ttm);
bo                545 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		if (amdgpu_ttm_tt_is_userptr(bo->tbo.ttm) &&
bo                547 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 			amdgpu_bo_placement_from_domain(bo,
bo                549 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 			r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
bo                553 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 			amdgpu_ttm_tt_set_user_pages(bo->tbo.ttm,
bo                560 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		r = amdgpu_cs_validate(p, bo);
bo                612 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	if (p->uf_entry.tv.bo && !ttm_to_amdgpu_bo(p->uf_entry.tv.bo)->parent)
bo                620 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		struct amdgpu_bo *bo = ttm_to_amdgpu_bo(e->tv.bo);
bo                624 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		e->user_pages = kvmalloc_array(bo->tbo.ttm->num_pages,
bo                632 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		r = amdgpu_ttm_tt_get_user_pages(bo, e->user_pages);
bo                639 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		for (i = 0; i < bo->tbo.ttm->num_pages; i++) {
bo                640 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 			if (bo->tbo.ttm->pages[i] != e->user_pages[i]) {
bo                687 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		struct amdgpu_bo *bo = ttm_to_amdgpu_bo(e->tv.bo);
bo                690 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		if (bo->prime_shared_count)
bo                692 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		e->bo_va = amdgpu_vm_bo_find(vm, bo);
bo                708 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	if (!r && p->uf_entry.tv.bo) {
bo                709 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		struct amdgpu_bo *uf = ttm_to_amdgpu_bo(p->uf_entry.tv.bo);
bo                728 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		struct amdgpu_bo *bo = ttm_to_amdgpu_bo(e->tv.bo);
bo                729 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		struct dma_resv *resv = bo->tbo.base.resv;
bo                732 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 				     amdgpu_bo_explicit_sync(bo));
bo                777 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	if (parser->uf_entry.tv.bo) {
bo                778 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		struct amdgpu_bo *uf = ttm_to_amdgpu_bo(parser->uf_entry.tv.bo);
bo                792 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	struct amdgpu_bo *bo;
bo                892 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		bo = ttm_to_amdgpu_bo(e->tv.bo);
bo                893 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		if (!bo)
bo                922 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	p->job->vm_pd_addr = amdgpu_gmc_pd_addr(vm->root.base.bo);
bo                927 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 			struct amdgpu_bo *bo = ttm_to_amdgpu_bo(e->tv.bo);
bo                930 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 			if (!bo)
bo                933 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 			amdgpu_vm_bo_invalidate(adev, bo, false);
bo               1300 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		struct amdgpu_bo *bo = ttm_to_amdgpu_bo(e->tv.bo);
bo               1302 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		r |= !amdgpu_ttm_tt_get_user_pages_done(bo->tbo.ttm);
bo               1712 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 			   uint64_t addr, struct amdgpu_bo **bo,
bo               1724 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	if (!mapping || !mapping->bo_va || !mapping->bo_va->base.bo)
bo               1727 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	*bo = mapping->bo_va->base.bo;
bo               1731 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	if (dma_resv_locking_ctx((*bo)->tbo.base.resv) != &parser->ticket)
bo               1734 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	if (!((*bo)->flags & AMDGPU_GEM_CREATE_VRAM_CONTIGUOUS)) {
bo               1735 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		(*bo)->flags |= AMDGPU_GEM_CREATE_VRAM_CONTIGUOUS;
bo               1736 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		amdgpu_bo_placement_from_domain(*bo, (*bo)->allowed_domains);
bo               1737 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		r = ttm_bo_validate(&(*bo)->tbo, &(*bo)->placement, &ctx);
bo               1742 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	return amdgpu_ttm_alloc_gart(&(*bo)->tbo);
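
amdgpu_cs.c carries the validation policy: amdgpu_cs_bo_validate skips pinned BOs, first tries preferred_domains, and falls back to allowed_domains on -ENOMEM, while amdgpu_cs_validate additionally retries after evicting neighbours. The retry skeleton, condensed and hedged:

	static int cs_validate_sketch(struct amdgpu_bo *bo)
	{
		struct ttm_operation_ctx ctx = { true, false };
		u32 domain = bo->preferred_domains;
		int r;

		if (bo->pin_count)
			return 0;	/* pinned BOs keep their placement */

	retry:
		amdgpu_bo_placement_from_domain(bo, domain);
		r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
		if (r == -ENOMEM && domain != bo->allowed_domains) {
			domain = bo->allowed_domains;	/* loosen placement */
			goto retry;
		}
		/* the real code also evicts other list entries and retries,
		 * and validates bo->shadow with the same routine */
		return r;
	}
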
bo                 37 drivers/gpu/drm/amd/amdgpu/amdgpu_csa.c int amdgpu_allocate_static_csa(struct amdgpu_device *adev, struct amdgpu_bo **bo,
bo                 44 drivers/gpu/drm/amd/amdgpu/amdgpu_csa.c 				domain, bo,
bo                 46 drivers/gpu/drm/amd/amdgpu/amdgpu_csa.c 	if (!*bo)
bo                 54 drivers/gpu/drm/amd/amdgpu/amdgpu_csa.c void amdgpu_free_static_csa(struct amdgpu_bo **bo)
bo                 56 drivers/gpu/drm/amd/amdgpu/amdgpu_csa.c 	amdgpu_bo_free_kernel(bo, NULL, NULL);
bo                 66 drivers/gpu/drm/amd/amdgpu/amdgpu_csa.c 			  struct amdgpu_bo *bo, struct amdgpu_bo_va **bo_va,
bo                 77 drivers/gpu/drm/amd/amdgpu/amdgpu_csa.c 	csa_tv.bo = &bo->tbo;
bo                 89 drivers/gpu/drm/amd/amdgpu/amdgpu_csa.c 	*bo_va = amdgpu_vm_bo_add(adev, vm, bo);
bo                 32 drivers/gpu/drm/amd/amdgpu/amdgpu_csa.h int amdgpu_allocate_static_csa(struct amdgpu_device *adev, struct amdgpu_bo **bo,
bo                 35 drivers/gpu/drm/amd/amdgpu/amdgpu_csa.h 			  struct amdgpu_bo *bo, struct amdgpu_bo_va **bo_va,
bo                 37 drivers/gpu/drm/amd/amdgpu/amdgpu_csa.h void amdgpu_free_static_csa(struct amdgpu_bo **bo);
bo                 51 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj);
bo                 52 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	int npages = bo->tbo.num_pages;
bo                 54 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	return drm_prime_pages_to_sg(bo->tbo.ttm->pages, npages);
bo                 68 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj);
bo                 71 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	ret = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages,
bo                 72 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 			  &bo->dma_buf_vmap);
bo                 76 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	return bo->dma_buf_vmap.virtual;
bo                 88 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj);
bo                 90 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	ttm_bo_kunmap(&bo->dma_buf_vmap);
bo                107 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj);
bo                108 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
bo                109 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	unsigned asize = amdgpu_bo_size(bo);
bo                122 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) ||
bo                123 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	    (bo->flags & AMDGPU_GEM_CREATE_NO_CPU_ACCESS)) {
bo                126 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	vma->vm_pgoff += amdgpu_bo_mmap_offset(bo) >> PAGE_SHIFT;
bo                197 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj);
bo                198 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
bo                205 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	r = amdgpu_bo_reserve(bo, false);
bo                219 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 		r = __dma_resv_make_exclusive(bo->tbo.base.resv);
bo                225 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	r = amdgpu_bo_pin(bo, AMDGPU_GEM_DOMAIN_GTT);
bo                230 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 		bo->prime_shared_count++;
bo                233 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	amdgpu_bo_unreserve(bo);
bo                253 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj);
bo                254 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
bo                257 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	ret = amdgpu_bo_reserve(bo, true);
bo                261 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	amdgpu_bo_unpin(bo);
bo                262 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	if (attach->dev->driver != adev->dev->driver && bo->prime_shared_count)
bo                263 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 		bo->prime_shared_count--;
bo                264 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	amdgpu_bo_unreserve(bo);
bo                285 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	struct amdgpu_bo *bo = gem_to_amdgpu_bo(dma_buf->priv);
bo                286 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
bo                288 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	u32 domain = amdgpu_display_supported_domains(adev, bo->flags);
bo                297 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	ret = amdgpu_bo_reserve(bo, false);
bo                301 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	if (!bo->pin_count && (bo->allowed_domains & AMDGPU_GEM_DOMAIN_GTT)) {
bo                302 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 		amdgpu_bo_placement_from_domain(bo, AMDGPU_GEM_DOMAIN_GTT);
bo                303 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 		ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
bo                306 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	amdgpu_bo_unreserve(bo);
bo                336 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	struct amdgpu_bo *bo = gem_to_amdgpu_bo(gobj);
bo                339 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) ||
bo                340 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	    bo->flags & AMDGPU_GEM_CREATE_VM_ALWAYS_VALID)
bo                372 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	struct amdgpu_bo *bo;
bo                384 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	ret = amdgpu_bo_create(adev, &bp, &bo);
bo                388 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	bo->tbo.sg = sg;
bo                389 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	bo->tbo.ttm->sg = sg;
bo                390 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	bo->allowed_domains = AMDGPU_GEM_DOMAIN_GTT;
bo                391 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	bo->preferred_domains = AMDGPU_GEM_DOMAIN_GTT;
bo                393 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 		bo->prime_shared_count = 1;
bo                396 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	return &bo->tbo.base;
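
The amdgpu_dma_buf.c hits show both sharing directions: on export to a foreign device the BO is pinned into GTT and prime_shared_count is raised, and on import an sg-backed BO is created, restricted to GTT, and starts shared. A hedged condensation of the import side:

	static struct drm_gem_object *
	import_sg_sketch(struct amdgpu_device *adev, struct amdgpu_bo_param *bp,
			 struct sg_table *sg, bool foreign_exporter)
	{
		struct amdgpu_bo *bo;

		if (amdgpu_bo_create(adev, bp, &bo))
			return NULL;

		bo->tbo.sg = sg;		/* back the BO with the sg table */
		bo->tbo.ttm->sg = sg;
		bo->allowed_domains = AMDGPU_GEM_DOMAIN_GTT;	/* imports stay in GTT */
		bo->preferred_domains = AMDGPU_GEM_DOMAIN_GTT;
		if (foreign_exporter)
			bo->prime_shared_count = 1;

		return &bo->tbo.base;
	}
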
bo                118 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 	if (adev->gart.bo == NULL) {
bo                129 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 		r = amdgpu_bo_create(adev, &bp, &adev->gart.bo);
bo                151 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 	r = amdgpu_bo_reserve(adev->gart.bo, false);
bo                154 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 	r = amdgpu_bo_pin(adev->gart.bo, AMDGPU_GEM_DOMAIN_VRAM);
bo                156 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 		amdgpu_bo_unreserve(adev->gart.bo);
bo                159 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 	r = amdgpu_bo_kmap(adev->gart.bo, &adev->gart.ptr);
bo                161 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 		amdgpu_bo_unpin(adev->gart.bo);
bo                162 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 	amdgpu_bo_unreserve(adev->gart.bo);
bo                178 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 	if (adev->gart.bo == NULL) {
bo                181 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 	r = amdgpu_bo_reserve(adev->gart.bo, true);
bo                183 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 		amdgpu_bo_kunmap(adev->gart.bo);
bo                184 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 		amdgpu_bo_unpin(adev->gart.bo);
bo                185 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 		amdgpu_bo_unreserve(adev->gart.bo);
bo                201 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 	if (adev->gart.bo == NULL) {
bo                204 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 	amdgpu_bo_unref(&adev->gart.bo);
bo                 43 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.h 	struct amdgpu_bo		*bo;
bo                 56 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	struct amdgpu_bo *bo;
bo                 71 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	r = amdgpu_bo_create(adev, &bp, &bo);
bo                 88 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	*obj = &bo->tbo.base;
bo                137 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	    abo->tbo.base.resv != vm->root.base.bo->tbo.base.resv)
bo                157 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj);
bo                158 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
bo                172 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	tv.bo = &bo->tbo;
bo                184 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	bo_va = amdgpu_vm_bo_find(vm, bo);
bo                198 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 				amdgpu_bo_fence(bo, fence, true);
bo                251 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 		r = amdgpu_bo_reserve(vm->root.base.bo, false);
bo                255 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 		resv = vm->root.base.bo->tbo.base.resv;
bo                265 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 			abo->parent = amdgpu_bo_ref(vm->root.base.bo);
bo                267 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 		amdgpu_bo_unreserve(vm->root.base.bo);
bo                290 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	struct amdgpu_bo *bo;
bo                318 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	bo = gem_to_amdgpu_bo(gobj);
bo                319 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	bo->preferred_domains = AMDGPU_GEM_DOMAIN_GTT;
bo                320 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	bo->allowed_domains = AMDGPU_GEM_DOMAIN_GTT;
bo                321 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	r = amdgpu_ttm_tt_set_userptr(bo->tbo.ttm, args->addr, args->flags);
bo                326 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 		r = amdgpu_mn_register(bo, args->addr);
bo                332 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 		r = amdgpu_ttm_tt_get_user_pages(bo, bo->tbo.ttm->pages);
bo                336 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 		r = amdgpu_bo_reserve(bo, true);
bo                340 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 		amdgpu_bo_placement_from_domain(bo, AMDGPU_GEM_DOMAIN_GTT);
bo                341 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 		r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
bo                342 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 		amdgpu_bo_unreserve(bo);
bo                355 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 		amdgpu_ttm_tt_get_user_pages_done(bo->tbo.ttm);
bo                603 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 		tv.bo = &abo->tbo;
bo                716 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 				amdgpu_ttm_adev(base->vm->root.base.bo->tbo.bdev))) {
bo                788 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c #define amdgpu_debugfs_gem_bo_print_flag(m, bo, flag)	\
bo                789 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	if (bo->flags & (AMDGPU_GEM_CREATE_ ## flag)) {	\
bo                796 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	struct amdgpu_bo *bo = gem_to_amdgpu_bo(gobj);
bo                805 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	domain = amdgpu_mem_type_to_domain(bo->tbo.mem.mem_type);
bo                819 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 		   id, amdgpu_bo_size(bo), placement);
bo                821 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	pin_count = READ_ONCE(bo->pin_count);
bo                825 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	dma_buf = READ_ONCE(bo->tbo.base.dma_buf);
bo                826 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	attachment = READ_ONCE(bo->tbo.base.import_attach);
bo                833 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	amdgpu_debugfs_gem_bo_print_flag(m, bo, CPU_ACCESS_REQUIRED);
bo                834 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	amdgpu_debugfs_gem_bo_print_flag(m, bo, NO_CPU_ACCESS);
bo                835 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	amdgpu_debugfs_gem_bo_print_flag(m, bo, CPU_GTT_USWC);
bo                836 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	amdgpu_debugfs_gem_bo_print_flag(m, bo, VRAM_CLEARED);
bo                837 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	amdgpu_debugfs_gem_bo_print_flag(m, bo, SHADOW);
bo                838 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	amdgpu_debugfs_gem_bo_print_flag(m, bo, VRAM_CONTIGUOUS);
bo                839 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	amdgpu_debugfs_gem_bo_print_flag(m, bo, VM_ALWAYS_VALID);
bo                840 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	amdgpu_debugfs_gem_bo_print_flag(m, bo, EXPLICIT_SYNC);
bo                205 drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.h 	struct amdgpu_bo	*bo;
bo                 41 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c void amdgpu_gmc_get_pde_for_bo(struct amdgpu_bo *bo, int level,
bo                 44 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
bo                 47 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	switch (bo->tbo.mem.mem_type) {
bo                 49 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 		ttm = container_of(bo->tbo.ttm, struct ttm_dma_tt, ttm);
bo                 53 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 		*addr = amdgpu_bo_gpu_offset(bo);
bo                 59 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	*flags = amdgpu_ttm_tt_pde_flags(bo->tbo.ttm, &bo->tbo.mem);
bo                 67 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c uint64_t amdgpu_gmc_pd_addr(struct amdgpu_bo *bo)
bo                 69 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
bo                 76 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 		amdgpu_gmc_get_pde_for_bo(bo, -1, &pd_addr, &flags);
bo                 79 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 		pd_addr = amdgpu_bo_gpu_offset(bo);
bo                119 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c uint64_t amdgpu_gmc_agp_addr(struct ttm_buffer_object *bo)
bo                121 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->bdev);
bo                124 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	if (bo->num_pages != 1 || bo->ttm->caching_state == tt_cached)
bo                127 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	ttm = container_of(bo->ttm, struct ttm_dma_tt, ttm);
bo                220 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.h void amdgpu_gmc_get_pde_for_bo(struct amdgpu_bo *bo, int level,
bo                225 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.h uint64_t amdgpu_gmc_pd_addr(struct amdgpu_bo *bo);
bo                226 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.h uint64_t amdgpu_gmc_agp_addr(struct ttm_buffer_object *bo);
bo               1073 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c 	pd = amdgpu_bo_ref(fpriv->vm.root.base.bo);
bo                 78 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	struct amdgpu_bo *bo, *next_bo;
bo                 85 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 		list_for_each_entry_safe(bo, next_bo, &node->bos, mn_list) {
bo                 86 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 			bo->mn = NULL;
bo                 87 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 			list_del_init(&bo->mn_list);
bo                174 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	struct amdgpu_bo *bo;
bo                177 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	list_for_each_entry(bo, &node->bos, mn_list) {
bo                179 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 		if (!amdgpu_ttm_tt_affect_userptr(bo->tbo.ttm, start, end))
bo                182 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 		r = dma_resv_wait_timeout_rcu(bo->tbo.base.resv,
bo                266 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 		struct amdgpu_bo *bo;
bo                276 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 		list_for_each_entry(bo, &node->bos, mn_list) {
bo                277 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 			struct kgd_mem *mem = bo->kfd_bo;
bo                279 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 			if (amdgpu_ttm_tt_affect_userptr(bo->tbo.ttm,
bo                375 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c int amdgpu_mn_register(struct amdgpu_bo *bo, unsigned long addr)
bo                377 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	unsigned long end = addr + amdgpu_bo_size(bo) - 1;
bo                378 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
bo                380 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 		bo->kfd_bo ? AMDGPU_MN_TYPE_HSA : AMDGPU_MN_TYPE_GFX;
bo                412 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	bo->mn = amn;
bo                418 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	list_add(&bo->mn_list, &node->bos);
bo                434 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c void amdgpu_mn_unregister(struct amdgpu_bo *bo)
bo                436 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
bo                442 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	amn = bo->mn;
bo                451 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	head = bo->mn_list.next;
bo                453 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	bo->mn = NULL;
bo                454 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	list_del_init(&bo->mn_list);
bo                 79 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.h int amdgpu_mn_register(struct amdgpu_bo *bo, unsigned long addr);
bo                 80 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.h void amdgpu_mn_unregister(struct amdgpu_bo *bo);
bo                 90 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.h static inline int amdgpu_mn_register(struct amdgpu_bo *bo, unsigned long addr)
bo                 96 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.h static inline void amdgpu_mn_unregister(struct amdgpu_bo *bo) {}
bo                 62 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c static void amdgpu_bo_subtract_pin_size(struct amdgpu_bo *bo)
bo                 64 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
bo                 66 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (bo->tbo.mem.mem_type == TTM_PL_VRAM) {
bo                 67 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		atomic64_sub(amdgpu_bo_size(bo), &adev->vram_pin_size);
bo                 68 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		atomic64_sub(amdgpu_vram_mgr_bo_visible_size(bo),
bo                 70 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	} else if (bo->tbo.mem.mem_type == TTM_PL_TT) {
bo                 71 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		atomic64_sub(amdgpu_bo_size(bo), &adev->gart_pin_size);
bo                 78 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	struct amdgpu_bo *bo = ttm_to_amdgpu_bo(tbo);
bo                 80 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (bo->pin_count > 0)
bo                 81 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		amdgpu_bo_subtract_pin_size(bo);
bo                 83 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	amdgpu_bo_kunmap(bo);
bo                 85 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (bo->tbo.base.import_attach)
bo                 86 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg);
bo                 87 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	drm_gem_object_release(&bo->tbo.base);
bo                 89 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (!list_empty(&bo->shadow_list)) {
bo                 91 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		list_del_init(&bo->shadow_list);
bo                 94 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	amdgpu_bo_unref(&bo->parent);
bo                 96 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	kfree(bo->metadata);
bo                 97 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	kfree(bo);
bo                110 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c bool amdgpu_bo_is_amdgpu_bo(struct ttm_buffer_object *bo)
bo                112 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (bo->destroy == &amdgpu_bo_destroy)
bo                415 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c void amdgpu_bo_free_kernel(struct amdgpu_bo **bo, u64 *gpu_addr,
bo                418 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (*bo == NULL)
bo                421 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (likely(amdgpu_bo_reserve(*bo, true) == 0)) {
bo                423 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 			amdgpu_bo_kunmap(*bo);
bo                425 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		amdgpu_bo_unpin(*bo);
bo                426 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		amdgpu_bo_unreserve(*bo);
bo                428 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	amdgpu_bo_unref(bo);
bo                520 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	struct amdgpu_bo *bo;
bo                548 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	bo = kzalloc(sizeof(struct amdgpu_bo), GFP_KERNEL);
bo                549 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (bo == NULL)
bo                551 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	drm_gem_private_object_init(adev->ddev, &bo->tbo.base, size);
bo                552 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	INIT_LIST_HEAD(&bo->shadow_list);
bo                553 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	bo->vm_bo = NULL;
bo                554 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	bo->preferred_domains = bp->preferred_domain ? bp->preferred_domain :
bo                556 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	bo->allowed_domains = bo->preferred_domains;
bo                558 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	    bo->allowed_domains == AMDGPU_GEM_DOMAIN_VRAM)
bo                559 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		bo->allowed_domains |= AMDGPU_GEM_DOMAIN_GTT;
bo                561 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	bo->flags = bp->flags;
bo                563 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (!amdgpu_bo_support_uswc(bo->flags))
bo                564 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		bo->flags &= ~AMDGPU_GEM_CREATE_CPU_GTT_USWC;
bo                566 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	bo->tbo.bdev = &adev->mman.bdev;
bo                569 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		amdgpu_bo_placement_from_domain(bo, AMDGPU_GEM_DOMAIN_CPU);
bo                571 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		amdgpu_bo_placement_from_domain(bo, bp->domain);
bo                573 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		bo->tbo.priority = 1;
bo                575 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	r = ttm_bo_init_reserved(&adev->mman.bdev, &bo->tbo, size, bp->type,
bo                576 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 				 &bo->placement, page_align, &ctx, acc_size,
bo                582 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	    bo->tbo.mem.mem_type == TTM_PL_VRAM &&
bo                583 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	    bo->tbo.mem.start < adev->gmc.visible_vram_size >> PAGE_SHIFT)
bo                590 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	    bo->tbo.mem.placement & TTM_PL_FLAG_VRAM) {
bo                593 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		r = amdgpu_fill_buffer(bo, 0, bo->tbo.base.resv, &fence);
bo                597 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		amdgpu_bo_fence(bo, fence, false);
bo                598 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		dma_fence_put(bo->tbo.moving);
bo                599 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		bo->tbo.moving = dma_fence_get(fence);
bo                603 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		amdgpu_bo_unreserve(bo);
bo                604 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	*bo_ptr = bo;
bo                606 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	trace_amdgpu_bo_create(bo);
bo                610 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		bo->flags &= ~AMDGPU_GEM_CREATE_CPU_ACCESS_REQUIRED;
bo                616 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		dma_resv_unlock(bo->tbo.base.resv);
bo                617 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	amdgpu_bo_unref(&bo);
bo                623 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 				   struct amdgpu_bo *bo)
bo                628 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (bo->shadow)
bo                637 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	bp.resv = bo->tbo.base.resv;
bo                639 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	r = amdgpu_bo_do_create(adev, &bp, &bo->shadow);
bo                641 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		bo->shadow->parent = amdgpu_bo_ref(bo);
bo                643 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		list_add_tail(&bo->shadow->shadow_list, &adev->shadow_list);
bo                705 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c int amdgpu_bo_validate(struct amdgpu_bo *bo)
bo                711 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (bo->pin_count)
bo                714 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	domain = bo->preferred_domains;
bo                717 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	amdgpu_bo_placement_from_domain(bo, domain);
bo                718 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
bo                719 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (unlikely(r == -ENOMEM) && domain != bo->allowed_domains) {
bo                720 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		domain = bo->allowed_domains;
bo                766 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c int amdgpu_bo_kmap(struct amdgpu_bo *bo, void **ptr)
bo                771 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (bo->flags & AMDGPU_GEM_CREATE_NO_CPU_ACCESS)
bo                774 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	kptr = amdgpu_bo_kptr(bo);
bo                781 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	r = dma_resv_wait_timeout_rcu(bo->tbo.base.resv, false, false,
bo                786 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap);
bo                791 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		*ptr = amdgpu_bo_kptr(bo);
bo                805 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c void *amdgpu_bo_kptr(struct amdgpu_bo *bo)
bo                809 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	return ttm_kmap_obj_virtual(&bo->kmap, &is_iomem);
bo                818 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c void amdgpu_bo_kunmap(struct amdgpu_bo *bo)
bo                820 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (bo->kmap.bo)
bo                821 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		ttm_bo_kunmap(&bo->kmap);
bo                833 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c struct amdgpu_bo *amdgpu_bo_ref(struct amdgpu_bo *bo)
bo                835 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (bo == NULL)
bo                838 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	ttm_bo_get(&bo->tbo);
bo                839 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	return bo;
bo                848 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c void amdgpu_bo_unref(struct amdgpu_bo **bo)
bo                852 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if ((*bo) == NULL)
bo                855 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	tbo = &((*bo)->tbo);
bo                857 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	*bo = NULL;
bo                882 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c int amdgpu_bo_pin_restricted(struct amdgpu_bo *bo, u32 domain,
bo                885 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
bo                889 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm))
bo                896 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (bo->prime_shared_count) {
bo                908 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (bo->pin_count) {
bo                909 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		uint32_t mem_type = bo->tbo.mem.mem_type;
bo                914 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		bo->pin_count++;
bo                917 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 			u64 domain_start = bo->tbo.bdev->man[mem_type].gpu_offset;
bo                919 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 				     (amdgpu_bo_gpu_offset(bo) - domain_start));
bo                925 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	bo->flags |= AMDGPU_GEM_CREATE_VRAM_CONTIGUOUS;
bo                927 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (!(bo->flags & AMDGPU_GEM_CREATE_NO_CPU_ACCESS))
bo                928 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		bo->flags |= AMDGPU_GEM_CREATE_CPU_ACCESS_REQUIRED;
bo                929 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	amdgpu_bo_placement_from_domain(bo, domain);
bo                930 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	for (i = 0; i < bo->placement.num_placement; i++) {
bo                936 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		if (fpfn > bo->placements[i].fpfn)
bo                937 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 			bo->placements[i].fpfn = fpfn;
bo                938 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		if (!bo->placements[i].lpfn ||
bo                939 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		    (lpfn && lpfn < bo->placements[i].lpfn))
bo                940 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 			bo->placements[i].lpfn = lpfn;
bo                941 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		bo->placements[i].flags |= TTM_PL_FLAG_NO_EVICT;
bo                944 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
bo                946 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		dev_err(adev->dev, "%p pin failed\n", bo);
bo                950 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	bo->pin_count = 1;
bo                952 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	domain = amdgpu_mem_type_to_domain(bo->tbo.mem.mem_type);
bo                954 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		atomic64_add(amdgpu_bo_size(bo), &adev->vram_pin_size);
bo                955 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		atomic64_add(amdgpu_vram_mgr_bo_visible_size(bo),
bo                958 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		atomic64_add(amdgpu_bo_size(bo), &adev->gart_pin_size);
bo                977 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c int amdgpu_bo_pin(struct amdgpu_bo *bo, u32 domain)
bo                979 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	return amdgpu_bo_pin_restricted(bo, domain, 0, 0);
bo                992 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c int amdgpu_bo_unpin(struct amdgpu_bo *bo)
bo                994 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
bo                998 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (WARN_ON_ONCE(!bo->pin_count)) {
bo                999 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		dev_warn(adev->dev, "%p unpin not necessary\n", bo);
bo               1002 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	bo->pin_count--;
bo               1003 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (bo->pin_count)
bo               1006 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	amdgpu_bo_subtract_pin_size(bo);
bo               1008 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	for (i = 0; i < bo->placement.num_placement; i++) {
bo               1009 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		bo->placements[i].lpfn = 0;
bo               1010 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		bo->placements[i].flags &= ~TTM_PL_FLAG_NO_EVICT;
bo               1012 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
bo               1014 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		dev_err(adev->dev, "%p validate failed for unpin\n", bo);
bo               1120 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c int amdgpu_bo_fbdev_mmap(struct amdgpu_bo *bo,
bo               1123 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	return ttm_fbdev_mmap(vma, &bo->tbo);
bo               1137 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c int amdgpu_bo_set_tiling_flags(struct amdgpu_bo *bo, u64 tiling_flags)
bo               1139 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
bo               1145 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	bo->tiling_flags = tiling_flags;
bo               1157 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c void amdgpu_bo_get_tiling_flags(struct amdgpu_bo *bo, u64 *tiling_flags)
bo               1159 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	dma_resv_assert_held(bo->tbo.base.resv);
bo               1162 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		*tiling_flags = bo->tiling_flags;
bo               1178 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c int amdgpu_bo_set_metadata (struct amdgpu_bo *bo, void *metadata,
bo               1184 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		if (bo->metadata_size) {
bo               1185 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 			kfree(bo->metadata);
bo               1186 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 			bo->metadata = NULL;
bo               1187 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 			bo->metadata_size = 0;
bo               1199 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	kfree(bo->metadata);
bo               1200 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	bo->metadata_flags = flags;
bo               1201 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	bo->metadata = buffer;
bo               1202 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	bo->metadata_size = metadata_size;
bo               1222 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c int amdgpu_bo_get_metadata(struct amdgpu_bo *bo, void *buffer,
bo               1230 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		if (buffer_size < bo->metadata_size)
bo               1233 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		if (bo->metadata_size)
bo               1234 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 			memcpy(buffer, bo->metadata, bo->metadata_size);
bo               1238 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		*metadata_size = bo->metadata_size;
bo               1240 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		*flags = bo->metadata_flags;
bo               1255 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c void amdgpu_bo_move_notify(struct ttm_buffer_object *bo,
bo               1259 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->bdev);
bo               1261 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	struct ttm_mem_reg *old_mem = &bo->mem;
bo               1263 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (!amdgpu_bo_is_amdgpu_bo(bo))
bo               1266 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	abo = ttm_to_amdgpu_bo(bo);
bo               1290 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c void amdgpu_bo_release_notify(struct ttm_buffer_object *bo)
bo               1296 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (!amdgpu_bo_is_amdgpu_bo(bo))
bo               1299 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	abo = ttm_to_amdgpu_bo(bo);
bo               1304 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (bo->mem.mem_type != TTM_PL_VRAM || !bo->mem.mm_node ||
bo               1308 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	dma_resv_lock(bo->base.resv, NULL);
bo               1310 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	r = amdgpu_fill_buffer(abo, AMDGPU_POISON, bo->base.resv, &fence);
bo               1316 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	dma_resv_unlock(bo->base.resv);
bo               1330 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c int amdgpu_bo_fault_reserve_notify(struct ttm_buffer_object *bo)
bo               1332 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->bdev);
bo               1338 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (!amdgpu_bo_is_amdgpu_bo(bo))
bo               1341 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	abo = ttm_to_amdgpu_bo(bo);
bo               1346 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (bo->mem.mem_type != TTM_PL_VRAM)
bo               1349 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	size = bo->mem.num_pages << PAGE_SHIFT;
bo               1350 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	offset = bo->mem.start << PAGE_SHIFT;
bo               1367 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	r = ttm_bo_validate(bo, &abo->placement, &ctx);
bo               1371 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	offset = bo->mem.start << PAGE_SHIFT;
bo               1373 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (bo->mem.mem_type == TTM_PL_VRAM &&
bo               1388 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c void amdgpu_bo_fence(struct amdgpu_bo *bo, struct dma_fence *fence,
bo               1391 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	struct dma_resv *resv = bo->tbo.base.resv;
bo               1409 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c int amdgpu_bo_sync_wait(struct amdgpu_bo *bo, void *owner, bool intr)
bo               1411 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
bo               1416 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	amdgpu_sync_resv(adev, &sync, bo->tbo.base.resv, owner, false);
bo               1433 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c u64 amdgpu_bo_gpu_offset(struct amdgpu_bo *bo)
bo               1435 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	WARN_ON_ONCE(bo->tbo.mem.mem_type == TTM_PL_SYSTEM);
bo               1436 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	WARN_ON_ONCE(!dma_resv_is_locked(bo->tbo.base.resv) &&
bo               1437 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		     !bo->pin_count && bo->tbo.type != ttm_bo_type_kernel);
bo               1438 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	WARN_ON_ONCE(bo->tbo.mem.start == AMDGPU_BO_INVALID_OFFSET);
bo               1439 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	WARN_ON_ONCE(bo->tbo.mem.mem_type == TTM_PL_VRAM &&
bo               1440 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		     !(bo->flags & AMDGPU_GEM_CREATE_VRAM_CONTIGUOUS));
bo               1442 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	return amdgpu_gmc_sign_extend(bo->tbo.offset);
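
Note: the amdgpu_object.c entries above span the whole buffer-object lifecycle: create-and-clear, kmap/kunmap, ref/unref, pin/unpin, fencing, and GPU-offset queries. A minimal sketch of how a driver-internal user strings those calls together, assuming an already-created bo; the memset payload is illustrative only:

#include "amdgpu.h"
#include "amdgpu_object.h"

/* Hedged sketch: reserve -> pin -> kmap -> use -> tear down, mirroring
 * the amdgpu_object.c entries indexed above. */
static int example_bo_roundtrip(struct amdgpu_bo *bo)
{
	void *cpu_ptr;
	int r;

	r = amdgpu_bo_reserve(bo, false);	/* interruptible reservation */
	if (r)
		return r;

	r = amdgpu_bo_pin(bo, AMDGPU_GEM_DOMAIN_VRAM);
	if (r)
		goto out_unreserve;

	r = amdgpu_bo_kmap(bo, &cpu_ptr);	/* CPU-visible mapping */
	if (r)
		goto out_unpin;

	memset(cpu_ptr, 0, amdgpu_bo_size(bo));	/* touch the buffer */
	amdgpu_bo_kunmap(bo);

out_unpin:
	amdgpu_bo_unpin(bo);
out_unreserve:
	amdgpu_bo_unreserve(bo);
	return r;
}

Note that amdgpu_bo_kmap() refuses BOs created with AMDGPU_GEM_CREATE_NO_CPU_ACCESS, and amdgpu_bo_pin() refuses userptr BOs, per the entries above.
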
bo                152 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h static inline int amdgpu_bo_reserve(struct amdgpu_bo *bo, bool no_intr)
bo                154 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h 	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
bo                157 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h 	r = __ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL);
bo                160 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h 			dev_err(adev->dev, "%p reserve failed\n", bo);
bo                166 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h static inline void amdgpu_bo_unreserve(struct amdgpu_bo *bo)
bo                168 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h 	ttm_bo_unreserve(&bo->tbo);
bo                171 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h static inline unsigned long amdgpu_bo_size(struct amdgpu_bo *bo)
bo                173 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h 	return bo->tbo.num_pages << PAGE_SHIFT;
bo                176 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h static inline unsigned amdgpu_bo_ngpu_pages(struct amdgpu_bo *bo)
bo                178 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h 	return (bo->tbo.num_pages << PAGE_SHIFT) / AMDGPU_GPU_PAGE_SIZE;
bo                181 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h static inline unsigned amdgpu_bo_gpu_page_alignment(struct amdgpu_bo *bo)
bo                183 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h 	return (bo->tbo.mem.page_alignment << PAGE_SHIFT) / AMDGPU_GPU_PAGE_SIZE;
bo                192 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h static inline u64 amdgpu_bo_mmap_offset(struct amdgpu_bo *bo)
bo                194 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h 	return drm_vma_node_offset_addr(&bo->tbo.base.vma_node);
bo                200 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h static inline bool amdgpu_bo_in_cpu_visible_vram(struct amdgpu_bo *bo)
bo                202 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h 	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
bo                204 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h 	struct drm_mm_node *node = bo->tbo.mem.mm_node;
bo                207 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h 	if (bo->tbo.mem.mem_type != TTM_PL_VRAM)
bo                210 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h 	for (pages_left = bo->tbo.mem.num_pages; pages_left;
bo                221 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h static inline bool amdgpu_bo_explicit_sync(struct amdgpu_bo *bo)
bo                223 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h 	return bo->flags & AMDGPU_GEM_CREATE_EXPLICIT_SYNC;
bo                226 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h bool amdgpu_bo_is_amdgpu_bo(struct ttm_buffer_object *bo);
bo                243 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h void amdgpu_bo_free_kernel(struct amdgpu_bo **bo, u64 *gpu_addr,
bo                245 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h int amdgpu_bo_kmap(struct amdgpu_bo *bo, void **ptr);
bo                246 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h void *amdgpu_bo_kptr(struct amdgpu_bo *bo);
bo                247 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h void amdgpu_bo_kunmap(struct amdgpu_bo *bo);
bo                248 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h struct amdgpu_bo *amdgpu_bo_ref(struct amdgpu_bo *bo);
bo                249 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h void amdgpu_bo_unref(struct amdgpu_bo **bo);
bo                250 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h int amdgpu_bo_pin(struct amdgpu_bo *bo, u32 domain);
bo                251 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h int amdgpu_bo_pin_restricted(struct amdgpu_bo *bo, u32 domain,
bo                253 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h int amdgpu_bo_unpin(struct amdgpu_bo *bo);
bo                258 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h int amdgpu_bo_fbdev_mmap(struct amdgpu_bo *bo,
bo                260 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h int amdgpu_bo_set_tiling_flags(struct amdgpu_bo *bo, u64 tiling_flags);
bo                261 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h void amdgpu_bo_get_tiling_flags(struct amdgpu_bo *bo, u64 *tiling_flags);
bo                262 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h int amdgpu_bo_set_metadata (struct amdgpu_bo *bo, void *metadata,
bo                264 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h int amdgpu_bo_get_metadata(struct amdgpu_bo *bo, void *buffer,
bo                267 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h void amdgpu_bo_move_notify(struct ttm_buffer_object *bo,
bo                270 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h void amdgpu_bo_release_notify(struct ttm_buffer_object *bo);
bo                271 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h int amdgpu_bo_fault_reserve_notify(struct ttm_buffer_object *bo);
bo                272 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h void amdgpu_bo_fence(struct amdgpu_bo *bo, struct dma_fence *fence,
bo                274 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h int amdgpu_bo_sync_wait(struct amdgpu_bo *bo, void *owner, bool intr);
bo                275 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h u64 amdgpu_bo_gpu_offset(struct amdgpu_bo *bo);
bo                276 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h int amdgpu_bo_validate(struct amdgpu_bo *bo);
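
Note: the amdgpu_object.h entries are mostly one-line inline helpers over the embedded ttm_buffer_object. A tiny illustration of what they derive; the print format is descriptive only:

/* Illustrative use of the inline helpers declared above. */
static void example_bo_report(struct amdgpu_bo *bo)
{
	pr_info("bo %p: %lu bytes, %u GPU pages, mmap offset 0x%llx\n",
		bo, amdgpu_bo_size(bo), amdgpu_bo_ngpu_pages(bo),
		amdgpu_bo_mmap_offset(bo));
}
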
bo               1192 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c 		else if (data->bps[i].bo == NULL)
bo               1273 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c 	struct amdgpu_bo *bo = NULL;
bo               1289 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c 					       &bo, NULL))
bo               1292 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c 		data->bps[i].bo = bo;
bo               1294 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c 		bo = NULL;
bo               1306 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c 	struct amdgpu_bo *bo;
bo               1318 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c 		bo = data->bps[i].bo;
bo               1320 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c 		amdgpu_bo_free_kernel(&bo, NULL, NULL);
bo               1322 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c 		data->bps[i].bo = bo;
bo                357 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.h 		struct amdgpu_bo *bo;
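
Note: the amdgpu_ras.c entries show each recorded bad page (data->bps[i]) carrying its own reservation BO. A hedged sketch of the release loop; the containing type name and its count field are assumptions from context, as the listing only shows the bo member:

/* Assumed shape: walk the bad-page records and return each kernel BO. */
static void example_ras_release_bad_pages(struct ras_err_handler_data *data)
{
	int i;

	for (i = 0; i < data->count; i++) {	/* count is an assumption */
		struct amdgpu_bo *bo = data->bps[i].bo;

		amdgpu_bo_free_kernel(&bo, NULL, NULL);
		data->bps[i].bo = bo;	/* bo was set to NULL by the free */
	}
}
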
bo                 57 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c 	sa_manager->bo = NULL;
bo                 66 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c 	r = amdgpu_bo_create_kernel(adev, size, align, domain, &sa_manager->bo,
bo                 82 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c 	if (sa_manager->bo == NULL) {
bo                 98 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c 	amdgpu_bo_free_kernel(&sa_manager->bo, &sa_manager->gpu_addr, &sa_manager->cpu_ptr);
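
Note: amdgpu_sa.c backs the whole suballocator with a single kernel BO whose GPU and CPU addresses are captured at creation time. A condensed sketch of the setup/teardown pair visible above; field names follow the entries:

/* Setup: one kernel BO provides the suballocator's backing store. */
static int example_sa_backing_init(struct amdgpu_device *adev,
				   struct amdgpu_sa_manager *sa_manager,
				   unsigned size, u32 align, u32 domain)
{
	return amdgpu_bo_create_kernel(adev, size, align, domain,
				       &sa_manager->bo,
				       &sa_manager->gpu_addr,
				       &sa_manager->cpu_ptr);
}

/* Teardown: frees the BO and clears gpu_addr/cpu_ptr in one call. */
static void example_sa_backing_fini(struct amdgpu_sa_manager *sa_manager)
{
	amdgpu_bo_free_kernel(&sa_manager->bo, &sa_manager->gpu_addr,
			      &sa_manager->cpu_ptr);
}
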
bo                117 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 	    TP_PROTO(struct amdgpu_bo *bo),
bo                118 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 	    TP_ARGS(bo),
bo                120 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 			     __field(struct amdgpu_bo *, bo)
bo                129 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 			   __entry->bo = bo;
bo                130 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 			   __entry->pages = bo->tbo.num_pages;
bo                131 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 			   __entry->type = bo->tbo.mem.mem_type;
bo                132 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 			   __entry->prefer = bo->preferred_domains;
bo                133 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 			   __entry->allow = bo->allowed_domains;
bo                134 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 			   __entry->visible = bo->flags;
bo                138 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 		       __entry->bo, __entry->pages, __entry->type,
bo                248 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 			     __field(struct amdgpu_bo *, bo)
bo                256 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 			   __entry->bo = bo_va ? bo_va->base.bo : NULL;
bo                263 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 		      __entry->bo, __entry->start, __entry->last,
bo                272 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 			     __field(struct amdgpu_bo *, bo)
bo                280 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 			   __entry->bo = bo_va ? bo_va->base.bo : NULL;
bo                287 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 		      __entry->bo, __entry->start, __entry->last,
bo                411 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 	    TP_PROTO(struct amdgpu_bo_list *list, struct amdgpu_bo *bo),
bo                412 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 	    TP_ARGS(list, bo),
bo                415 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 			     __field(struct amdgpu_bo *, bo)
bo                421 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 			   __entry->bo = bo;
bo                422 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 			   __entry->bo_size = amdgpu_bo_size(bo);
bo                426 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 		      __entry->bo,
bo                447 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 	    TP_PROTO(struct amdgpu_bo* bo, uint32_t new_placement, uint32_t old_placement),
bo                448 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 	    TP_ARGS(bo, new_placement, old_placement),
bo                450 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 			__field(struct amdgpu_bo *, bo)
bo                457 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 			__entry->bo      = bo;
bo                458 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 			__entry->bo_size = amdgpu_bo_size(bo);
bo                463 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 			__entry->bo, __entry->old_placement,
bo                 60 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c static int amdgpu_map_buffer(struct ttm_buffer_object *bo,
bo                141 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c static void amdgpu_evict_flags(struct ttm_buffer_object *bo,
bo                144 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->bdev);
bo                153 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (bo->type == ttm_bo_type_sg) {
bo                160 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (!amdgpu_bo_is_amdgpu_bo(bo)) {
bo                168 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	abo = ttm_to_amdgpu_bo(bo);
bo                169 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	switch (bo->mem.mem_type) {
bo                218 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c static int amdgpu_verify_access(struct ttm_buffer_object *bo, struct file *filp)
bo                220 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_bo *abo = ttm_to_amdgpu_bo(bo);
bo                229 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (amdgpu_ttm_tt_get_usermm(bo->ttm))
bo                243 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c static void amdgpu_move_null(struct ttm_buffer_object *bo,
bo                246 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct ttm_mem_reg *old_mem = &bo->mem;
bo                261 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c static uint64_t amdgpu_mm_node_addr(struct ttm_buffer_object *bo,
bo                269 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		addr += bo->bdev->man[mem->mem_type].gpu_offset;
bo                325 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	src_node_start = amdgpu_mm_node_addr(src->bo, src_mm, src->mem) +
bo                331 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	dst_node_start = amdgpu_mm_node_addr(dst->bo, dst_mm, dst->mem) +
bo                356 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 			r = amdgpu_map_buffer(src->bo, src->mem,
bo                369 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 			r = amdgpu_map_buffer(dst->bo, dst->mem,
bo                392 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 			src_node_start = amdgpu_mm_node_addr(src->bo, ++src_mm,
bo                402 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 			dst_node_start = amdgpu_mm_node_addr(dst->bo, ++dst_mm,
bo                425 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c static int amdgpu_move_blit(struct ttm_buffer_object *bo,
bo                430 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->bdev);
bo                435 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	src.bo = bo;
bo                436 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	dst.bo = bo;
bo                444 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 				       bo->base.resv, &fence);
bo                450 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	    (ttm_to_amdgpu_bo(bo)->flags &
bo                454 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		r = amdgpu_fill_buffer(ttm_to_amdgpu_bo(bo), AMDGPU_POISON,
bo                465 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (bo->type == ttm_bo_type_kernel)
bo                466 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		r = ttm_bo_move_accel_cleanup(bo, fence, true, new_mem);
bo                468 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		r = ttm_bo_pipeline_move(bo, fence, evict, new_mem);
bo                484 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c static int amdgpu_move_vram_ram(struct ttm_buffer_object *bo, bool evict,
bo                489 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct ttm_mem_reg *old_mem = &bo->mem;
bo                495 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	adev = amdgpu_ttm_adev(bo->bdev);
bo                507 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	r = ttm_bo_mem_space(bo, &placement, &tmp_mem, ctx);
bo                514 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	r = ttm_tt_set_placement_caching(bo->ttm, tmp_mem.placement);
bo                520 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	r = ttm_tt_bind(bo->ttm, &tmp_mem, ctx);
bo                526 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	r = amdgpu_move_blit(bo, evict, ctx->no_wait_gpu, &tmp_mem, old_mem);
bo                532 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	r = ttm_bo_move_ttm(bo, ctx, new_mem);
bo                534 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	ttm_bo_mem_put(bo, &tmp_mem);
bo                543 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c static int amdgpu_move_ram_vram(struct ttm_buffer_object *bo, bool evict,
bo                548 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct ttm_mem_reg *old_mem = &bo->mem;
bo                554 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	adev = amdgpu_ttm_adev(bo->bdev);
bo                566 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	r = ttm_bo_mem_space(bo, &placement, &tmp_mem, ctx);
bo                573 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	r = ttm_bo_move_ttm(bo, ctx, &tmp_mem);
bo                579 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	r = amdgpu_move_blit(bo, evict, ctx->no_wait_gpu, new_mem, old_mem);
bo                584 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	ttm_bo_mem_put(bo, &tmp_mem);
bo                617 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c static int amdgpu_bo_move(struct ttm_buffer_object *bo, bool evict,
bo                623 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct ttm_mem_reg *old_mem = &bo->mem;
bo                627 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	abo = ttm_to_amdgpu_bo(bo);
bo                631 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	adev = amdgpu_ttm_adev(bo->bdev);
bo                633 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (old_mem->mem_type == TTM_PL_SYSTEM && bo->ttm == NULL) {
bo                634 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		amdgpu_move_null(bo, new_mem);
bo                642 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		amdgpu_move_null(bo, new_mem);
bo                652 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		amdgpu_move_null(bo, new_mem);
bo                663 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		r = amdgpu_move_vram_ram(bo, evict, ctx, new_mem);
bo                666 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		r = amdgpu_move_ram_vram(bo, evict, ctx, new_mem);
bo                668 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		r = amdgpu_move_blit(bo, evict, ctx->no_wait_gpu,
bo                681 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		r = ttm_bo_move_memcpy(bo, ctx, new_mem);
bo                686 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (bo->type == ttm_bo_type_device &&
bo                696 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	atomic64_add((u64)bo->num_pages << PAGE_SHIFT, &adev->num_bytes_moved);
bo                751 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c static unsigned long amdgpu_ttm_io_mem_pfn(struct ttm_buffer_object *bo,
bo                757 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	mm = amdgpu_find_mm_node(&bo->mem, &offset);
bo                758 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	return (bo->mem.bus.base >> PAGE_SHIFT) + mm->start +
bo                787 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c int amdgpu_ttm_tt_get_user_pages(struct amdgpu_bo *bo, struct page **pages)
bo                789 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct hmm_mirror *mirror = bo->mn ? &bo->mn->mirror : NULL;
bo                790 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct ttm_tt *ttm = bo->tbo.ttm;
bo               1094 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c int amdgpu_ttm_alloc_gart(struct ttm_buffer_object *bo)
bo               1096 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->bdev);
bo               1098 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_ttm_tt *gtt = (void*)bo->ttm;
bo               1105 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (bo->mem.start != AMDGPU_BO_INVALID_OFFSET)
bo               1108 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	addr = amdgpu_gmc_agp_addr(bo);
bo               1110 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		bo->mem.start = addr >> PAGE_SHIFT;
bo               1114 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		tmp = bo->mem;
bo               1122 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		placements.flags = (bo->mem.placement & ~TTM_PL_MASK_MEM) |
bo               1125 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		r = ttm_bo_mem_space(bo, &placement, &tmp, &ctx);
bo               1130 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		flags = amdgpu_ttm_tt_pte_flags(adev, bo->ttm, &tmp);
bo               1134 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		r = amdgpu_ttm_gart_bind(adev, bo, flags);
bo               1136 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 			ttm_bo_mem_put(bo, &tmp);
bo               1140 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		ttm_bo_mem_put(bo, &bo->mem);
bo               1141 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		bo->mem = tmp;
bo               1144 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	bo->offset = (bo->mem.start << PAGE_SHIFT) +
bo               1145 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		bo->bdev->man[bo->mem.mem_type].gpu_offset;
bo               1222 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c static struct ttm_tt *amdgpu_ttm_tt_create(struct ttm_buffer_object *bo,
bo               1228 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	adev = amdgpu_ttm_adev(bo->bdev);
bo               1237 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (ttm_sg_tt_init(&gtt->ttm, bo, page_flags)) {
bo               1474 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c static bool amdgpu_ttm_bo_eviction_valuable(struct ttm_buffer_object *bo,
bo               1477 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	unsigned long num_pages = bo->mem.num_pages;
bo               1478 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct drm_mm_node *node = bo->mem.mm_node;
bo               1486 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (bo->type == ttm_bo_type_kernel &&
bo               1487 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	    !dma_resv_test_signaled_rcu(bo->base.resv, true))
bo               1494 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	flist = dma_resv_get_list(bo->base.resv);
bo               1498 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 				dma_resv_held(bo->base.resv));
bo               1504 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	switch (bo->mem.mem_type) {
bo               1524 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	return ttm_bo_eviction_valuable(bo, place);
bo               1539 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c static int amdgpu_ttm_access_memory(struct ttm_buffer_object *bo,
bo               1543 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_bo *abo = ttm_to_amdgpu_bo(bo);
bo               1551 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (bo->mem.mem_type != TTM_PL_VRAM)
bo               1897 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c static int amdgpu_map_buffer(struct ttm_buffer_object *bo,
bo               1903 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_ttm_tt *gtt = (void *)bo->ttm;
bo               1905 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct ttm_tt *ttm = bo->ttm;
bo               1934 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	dst_addr = amdgpu_bo_gpu_offset(adev->gart.bo);
bo               1995 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		job->vm_pd_addr = amdgpu_gmc_pd_addr(adev->gart.bo);
bo               2037 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c int amdgpu_fill_buffer(struct amdgpu_bo *bo,
bo               2042 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
bo               2058 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (bo->tbo.mem.mem_type == TTM_PL_TT) {
bo               2059 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		r = amdgpu_ttm_alloc_gart(&bo->tbo);
bo               2064 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	num_pages = bo->tbo.num_pages;
bo               2065 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	mm_node = bo->tbo.mem.mm_node;
bo               2092 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	num_pages = bo->tbo.num_pages;
bo               2093 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	mm_node = bo->tbo.mem.mm_node;
bo               2099 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		dst_addr = amdgpu_mm_node_addr(&bo->tbo, mm_node, &bo->tbo.mem);
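
Note: amdgpu_fill_buffer() is the SDMA clear/poison path used both at create time and on release, per the amdgpu_object.c and amdgpu_ttm.c entries. A minimal sketch of clearing a BO and attaching the resulting fence, assuming the caller already holds the reservation:

/* Clear a BO via SDMA and fence the write against later users. */
static int example_clear_bo(struct amdgpu_bo *bo)
{
	struct dma_fence *fence = NULL;
	int r;

	r = amdgpu_fill_buffer(bo, 0, bo->tbo.base.resv, &fence);
	if (r)
		return r;

	amdgpu_bo_fence(bo, fence, false);	/* exclusive fence */
	dma_fence_put(fence);
	return 0;
}

This mirrors the create-time clear indexed above, which additionally stashes the fence in bo->tbo.moving so later moves wait for the fill.
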
bo                 64 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.h 	struct ttm_buffer_object	*bo;
bo                 76 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.h u64 amdgpu_vram_mgr_bo_visible_size(struct amdgpu_bo *bo);
bo                 97 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.h int amdgpu_fill_buffer(struct amdgpu_bo *bo,
bo                103 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.h int amdgpu_ttm_alloc_gart(struct ttm_buffer_object *bo);
bo                107 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.h int amdgpu_ttm_tt_get_user_pages(struct amdgpu_bo *bo, struct page **pages);
bo                110 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.h static inline int amdgpu_ttm_tt_get_user_pages(struct amdgpu_bo *bo,
bo                479 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	struct amdgpu_bo *bo;
bo                484 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	r = amdgpu_cs_find_mapping(ctx->parser, addr, &bo, &mapping);
bo                496 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 			amdgpu_bo_placement_from_domain(bo, domain);
bo                498 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 		amdgpu_uvd_force_into_uvd_segment(bo);
bo                500 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 		r = ttm_bo_validate(&bo->tbo, &bo->placement, &tctx);
bo                711 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 			     struct amdgpu_bo *bo, unsigned offset)
bo                724 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	r = amdgpu_bo_kmap(bo, &ptr);
bo                743 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 		amdgpu_bo_kunmap(bo);
bo                765 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 		amdgpu_bo_kunmap(bo);
bo                787 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 		amdgpu_bo_kunmap(bo);
bo                808 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	struct amdgpu_bo *bo;
bo                814 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	r = amdgpu_cs_find_mapping(ctx->parser, addr, &bo, &mapping);
bo                820 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	start = amdgpu_bo_gpu_offset(bo);
bo                871 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 		r = amdgpu_uvd_cs_msg(ctx, bo, addr);
bo               1019 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c static int amdgpu_uvd_send_msg(struct amdgpu_ring *ring, struct amdgpu_bo *bo,
bo               1033 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	amdgpu_bo_kunmap(bo);
bo               1034 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	amdgpu_bo_unpin(bo);
bo               1039 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 		amdgpu_bo_placement_from_domain(bo, AMDGPU_GEM_DOMAIN_VRAM);
bo               1040 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 		amdgpu_uvd_force_into_uvd_segment(bo);
bo               1041 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 		r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
bo               1062 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	addr = amdgpu_bo_gpu_offset(bo);
bo               1076 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 		r = dma_resv_wait_timeout_rcu(bo->tbo.base.resv,
bo               1088 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 		r = amdgpu_sync_resv(adev, &job->sync, bo->tbo.base.resv,
bo               1099 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	amdgpu_bo_fence(bo, f, false);
bo               1100 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	amdgpu_bo_unreserve(bo);
bo               1101 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	amdgpu_bo_unref(&bo);
bo               1113 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	amdgpu_bo_unreserve(bo);
bo               1114 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	amdgpu_bo_unref(&bo);
bo               1125 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	struct amdgpu_bo *bo = NULL;
bo               1131 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 				      &bo, NULL, (void **)&msg);
bo               1150 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	return amdgpu_uvd_send_msg(ring, bo, true, fence);
bo               1157 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	struct amdgpu_bo *bo = NULL;
bo               1163 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 				      &bo, NULL, (void **)&msg);
bo               1175 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	return amdgpu_uvd_send_msg(ring, bo, direct, fence);
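
Note: the amdgpu_uvd.c entries sketch the ring-test message pattern: allocate a small kernel BO, write the command message through the returned CPU pointer, then hand the BO to the file-local amdgpu_uvd_send_msg(). Condensed below with an illustrative payload; the real message layout and reservation handling live in amdgpu_uvd_get_create_msg():

/* Hedged sketch of the create-message path; payload words illustrative. */
static int example_uvd_test_msg(struct amdgpu_ring *ring, uint32_t handle,
				struct dma_fence **fence)
{
	struct amdgpu_device *adev = ring->adev;
	struct amdgpu_bo *bo = NULL;
	uint32_t *msg;
	int r;

	r = amdgpu_bo_create_kernel(adev, PAGE_SIZE, PAGE_SIZE,
				    AMDGPU_GEM_DOMAIN_VRAM,
				    &bo, NULL, (void **)&msg);
	if (r)
		return r;

	msg[0] = cpu_to_le32(0x00000de4);	/* message size word */
	msg[1] = cpu_to_le32(0x00000000);	/* message type: create */
	msg[2] = cpu_to_le32(handle);
	/* remaining message words elided */

	return amdgpu_uvd_send_msg(ring, bo, true, fence);
}
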
bo                432 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 			      struct amdgpu_bo *bo,
bo                448 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	addr = amdgpu_bo_gpu_offset(bo);
bo                583 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	struct amdgpu_bo *bo;
bo                598 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	r = amdgpu_cs_find_mapping(p, addr, &bo, &mapping);
bo                605 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	for (i = 0; i < bo->placement.num_placement; ++i) {
bo                606 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 		bo->placements[i].fpfn = max(bo->placements[i].fpfn, fpfn);
bo                607 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 		bo->placements[i].lpfn = bo->placements[i].lpfn ?
bo                608 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 			min(bo->placements[i].lpfn, lpfn) : lpfn;
bo                610 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	return ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
bo                628 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	struct amdgpu_bo *bo;
bo                639 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	r = amdgpu_cs_find_mapping(p, addr, &bo, &mapping);
bo                654 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	addr += amdgpu_bo_gpu_offset(bo);
bo               1114 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	struct amdgpu_bo *bo = NULL;
bo               1123 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 				      &bo, NULL, NULL);
bo               1127 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	r = amdgpu_vce_get_create_msg(ring, 1, bo, NULL);
bo               1143 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	amdgpu_bo_unreserve(bo);
bo               1144 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	amdgpu_bo_unref(&bo);
bo                 62 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.h 			      struct amdgpu_bo *bo,
bo                408 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 				   struct amdgpu_bo *bo,
bo                423 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	addr = amdgpu_bo_gpu_offset(bo);
bo                440 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	amdgpu_bo_fence(bo, f, false);
bo                441 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	amdgpu_bo_unreserve(bo);
bo                442 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	amdgpu_bo_unref(&bo);
bo                454 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	amdgpu_bo_unreserve(bo);
bo                455 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	amdgpu_bo_unref(&bo);
bo                463 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	struct amdgpu_bo *bo = NULL;
bo                469 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 				      &bo, NULL, (void **)&msg);
bo                490 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	return amdgpu_vcn_dec_send_msg(ring, bo, fence);
bo                497 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	struct amdgpu_bo *bo = NULL;
bo                503 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 				      &bo, NULL, (void **)&msg);
bo                516 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	return amdgpu_vcn_dec_send_msg(ring, bo, fence);
bo                572 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 					 struct amdgpu_bo *bo,
bo                587 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	addr = amdgpu_bo_gpu_offset(bo);
bo                625 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 					  struct amdgpu_bo *bo,
bo                640 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	addr = amdgpu_bo_gpu_offset(bo);
bo                680 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	struct amdgpu_bo *bo = NULL;
bo                685 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 				      &bo, NULL, NULL);
bo                689 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	r = amdgpu_vcn_enc_get_create_msg(ring, 1, bo, NULL);
bo                693 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	r = amdgpu_vcn_enc_get_destroy_msg(ring, 1, bo, &fence);
bo                705 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	amdgpu_bo_unreserve(bo);
bo                706 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	amdgpu_bo_unref(&bo);
bo                278 drivers/gpu/drm/amd/amdgpu/amdgpu_virt.c 				    &adev->virt.mm_table.bo,
bo                303 drivers/gpu/drm/amd/amdgpu/amdgpu_virt.c 	amdgpu_bo_free_kernel(&adev->virt.mm_table.bo,
bo                 34 drivers/gpu/drm/amd/amdgpu/amdgpu_virt.h 	struct amdgpu_bo	*bo;
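
Note: amdgpu_virt.c keeps the SR-IOV MM scheduling table in one kernel BO tracked through adev->virt.mm_table. A sketch of the alloc/free pair; the domain and the gpu_addr/cpu_addr member names go beyond what the entries show and are assumptions:

/* Allocate the MM table BO and capture its GPU/CPU addresses. */
static int example_alloc_mm_table(struct amdgpu_device *adev)
{
	return amdgpu_bo_create_kernel(adev, PAGE_SIZE, PAGE_SIZE,
				       AMDGPU_GEM_DOMAIN_VRAM, /* assumed */
				       &adev->virt.mm_table.bo,
				       &adev->virt.mm_table.gpu_addr,
				       (void *)&adev->virt.mm_table.cpu_addr);
}

/* Mirrored free, matching the amdgpu_bo_free_kernel() entry above. */
static void example_free_mm_table(struct amdgpu_device *adev)
{
	amdgpu_bo_free_kernel(&adev->virt.mm_table.bo,
			      &adev->virt.mm_table.gpu_addr,
			      (void *)&adev->virt.mm_table.cpu_addr);
}
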
bo                203 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	struct amdgpu_bo *bo = vm_bo->bo;
bo                206 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	if (bo->tbo.type == ttm_bo_type_kernel)
bo                293 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 				   struct amdgpu_bo *bo)
bo                296 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	base->bo = bo;
bo                300 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	if (!bo)
bo                302 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	base->next = bo->vm_bo;
bo                303 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	bo->vm_bo = base;
bo                305 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	if (bo->tbo.base.resv != vm->root.base.bo->tbo.base.resv)
bo                309 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	if (bo->tbo.type == ttm_bo_type_kernel && bo->parent)
bo                314 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	if (bo->preferred_domains &
bo                315 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	    amdgpu_mem_type_to_domain(bo->tbo.mem.mem_type))
bo                336 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	struct amdgpu_bo *parent = pt->base.bo->parent;
bo                392 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	BUG_ON(!cursor->entry->base.bo);
bo                562 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	entry->tv.bo = &vm->root.base.bo->tbo;
bo                569 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c void amdgpu_vm_del_from_lru_notify(struct ttm_buffer_object *bo)
bo                574 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	if (!amdgpu_bo_is_amdgpu_bo(bo))
bo                577 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	if (bo->mem.placement & TTM_PL_FLAG_NO_EVICT)
bo                580 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	abo = ttm_to_amdgpu_bo(bo);
bo                586 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		if (abo->tbo.base.resv == vm->root.base.bo->tbo.base.resv)
bo                617 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		struct amdgpu_bo *bo = bo_base->bo;
bo                619 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		if (!bo->parent)
bo                622 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		ttm_bo_move_to_lru_tail(&bo->tbo, &vm->lru_bulk_move);
bo                623 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		if (bo->shadow)
bo                624 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 			ttm_bo_move_to_lru_tail(&bo->shadow->tbo,
bo                646 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 			      int (*validate)(void *p, struct amdgpu_bo *bo),
bo                655 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		struct amdgpu_bo *bo = bo_base->bo;
bo                657 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		r = validate(param, bo);
bo                661 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		if (bo->tbo.type != ttm_bo_type_kernel) {
bo                664 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 			vm->update_funcs->map_table(bo);
bo                665 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 			if (bo->parent)
bo                704 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 			      struct amdgpu_bo *bo)
bo                709 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	struct amdgpu_bo *ancestor = bo;
bo                723 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	entries = amdgpu_bo_size(bo) / 8;
bo                727 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	} else if (!bo->parent) {
bo                745 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
bo                749 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	if (bo->shadow) {
bo                750 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		r = ttm_bo_validate(&bo->shadow->tbo, &bo->shadow->placement,
bo                756 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	r = vm->update_funcs->map_table(bo);
bo                779 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		r = vm->update_funcs->update(&params, bo, addr, 0, ats_entries,
bo                802 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		r = vm->update_funcs->update(&params, bo, addr, 0, entries,
bo                831 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	else if (!vm->root.base.bo || vm->root.base.bo->shadow)
bo                834 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	if (vm->root.base.bo)
bo                835 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		bp->resv = vm->root.base.bo->tbo.base.resv;
bo                871 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	if (entry->base.bo)
bo                883 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	pt->parent = amdgpu_bo_ref(cursor->parent->base.bo);
bo                905 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	if (entry->base.bo) {
bo                906 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		entry->base.bo->vm_bo = NULL;
bo                908 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		amdgpu_bo_unref(&entry->base.bo->shadow);
bo                909 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		amdgpu_bo_unref(&entry->base.bo);
bo               1141 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 				       struct amdgpu_bo *bo)
bo               1145 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	for (base = bo->vm_bo; base; base = base->next) {
bo               1195 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	struct amdgpu_bo *bo = parent->base.bo, *pbo;
bo               1199 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	for (level = 0, pbo = bo->parent; pbo; ++level)
bo               1203 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	amdgpu_gmc_get_pde_for_bo(entry->base.bo, level, &pt, &flags);
bo               1205 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	return vm->update_funcs->update(params, bo, pde, pt, 1, 0, flags);
bo               1223 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		if (entry->base.bo && !entry->base.moved)
bo               1283 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 				   struct amdgpu_bo *bo, unsigned level,
bo               1301 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	params->vm->update_funcs->update(params, bo, pe, addr, count, incr,
bo               1404 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		pt = cursor.entry->base.bo;
bo               1685 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	struct amdgpu_bo *bo = bo_va->base.bo;
bo               1696 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	if (clear || !bo) {
bo               1703 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		mem = &bo->tbo.mem;
bo               1706 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 			ttm = container_of(bo->tbo.ttm, struct ttm_dma_tt, ttm);
bo               1709 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		exclusive = dma_resv_get_excl(bo->tbo.base.resv);
bo               1712 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	if (bo) {
bo               1713 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		flags = amdgpu_ttm_tt_pte_flags(adev, bo->tbo.ttm, mem);
bo               1714 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		bo_adev = amdgpu_ttm_adev(bo->tbo.bdev);
bo               1719 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	if (clear || (bo && bo->tbo.base.resv == vm->root.base.bo->tbo.base.resv))
bo               1750 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	if (bo && bo->tbo.base.resv == vm->root.base.bo->tbo.base.resv) {
bo               1751 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		uint32_t mem_type = bo->tbo.mem.mem_type;
bo               1753 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		if (!(bo->preferred_domains & amdgpu_mem_type_to_domain(mem_type)))
bo               1886 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	struct dma_resv *resv = vm->root.base.bo->tbo.base.resv;
bo               2000 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		resv = bo_va->base.bo->tbo.base.resv;
bo               2040 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 				      struct amdgpu_bo *bo)
bo               2048 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	amdgpu_vm_bo_base_init(&bo_va->base, vm, bo);
bo               2054 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	if (bo && amdgpu_xgmi_same_hive(adev, amdgpu_ttm_adev(bo->tbo.bdev)) &&
bo               2055 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	    (bo->preferred_domains & AMDGPU_GEM_DOMAIN_VRAM)) {
bo               2082 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	struct amdgpu_bo *bo = bo_va->base.bo;
bo               2091 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	if (bo && bo->tbo.base.resv == vm->root.base.bo->tbo.base.resv &&
bo               2121 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	struct amdgpu_bo *bo = bo_va->base.bo;
bo               2133 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	    (bo && offset + size > amdgpu_bo_size(bo)))
bo               2143 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 			"0x%010Lx-0x%010Lx\n", bo, saddr, eaddr,
bo               2186 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	struct amdgpu_bo *bo = bo_va->base.bo;
bo               2198 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	    (bo && offset + size > amdgpu_bo_size(bo)))
bo               2419 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		if (mapping->bo_va && mapping->bo_va->base.bo) {
bo               2420 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 			struct amdgpu_bo *bo;
bo               2422 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 			bo = mapping->bo_va->base.bo;
bo               2423 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 			if (dma_resv_locking_ctx(bo->tbo.base.resv) !=
bo               2446 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	struct amdgpu_bo *bo = bo_va->base.bo;
bo               2450 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	if (bo) {
bo               2451 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		if (bo->tbo.base.resv == vm->root.base.bo->tbo.base.resv)
bo               2454 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		for (base = &bo_va->base.bo->vm_bo; *base;
bo               2484 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	if (bo && bo_va->is_xgmi) {
bo               2504 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 			     struct amdgpu_bo *bo, bool evicted)
bo               2509 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	if (bo->parent && bo->parent->shadow == bo)
bo               2510 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		bo = bo->parent;
bo               2512 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	for (bo_base = bo->vm_bo; bo_base; bo_base = bo_base->next) {
bo               2515 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		if (evicted && bo->tbo.base.resv == vm->root.base.bo->tbo.base.resv) {
bo               2524 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		if (bo->tbo.type == ttm_bo_type_kernel)
bo               2526 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		else if (bo->tbo.base.resv == vm->root.base.bo->tbo.base.resv)
bo               2656 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	return dma_resv_wait_timeout_rcu(vm->root.base.bo->tbo.base.resv,
bo               2741 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	amdgpu_bo_unreserve(vm->root.base.bo);
bo               2761 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	amdgpu_bo_unreserve(vm->root.base.bo);
bo               2764 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	amdgpu_bo_unref(&vm->root.base.bo->shadow);
bo               2765 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	amdgpu_bo_unref(&vm->root.base.bo);
bo               2766 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	vm->root.base.bo = NULL;
bo               2798 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		if (vm->root.entries[i].base.bo)
bo               2830 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	r = amdgpu_bo_reserve(vm->root.base.bo, true);
bo               2857 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		r = amdgpu_vm_clear_bo(adev, vm, vm->root.base.bo);
bo               2892 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	amdgpu_bo_unref(&vm->root.base.bo->shadow);
bo               2908 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	amdgpu_bo_unreserve(vm->root.base.bo);
bo               2981 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	root = amdgpu_bo_ref(vm->root.base.bo);
bo               2990 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	WARN_ON(vm->root.base.bo);
bo                135 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h 	struct amdgpu_bo		*bo;
bo                220 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h 	int (*map_table)(struct amdgpu_bo *bo);
bo                224 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h 		      struct amdgpu_bo *bo, uint64_t pe, uint64_t addr,
bo                357 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h 			      int (*callback)(void *p, struct amdgpu_bo *bo),
bo                371 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h 			     struct amdgpu_bo *bo, bool evicted);
bo                374 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h 				       struct amdgpu_bo *bo);
bo                377 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h 				      struct amdgpu_bo *bo);
bo                412 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h void amdgpu_vm_del_from_lru_notify(struct ttm_buffer_object *bo);
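
Note: amdgpu_vm.c threads every VM's per-BO bookkeeping through a singly linked list anchored at bo->vm_bo, as the amdgpu_vm_bo_base_init() entries show. The core linkage in miniature; the vm member assignment is inferred from context:

/* Condensed from amdgpu_vm_bo_base_init(): push this base onto the BO's
 * vm_bo list so one BO can be tracked by several VMs at once. */
static void example_vm_bo_base_link(struct amdgpu_vm_bo_base *base,
				    struct amdgpu_vm *vm,
				    struct amdgpu_bo *bo)
{
	base->vm = vm;
	base->bo = bo;
	if (!bo)
		return;		/* per-VM BOs without backing are legal */
	base->next = bo->vm_bo;
	bo->vm_bo = base;
}
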
bo                 55 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_cpu.c 	r = amdgpu_bo_sync_wait(p->vm->root.base.bo, owner, true);
bo                 83 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_cpu.c 				struct amdgpu_bo *bo, uint64_t pe,
bo                 90 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_cpu.c 	pe += (unsigned long)amdgpu_bo_kptr(bo);
bo                 63 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c 	struct amdgpu_bo *root = p->vm->root.base.bo;
bo                 96 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c 	struct amdgpu_bo *root = p->vm->root.base.bo;
bo                135 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c 				     struct amdgpu_bo *bo, uint64_t pe,
bo                143 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c 	pe += amdgpu_bo_gpu_offset(bo);
bo                164 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c 				    struct amdgpu_bo *bo, uint64_t pe,
bo                170 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c 	pe += amdgpu_bo_gpu_offset(bo);
bo                196 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c 				 struct amdgpu_bo *bo, uint64_t pe,
bo                229 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c 			if (bo->shadow)
bo                230 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c 				amdgpu_vm_sdma_set_ptes(p, bo->shadow, pe, addr,
bo                232 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c 			amdgpu_vm_sdma_set_ptes(p, bo, pe, addr, count,
bo                239 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c 			(bo->shadow ? 2 : 1);
bo                254 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c 		if (bo->shadow)
bo                255 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c 			amdgpu_vm_sdma_copy_ptes(p, bo->shadow, pe, nptes);
bo                256 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c 		amdgpu_vm_sdma_copy_ptes(p, bo, pe, nptes);
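
The amdgpu_vm_sdma.c lines above show that every page-table write is mirrored: when a page-table BO has a shadow copy, the PTE update is emitted against bo->shadow first and then against the BO itself, and the command-buffer budget is scaled by (bo->shadow ? 2 : 1). A minimal sketch of that pattern; emit_ptes() is a hypothetical stand-in for the driver's static amdgpu_vm_sdma_set_ptes(), and struct amdgpu_vm_update_params is assumed as the context type since it is not visible above.

static void update_ptes_sketch(struct amdgpu_vm_update_params *p,
                               struct amdgpu_bo *bo, uint64_t pe,
                               uint64_t addr, unsigned count,
                               uint32_t incr, uint64_t flags)
{
        /* keep the shadow page table (used for GPU-reset recovery)
         * in sync before touching the live one */
        if (bo->shadow)
                emit_ptes(p, bo->shadow, pe, addr, count, incr, flags);
        emit_ptes(p, bo, pe, addr, count, incr, flags);
}
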
bo                215 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c u64 amdgpu_vram_mgr_bo_visible_size(struct amdgpu_bo *bo)
bo                217 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c 	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
bo                218 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c 	struct ttm_mem_reg *mem = &bo->tbo.mem;
bo                224 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c 		return amdgpu_bo_size(bo);
bo               1988 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 				    &ngg_buf->bo,
bo               1995 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ngg_buf->bo_size = amdgpu_bo_size(ngg_buf->bo);
bo               2005 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 		amdgpu_bo_free_kernel(&adev->gfx.ngg.buf[i].bo,
bo                 54 drivers/gpu/drm/amd/amdgpu/gfxhub_v1_0.c 	uint64_t pt_base = amdgpu_gmc_pd_addr(adev->gart.bo);
bo                 51 drivers/gpu/drm/amd/amdgpu/gfxhub_v2_0.c 	uint64_t value = amdgpu_gmc_pd_addr(adev->gart.bo);
bo                345 drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c 	job->vm_pd_addr = amdgpu_gmc_pd_addr(adev->gart.bo);
bo                647 drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c 	if (adev->gart.bo) {
bo                829 drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c 	if (adev->gart.bo == NULL) {
bo                866 drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c 		 (unsigned long long)amdgpu_bo_gpu_offset(adev->gart.bo));
bo                492 drivers/gpu/drm/amd/amdgpu/gmc_v6_0.c 	if (adev->gart.bo == NULL) {
bo                500 drivers/gpu/drm/amd/amdgpu/gmc_v6_0.c 	table_addr = amdgpu_bo_gpu_offset(adev->gart.bo);
bo                586 drivers/gpu/drm/amd/amdgpu/gmc_v6_0.c 	if (adev->gart.bo) {
bo                588 drivers/gpu/drm/amd/amdgpu/gmc_v7_0.c 	if (adev->gart.bo == NULL) {
bo                596 drivers/gpu/drm/amd/amdgpu/gmc_v7_0.c 	table_addr = amdgpu_bo_gpu_offset(adev->gart.bo);
bo                692 drivers/gpu/drm/amd/amdgpu/gmc_v7_0.c 	if (adev->gart.bo) {
bo                815 drivers/gpu/drm/amd/amdgpu/gmc_v8_0.c 	if (adev->gart.bo == NULL) {
bo                823 drivers/gpu/drm/amd/amdgpu/gmc_v8_0.c 	table_addr = amdgpu_bo_gpu_offset(adev->gart.bo);
bo                936 drivers/gpu/drm/amd/amdgpu/gmc_v8_0.c 	if (adev->gart.bo) {
bo               1122 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 	if (adev->gart.bo) {
bo               1413 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 	if (adev->gart.bo == NULL) {
bo               1468 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 		 (unsigned long long)amdgpu_bo_gpu_offset(adev->gart.bo));
bo                 74 drivers/gpu/drm/amd/amdgpu/mmhub_v1_0.c 	uint64_t pt_base = amdgpu_gmc_pd_addr(adev->gart.bo);
bo                 36 drivers/gpu/drm/amd/amdgpu/mmhub_v2_0.c 	uint64_t value = amdgpu_gmc_pd_addr(adev->gart.bo);
bo                 80 drivers/gpu/drm/amd/amdgpu/mmhub_v9_4.c 	uint64_t pt_base = amdgpu_gmc_pd_addr(adev->gart.bo);
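
Every GART-enable path listed above (gmc_v6 through gmc_v10, plus the gfxhub/mmhub variants) opens with the same guard: fail if adev->gart.bo was never allocated, then derive the page-table base address from the BO. A condensed sketch; the error message wording is illustrative.

static int gart_enable_sketch(struct amdgpu_device *adev)
{
        uint64_t table_addr;

        if (adev->gart.bo == NULL) {
                dev_err(adev->dev, "no VRAM object for PCIE GART\n");
                return -EINVAL;
        }

        table_addr = amdgpu_bo_gpu_offset(adev->gart.bo);
        /* the hub code instead calls amdgpu_gmc_pd_addr(adev->gart.bo),
         * which folds the page-directory addressing bits into the value */

        /* ... program the VM/hub registers with table_addr ... */
        return 0;
}
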
bo                209 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 				       struct amdgpu_bo *bo,
bo                224 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	addr = amdgpu_bo_gpu_offset(bo);
bo                272 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 					struct amdgpu_bo *bo,
bo                287 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	addr = amdgpu_bo_gpu_offset(bo);
bo                332 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	struct amdgpu_bo *bo = NULL;
bo                337 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 				      &bo, NULL, NULL);
bo                341 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	r = uvd_v6_0_enc_get_create_msg(ring, 1, bo, NULL);
bo                345 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	r = uvd_v6_0_enc_get_destroy_msg(ring, 1, bo, &fence);
bo                357 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	amdgpu_bo_unreserve(bo);
bo                358 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	amdgpu_bo_unref(&bo);
bo                217 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 				       struct amdgpu_bo *bo,
bo                232 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	addr = amdgpu_bo_gpu_offset(bo);
bo                279 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 					struct amdgpu_bo *bo,
bo                294 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	addr = amdgpu_bo_gpu_offset(bo);
bo                339 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	struct amdgpu_bo *bo = NULL;
bo                344 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 				      &bo, NULL, NULL);
bo                348 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	r = uvd_v7_0_enc_get_create_msg(ring, 1, bo, NULL);
bo                352 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	r = uvd_v7_0_enc_get_destroy_msg(ring, 1, bo, &fence);
bo                364 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	amdgpu_bo_unreserve(bo);
bo                365 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	amdgpu_bo_unref(&bo);
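
uvd_v6_0.c and uvd_v7_0.c share one encoder-ring IB test skeleton: allocate a reserved VRAM scratch BO, emit a session-create message, emit a destroy message that hands back a fence, wait on it, then unreserve and drop the BO. A sketch of the v7 flow; the amdgpu_bo_create_reserved() size and alignment arguments are assumptions, since that call is truncated in the listing.

static int enc_ib_test_sketch(struct amdgpu_ring *ring, long timeout)
{
        struct dma_fence *fence = NULL;
        struct amdgpu_bo *bo = NULL;
        long r;

        r = amdgpu_bo_create_reserved(ring->adev, 128 * 1024, PAGE_SIZE,
                                      AMDGPU_GEM_DOMAIN_VRAM,
                                      &bo, NULL, NULL);
        if (r)
                return r;

        r = uvd_v7_0_enc_get_create_msg(ring, 1, bo, NULL);
        if (r)
                goto error;

        r = uvd_v7_0_enc_get_destroy_msg(ring, 1, bo, &fence);
        if (r)
                goto error;

        r = dma_fence_wait_timeout(fence, false, timeout);
        if (r == 0)
                r = -ETIMEDOUT;         /* fence never signalled */
        else if (r > 0)
                r = 0;                  /* signalled in time */

error:
        dma_fence_put(fence);           /* tolerates NULL */
        amdgpu_bo_unreserve(bo);
        amdgpu_bo_unref(&bo);
        return r;
}
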
bo               4516 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c 	tv.bo = &rbo->tbo;
bo                937 drivers/gpu/drm/amd/powerplay/amdgpu_smu.c 					      &tables[i].bo,
bo                949 drivers/gpu/drm/amd/powerplay/amdgpu_smu.c 		amdgpu_bo_free_kernel(&tables[i].bo,
bo                970 drivers/gpu/drm/amd/powerplay/amdgpu_smu.c 		amdgpu_bo_free_kernel(&tables[i].bo,
bo               1201 drivers/gpu/drm/amd/powerplay/amdgpu_smu.c 					      &memory_pool->bo,
bo               1221 drivers/gpu/drm/amd/powerplay/amdgpu_smu.c 	amdgpu_bo_free_kernel(&memory_pool->bo,
bo                183 drivers/gpu/drm/amd/powerplay/inc/amdgpu_smu.h 	struct amdgpu_bo *bo;
bo                560 drivers/gpu/drm/ast/ast_main.c 	*obj = &gbo->bo.base;
bo                 42 drivers/gpu/drm/bochs/bochs_kms.c 			 state->fb->offsets[0] + gbo->bo.offset);
bo                151 drivers/gpu/drm/cirrus/cirrus_drv.h 	struct ttm_buffer_object bo;
bo                161 drivers/gpu/drm/cirrus/cirrus_drv.h cirrus_bo(struct ttm_buffer_object *bo)
bo                163 drivers/gpu/drm/cirrus/cirrus_drv.h 	return container_of(bo, struct cirrus_bo, bo);
bo                219 drivers/gpu/drm/cirrus/cirrus_drv.h void cirrus_ttm_placement(struct cirrus_bo *bo, int domain);
bo                224 drivers/gpu/drm/cirrus/cirrus_drv.h static inline int cirrus_bo_reserve(struct cirrus_bo *bo, bool no_wait)
bo                228 drivers/gpu/drm/cirrus/cirrus_drv.h 	ret = ttm_bo_reserve(&bo->bo, true, no_wait, NULL);
bo                231 drivers/gpu/drm/cirrus/cirrus_drv.h 			DRM_ERROR("reserve failed %p\n", bo);
bo                237 drivers/gpu/drm/cirrus/cirrus_drv.h static inline void cirrus_bo_unreserve(struct cirrus_bo *bo)
bo                239 drivers/gpu/drm/cirrus/cirrus_drv.h 	ttm_bo_unreserve(&bo->bo);
bo                242 drivers/gpu/drm/cirrus/cirrus_drv.h int cirrus_bo_push_sysram(struct cirrus_bo *bo);
bo                243 drivers/gpu/drm/cirrus/cirrus_drv.h int cirrus_bo_pin(struct cirrus_bo *bo, u32 pl_flag, u64 *gpu_addr);
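
cirrus_drv.h above wraps ttm_bo_reserve()/ttm_bo_unreserve() in driver-local helpers and exposes pin/push entry points. Typical usage brackets the pin with a reserve; a minimal sketch, where the TTM_PL_FLAG_VRAM placement flag is an assumption about the caller's intent.

static int cirrus_pin_to_vram_sketch(struct cirrus_bo *bo, u64 *gpu_addr)
{
        int ret;

        ret = cirrus_bo_reserve(bo, false);     /* sleeps; logs on failure */
        if (ret)
                return ret;

        ret = cirrus_bo_pin(bo, TTM_PL_FLAG_VRAM, gpu_addr);
        cirrus_bo_unreserve(bo);                /* drop the lock either way */
        return ret;
}
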
bo                 29 drivers/gpu/drm/drm_gem_vram_helper.c 	drm_gem_object_release(&gbo->bo.base);
bo                 38 drivers/gpu/drm/drm_gem_vram_helper.c static void ttm_buffer_object_destroy(struct ttm_buffer_object *bo)
bo                 40 drivers/gpu/drm/drm_gem_vram_helper.c 	struct drm_gem_vram_object *gbo = drm_gem_vram_of_bo(bo);
bo                 85 drivers/gpu/drm/drm_gem_vram_helper.c 	if (!gbo->bo.base.funcs)
bo                 86 drivers/gpu/drm/drm_gem_vram_helper.c 		gbo->bo.base.funcs = &drm_gem_vram_object_funcs;
bo                 88 drivers/gpu/drm/drm_gem_vram_helper.c 	ret = drm_gem_object_init(dev, &gbo->bo.base, size);
bo                 94 drivers/gpu/drm/drm_gem_vram_helper.c 	gbo->bo.bdev = bdev;
bo                 97 drivers/gpu/drm/drm_gem_vram_helper.c 	ret = ttm_bo_init(bdev, &gbo->bo, size, ttm_bo_type_device,
bo                106 drivers/gpu/drm/drm_gem_vram_helper.c 	drm_gem_object_release(&gbo->bo.base);
bo                155 drivers/gpu/drm/drm_gem_vram_helper.c 	ttm_bo_put(&gbo->bo);
bo                171 drivers/gpu/drm/drm_gem_vram_helper.c 	return drm_vma_node_offset_addr(&gbo->bo.base.vma_node);
bo                191 drivers/gpu/drm/drm_gem_vram_helper.c 	return gbo->bo.offset;
bo                215 drivers/gpu/drm/drm_gem_vram_helper.c 	ret = ttm_bo_reserve(&gbo->bo, true, false, NULL);
bo                228 drivers/gpu/drm/drm_gem_vram_helper.c 	ret = ttm_bo_validate(&gbo->bo, &gbo->placement, &ctx);
bo                234 drivers/gpu/drm/drm_gem_vram_helper.c 	ttm_bo_unreserve(&gbo->bo);
bo                239 drivers/gpu/drm/drm_gem_vram_helper.c 	ttm_bo_unreserve(&gbo->bo);
bo                257 drivers/gpu/drm/drm_gem_vram_helper.c 	ret = ttm_bo_reserve(&gbo->bo, true, false, NULL);
bo                271 drivers/gpu/drm/drm_gem_vram_helper.c 	ret = ttm_bo_validate(&gbo->bo, &gbo->placement, &ctx);
bo                276 drivers/gpu/drm/drm_gem_vram_helper.c 	ttm_bo_unreserve(&gbo->bo);
bo                281 drivers/gpu/drm/drm_gem_vram_helper.c 	ttm_bo_unreserve(&gbo->bo);
bo                312 drivers/gpu/drm/drm_gem_vram_helper.c 	ret = ttm_bo_kmap(&gbo->bo, 0, gbo->bo.num_pages, kmap);
bo                386 drivers/gpu/drm/drm_gem_vram_helper.c 	ret = drm_gem_handle_create(file, &gbo->bo.base, &handle);
bo                390 drivers/gpu/drm/drm_gem_vram_helper.c 	drm_gem_object_put_unlocked(&gbo->bo.base);
bo                399 drivers/gpu/drm/drm_gem_vram_helper.c 	drm_gem_object_put_unlocked(&gbo->bo.base);
bo                408 drivers/gpu/drm/drm_gem_vram_helper.c static bool drm_is_gem_vram(struct ttm_buffer_object *bo)
bo                410 drivers/gpu/drm/drm_gem_vram_helper.c 	return (bo->destroy == ttm_buffer_object_destroy);
bo                419 drivers/gpu/drm/drm_gem_vram_helper.c void drm_gem_vram_bo_driver_evict_flags(struct ttm_buffer_object *bo,
bo                425 drivers/gpu/drm/drm_gem_vram_helper.c 	if (!drm_is_gem_vram(bo))
bo                428 drivers/gpu/drm/drm_gem_vram_helper.c 	gbo = drm_gem_vram_of_bo(bo);
bo                444 drivers/gpu/drm/drm_gem_vram_helper.c int drm_gem_vram_bo_driver_verify_access(struct ttm_buffer_object *bo,
bo                447 drivers/gpu/drm/drm_gem_vram_helper.c 	struct drm_gem_vram_object *gbo = drm_gem_vram_of_bo(bo);
bo                449 drivers/gpu/drm/drm_gem_vram_helper.c 	return drm_vma_node_verify_access(&gbo->bo.base.vma_node,
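
The drm_gem_vram_helper.c lines show the canonical TTM pin sequence: reserve the embedded ttm_buffer_object, validate it against the object's placement, and unreserve on both the success and error paths. A condensed sketch of that shape, with the helper's pin-count bookkeeping omitted.

static int vram_pin_sketch(struct drm_gem_vram_object *gbo)
{
        struct ttm_operation_ctx ctx = { false, false };
        int ret;

        ret = ttm_bo_reserve(&gbo->bo, true, false, NULL);
        if (ret < 0)
                return ret;

        ret = ttm_bo_validate(&gbo->bo, &gbo->placement, &ctx);
        ttm_bo_unreserve(&gbo->bo);     /* released on every path */
        return ret;
}
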
bo                 36 drivers/gpu/drm/drm_vram_mm_helper.c static struct ttm_tt *bo_driver_ttm_tt_create(struct ttm_buffer_object *bo,
bo                 48 drivers/gpu/drm/drm_vram_mm_helper.c 	ret = ttm_tt_init(tt, bo, page_flags);
bo                 82 drivers/gpu/drm/drm_vram_mm_helper.c static void bo_driver_evict_flags(struct ttm_buffer_object *bo,
bo                 85 drivers/gpu/drm/drm_vram_mm_helper.c 	struct drm_vram_mm *vmm = drm_vram_mm_of_bdev(bo->bdev);
bo                 88 drivers/gpu/drm/drm_vram_mm_helper.c 		vmm->funcs->evict_flags(bo, placement);
bo                 91 drivers/gpu/drm/drm_vram_mm_helper.c static int bo_driver_verify_access(struct ttm_buffer_object *bo,
bo                 94 drivers/gpu/drm/drm_vram_mm_helper.c 	struct drm_vram_mm *vmm = drm_vram_mm_of_bdev(bo->bdev);
bo                 98 drivers/gpu/drm/drm_vram_mm_helper.c 	return vmm->funcs->verify_access(bo, filp);
bo                 59 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 	struct drm_etnaviv_gem_submit_bo *bo;
bo                 65 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 	for (i = 0, bo = submit_bos; i < nr_bos; i++, bo++) {
bo                 68 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 		if (bo->flags & BO_INVALID_FLAGS) {
bo                 69 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 			DRM_ERROR("invalid flags: %x\n", bo->flags);
bo                 74 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 		submit->bos[i].flags = bo->flags;
bo                 76 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 			if (bo->presumed < ETNAVIV_SOFTPIN_START_ADDRESS) {
bo                 81 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 			submit->bos[i].va = bo->presumed;
bo                 87 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 		obj = idr_find(&file->object_idr, bo->handle);
bo                 90 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 				  bo->handle, i);
bo                181 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 		struct etnaviv_gem_submit_bo *bo = &submit->bos[i];
bo                182 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 		struct dma_resv *robj = bo->obj->base.resv;
bo                184 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 		if (!(bo->flags & ETNA_SUBMIT_BO_WRITE)) {
bo                193 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 		if (bo->flags & ETNA_SUBMIT_BO_WRITE) {
bo                194 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 			ret = dma_resv_get_fences_rcu(robj, &bo->excl,
bo                195 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 								&bo->nr_shared,
bo                196 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 								&bo->shared);
bo                200 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 			bo->excl = dma_resv_get_excl_rcu(robj);
bo                258 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 	struct etnaviv_gem_submit_bo **bo)
bo                266 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 	*bo = &submit->bos[idx];
bo                286 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 		struct etnaviv_gem_submit_bo *bo;
bo                309 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 		ret = submit_bo(submit, r->reloc_idx, &bo);
bo                313 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 		if (r->reloc_offset > bo->obj->base.size - sizeof(*ptr)) {
bo                318 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 		ptr[off] = bo->mapping->iova + r->reloc_offset;
bo                333 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 		struct etnaviv_gem_submit_bo *bo;
bo                336 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 		ret = submit_bo(submit, r->read_idx, &bo);
bo                346 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 		if (r->read_offset >= bo->obj->base.size - sizeof(u32)) {
bo                366 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 		submit->pmrs[i].bo_vma = etnaviv_gem_vmap(&bo->obj->base);
bo                543 drivers/gpu/drm/etnaviv/etnaviv_perfmon.c 	u32 *bo = pmr->bo_vma;
bo                550 drivers/gpu/drm/etnaviv/etnaviv_perfmon.c 	*(bo + pmr->offset) = val;
bo                 39 drivers/gpu/drm/etnaviv/etnaviv_sched.c 		struct etnaviv_gem_submit_bo *bo = &submit->bos[i];
bo                 42 drivers/gpu/drm/etnaviv/etnaviv_sched.c 		if (bo->excl) {
bo                 43 drivers/gpu/drm/etnaviv/etnaviv_sched.c 			fence = bo->excl;
bo                 44 drivers/gpu/drm/etnaviv/etnaviv_sched.c 			bo->excl = NULL;
bo                 52 drivers/gpu/drm/etnaviv/etnaviv_sched.c 		for (j = 0; j < bo->nr_shared; j++) {
bo                 53 drivers/gpu/drm/etnaviv/etnaviv_sched.c 			if (!bo->shared[j])
bo                 56 drivers/gpu/drm/etnaviv/etnaviv_sched.c 			fence = bo->shared[j];
bo                 57 drivers/gpu/drm/etnaviv/etnaviv_sched.c 			bo->shared[j] = NULL;
bo                 64 drivers/gpu/drm/etnaviv/etnaviv_sched.c 		kfree(bo->shared);
bo                 65 drivers/gpu/drm/etnaviv/etnaviv_sched.c 		bo->nr_shared = 0;
bo                 66 drivers/gpu/drm/etnaviv/etnaviv_sched.c 		bo->shared = NULL;
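
Read together, the etnaviv_gem_submit.c and etnaviv_sched.c lines show both halves of implicit synchronization: at submit time a writer snapshots every fence on the BO's reservation object while a reader takes only the exclusive fence, and the scheduler later hands those saved pointers out one dependency at a time, clearing each slot as it goes. The producer half, condensed from the lines above:

/* per-BO fence snapshot during submit */
struct dma_resv *robj = bo->obj->base.resv;
int ret;

if (bo->flags & ETNA_SUBMIT_BO_WRITE)
        /* a writer must order against readers and writers alike */
        ret = dma_resv_get_fences_rcu(robj, &bo->excl,
                                      &bo->nr_shared, &bo->shared);
else
        /* a reader only orders against the most recent writer */
        bo->excl = dma_resv_get_excl_rcu(robj);
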
bo                 86 drivers/gpu/drm/gma500/psb_intel_drv.h 	 size_t(*bo_offset) (struct drm_device *dev, void *bo);
bo                128 drivers/gpu/drm/hisilicon/hibmc/hibmc_drm_fbdev.c 	info->fix.smem_start = gbo->bo.mem.bus.offset + gbo->bo.mem.bus.base;
bo                 69 drivers/gpu/drm/hisilicon/hibmc/hibmc_ttm.c 	*obj = &gbo->bo.base;
bo               1312 drivers/gpu/drm/i915/i915_gpu_error.c 	struct drm_i915_error_object **bo;
bo               1321 drivers/gpu/drm/i915/i915_gpu_error.c 	bo = kmalloc_array(max, sizeof(*bo), ATOMIC_MAYFAIL);
bo               1322 drivers/gpu/drm/i915/i915_gpu_error.c 	if (!bo) {
bo               1324 drivers/gpu/drm/i915/i915_gpu_error.c 		max = min_t(long, max, PAGE_SIZE / sizeof(*bo));
bo               1325 drivers/gpu/drm/i915/i915_gpu_error.c 		bo = kmalloc_array(max, sizeof(*bo), ATOMIC_MAYFAIL);
bo               1327 drivers/gpu/drm/i915/i915_gpu_error.c 	if (!bo)
bo               1332 drivers/gpu/drm/i915/i915_gpu_error.c 		capture = capture_vma(capture, c->vma, &bo[count]);
bo               1337 drivers/gpu/drm/i915/i915_gpu_error.c 	ee->user_bo = bo;
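
The i915_gpu_error.c lines run in an error-capture context that must not sleep, so the user-BO array is first requested at full size with the driver-local ATOMIC_MAYFAIL gfp shorthand and, if that fails, retried with the count clamped so the array fits in one page. The same shape, annotated; the bail-out on the second failure is illustrative.

bo = kmalloc_array(max, sizeof(*bo), ATOMIC_MAYFAIL);
if (!bo) {
        /* shrink the request rather than sleep or enter reclaim */
        max = min_t(long, max, PAGE_SIZE / sizeof(*bo));
        bo = kmalloc_array(max, sizeof(*bo), ATOMIC_MAYFAIL);
}
if (!bo)
        return NULL;    /* illustrative: the capture simply records less */
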
bo               1505 drivers/gpu/drm/i915/i915_perf.c 	struct drm_i915_gem_object *bo;
bo               1520 drivers/gpu/drm/i915/i915_perf.c 	bo = i915_gem_object_create_shmem(dev_priv, OA_BUFFER_SIZE);
bo               1521 drivers/gpu/drm/i915/i915_perf.c 	if (IS_ERR(bo)) {
bo               1523 drivers/gpu/drm/i915/i915_perf.c 		ret = PTR_ERR(bo);
bo               1527 drivers/gpu/drm/i915/i915_perf.c 	i915_gem_object_set_cache_coherency(bo, I915_CACHE_LLC);
bo               1530 drivers/gpu/drm/i915/i915_perf.c 	vma = i915_gem_object_ggtt_pin(bo, NULL, 0, SZ_16M, 0);
bo               1538 drivers/gpu/drm/i915/i915_perf.c 		i915_gem_object_pin_map(bo, I915_MAP_WB);
bo               1554 drivers/gpu/drm/i915/i915_perf.c 	i915_gem_object_put(bo);
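
i915_perf.c assembles the OA buffer in four steps visible above: create a shmem-backed object, mark it LLC cache-coherent, pin it into the global GTT, and map it write-back for the CPU, dropping the object reference on failure. A sketch with the error unwinds abbreviated; OA_BUFFER_SIZE and the SZ_16M alignment argument are taken from the calls above, not invented.

static int alloc_oa_buffer_sketch(struct drm_i915_private *i915)
{
        struct drm_i915_gem_object *bo;
        struct i915_vma *vma;
        void *vaddr;

        bo = i915_gem_object_create_shmem(i915, OA_BUFFER_SIZE);
        if (IS_ERR(bo))
                return PTR_ERR(bo);

        i915_gem_object_set_cache_coherency(bo, I915_CACHE_LLC);

        vma = i915_gem_object_ggtt_pin(bo, NULL, 0, SZ_16M, 0);
        if (IS_ERR(vma)) {
                i915_gem_object_put(bo);
                return PTR_ERR(vma);
        }

        vaddr = i915_gem_object_pin_map(bo, I915_MAP_WB);
        if (IS_ERR(vaddr)) {
                i915_gem_object_put(bo);        /* vma unpin omitted */
                return PTR_ERR(vaddr);
        }

        return 0;
}
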
bo                 24 drivers/gpu/drm/lima/lima_gem.c 	struct lima_bo *bo;
bo                 27 drivers/gpu/drm/lima/lima_gem.c 	bo = lima_bo_create(ldev, size, flags, NULL);
bo                 28 drivers/gpu/drm/lima/lima_gem.c 	if (IS_ERR(bo))
bo                 29 drivers/gpu/drm/lima/lima_gem.c 		return PTR_ERR(bo);
bo                 31 drivers/gpu/drm/lima/lima_gem.c 	err = drm_gem_handle_create(file, &bo->gem, handle);
bo                 34 drivers/gpu/drm/lima/lima_gem.c 	drm_gem_object_put_unlocked(&bo->gem);
bo                 41 drivers/gpu/drm/lima/lima_gem.c 	struct lima_bo *bo = to_lima_bo(obj);
bo                 43 drivers/gpu/drm/lima/lima_gem.c 	if (!list_empty(&bo->va))
bo                 46 drivers/gpu/drm/lima/lima_gem.c 	lima_bo_destroy(bo);
bo                 51 drivers/gpu/drm/lima/lima_gem.c 	struct lima_bo *bo = to_lima_bo(obj);
bo                 55 drivers/gpu/drm/lima/lima_gem.c 	return lima_vm_bo_add(vm, bo, true);
bo                 60 drivers/gpu/drm/lima/lima_gem.c 	struct lima_bo *bo = to_lima_bo(obj);
bo                 64 drivers/gpu/drm/lima/lima_gem.c 	lima_vm_bo_del(vm, bo);
bo                 70 drivers/gpu/drm/lima/lima_gem.c 	struct lima_bo *bo;
bo                 79 drivers/gpu/drm/lima/lima_gem.c 	bo = to_lima_bo(obj);
bo                 81 drivers/gpu/drm/lima/lima_gem.c 	*va = lima_vm_get_va(vm, bo);
bo                 95 drivers/gpu/drm/lima/lima_gem.c 	struct lima_bo *bo = to_lima_bo(obj);
bo                101 drivers/gpu/drm/lima/lima_gem.c 	pfn = __pfn_to_pfn_t(page_to_pfn(bo->pages[pgoff]), PFN_DEV);
bo                133 drivers/gpu/drm/lima/lima_gem.c static int lima_gem_sync_bo(struct lima_sched_task *task, struct lima_bo *bo,
bo                139 drivers/gpu/drm/lima/lima_gem.c 		err = dma_resv_reserve_shared(bo->gem.resv, 1);
bo                148 drivers/gpu/drm/lima/lima_gem.c 	return drm_gem_fence_array_add_implicit(&task->deps, &bo->gem, write);
bo                249 drivers/gpu/drm/lima/lima_gem.c 		struct lima_bo *bo;
bo                257 drivers/gpu/drm/lima/lima_gem.c 		bo = to_lima_bo(obj);
bo                262 drivers/gpu/drm/lima/lima_gem.c 		err = lima_vm_bo_add(vm, bo, false);
bo                268 drivers/gpu/drm/lima/lima_gem.c 		bos[i] = bo;
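
lima_gem.c's per-BO sync path above does two things before a task is queued: when the task only reads the BO it reserves a shared-fence slot up front, then it feeds the BO's existing fences into the task's dependency array. A sketch with the explicit-fence branch omitted:

static int lima_sync_bo_sketch(struct lima_sched_task *task,
                               struct lima_bo *bo, bool write)
{
        int err;

        if (!write) {
                /* make room now for the read fence added after submit */
                err = dma_resv_reserve_shared(bo->gem.resv, 1);
                if (err)
                        return err;
        }

        /* collect the BO's current fences as scheduler dependencies */
        return drm_gem_fence_array_add_implicit(&task->deps, &bo->gem,
                                                write);
}
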
bo                 19 drivers/gpu/drm/lima/lima_gem_prime.c 	struct lima_bo *bo;
bo                 21 drivers/gpu/drm/lima/lima_gem_prime.c 	bo = lima_bo_create(ldev, attach->dmabuf->size, 0, sgt);
bo                 22 drivers/gpu/drm/lima/lima_gem_prime.c 	if (IS_ERR(bo))
bo                 23 drivers/gpu/drm/lima/lima_gem_prime.c 		return ERR_CAST(bo);
bo                 25 drivers/gpu/drm/lima/lima_gem_prime.c 	return &bo->gem;
bo                 30 drivers/gpu/drm/lima/lima_gem_prime.c 	struct lima_bo *bo = to_lima_bo(obj);
bo                 33 drivers/gpu/drm/lima/lima_gem_prime.c 	return drm_prime_pages_to_sg(bo->pages, npages);
bo                 10 drivers/gpu/drm/lima/lima_object.c void lima_bo_destroy(struct lima_bo *bo)
bo                 12 drivers/gpu/drm/lima/lima_object.c 	if (bo->sgt) {
bo                 13 drivers/gpu/drm/lima/lima_object.c 		kfree(bo->pages);
bo                 14 drivers/gpu/drm/lima/lima_object.c 		drm_prime_gem_destroy(&bo->gem, bo->sgt);
bo                 16 drivers/gpu/drm/lima/lima_object.c 		if (bo->pages_dma_addr) {
bo                 17 drivers/gpu/drm/lima/lima_object.c 			int i, npages = bo->gem.size >> PAGE_SHIFT;
bo                 20 drivers/gpu/drm/lima/lima_object.c 				if (bo->pages_dma_addr[i])
bo                 21 drivers/gpu/drm/lima/lima_object.c 					dma_unmap_page(bo->gem.dev->dev,
bo                 22 drivers/gpu/drm/lima/lima_object.c 						       bo->pages_dma_addr[i],
bo                 27 drivers/gpu/drm/lima/lima_object.c 		if (bo->pages)
bo                 28 drivers/gpu/drm/lima/lima_object.c 			drm_gem_put_pages(&bo->gem, bo->pages, true, true);
bo                 31 drivers/gpu/drm/lima/lima_object.c 	kfree(bo->pages_dma_addr);
bo                 32 drivers/gpu/drm/lima/lima_object.c 	drm_gem_object_release(&bo->gem);
bo                 33 drivers/gpu/drm/lima/lima_object.c 	kfree(bo);
bo                 38 drivers/gpu/drm/lima/lima_object.c 	struct lima_bo *bo;
bo                 43 drivers/gpu/drm/lima/lima_object.c 	bo = kzalloc(sizeof(*bo), GFP_KERNEL);
bo                 44 drivers/gpu/drm/lima/lima_object.c 	if (!bo)
bo                 47 drivers/gpu/drm/lima/lima_object.c 	mutex_init(&bo->lock);
bo                 48 drivers/gpu/drm/lima/lima_object.c 	INIT_LIST_HEAD(&bo->va);
bo                 50 drivers/gpu/drm/lima/lima_object.c 	err = drm_gem_object_init(dev->ddev, &bo->gem, size);
bo                 52 drivers/gpu/drm/lima/lima_object.c 		kfree(bo);
bo                 56 drivers/gpu/drm/lima/lima_object.c 	return bo;
bo                 64 drivers/gpu/drm/lima/lima_object.c 	struct lima_bo *bo, *ret;
bo                 66 drivers/gpu/drm/lima/lima_object.c 	bo = lima_bo_create_struct(dev, size, flags);
bo                 67 drivers/gpu/drm/lima/lima_object.c 	if (IS_ERR(bo))
bo                 68 drivers/gpu/drm/lima/lima_object.c 		return bo;
bo                 70 drivers/gpu/drm/lima/lima_object.c 	npages = bo->gem.size >> PAGE_SHIFT;
bo                 72 drivers/gpu/drm/lima/lima_object.c 	bo->pages_dma_addr = kcalloc(npages, sizeof(dma_addr_t), GFP_KERNEL);
bo                 73 drivers/gpu/drm/lima/lima_object.c 	if (!bo->pages_dma_addr) {
bo                 79 drivers/gpu/drm/lima/lima_object.c 		bo->sgt = sgt;
bo                 81 drivers/gpu/drm/lima/lima_object.c 		bo->pages = kcalloc(npages, sizeof(*bo->pages), GFP_KERNEL);
bo                 82 drivers/gpu/drm/lima/lima_object.c 		if (!bo->pages) {
bo                 88 drivers/gpu/drm/lima/lima_object.c 			sgt, bo->pages, bo->pages_dma_addr, npages);
bo                 94 drivers/gpu/drm/lima/lima_object.c 		mapping_set_gfp_mask(bo->gem.filp->f_mapping, GFP_DMA32);
bo                 95 drivers/gpu/drm/lima/lima_object.c 		bo->pages = drm_gem_get_pages(&bo->gem);
bo                 96 drivers/gpu/drm/lima/lima_object.c 		if (IS_ERR(bo->pages)) {
bo                 97 drivers/gpu/drm/lima/lima_object.c 			ret = ERR_CAST(bo->pages);
bo                 98 drivers/gpu/drm/lima/lima_object.c 			bo->pages = NULL;
bo                103 drivers/gpu/drm/lima/lima_object.c 			dma_addr_t addr = dma_map_page(dev->dev, bo->pages[i], 0,
bo                109 drivers/gpu/drm/lima/lima_object.c 			bo->pages_dma_addr[i] = addr;
bo                114 drivers/gpu/drm/lima/lima_object.c 	return bo;
bo                117 drivers/gpu/drm/lima/lima_object.c 	lima_bo_destroy(bo);
bo                 31 drivers/gpu/drm/lima/lima_object.h void lima_bo_destroy(struct lima_bo *bo);
bo                 32 drivers/gpu/drm/lima/lima_object.h void *lima_bo_vmap(struct lima_bo *bo);
bo                 33 drivers/gpu/drm/lima/lima_object.h void lima_bo_vunmap(struct lima_bo *bo);
bo                 86 drivers/gpu/drm/lima/lima_vm.c lima_vm_bo_find(struct lima_vm *vm, struct lima_bo *bo)
bo                 90 drivers/gpu/drm/lima/lima_vm.c 	list_for_each_entry(bo_va, &bo->va, list) {
bo                100 drivers/gpu/drm/lima/lima_vm.c int lima_vm_bo_add(struct lima_vm *vm, struct lima_bo *bo, bool create)
bo                105 drivers/gpu/drm/lima/lima_vm.c 	mutex_lock(&bo->lock);
bo                107 drivers/gpu/drm/lima/lima_vm.c 	bo_va = lima_vm_bo_find(vm, bo);
bo                110 drivers/gpu/drm/lima/lima_vm.c 		mutex_unlock(&bo->lock);
bo                116 drivers/gpu/drm/lima/lima_vm.c 		mutex_unlock(&bo->lock);
bo                131 drivers/gpu/drm/lima/lima_vm.c 	err = drm_mm_insert_node(&vm->mm, &bo_va->node, bo->gem.size);
bo                135 drivers/gpu/drm/lima/lima_vm.c 	err = lima_vm_map_page_table(vm, bo->pages_dma_addr, bo_va->node.start,
bo                142 drivers/gpu/drm/lima/lima_vm.c 	list_add_tail(&bo_va->list, &bo->va);
bo                144 drivers/gpu/drm/lima/lima_vm.c 	mutex_unlock(&bo->lock);
bo                153 drivers/gpu/drm/lima/lima_vm.c 	mutex_unlock(&bo->lock);
bo                157 drivers/gpu/drm/lima/lima_vm.c void lima_vm_bo_del(struct lima_vm *vm, struct lima_bo *bo)
bo                161 drivers/gpu/drm/lima/lima_vm.c 	mutex_lock(&bo->lock);
bo                163 drivers/gpu/drm/lima/lima_vm.c 	bo_va = lima_vm_bo_find(vm, bo);
bo                165 drivers/gpu/drm/lima/lima_vm.c 		mutex_unlock(&bo->lock);
bo                180 drivers/gpu/drm/lima/lima_vm.c 	mutex_unlock(&bo->lock);
bo                185 drivers/gpu/drm/lima/lima_vm.c u32 lima_vm_get_va(struct lima_vm *vm, struct lima_bo *bo)
bo                190 drivers/gpu/drm/lima/lima_vm.c 	mutex_lock(&bo->lock);
bo                192 drivers/gpu/drm/lima/lima_vm.c 	bo_va = lima_vm_bo_find(vm, bo);
bo                195 drivers/gpu/drm/lima/lima_vm.c 	mutex_unlock(&bo->lock);
bo                 41 drivers/gpu/drm/lima/lima_vm.h int lima_vm_bo_add(struct lima_vm *vm, struct lima_bo *bo, bool create);
bo                 42 drivers/gpu/drm/lima/lima_vm.h void lima_vm_bo_del(struct lima_vm *vm, struct lima_bo *bo);
bo                 44 drivers/gpu/drm/lima/lima_vm.h u32 lima_vm_get_va(struct lima_vm *vm, struct lima_bo *bo);
bo               1137 drivers/gpu/drm/msm/adreno/a5xx_gpu.c 	struct drm_gem_object *bo;
bo               1151 drivers/gpu/drm/msm/adreno/a5xx_gpu.c 		&dumper->bo, &dumper->iova);
bo               1154 drivers/gpu/drm/msm/adreno/a5xx_gpu.c 		msm_gem_object_set_name(dumper->bo, "crashdump");
bo               1251 drivers/gpu/drm/msm/adreno/a5xx_gpu.c 		msm_gem_kernel_put(dumper.bo, gpu->aspace, true);
bo               1259 drivers/gpu/drm/msm/adreno/a5xx_gpu.c 	msm_gem_kernel_put(dumper.bo, gpu->aspace, true);
bo                229 drivers/gpu/drm/msm/adreno/a5xx_preempt.c 	struct drm_gem_object *bo = NULL;
bo                234 drivers/gpu/drm/msm/adreno/a5xx_preempt.c 		MSM_BO_UNCACHED, gpu->aspace, &bo, &iova);
bo                239 drivers/gpu/drm/msm/adreno/a5xx_preempt.c 	msm_gem_object_set_name(bo, "preempt");
bo                241 drivers/gpu/drm/msm/adreno/a5xx_preempt.c 	a5xx_gpu->preempt_bo[ring->id] = bo;
bo                878 drivers/gpu/drm/msm/adreno/a6xx_gmu.c static void a6xx_gmu_memory_free(struct a6xx_gmu *gmu, struct a6xx_gmu_bo *bo)
bo                883 drivers/gpu/drm/msm/adreno/a6xx_gmu.c 	if (IS_ERR_OR_NULL(bo))
bo                886 drivers/gpu/drm/msm/adreno/a6xx_gmu.c 	count = bo->size >> PAGE_SHIFT;
bo                887 drivers/gpu/drm/msm/adreno/a6xx_gmu.c 	iova = bo->iova;
bo                891 drivers/gpu/drm/msm/adreno/a6xx_gmu.c 		__free_pages(bo->pages[i], 0);
bo                894 drivers/gpu/drm/msm/adreno/a6xx_gmu.c 	kfree(bo->pages);
bo                895 drivers/gpu/drm/msm/adreno/a6xx_gmu.c 	kfree(bo);
bo                901 drivers/gpu/drm/msm/adreno/a6xx_gmu.c 	struct a6xx_gmu_bo *bo;
bo                904 drivers/gpu/drm/msm/adreno/a6xx_gmu.c 	bo = kzalloc(sizeof(*bo), GFP_KERNEL);
bo                905 drivers/gpu/drm/msm/adreno/a6xx_gmu.c 	if (!bo)
bo                908 drivers/gpu/drm/msm/adreno/a6xx_gmu.c 	bo->size = PAGE_ALIGN(size);
bo                910 drivers/gpu/drm/msm/adreno/a6xx_gmu.c 	count = bo->size >> PAGE_SHIFT;
bo                912 drivers/gpu/drm/msm/adreno/a6xx_gmu.c 	bo->pages = kcalloc(count, sizeof(struct page *), GFP_KERNEL);
bo                913 drivers/gpu/drm/msm/adreno/a6xx_gmu.c 	if (!bo->pages) {
bo                914 drivers/gpu/drm/msm/adreno/a6xx_gmu.c 		kfree(bo);
bo                919 drivers/gpu/drm/msm/adreno/a6xx_gmu.c 		bo->pages[i] = alloc_page(GFP_KERNEL);
bo                920 drivers/gpu/drm/msm/adreno/a6xx_gmu.c 		if (!bo->pages[i])
bo                924 drivers/gpu/drm/msm/adreno/a6xx_gmu.c 	bo->iova = gmu->uncached_iova_base;
bo                928 drivers/gpu/drm/msm/adreno/a6xx_gmu.c 			bo->iova + (PAGE_SIZE * i),
bo                929 drivers/gpu/drm/msm/adreno/a6xx_gmu.c 			page_to_phys(bo->pages[i]), PAGE_SIZE,
bo                937 drivers/gpu/drm/msm/adreno/a6xx_gmu.c 					bo->iova + (PAGE_SIZE * i),
bo                944 drivers/gpu/drm/msm/adreno/a6xx_gmu.c 	bo->virt = vmap(bo->pages, count, VM_IOREMAP,
bo                946 drivers/gpu/drm/msm/adreno/a6xx_gmu.c 	if (!bo->virt)
bo                952 drivers/gpu/drm/msm/adreno/a6xx_gmu.c 	return bo;
bo                956 drivers/gpu/drm/msm/adreno/a6xx_gmu.c 		if (bo->pages[i])
bo                957 drivers/gpu/drm/msm/adreno/a6xx_gmu.c 			__free_pages(bo->pages[i], 0);
bo                960 drivers/gpu/drm/msm/adreno/a6xx_gmu.c 	kfree(bo->pages);
bo                961 drivers/gpu/drm/msm/adreno/a6xx_gmu.c 	kfree(bo);
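
a6xx_gmu.c manages GMU-private memory without GEM: it allocates a page-pointer array, maps each page into the GMU's IOMMU domain at a fixed uncached IOVA, and vmap()s the set for CPU access. A trimmed sketch of the allocation path; the iommu_map() protection flags, the gmu->domain field name, and the write-combine pgprot are assumptions, since those arguments are truncated in the listing.

bo->size = PAGE_ALIGN(size);
count = bo->size >> PAGE_SHIFT;

bo->pages = kcalloc(count, sizeof(struct page *), GFP_KERNEL);
for (i = 0; i < count; i++)
        bo->pages[i] = alloc_page(GFP_KERNEL);  /* NULL check omitted */

bo->iova = gmu->uncached_iova_base;
for (i = 0; i < count; i++)
        iommu_map(gmu->domain, bo->iova + (PAGE_SIZE * i),
                  page_to_phys(bo->pages[i]), PAGE_SIZE,
                  IOMMU_READ | IOMMU_WRITE);    /* prot assumed */

bo->virt = vmap(bo->pages, count, VM_IOREMAP,
                pgprot_writecombine(PAGE_KERNEL)); /* pgprot assumed */
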
bo                 74 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c 	struct drm_gem_object *bo;
bo                117 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c 		&dumper->bo, &dumper->iova);
bo                120 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c 		msm_gem_object_set_name(dumper->bo, "crashdump");
bo                897 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c 		msm_gem_kernel_put(dumper.bo, gpu->aspace, true);
bo                316 drivers/gpu/drm/msm/adreno/adreno_gpu.c 	struct drm_gem_object *bo;
bo                320 drivers/gpu/drm/msm/adreno/adreno_gpu.c 		MSM_BO_UNCACHED | MSM_BO_GPU_READONLY, gpu->aspace, &bo, iova);
bo                327 drivers/gpu/drm/msm/adreno/adreno_gpu.c 	msm_gem_put_vaddr(bo);
bo                329 drivers/gpu/drm/msm/adreno/adreno_gpu.c 	return bo;
bo                316 drivers/gpu/drm/msm/msm_drv.h 		struct drm_gem_object **bo, uint64_t *iova);
bo                319 drivers/gpu/drm/msm/msm_drv.h 		struct drm_gem_object **bo, uint64_t *iova);
bo                320 drivers/gpu/drm/msm/msm_drv.h void msm_gem_kernel_put(struct drm_gem_object *bo,
bo                327 drivers/gpu/drm/msm/msm_drv.h void msm_gem_object_set_name(struct drm_gem_object *bo, const char *fmt, ...);
bo                215 drivers/gpu/drm/msm/msm_fb.c 	struct drm_gem_object *bo;
bo                222 drivers/gpu/drm/msm/msm_fb.c 	bo = msm_gem_new(dev, size, MSM_BO_SCANOUT | MSM_BO_WC | MSM_BO_STOLEN);
bo                223 drivers/gpu/drm/msm/msm_fb.c 	if (IS_ERR(bo)) {
bo                226 drivers/gpu/drm/msm/msm_fb.c 		bo = msm_gem_new(dev, size, MSM_BO_SCANOUT | MSM_BO_WC);
bo                228 drivers/gpu/drm/msm/msm_fb.c 	if (IS_ERR(bo)) {
bo                230 drivers/gpu/drm/msm/msm_fb.c 		return ERR_CAST(bo);
bo                233 drivers/gpu/drm/msm/msm_fb.c 	msm_gem_object_set_name(bo, "stolenfb");
bo                235 drivers/gpu/drm/msm/msm_fb.c 	fb = msm_framebuffer_init(dev, &mode_cmd, &bo);
bo                241 drivers/gpu/drm/msm/msm_fb.c 		drm_gem_object_put_unlocked(bo);
bo                 48 drivers/gpu/drm/msm/msm_fbdev.c 	struct drm_gem_object *bo = msm_framebuffer_bo(fbdev->fb, 0);
bo                 51 drivers/gpu/drm/msm/msm_fbdev.c 	ret = drm_gem_mmap_obj(bo, bo->size, vma);
bo                 57 drivers/gpu/drm/msm/msm_fbdev.c 	return msm_gem_mmap_obj(bo, vma);
bo                 67 drivers/gpu/drm/msm/msm_fbdev.c 	struct drm_gem_object *bo;
bo                 88 drivers/gpu/drm/msm/msm_fbdev.c 	bo = msm_framebuffer_bo(fb, 0);
bo                 97 drivers/gpu/drm/msm/msm_fbdev.c 	ret = msm_gem_get_and_pin_iova(bo, priv->kms->aspace, &paddr);
bo                121 drivers/gpu/drm/msm/msm_fbdev.c 	fbi->screen_base = msm_gem_get_vaddr(bo);
bo                126 drivers/gpu/drm/msm/msm_fbdev.c 	fbi->screen_size = bo->size;
bo                128 drivers/gpu/drm/msm/msm_fbdev.c 	fbi->fix.smem_len = bo->size;
bo                207 drivers/gpu/drm/msm/msm_fbdev.c 		struct drm_gem_object *bo =
bo                209 drivers/gpu/drm/msm/msm_fbdev.c 		msm_gem_put_vaddr(bo);
bo               1158 drivers/gpu/drm/msm/msm_gem.c 		struct drm_gem_object **bo, uint64_t *iova, bool locked)
bo               1180 drivers/gpu/drm/msm/msm_gem.c 	if (bo)
bo               1181 drivers/gpu/drm/msm/msm_gem.c 		*bo = obj;
bo               1196 drivers/gpu/drm/msm/msm_gem.c 		struct drm_gem_object **bo, uint64_t *iova)
bo               1198 drivers/gpu/drm/msm/msm_gem.c 	return _msm_gem_kernel_new(dev, size, flags, aspace, bo, iova, false);
bo               1203 drivers/gpu/drm/msm/msm_gem.c 		struct drm_gem_object **bo, uint64_t *iova)
bo               1205 drivers/gpu/drm/msm/msm_gem.c 	return _msm_gem_kernel_new(dev, size, flags, aspace, bo, iova, true);
bo               1208 drivers/gpu/drm/msm/msm_gem.c void msm_gem_kernel_put(struct drm_gem_object *bo,
bo               1211 drivers/gpu/drm/msm/msm_gem.c 	if (IS_ERR_OR_NULL(bo))
bo               1214 drivers/gpu/drm/msm/msm_gem.c 	msm_gem_put_vaddr(bo);
bo               1215 drivers/gpu/drm/msm/msm_gem.c 	msm_gem_unpin_iova(bo, aspace);
bo               1218 drivers/gpu/drm/msm/msm_gem.c 		drm_gem_object_put(bo);
bo               1220 drivers/gpu/drm/msm/msm_gem.c 		drm_gem_object_put_unlocked(bo);
bo               1223 drivers/gpu/drm/msm/msm_gem.c void msm_gem_object_set_name(struct drm_gem_object *bo, const char *fmt, ...)
bo               1225 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(bo);
bo                 30 drivers/gpu/drm/msm/msm_ringbuffer.c 		MSM_BO_WC, gpu->aspace, &ring->bo, &ring->iova);
bo                 38 drivers/gpu/drm/msm/msm_ringbuffer.c 	msm_gem_object_set_name(ring->bo, "ring%d", id);
bo                 68 drivers/gpu/drm/msm/msm_ringbuffer.c 	msm_gem_kernel_put(ring->bo, ring->gpu->aspace, false);
bo                 39 drivers/gpu/drm/msm/msm_ringbuffer.h 	struct drm_gem_object *bo;
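
msm_gem.c pairs msm_gem_kernel_new(), which returns the kernel vaddr and fills in the BO pointer and iova, with msm_gem_kernel_put(), which unmaps, unpins and drops the object in one call, exactly as the ringbuffer code above uses them. A sketch of that lifecycle; the 0x8000 size is illustrative.

static int kernel_bo_lifecycle_sketch(struct msm_gpu *gpu, int id)
{
        struct drm_gem_object *bo;
        uint64_t iova;
        void *vaddr;

        vaddr = msm_gem_kernel_new(gpu->dev, 0x8000, MSM_BO_WC,
                                   gpu->aspace, &bo, &iova);
        if (IS_ERR(vaddr))
                return PTR_ERR(vaddr);

        msm_gem_object_set_name(bo, "ring%d", id);

        /* ... write commands through vaddr, hand iova to the GPU ... */

        msm_gem_kernel_put(bo, gpu->aspace, false);  /* unlocked put */
        return 0;
}
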
bo                848 drivers/gpu/drm/nouveau/dispnv04/crtc.c 	nv_crtc->fb.offset = fb->nvbo->bo.offset;
bo               1016 drivers/gpu/drm/nouveau/dispnv04/crtc.c 	nv_crtc->cursor.offset = nv_crtc->cursor.nvbo->bo.offset;
bo               1172 drivers/gpu/drm/nouveau/dispnv04/crtc.c 	ret = ttm_bo_reserve(&new_bo->bo, true, false, NULL);
bo               1179 drivers/gpu/drm/nouveau/dispnv04/crtc.c 		ttm_bo_unreserve(&new_bo->bo);
bo               1184 drivers/gpu/drm/nouveau/dispnv04/crtc.c 		ttm_bo_unreserve(&new_bo->bo);
bo               1186 drivers/gpu/drm/nouveau/dispnv04/crtc.c 		ret = ttm_bo_reserve(&old_bo->bo, true, false, NULL);
bo               1194 drivers/gpu/drm/nouveau/dispnv04/crtc.c 		  new_bo->bo.offset };
bo               1226 drivers/gpu/drm/nouveau/dispnv04/crtc.c 	ttm_bo_unreserve(&old_bo->bo);
bo               1234 drivers/gpu/drm/nouveau/dispnv04/crtc.c 	ttm_bo_unreserve(&old_bo->bo);
bo                154 drivers/gpu/drm/nouveau/dispnv04/disp.c 			nv_crtc->cursor.set_offset(nv_crtc, nv_crtc->cursor.nvbo->bo.offset);
bo                153 drivers/gpu/drm/nouveau/dispnv04/overlay.c 	nvif_wr32(dev, NV_PVIDEO_OFFSET_BUFF(flip), nv_fb->nvbo->bo.offset);
bo                175 drivers/gpu/drm/nouveau/dispnv04/overlay.c 			nv_fb->nvbo->bo.offset + fb->offsets[1]);
bo                399 drivers/gpu/drm/nouveau/dispnv04/overlay.c 			  nv_fb->nvbo->bo.offset);
bo                113 drivers/gpu/drm/nouveau/dispnv50/base507c.c base507c_ntfy_wait_begun(struct nouveau_bo *bo, u32 offset,
bo                117 drivers/gpu/drm/nouveau/dispnv50/base507c.c 		u32 data = nouveau_bo_rd32(bo, offset / 4);
bo                149 drivers/gpu/drm/nouveau/dispnv50/base507c.c base507c_ntfy_reset(struct nouveau_bo *bo, u32 offset)
bo                151 drivers/gpu/drm/nouveau/dispnv50/base507c.c 	nouveau_bo_wr32(bo, offset / 4, 0x00000000);
bo                277 drivers/gpu/drm/nouveau/dispnv50/base507c.c 			       disp->sync->bo.offset, &wndw->wndw);
bo                 47 drivers/gpu/drm/nouveau/dispnv50/core507d.c core507d_ntfy_wait_done(struct nouveau_bo *bo, u32 offset,
bo                 51 drivers/gpu/drm/nouveau/dispnv50/core507d.c 		if (nouveau_bo_rd32(bo, offset / 4))
bo                 59 drivers/gpu/drm/nouveau/dispnv50/core507d.c core507d_ntfy_init(struct nouveau_bo *bo, u32 offset)
bo                 61 drivers/gpu/drm/nouveau/dispnv50/core507d.c 	nouveau_bo_wr32(bo, offset / 4, 0x00000000);
bo                102 drivers/gpu/drm/nouveau/dispnv50/core507d.c 			       disp->sync->bo.offset, &core->chan);
bo                 52 drivers/gpu/drm/nouveau/dispnv50/corec37d.c corec37d_ntfy_wait_done(struct nouveau_bo *bo, u32 offset,
bo                 57 drivers/gpu/drm/nouveau/dispnv50/corec37d.c 		data = nouveau_bo_rd32(bo, offset / 4 + 0);
bo                 66 drivers/gpu/drm/nouveau/dispnv50/corec37d.c corec37d_ntfy_init(struct nouveau_bo *bo, u32 offset)
bo                 68 drivers/gpu/drm/nouveau/dispnv50/corec37d.c 	nouveau_bo_wr32(bo, offset / 4 + 0, 0x00000000);
bo                 69 drivers/gpu/drm/nouveau/dispnv50/corec37d.c 	nouveau_bo_wr32(bo, offset / 4 + 1, 0x00000000);
bo                 70 drivers/gpu/drm/nouveau/dispnv50/corec37d.c 	nouveau_bo_wr32(bo, offset / 4 + 2, 0x00000000);
bo                 71 drivers/gpu/drm/nouveau/dispnv50/corec37d.c 	nouveau_bo_wr32(bo, offset / 4 + 3, 0x00000000);
bo                189 drivers/gpu/drm/nouveau/dispnv50/ovly507e.c 			       disp->sync->bo.offset, &wndw->wndw);
bo                 53 drivers/gpu/drm/nouveau/dispnv50/ovly827e.c ovly827e_ntfy_wait_begun(struct nouveau_bo *bo, u32 offset,
bo                 57 drivers/gpu/drm/nouveau/dispnv50/ovly827e.c 		u32 data = nouveau_bo_rd32(bo, offset / 4 + 3);
bo                 66 drivers/gpu/drm/nouveau/dispnv50/ovly827e.c ovly827e_ntfy_reset(struct nouveau_bo *bo, u32 offset)
bo                 68 drivers/gpu/drm/nouveau/dispnv50/ovly827e.c 	nouveau_bo_wr32(bo, offset / 4 + 0, 0x00000000);
bo                 69 drivers/gpu/drm/nouveau/dispnv50/ovly827e.c 	nouveau_bo_wr32(bo, offset / 4 + 1, 0x00000000);
bo                 70 drivers/gpu/drm/nouveau/dispnv50/ovly827e.c 	nouveau_bo_wr32(bo, offset / 4 + 2, 0x00000000);
bo                 71 drivers/gpu/drm/nouveau/dispnv50/ovly827e.c 	nouveau_bo_wr32(bo, offset / 4 + 3, 0x80000000);
bo                506 drivers/gpu/drm/nouveau/dispnv50/wndw.c 	asyw->state.fence = dma_resv_get_excl_rcu(fb->nvbo->bo.base.resv);
bo                507 drivers/gpu/drm/nouveau/dispnv50/wndw.c 	asyw->image.offset[0] = fb->nvbo->bo.offset;
bo                296 drivers/gpu/drm/nouveau/dispnv50/wndwc37e.c 			       disp->sync->bo.offset, &wndw->wndw);
bo                142 drivers/gpu/drm/nouveau/nouveau_abi16.c 		drm_gem_object_put_unlocked(&chan->ntfy->bo.base);
bo                314 drivers/gpu/drm/nouveau/nouveau_abi16.c 	if (chan->chan->push.buffer->bo.mem.mem_type == TTM_PL_VRAM)
bo                342 drivers/gpu/drm/nouveau/nouveau_abi16.c 	ret = drm_gem_handle_create(file_priv, &chan->ntfy->bo.base,
bo                561 drivers/gpu/drm/nouveau/nouveau_abi16.c 		args.start += drm->agp.base + chan->ntfy->bo.offset;
bo                562 drivers/gpu/drm/nouveau/nouveau_abi16.c 		args.limit += drm->agp.base + chan->ntfy->bo.offset;
bo                566 drivers/gpu/drm/nouveau/nouveau_abi16.c 		args.start += chan->ntfy->bo.offset;
bo                567 drivers/gpu/drm/nouveau/nouveau_abi16.c 		args.limit += chan->ntfy->bo.offset;
bo                133 drivers/gpu/drm/nouveau/nouveau_bo.c nouveau_bo_del_ttm(struct ttm_buffer_object *bo)
bo                135 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_drm *drm = nouveau_bdev(bo->bdev);
bo                137 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_bo *nvbo = nouveau_bo(bo);
bo                146 drivers/gpu/drm/nouveau/nouveau_bo.c 	if (bo->base.dev)
bo                147 drivers/gpu/drm/nouveau/nouveau_bo.c 		drm_gem_object_release(&bo->base);
bo                164 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev);
bo                215 drivers/gpu/drm/nouveau/nouveau_bo.c 	nvbo->bo.bdev = &drm->ttm.bdev;
bo                303 drivers/gpu/drm/nouveau/nouveau_bo.c 	acc_size = ttm_bo_dma_acc_size(nvbo->bo.bdev, size, sizeof(*nvbo));
bo                305 drivers/gpu/drm/nouveau/nouveau_bo.c 	nvbo->bo.mem.num_pages = size >> PAGE_SHIFT;
bo                308 drivers/gpu/drm/nouveau/nouveau_bo.c 	ret = ttm_bo_init(nvbo->bo.bdev, &nvbo->bo, size, type,
bo                357 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev);
bo                363 drivers/gpu/drm/nouveau/nouveau_bo.c 	    nvbo->bo.mem.num_pages < vram_pages / 4) {
bo                410 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev);
bo                411 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct ttm_buffer_object *bo = &nvbo->bo;
bo                415 drivers/gpu/drm/nouveau/nouveau_bo.c 	ret = ttm_bo_reserve(bo, false, false, NULL);
bo                429 drivers/gpu/drm/nouveau/nouveau_bo.c 		if (!(memtype & (1 << bo->mem.mem_type)) || evict) {
bo                431 drivers/gpu/drm/nouveau/nouveau_bo.c 				      "0x%08x vs 0x%08x\n", bo,
bo                432 drivers/gpu/drm/nouveau/nouveau_bo.c 				 1 << bo->mem.mem_type, memtype);
bo                459 drivers/gpu/drm/nouveau/nouveau_bo.c 	switch (bo->mem.mem_type) {
bo                461 drivers/gpu/drm/nouveau/nouveau_bo.c 		drm->gem.vram_available -= bo->mem.size;
bo                464 drivers/gpu/drm/nouveau/nouveau_bo.c 		drm->gem.gart_available -= bo->mem.size;
bo                473 drivers/gpu/drm/nouveau/nouveau_bo.c 	ttm_bo_unreserve(bo);
bo                480 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev);
bo                481 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct ttm_buffer_object *bo = &nvbo->bo;
bo                484 drivers/gpu/drm/nouveau/nouveau_bo.c 	ret = ttm_bo_reserve(bo, false, false, NULL);
bo                493 drivers/gpu/drm/nouveau/nouveau_bo.c 	nouveau_bo_placement_set(nvbo, bo->mem.placement, 0);
bo                497 drivers/gpu/drm/nouveau/nouveau_bo.c 		switch (bo->mem.mem_type) {
bo                499 drivers/gpu/drm/nouveau/nouveau_bo.c 			drm->gem.vram_available += bo->mem.size;
bo                502 drivers/gpu/drm/nouveau/nouveau_bo.c 			drm->gem.gart_available += bo->mem.size;
bo                510 drivers/gpu/drm/nouveau/nouveau_bo.c 	ttm_bo_unreserve(bo);
bo                519 drivers/gpu/drm/nouveau/nouveau_bo.c 	ret = ttm_bo_reserve(&nvbo->bo, false, false, NULL);
bo                523 drivers/gpu/drm/nouveau/nouveau_bo.c 	ret = ttm_bo_kmap(&nvbo->bo, 0, nvbo->bo.mem.num_pages, &nvbo->kmap);
bo                525 drivers/gpu/drm/nouveau/nouveau_bo.c 	ttm_bo_unreserve(&nvbo->bo);
bo                541 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev);
bo                542 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct ttm_dma_tt *ttm_dma = (struct ttm_dma_tt *)nvbo->bo.ttm;
bo                561 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev);
bo                562 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct ttm_dma_tt *ttm_dma = (struct ttm_dma_tt *)nvbo->bo.ttm;
bo                584 drivers/gpu/drm/nouveau/nouveau_bo.c 	ret = ttm_bo_validate(&nvbo->bo, &nvbo->placement, &ctx);
bo                636 drivers/gpu/drm/nouveau/nouveau_bo.c nouveau_ttm_tt_create(struct ttm_buffer_object *bo, uint32_t page_flags)
bo                639 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_drm *drm = nouveau_bdev(bo->bdev);
bo                642 drivers/gpu/drm/nouveau/nouveau_bo.c 		return ttm_agp_tt_create(bo, drm->agp.bridge, page_flags);
bo                646 drivers/gpu/drm/nouveau/nouveau_bo.c 	return nouveau_sgdma_create_ttm(bo, page_flags);
bo                720 drivers/gpu/drm/nouveau/nouveau_bo.c nouveau_bo_evict_flags(struct ttm_buffer_object *bo, struct ttm_placement *pl)
bo                722 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_bo *nvbo = nouveau_bo(bo);
bo                724 drivers/gpu/drm/nouveau/nouveau_bo.c 	switch (bo->mem.mem_type) {
bo                751 drivers/gpu/drm/nouveau/nouveau_bo.c nve0_bo_move_copy(struct nouveau_channel *chan, struct ttm_buffer_object *bo,
bo                783 drivers/gpu/drm/nouveau/nouveau_bo.c nvc0_bo_move_copy(struct nouveau_channel *chan, struct ttm_buffer_object *bo,
bo                821 drivers/gpu/drm/nouveau/nouveau_bo.c nvc0_bo_move_m2mf(struct nouveau_channel *chan, struct ttm_buffer_object *bo,
bo                860 drivers/gpu/drm/nouveau/nouveau_bo.c nva3_bo_move_copy(struct nouveau_channel *chan, struct ttm_buffer_object *bo,
bo                898 drivers/gpu/drm/nouveau/nouveau_bo.c nv98_bo_move_exec(struct nouveau_channel *chan, struct ttm_buffer_object *bo,
bo                916 drivers/gpu/drm/nouveau/nouveau_bo.c nv84_bo_move_exec(struct nouveau_channel *chan, struct ttm_buffer_object *bo,
bo                950 drivers/gpu/drm/nouveau/nouveau_bo.c nv50_bo_move_m2mf(struct nouveau_channel *chan, struct ttm_buffer_object *bo,
bo               1037 drivers/gpu/drm/nouveau/nouveau_bo.c nouveau_bo_mem_ctxdma(struct ttm_buffer_object *bo,
bo               1046 drivers/gpu/drm/nouveau/nouveau_bo.c nv04_bo_move_m2mf(struct nouveau_channel *chan, struct ttm_buffer_object *bo,
bo               1059 drivers/gpu/drm/nouveau/nouveau_bo.c 	OUT_RING  (chan, nouveau_bo_mem_ctxdma(bo, chan, old_reg));
bo               1060 drivers/gpu/drm/nouveau/nouveau_bo.c 	OUT_RING  (chan, nouveau_bo_mem_ctxdma(bo, chan, new_reg));
bo               1092 drivers/gpu/drm/nouveau/nouveau_bo.c nouveau_bo_move_prep(struct nouveau_drm *drm, struct ttm_buffer_object *bo,
bo               1095 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_mem *old_mem = nouveau_mem(&bo->mem);
bo               1124 drivers/gpu/drm/nouveau/nouveau_bo.c nouveau_bo_move_m2mf(struct ttm_buffer_object *bo, int evict, bool intr,
bo               1127 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_drm *drm = nouveau_bdev(bo->bdev);
bo               1138 drivers/gpu/drm/nouveau/nouveau_bo.c 		ret = nouveau_bo_move_prep(drm, bo, new_reg);
bo               1144 drivers/gpu/drm/nouveau/nouveau_bo.c 	ret = nouveau_fence_sync(nouveau_bo(bo), chan, true, intr);
bo               1146 drivers/gpu/drm/nouveau/nouveau_bo.c 		ret = drm->ttm.move(chan, bo, &bo->mem, new_reg);
bo               1150 drivers/gpu/drm/nouveau/nouveau_bo.c 				ret = ttm_bo_move_accel_cleanup(bo,
bo               1231 drivers/gpu/drm/nouveau/nouveau_bo.c nouveau_bo_move_flipd(struct ttm_buffer_object *bo, bool evict, bool intr,
bo               1249 drivers/gpu/drm/nouveau/nouveau_bo.c 	ret = ttm_bo_mem_space(bo, &placement, &tmp_reg, &ctx);
bo               1253 drivers/gpu/drm/nouveau/nouveau_bo.c 	ret = ttm_tt_bind(bo->ttm, &tmp_reg, &ctx);
bo               1257 drivers/gpu/drm/nouveau/nouveau_bo.c 	ret = nouveau_bo_move_m2mf(bo, true, intr, no_wait_gpu, &tmp_reg);
bo               1261 drivers/gpu/drm/nouveau/nouveau_bo.c 	ret = ttm_bo_move_ttm(bo, &ctx, new_reg);
bo               1263 drivers/gpu/drm/nouveau/nouveau_bo.c 	ttm_bo_mem_put(bo, &tmp_reg);
bo               1268 drivers/gpu/drm/nouveau/nouveau_bo.c nouveau_bo_move_flips(struct ttm_buffer_object *bo, bool evict, bool intr,
bo               1286 drivers/gpu/drm/nouveau/nouveau_bo.c 	ret = ttm_bo_mem_space(bo, &placement, &tmp_reg, &ctx);
bo               1290 drivers/gpu/drm/nouveau/nouveau_bo.c 	ret = ttm_bo_move_ttm(bo, &ctx, &tmp_reg);
bo               1294 drivers/gpu/drm/nouveau/nouveau_bo.c 	ret = nouveau_bo_move_m2mf(bo, true, intr, no_wait_gpu, new_reg);
bo               1299 drivers/gpu/drm/nouveau/nouveau_bo.c 	ttm_bo_mem_put(bo, &tmp_reg);
bo               1304 drivers/gpu/drm/nouveau/nouveau_bo.c nouveau_bo_move_ntfy(struct ttm_buffer_object *bo, bool evict,
bo               1308 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_bo *nvbo = nouveau_bo(bo);
bo               1312 drivers/gpu/drm/nouveau/nouveau_bo.c 	if (bo->destroy != nouveau_bo_del_ttm)
bo               1322 drivers/gpu/drm/nouveau/nouveau_bo.c 			WARN_ON(ttm_bo_wait(bo, false, false));
bo               1329 drivers/gpu/drm/nouveau/nouveau_bo.c nouveau_bo_vm_bind(struct ttm_buffer_object *bo, struct ttm_mem_reg *new_reg,
bo               1332 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_drm *drm = nouveau_bdev(bo->bdev);
bo               1334 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_bo *nvbo = nouveau_bo(bo);
bo               1350 drivers/gpu/drm/nouveau/nouveau_bo.c nouveau_bo_vm_cleanup(struct ttm_buffer_object *bo,
bo               1354 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_drm *drm = nouveau_bdev(bo->bdev);
bo               1356 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct dma_fence *fence = dma_resv_get_excl(bo->base.resv);
bo               1363 drivers/gpu/drm/nouveau/nouveau_bo.c nouveau_bo_move(struct ttm_buffer_object *bo, bool evict,
bo               1367 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_drm *drm = nouveau_bdev(bo->bdev);
bo               1368 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_bo *nvbo = nouveau_bo(bo);
bo               1369 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct ttm_mem_reg *old_reg = &bo->mem;
bo               1373 drivers/gpu/drm/nouveau/nouveau_bo.c 	ret = ttm_bo_wait(bo, ctx->interruptible, ctx->no_wait_gpu);
bo               1381 drivers/gpu/drm/nouveau/nouveau_bo.c 		ret = nouveau_bo_vm_bind(bo, new_reg, &new_tile);
bo               1387 drivers/gpu/drm/nouveau/nouveau_bo.c 	if (old_reg->mem_type == TTM_PL_SYSTEM && !bo->ttm) {
bo               1388 drivers/gpu/drm/nouveau/nouveau_bo.c 		BUG_ON(bo->mem.mm_node != NULL);
bo               1389 drivers/gpu/drm/nouveau/nouveau_bo.c 		bo->mem = *new_reg;
bo               1397 drivers/gpu/drm/nouveau/nouveau_bo.c 			ret = nouveau_bo_move_flipd(bo, evict,
bo               1401 drivers/gpu/drm/nouveau/nouveau_bo.c 			ret = nouveau_bo_move_flips(bo, evict,
bo               1405 drivers/gpu/drm/nouveau/nouveau_bo.c 			ret = nouveau_bo_move_m2mf(bo, evict,
bo               1413 drivers/gpu/drm/nouveau/nouveau_bo.c 	ret = ttm_bo_wait(bo, ctx->interruptible, ctx->no_wait_gpu);
bo               1415 drivers/gpu/drm/nouveau/nouveau_bo.c 		ret = ttm_bo_move_memcpy(bo, ctx, new_reg);
bo               1420 drivers/gpu/drm/nouveau/nouveau_bo.c 			nouveau_bo_vm_cleanup(bo, NULL, &new_tile);
bo               1422 drivers/gpu/drm/nouveau/nouveau_bo.c 			nouveau_bo_vm_cleanup(bo, new_tile, &nvbo->tile);
bo               1429 drivers/gpu/drm/nouveau/nouveau_bo.c nouveau_bo_verify_access(struct ttm_buffer_object *bo, struct file *filp)
bo               1431 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_bo *nvbo = nouveau_bo(bo);
bo               1433 drivers/gpu/drm/nouveau/nouveau_bo.c 	return drm_vma_node_verify_access(&nvbo->bo.base.vma_node,
bo               1538 drivers/gpu/drm/nouveau/nouveau_bo.c nouveau_ttm_fault_reserve_notify(struct ttm_buffer_object *bo)
bo               1540 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_drm *drm = nouveau_bdev(bo->bdev);
bo               1541 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_bo *nvbo = nouveau_bo(bo);
bo               1549 drivers/gpu/drm/nouveau/nouveau_bo.c 	if (bo->mem.mem_type != TTM_PL_VRAM) {
bo               1554 drivers/gpu/drm/nouveau/nouveau_bo.c 		if (bo->mem.mem_type == TTM_PL_SYSTEM) {
bo               1566 drivers/gpu/drm/nouveau/nouveau_bo.c 	    bo->mem.start + bo->mem.num_pages < mappable)
bo               1687 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct dma_resv *resv = nvbo->bo.base.resv;
bo                 12 drivers/gpu/drm/nouveau/nouveau_bo.h 	struct ttm_buffer_object bo;
bo                 45 drivers/gpu/drm/nouveau/nouveau_bo.h nouveau_bo(struct ttm_buffer_object *bo)
bo                 47 drivers/gpu/drm/nouveau/nouveau_bo.h 	return container_of(bo, struct nouveau_bo, bo);
bo                 60 drivers/gpu/drm/nouveau/nouveau_bo.h 		ttm_bo_get(&ref->bo);
bo                 61 drivers/gpu/drm/nouveau/nouveau_bo.h 		*pnvbo = nouveau_bo(&ref->bo);
bo                 66 drivers/gpu/drm/nouveau/nouveau_bo.h 		ttm_bo_put(&prev->bo);
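
The nouveau_bo.h fragments above show both the container_of() accessor and an inline ref-swap helper built on ttm_bo_get()/ttm_bo_put(); the usage in nouveau_dmem.c below (nouveau_bo_ref(NULL, &chunk->bo)) shows it doubling as an unref. Spelled out in full, mirroring the inline whose body is split across the lines above:

static void bo_ref_swap_sketch(struct nouveau_bo *ref,
                               struct nouveau_bo **pnvbo)
{
        struct nouveau_bo *prev = *pnvbo;

        if (ref)
                ttm_bo_get(&ref->bo);   /* take the new reference first */
        *pnvbo = ref;
        if (prev)
                ttm_bo_put(&prev->bo);  /* drop the old one last */
}
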
bo                163 drivers/gpu/drm/nouveau/nouveau_chan.c 	chan->push.addr = chan->push.buffer->bo.offset;
bo                183 drivers/gpu/drm/nouveau/nouveau_chan.c 	if (chan->push.buffer->bo.mem.mem_type == TTM_PL_VRAM) {
bo                206 drivers/gpu/drm/nouveau/nouveau_display.c 		drm_gem_object_put_unlocked(&fb->nvbo->bo.base);
bo                219 drivers/gpu/drm/nouveau/nouveau_display.c 	return drm_gem_handle_create(file_priv, &fb->nvbo->bo.base, handle);
bo                647 drivers/gpu/drm/nouveau/nouveau_display.c 	struct nouveau_bo *bo;
bo                661 drivers/gpu/drm/nouveau/nouveau_display.c 	ret = nouveau_gem_new(cli, args->size, 0, domain, 0, 0, &bo);
bo                665 drivers/gpu/drm/nouveau/nouveau_display.c 	ret = drm_gem_handle_create(file_priv, &bo->bo.base, &args->handle);
bo                666 drivers/gpu/drm/nouveau/nouveau_display.c 	drm_gem_object_put_unlocked(&bo->bo.base);
bo                679 drivers/gpu/drm/nouveau/nouveau_display.c 		struct nouveau_bo *bo = nouveau_gem_object(gem);
bo                680 drivers/gpu/drm/nouveau/nouveau_display.c 		*poffset = drm_vma_node_offset_addr(&bo->bo.base.vma_node);
bo                 59 drivers/gpu/drm/nouveau/nouveau_dmem.c 	struct nouveau_bo *bo;
bo                 92 drivers/gpu/drm/nouveau/nouveau_dmem.c 	return (idx << PAGE_SHIFT) + chunk->bo->bo.offset;
bo                232 drivers/gpu/drm/nouveau/nouveau_dmem.c 			     &chunk->bo);
bo                236 drivers/gpu/drm/nouveau/nouveau_dmem.c 	ret = nouveau_bo_pin(chunk->bo, TTM_PL_FLAG_VRAM, false);
bo                238 drivers/gpu/drm/nouveau/nouveau_dmem.c 		nouveau_bo_ref(NULL, &chunk->bo);
bo                247 drivers/gpu/drm/nouveau/nouveau_dmem.c 	if (chunk->bo)
bo                270 drivers/gpu/drm/nouveau/nouveau_dmem.c 	if (chunk->bo)
bo                358 drivers/gpu/drm/nouveau/nouveau_dmem.c 		ret = nouveau_bo_pin(chunk->bo, TTM_PL_FLAG_VRAM, false);
bo                363 drivers/gpu/drm/nouveau/nouveau_dmem.c 		ret = nouveau_bo_pin(chunk->bo, TTM_PL_FLAG_VRAM, false);
bo                380 drivers/gpu/drm/nouveau/nouveau_dmem.c 		nouveau_bo_unpin(chunk->bo);
bo                383 drivers/gpu/drm/nouveau/nouveau_dmem.c 		nouveau_bo_unpin(chunk->bo);
bo                402 drivers/gpu/drm/nouveau/nouveau_dmem.c 		if (chunk->bo) {
bo                403 drivers/gpu/drm/nouveau/nouveau_dmem.c 			nouveau_bo_unpin(chunk->bo);
bo                404 drivers/gpu/drm/nouveau/nouveau_dmem.c 			nouveau_bo_ref(NULL, &chunk->bo);
bo                379 drivers/gpu/drm/nouveau/nouveau_fbcon.c 	info->fix.smem_start = fb->nvbo->bo.mem.bus.base +
bo                380 drivers/gpu/drm/nouveau/nouveau_fbcon.c 			       fb->nvbo->bo.mem.bus.offset;
bo                381 drivers/gpu/drm/nouveau/nouveau_fbcon.c 	info->fix.smem_len = fb->nvbo->bo.mem.num_pages << PAGE_SHIFT;
bo                384 drivers/gpu/drm/nouveau/nouveau_fbcon.c 	info->screen_size = fb->nvbo->bo.mem.num_pages << PAGE_SHIFT;
bo                396 drivers/gpu/drm/nouveau/nouveau_fbcon.c 		fb->base.width, fb->base.height, fb->nvbo->bo.offset, nvbo);
bo                337 drivers/gpu/drm/nouveau/nouveau_fence.c 	struct dma_resv *resv = nvbo->bo.base.resv;
bo                 92 drivers/gpu/drm/nouveau/nouveau_fence.h 	struct nouveau_bo *bo;
bo                 43 drivers/gpu/drm/nouveau/nouveau_gem.c 	struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev);
bo                 52 drivers/gpu/drm/nouveau/nouveau_gem.c 		drm_prime_gem_destroy(gem, nvbo->bo.sg);
bo                 54 drivers/gpu/drm/nouveau/nouveau_gem.c 	ttm_bo_put(&nvbo->bo);
bo                 65 drivers/gpu/drm/nouveau/nouveau_gem.c 	struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev);
bo                 74 drivers/gpu/drm/nouveau/nouveau_gem.c 	ret = ttm_bo_reserve(&nvbo->bo, false, false, NULL);
bo                 86 drivers/gpu/drm/nouveau/nouveau_gem.c 	ttm_bo_unreserve(&nvbo->bo);
bo                140 drivers/gpu/drm/nouveau/nouveau_gem.c 	struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev);
bo                149 drivers/gpu/drm/nouveau/nouveau_gem.c 	ret = ttm_bo_reserve(&nvbo->bo, false, false, NULL);
bo                164 drivers/gpu/drm/nouveau/nouveau_gem.c 	ttm_bo_unreserve(&nvbo->bo);
bo                194 drivers/gpu/drm/nouveau/nouveau_gem.c 	ret = drm_gem_object_init(drm->dev, &nvbo->bo.base, size);
bo                215 drivers/gpu/drm/nouveau/nouveau_gem.c 	nvbo->bo.persistent_swap_storage = nvbo->bo.base.filp;
bo                231 drivers/gpu/drm/nouveau/nouveau_gem.c 	else if (nvbo->bo.mem.mem_type == TTM_PL_TT)
bo                235 drivers/gpu/drm/nouveau/nouveau_gem.c 	rep->offset = nvbo->bo.offset;
bo                244 drivers/gpu/drm/nouveau/nouveau_gem.c 	rep->size = nvbo->bo.mem.num_pages << PAGE_SHIFT;
bo                245 drivers/gpu/drm/nouveau/nouveau_gem.c 	rep->map_handle = drm_vma_node_offset_addr(&nvbo->bo.base.vma_node);
bo                273 drivers/gpu/drm/nouveau/nouveau_gem.c 	ret = drm_gem_handle_create(file_priv, &nvbo->bo.base,
bo                276 drivers/gpu/drm/nouveau/nouveau_gem.c 		ret = nouveau_gem_info(file_priv, &nvbo->bo.base, &req->info);
bo                282 drivers/gpu/drm/nouveau/nouveau_gem.c 	drm_gem_object_put_unlocked(&nvbo->bo.base);
bo                291 drivers/gpu/drm/nouveau/nouveau_gem.c 	struct ttm_buffer_object *bo = &nvbo->bo;
bo                306 drivers/gpu/drm/nouveau/nouveau_gem.c 	    bo->mem.mem_type == TTM_PL_VRAM)
bo                310 drivers/gpu/drm/nouveau/nouveau_gem.c 		 bo->mem.mem_type == TTM_PL_TT)
bo                360 drivers/gpu/drm/nouveau/nouveau_gem.c 		ttm_bo_unreserve(&nvbo->bo);
bo                361 drivers/gpu/drm/nouveau/nouveau_gem.c 		drm_gem_object_put_unlocked(&nvbo->bo.base);
bo                420 drivers/gpu/drm/nouveau/nouveau_gem.c 		ret = ttm_bo_reserve(&nvbo->bo, true, false, &op->ticket);
bo                427 drivers/gpu/drm/nouveau/nouveau_gem.c 				ret = ttm_bo_reserve_slowpath(&nvbo->bo, true,
bo                499 drivers/gpu/drm/nouveau/nouveau_gem.c 		ret = nouveau_gem_set_domain(&nvbo->bo.base, b->read_domains,
bo                522 drivers/gpu/drm/nouveau/nouveau_gem.c 			if (nvbo->bo.offset == b->presumed.offset &&
bo                523 drivers/gpu/drm/nouveau/nouveau_gem.c 			    ((nvbo->bo.mem.mem_type == TTM_PL_VRAM &&
bo                525 drivers/gpu/drm/nouveau/nouveau_gem.c 			     (nvbo->bo.mem.mem_type == TTM_PL_TT &&
bo                529 drivers/gpu/drm/nouveau/nouveau_gem.c 			if (nvbo->bo.mem.mem_type == TTM_PL_TT)
bo                533 drivers/gpu/drm/nouveau/nouveau_gem.c 			b->presumed.offset = nvbo->bo.offset;
bo                608 drivers/gpu/drm/nouveau/nouveau_gem.c 				struct drm_nouveau_gem_pushbuf_bo *bo)
bo                630 drivers/gpu/drm/nouveau/nouveau_gem.c 		b = &bo[r->bo_index];
bo                639 drivers/gpu/drm/nouveau/nouveau_gem.c 		nvbo = (void *)(unsigned long)bo[r->reloc_bo_index].user_priv;
bo                642 drivers/gpu/drm/nouveau/nouveau_gem.c 			     nvbo->bo.mem.num_pages << PAGE_SHIFT)) {
bo                649 drivers/gpu/drm/nouveau/nouveau_gem.c 			ret = ttm_bo_kmap(&nvbo->bo, 0, nvbo->bo.mem.num_pages,
bo                673 drivers/gpu/drm/nouveau/nouveau_gem.c 		ret = ttm_bo_wait(&nvbo->bo, false, false);
bo                696 drivers/gpu/drm/nouveau/nouveau_gem.c 	struct drm_nouveau_gem_pushbuf_bo *bo;
bo                742 drivers/gpu/drm/nouveau/nouveau_gem.c 	bo = u_memcpya(req->buffers, req->nr_buffers, sizeof(*bo));
bo                743 drivers/gpu/drm/nouveau/nouveau_gem.c 	if (IS_ERR(bo)) {
bo                745 drivers/gpu/drm/nouveau/nouveau_gem.c 		return nouveau_abi16_put(abi16, PTR_ERR(bo));
bo                758 drivers/gpu/drm/nouveau/nouveau_gem.c 	ret = nouveau_gem_pushbuf_validate(chan, file_priv, bo, req->buffers,
bo                768 drivers/gpu/drm/nouveau/nouveau_gem.c 		ret = nouveau_gem_pushbuf_reloc_apply(cli, req, bo);
bo                784 drivers/gpu/drm/nouveau/nouveau_gem.c 				bo[push[i].bo_index].user_priv;
bo                799 drivers/gpu/drm/nouveau/nouveau_gem.c 				bo[push[i].bo_index].user_priv;
bo                801 drivers/gpu/drm/nouveau/nouveau_gem.c 			OUT_RING(chan, (nvbo->bo.offset + push[i].offset) | 2);
bo                813 drivers/gpu/drm/nouveau/nouveau_gem.c 				bo[push[i].bo_index].user_priv;
bo                820 drivers/gpu/drm/nouveau/nouveau_gem.c 					ret = ttm_bo_kmap(&nvbo->bo, 0,
bo                821 drivers/gpu/drm/nouveau/nouveau_gem.c 							  nvbo->bo.mem.
bo                836 drivers/gpu/drm/nouveau/nouveau_gem.c 				      (nvbo->bo.offset + push[i].offset));
bo                851 drivers/gpu/drm/nouveau/nouveau_gem.c 	validate_fini(&op, chan, fence, bo);
bo                855 drivers/gpu/drm/nouveau/nouveau_gem.c 	u_free(bo);
bo                892 drivers/gpu/drm/nouveau/nouveau_gem.c 	lret = dma_resv_wait_timeout_rcu(nvbo->bo.base.resv, write, true,
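
Throughout nouveau_gem.c above, every change to a BO's domain, pin count or placement sits inside a TTM reservation bracket. The shape, as a fragment (the body comment stands in for the per-call logic):

        ret = ttm_bo_reserve(&nvbo->bo, false, false, NULL);
        if (ret)
                return ret;

        /* ... adjust nvbo placement / domains while the BO is reserved ... */

        ttm_bo_unreserve(&nvbo->bo);
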
bo                 11 drivers/gpu/drm/nouveau/nouveau_gem.h 	return gem ? container_of(gem, struct nouveau_bo, bo.base) : NULL;
bo                 33 drivers/gpu/drm/nouveau/nouveau_prime.c 	int npages = nvbo->bo.num_pages;
bo                 35 drivers/gpu/drm/nouveau/nouveau_prime.c 	return drm_prime_pages_to_sg(nvbo->bo.ttm->pages, npages);
bo                 43 drivers/gpu/drm/nouveau/nouveau_prime.c 	ret = ttm_bo_kmap(&nvbo->bo, 0, nvbo->bo.num_pages,
bo                 84 drivers/gpu/drm/nouveau/nouveau_prime.c 	ret = drm_gem_object_init(dev, &nvbo->bo.base, size);
bo                 98 drivers/gpu/drm/nouveau/nouveau_prime.c 	obj = &nvbo->bo.base;
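
The nouveau_prime.c entries show PRIME export working directly off the TTM page array. A small fragment of that idea, using only the calls visible above; the helper name is hypothetical:

        static struct sg_table *bo_to_sg(struct nouveau_bo *nvbo)
        {
                int npages = nvbo->bo.num_pages;

                /* Hand the BO's backing pages to DRM PRIME; the caller owns
                 * the returned scatter/gather table. */
                return drm_prime_pages_to_sg(nvbo->bo.ttm->pages, npages);
        }
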
bo                 85 drivers/gpu/drm/nouveau/nouveau_sgdma.c nouveau_sgdma_create_ttm(struct ttm_buffer_object *bo, uint32_t page_flags)
bo                 87 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	struct nouveau_drm *drm = nouveau_bdev(bo->bdev);
bo                 99 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	if (ttm_dma_tt_init(&nvbe->ttm, bo, page_flags))
bo                 60 drivers/gpu/drm/nouveau/nouveau_ttm.c 			 struct ttm_buffer_object *bo,
bo                 64 drivers/gpu/drm/nouveau/nouveau_ttm.c 	struct nouveau_bo *nvbo = nouveau_bo(bo);
bo                 65 drivers/gpu/drm/nouveau/nouveau_ttm.c 	struct nouveau_drm *drm = nouveau_bdev(bo->bdev);
bo                 98 drivers/gpu/drm/nouveau/nouveau_ttm.c 			 struct ttm_buffer_object *bo,
bo                102 drivers/gpu/drm/nouveau/nouveau_ttm.c 	struct nouveau_bo *nvbo = nouveau_bo(bo);
bo                103 drivers/gpu/drm/nouveau/nouveau_ttm.c 	struct nouveau_drm *drm = nouveau_bdev(bo->bdev);
bo                124 drivers/gpu/drm/nouveau/nouveau_ttm.c 		      struct ttm_buffer_object *bo,
bo                128 drivers/gpu/drm/nouveau/nouveau_ttm.c 	struct nouveau_bo *nvbo = nouveau_bo(bo);
bo                129 drivers/gpu/drm/nouveau/nouveau_ttm.c 	struct nouveau_drm *drm = nouveau_bdev(bo->bdev);
bo                 15 drivers/gpu/drm/nouveau/nouveau_ttm.h struct ttm_tt *nouveau_sgdma_create_ttm(struct ttm_buffer_object *bo,
bo                 80 drivers/gpu/drm/nouveau/nouveau_vmm.c 	struct nouveau_mem *mem = nouveau_mem(&nvbo->bo.mem);
bo                 99 drivers/gpu/drm/nouveau/nouveau_vmm.c 	if (nvbo->bo.mem.mem_type != TTM_PL_SYSTEM &&
bo                 86 drivers/gpu/drm/nouveau/nv10_fence.c 	nouveau_bo_unmap(priv->bo);
bo                 87 drivers/gpu/drm/nouveau/nv10_fence.c 	if (priv->bo)
bo                 88 drivers/gpu/drm/nouveau/nv10_fence.c 		nouveau_bo_unpin(priv->bo);
bo                 89 drivers/gpu/drm/nouveau/nv10_fence.c 	nouveau_bo_ref(NULL, &priv->bo);
bo                 15 drivers/gpu/drm/nouveau/nv10_fence.h 	struct nouveau_bo *bo;
bo                 79 drivers/gpu/drm/nouveau/nv17_fence.c 	struct ttm_mem_reg *reg = &priv->bo->bo.mem;
bo                111 drivers/gpu/drm/nouveau/nv17_fence.c 	nouveau_bo_wr32(priv->bo, 0, priv->sequence);
bo                131 drivers/gpu/drm/nouveau/nv17_fence.c 			     0, 0x0000, NULL, NULL, &priv->bo);
bo                133 drivers/gpu/drm/nouveau/nv17_fence.c 		ret = nouveau_bo_pin(priv->bo, TTM_PL_FLAG_VRAM, false);
bo                135 drivers/gpu/drm/nouveau/nv17_fence.c 			ret = nouveau_bo_map(priv->bo);
bo                137 drivers/gpu/drm/nouveau/nv17_fence.c 				nouveau_bo_unpin(priv->bo);
bo                140 drivers/gpu/drm/nouveau/nv17_fence.c 			nouveau_bo_ref(NULL, &priv->bo);
bo                148 drivers/gpu/drm/nouveau/nv17_fence.c 	nouveau_bo_wr32(priv->bo, 0x000, 0x00000000);
bo                 40 drivers/gpu/drm/nouveau/nv50_fence.c 	struct ttm_mem_reg *reg = &priv->bo->bo.mem;
bo                 84 drivers/gpu/drm/nouveau/nv50_fence.c 			     0, 0x0000, NULL, NULL, &priv->bo);
bo                 86 drivers/gpu/drm/nouveau/nv50_fence.c 		ret = nouveau_bo_pin(priv->bo, TTM_PL_FLAG_VRAM, false);
bo                 88 drivers/gpu/drm/nouveau/nv50_fence.c 			ret = nouveau_bo_map(priv->bo);
bo                 90 drivers/gpu/drm/nouveau/nv50_fence.c 				nouveau_bo_unpin(priv->bo);
bo                 93 drivers/gpu/drm/nouveau/nv50_fence.c 			nouveau_bo_ref(NULL, &priv->bo);
bo                101 drivers/gpu/drm/nouveau/nv50_fence.c 	nouveau_bo_wr32(priv->bo, 0x000, 0x00000000);
bo                 91 drivers/gpu/drm/nouveau/nv84_fence.c 	return nouveau_bo_rd32(priv->bo, chan->chid * 16/4);
bo                100 drivers/gpu/drm/nouveau/nv84_fence.c 	nouveau_bo_wr32(priv->bo, chan->chid * 16 / 4, fctx->base.sequence);
bo                129 drivers/gpu/drm/nouveau/nv84_fence.c 	ret = nouveau_vma_new(priv->bo, chan->vmm, &fctx->vma);
bo                146 drivers/gpu/drm/nouveau/nv84_fence.c 			priv->suspend[i] = nouveau_bo_rd32(priv->bo, i*4);
bo                160 drivers/gpu/drm/nouveau/nv84_fence.c 			nouveau_bo_wr32(priv->bo, i*4, priv->suspend[i]);
bo                170 drivers/gpu/drm/nouveau/nv84_fence.c 	nouveau_bo_unmap(priv->bo);
bo                171 drivers/gpu/drm/nouveau/nv84_fence.c 	if (priv->bo)
bo                172 drivers/gpu/drm/nouveau/nv84_fence.c 		nouveau_bo_unpin(priv->bo);
bo                173 drivers/gpu/drm/nouveau/nv84_fence.c 	nouveau_bo_ref(NULL, &priv->bo);
bo                207 drivers/gpu/drm/nouveau/nv84_fence.c 			     domain, 0, 0, NULL, NULL, &priv->bo);
bo                209 drivers/gpu/drm/nouveau/nv84_fence.c 		ret = nouveau_bo_pin(priv->bo, domain, false);
bo                211 drivers/gpu/drm/nouveau/nv84_fence.c 			ret = nouveau_bo_map(priv->bo);
bo                213 drivers/gpu/drm/nouveau/nv84_fence.c 				nouveau_bo_unpin(priv->bo);
bo                216 drivers/gpu/drm/nouveau/nv84_fence.c 			nouveau_bo_ref(NULL, &priv->bo);
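
nv17_fence.c, nv50_fence.c and nv84_fence.c all carry the same allocate/pin/map ladder for the fence BO, each failure undoing exactly the steps that succeeded. Reassembled from the excerpts (lines not containing the identifier are absent from the index and filled in from context):

        if (!ret) {
                ret = nouveau_bo_pin(priv->bo, TTM_PL_FLAG_VRAM, false);
                if (!ret) {
                        ret = nouveau_bo_map(priv->bo);
                        if (ret)
                                nouveau_bo_unpin(priv->bo);     /* undo pin */
                }
                if (ret)
                        nouveau_bo_ref(NULL, &priv->bo);        /* undo alloc */
        }

The destroy paths are the mirror image, as in the nv10_fence.c and nv84_fence.c entries: unmap, unpin (guarded on the pointer), then drop the last reference.
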
bo               1073 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf100.c 	u32 bo = 0;
bo               1082 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf100.c 			mmio_skip(info, o, (attrib << 16) | ++bo);
bo               1083 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf100.c 			mmio_wr32(info, o, (attrib << 16) | --bo);
bo               1084 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf100.c 			bo += grctx->attrib_nr_max;
bo                747 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf108.c 	u32 bo = 0;
bo                748 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf108.c 	u32 ao = bo + grctx->attrib_nr_max * gr->tpc_total;
bo                762 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf108.c 			mmio_skip(info, o + 0x20, (t << 28) | (b << 16) | ++bo);
bo                763 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf108.c 			mmio_wr32(info, o + 0x20, (t << 28) | (b << 16) | --bo);
bo                764 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf108.c 			bo += grctx->attrib_nr_max;
bo                255 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf117.c 	u32 bo = 0;
bo                256 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf117.c 	u32 ao = bo + grctx->attrib_nr_max * gr->tpc_total;
bo                272 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf117.c 			mmio_skip(info, o + 0xc0, (t << 28) | (b << 16) | ++bo);
bo                273 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf117.c 			mmio_wr32(info, o + 0xc0, (t << 28) | (b << 16) | --bo);
bo                274 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf117.c 			bo += grctx->attrib_nr_max * gr->ppc_tpc_nr[gpc][ppc];
bo                919 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgm107.c 	u32 bo = 0;
bo                920 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgm107.c 	u32 ao = bo + grctx->attrib_nr_max * gr->tpc_total;
bo                938 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgm107.c 			mmio_wr32(info, o + 0xf4, bo);
bo                939 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgm107.c 			bo += grctx->attrib_nr_max * gr->ppc_tpc_nr[gpc][ppc];
bo                 55 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgp100.c 	u32 bo = ao + size;
bo                 81 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgp100.c 			mmio_wr32(info, o + 0xf4, bo);
bo                 83 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgp100.c 			bo += grctx->attrib_nr_max * gr->ppc_tpc_max;
bo                 51 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgp102.c 	u32 bo = ao + size;
bo                 80 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgp102.c 			mmio_wr32(info, o + 0xf4, bo);
bo                 82 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgp102.c 			bo += gs;
bo                 73 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgv100.c 	u32 bo = ao + size;
bo                 99 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgv100.c 			mmio_wr32(info, o + 0xf4, bo);
bo                101 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgv100.c 			bo += gs;
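
In the nvkm grctx files above, "bo" is not a buffer object at all: it is a running offset into the attribute circular-buffer pool (with "ao" the companion alpha-buffer offset), advanced as each PPC's slice is programmed. An illustrative fragment with the struct accesses flattened into locals; attrib_nr_max, tpc_total, ppc_nr and ppc_tpc_nr[] stand in for the grctx/gr fields in the excerpts:

        u32 bo = 0;
        u32 ao = bo + attrib_nr_max * tpc_total;  /* alpha pool follows beta */
        int ppc;

        for (ppc = 0; ppc < ppc_nr; ppc++) {
                mmio_wr32(info, o + 0xf4, bo);    /* this PPC's buffer base */
                bo += attrib_nr_max * ppc_tpc_nr[ppc];  /* next slice */
        }
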
bo                 28 drivers/gpu/drm/omapdrm/omap_fbdev.c 	struct drm_gem_object *bo;
bo                 45 drivers/gpu/drm/omapdrm/omap_fbdev.c 	omap_gem_roll(fbdev->bo, fbi->var.yoffset * npages);
bo                130 drivers/gpu/drm/omapdrm/omap_fbdev.c 	fbdev->bo = omap_gem_new(dev, gsize, OMAP_BO_SCANOUT | OMAP_BO_WC);
bo                131 drivers/gpu/drm/omapdrm/omap_fbdev.c 	if (!fbdev->bo) {
bo                137 drivers/gpu/drm/omapdrm/omap_fbdev.c 	fb = omap_framebuffer_init(dev, &mode_cmd, &fbdev->bo);
bo                143 drivers/gpu/drm/omapdrm/omap_fbdev.c 		drm_gem_object_put_unlocked(fbdev->bo);
bo                156 drivers/gpu/drm/omapdrm/omap_fbdev.c 	ret = omap_gem_pin(fbdev->bo, &dma_addr);
bo                181 drivers/gpu/drm/omapdrm/omap_fbdev.c 	fbi->screen_buffer = omap_gem_vaddr(fbdev->bo);
bo                182 drivers/gpu/drm/omapdrm/omap_fbdev.c 	fbi->screen_size = fbdev->bo->size;
bo                184 drivers/gpu/drm/omapdrm/omap_fbdev.c 	fbi->fix.smem_len = fbdev->bo->size;
bo                287 drivers/gpu/drm/omapdrm/omap_fbdev.c 	if (fbdev->bo)
bo                288 drivers/gpu/drm/omapdrm/omap_fbdev.c 		omap_gem_unpin(fbdev->bo);
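
The omap_fbdev.c entries sketch the fbdev probe flow: allocate a scanout GEM object, wrap it in a framebuffer, then pin it to obtain a DMA address for the scanout engine. Reassembled from the excerpts, with illustrative error codes:

        fbdev->bo = omap_gem_new(dev, gsize, OMAP_BO_SCANOUT | OMAP_BO_WC);
        if (!fbdev->bo)
                return -ENOMEM;                 /* assumed error value */

        fb = omap_framebuffer_init(dev, &mode_cmd, &fbdev->bo);
        if (IS_ERR(fb)) {
                /* Framebuffer init failed: drop our BO reference. */
                drm_gem_object_put_unlocked(fbdev->bo);
                return PTR_ERR(fb);
        }

        ret = omap_gem_pin(fbdev->bo, &dma_addr);
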
bo                 82 drivers/gpu/drm/panfrost/panfrost_drv.c 	struct panfrost_gem_object *bo;
bo                 95 drivers/gpu/drm/panfrost/panfrost_drv.c 	bo = panfrost_gem_create_with_handle(file, dev, args->size, args->flags,
bo                 97 drivers/gpu/drm/panfrost/panfrost_drv.c 	if (IS_ERR(bo))
bo                 98 drivers/gpu/drm/panfrost/panfrost_drv.c 		return PTR_ERR(bo);
bo                100 drivers/gpu/drm/panfrost/panfrost_drv.c 	mapping = panfrost_gem_mapping_get(bo, priv);
bo                102 drivers/gpu/drm/panfrost/panfrost_drv.c 		drm_gem_object_put_unlocked(&bo->base.base);
bo                132 drivers/gpu/drm/panfrost/panfrost_drv.c 	struct panfrost_gem_object *bo;
bo                162 drivers/gpu/drm/panfrost/panfrost_drv.c 		bo = to_panfrost_bo(job->bos[i]);
bo                163 drivers/gpu/drm/panfrost/panfrost_drv.c 		mapping = panfrost_gem_mapping_get(bo, priv);
bo                169 drivers/gpu/drm/panfrost/panfrost_drv.c 		atomic_inc(&bo->gpu_usecount);
bo                365 drivers/gpu/drm/panfrost/panfrost_drv.c 	struct panfrost_gem_object *bo;
bo                372 drivers/gpu/drm/panfrost/panfrost_drv.c 	bo = to_panfrost_bo(gem_obj);
bo                374 drivers/gpu/drm/panfrost/panfrost_drv.c 	mapping = panfrost_gem_mapping_get(bo, priv);
bo                392 drivers/gpu/drm/panfrost/panfrost_drv.c 	struct panfrost_gem_object *bo;
bo                401 drivers/gpu/drm/panfrost/panfrost_drv.c 	bo = to_panfrost_bo(gem_obj);
bo                404 drivers/gpu/drm/panfrost/panfrost_drv.c 	mutex_lock(&bo->mappings.lock);
bo                408 drivers/gpu/drm/panfrost/panfrost_drv.c 		first = list_first_entry(&bo->mappings.list,
bo                420 drivers/gpu/drm/panfrost/panfrost_drv.c 		if (!list_is_singular(&bo->mappings.list) ||
bo                431 drivers/gpu/drm/panfrost/panfrost_drv.c 			list_add_tail(&bo->base.madv_list,
bo                434 drivers/gpu/drm/panfrost/panfrost_drv.c 			list_del_init(&bo->base.madv_list);
bo                438 drivers/gpu/drm/panfrost/panfrost_drv.c 	mutex_unlock(&bo->mappings.lock);
bo                 19 drivers/gpu/drm/panfrost/panfrost_gem.c 	struct panfrost_gem_object *bo = to_panfrost_bo(obj);
bo                 29 drivers/gpu/drm/panfrost/panfrost_gem.c 	list_del_init(&bo->base.madv_list);
bo                 36 drivers/gpu/drm/panfrost/panfrost_gem.c 	WARN_ON_ONCE(!list_empty(&bo->mappings.list));
bo                 38 drivers/gpu/drm/panfrost/panfrost_gem.c 	if (bo->sgts) {
bo                 40 drivers/gpu/drm/panfrost/panfrost_gem.c 		int n_sgt = bo->base.base.size / SZ_2M;
bo                 43 drivers/gpu/drm/panfrost/panfrost_gem.c 			if (bo->sgts[i].sgl) {
bo                 44 drivers/gpu/drm/panfrost/panfrost_gem.c 				dma_unmap_sg(pfdev->dev, bo->sgts[i].sgl,
bo                 45 drivers/gpu/drm/panfrost/panfrost_gem.c 					     bo->sgts[i].nents, DMA_BIDIRECTIONAL);
bo                 46 drivers/gpu/drm/panfrost/panfrost_gem.c 				sg_free_table(&bo->sgts[i]);
bo                 49 drivers/gpu/drm/panfrost/panfrost_gem.c 		kfree(bo->sgts);
bo                 56 drivers/gpu/drm/panfrost/panfrost_gem.c panfrost_gem_mapping_get(struct panfrost_gem_object *bo,
bo                 61 drivers/gpu/drm/panfrost/panfrost_gem.c 	mutex_lock(&bo->mappings.lock);
bo                 62 drivers/gpu/drm/panfrost/panfrost_gem.c 	list_for_each_entry(iter, &bo->mappings.list, node) {
bo                 69 drivers/gpu/drm/panfrost/panfrost_gem.c 	mutex_unlock(&bo->mappings.lock);
bo                108 drivers/gpu/drm/panfrost/panfrost_gem.c void panfrost_gem_teardown_mappings(struct panfrost_gem_object *bo)
bo                112 drivers/gpu/drm/panfrost/panfrost_gem.c 	mutex_lock(&bo->mappings.lock);
bo                113 drivers/gpu/drm/panfrost/panfrost_gem.c 	list_for_each_entry(mapping, &bo->mappings.list, node)
bo                115 drivers/gpu/drm/panfrost/panfrost_gem.c 	mutex_unlock(&bo->mappings.lock);
bo                123 drivers/gpu/drm/panfrost/panfrost_gem.c 	struct panfrost_gem_object *bo = to_panfrost_bo(obj);
bo                124 drivers/gpu/drm/panfrost/panfrost_gem.c 	unsigned long color = bo->noexec ? PANFROST_BO_NOEXEC : 0;
bo                135 drivers/gpu/drm/panfrost/panfrost_gem.c 	mapping->obj = bo;
bo                143 drivers/gpu/drm/panfrost/panfrost_gem.c 	if (!bo->noexec)
bo                156 drivers/gpu/drm/panfrost/panfrost_gem.c 	if (!bo->is_heap) {
bo                162 drivers/gpu/drm/panfrost/panfrost_gem.c 	mutex_lock(&bo->mappings.lock);
bo                163 drivers/gpu/drm/panfrost/panfrost_gem.c 	WARN_ON(bo->base.madv != PANFROST_MADV_WILLNEED);
bo                164 drivers/gpu/drm/panfrost/panfrost_gem.c 	list_add_tail(&mapping->node, &bo->mappings.list);
bo                165 drivers/gpu/drm/panfrost/panfrost_gem.c 	mutex_unlock(&bo->mappings.lock);
bo                176 drivers/gpu/drm/panfrost/panfrost_gem.c 	struct panfrost_gem_object *bo = to_panfrost_bo(obj);
bo                179 drivers/gpu/drm/panfrost/panfrost_gem.c 	mutex_lock(&bo->mappings.lock);
bo                180 drivers/gpu/drm/panfrost/panfrost_gem.c 	list_for_each_entry(iter, &bo->mappings.list, node) {
bo                187 drivers/gpu/drm/panfrost/panfrost_gem.c 	mutex_unlock(&bo->mappings.lock);
bo                244 drivers/gpu/drm/panfrost/panfrost_gem.c 	struct panfrost_gem_object *bo;
bo                254 drivers/gpu/drm/panfrost/panfrost_gem.c 	bo = to_panfrost_bo(&shmem->base);
bo                255 drivers/gpu/drm/panfrost/panfrost_gem.c 	bo->noexec = !!(flags & PANFROST_BO_NOEXEC);
bo                256 drivers/gpu/drm/panfrost/panfrost_gem.c 	bo->is_heap = !!(flags & PANFROST_BO_HEAP);
bo                268 drivers/gpu/drm/panfrost/panfrost_gem.c 	return bo;
bo                277 drivers/gpu/drm/panfrost/panfrost_gem.c 	struct panfrost_gem_object *bo;
bo                283 drivers/gpu/drm/panfrost/panfrost_gem.c 	bo = to_panfrost_bo(obj);
bo                284 drivers/gpu/drm/panfrost/panfrost_gem.c 	bo->noexec = true;
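
The panfrost_gem.c entries show the per-BO mapping table discipline: every walk of bo->mappings.list happens under bo->mappings.lock, and panfrost_gem_mapping_get() returns its match with a reference held. A sketch of the lookup; the match criterion and the kref field name are assumptions about lines the index omits:

        struct panfrost_gem_mapping *iter, *mapping = NULL;

        mutex_lock(&bo->mappings.lock);
        list_for_each_entry(iter, &bo->mappings.list, node) {
                if (iter->mmu == priv_mmu) {            /* assumed criterion */
                        kref_get(&iter->refcount);      /* assumed field */
                        mapping = iter;
                        break;
                }
        }
        mutex_unlock(&bo->mappings.lock);
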
bo                 82 drivers/gpu/drm/panfrost/panfrost_gem.h panfrost_gem_mapping_get(struct panfrost_gem_object *bo,
bo                 85 drivers/gpu/drm/panfrost/panfrost_gem.h void panfrost_gem_teardown_mappings(struct panfrost_gem_object *bo);
bo                 42 drivers/gpu/drm/panfrost/panfrost_gem_shrinker.c 	struct panfrost_gem_object *bo = to_panfrost_bo(obj);
bo                 44 drivers/gpu/drm/panfrost/panfrost_gem_shrinker.c 	if (atomic_read(&bo->gpu_usecount))
bo                 50 drivers/gpu/drm/panfrost/panfrost_gem_shrinker.c 	panfrost_gem_teardown_mappings(bo);
bo                284 drivers/gpu/drm/panfrost/panfrost_job.c 		struct panfrost_gem_object *bo;
bo                287 drivers/gpu/drm/panfrost/panfrost_job.c 			bo = to_panfrost_bo(job->bos[i]);
bo                279 drivers/gpu/drm/panfrost/panfrost_mmu.c 	struct panfrost_gem_object *bo = mapping->obj;
bo                280 drivers/gpu/drm/panfrost/panfrost_mmu.c 	struct drm_gem_object *obj = &bo->base.base;
bo                288 drivers/gpu/drm/panfrost/panfrost_mmu.c 	if (bo->noexec)
bo                304 drivers/gpu/drm/panfrost/panfrost_mmu.c 	struct panfrost_gem_object *bo = mapping->obj;
bo                305 drivers/gpu/drm/panfrost/panfrost_mmu.c 	struct drm_gem_object *obj = &bo->base.base;
bo                451 drivers/gpu/drm/panfrost/panfrost_mmu.c 	struct panfrost_gem_object *bo;
bo                461 drivers/gpu/drm/panfrost/panfrost_mmu.c 	bo = bomapping->obj;
bo                462 drivers/gpu/drm/panfrost/panfrost_mmu.c 	if (!bo->is_heap) {
bo                475 drivers/gpu/drm/panfrost/panfrost_mmu.c 	mutex_lock(&bo->base.pages_lock);
bo                477 drivers/gpu/drm/panfrost/panfrost_mmu.c 	if (!bo->base.pages) {
bo                478 drivers/gpu/drm/panfrost/panfrost_mmu.c 		bo->sgts = kvmalloc_array(bo->base.base.size / SZ_2M,
bo                480 drivers/gpu/drm/panfrost/panfrost_mmu.c 		if (!bo->sgts) {
bo                481 drivers/gpu/drm/panfrost/panfrost_mmu.c 			mutex_unlock(&bo->base.pages_lock);
bo                486 drivers/gpu/drm/panfrost/panfrost_mmu.c 		pages = kvmalloc_array(bo->base.base.size >> PAGE_SHIFT,
bo                489 drivers/gpu/drm/panfrost/panfrost_mmu.c 			kfree(bo->sgts);
bo                490 drivers/gpu/drm/panfrost/panfrost_mmu.c 			bo->sgts = NULL;
bo                491 drivers/gpu/drm/panfrost/panfrost_mmu.c 			mutex_unlock(&bo->base.pages_lock);
bo                495 drivers/gpu/drm/panfrost/panfrost_mmu.c 		bo->base.pages = pages;
bo                496 drivers/gpu/drm/panfrost/panfrost_mmu.c 		bo->base.pages_use_count = 1;
bo                498 drivers/gpu/drm/panfrost/panfrost_mmu.c 		pages = bo->base.pages;
bo                500 drivers/gpu/drm/panfrost/panfrost_mmu.c 	mapping = bo->base.base.filp->f_mapping;
bo                506 drivers/gpu/drm/panfrost/panfrost_mmu.c 			mutex_unlock(&bo->base.pages_lock);
bo                512 drivers/gpu/drm/panfrost/panfrost_mmu.c 	mutex_unlock(&bo->base.pages_lock);
bo                514 drivers/gpu/drm/panfrost/panfrost_mmu.c 	sgt = &bo->sgts[page_offset / (SZ_2M / PAGE_SIZE)];
bo                539 drivers/gpu/drm/panfrost/panfrost_mmu.c 	drm_gem_shmem_put_pages(&bo->base);
bo                541 drivers/gpu/drm/panfrost/panfrost_mmu.c 	drm_gem_object_put_unlocked(&bo->base.base);
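
The panfrost_mmu.c fault path above grows heap BOs in 2 MiB granules, keeping one sg_table per granule in bo->sgts. Given a faulting page offset, the owning granule falls out of integer division, exactly as in the excerpts:

        int n_sgt = bo->base.base.size / SZ_2M;   /* granule count per BO */
        struct sg_table *sgt = &bo->sgts[page_offset / (SZ_2M / PAGE_SIZE)];
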
bo                 75 drivers/gpu/drm/panfrost/panfrost_perfcnt.c 	struct drm_gem_shmem_object *bo;
bo                 88 drivers/gpu/drm/panfrost/panfrost_perfcnt.c 	bo = drm_gem_shmem_create(pfdev->ddev, perfcnt->bosize);
bo                 89 drivers/gpu/drm/panfrost/panfrost_perfcnt.c 	if (IS_ERR(bo))
bo                 90 drivers/gpu/drm/panfrost/panfrost_perfcnt.c 		return PTR_ERR(bo);
bo                 93 drivers/gpu/drm/panfrost/panfrost_perfcnt.c 	ret = panfrost_gem_open(&bo->base, file_priv);
bo                 97 drivers/gpu/drm/panfrost/panfrost_perfcnt.c 	perfcnt->mapping = panfrost_gem_mapping_get(to_panfrost_bo(&bo->base),
bo                104 drivers/gpu/drm/panfrost/panfrost_perfcnt.c 	perfcnt->buf = drm_gem_shmem_vmap(&bo->base);
bo                159 drivers/gpu/drm/panfrost/panfrost_perfcnt.c 	drm_gem_object_put_unlocked(&bo->base);
bo                164 drivers/gpu/drm/panfrost/panfrost_perfcnt.c 	drm_gem_shmem_vunmap(&bo->base, perfcnt->buf);
bo                168 drivers/gpu/drm/panfrost/panfrost_perfcnt.c 	panfrost_gem_close(&bo->base, file_priv);
bo                170 drivers/gpu/drm/panfrost/panfrost_perfcnt.c 	drm_gem_object_put_unlocked(&bo->base);
bo                267 drivers/gpu/drm/qxl/qxl_cmd.c 	struct qxl_bo *bo;
bo                271 drivers/gpu/drm/qxl/qxl_cmd.c 			    false, QXL_GEM_DOMAIN_VRAM, NULL, &bo);
bo                276 drivers/gpu/drm/qxl/qxl_cmd.c 	ret = qxl_release_list_add(release, bo);
bo                280 drivers/gpu/drm/qxl/qxl_cmd.c 	*_bo = bo;
bo                283 drivers/gpu/drm/qxl/qxl_cmd.c 	qxl_bo_unref(&bo);
bo                384 drivers/gpu/drm/qxl/qxl_cmd.c void qxl_io_create_primary(struct qxl_device *qdev, struct qxl_bo *bo)
bo                393 drivers/gpu/drm/qxl/qxl_cmd.c 	create->format = bo->surf.format;
bo                394 drivers/gpu/drm/qxl/qxl_cmd.c 	create->width = bo->surf.width;
bo                395 drivers/gpu/drm/qxl/qxl_cmd.c 	create->height = bo->surf.height;
bo                396 drivers/gpu/drm/qxl/qxl_cmd.c 	create->stride = bo->surf.stride;
bo                397 drivers/gpu/drm/qxl/qxl_cmd.c 	create->mem = qxl_bo_physical_address(qdev, bo, 0);
bo                399 drivers/gpu/drm/qxl/qxl_cmd.c 	DRM_DEBUG_DRIVER("mem = %llx, from %p\n", create->mem, bo->kptr);
bo                405 drivers/gpu/drm/qxl/qxl_cmd.c 	qdev->primary_bo = bo;
bo                 57 drivers/gpu/drm/qxl/qxl_debugfs.c 	struct qxl_bo *bo;
bo                 59 drivers/gpu/drm/qxl/qxl_debugfs.c 	list_for_each_entry(bo, &qdev->gem.objects, list) {
bo                 64 drivers/gpu/drm/qxl/qxl_debugfs.c 		fobj = rcu_dereference(bo->tbo.base.resv->fence);
bo                 69 drivers/gpu/drm/qxl/qxl_debugfs.c 			   (unsigned long)bo->tbo.base.size,
bo                 70 drivers/gpu/drm/qxl/qxl_debugfs.c 			   bo->pin_count, rel);
bo                213 drivers/gpu/drm/qxl/qxl_display.c 				 struct qxl_bo *bo)
bo                215 drivers/gpu/drm/qxl/qxl_display.c 	return qxl_check_mode(qdev, bo->surf.width, bo->surf.height);
bo                479 drivers/gpu/drm/qxl/qxl_display.c 	struct qxl_bo *bo;
bo                484 drivers/gpu/drm/qxl/qxl_display.c 	bo = gem_to_qxl_bo(state->fb->obj[0]);
bo                486 drivers/gpu/drm/qxl/qxl_display.c 	return qxl_check_framebuffer(qdev, bo);
bo                540 drivers/gpu/drm/qxl/qxl_display.c 	struct qxl_bo *bo = gem_to_qxl_bo(plane->state->fb->obj[0]);
bo                550 drivers/gpu/drm/qxl/qxl_display.c 	primary = bo->shadow ? bo->shadow : bo;
bo                559 drivers/gpu/drm/qxl/qxl_display.c 	if (bo->is_dumb)
bo                563 drivers/gpu/drm/qxl/qxl_display.c 	qxl_draw_dirty_fb(qdev, plane->state->fb, bo, 0, 0, &norect, 1, 1,
bo                573 drivers/gpu/drm/qxl/qxl_display.c 		struct qxl_bo *bo = gem_to_qxl_bo(old_state->fb->obj[0]);
bo                575 drivers/gpu/drm/qxl/qxl_display.c 		if (bo->is_primary) {
bo                577 drivers/gpu/drm/qxl/qxl_display.c 			bo->is_primary = false;
bo                721 drivers/gpu/drm/qxl/qxl_display.c 				 int index, struct qxl_bo *bo)
bo                728 drivers/gpu/drm/qxl/qxl_display.c 	if (bo && bo->is_dumb) {
bo                729 drivers/gpu/drm/qxl/qxl_display.c 		width = bo->surf.width;
bo                730 drivers/gpu/drm/qxl/qxl_display.c 		height = bo->surf.height;
bo                123 drivers/gpu/drm/qxl/qxl_draw.c 		       struct qxl_bo *bo,
bo                200 drivers/gpu/drm/qxl/qxl_draw.c 	ret = qxl_bo_kmap(bo, (void **)&surface_base);
bo                207 drivers/gpu/drm/qxl/qxl_draw.c 	qxl_bo_kunmap(bo);
bo                234 drivers/gpu/drm/qxl/qxl_draw.c 	drawable->u.copy.src_bitmap = qxl_bo_physical_address(qdev, dimage->bo, 0);
bo                162 drivers/gpu/drm/qxl/qxl_drv.h 	struct qxl_bo *bo;
bo                166 drivers/gpu/drm/qxl/qxl_drv.h 	struct qxl_bo *bo;
bo                306 drivers/gpu/drm/qxl/qxl_drv.h qxl_bo_physical_address(struct qxl_device *qdev, struct qxl_bo *bo,
bo                310 drivers/gpu/drm/qxl/qxl_drv.h 		(bo->tbo.mem.mem_type == TTM_PL_VRAM)
bo                313 drivers/gpu/drm/qxl/qxl_drv.h 	WARN_ON_ONCE((bo->tbo.offset & slot->gpu_offset) != slot->gpu_offset);
bo                316 drivers/gpu/drm/qxl/qxl_drv.h 	return slot->high_bits | (bo->tbo.offset - slot->gpu_offset + offset);
bo                344 drivers/gpu/drm/qxl/qxl_drv.h int qxl_bo_kmap(struct qxl_bo *bo, void **ptr);
bo                379 drivers/gpu/drm/qxl/qxl_drv.h 			   struct qxl_bo *bo);
bo                398 drivers/gpu/drm/qxl/qxl_drv.h int qxl_release_list_add(struct qxl_release *release, struct qxl_bo *bo);
bo                425 drivers/gpu/drm/qxl/qxl_drv.h 		       struct qxl_bo *bo,
bo                475 drivers/gpu/drm/qxl/qxl_drv.h int qxl_bo_check_id(struct qxl_device *qdev, struct qxl_bo *bo);
bo                 45 drivers/gpu/drm/qxl/qxl_image.c 	ret = qxl_alloc_bo_reserved(qdev, release, chunk_size, &chunk->bo);
bo                 70 drivers/gpu/drm/qxl/qxl_image.c 	ret = qxl_alloc_bo_reserved(qdev, release, sizeof(struct qxl_image), &image->bo);
bo                 78 drivers/gpu/drm/qxl/qxl_image.c 		qxl_bo_unref(&image->bo);
bo                 91 drivers/gpu/drm/qxl/qxl_image.c 		qxl_bo_unref(&chunk->bo);
bo                 95 drivers/gpu/drm/qxl/qxl_image.c 	qxl_bo_unref(&dimage->bo);
bo                122 drivers/gpu/drm/qxl/qxl_image.c 	chunk_bo = drv_chunk->bo;
bo                191 drivers/gpu/drm/qxl/qxl_image.c 	image_bo = dimage->bo;
bo                 32 drivers/gpu/drm/qxl/qxl_object.c 	struct qxl_bo *bo;
bo                 35 drivers/gpu/drm/qxl/qxl_object.c 	bo = to_qxl_bo(tbo);
bo                 36 drivers/gpu/drm/qxl/qxl_object.c 	qdev = (struct qxl_device *)bo->tbo.base.dev->dev_private;
bo                 38 drivers/gpu/drm/qxl/qxl_object.c 	qxl_surface_evict(qdev, bo, false);
bo                 39 drivers/gpu/drm/qxl/qxl_object.c 	WARN_ON_ONCE(bo->map_count > 0);
bo                 41 drivers/gpu/drm/qxl/qxl_object.c 	list_del_init(&bo->list);
bo                 43 drivers/gpu/drm/qxl/qxl_object.c 	drm_gem_object_release(&bo->tbo.base);
bo                 44 drivers/gpu/drm/qxl/qxl_object.c 	kfree(bo);
bo                 47 drivers/gpu/drm/qxl/qxl_object.c bool qxl_ttm_bo_is_qxl_bo(struct ttm_buffer_object *bo)
bo                 49 drivers/gpu/drm/qxl/qxl_object.c 	if (bo->destroy == &qxl_ttm_bo_destroy)
bo                 85 drivers/gpu/drm/qxl/qxl_object.c 	struct qxl_bo *bo;
bo                 94 drivers/gpu/drm/qxl/qxl_object.c 	bo = kzalloc(sizeof(struct qxl_bo), GFP_KERNEL);
bo                 95 drivers/gpu/drm/qxl/qxl_object.c 	if (bo == NULL)
bo                 98 drivers/gpu/drm/qxl/qxl_object.c 	r = drm_gem_object_init(&qdev->ddev, &bo->tbo.base, size);
bo                100 drivers/gpu/drm/qxl/qxl_object.c 		kfree(bo);
bo                103 drivers/gpu/drm/qxl/qxl_object.c 	bo->type = domain;
bo                104 drivers/gpu/drm/qxl/qxl_object.c 	bo->pin_count = pinned ? 1 : 0;
bo                105 drivers/gpu/drm/qxl/qxl_object.c 	bo->surface_id = 0;
bo                106 drivers/gpu/drm/qxl/qxl_object.c 	INIT_LIST_HEAD(&bo->list);
bo                109 drivers/gpu/drm/qxl/qxl_object.c 		bo->surf = *surf;
bo                111 drivers/gpu/drm/qxl/qxl_object.c 	qxl_ttm_placement_from_domain(bo, domain, pinned);
bo                113 drivers/gpu/drm/qxl/qxl_object.c 	r = ttm_bo_init(&qdev->mman.bdev, &bo->tbo, size, type,
bo                114 drivers/gpu/drm/qxl/qxl_object.c 			&bo->placement, 0, !kernel, size,
bo                123 drivers/gpu/drm/qxl/qxl_object.c 	*bo_ptr = bo;
bo                127 drivers/gpu/drm/qxl/qxl_object.c int qxl_bo_kmap(struct qxl_bo *bo, void **ptr)
bo                132 drivers/gpu/drm/qxl/qxl_object.c 	if (bo->kptr) {
bo                134 drivers/gpu/drm/qxl/qxl_object.c 			*ptr = bo->kptr;
bo                135 drivers/gpu/drm/qxl/qxl_object.c 		bo->map_count++;
bo                138 drivers/gpu/drm/qxl/qxl_object.c 	r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap);
bo                141 drivers/gpu/drm/qxl/qxl_object.c 	bo->kptr = ttm_kmap_obj_virtual(&bo->kmap, &is_iomem);
bo                143 drivers/gpu/drm/qxl/qxl_object.c 		*ptr = bo->kptr;
bo                144 drivers/gpu/drm/qxl/qxl_object.c 	bo->map_count = 1;
bo                149 drivers/gpu/drm/qxl/qxl_object.c 			      struct qxl_bo *bo, int page_offset)
bo                151 drivers/gpu/drm/qxl/qxl_object.c 	struct ttm_mem_type_manager *man = &bo->tbo.bdev->man[bo->tbo.mem.mem_type];
bo                156 drivers/gpu/drm/qxl/qxl_object.c 	if (bo->tbo.mem.mem_type == TTM_PL_VRAM)
bo                158 drivers/gpu/drm/qxl/qxl_object.c 	else if (bo->tbo.mem.mem_type == TTM_PL_PRIV)
bo                164 drivers/gpu/drm/qxl/qxl_object.c 	ret = ttm_mem_io_reserve(bo->tbo.bdev, &bo->tbo.mem);
bo                167 drivers/gpu/drm/qxl/qxl_object.c 	return io_mapping_map_atomic_wc(map, bo->tbo.mem.bus.offset + page_offset);
bo                169 drivers/gpu/drm/qxl/qxl_object.c 	if (bo->kptr) {
bo                170 drivers/gpu/drm/qxl/qxl_object.c 		rptr = bo->kptr + (page_offset * PAGE_SIZE);
bo                174 drivers/gpu/drm/qxl/qxl_object.c 	ret = qxl_bo_kmap(bo, &rptr);
bo                182 drivers/gpu/drm/qxl/qxl_object.c void qxl_bo_kunmap(struct qxl_bo *bo)
bo                184 drivers/gpu/drm/qxl/qxl_object.c 	if (bo->kptr == NULL)
bo                186 drivers/gpu/drm/qxl/qxl_object.c 	bo->map_count--;
bo                187 drivers/gpu/drm/qxl/qxl_object.c 	if (bo->map_count > 0)
bo                189 drivers/gpu/drm/qxl/qxl_object.c 	bo->kptr = NULL;
bo                190 drivers/gpu/drm/qxl/qxl_object.c 	ttm_bo_kunmap(&bo->kmap);
bo                194 drivers/gpu/drm/qxl/qxl_object.c 			       struct qxl_bo *bo, void *pmap)
bo                196 drivers/gpu/drm/qxl/qxl_object.c 	struct ttm_mem_type_manager *man = &bo->tbo.bdev->man[bo->tbo.mem.mem_type];
bo                198 drivers/gpu/drm/qxl/qxl_object.c 	if ((bo->tbo.mem.mem_type != TTM_PL_VRAM) &&
bo                199 drivers/gpu/drm/qxl/qxl_object.c 	    (bo->tbo.mem.mem_type != TTM_PL_PRIV))
bo                205 drivers/gpu/drm/qxl/qxl_object.c 	ttm_mem_io_free(bo->tbo.bdev, &bo->tbo.mem);
bo                209 drivers/gpu/drm/qxl/qxl_object.c 	qxl_bo_kunmap(bo);
bo                212 drivers/gpu/drm/qxl/qxl_object.c void qxl_bo_unref(struct qxl_bo **bo)
bo                214 drivers/gpu/drm/qxl/qxl_object.c 	if ((*bo) == NULL)
bo                217 drivers/gpu/drm/qxl/qxl_object.c 	drm_gem_object_put_unlocked(&(*bo)->tbo.base);
bo                218 drivers/gpu/drm/qxl/qxl_object.c 	*bo = NULL;
bo                221 drivers/gpu/drm/qxl/qxl_object.c struct qxl_bo *qxl_bo_ref(struct qxl_bo *bo)
bo                223 drivers/gpu/drm/qxl/qxl_object.c 	drm_gem_object_get(&bo->tbo.base);
bo                224 drivers/gpu/drm/qxl/qxl_object.c 	return bo;
bo                227 drivers/gpu/drm/qxl/qxl_object.c static int __qxl_bo_pin(struct qxl_bo *bo)
bo                230 drivers/gpu/drm/qxl/qxl_object.c 	struct drm_device *ddev = bo->tbo.base.dev;
bo                233 drivers/gpu/drm/qxl/qxl_object.c 	if (bo->pin_count) {
bo                234 drivers/gpu/drm/qxl/qxl_object.c 		bo->pin_count++;
bo                237 drivers/gpu/drm/qxl/qxl_object.c 	qxl_ttm_placement_from_domain(bo, bo->type, true);
bo                238 drivers/gpu/drm/qxl/qxl_object.c 	r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
bo                240 drivers/gpu/drm/qxl/qxl_object.c 		bo->pin_count = 1;
bo                243 drivers/gpu/drm/qxl/qxl_object.c 		dev_err(ddev->dev, "%p pin failed\n", bo);
bo                247 drivers/gpu/drm/qxl/qxl_object.c static int __qxl_bo_unpin(struct qxl_bo *bo)
bo                250 drivers/gpu/drm/qxl/qxl_object.c 	struct drm_device *ddev = bo->tbo.base.dev;
bo                253 drivers/gpu/drm/qxl/qxl_object.c 	if (!bo->pin_count) {
bo                254 drivers/gpu/drm/qxl/qxl_object.c 		dev_warn(ddev->dev, "%p unpin not necessary\n", bo);
bo                257 drivers/gpu/drm/qxl/qxl_object.c 	bo->pin_count--;
bo                258 drivers/gpu/drm/qxl/qxl_object.c 	if (bo->pin_count)
bo                260 drivers/gpu/drm/qxl/qxl_object.c 	for (i = 0; i < bo->placement.num_placement; i++)
bo                261 drivers/gpu/drm/qxl/qxl_object.c 		bo->placements[i].flags &= ~TTM_PL_FLAG_NO_EVICT;
bo                262 drivers/gpu/drm/qxl/qxl_object.c 	r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
bo                264 drivers/gpu/drm/qxl/qxl_object.c 		dev_err(ddev->dev, "%p validate failed for unpin\n", bo);
bo                273 drivers/gpu/drm/qxl/qxl_object.c int qxl_bo_pin(struct qxl_bo *bo)
bo                277 drivers/gpu/drm/qxl/qxl_object.c 	r = qxl_bo_reserve(bo, false);
bo                281 drivers/gpu/drm/qxl/qxl_object.c 	r = __qxl_bo_pin(bo);
bo                282 drivers/gpu/drm/qxl/qxl_object.c 	qxl_bo_unreserve(bo);
bo                291 drivers/gpu/drm/qxl/qxl_object.c int qxl_bo_unpin(struct qxl_bo *bo)
bo                295 drivers/gpu/drm/qxl/qxl_object.c 	r = qxl_bo_reserve(bo, false);
bo                299 drivers/gpu/drm/qxl/qxl_object.c 	r = __qxl_bo_unpin(bo);
bo                300 drivers/gpu/drm/qxl/qxl_object.c 	qxl_bo_unreserve(bo);
bo                306 drivers/gpu/drm/qxl/qxl_object.c 	struct qxl_bo *bo, *n;
bo                311 drivers/gpu/drm/qxl/qxl_object.c 	list_for_each_entry_safe(bo, n, &qdev->gem.objects, list) {
bo                313 drivers/gpu/drm/qxl/qxl_object.c 			&bo->tbo.base, bo, (unsigned long)bo->tbo.base.size,
bo                314 drivers/gpu/drm/qxl/qxl_object.c 			*((unsigned long *)&bo->tbo.base.refcount));
bo                316 drivers/gpu/drm/qxl/qxl_object.c 		list_del_init(&bo->list);
bo                319 drivers/gpu/drm/qxl/qxl_object.c 		drm_gem_object_put_unlocked(&bo->tbo.base);
bo                333 drivers/gpu/drm/qxl/qxl_object.c int qxl_bo_check_id(struct qxl_device *qdev, struct qxl_bo *bo)
bo                337 drivers/gpu/drm/qxl/qxl_object.c 	if (bo->type == QXL_GEM_DOMAIN_SURFACE && bo->surface_id == 0) {
bo                339 drivers/gpu/drm/qxl/qxl_object.c 		ret = qxl_surface_id_alloc(qdev, bo);
bo                343 drivers/gpu/drm/qxl/qxl_object.c 		ret = qxl_hw_surface_alloc(qdev, bo);
bo                 30 drivers/gpu/drm/qxl/qxl_object.h static inline int qxl_bo_reserve(struct qxl_bo *bo, bool no_wait)
bo                 34 drivers/gpu/drm/qxl/qxl_object.h 	r = ttm_bo_reserve(&bo->tbo, true, no_wait, NULL);
bo                 37 drivers/gpu/drm/qxl/qxl_object.h 			struct drm_device *ddev = bo->tbo.base.dev;
bo                 39 drivers/gpu/drm/qxl/qxl_object.h 			dev_err(ddev->dev, "%p reserve failed\n", bo);
bo                 46 drivers/gpu/drm/qxl/qxl_object.h static inline void qxl_bo_unreserve(struct qxl_bo *bo)
bo                 48 drivers/gpu/drm/qxl/qxl_object.h 	ttm_bo_unreserve(&bo->tbo);
bo                 51 drivers/gpu/drm/qxl/qxl_object.h static inline u64 qxl_bo_gpu_offset(struct qxl_bo *bo)
bo                 53 drivers/gpu/drm/qxl/qxl_object.h 	return bo->tbo.offset;
bo                 56 drivers/gpu/drm/qxl/qxl_object.h static inline unsigned long qxl_bo_size(struct qxl_bo *bo)
bo                 58 drivers/gpu/drm/qxl/qxl_object.h 	return bo->tbo.num_pages << PAGE_SHIFT;
bo                 61 drivers/gpu/drm/qxl/qxl_object.h static inline u64 qxl_bo_mmap_offset(struct qxl_bo *bo)
bo                 63 drivers/gpu/drm/qxl/qxl_object.h 	return drm_vma_node_offset_addr(&bo->tbo.base.vma_node);
bo                 66 drivers/gpu/drm/qxl/qxl_object.h static inline int qxl_bo_wait(struct qxl_bo *bo, u32 *mem_type,
bo                 71 drivers/gpu/drm/qxl/qxl_object.h 	r = ttm_bo_reserve(&bo->tbo, true, no_wait, NULL);
bo                 74 drivers/gpu/drm/qxl/qxl_object.h 			struct drm_device *ddev = bo->tbo.base.dev;
bo                 77 drivers/gpu/drm/qxl/qxl_object.h 				bo);
bo                 82 drivers/gpu/drm/qxl/qxl_object.h 		*mem_type = bo->tbo.mem.mem_type;
bo                 84 drivers/gpu/drm/qxl/qxl_object.h 	r = ttm_bo_wait(&bo->tbo, true, no_wait);
bo                 85 drivers/gpu/drm/qxl/qxl_object.h 	ttm_bo_unreserve(&bo->tbo);
bo                 94 drivers/gpu/drm/qxl/qxl_object.h extern int qxl_bo_kmap(struct qxl_bo *bo, void **ptr);
bo                 95 drivers/gpu/drm/qxl/qxl_object.h extern void qxl_bo_kunmap(struct qxl_bo *bo);
bo                 96 drivers/gpu/drm/qxl/qxl_object.h void *qxl_bo_kmap_atomic_page(struct qxl_device *qdev, struct qxl_bo *bo, int page_offset);
bo                 97 drivers/gpu/drm/qxl/qxl_object.h void qxl_bo_kunmap_atomic_page(struct qxl_device *qdev, struct qxl_bo *bo, void *map);
bo                 98 drivers/gpu/drm/qxl/qxl_object.h extern struct qxl_bo *qxl_bo_ref(struct qxl_bo *bo);
bo                 99 drivers/gpu/drm/qxl/qxl_object.h extern void qxl_bo_unref(struct qxl_bo **bo);
bo                100 drivers/gpu/drm/qxl/qxl_object.h extern int qxl_bo_pin(struct qxl_bo *bo);
bo                101 drivers/gpu/drm/qxl/qxl_object.h extern int qxl_bo_unpin(struct qxl_bo *bo);
bo                103 drivers/gpu/drm/qxl/qxl_object.h extern bool qxl_ttm_bo_is_qxl_bo(struct ttm_buffer_object *bo);
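
qxl_object.c above caches CPU mappings on the BO itself: the first qxl_bo_kmap() fills bo->kptr through TTM, later callers only bump bo->map_count, and qxl_bo_kunmap() tears the mapping down once the count reaches zero. A reconstruction of the kmap side; lines not containing the identifier are absent from the index and are filled in from context:

        int qxl_bo_kmap(struct qxl_bo *bo, void **ptr)
        {
                bool is_iomem;
                int r;

                if (bo->kptr) {
                        if (ptr)
                                *ptr = bo->kptr;
                        bo->map_count++;        /* reuse the cached mapping */
                        return 0;
                }
                r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap);
                if (r)
                        return r;
                bo->kptr = ttm_kmap_obj_virtual(&bo->kmap, &is_iomem);
                if (ptr)
                        *ptr = bo->kptr;
                bo->map_count = 1;
                return 0;
        }
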
bo                 33 drivers/gpu/drm/qxl/qxl_prime.c 	struct qxl_bo *bo = gem_to_qxl_bo(obj);
bo                 35 drivers/gpu/drm/qxl/qxl_prime.c 	return qxl_bo_pin(bo);
bo                 40 drivers/gpu/drm/qxl/qxl_prime.c 	struct qxl_bo *bo = gem_to_qxl_bo(obj);
bo                 42 drivers/gpu/drm/qxl/qxl_prime.c 	qxl_bo_unpin(bo);
bo                 59 drivers/gpu/drm/qxl/qxl_prime.c 	struct qxl_bo *bo = gem_to_qxl_bo(obj);
bo                 63 drivers/gpu/drm/qxl/qxl_prime.c 	ret = qxl_bo_kmap(bo, &ptr);
bo                 72 drivers/gpu/drm/qxl/qxl_prime.c 	struct qxl_bo *bo = gem_to_qxl_bo(obj);
bo                 74 drivers/gpu/drm/qxl/qxl_prime.c 	qxl_bo_kunmap(bo);
bo                164 drivers/gpu/drm/qxl/qxl_release.c 		struct qxl_bo *bo;
bo                168 drivers/gpu/drm/qxl/qxl_release.c 		bo = to_qxl_bo(entry->tv.bo);
bo                169 drivers/gpu/drm/qxl/qxl_release.c 		qxl_bo_unref(&bo);
bo                202 drivers/gpu/drm/qxl/qxl_release.c 				struct qxl_bo **bo)
bo                206 drivers/gpu/drm/qxl/qxl_release.c 			     QXL_GEM_DOMAIN_VRAM, NULL, bo);
bo                209 drivers/gpu/drm/qxl/qxl_release.c int qxl_release_list_add(struct qxl_release *release, struct qxl_bo *bo)
bo                214 drivers/gpu/drm/qxl/qxl_release.c 		if (entry->tv.bo == &bo->tbo)
bo                222 drivers/gpu/drm/qxl/qxl_release.c 	qxl_bo_ref(bo);
bo                223 drivers/gpu/drm/qxl/qxl_release.c 	entry->tv.bo = &bo->tbo;
bo                229 drivers/gpu/drm/qxl/qxl_release.c static int qxl_release_validate_bo(struct qxl_bo *bo)
bo                234 drivers/gpu/drm/qxl/qxl_release.c 	if (!bo->pin_count) {
bo                235 drivers/gpu/drm/qxl/qxl_release.c 		qxl_ttm_placement_from_domain(bo, bo->type, false);
bo                236 drivers/gpu/drm/qxl/qxl_release.c 		ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
bo                241 drivers/gpu/drm/qxl/qxl_release.c 	ret = dma_resv_reserve_shared(bo->tbo.base.resv, 1);
bo                246 drivers/gpu/drm/qxl/qxl_release.c 	ret = qxl_bo_check_id(bo->tbo.base.dev->dev_private, bo);
bo                268 drivers/gpu/drm/qxl/qxl_release.c 		struct qxl_bo *bo = to_qxl_bo(entry->tv.bo);
bo                270 drivers/gpu/drm/qxl/qxl_release.c 		ret = qxl_release_validate_bo(bo);
bo                296 drivers/gpu/drm/qxl/qxl_release.c 		struct qxl_bo *bo;
bo                303 drivers/gpu/drm/qxl/qxl_release.c 		bo = create_rel->release_bo;
bo                305 drivers/gpu/drm/qxl/qxl_release.c 		(*release)->release_bo = bo;
bo                308 drivers/gpu/drm/qxl/qxl_release.c 		qxl_release_list_add(*release, bo);
bo                324 drivers/gpu/drm/qxl/qxl_release.c 	struct qxl_bo *bo;
bo                363 drivers/gpu/drm/qxl/qxl_release.c 	bo = qxl_bo_ref(qdev->current_release_bo[cur_idx]);
bo                365 drivers/gpu/drm/qxl/qxl_release.c 	(*release)->release_bo = bo;
bo                370 drivers/gpu/drm/qxl/qxl_release.c 		*rbo = bo;
bo                374 drivers/gpu/drm/qxl/qxl_release.c 	ret = qxl_release_list_add(*release, bo);
bo                375 drivers/gpu/drm/qxl/qxl_release.c 	qxl_bo_unref(&bo);
bo                409 drivers/gpu/drm/qxl/qxl_release.c 	struct qxl_bo *bo = release->release_bo;
bo                411 drivers/gpu/drm/qxl/qxl_release.c 	ptr = qxl_bo_kmap_atomic_page(qdev, bo, release->release_offset & PAGE_MASK);
bo                422 drivers/gpu/drm/qxl/qxl_release.c 	struct qxl_bo *bo = release->release_bo;
bo                426 drivers/gpu/drm/qxl/qxl_release.c 	qxl_bo_kunmap_atomic_page(qdev, bo, ptr);
bo                431 drivers/gpu/drm/qxl/qxl_release.c 	struct ttm_buffer_object *bo;
bo                442 drivers/gpu/drm/qxl/qxl_release.c 	bo = list_first_entry(&release->bos, struct ttm_validate_buffer, head)->bo;
bo                443 drivers/gpu/drm/qxl/qxl_release.c 	bdev = bo->bdev;
bo                459 drivers/gpu/drm/qxl/qxl_release.c 		bo = entry->bo;
bo                461 drivers/gpu/drm/qxl/qxl_release.c 		dma_resv_add_shared_fence(bo->base.resv, &release->base);
bo                462 drivers/gpu/drm/qxl/qxl_release.c 		ttm_bo_add_to_lru(bo);
bo                463 drivers/gpu/drm/qxl/qxl_release.c 		dma_resv_unlock(bo->base.resv);
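
The tail of the qxl_release.c entries is the fence publication loop: once the command is queued, the release's fence is attached to every reserved BO, the BO goes back on the TTM LRU, and its reservation lock is dropped. A sketch of that loop, with the entry declaration inferred from the ttm_validate_buffer usage above:

        struct ttm_validate_buffer *entry;
        struct ttm_buffer_object *bo;

        list_for_each_entry(entry, &release->bos, head) {
                bo = entry->bo;

                dma_resv_add_shared_fence(bo->base.resv, &release->base);
                ttm_bo_add_to_lru(bo);          /* make it evictable again */
                dma_resv_unlock(bo->base.resv);
        }
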
bo                 56 drivers/gpu/drm/qxl/qxl_ttm.c 	struct ttm_buffer_object *bo;
bo                 59 drivers/gpu/drm/qxl/qxl_ttm.c 	bo = (struct ttm_buffer_object *)vmf->vma->vm_private_data;
bo                 60 drivers/gpu/drm/qxl/qxl_ttm.c 	if (bo == NULL)
bo                132 drivers/gpu/drm/qxl/qxl_ttm.c static void qxl_evict_flags(struct ttm_buffer_object *bo,
bo                142 drivers/gpu/drm/qxl/qxl_ttm.c 	if (!qxl_ttm_bo_is_qxl_bo(bo)) {
bo                149 drivers/gpu/drm/qxl/qxl_ttm.c 	qbo = to_qxl_bo(bo);
bo                154 drivers/gpu/drm/qxl/qxl_ttm.c static int qxl_verify_access(struct ttm_buffer_object *bo, struct file *filp)
bo                156 drivers/gpu/drm/qxl/qxl_ttm.c 	struct qxl_bo *qbo = to_qxl_bo(bo);
bo                243 drivers/gpu/drm/qxl/qxl_ttm.c static struct ttm_tt *qxl_ttm_tt_create(struct ttm_buffer_object *bo,
bo                249 drivers/gpu/drm/qxl/qxl_ttm.c 	qdev = qxl_get_qdev(bo->bdev);
bo                255 drivers/gpu/drm/qxl/qxl_ttm.c 	if (ttm_tt_init(&gtt->ttm, bo, page_flags)) {
bo                262 drivers/gpu/drm/qxl/qxl_ttm.c static void qxl_move_null(struct ttm_buffer_object *bo,
bo                265 drivers/gpu/drm/qxl/qxl_ttm.c 	struct ttm_mem_reg *old_mem = &bo->mem;
bo                272 drivers/gpu/drm/qxl/qxl_ttm.c static int qxl_bo_move(struct ttm_buffer_object *bo, bool evict,
bo                276 drivers/gpu/drm/qxl/qxl_ttm.c 	struct ttm_mem_reg *old_mem = &bo->mem;
bo                279 drivers/gpu/drm/qxl/qxl_ttm.c 	ret = ttm_bo_wait(bo, ctx->interruptible, ctx->no_wait_gpu);
bo                283 drivers/gpu/drm/qxl/qxl_ttm.c 	if (old_mem->mem_type == TTM_PL_SYSTEM && bo->ttm == NULL) {
bo                284 drivers/gpu/drm/qxl/qxl_ttm.c 		qxl_move_null(bo, new_mem);
bo                287 drivers/gpu/drm/qxl/qxl_ttm.c 	return ttm_bo_move_memcpy(bo, ctx, new_mem);
bo                290 drivers/gpu/drm/qxl/qxl_ttm.c static void qxl_bo_move_notify(struct ttm_buffer_object *bo,
bo                297 drivers/gpu/drm/qxl/qxl_ttm.c 	if (!qxl_ttm_bo_is_qxl_bo(bo))
bo                299 drivers/gpu/drm/qxl/qxl_ttm.c 	qbo = to_qxl_bo(bo);
bo                302 drivers/gpu/drm/qxl/qxl_ttm.c 	if (bo->mem.mem_type == TTM_PL_PRIV && qbo->surface_id)
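
qxl_bo_move() above has the classic two-tier TTM move: wait for the BO to idle, take the cheap "null move" when a system-memory BO has no populated ttm, and otherwise fall back to a CPU memcpy move. The shape, verbatim from the excerpts:

        ret = ttm_bo_wait(bo, ctx->interruptible, ctx->no_wait_gpu);
        if (ret)
                return ret;

        if (old_mem->mem_type == TTM_PL_SYSTEM && bo->ttm == NULL) {
                qxl_move_null(bo, new_mem);     /* just retarget bo->mem */
                return 0;
        }
        return ttm_bo_move_memcpy(bo, ctx, new_mem);
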
bo                442 drivers/gpu/drm/radeon/radeon.h 	struct radeon_bo *bo;
bo                483 drivers/gpu/drm/radeon/radeon.h 	struct radeon_bo		*bo;
bo                544 drivers/gpu/drm/radeon/radeon.h 	struct radeon_bo	*bo;
bo                903 drivers/gpu/drm/radeon/radeon.h 	struct radeon_bo		*bo;
bo               1784 drivers/gpu/drm/radeon/radeon.h int radeon_mn_register(struct radeon_bo *bo, unsigned long addr);
bo               1785 drivers/gpu/drm/radeon/radeon.h void radeon_mn_unregister(struct radeon_bo *bo);
bo               1787 drivers/gpu/drm/radeon/radeon.h static inline int radeon_mn_register(struct radeon_bo *bo, unsigned long addr)
bo               1791 drivers/gpu/drm/radeon/radeon.h static inline void radeon_mn_unregister(struct radeon_bo *bo) {}
bo               2812 drivers/gpu/drm/radeon/radeon.h extern bool radeon_ttm_bo_is_radeon_bo(struct ttm_buffer_object *bo);
bo               2856 drivers/gpu/drm/radeon/radeon.h 			     struct radeon_bo *bo);
bo               2858 drivers/gpu/drm/radeon/radeon.h 				       struct radeon_bo *bo);
bo               2861 drivers/gpu/drm/radeon/radeon.h 				      struct radeon_bo *bo);
bo                186 drivers/gpu/drm/radeon/radeon_cs.c 		p->relocs[i].tv.bo = &p->relocs[i].robj->tbo;
bo                442 drivers/gpu/drm/radeon/radeon_cs.c 			struct radeon_bo *bo = parser->relocs[i].robj;
bo                443 drivers/gpu/drm/radeon/radeon_cs.c 			if (bo == NULL)
bo                446 drivers/gpu/drm/radeon/radeon_cs.c 			drm_gem_object_put_unlocked(&bo->tbo.base);
bo                518 drivers/gpu/drm/radeon/radeon_cs.c 				&rdev->ring_tmp_bo.bo->tbo.mem);
bo                523 drivers/gpu/drm/radeon/radeon_cs.c 		struct radeon_bo *bo;
bo                525 drivers/gpu/drm/radeon/radeon_cs.c 		bo = p->relocs[i].robj;
bo                526 drivers/gpu/drm/radeon/radeon_cs.c 		bo_va = radeon_vm_bo_find(vm, bo);
bo                528 drivers/gpu/drm/radeon/radeon_cs.c 			dev_err(rdev->dev, "bo %p not in vm %p\n", bo, vm);
bo                532 drivers/gpu/drm/radeon/radeon_cs.c 		r = radeon_vm_bo_update(rdev, bo_va, &bo->tbo.mem);
bo                246 drivers/gpu/drm/radeon/radeon_device.c 			if (rdev->surface_regs[i].bo)
bo                247 drivers/gpu/drm/radeon/radeon_device.c 				radeon_bo_get_surface_reg(rdev->surface_regs[i].bo);
bo                295 drivers/gpu/drm/radeon/radeon_gem.c 	struct radeon_bo *bo;
bo                332 drivers/gpu/drm/radeon/radeon_gem.c 	bo = gem_to_radeon_bo(gobj);
bo                333 drivers/gpu/drm/radeon/radeon_gem.c 	r = radeon_ttm_tt_set_userptr(bo->tbo.ttm, args->addr, args->flags);
bo                338 drivers/gpu/drm/radeon/radeon_gem.c 		r = radeon_mn_register(bo, args->addr);
bo                345 drivers/gpu/drm/radeon/radeon_gem.c 		r = radeon_bo_reserve(bo, true);
bo                351 drivers/gpu/drm/radeon/radeon_gem.c 		radeon_ttm_placement_from_domain(bo, RADEON_GEM_DOMAIN_GTT);
bo                352 drivers/gpu/drm/radeon/radeon_gem.c 		r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
bo                353 drivers/gpu/drm/radeon/radeon_gem.c 		radeon_bo_unreserve(bo);
bo                561 drivers/gpu/drm/radeon/radeon_gem.c 	tv.bo = &bo_va->bo->tbo;
bo                574 drivers/gpu/drm/radeon/radeon_gem.c 		domain = radeon_mem_type_to_domain(entry->bo->mem.mem_type);
bo                587 drivers/gpu/drm/radeon/radeon_gem.c 		r = radeon_vm_bo_update(rdev, bo_va, &bo_va->bo->tbo.mem);
bo                663 drivers/gpu/drm/radeon/radeon_kms.c 			r = radeon_bo_reserve(rdev->ring_tmp_bo.bo, false);
bo                673 drivers/gpu/drm/radeon/radeon_kms.c 							rdev->ring_tmp_bo.bo);
bo                726 drivers/gpu/drm/radeon/radeon_kms.c 			r = radeon_bo_reserve(rdev->ring_tmp_bo.bo, false);
bo                730 drivers/gpu/drm/radeon/radeon_kms.c 				radeon_bo_unreserve(rdev->ring_tmp_bo.bo);
bo                 86 drivers/gpu/drm/radeon/radeon_mn.c 		struct radeon_bo *bo;
bo                 97 drivers/gpu/drm/radeon/radeon_mn.c 		list_for_each_entry(bo, &node->bos, mn_list) {
bo                 99 drivers/gpu/drm/radeon/radeon_mn.c 			if (!bo->tbo.ttm || bo->tbo.ttm->state != tt_bound)
bo                102 drivers/gpu/drm/radeon/radeon_mn.c 			r = radeon_bo_reserve(bo, true);
bo                108 drivers/gpu/drm/radeon/radeon_mn.c 			r = dma_resv_wait_timeout_rcu(bo->tbo.base.resv,
bo                113 drivers/gpu/drm/radeon/radeon_mn.c 			radeon_ttm_placement_from_domain(bo, RADEON_GEM_DOMAIN_CPU);
bo                114 drivers/gpu/drm/radeon/radeon_mn.c 			r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
bo                118 drivers/gpu/drm/radeon/radeon_mn.c 			radeon_bo_unreserve(bo);
bo                175 drivers/gpu/drm/radeon/radeon_mn.c int radeon_mn_register(struct radeon_bo *bo, unsigned long addr)
bo                177 drivers/gpu/drm/radeon/radeon_mn.c 	unsigned long end = addr + radeon_bo_size(bo) - 1;
bo                210 drivers/gpu/drm/radeon/radeon_mn.c 	bo->mn = rmn;
bo                216 drivers/gpu/drm/radeon/radeon_mn.c 	list_add(&bo->mn_list, &node->bos);
bo                232 drivers/gpu/drm/radeon/radeon_mn.c void radeon_mn_unregister(struct radeon_bo *bo)
bo                234 drivers/gpu/drm/radeon/radeon_mn.c 	struct radeon_mn *rmn = bo->mn;
bo                242 drivers/gpu/drm/radeon/radeon_mn.c 	head = bo->mn_list.next;
bo                244 drivers/gpu/drm/radeon/radeon_mn.c 	list_del(&bo->mn_list);
bo                256 drivers/gpu/drm/radeon/radeon_mn.c 	bo->mn = NULL;
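
The radeon_mn.c entries outline what happens when the MMU notifier fires on a userptr range: each bound BO in the affected interval is reserved, pending GPU work on its reservation object is waited out, and the BO is validated back into the CPU domain so the user pages can be released. A sketch; the wait arguments and the error handling are assumptions about lines the index omits:

        list_for_each_entry(bo, &node->bos, mn_list) {
                if (!bo->tbo.ttm || bo->tbo.ttm->state != tt_bound)
                        continue;       /* nothing is mapped to the GPU */

                r = radeon_bo_reserve(bo, true);
                if (r)
                        continue;       /* original logs the failure */

                /* Wait for all fences on the BO (assumed arguments). */
                r = dma_resv_wait_timeout_rcu(bo->tbo.base.resv, true, false,
                                              MAX_SCHEDULE_TIMEOUT);

                radeon_ttm_placement_from_domain(bo, RADEON_GEM_DOMAIN_CPU);
                r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);

                radeon_bo_unreserve(bo);
        }
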
bo                 46 drivers/gpu/drm/radeon/radeon_object.c static void radeon_bo_clear_surface_reg(struct radeon_bo *bo);
bo                 53 drivers/gpu/drm/radeon/radeon_object.c static void radeon_update_memory_usage(struct radeon_bo *bo,
bo                 56 drivers/gpu/drm/radeon/radeon_object.c 	struct radeon_device *rdev = bo->rdev;
bo                 57 drivers/gpu/drm/radeon/radeon_object.c 	u64 size = (u64)bo->tbo.num_pages << PAGE_SHIFT;
bo                 77 drivers/gpu/drm/radeon/radeon_object.c 	struct radeon_bo *bo;
bo                 79 drivers/gpu/drm/radeon/radeon_object.c 	bo = container_of(tbo, struct radeon_bo, tbo);
bo                 81 drivers/gpu/drm/radeon/radeon_object.c 	radeon_update_memory_usage(bo, bo->tbo.mem.mem_type, -1);
bo                 83 drivers/gpu/drm/radeon/radeon_object.c 	mutex_lock(&bo->rdev->gem.mutex);
bo                 84 drivers/gpu/drm/radeon/radeon_object.c 	list_del_init(&bo->list);
bo                 85 drivers/gpu/drm/radeon/radeon_object.c 	mutex_unlock(&bo->rdev->gem.mutex);
bo                 86 drivers/gpu/drm/radeon/radeon_object.c 	radeon_bo_clear_surface_reg(bo);
bo                 87 drivers/gpu/drm/radeon/radeon_object.c 	WARN_ON_ONCE(!list_empty(&bo->va));
bo                 88 drivers/gpu/drm/radeon/radeon_object.c 	if (bo->tbo.base.import_attach)
bo                 89 drivers/gpu/drm/radeon/radeon_object.c 		drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg);
bo                 90 drivers/gpu/drm/radeon/radeon_object.c 	drm_gem_object_release(&bo->tbo.base);
bo                 91 drivers/gpu/drm/radeon/radeon_object.c 	kfree(bo);
bo                 94 drivers/gpu/drm/radeon/radeon_object.c bool radeon_ttm_bo_is_radeon_bo(struct ttm_buffer_object *bo)
bo                 96 drivers/gpu/drm/radeon/radeon_object.c 	if (bo->destroy == &radeon_ttm_bo_destroy)
bo                189 drivers/gpu/drm/radeon/radeon_object.c 	struct radeon_bo *bo;
bo                209 drivers/gpu/drm/radeon/radeon_object.c 	bo = kzalloc(sizeof(struct radeon_bo), GFP_KERNEL);
bo                210 drivers/gpu/drm/radeon/radeon_object.c 	if (bo == NULL)
bo                212 drivers/gpu/drm/radeon/radeon_object.c 	drm_gem_private_object_init(rdev->ddev, &bo->tbo.base, size);
bo                213 drivers/gpu/drm/radeon/radeon_object.c 	bo->rdev = rdev;
bo                214 drivers/gpu/drm/radeon/radeon_object.c 	bo->surface_reg = -1;
bo                215 drivers/gpu/drm/radeon/radeon_object.c 	INIT_LIST_HEAD(&bo->list);
bo                216 drivers/gpu/drm/radeon/radeon_object.c 	INIT_LIST_HEAD(&bo->va);
bo                217 drivers/gpu/drm/radeon/radeon_object.c 	bo->initial_domain = domain & (RADEON_GEM_DOMAIN_VRAM |
bo                221 drivers/gpu/drm/radeon/radeon_object.c 	bo->flags = flags;
bo                224 drivers/gpu/drm/radeon/radeon_object.c 		bo->flags &= ~(RADEON_GEM_GTT_WC | RADEON_GEM_GTT_UC);
bo                230 drivers/gpu/drm/radeon/radeon_object.c 		bo->flags &= ~(RADEON_GEM_GTT_WC | RADEON_GEM_GTT_UC);
bo                236 drivers/gpu/drm/radeon/radeon_object.c 	bo->flags &= ~(RADEON_GEM_GTT_WC | RADEON_GEM_GTT_UC);
bo                247 drivers/gpu/drm/radeon/radeon_object.c 	if (bo->flags & RADEON_GEM_GTT_WC)
bo                250 drivers/gpu/drm/radeon/radeon_object.c 	bo->flags &= ~(RADEON_GEM_GTT_WC | RADEON_GEM_GTT_UC);
bo                256 drivers/gpu/drm/radeon/radeon_object.c 		bo->flags &= ~RADEON_GEM_GTT_WC;
bo                259 drivers/gpu/drm/radeon/radeon_object.c 	radeon_ttm_placement_from_domain(bo, domain);
bo                262 drivers/gpu/drm/radeon/radeon_object.c 	r = ttm_bo_init(&rdev->mman.bdev, &bo->tbo, size, type,
bo                263 drivers/gpu/drm/radeon/radeon_object.c 			&bo->placement, page_align, !kernel, acc_size,
bo                269 drivers/gpu/drm/radeon/radeon_object.c 	*bo_ptr = bo;
bo                271 drivers/gpu/drm/radeon/radeon_object.c 	trace_radeon_bo_create(bo);
bo                276 drivers/gpu/drm/radeon/radeon_object.c int radeon_bo_kmap(struct radeon_bo *bo, void **ptr)
bo                281 drivers/gpu/drm/radeon/radeon_object.c 	if (bo->kptr) {
bo                283 drivers/gpu/drm/radeon/radeon_object.c 			*ptr = bo->kptr;
bo                287 drivers/gpu/drm/radeon/radeon_object.c 	r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap);
bo                291 drivers/gpu/drm/radeon/radeon_object.c 	bo->kptr = ttm_kmap_obj_virtual(&bo->kmap, &is_iomem);
bo                293 drivers/gpu/drm/radeon/radeon_object.c 		*ptr = bo->kptr;
bo                295 drivers/gpu/drm/radeon/radeon_object.c 	radeon_bo_check_tiling(bo, 0, 0);
bo                299 drivers/gpu/drm/radeon/radeon_object.c void radeon_bo_kunmap(struct radeon_bo *bo)
bo                301 drivers/gpu/drm/radeon/radeon_object.c 	if (bo->kptr == NULL)
bo                303 drivers/gpu/drm/radeon/radeon_object.c 	bo->kptr = NULL;
bo                304 drivers/gpu/drm/radeon/radeon_object.c 	radeon_bo_check_tiling(bo, 0, 0);
bo                305 drivers/gpu/drm/radeon/radeon_object.c 	ttm_bo_kunmap(&bo->kmap);
bo                308 drivers/gpu/drm/radeon/radeon_object.c struct radeon_bo *radeon_bo_ref(struct radeon_bo *bo)
bo                310 drivers/gpu/drm/radeon/radeon_object.c 	if (bo == NULL)
bo                313 drivers/gpu/drm/radeon/radeon_object.c 	ttm_bo_get(&bo->tbo);
bo                314 drivers/gpu/drm/radeon/radeon_object.c 	return bo;
bo                317 drivers/gpu/drm/radeon/radeon_object.c void radeon_bo_unref(struct radeon_bo **bo)
bo                321 drivers/gpu/drm/radeon/radeon_object.c 	if ((*bo) == NULL)
bo                323 drivers/gpu/drm/radeon/radeon_object.c 	tbo = &((*bo)->tbo);
bo                325 drivers/gpu/drm/radeon/radeon_object.c 	*bo = NULL;
bo                328 drivers/gpu/drm/radeon/radeon_object.c int radeon_bo_pin_restricted(struct radeon_bo *bo, u32 domain, u64 max_offset,
bo                334 drivers/gpu/drm/radeon/radeon_object.c 	if (radeon_ttm_tt_has_userptr(bo->tbo.ttm))
bo                337 drivers/gpu/drm/radeon/radeon_object.c 	if (bo->pin_count) {
bo                338 drivers/gpu/drm/radeon/radeon_object.c 		bo->pin_count++;
bo                340 drivers/gpu/drm/radeon/radeon_object.c 			*gpu_addr = radeon_bo_gpu_offset(bo);
bo                346 drivers/gpu/drm/radeon/radeon_object.c 				domain_start = bo->rdev->mc.vram_start;
bo                348 drivers/gpu/drm/radeon/radeon_object.c 				domain_start = bo->rdev->mc.gtt_start;
bo                350 drivers/gpu/drm/radeon/radeon_object.c 				     (radeon_bo_gpu_offset(bo) - domain_start));
bo                355 drivers/gpu/drm/radeon/radeon_object.c 	if (bo->prime_shared_count && domain == RADEON_GEM_DOMAIN_VRAM) {
bo                360 drivers/gpu/drm/radeon/radeon_object.c 	radeon_ttm_placement_from_domain(bo, domain);
bo                361 drivers/gpu/drm/radeon/radeon_object.c 	for (i = 0; i < bo->placement.num_placement; i++) {
bo                363 drivers/gpu/drm/radeon/radeon_object.c 		if ((bo->placements[i].flags & TTM_PL_FLAG_VRAM) &&
bo                364 drivers/gpu/drm/radeon/radeon_object.c 		    !(bo->flags & RADEON_GEM_NO_CPU_ACCESS) &&
bo                365 drivers/gpu/drm/radeon/radeon_object.c 		    (!max_offset || max_offset > bo->rdev->mc.visible_vram_size))
bo                366 drivers/gpu/drm/radeon/radeon_object.c 			bo->placements[i].lpfn =
bo                367 drivers/gpu/drm/radeon/radeon_object.c 				bo->rdev->mc.visible_vram_size >> PAGE_SHIFT;
bo                369 drivers/gpu/drm/radeon/radeon_object.c 			bo->placements[i].lpfn = max_offset >> PAGE_SHIFT;
bo                371 drivers/gpu/drm/radeon/radeon_object.c 		bo->placements[i].flags |= TTM_PL_FLAG_NO_EVICT;
bo                374 drivers/gpu/drm/radeon/radeon_object.c 	r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
bo                376 drivers/gpu/drm/radeon/radeon_object.c 		bo->pin_count = 1;
bo                378 drivers/gpu/drm/radeon/radeon_object.c 			*gpu_addr = radeon_bo_gpu_offset(bo);
bo                380 drivers/gpu/drm/radeon/radeon_object.c 			bo->rdev->vram_pin_size += radeon_bo_size(bo);
bo                382 drivers/gpu/drm/radeon/radeon_object.c 			bo->rdev->gart_pin_size += radeon_bo_size(bo);
bo                384 drivers/gpu/drm/radeon/radeon_object.c 		dev_err(bo->rdev->dev, "%p pin failed\n", bo);
bo                389 drivers/gpu/drm/radeon/radeon_object.c int radeon_bo_pin(struct radeon_bo *bo, u32 domain, u64 *gpu_addr)
bo                391 drivers/gpu/drm/radeon/radeon_object.c 	return radeon_bo_pin_restricted(bo, domain, 0, gpu_addr);
bo                394 drivers/gpu/drm/radeon/radeon_object.c int radeon_bo_unpin(struct radeon_bo *bo)
bo                399 drivers/gpu/drm/radeon/radeon_object.c 	if (!bo->pin_count) {
bo                400 drivers/gpu/drm/radeon/radeon_object.c 		dev_warn(bo->rdev->dev, "%p unpin not necessary\n", bo);
bo                403 drivers/gpu/drm/radeon/radeon_object.c 	bo->pin_count--;
bo                404 drivers/gpu/drm/radeon/radeon_object.c 	if (bo->pin_count)
bo                406 drivers/gpu/drm/radeon/radeon_object.c 	for (i = 0; i < bo->placement.num_placement; i++) {
bo                407 drivers/gpu/drm/radeon/radeon_object.c 		bo->placements[i].lpfn = 0;
bo                408 drivers/gpu/drm/radeon/radeon_object.c 		bo->placements[i].flags &= ~TTM_PL_FLAG_NO_EVICT;
bo                410 drivers/gpu/drm/radeon/radeon_object.c 	r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
bo                412 drivers/gpu/drm/radeon/radeon_object.c 		if (bo->tbo.mem.mem_type == TTM_PL_VRAM)
bo                413 drivers/gpu/drm/radeon/radeon_object.c 			bo->rdev->vram_pin_size -= radeon_bo_size(bo);
bo                415 drivers/gpu/drm/radeon/radeon_object.c 			bo->rdev->gart_pin_size -= radeon_bo_size(bo);
bo                417 drivers/gpu/drm/radeon/radeon_object.c 		dev_err(bo->rdev->dev, "%p validate failed for unpin\n", bo);
bo                437 drivers/gpu/drm/radeon/radeon_object.c 	struct radeon_bo *bo, *n;
bo                443 drivers/gpu/drm/radeon/radeon_object.c 	list_for_each_entry_safe(bo, n, &rdev->gem.objects, list) {
bo                445 drivers/gpu/drm/radeon/radeon_object.c 			&bo->tbo.base, bo, (unsigned long)bo->tbo.base.size,
bo                446 drivers/gpu/drm/radeon/radeon_object.c 			*((unsigned long *)&bo->tbo.base.refcount));
bo                447 drivers/gpu/drm/radeon/radeon_object.c 		mutex_lock(&bo->rdev->gem.mutex);
bo                448 drivers/gpu/drm/radeon/radeon_object.c 		list_del_init(&bo->list);
bo                449 drivers/gpu/drm/radeon/radeon_object.c 		mutex_unlock(&bo->rdev->gem.mutex);
bo                451 drivers/gpu/drm/radeon/radeon_object.c 		drm_gem_object_put_unlocked(&bo->tbo.base);
bo                551 drivers/gpu/drm/radeon/radeon_object.c 		struct radeon_bo *bo = lobj->robj;
bo                552 drivers/gpu/drm/radeon/radeon_object.c 		if (!bo->pin_count) {
bo                556 drivers/gpu/drm/radeon/radeon_object.c 				radeon_mem_type_to_domain(bo->tbo.mem.mem_type);
bo                574 drivers/gpu/drm/radeon/radeon_object.c 			radeon_ttm_placement_from_domain(bo, domain);
bo                576 drivers/gpu/drm/radeon/radeon_object.c 				radeon_uvd_force_into_uvd_segment(bo, allowed);
bo                579 drivers/gpu/drm/radeon/radeon_object.c 			r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
bo                593 drivers/gpu/drm/radeon/radeon_object.c 		lobj->gpu_offset = radeon_bo_gpu_offset(bo);
bo                594 drivers/gpu/drm/radeon/radeon_object.c 		lobj->tiling_flags = bo->tiling_flags;
bo                605 drivers/gpu/drm/radeon/radeon_object.c int radeon_bo_get_surface_reg(struct radeon_bo *bo)
bo                607 drivers/gpu/drm/radeon/radeon_object.c 	struct radeon_device *rdev = bo->rdev;
bo                613 drivers/gpu/drm/radeon/radeon_object.c 	dma_resv_assert_held(bo->tbo.base.resv);
bo                615 drivers/gpu/drm/radeon/radeon_object.c 	if (!bo->tiling_flags)
bo                618 drivers/gpu/drm/radeon/radeon_object.c 	if (bo->surface_reg >= 0) {
bo                619 drivers/gpu/drm/radeon/radeon_object.c 		reg = &rdev->surface_regs[bo->surface_reg];
bo                620 drivers/gpu/drm/radeon/radeon_object.c 		i = bo->surface_reg;
bo                628 drivers/gpu/drm/radeon/radeon_object.c 		if (!reg->bo)
bo                631 drivers/gpu/drm/radeon/radeon_object.c 		old_object = reg->bo;
bo                642 drivers/gpu/drm/radeon/radeon_object.c 		old_object = reg->bo;
bo                650 drivers/gpu/drm/radeon/radeon_object.c 	bo->surface_reg = i;
bo                651 drivers/gpu/drm/radeon/radeon_object.c 	reg->bo = bo;
bo                654 drivers/gpu/drm/radeon/radeon_object.c 	radeon_set_surface_reg(rdev, i, bo->tiling_flags, bo->pitch,
bo                655 drivers/gpu/drm/radeon/radeon_object.c 			       bo->tbo.mem.start << PAGE_SHIFT,
bo                656 drivers/gpu/drm/radeon/radeon_object.c 			       bo->tbo.num_pages << PAGE_SHIFT);
bo                660 drivers/gpu/drm/radeon/radeon_object.c static void radeon_bo_clear_surface_reg(struct radeon_bo *bo)
bo                662 drivers/gpu/drm/radeon/radeon_object.c 	struct radeon_device *rdev = bo->rdev;
bo                665 drivers/gpu/drm/radeon/radeon_object.c 	if (bo->surface_reg == -1)
bo                668 drivers/gpu/drm/radeon/radeon_object.c 	reg = &rdev->surface_regs[bo->surface_reg];
bo                669 drivers/gpu/drm/radeon/radeon_object.c 	radeon_clear_surface_reg(rdev, bo->surface_reg);
bo                671 drivers/gpu/drm/radeon/radeon_object.c 	reg->bo = NULL;
bo                672 drivers/gpu/drm/radeon/radeon_object.c 	bo->surface_reg = -1;
bo                675 drivers/gpu/drm/radeon/radeon_object.c int radeon_bo_set_tiling_flags(struct radeon_bo *bo,
bo                678 drivers/gpu/drm/radeon/radeon_object.c 	struct radeon_device *rdev = bo->rdev;
bo                726 drivers/gpu/drm/radeon/radeon_object.c 	r = radeon_bo_reserve(bo, false);
bo                729 drivers/gpu/drm/radeon/radeon_object.c 	bo->tiling_flags = tiling_flags;
bo                730 drivers/gpu/drm/radeon/radeon_object.c 	bo->pitch = pitch;
bo                731 drivers/gpu/drm/radeon/radeon_object.c 	radeon_bo_unreserve(bo);
bo                735 drivers/gpu/drm/radeon/radeon_object.c void radeon_bo_get_tiling_flags(struct radeon_bo *bo,
bo                739 drivers/gpu/drm/radeon/radeon_object.c 	dma_resv_assert_held(bo->tbo.base.resv);
bo                742 drivers/gpu/drm/radeon/radeon_object.c 		*tiling_flags = bo->tiling_flags;
bo                744 drivers/gpu/drm/radeon/radeon_object.c 		*pitch = bo->pitch;
bo                747 drivers/gpu/drm/radeon/radeon_object.c int radeon_bo_check_tiling(struct radeon_bo *bo, bool has_moved,
bo                751 drivers/gpu/drm/radeon/radeon_object.c 		dma_resv_assert_held(bo->tbo.base.resv);
bo                753 drivers/gpu/drm/radeon/radeon_object.c 	if (!(bo->tiling_flags & RADEON_TILING_SURFACE))
bo                757 drivers/gpu/drm/radeon/radeon_object.c 		radeon_bo_clear_surface_reg(bo);
bo                761 drivers/gpu/drm/radeon/radeon_object.c 	if (bo->tbo.mem.mem_type != TTM_PL_VRAM) {
bo                765 drivers/gpu/drm/radeon/radeon_object.c 		if (bo->surface_reg >= 0)
bo                766 drivers/gpu/drm/radeon/radeon_object.c 			radeon_bo_clear_surface_reg(bo);
bo                770 drivers/gpu/drm/radeon/radeon_object.c 	if ((bo->surface_reg >= 0) && !has_moved)
bo                773 drivers/gpu/drm/radeon/radeon_object.c 	return radeon_bo_get_surface_reg(bo);
bo                776 drivers/gpu/drm/radeon/radeon_object.c void radeon_bo_move_notify(struct ttm_buffer_object *bo,
bo                782 drivers/gpu/drm/radeon/radeon_object.c 	if (!radeon_ttm_bo_is_radeon_bo(bo))
bo                785 drivers/gpu/drm/radeon/radeon_object.c 	rbo = container_of(bo, struct radeon_bo, tbo);
bo                793 drivers/gpu/drm/radeon/radeon_object.c 	radeon_update_memory_usage(rbo, bo->mem.mem_type, -1);
bo                797 drivers/gpu/drm/radeon/radeon_object.c int radeon_bo_fault_reserve_notify(struct ttm_buffer_object *bo)
bo                805 drivers/gpu/drm/radeon/radeon_object.c 	if (!radeon_ttm_bo_is_radeon_bo(bo))
bo                807 drivers/gpu/drm/radeon/radeon_object.c 	rbo = container_of(bo, struct radeon_bo, tbo);
bo                810 drivers/gpu/drm/radeon/radeon_object.c 	if (bo->mem.mem_type != TTM_PL_VRAM)
bo                813 drivers/gpu/drm/radeon/radeon_object.c 	size = bo->mem.num_pages << PAGE_SHIFT;
bo                814 drivers/gpu/drm/radeon/radeon_object.c 	offset = bo->mem.start << PAGE_SHIFT;
bo                831 drivers/gpu/drm/radeon/radeon_object.c 	r = ttm_bo_validate(bo, &rbo->placement, &ctx);
bo                834 drivers/gpu/drm/radeon/radeon_object.c 		return ttm_bo_validate(bo, &rbo->placement, &ctx);
bo                839 drivers/gpu/drm/radeon/radeon_object.c 	offset = bo->mem.start << PAGE_SHIFT;
bo                847 drivers/gpu/drm/radeon/radeon_object.c int radeon_bo_wait(struct radeon_bo *bo, u32 *mem_type, bool no_wait)
bo                851 drivers/gpu/drm/radeon/radeon_object.c 	r = ttm_bo_reserve(&bo->tbo, true, no_wait, NULL);
bo                855 drivers/gpu/drm/radeon/radeon_object.c 		*mem_type = bo->tbo.mem.mem_type;
bo                857 drivers/gpu/drm/radeon/radeon_object.c 	r = ttm_bo_wait(&bo->tbo, true, no_wait);
bo                858 drivers/gpu/drm/radeon/radeon_object.c 	ttm_bo_unreserve(&bo->tbo);
bo                870 drivers/gpu/drm/radeon/radeon_object.c void radeon_bo_fence(struct radeon_bo *bo, struct radeon_fence *fence,
bo                873 drivers/gpu/drm/radeon/radeon_object.c 	struct dma_resv *resv = bo->tbo.base.resv;
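
Most CPU access to a radeon BO follows the reserve/kmap bracket built from the helpers in radeon_object.c above. A minimal sketch, assuming a BO already placed somewhere CPU-mappable; example_bo_write() is a hypothetical name, the five helpers are the real ones listed in this file:

    /* hypothetical helper: copy data into a BO through its kernel mapping */
    static int example_bo_write(struct radeon_bo *bo, const void *data,
                                size_t len)
    {
            void *ptr;
            int r;

            r = radeon_bo_reserve(bo, false);
            if (r)
                    return r;

            r = radeon_bo_kmap(bo, &ptr);   /* cached in bo->kptr on success */
            if (r == 0) {
                    memcpy(ptr, data, len);
                    radeon_bo_kunmap(bo);
            }

            radeon_bo_unreserve(bo);
            return r;
    }
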
bo                 64 drivers/gpu/drm/radeon/radeon_object.h static inline int radeon_bo_reserve(struct radeon_bo *bo, bool no_intr)
bo                 68 drivers/gpu/drm/radeon/radeon_object.h 	r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL);
bo                 71 drivers/gpu/drm/radeon/radeon_object.h 			dev_err(bo->rdev->dev, "%p reserve failed\n", bo);
bo                 77 drivers/gpu/drm/radeon/radeon_object.h static inline void radeon_bo_unreserve(struct radeon_bo *bo)
bo                 79 drivers/gpu/drm/radeon/radeon_object.h 	ttm_bo_unreserve(&bo->tbo);
bo                 91 drivers/gpu/drm/radeon/radeon_object.h static inline u64 radeon_bo_gpu_offset(struct radeon_bo *bo)
bo                 93 drivers/gpu/drm/radeon/radeon_object.h 	return bo->tbo.offset;
bo                 96 drivers/gpu/drm/radeon/radeon_object.h static inline unsigned long radeon_bo_size(struct radeon_bo *bo)
bo                 98 drivers/gpu/drm/radeon/radeon_object.h 	return bo->tbo.num_pages << PAGE_SHIFT;
bo                101 drivers/gpu/drm/radeon/radeon_object.h static inline unsigned radeon_bo_ngpu_pages(struct radeon_bo *bo)
bo                103 drivers/gpu/drm/radeon/radeon_object.h 	return (bo->tbo.num_pages << PAGE_SHIFT) / RADEON_GPU_PAGE_SIZE;
bo                106 drivers/gpu/drm/radeon/radeon_object.h static inline unsigned radeon_bo_gpu_page_alignment(struct radeon_bo *bo)
bo                108 drivers/gpu/drm/radeon/radeon_object.h 	return (bo->tbo.mem.page_alignment << PAGE_SHIFT) / RADEON_GPU_PAGE_SIZE;
bo                117 drivers/gpu/drm/radeon/radeon_object.h static inline u64 radeon_bo_mmap_offset(struct radeon_bo *bo)
bo                119 drivers/gpu/drm/radeon/radeon_object.h 	return drm_vma_node_offset_addr(&bo->tbo.base.vma_node);
bo                122 drivers/gpu/drm/radeon/radeon_object.h extern int radeon_bo_wait(struct radeon_bo *bo, u32 *mem_type,
bo                131 drivers/gpu/drm/radeon/radeon_object.h extern int radeon_bo_kmap(struct radeon_bo *bo, void **ptr);
bo                132 drivers/gpu/drm/radeon/radeon_object.h extern void radeon_bo_kunmap(struct radeon_bo *bo);
bo                133 drivers/gpu/drm/radeon/radeon_object.h extern struct radeon_bo *radeon_bo_ref(struct radeon_bo *bo);
bo                134 drivers/gpu/drm/radeon/radeon_object.h extern void radeon_bo_unref(struct radeon_bo **bo);
bo                135 drivers/gpu/drm/radeon/radeon_object.h extern int radeon_bo_pin(struct radeon_bo *bo, u32 domain, u64 *gpu_addr);
bo                136 drivers/gpu/drm/radeon/radeon_object.h extern int radeon_bo_pin_restricted(struct radeon_bo *bo, u32 domain,
bo                138 drivers/gpu/drm/radeon/radeon_object.h extern int radeon_bo_unpin(struct radeon_bo *bo);
bo                146 drivers/gpu/drm/radeon/radeon_object.h extern int radeon_bo_set_tiling_flags(struct radeon_bo *bo,
bo                148 drivers/gpu/drm/radeon/radeon_object.h extern void radeon_bo_get_tiling_flags(struct radeon_bo *bo,
bo                150 drivers/gpu/drm/radeon/radeon_object.h extern int radeon_bo_check_tiling(struct radeon_bo *bo, bool has_moved,
bo                152 drivers/gpu/drm/radeon/radeon_object.h extern void radeon_bo_move_notify(struct ttm_buffer_object *bo,
bo                155 drivers/gpu/drm/radeon/radeon_object.h extern int radeon_bo_fault_reserve_notify(struct ttm_buffer_object *bo);
bo                156 drivers/gpu/drm/radeon/radeon_object.h extern int radeon_bo_get_surface_reg(struct radeon_bo *bo);
bo                157 drivers/gpu/drm/radeon/radeon_object.h extern void radeon_bo_fence(struct radeon_bo *bo, struct radeon_fence *fence,
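
radeon_bo_pin()/radeon_bo_unpin() above are reference counted: only the 0 to 1 transition actually validates the BO with TTM_PL_FLAG_NO_EVICT set, and only the 1 to 0 transition clears it again. An illustrative call sequence (not driver code), with the BO reserved as the pin path requires:

    u64 gpu_addr;

    radeon_bo_reserve(bo, false);
    radeon_bo_pin(bo, RADEON_GEM_DOMAIN_VRAM, &gpu_addr); /* 0 -> 1: validate, NO_EVICT */
    radeon_bo_pin(bo, RADEON_GEM_DOMAIN_VRAM, NULL);      /* 1 -> 2: bump count only */
    radeon_bo_unpin(bo);                                  /* 2 -> 1: still pinned */
    radeon_bo_unpin(bo);                                  /* 1 -> 0: evictable again */
    radeon_bo_unreserve(bo);
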
bo                151 drivers/gpu/drm/radeon/radeon_pm.c 	struct radeon_bo *bo, *n;
bo                156 drivers/gpu/drm/radeon/radeon_pm.c 	list_for_each_entry_safe(bo, n, &rdev->gem.objects, list) {
bo                157 drivers/gpu/drm/radeon/radeon_pm.c 		if (bo->tbo.mem.mem_type == TTM_PL_VRAM)
bo                158 drivers/gpu/drm/radeon/radeon_pm.c 			ttm_bo_unmap_virtual(&bo->tbo);
bo                 36 drivers/gpu/drm/radeon/radeon_prime.c 	struct radeon_bo *bo = gem_to_radeon_bo(obj);
bo                 37 drivers/gpu/drm/radeon/radeon_prime.c 	int npages = bo->tbo.num_pages;
bo                 39 drivers/gpu/drm/radeon/radeon_prime.c 	return drm_prime_pages_to_sg(bo->tbo.ttm->pages, npages);
bo                 44 drivers/gpu/drm/radeon/radeon_prime.c 	struct radeon_bo *bo = gem_to_radeon_bo(obj);
bo                 47 drivers/gpu/drm/radeon/radeon_prime.c 	ret = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages,
bo                 48 drivers/gpu/drm/radeon/radeon_prime.c 			  &bo->dma_buf_vmap);
bo                 52 drivers/gpu/drm/radeon/radeon_prime.c 	return bo->dma_buf_vmap.virtual;
bo                 57 drivers/gpu/drm/radeon/radeon_prime.c 	struct radeon_bo *bo = gem_to_radeon_bo(obj);
bo                 59 drivers/gpu/drm/radeon/radeon_prime.c 	ttm_bo_kunmap(&bo->dma_buf_vmap);
bo                 68 drivers/gpu/drm/radeon/radeon_prime.c 	struct radeon_bo *bo;
bo                 73 drivers/gpu/drm/radeon/radeon_prime.c 			       RADEON_GEM_DOMAIN_GTT, 0, sg, resv, &bo);
bo                 79 drivers/gpu/drm/radeon/radeon_prime.c 	list_add_tail(&bo->list, &rdev->gem.objects);
bo                 82 drivers/gpu/drm/radeon/radeon_prime.c 	bo->prime_shared_count = 1;
bo                 83 drivers/gpu/drm/radeon/radeon_prime.c 	return &bo->tbo.base;
bo                 88 drivers/gpu/drm/radeon/radeon_prime.c 	struct radeon_bo *bo = gem_to_radeon_bo(obj);
bo                 91 drivers/gpu/drm/radeon/radeon_prime.c 	ret = radeon_bo_reserve(bo, false);
bo                 96 drivers/gpu/drm/radeon/radeon_prime.c 	ret = radeon_bo_pin(bo, RADEON_GEM_DOMAIN_GTT, NULL);
bo                 98 drivers/gpu/drm/radeon/radeon_prime.c 		bo->prime_shared_count++;
bo                100 drivers/gpu/drm/radeon/radeon_prime.c 	radeon_bo_unreserve(bo);
bo                106 drivers/gpu/drm/radeon/radeon_prime.c 	struct radeon_bo *bo = gem_to_radeon_bo(obj);
bo                109 drivers/gpu/drm/radeon/radeon_prime.c 	ret = radeon_bo_reserve(bo, false);
bo                113 drivers/gpu/drm/radeon/radeon_prime.c 	radeon_bo_unpin(bo);
bo                114 drivers/gpu/drm/radeon/radeon_prime.c 	if (bo->prime_shared_count)
bo                115 drivers/gpu/drm/radeon/radeon_prime.c 		bo->prime_shared_count--;
bo                116 drivers/gpu/drm/radeon/radeon_prime.c 	radeon_bo_unreserve(bo);
bo                123 drivers/gpu/drm/radeon/radeon_prime.c 	struct radeon_bo *bo = gem_to_radeon_bo(gobj);
bo                124 drivers/gpu/drm/radeon/radeon_prime.c 	if (radeon_ttm_tt_has_userptr(bo->tbo.ttm))
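
The prime pin/unpin callbacks above keep exported BOs in GTT (radeon_bo_pin_restricted() refuses a VRAM pin when prime_shared_count is set, as seen earlier) and count importers in prime_shared_count. The pin side, condensed from the lines shown:

    ret = radeon_bo_reserve(bo, false);
    if (ret)
            return ret;

    /* shared BOs are pinned to GTT so other devices can reach them */
    ret = radeon_bo_pin(bo, RADEON_GEM_DOMAIN_GTT, NULL);
    if (ret == 0)
            bo->prime_shared_count++;

    radeon_bo_unreserve(bo);
    return ret;
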
bo                 57 drivers/gpu/drm/radeon/radeon_sa.c 	sa_manager->bo = NULL;
bo                 68 drivers/gpu/drm/radeon/radeon_sa.c 			     domain, flags, NULL, NULL, &sa_manager->bo);
bo                 92 drivers/gpu/drm/radeon/radeon_sa.c 	radeon_bo_unref(&sa_manager->bo);
bo                101 drivers/gpu/drm/radeon/radeon_sa.c 	if (sa_manager->bo == NULL) {
bo                107 drivers/gpu/drm/radeon/radeon_sa.c 	r = radeon_bo_reserve(sa_manager->bo, false);
bo                112 drivers/gpu/drm/radeon/radeon_sa.c 	r = radeon_bo_pin(sa_manager->bo, sa_manager->domain, &sa_manager->gpu_addr);
bo                114 drivers/gpu/drm/radeon/radeon_sa.c 		radeon_bo_unreserve(sa_manager->bo);
bo                118 drivers/gpu/drm/radeon/radeon_sa.c 	r = radeon_bo_kmap(sa_manager->bo, &sa_manager->cpu_ptr);
bo                119 drivers/gpu/drm/radeon/radeon_sa.c 	radeon_bo_unreserve(sa_manager->bo);
bo                128 drivers/gpu/drm/radeon/radeon_sa.c 	if (sa_manager->bo == NULL) {
bo                133 drivers/gpu/drm/radeon/radeon_sa.c 	r = radeon_bo_reserve(sa_manager->bo, false);
bo                135 drivers/gpu/drm/radeon/radeon_sa.c 		radeon_bo_kunmap(sa_manager->bo);
bo                136 drivers/gpu/drm/radeon/radeon_sa.c 		radeon_bo_unpin(sa_manager->bo);
bo                137 drivers/gpu/drm/radeon/radeon_sa.c 		radeon_bo_unreserve(sa_manager->bo);
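
The radeon_sa.c entries above show the canonical bring-up order for a kernel-owned BO: create it, then reserve/pin/kmap under a single reservation; teardown (kunmap/unpin) runs the same bracket in reverse. The start path, condensed from the lines shown:

    r = radeon_bo_reserve(sa_manager->bo, false);
    if (r)
            return r;

    r = radeon_bo_pin(sa_manager->bo, sa_manager->domain,
                      &sa_manager->gpu_addr);
    if (r) {
            radeon_bo_unreserve(sa_manager->bo);
            return r;
    }

    r = radeon_bo_kmap(sa_manager->bo, &sa_manager->cpu_ptr);
    radeon_bo_unreserve(sa_manager->bo);
    return r;
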
bo                 16 drivers/gpu/drm/radeon/radeon_trace.h 	    TP_PROTO(struct radeon_bo *bo),
bo                 17 drivers/gpu/drm/radeon/radeon_trace.h 	    TP_ARGS(bo),
bo                 19 drivers/gpu/drm/radeon/radeon_trace.h 			     __field(struct radeon_bo *, bo)
bo                 24 drivers/gpu/drm/radeon/radeon_trace.h 			   __entry->bo = bo;
bo                 25 drivers/gpu/drm/radeon/radeon_trace.h 			   __entry->pages = bo->tbo.num_pages;
bo                 27 drivers/gpu/drm/radeon/radeon_trace.h 	    TP_printk("bo=%p, pages=%u", __entry->bo, __entry->pages)
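
The radeon_trace.h fragments above reassemble into one ordinary TRACE_EVENT, fired from trace_radeon_bo_create() in radeon_object.c (line 271 above). A reconstruction under that assumption; the pages field type is inferred from the %u format:

    TRACE_EVENT(radeon_bo_create,
        TP_PROTO(struct radeon_bo *bo),
        TP_ARGS(bo),
        TP_STRUCT__entry(
                         __field(struct radeon_bo *, bo)
                         __field(u32, pages)
                         ),
        TP_fast_assign(
                       __entry->bo = bo;
                       __entry->pages = bo->tbo.num_pages;
                       ),
        TP_printk("bo=%p, pages=%u", __entry->bo, __entry->pages)
    );
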
bo                125 drivers/gpu/drm/radeon/radeon_ttm.c static void radeon_evict_flags(struct ttm_buffer_object *bo,
bo                136 drivers/gpu/drm/radeon/radeon_ttm.c 	if (!radeon_ttm_bo_is_radeon_bo(bo)) {
bo                143 drivers/gpu/drm/radeon/radeon_ttm.c 	rbo = container_of(bo, struct radeon_bo, tbo);
bo                144 drivers/gpu/drm/radeon/radeon_ttm.c 	switch (bo->mem.mem_type) {
bo                149 drivers/gpu/drm/radeon/radeon_ttm.c 			 bo->mem.start < (rbo->rdev->mc.visible_vram_size >> PAGE_SHIFT)) {
bo                181 drivers/gpu/drm/radeon/radeon_ttm.c static int radeon_verify_access(struct ttm_buffer_object *bo, struct file *filp)
bo                183 drivers/gpu/drm/radeon/radeon_ttm.c 	struct radeon_bo *rbo = container_of(bo, struct radeon_bo, tbo);
bo                185 drivers/gpu/drm/radeon/radeon_ttm.c 	if (radeon_ttm_tt_has_userptr(bo->ttm))
bo                191 drivers/gpu/drm/radeon/radeon_ttm.c static void radeon_move_null(struct ttm_buffer_object *bo,
bo                194 drivers/gpu/drm/radeon/radeon_ttm.c 	struct ttm_mem_reg *old_mem = &bo->mem;
bo                201 drivers/gpu/drm/radeon/radeon_ttm.c static int radeon_move_blit(struct ttm_buffer_object *bo,
bo                212 drivers/gpu/drm/radeon/radeon_ttm.c 	rdev = radeon_get_rdev(bo->bdev);
bo                247 drivers/gpu/drm/radeon/radeon_ttm.c 	fence = radeon_copy(rdev, old_start, new_start, num_pages, bo->base.resv);
bo                251 drivers/gpu/drm/radeon/radeon_ttm.c 	r = ttm_bo_move_accel_cleanup(bo, &fence->base, evict, new_mem);
bo                256 drivers/gpu/drm/radeon/radeon_ttm.c static int radeon_move_vram_ram(struct ttm_buffer_object *bo,
bo                262 drivers/gpu/drm/radeon/radeon_ttm.c 	struct ttm_mem_reg *old_mem = &bo->mem;
bo                277 drivers/gpu/drm/radeon/radeon_ttm.c 	r = ttm_bo_mem_space(bo, &placement, &tmp_mem, &ctx);
bo                282 drivers/gpu/drm/radeon/radeon_ttm.c 	r = ttm_tt_set_placement_caching(bo->ttm, tmp_mem.placement);
bo                287 drivers/gpu/drm/radeon/radeon_ttm.c 	r = ttm_tt_bind(bo->ttm, &tmp_mem, &ctx);
bo                291 drivers/gpu/drm/radeon/radeon_ttm.c 	r = radeon_move_blit(bo, true, no_wait_gpu, &tmp_mem, old_mem);
bo                295 drivers/gpu/drm/radeon/radeon_ttm.c 	r = ttm_bo_move_ttm(bo, &ctx, new_mem);
bo                297 drivers/gpu/drm/radeon/radeon_ttm.c 	ttm_bo_mem_put(bo, &tmp_mem);
bo                301 drivers/gpu/drm/radeon/radeon_ttm.c static int radeon_move_ram_vram(struct ttm_buffer_object *bo,
bo                307 drivers/gpu/drm/radeon/radeon_ttm.c 	struct ttm_mem_reg *old_mem = &bo->mem;
bo                322 drivers/gpu/drm/radeon/radeon_ttm.c 	r = ttm_bo_mem_space(bo, &placement, &tmp_mem, &ctx);
bo                326 drivers/gpu/drm/radeon/radeon_ttm.c 	r = ttm_bo_move_ttm(bo, &ctx, &tmp_mem);
bo                330 drivers/gpu/drm/radeon/radeon_ttm.c 	r = radeon_move_blit(bo, true, no_wait_gpu, new_mem, old_mem);
bo                335 drivers/gpu/drm/radeon/radeon_ttm.c 	ttm_bo_mem_put(bo, &tmp_mem);
bo                339 drivers/gpu/drm/radeon/radeon_ttm.c static int radeon_bo_move(struct ttm_buffer_object *bo, bool evict,
bo                345 drivers/gpu/drm/radeon/radeon_ttm.c 	struct ttm_mem_reg *old_mem = &bo->mem;
bo                348 drivers/gpu/drm/radeon/radeon_ttm.c 	r = ttm_bo_wait(bo, ctx->interruptible, ctx->no_wait_gpu);
bo                353 drivers/gpu/drm/radeon/radeon_ttm.c 	rbo = container_of(bo, struct radeon_bo, tbo);
bo                357 drivers/gpu/drm/radeon/radeon_ttm.c 	rdev = radeon_get_rdev(bo->bdev);
bo                358 drivers/gpu/drm/radeon/radeon_ttm.c 	if (old_mem->mem_type == TTM_PL_SYSTEM && bo->ttm == NULL) {
bo                359 drivers/gpu/drm/radeon/radeon_ttm.c 		radeon_move_null(bo, new_mem);
bo                367 drivers/gpu/drm/radeon/radeon_ttm.c 		radeon_move_null(bo, new_mem);
bo                378 drivers/gpu/drm/radeon/radeon_ttm.c 		r = radeon_move_vram_ram(bo, evict, ctx->interruptible,
bo                382 drivers/gpu/drm/radeon/radeon_ttm.c 		r = radeon_move_ram_vram(bo, evict, ctx->interruptible,
bo                385 drivers/gpu/drm/radeon/radeon_ttm.c 		r = radeon_move_blit(bo, evict, ctx->no_wait_gpu,
bo                391 drivers/gpu/drm/radeon/radeon_ttm.c 		r = ttm_bo_move_memcpy(bo, ctx, new_mem);
bo                398 drivers/gpu/drm/radeon/radeon_ttm.c 	atomic64_add((u64)bo->num_pages << PAGE_SHIFT, &rdev->num_bytes_moved);
bo                632 drivers/gpu/drm/radeon/radeon_ttm.c static struct ttm_tt *radeon_ttm_tt_create(struct ttm_buffer_object *bo,
bo                638 drivers/gpu/drm/radeon/radeon_ttm.c 	rdev = radeon_get_rdev(bo->bdev);
bo                641 drivers/gpu/drm/radeon/radeon_ttm.c 		return ttm_agp_tt_create(bo, rdev->ddev->agp->bridge,
bo                652 drivers/gpu/drm/radeon/radeon_ttm.c 	if (ttm_dma_tt_init(&gtt->ttm, bo, page_flags)) {
bo                888 drivers/gpu/drm/radeon/radeon_ttm.c 	struct ttm_buffer_object *bo;
bo                892 drivers/gpu/drm/radeon/radeon_ttm.c 	bo = (struct ttm_buffer_object *)vmf->vma->vm_private_data;
bo                893 drivers/gpu/drm/radeon/radeon_ttm.c 	if (bo == NULL) {
bo                896 drivers/gpu/drm/radeon/radeon_ttm.c 	rdev = radeon_get_rdev(bo->bdev);
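
radeon_move_vram_ram() above is a two-hop move: the blit engine cannot copy VRAM straight to unbound system pages, so the BO is first blitted into a temporary GTT placement and TTM then finishes the hop to system memory. The shown calls, in order:

    r = ttm_bo_mem_space(bo, &placement, &tmp_mem, &ctx);  /* find temp GTT space */
    if (unlikely(r))
            return r;

    r = ttm_tt_set_placement_caching(bo->ttm, tmp_mem.placement);
    if (unlikely(r))
            goto out_cleanup;

    r = ttm_tt_bind(bo->ttm, &tmp_mem, &ctx);              /* bind pages into GART */
    if (unlikely(r))
            goto out_cleanup;

    r = radeon_move_blit(bo, true, no_wait_gpu, &tmp_mem, old_mem);
    if (unlikely(r))
            goto out_cleanup;

    r = ttm_bo_move_ttm(bo, &ctx, new_mem);                /* GTT -> system */
    out_cleanup:
    ttm_bo_mem_put(bo, &tmp_mem);
    return r;
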
bo                465 drivers/gpu/drm/radeon/radeon_uvd.c static int radeon_uvd_cs_msg(struct radeon_cs_parser *p, struct radeon_bo *bo,
bo                480 drivers/gpu/drm/radeon/radeon_uvd.c 	f = dma_resv_get_excl(bo->tbo.base.resv);
bo                489 drivers/gpu/drm/radeon/radeon_uvd.c 	r = radeon_bo_kmap(bo, &ptr);
bo                511 drivers/gpu/drm/radeon/radeon_uvd.c 		radeon_bo_kunmap(bo);
bo                537 drivers/gpu/drm/radeon/radeon_uvd.c 		radeon_bo_kunmap(bo);
bo                559 drivers/gpu/drm/radeon/radeon_uvd.c 		radeon_bo_kunmap(bo);
bo                144 drivers/gpu/drm/radeon/radeon_vm.c 	list[0].tv.bo = &vm->page_directory->tbo;
bo                150 drivers/gpu/drm/radeon/radeon_vm.c 		if (!vm->page_tables[i].bo)
bo                153 drivers/gpu/drm/radeon/radeon_vm.c 		list[idx].robj = vm->page_tables[i].bo;
bo                156 drivers/gpu/drm/radeon/radeon_vm.c 		list[idx].tv.bo = &list[idx].robj->tbo;
bo                294 drivers/gpu/drm/radeon/radeon_vm.c 				       struct radeon_bo *bo)
bo                298 drivers/gpu/drm/radeon/radeon_vm.c 	list_for_each_entry(bo_va, &bo->va, bo_list) {
bo                321 drivers/gpu/drm/radeon/radeon_vm.c 				      struct radeon_bo *bo)
bo                330 drivers/gpu/drm/radeon/radeon_vm.c 	bo_va->bo = bo;
bo                339 drivers/gpu/drm/radeon/radeon_vm.c 	list_add_tail(&bo_va->bo_list, &bo->va);
bo                388 drivers/gpu/drm/radeon/radeon_vm.c 			      struct radeon_bo *bo)
bo                396 drivers/gpu/drm/radeon/radeon_vm.c 	r = radeon_bo_reserve(bo, false);
bo                400 drivers/gpu/drm/radeon/radeon_vm.c 	r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
bo                404 drivers/gpu/drm/radeon/radeon_vm.c 	addr = radeon_bo_gpu_offset(bo);
bo                405 drivers/gpu/drm/radeon/radeon_vm.c 	entries = radeon_bo_size(bo) / 8;
bo                422 drivers/gpu/drm/radeon/radeon_vm.c 	radeon_bo_fence(bo, ib.fence, false);
bo                428 drivers/gpu/drm/radeon/radeon_vm.c 	radeon_bo_unreserve(bo);
bo                451 drivers/gpu/drm/radeon/radeon_vm.c 	uint64_t size = radeon_bo_size(bo_va->bo);
bo                488 drivers/gpu/drm/radeon/radeon_vm.c 				"(bo %p 0x%010lx 0x%010lx)\n", bo_va->bo,
bo                489 drivers/gpu/drm/radeon/radeon_vm.c 				soffset, tmp->bo, tmp->it.start, tmp->it.last);
bo                508 drivers/gpu/drm/radeon/radeon_vm.c 		tmp->bo = radeon_bo_ref(bo_va->bo);
bo                538 drivers/gpu/drm/radeon/radeon_vm.c 	radeon_bo_unreserve(bo_va->bo);
bo                544 drivers/gpu/drm/radeon/radeon_vm.c 		if (vm->page_tables[pt_idx].bo)
bo                565 drivers/gpu/drm/radeon/radeon_vm.c 		if (vm->page_tables[pt_idx].bo) {
bo                574 drivers/gpu/drm/radeon/radeon_vm.c 		vm->page_tables[pt_idx].bo = pt;
bo                581 drivers/gpu/drm/radeon/radeon_vm.c 	radeon_bo_unreserve(bo_va->bo);
bo                669 drivers/gpu/drm/radeon/radeon_vm.c 		struct radeon_bo *bo = vm->page_tables[pt_idx].bo;
bo                672 drivers/gpu/drm/radeon/radeon_vm.c 		if (bo == NULL)
bo                675 drivers/gpu/drm/radeon/radeon_vm.c 		pt = radeon_bo_gpu_offset(bo);
bo                828 drivers/gpu/drm/radeon/radeon_vm.c 		struct radeon_bo *pt = vm->page_tables[pt_idx].bo;
bo                896 drivers/gpu/drm/radeon/radeon_vm.c 		radeon_bo_fence(vm->page_tables[i].bo, fence, true);
bo                925 drivers/gpu/drm/radeon/radeon_vm.c 			bo_va->bo, vm);
bo                945 drivers/gpu/drm/radeon/radeon_vm.c 	if (bo_va->bo && radeon_ttm_tt_is_readonly(bo_va->bo->tbo.ttm))
bo                955 drivers/gpu/drm/radeon/radeon_vm.c 			if (!(bo_va->bo->flags & (RADEON_GEM_GTT_WC | RADEON_GEM_GTT_UC)))
bo               1061 drivers/gpu/drm/radeon/radeon_vm.c 		radeon_bo_unref(&bo_va->bo);
bo               1133 drivers/gpu/drm/radeon/radeon_vm.c 		bo_va->bo = radeon_bo_ref(bo_va->bo);
bo               1154 drivers/gpu/drm/radeon/radeon_vm.c 			     struct radeon_bo *bo)
bo               1158 drivers/gpu/drm/radeon/radeon_vm.c 	list_for_each_entry(bo_va, &bo->va, bo_list) {
bo               1242 drivers/gpu/drm/radeon/radeon_vm.c 		r = radeon_bo_reserve(bo_va->bo, false);
bo               1245 drivers/gpu/drm/radeon/radeon_vm.c 			radeon_bo_unreserve(bo_va->bo);
bo               1251 drivers/gpu/drm/radeon/radeon_vm.c 		radeon_bo_unref(&bo_va->bo);
bo               1257 drivers/gpu/drm/radeon/radeon_vm.c 		radeon_bo_unref(&vm->page_tables[i].bo);
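
Two lookup sites above (radeon_vm.c lines 294-298 and 1154-1158) walk a BO's per-VM mapping list, bo->va. A minimal form of that search, assuming the usual bo_va->vm back-pointer:

    struct radeon_bo_va *bo_va;

    list_for_each_entry(bo_va, &bo->va, bo_list) {
            if (bo_va->vm == vm)    /* mapping of this BO into this VM */
                    return bo_va;
    }
    return NULL;
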
bo                718 drivers/gpu/drm/tegra/dc.c 		struct tegra_bo *bo = tegra_fb_get_plane(fb, i);
bo                720 drivers/gpu/drm/tegra/dc.c 		window.base[i] = bo->paddr + fb->offsets[i];
bo                840 drivers/gpu/drm/tegra/dc.c 	struct tegra_bo *bo = tegra_fb_get_plane(plane->state->fb, 0);
bo                872 drivers/gpu/drm/tegra/dc.c 	value |= (bo->paddr >> 10) & 0x3fffff;
bo                876 drivers/gpu/drm/tegra/dc.c 	value = (bo->paddr >> 32) & 0x3;
bo                276 drivers/gpu/drm/tegra/drm.c 	struct tegra_bo *bo;
bo                282 drivers/gpu/drm/tegra/drm.c 	bo = to_tegra_bo(gem);
bo                283 drivers/gpu/drm/tegra/drm.c 	return &bo->base;
bo                314 drivers/gpu/drm/tegra/drm.c 	dest->cmdbuf.bo = host1x_bo_lookup(file, cmdbuf);
bo                315 drivers/gpu/drm/tegra/drm.c 	if (!dest->cmdbuf.bo)
bo                318 drivers/gpu/drm/tegra/drm.c 	dest->target.bo = host1x_bo_lookup(file, target);
bo                319 drivers/gpu/drm/tegra/drm.c 	if (!dest->target.bo)
bo                382 drivers/gpu/drm/tegra/drm.c 		struct host1x_bo *bo;
bo                400 drivers/gpu/drm/tegra/drm.c 		bo = host1x_bo_lookup(file, cmdbuf.handle);
bo                401 drivers/gpu/drm/tegra/drm.c 		if (!bo) {
bo                407 drivers/gpu/drm/tegra/drm.c 		obj = host1x_to_tegra_bo(bo);
bo                420 drivers/gpu/drm/tegra/drm.c 		host1x_job_add_gather(job, bo, cmdbuf.words, cmdbuf.offset);
bo                437 drivers/gpu/drm/tegra/drm.c 		obj = host1x_to_tegra_bo(reloc->cmdbuf.bo);
bo                451 drivers/gpu/drm/tegra/drm.c 		obj = host1x_to_tegra_bo(reloc->target.bo);
bo                510 drivers/gpu/drm/tegra/drm.c 	struct tegra_bo *bo;
bo                512 drivers/gpu/drm/tegra/drm.c 	bo = tegra_bo_create_with_handle(file, drm, args->size, args->flags,
bo                514 drivers/gpu/drm/tegra/drm.c 	if (IS_ERR(bo))
bo                515 drivers/gpu/drm/tegra/drm.c 		return PTR_ERR(bo);
bo                525 drivers/gpu/drm/tegra/drm.c 	struct tegra_bo *bo;
bo                531 drivers/gpu/drm/tegra/drm.c 	bo = to_tegra_bo(gem);
bo                533 drivers/gpu/drm/tegra/drm.c 	args->offset = drm_vma_node_offset_addr(&bo->gem.vma_node);
bo                762 drivers/gpu/drm/tegra/drm.c 	struct tegra_bo *bo;
bo                798 drivers/gpu/drm/tegra/drm.c 	bo = to_tegra_bo(gem);
bo                800 drivers/gpu/drm/tegra/drm.c 	bo->tiling.mode = mode;
bo                801 drivers/gpu/drm/tegra/drm.c 	bo->tiling.value = value;
bo                813 drivers/gpu/drm/tegra/drm.c 	struct tegra_bo *bo;
bo                820 drivers/gpu/drm/tegra/drm.c 	bo = to_tegra_bo(gem);
bo                822 drivers/gpu/drm/tegra/drm.c 	switch (bo->tiling.mode) {
bo                835 drivers/gpu/drm/tegra/drm.c 		args->value = bo->tiling.value;
bo                853 drivers/gpu/drm/tegra/drm.c 	struct tegra_bo *bo;
bo                862 drivers/gpu/drm/tegra/drm.c 	bo = to_tegra_bo(gem);
bo                863 drivers/gpu/drm/tegra/drm.c 	bo->flags = 0;
bo                866 drivers/gpu/drm/tegra/drm.c 		bo->flags |= TEGRA_BO_BOTTOM_UP;
bo                878 drivers/gpu/drm/tegra/drm.c 	struct tegra_bo *bo;
bo                884 drivers/gpu/drm/tegra/drm.c 	bo = to_tegra_bo(gem);
bo                887 drivers/gpu/drm/tegra/drm.c 	if (bo->flags & TEGRA_BO_BOTTOM_UP)
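
host1x_bo_lookup() above (drm.c lines 276-283) turns a userspace handle into the embedded host1x_bo that the submit path attaches to gathers and relocations. A sketch of its body, assuming the standard drm_gem_object_lookup() is the resolver:

    struct drm_gem_object *gem;
    struct tegra_bo *bo;

    gem = drm_gem_object_lookup(file, handle);  /* takes a GEM reference */
    if (!gem)
            return NULL;

    bo = to_tegra_bo(gem);
    return &bo->base;
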
bo                 34 drivers/gpu/drm/tegra/fb.c 	struct tegra_bo *bo = tegra_fb_get_plane(framebuffer, 0);
bo                 36 drivers/gpu/drm/tegra/fb.c 	if (bo->flags & TEGRA_BO_BOTTOM_UP)
bo                183 drivers/gpu/drm/tegra/fb.c 	struct tegra_bo *bo;
bo                186 drivers/gpu/drm/tegra/fb.c 	bo = tegra_fb_get_plane(helper->fb, 0);
bo                188 drivers/gpu/drm/tegra/fb.c 	err = drm_gem_mmap_obj(&bo->gem, bo->gem.size, vma);
bo                192 drivers/gpu/drm/tegra/fb.c 	return __tegra_gem_mmap(&bo->gem, vma);
bo                215 drivers/gpu/drm/tegra/fb.c 	struct tegra_bo *bo;
bo                231 drivers/gpu/drm/tegra/fb.c 	bo = tegra_bo_create(drm, size, 0);
bo                232 drivers/gpu/drm/tegra/fb.c 	if (IS_ERR(bo))
bo                233 drivers/gpu/drm/tegra/fb.c 		return PTR_ERR(bo);
bo                238 drivers/gpu/drm/tegra/fb.c 		drm_gem_object_put_unlocked(&bo->gem);
bo                242 drivers/gpu/drm/tegra/fb.c 	fbdev->fb = tegra_fb_alloc(drm, &cmd, &bo, 1);
bo                247 drivers/gpu/drm/tegra/fb.c 		drm_gem_object_put_unlocked(&bo->gem);
bo                262 drivers/gpu/drm/tegra/fb.c 	if (bo->pages) {
bo                263 drivers/gpu/drm/tegra/fb.c 		bo->vaddr = vmap(bo->pages, bo->num_pages, VM_MAP,
bo                265 drivers/gpu/drm/tegra/fb.c 		if (!bo->vaddr) {
bo                272 drivers/gpu/drm/tegra/fb.c 	drm->mode_config.fb_base = (resource_size_t)bo->paddr;
bo                273 drivers/gpu/drm/tegra/fb.c 	info->screen_base = (void __iomem *)bo->vaddr + offset;
bo                275 drivers/gpu/drm/tegra/fb.c 	info->fix.smem_start = (unsigned long)(bo->paddr + offset);
bo                349 drivers/gpu/drm/tegra/fb.c 		struct tegra_bo *bo = tegra_fb_get_plane(fbdev->fb, 0);
bo                352 drivers/gpu/drm/tegra/fb.c 		if (bo && bo->pages) {
bo                353 drivers/gpu/drm/tegra/fb.c 			vunmap(bo->vaddr);
bo                354 drivers/gpu/drm/tegra/fb.c 			bo->vaddr = NULL;
bo                 23 drivers/gpu/drm/tegra/gem.c static void tegra_bo_put(struct host1x_bo *bo)
bo                 25 drivers/gpu/drm/tegra/gem.c 	struct tegra_bo *obj = host1x_to_tegra_bo(bo);
bo                 30 drivers/gpu/drm/tegra/gem.c static dma_addr_t tegra_bo_pin(struct host1x_bo *bo, struct sg_table **sgt)
bo                 32 drivers/gpu/drm/tegra/gem.c 	struct tegra_bo *obj = host1x_to_tegra_bo(bo);
bo                 39 drivers/gpu/drm/tegra/gem.c static void tegra_bo_unpin(struct host1x_bo *bo, struct sg_table *sgt)
bo                 43 drivers/gpu/drm/tegra/gem.c static void *tegra_bo_mmap(struct host1x_bo *bo)
bo                 45 drivers/gpu/drm/tegra/gem.c 	struct tegra_bo *obj = host1x_to_tegra_bo(bo);
bo                 56 drivers/gpu/drm/tegra/gem.c static void tegra_bo_munmap(struct host1x_bo *bo, void *addr)
bo                 58 drivers/gpu/drm/tegra/gem.c 	struct tegra_bo *obj = host1x_to_tegra_bo(bo);
bo                 68 drivers/gpu/drm/tegra/gem.c static void *tegra_bo_kmap(struct host1x_bo *bo, unsigned int page)
bo                 70 drivers/gpu/drm/tegra/gem.c 	struct tegra_bo *obj = host1x_to_tegra_bo(bo);
bo                 81 drivers/gpu/drm/tegra/gem.c static void tegra_bo_kunmap(struct host1x_bo *bo, unsigned int page,
bo                 84 drivers/gpu/drm/tegra/gem.c 	struct tegra_bo *obj = host1x_to_tegra_bo(bo);
bo                 94 drivers/gpu/drm/tegra/gem.c static struct host1x_bo *tegra_bo_get(struct host1x_bo *bo)
bo                 96 drivers/gpu/drm/tegra/gem.c 	struct tegra_bo *obj = host1x_to_tegra_bo(bo);
bo                100 drivers/gpu/drm/tegra/gem.c 	return bo;
bo                114 drivers/gpu/drm/tegra/gem.c static int tegra_bo_iommu_map(struct tegra_drm *tegra, struct tegra_bo *bo)
bo                119 drivers/gpu/drm/tegra/gem.c 	if (bo->mm)
bo                122 drivers/gpu/drm/tegra/gem.c 	bo->mm = kzalloc(sizeof(*bo->mm), GFP_KERNEL);
bo                123 drivers/gpu/drm/tegra/gem.c 	if (!bo->mm)
bo                129 drivers/gpu/drm/tegra/gem.c 					 bo->mm, bo->gem.size, PAGE_SIZE, 0, 0);
bo                136 drivers/gpu/drm/tegra/gem.c 	bo->paddr = bo->mm->start;
bo                138 drivers/gpu/drm/tegra/gem.c 	bo->size = iommu_map_sg(tegra->domain, bo->paddr, bo->sgt->sgl,
bo                139 drivers/gpu/drm/tegra/gem.c 				bo->sgt->nents, prot);
bo                140 drivers/gpu/drm/tegra/gem.c 	if (!bo->size) {
bo                151 drivers/gpu/drm/tegra/gem.c 	drm_mm_remove_node(bo->mm);
bo                154 drivers/gpu/drm/tegra/gem.c 	kfree(bo->mm);
bo                158 drivers/gpu/drm/tegra/gem.c static int tegra_bo_iommu_unmap(struct tegra_drm *tegra, struct tegra_bo *bo)
bo                160 drivers/gpu/drm/tegra/gem.c 	if (!bo->mm)
bo                164 drivers/gpu/drm/tegra/gem.c 	iommu_unmap(tegra->domain, bo->paddr, bo->size);
bo                165 drivers/gpu/drm/tegra/gem.c 	drm_mm_remove_node(bo->mm);
bo                168 drivers/gpu/drm/tegra/gem.c 	kfree(bo->mm);
bo                176 drivers/gpu/drm/tegra/gem.c 	struct tegra_bo *bo;
bo                179 drivers/gpu/drm/tegra/gem.c 	bo = kzalloc(sizeof(*bo), GFP_KERNEL);
bo                180 drivers/gpu/drm/tegra/gem.c 	if (!bo)
bo                183 drivers/gpu/drm/tegra/gem.c 	host1x_bo_init(&bo->base, &tegra_bo_ops);
bo                186 drivers/gpu/drm/tegra/gem.c 	err = drm_gem_object_init(drm, &bo->gem, size);
bo                190 drivers/gpu/drm/tegra/gem.c 	err = drm_gem_create_mmap_offset(&bo->gem);
bo                194 drivers/gpu/drm/tegra/gem.c 	return bo;
bo                197 drivers/gpu/drm/tegra/gem.c 	drm_gem_object_release(&bo->gem);
bo                199 drivers/gpu/drm/tegra/gem.c 	kfree(bo);
bo                203 drivers/gpu/drm/tegra/gem.c static void tegra_bo_free(struct drm_device *drm, struct tegra_bo *bo)
bo                205 drivers/gpu/drm/tegra/gem.c 	if (bo->pages) {
bo                206 drivers/gpu/drm/tegra/gem.c 		dma_unmap_sg(drm->dev, bo->sgt->sgl, bo->sgt->nents,
bo                208 drivers/gpu/drm/tegra/gem.c 		drm_gem_put_pages(&bo->gem, bo->pages, true, true);
bo                209 drivers/gpu/drm/tegra/gem.c 		sg_free_table(bo->sgt);
bo                210 drivers/gpu/drm/tegra/gem.c 		kfree(bo->sgt);
bo                211 drivers/gpu/drm/tegra/gem.c 	} else if (bo->vaddr) {
bo                212 drivers/gpu/drm/tegra/gem.c 		dma_free_wc(drm->dev, bo->gem.size, bo->vaddr, bo->paddr);
bo                216 drivers/gpu/drm/tegra/gem.c static int tegra_bo_get_pages(struct drm_device *drm, struct tegra_bo *bo)
bo                220 drivers/gpu/drm/tegra/gem.c 	bo->pages = drm_gem_get_pages(&bo->gem);
bo                221 drivers/gpu/drm/tegra/gem.c 	if (IS_ERR(bo->pages))
bo                222 drivers/gpu/drm/tegra/gem.c 		return PTR_ERR(bo->pages);
bo                224 drivers/gpu/drm/tegra/gem.c 	bo->num_pages = bo->gem.size >> PAGE_SHIFT;
bo                226 drivers/gpu/drm/tegra/gem.c 	bo->sgt = drm_prime_pages_to_sg(bo->pages, bo->num_pages);
bo                227 drivers/gpu/drm/tegra/gem.c 	if (IS_ERR(bo->sgt)) {
bo                228 drivers/gpu/drm/tegra/gem.c 		err = PTR_ERR(bo->sgt);
bo                232 drivers/gpu/drm/tegra/gem.c 	err = dma_map_sg(drm->dev, bo->sgt->sgl, bo->sgt->nents,
bo                242 drivers/gpu/drm/tegra/gem.c 	sg_free_table(bo->sgt);
bo                243 drivers/gpu/drm/tegra/gem.c 	kfree(bo->sgt);
bo                245 drivers/gpu/drm/tegra/gem.c 	drm_gem_put_pages(&bo->gem, bo->pages, false, false);
bo                249 drivers/gpu/drm/tegra/gem.c static int tegra_bo_alloc(struct drm_device *drm, struct tegra_bo *bo)
bo                255 drivers/gpu/drm/tegra/gem.c 		err = tegra_bo_get_pages(drm, bo);
bo                259 drivers/gpu/drm/tegra/gem.c 		err = tegra_bo_iommu_map(tegra, bo);
bo                261 drivers/gpu/drm/tegra/gem.c 			tegra_bo_free(drm, bo);
bo                265 drivers/gpu/drm/tegra/gem.c 		size_t size = bo->gem.size;
bo                267 drivers/gpu/drm/tegra/gem.c 		bo->vaddr = dma_alloc_wc(drm->dev, size, &bo->paddr,
bo                269 drivers/gpu/drm/tegra/gem.c 		if (!bo->vaddr) {
bo                283 drivers/gpu/drm/tegra/gem.c 	struct tegra_bo *bo;
bo                286 drivers/gpu/drm/tegra/gem.c 	bo = tegra_bo_alloc_object(drm, size);
bo                287 drivers/gpu/drm/tegra/gem.c 	if (IS_ERR(bo))
bo                288 drivers/gpu/drm/tegra/gem.c 		return bo;
bo                290 drivers/gpu/drm/tegra/gem.c 	err = tegra_bo_alloc(drm, bo);
bo                295 drivers/gpu/drm/tegra/gem.c 		bo->tiling.mode = TEGRA_BO_TILING_MODE_TILED;
bo                298 drivers/gpu/drm/tegra/gem.c 		bo->flags |= TEGRA_BO_BOTTOM_UP;
bo                300 drivers/gpu/drm/tegra/gem.c 	return bo;
bo                303 drivers/gpu/drm/tegra/gem.c 	drm_gem_object_release(&bo->gem);
bo                304 drivers/gpu/drm/tegra/gem.c 	kfree(bo);
bo                314 drivers/gpu/drm/tegra/gem.c 	struct tegra_bo *bo;
bo                317 drivers/gpu/drm/tegra/gem.c 	bo = tegra_bo_create(drm, size, flags);
bo                318 drivers/gpu/drm/tegra/gem.c 	if (IS_ERR(bo))
bo                319 drivers/gpu/drm/tegra/gem.c 		return bo;
bo                321 drivers/gpu/drm/tegra/gem.c 	err = drm_gem_handle_create(file, &bo->gem, handle);
bo                323 drivers/gpu/drm/tegra/gem.c 		tegra_bo_free_object(&bo->gem);
bo                327 drivers/gpu/drm/tegra/gem.c 	drm_gem_object_put_unlocked(&bo->gem);
bo                329 drivers/gpu/drm/tegra/gem.c 	return bo;
bo                337 drivers/gpu/drm/tegra/gem.c 	struct tegra_bo *bo;
bo                340 drivers/gpu/drm/tegra/gem.c 	bo = tegra_bo_alloc_object(drm, buf->size);
bo                341 drivers/gpu/drm/tegra/gem.c 	if (IS_ERR(bo))
bo                342 drivers/gpu/drm/tegra/gem.c 		return bo;
bo                352 drivers/gpu/drm/tegra/gem.c 	bo->sgt = dma_buf_map_attachment(attach, DMA_TO_DEVICE);
bo                353 drivers/gpu/drm/tegra/gem.c 	if (IS_ERR(bo->sgt)) {
bo                354 drivers/gpu/drm/tegra/gem.c 		err = PTR_ERR(bo->sgt);
bo                359 drivers/gpu/drm/tegra/gem.c 		err = tegra_bo_iommu_map(tegra, bo);
bo                363 drivers/gpu/drm/tegra/gem.c 		if (bo->sgt->nents > 1) {
bo                368 drivers/gpu/drm/tegra/gem.c 		bo->paddr = sg_dma_address(bo->sgt->sgl);
bo                371 drivers/gpu/drm/tegra/gem.c 	bo->gem.import_attach = attach;
bo                373 drivers/gpu/drm/tegra/gem.c 	return bo;
bo                376 drivers/gpu/drm/tegra/gem.c 	if (!IS_ERR_OR_NULL(bo->sgt))
bo                377 drivers/gpu/drm/tegra/gem.c 		dma_buf_unmap_attachment(attach, bo->sgt, DMA_TO_DEVICE);
bo                382 drivers/gpu/drm/tegra/gem.c 	drm_gem_object_release(&bo->gem);
bo                383 drivers/gpu/drm/tegra/gem.c 	kfree(bo);
bo                390 drivers/gpu/drm/tegra/gem.c 	struct tegra_bo *bo = to_tegra_bo(gem);
bo                393 drivers/gpu/drm/tegra/gem.c 		tegra_bo_iommu_unmap(tegra, bo);
bo                396 drivers/gpu/drm/tegra/gem.c 		dma_buf_unmap_attachment(gem->import_attach, bo->sgt,
bo                400 drivers/gpu/drm/tegra/gem.c 		tegra_bo_free(gem->dev, bo);
bo                404 drivers/gpu/drm/tegra/gem.c 	kfree(bo);
bo                412 drivers/gpu/drm/tegra/gem.c 	struct tegra_bo *bo;
bo                417 drivers/gpu/drm/tegra/gem.c 	bo = tegra_bo_create_with_handle(file, drm, args->size, 0,
bo                419 drivers/gpu/drm/tegra/gem.c 	if (IS_ERR(bo))
bo                420 drivers/gpu/drm/tegra/gem.c 		return PTR_ERR(bo);
bo                429 drivers/gpu/drm/tegra/gem.c 	struct tegra_bo *bo = to_tegra_bo(gem);
bo                433 drivers/gpu/drm/tegra/gem.c 	if (!bo->pages)
bo                437 drivers/gpu/drm/tegra/gem.c 	page = bo->pages[offset];
bo                450 drivers/gpu/drm/tegra/gem.c 	struct tegra_bo *bo = to_tegra_bo(gem);
bo                452 drivers/gpu/drm/tegra/gem.c 	if (!bo->pages) {
bo                464 drivers/gpu/drm/tegra/gem.c 		err = dma_mmap_wc(gem->dev->dev, vma, bo->vaddr, bo->paddr,
bo                503 drivers/gpu/drm/tegra/gem.c 	struct tegra_bo *bo = to_tegra_bo(gem);
bo                510 drivers/gpu/drm/tegra/gem.c 	if (bo->pages) {
bo                514 drivers/gpu/drm/tegra/gem.c 		if (sg_alloc_table(sgt, bo->num_pages, GFP_KERNEL))
bo                517 drivers/gpu/drm/tegra/gem.c 		for_each_sg(sgt->sgl, sg, bo->num_pages, i)
bo                518 drivers/gpu/drm/tegra/gem.c 			sg_set_page(sg, bo->pages[i], PAGE_SIZE, 0);
bo                526 drivers/gpu/drm/tegra/gem.c 		sg_dma_address(sgt->sgl) = bo->paddr;
bo                543 drivers/gpu/drm/tegra/gem.c 	struct tegra_bo *bo = to_tegra_bo(gem);
bo                545 drivers/gpu/drm/tegra/gem.c 	if (bo->pages)
bo                561 drivers/gpu/drm/tegra/gem.c 	struct tegra_bo *bo = to_tegra_bo(gem);
bo                564 drivers/gpu/drm/tegra/gem.c 	if (bo->pages)
bo                565 drivers/gpu/drm/tegra/gem.c 		dma_sync_sg_for_cpu(drm->dev, bo->sgt->sgl, bo->sgt->nents,
bo                575 drivers/gpu/drm/tegra/gem.c 	struct tegra_bo *bo = to_tegra_bo(gem);
bo                578 drivers/gpu/drm/tegra/gem.c 	if (bo->pages)
bo                579 drivers/gpu/drm/tegra/gem.c 		dma_sync_sg_for_device(drm->dev, bo->sgt->sgl, bo->sgt->nents,
bo                610 drivers/gpu/drm/tegra/gem.c 	struct tegra_bo *bo = to_tegra_bo(gem);
bo                612 drivers/gpu/drm/tegra/gem.c 	return bo->vaddr;
bo                650 drivers/gpu/drm/tegra/gem.c 	struct tegra_bo *bo;
bo                661 drivers/gpu/drm/tegra/gem.c 	bo = tegra_bo_import(drm, buf);
bo                662 drivers/gpu/drm/tegra/gem.c 	if (IS_ERR(bo))
bo                663 drivers/gpu/drm/tegra/gem.c 		return ERR_CAST(bo);
bo                665 drivers/gpu/drm/tegra/gem.c 	return &bo->gem;
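
tegra_bo_alloc() above splits on whether an IOMMU domain is available: with one, the BO is built from discontiguous shmem pages and mapped through the domain; without, it falls back to a single contiguous write-combined DMA buffer. Condensed from the lines shown (the GFP flags are assumed):

    if (tegra->domain) {
            err = tegra_bo_get_pages(drm, bo);      /* shmem pages + sg table */
            if (err < 0)
                    return err;

            err = tegra_bo_iommu_map(tegra, bo);    /* carve IOVA, iommu_map_sg() */
            if (err < 0) {
                    tegra_bo_free(drm, bo);
                    return err;
            }
    } else {
            size_t size = bo->gem.size;

            /* GFP flags assumed for this sketch */
            bo->vaddr = dma_alloc_wc(drm->dev, size, &bo->paddr,
                                     GFP_KERNEL | __GFP_NOWARN);
            if (!bo->vaddr)
                    return -ENOMEM;
    }
    return 0;
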
bo                 51 drivers/gpu/drm/tegra/gem.h static inline struct tegra_bo *host1x_to_tegra_bo(struct host1x_bo *bo)
bo                 53 drivers/gpu/drm/tegra/gem.h 	return container_of(bo, struct tegra_bo, base);
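
host1x_to_tegra_bo() above is one half of a pair of container_of() accessors: a tegra_bo embeds both a drm_gem_object (gem) and a host1x_bo (base), so either outer object can be recovered from the inner one. The GEM-side twin, reconstructed for reference (exact body inferred from its uses above):

    static inline struct tegra_bo *to_tegra_bo(struct drm_gem_object *gem)
    {
            return container_of(gem, struct tegra_bo, gem);
    }
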
bo                416 drivers/gpu/drm/tegra/hub.c 	struct tegra_bo *bo;
bo                459 drivers/gpu/drm/tegra/hub.c 	bo = tegra_fb_get_plane(fb, 0);
bo                460 drivers/gpu/drm/tegra/hub.c 	base = bo->paddr;
bo                 24 drivers/gpu/drm/tegra/plane.h 	struct tegra_bo *bo;
bo                114 drivers/gpu/drm/ttm/ttm_agp_backend.c struct ttm_tt *ttm_agp_tt_create(struct ttm_buffer_object *bo,
bo                128 drivers/gpu/drm/ttm/ttm_agp_backend.c 	if (ttm_tt_init(&agp_be->ttm, bo, page_flags)) {
bo                 61 drivers/gpu/drm/ttm/ttm_bo.c static void ttm_bo_default_destroy(struct ttm_buffer_object *bo)
bo                 63 drivers/gpu/drm/ttm/ttm_bo.c 	kfree(bo);
bo                 95 drivers/gpu/drm/ttm/ttm_bo.c static void ttm_bo_mem_space_debug(struct ttm_buffer_object *bo,
bo                102 drivers/gpu/drm/ttm/ttm_bo.c 		   bo, bo->mem.num_pages, bo->mem.size >> 10,
bo                103 drivers/gpu/drm/ttm/ttm_bo.c 		   bo->mem.size >> 20);
bo                111 drivers/gpu/drm/ttm/ttm_bo.c 		ttm_mem_type_debug(bo->bdev, &p, mem_type);
bo                149 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_buffer_object *bo =
bo                151 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_bo_device *bdev = bo->bdev;
bo                152 drivers/gpu/drm/ttm/ttm_bo.c 	size_t acc_size = bo->acc_size;
bo                154 drivers/gpu/drm/ttm/ttm_bo.c 	BUG_ON(kref_read(&bo->list_kref));
bo                155 drivers/gpu/drm/ttm/ttm_bo.c 	BUG_ON(kref_read(&bo->kref));
bo                156 drivers/gpu/drm/ttm/ttm_bo.c 	BUG_ON(atomic_read(&bo->cpu_writers));
bo                157 drivers/gpu/drm/ttm/ttm_bo.c 	BUG_ON(bo->mem.mm_node != NULL);
bo                158 drivers/gpu/drm/ttm/ttm_bo.c 	BUG_ON(!list_empty(&bo->lru));
bo                159 drivers/gpu/drm/ttm/ttm_bo.c 	BUG_ON(!list_empty(&bo->ddestroy));
bo                160 drivers/gpu/drm/ttm/ttm_bo.c 	ttm_tt_destroy(bo->ttm);
bo                161 drivers/gpu/drm/ttm/ttm_bo.c 	atomic_dec(&bo->bdev->glob->bo_count);
bo                162 drivers/gpu/drm/ttm/ttm_bo.c 	dma_fence_put(bo->moving);
bo                163 drivers/gpu/drm/ttm/ttm_bo.c 	if (!ttm_bo_uses_embedded_gem_object(bo))
bo                164 drivers/gpu/drm/ttm/ttm_bo.c 		dma_resv_fini(&bo->base._resv);
bo                165 drivers/gpu/drm/ttm/ttm_bo.c 	mutex_destroy(&bo->wu_mutex);
bo                166 drivers/gpu/drm/ttm/ttm_bo.c 	bo->destroy(bo);
bo                170 drivers/gpu/drm/ttm/ttm_bo.c static void ttm_bo_add_mem_to_lru(struct ttm_buffer_object *bo,
bo                173 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_bo_device *bdev = bo->bdev;
bo                176 drivers/gpu/drm/ttm/ttm_bo.c 	dma_resv_assert_held(bo->base.resv);
bo                178 drivers/gpu/drm/ttm/ttm_bo.c 	if (!list_empty(&bo->lru))
bo                185 drivers/gpu/drm/ttm/ttm_bo.c 	list_add_tail(&bo->lru, &man->lru[bo->priority]);
bo                186 drivers/gpu/drm/ttm/ttm_bo.c 	kref_get(&bo->list_kref);
bo                188 drivers/gpu/drm/ttm/ttm_bo.c 	if (!(man->flags & TTM_MEMTYPE_FLAG_FIXED) && bo->ttm &&
bo                189 drivers/gpu/drm/ttm/ttm_bo.c 	    !(bo->ttm->page_flags & (TTM_PAGE_FLAG_SG |
bo                191 drivers/gpu/drm/ttm/ttm_bo.c 		list_add_tail(&bo->swap, &bdev->glob->swap_lru[bo->priority]);
bo                192 drivers/gpu/drm/ttm/ttm_bo.c 		kref_get(&bo->list_kref);
bo                196 drivers/gpu/drm/ttm/ttm_bo.c void ttm_bo_add_to_lru(struct ttm_buffer_object *bo)
bo                198 drivers/gpu/drm/ttm/ttm_bo.c 	ttm_bo_add_mem_to_lru(bo, &bo->mem);
bo                207 drivers/gpu/drm/ttm/ttm_bo.c void ttm_bo_del_from_lru(struct ttm_buffer_object *bo)
bo                209 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_bo_device *bdev = bo->bdev;
bo                212 drivers/gpu/drm/ttm/ttm_bo.c 	if (!list_empty(&bo->swap)) {
bo                213 drivers/gpu/drm/ttm/ttm_bo.c 		list_del_init(&bo->swap);
bo                214 drivers/gpu/drm/ttm/ttm_bo.c 		kref_put(&bo->list_kref, ttm_bo_ref_bug);
bo                217 drivers/gpu/drm/ttm/ttm_bo.c 	if (!list_empty(&bo->lru)) {
bo                218 drivers/gpu/drm/ttm/ttm_bo.c 		list_del_init(&bo->lru);
bo                219 drivers/gpu/drm/ttm/ttm_bo.c 		kref_put(&bo->list_kref, ttm_bo_ref_bug);
bo                224 drivers/gpu/drm/ttm/ttm_bo.c 		bdev->driver->del_from_lru_notify(bo);
bo                227 drivers/gpu/drm/ttm/ttm_bo.c void ttm_bo_del_sub_from_lru(struct ttm_buffer_object *bo)
bo                229 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_bo_global *glob = bo->bdev->glob;
bo                232 drivers/gpu/drm/ttm/ttm_bo.c 	ttm_bo_del_from_lru(bo);
bo                238 drivers/gpu/drm/ttm/ttm_bo.c 				     struct ttm_buffer_object *bo)
bo                241 drivers/gpu/drm/ttm/ttm_bo.c 		pos->first = bo;
bo                242 drivers/gpu/drm/ttm/ttm_bo.c 	pos->last = bo;
bo                245 drivers/gpu/drm/ttm/ttm_bo.c void ttm_bo_move_to_lru_tail(struct ttm_buffer_object *bo,
bo                248 drivers/gpu/drm/ttm/ttm_bo.c 	dma_resv_assert_held(bo->base.resv);
bo                250 drivers/gpu/drm/ttm/ttm_bo.c 	ttm_bo_del_from_lru(bo);
bo                251 drivers/gpu/drm/ttm/ttm_bo.c 	ttm_bo_add_to_lru(bo);
bo                253 drivers/gpu/drm/ttm/ttm_bo.c 	if (bulk && !(bo->mem.placement & TTM_PL_FLAG_NO_EVICT)) {
bo                254 drivers/gpu/drm/ttm/ttm_bo.c 		switch (bo->mem.mem_type) {
bo                256 drivers/gpu/drm/ttm/ttm_bo.c 			ttm_bo_bulk_move_set_pos(&bulk->tt[bo->priority], bo);
bo                260 drivers/gpu/drm/ttm/ttm_bo.c 			ttm_bo_bulk_move_set_pos(&bulk->vram[bo->priority], bo);
bo                263 drivers/gpu/drm/ttm/ttm_bo.c 		if (bo->ttm && !(bo->ttm->page_flags &
bo                265 drivers/gpu/drm/ttm/ttm_bo.c 			ttm_bo_bulk_move_set_pos(&bulk->swap[bo->priority], bo);
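
The LRU helpers above assume the caller serializes against the device-global lru_lock, and ttm_bo_move_to_lru_tail() additionally asserts that the reservation is held. The common "recently used" bump (the same pattern ttm_bo_mem_space() uses further down) looks like:

    dma_resv_assert_held(bo->base.resv);    /* reservation must be held */
    spin_lock(&bo->bdev->glob->lru_lock);
    ttm_bo_move_to_lru_tail(bo, NULL);      /* NULL: no bulk-move batch */
    spin_unlock(&bo->bdev->glob->lru_lock);
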
bo                320 drivers/gpu/drm/ttm/ttm_bo.c static int ttm_bo_handle_move_mem(struct ttm_buffer_object *bo,
bo                324 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_bo_device *bdev = bo->bdev;
bo                325 drivers/gpu/drm/ttm/ttm_bo.c 	bool old_is_pci = ttm_mem_reg_is_pci(bdev, &bo->mem);
bo                327 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_mem_type_manager *old_man = &bdev->man[bo->mem.mem_type];
bo                332 drivers/gpu/drm/ttm/ttm_bo.c 	    ((mem->placement & bo->mem.placement & TTM_PL_MASK_CACHING) == 0)) {
bo                336 drivers/gpu/drm/ttm/ttm_bo.c 		ttm_bo_unmap_virtual_locked(bo);
bo                345 drivers/gpu/drm/ttm/ttm_bo.c 		if (bo->ttm == NULL) {
bo                347 drivers/gpu/drm/ttm/ttm_bo.c 			ret = ttm_tt_create(bo, zero);
bo                352 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_tt_set_placement_caching(bo->ttm, mem->placement);
bo                357 drivers/gpu/drm/ttm/ttm_bo.c 			ret = ttm_tt_bind(bo->ttm, mem, ctx);
bo                362 drivers/gpu/drm/ttm/ttm_bo.c 		if (bo->mem.mem_type == TTM_PL_SYSTEM) {
bo                364 drivers/gpu/drm/ttm/ttm_bo.c 				bdev->driver->move_notify(bo, evict, mem);
bo                365 drivers/gpu/drm/ttm/ttm_bo.c 			bo->mem = *mem;
bo                372 drivers/gpu/drm/ttm/ttm_bo.c 		bdev->driver->move_notify(bo, evict, mem);
bo                376 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_bo_move_ttm(bo, ctx, mem);
bo                378 drivers/gpu/drm/ttm/ttm_bo.c 		ret = bdev->driver->move(bo, evict, ctx, mem);
bo                380 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_bo_move_memcpy(bo, ctx, mem);
bo                384 drivers/gpu/drm/ttm/ttm_bo.c 			swap(*mem, bo->mem);
bo                385 drivers/gpu/drm/ttm/ttm_bo.c 			bdev->driver->move_notify(bo, false, mem);
bo                386 drivers/gpu/drm/ttm/ttm_bo.c 			swap(*mem, bo->mem);
bo                393 drivers/gpu/drm/ttm/ttm_bo.c 	if (bo->evicted) {
bo                395 drivers/gpu/drm/ttm/ttm_bo.c 			ret = bdev->driver->invalidate_caches(bdev, bo->mem.placement);
bo                399 drivers/gpu/drm/ttm/ttm_bo.c 		bo->evicted = false;
bo                402 drivers/gpu/drm/ttm/ttm_bo.c 	if (bo->mem.mm_node)
bo                403 drivers/gpu/drm/ttm/ttm_bo.c 		bo->offset = (bo->mem.start << PAGE_SHIFT) +
bo                404 drivers/gpu/drm/ttm/ttm_bo.c 		    bdev->man[bo->mem.mem_type].gpu_offset;
bo                406 drivers/gpu/drm/ttm/ttm_bo.c 		bo->offset = 0;
bo                408 drivers/gpu/drm/ttm/ttm_bo.c 	ctx->bytes_moved += bo->num_pages << PAGE_SHIFT;
bo                412 drivers/gpu/drm/ttm/ttm_bo.c 	new_man = &bdev->man[bo->mem.mem_type];
bo                414 drivers/gpu/drm/ttm/ttm_bo.c 		ttm_tt_destroy(bo->ttm);
bo                415 drivers/gpu/drm/ttm/ttm_bo.c 		bo->ttm = NULL;
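
ttm_bo_handle_move_mem() calls back into the driver around every placement change via move_notify(). A sketch of that hook, with mydrv_gart_unbind() as a hypothetical helper; drivers use this callback to drop GPU mappings that the move invalidates:

    static void mydrv_bo_move_notify(struct ttm_buffer_object *bo,
                                     bool evict,
                                     struct ttm_mem_reg *new_mem)
    {
            /* new_mem == NULL means the BO is being destroyed */
            if (!new_mem || new_mem->mem_type == TTM_PL_SYSTEM)
                    mydrv_gart_unbind(bo);  /* hypothetical: drop GPU mappings */
    }
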
bo                429 drivers/gpu/drm/ttm/ttm_bo.c static void ttm_bo_cleanup_memtype_use(struct ttm_buffer_object *bo)
bo                431 drivers/gpu/drm/ttm/ttm_bo.c 	if (bo->bdev->driver->move_notify)
bo                432 drivers/gpu/drm/ttm/ttm_bo.c 		bo->bdev->driver->move_notify(bo, false, NULL);
bo                434 drivers/gpu/drm/ttm/ttm_bo.c 	ttm_tt_destroy(bo->ttm);
bo                435 drivers/gpu/drm/ttm/ttm_bo.c 	bo->ttm = NULL;
bo                436 drivers/gpu/drm/ttm/ttm_bo.c 	ttm_bo_mem_put(bo, &bo->mem);
bo                439 drivers/gpu/drm/ttm/ttm_bo.c static int ttm_bo_individualize_resv(struct ttm_buffer_object *bo)
bo                443 drivers/gpu/drm/ttm/ttm_bo.c 	if (bo->base.resv == &bo->base._resv)
bo                446 drivers/gpu/drm/ttm/ttm_bo.c 	BUG_ON(!dma_resv_trylock(&bo->base._resv));
bo                448 drivers/gpu/drm/ttm/ttm_bo.c 	r = dma_resv_copy_fences(&bo->base._resv, bo->base.resv);
bo                450 drivers/gpu/drm/ttm/ttm_bo.c 		dma_resv_unlock(&bo->base._resv);
bo                455 drivers/gpu/drm/ttm/ttm_bo.c static void ttm_bo_flush_all_fences(struct ttm_buffer_object *bo)
bo                461 drivers/gpu/drm/ttm/ttm_bo.c 	fobj = dma_resv_get_list(&bo->base._resv);
bo                462 drivers/gpu/drm/ttm/ttm_bo.c 	fence = dma_resv_get_excl(&bo->base._resv);
bo                468 drivers/gpu/drm/ttm/ttm_bo.c 					dma_resv_held(bo->base.resv));
bo                475 drivers/gpu/drm/ttm/ttm_bo.c static void ttm_bo_cleanup_refs_or_queue(struct ttm_buffer_object *bo)
bo                477 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_bo_device *bdev = bo->bdev;
bo                481 drivers/gpu/drm/ttm/ttm_bo.c 	ret = ttm_bo_individualize_resv(bo);
bo                486 drivers/gpu/drm/ttm/ttm_bo.c 		dma_resv_wait_timeout_rcu(bo->base.resv, true, false,
bo                493 drivers/gpu/drm/ttm/ttm_bo.c 	ret = dma_resv_trylock(bo->base.resv) ? 0 : -EBUSY;
bo                495 drivers/gpu/drm/ttm/ttm_bo.c 		if (dma_resv_test_signaled_rcu(&bo->base._resv, true)) {
bo                496 drivers/gpu/drm/ttm/ttm_bo.c 			ttm_bo_del_from_lru(bo);
bo                498 drivers/gpu/drm/ttm/ttm_bo.c 			if (bo->base.resv != &bo->base._resv)
bo                499 drivers/gpu/drm/ttm/ttm_bo.c 				dma_resv_unlock(&bo->base._resv);
bo                501 drivers/gpu/drm/ttm/ttm_bo.c 			ttm_bo_cleanup_memtype_use(bo);
bo                502 drivers/gpu/drm/ttm/ttm_bo.c 			dma_resv_unlock(bo->base.resv);
bo                506 drivers/gpu/drm/ttm/ttm_bo.c 		ttm_bo_flush_all_fences(bo);
bo                513 drivers/gpu/drm/ttm/ttm_bo.c 		if (bo->mem.placement & TTM_PL_FLAG_NO_EVICT) {
bo                514 drivers/gpu/drm/ttm/ttm_bo.c 			bo->mem.placement &= ~TTM_PL_FLAG_NO_EVICT;
bo                515 drivers/gpu/drm/ttm/ttm_bo.c 			ttm_bo_add_to_lru(bo);
bo                518 drivers/gpu/drm/ttm/ttm_bo.c 		dma_resv_unlock(bo->base.resv);
bo                520 drivers/gpu/drm/ttm/ttm_bo.c 	if (bo->base.resv != &bo->base._resv) {
bo                521 drivers/gpu/drm/ttm/ttm_bo.c 		ttm_bo_flush_all_fences(bo);
bo                522 drivers/gpu/drm/ttm/ttm_bo.c 		dma_resv_unlock(&bo->base._resv);
bo                526 drivers/gpu/drm/ttm/ttm_bo.c 	kref_get(&bo->list_kref);
bo                527 drivers/gpu/drm/ttm/ttm_bo.c 	list_add_tail(&bo->ddestroy, &bdev->ddestroy);
bo                547 drivers/gpu/drm/ttm/ttm_bo.c static int ttm_bo_cleanup_refs(struct ttm_buffer_object *bo,
bo                551 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_bo_global *glob = bo->bdev->glob;
bo                555 drivers/gpu/drm/ttm/ttm_bo.c 	if (unlikely(list_empty(&bo->ddestroy)))
bo                556 drivers/gpu/drm/ttm/ttm_bo.c 		resv = bo->base.resv;
bo                558 drivers/gpu/drm/ttm/ttm_bo.c 		resv = &bo->base._resv;
bo                569 drivers/gpu/drm/ttm/ttm_bo.c 			dma_resv_unlock(bo->base.resv);
bo                582 drivers/gpu/drm/ttm/ttm_bo.c 		if (unlock_resv && !dma_resv_trylock(bo->base.resv)) {
bo                597 drivers/gpu/drm/ttm/ttm_bo.c 	if (ret || unlikely(list_empty(&bo->ddestroy))) {
bo                599 drivers/gpu/drm/ttm/ttm_bo.c 			dma_resv_unlock(bo->base.resv);
bo                604 drivers/gpu/drm/ttm/ttm_bo.c 	ttm_bo_del_from_lru(bo);
bo                605 drivers/gpu/drm/ttm/ttm_bo.c 	list_del_init(&bo->ddestroy);
bo                606 drivers/gpu/drm/ttm/ttm_bo.c 	kref_put(&bo->list_kref, ttm_bo_ref_bug);
bo                609 drivers/gpu/drm/ttm/ttm_bo.c 	ttm_bo_cleanup_memtype_use(bo);
bo                612 drivers/gpu/drm/ttm/ttm_bo.c 		dma_resv_unlock(bo->base.resv);
bo                631 drivers/gpu/drm/ttm/ttm_bo.c 		struct ttm_buffer_object *bo;
bo                633 drivers/gpu/drm/ttm/ttm_bo.c 		bo = list_first_entry(&bdev->ddestroy, struct ttm_buffer_object,
bo                635 drivers/gpu/drm/ttm/ttm_bo.c 		kref_get(&bo->list_kref);
bo                636 drivers/gpu/drm/ttm/ttm_bo.c 		list_move_tail(&bo->ddestroy, &removed);
bo                638 drivers/gpu/drm/ttm/ttm_bo.c 		if (remove_all || bo->base.resv != &bo->base._resv) {
bo                640 drivers/gpu/drm/ttm/ttm_bo.c 			dma_resv_lock(bo->base.resv, NULL);
bo                643 drivers/gpu/drm/ttm/ttm_bo.c 			ttm_bo_cleanup_refs(bo, false, !remove_all, true);
bo                645 drivers/gpu/drm/ttm/ttm_bo.c 		} else if (dma_resv_trylock(bo->base.resv)) {
bo                646 drivers/gpu/drm/ttm/ttm_bo.c 			ttm_bo_cleanup_refs(bo, false, !remove_all, true);
bo                651 drivers/gpu/drm/ttm/ttm_bo.c 		kref_put(&bo->list_kref, ttm_bo_release_list);
bo                673 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_buffer_object *bo =
bo                675 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_bo_device *bdev = bo->bdev;
bo                676 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_mem_type_manager *man = &bdev->man[bo->mem.mem_type];
bo                678 drivers/gpu/drm/ttm/ttm_bo.c 	if (bo->bdev->driver->release_notify)
bo                679 drivers/gpu/drm/ttm/ttm_bo.c 		bo->bdev->driver->release_notify(bo);
bo                681 drivers/gpu/drm/ttm/ttm_bo.c 	drm_vma_offset_remove(&bdev->vma_manager, &bo->base.vma_node);
bo                683 drivers/gpu/drm/ttm/ttm_bo.c 	ttm_mem_io_free_vm(bo);
bo                685 drivers/gpu/drm/ttm/ttm_bo.c 	ttm_bo_cleanup_refs_or_queue(bo);
bo                686 drivers/gpu/drm/ttm/ttm_bo.c 	kref_put(&bo->list_kref, ttm_bo_release_list);
bo                689 drivers/gpu/drm/ttm/ttm_bo.c void ttm_bo_put(struct ttm_buffer_object *bo)
bo                691 drivers/gpu/drm/ttm/ttm_bo.c 	kref_put(&bo->kref, ttm_bo_release);
bo                709 drivers/gpu/drm/ttm/ttm_bo.c static int ttm_bo_evict(struct ttm_buffer_object *bo,
bo                712 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_bo_device *bdev = bo->bdev;
bo                717 drivers/gpu/drm/ttm/ttm_bo.c 	dma_resv_assert_held(bo->base.resv);
bo                721 drivers/gpu/drm/ttm/ttm_bo.c 	bdev->driver->evict_flags(bo, &placement);
bo                724 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_bo_pipeline_gutting(bo);
bo                728 drivers/gpu/drm/ttm/ttm_bo.c 		return ttm_tt_create(bo, false);
bo                731 drivers/gpu/drm/ttm/ttm_bo.c 	evict_mem = bo->mem;
bo                736 drivers/gpu/drm/ttm/ttm_bo.c 	ret = ttm_bo_mem_space(bo, &placement, &evict_mem, ctx);
bo                740 drivers/gpu/drm/ttm/ttm_bo.c 			       bo);
bo                741 drivers/gpu/drm/ttm/ttm_bo.c 			ttm_bo_mem_space_debug(bo, &placement);
bo                746 drivers/gpu/drm/ttm/ttm_bo.c 	ret = ttm_bo_handle_move_mem(bo, &evict_mem, true, ctx);
bo                750 drivers/gpu/drm/ttm/ttm_bo.c 		ttm_bo_mem_put(bo, &evict_mem);
bo                753 drivers/gpu/drm/ttm/ttm_bo.c 	bo->evicted = true;
bo                758 drivers/gpu/drm/ttm/ttm_bo.c bool ttm_bo_eviction_valuable(struct ttm_buffer_object *bo,
bo                764 drivers/gpu/drm/ttm/ttm_bo.c 	if (place->fpfn >= (bo->mem.start + bo->mem.size) ||
bo                765 drivers/gpu/drm/ttm/ttm_bo.c 	    (place->lpfn && place->lpfn <= bo->mem.start))
bo                782 drivers/gpu/drm/ttm/ttm_bo.c static bool ttm_bo_evict_swapout_allowable(struct ttm_buffer_object *bo,
bo                787 drivers/gpu/drm/ttm/ttm_bo.c 	if (bo->base.resv == ctx->resv) {
bo                788 drivers/gpu/drm/ttm/ttm_bo.c 		dma_resv_assert_held(bo->base.resv);
bo                790 drivers/gpu/drm/ttm/ttm_bo.c 		    || !list_empty(&bo->ddestroy))
bo                796 drivers/gpu/drm/ttm/ttm_bo.c 		ret = dma_resv_trylock(bo->base.resv);
bo                846 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_buffer_object *bo = NULL, *busy_bo = NULL;
bo                855 drivers/gpu/drm/ttm/ttm_bo.c 		list_for_each_entry(bo, &man->lru[i], lru) {
bo                858 drivers/gpu/drm/ttm/ttm_bo.c 			if (!ttm_bo_evict_swapout_allowable(bo, ctx, &locked,
bo                861 drivers/gpu/drm/ttm/ttm_bo.c 				    dma_resv_locking_ctx(bo->base.resv))
bo                862 drivers/gpu/drm/ttm/ttm_bo.c 					busy_bo = bo;
bo                866 drivers/gpu/drm/ttm/ttm_bo.c 			if (place && !bdev->driver->eviction_valuable(bo,
bo                869 drivers/gpu/drm/ttm/ttm_bo.c 					dma_resv_unlock(bo->base.resv);
bo                876 drivers/gpu/drm/ttm/ttm_bo.c 		if (&bo->lru != &man->lru[i])
bo                879 drivers/gpu/drm/ttm/ttm_bo.c 		bo = NULL;
bo                882 drivers/gpu/drm/ttm/ttm_bo.c 	if (!bo) {
bo                892 drivers/gpu/drm/ttm/ttm_bo.c 	kref_get(&bo->list_kref);
bo                894 drivers/gpu/drm/ttm/ttm_bo.c 	if (!list_empty(&bo->ddestroy)) {
bo                895 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_bo_cleanup_refs(bo, ctx->interruptible,
bo                897 drivers/gpu/drm/ttm/ttm_bo.c 		kref_put(&bo->list_kref, ttm_bo_release_list);
bo                901 drivers/gpu/drm/ttm/ttm_bo.c 	ttm_bo_del_from_lru(bo);
bo                904 drivers/gpu/drm/ttm/ttm_bo.c 	ret = ttm_bo_evict(bo, ctx);
bo                906 drivers/gpu/drm/ttm/ttm_bo.c 		ttm_bo_unreserve(bo);
bo                909 drivers/gpu/drm/ttm/ttm_bo.c 		ttm_bo_add_to_lru(bo);
bo                913 drivers/gpu/drm/ttm/ttm_bo.c 	kref_put(&bo->list_kref, ttm_bo_release_list);
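
ttm_mem_evict_first() asks driver->eviction_valuable() whether a candidate is worth evicting before doing any work. Drivers override it for cheap rejections and fall back to the exported default seen above; a sketch (the NO_EVICT check is illustrative only):

    static bool mydrv_eviction_valuable(struct ttm_buffer_object *bo,
                                        const struct ttm_place *place)
    {
            if (bo->mem.placement & TTM_PL_FLAG_NO_EVICT)
                    return false;           /* pinned: never worth evicting */
            return ttm_bo_eviction_valuable(bo, place);
    }
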
bo                917 drivers/gpu/drm/ttm/ttm_bo.c void ttm_bo_mem_put(struct ttm_buffer_object *bo, struct ttm_mem_reg *mem)
bo                919 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_mem_type_manager *man = &bo->bdev->man[mem->mem_type];
bo                929 drivers/gpu/drm/ttm/ttm_bo.c static int ttm_bo_add_move_fence(struct ttm_buffer_object *bo,
bo                947 drivers/gpu/drm/ttm/ttm_bo.c 	dma_resv_add_shared_fence(bo->base.resv, fence);
bo                949 drivers/gpu/drm/ttm/ttm_bo.c 	ret = dma_resv_reserve_shared(bo->base.resv, 1);
bo                955 drivers/gpu/drm/ttm/ttm_bo.c 	dma_fence_put(bo->moving);
bo                956 drivers/gpu/drm/ttm/ttm_bo.c 	bo->moving = fence;
bo                964 drivers/gpu/drm/ttm/ttm_bo.c static int ttm_bo_mem_force_space(struct ttm_buffer_object *bo,
bo                969 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_bo_device *bdev = bo->bdev;
bo                974 drivers/gpu/drm/ttm/ttm_bo.c 	ticket = dma_resv_locking_ctx(bo->base.resv);
bo                976 drivers/gpu/drm/ttm/ttm_bo.c 		ret = (*man->func->get_node)(man, bo, place, mem);
bo                987 drivers/gpu/drm/ttm/ttm_bo.c 	return ttm_bo_add_move_fence(bo, man, mem, ctx->no_wait_gpu);
bo               1045 drivers/gpu/drm/ttm/ttm_bo.c static int ttm_bo_mem_placement(struct ttm_buffer_object *bo,
bo               1050 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_bo_device *bdev = bo->bdev;
bo               1067 drivers/gpu/drm/ttm/ttm_bo.c 	cur_flags = ttm_bo_select_caching(man, bo->mem.placement, cur_flags);
bo               1077 drivers/gpu/drm/ttm/ttm_bo.c 	if (bo->mem.mem_type < mem_type && !list_empty(&bo->lru)) {
bo               1078 drivers/gpu/drm/ttm/ttm_bo.c 		spin_lock(&bo->bdev->glob->lru_lock);
bo               1079 drivers/gpu/drm/ttm/ttm_bo.c 		ttm_bo_del_from_lru(bo);
bo               1080 drivers/gpu/drm/ttm/ttm_bo.c 		ttm_bo_add_mem_to_lru(bo, mem);
bo               1081 drivers/gpu/drm/ttm/ttm_bo.c 		spin_unlock(&bo->bdev->glob->lru_lock);
bo               1095 drivers/gpu/drm/ttm/ttm_bo.c int ttm_bo_mem_space(struct ttm_buffer_object *bo,
bo               1100 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_bo_device *bdev = bo->bdev;
bo               1104 drivers/gpu/drm/ttm/ttm_bo.c 	ret = dma_resv_reserve_shared(bo->base.resv, 1);
bo               1113 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_bo_mem_placement(bo, place, mem, ctx);
bo               1125 drivers/gpu/drm/ttm/ttm_bo.c 		ret = (*man->func->get_node)(man, bo, place, mem);
bo               1132 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_bo_add_move_fence(bo, man, mem, ctx->no_wait_gpu);
bo               1146 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_bo_mem_placement(bo, place, mem, ctx);
bo               1157 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_bo_mem_force_space(bo, place, mem, ctx);
bo               1172 drivers/gpu/drm/ttm/ttm_bo.c 	if (bo->mem.mem_type == TTM_PL_SYSTEM && !list_empty(&bo->lru)) {
bo               1173 drivers/gpu/drm/ttm/ttm_bo.c 		spin_lock(&bo->bdev->glob->lru_lock);
bo               1174 drivers/gpu/drm/ttm/ttm_bo.c 		ttm_bo_move_to_lru_tail(bo, NULL);
bo               1175 drivers/gpu/drm/ttm/ttm_bo.c 		spin_unlock(&bo->bdev->glob->lru_lock);
bo               1182 drivers/gpu/drm/ttm/ttm_bo.c static int ttm_bo_move_buffer(struct ttm_buffer_object *bo,
bo               1189 drivers/gpu/drm/ttm/ttm_bo.c 	dma_resv_assert_held(bo->base.resv);
bo               1191 drivers/gpu/drm/ttm/ttm_bo.c 	mem.num_pages = bo->num_pages;
bo               1193 drivers/gpu/drm/ttm/ttm_bo.c 	mem.page_alignment = bo->mem.page_alignment;
bo               1199 drivers/gpu/drm/ttm/ttm_bo.c 	ret = ttm_bo_mem_space(bo, placement, &mem, ctx);
bo               1202 drivers/gpu/drm/ttm/ttm_bo.c 	ret = ttm_bo_handle_move_mem(bo, &mem, false, ctx);
bo               1205 drivers/gpu/drm/ttm/ttm_bo.c 		ttm_bo_mem_put(bo, &mem);
bo               1252 drivers/gpu/drm/ttm/ttm_bo.c int ttm_bo_validate(struct ttm_buffer_object *bo,
bo               1259 drivers/gpu/drm/ttm/ttm_bo.c 	dma_resv_assert_held(bo->base.resv);
bo               1263 drivers/gpu/drm/ttm/ttm_bo.c 	if (!ttm_bo_mem_compat(placement, &bo->mem, &new_flags)) {
bo               1264 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_bo_move_buffer(bo, placement, ctx);
bo               1272 drivers/gpu/drm/ttm/ttm_bo.c 		ttm_flag_masked(&bo->mem.placement, new_flags,
bo               1278 drivers/gpu/drm/ttm/ttm_bo.c 	if (bo->mem.mem_type == TTM_PL_SYSTEM && bo->ttm == NULL) {
bo               1279 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_tt_create(bo, true);
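
ttm_bo_validate() is the entry point drivers use to (re)place an already-initialized BO, and it requires the reservation to be held. The canonical calling sequence, assuming `placement` is a struct ttm_placement the caller filled in:

    struct ttm_operation_ctx ctx = {
            .interruptible = true,
            .no_wait_gpu   = false,
    };
    int ret;

    ret = ttm_bo_reserve(bo, true, false, NULL);
    if (ret)
            return ret;
    ret = ttm_bo_validate(bo, &placement, &ctx);
    ttm_bo_unreserve(bo);
    return ret;
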
bo               1288 drivers/gpu/drm/ttm/ttm_bo.c 			 struct ttm_buffer_object *bo,
bo               1308 drivers/gpu/drm/ttm/ttm_bo.c 			(*destroy)(bo);
bo               1310 drivers/gpu/drm/ttm/ttm_bo.c 			kfree(bo);
bo               1318 drivers/gpu/drm/ttm/ttm_bo.c 			(*destroy)(bo);
bo               1320 drivers/gpu/drm/ttm/ttm_bo.c 			kfree(bo);
bo               1324 drivers/gpu/drm/ttm/ttm_bo.c 	bo->destroy = destroy ? destroy : ttm_bo_default_destroy;
bo               1326 drivers/gpu/drm/ttm/ttm_bo.c 	kref_init(&bo->kref);
bo               1327 drivers/gpu/drm/ttm/ttm_bo.c 	kref_init(&bo->list_kref);
bo               1328 drivers/gpu/drm/ttm/ttm_bo.c 	atomic_set(&bo->cpu_writers, 0);
bo               1329 drivers/gpu/drm/ttm/ttm_bo.c 	INIT_LIST_HEAD(&bo->lru);
bo               1330 drivers/gpu/drm/ttm/ttm_bo.c 	INIT_LIST_HEAD(&bo->ddestroy);
bo               1331 drivers/gpu/drm/ttm/ttm_bo.c 	INIT_LIST_HEAD(&bo->swap);
bo               1332 drivers/gpu/drm/ttm/ttm_bo.c 	INIT_LIST_HEAD(&bo->io_reserve_lru);
bo               1333 drivers/gpu/drm/ttm/ttm_bo.c 	mutex_init(&bo->wu_mutex);
bo               1334 drivers/gpu/drm/ttm/ttm_bo.c 	bo->bdev = bdev;
bo               1335 drivers/gpu/drm/ttm/ttm_bo.c 	bo->type = type;
bo               1336 drivers/gpu/drm/ttm/ttm_bo.c 	bo->num_pages = num_pages;
bo               1337 drivers/gpu/drm/ttm/ttm_bo.c 	bo->mem.size = num_pages << PAGE_SHIFT;
bo               1338 drivers/gpu/drm/ttm/ttm_bo.c 	bo->mem.mem_type = TTM_PL_SYSTEM;
bo               1339 drivers/gpu/drm/ttm/ttm_bo.c 	bo->mem.num_pages = bo->num_pages;
bo               1340 drivers/gpu/drm/ttm/ttm_bo.c 	bo->mem.mm_node = NULL;
bo               1341 drivers/gpu/drm/ttm/ttm_bo.c 	bo->mem.page_alignment = page_alignment;
bo               1342 drivers/gpu/drm/ttm/ttm_bo.c 	bo->mem.bus.io_reserved_vm = false;
bo               1343 drivers/gpu/drm/ttm/ttm_bo.c 	bo->mem.bus.io_reserved_count = 0;
bo               1344 drivers/gpu/drm/ttm/ttm_bo.c 	bo->moving = NULL;
bo               1345 drivers/gpu/drm/ttm/ttm_bo.c 	bo->mem.placement = (TTM_PL_FLAG_SYSTEM | TTM_PL_FLAG_CACHED);
bo               1346 drivers/gpu/drm/ttm/ttm_bo.c 	bo->acc_size = acc_size;
bo               1347 drivers/gpu/drm/ttm/ttm_bo.c 	bo->sg = sg;
bo               1349 drivers/gpu/drm/ttm/ttm_bo.c 		bo->base.resv = resv;
bo               1350 drivers/gpu/drm/ttm/ttm_bo.c 		dma_resv_assert_held(bo->base.resv);
bo               1352 drivers/gpu/drm/ttm/ttm_bo.c 		bo->base.resv = &bo->base._resv;
bo               1354 drivers/gpu/drm/ttm/ttm_bo.c 	if (!ttm_bo_uses_embedded_gem_object(bo)) {
bo               1359 drivers/gpu/drm/ttm/ttm_bo.c 		dma_resv_init(&bo->base._resv);
bo               1360 drivers/gpu/drm/ttm/ttm_bo.c 		drm_vma_node_reset(&bo->base.vma_node);
bo               1362 drivers/gpu/drm/ttm/ttm_bo.c 	atomic_inc(&bo->bdev->glob->bo_count);
bo               1368 drivers/gpu/drm/ttm/ttm_bo.c 	if (bo->type == ttm_bo_type_device ||
bo               1369 drivers/gpu/drm/ttm/ttm_bo.c 	    bo->type == ttm_bo_type_sg)
bo               1370 drivers/gpu/drm/ttm/ttm_bo.c 		ret = drm_vma_offset_add(&bdev->vma_manager, &bo->base.vma_node,
bo               1371 drivers/gpu/drm/ttm/ttm_bo.c 					 bo->mem.num_pages);
bo               1377 drivers/gpu/drm/ttm/ttm_bo.c 		locked = dma_resv_trylock(bo->base.resv);
bo               1382 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_bo_validate(bo, placement, ctx);
bo               1386 drivers/gpu/drm/ttm/ttm_bo.c 			ttm_bo_unreserve(bo);
bo               1388 drivers/gpu/drm/ttm/ttm_bo.c 		ttm_bo_put(bo);
bo               1392 drivers/gpu/drm/ttm/ttm_bo.c 	if (resv && !(bo->mem.placement & TTM_PL_FLAG_NO_EVICT)) {
bo               1394 drivers/gpu/drm/ttm/ttm_bo.c 		ttm_bo_add_to_lru(bo);
bo               1403 drivers/gpu/drm/ttm/ttm_bo.c 		struct ttm_buffer_object *bo,
bo               1417 drivers/gpu/drm/ttm/ttm_bo.c 	ret = ttm_bo_init_reserved(bdev, bo, size, type, placement,
bo               1424 drivers/gpu/drm/ttm/ttm_bo.c 		ttm_bo_unreserve(bo);
bo               1466 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_buffer_object *bo;
bo               1470 drivers/gpu/drm/ttm/ttm_bo.c 	bo = kzalloc(sizeof(*bo), GFP_KERNEL);
bo               1471 drivers/gpu/drm/ttm/ttm_bo.c 	if (unlikely(bo == NULL))
bo               1475 drivers/gpu/drm/ttm/ttm_bo.c 	ret = ttm_bo_init(bdev, bo, size, type, placement, page_alignment,
bo               1479 drivers/gpu/drm/ttm/ttm_bo.c 		*p_bo = bo;
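
ttm_bo_create() above is the kzalloc-plus-ttm_bo_init() convenience wrapper. A sketch of allocating one page through it, assuming `bdev` and a prepared system-memory `placement`:

    struct ttm_buffer_object *bo;
    int ret;

    ret = ttm_bo_create(bdev, PAGE_SIZE, ttm_bo_type_kernel,
                        &placement, 0 /* page_alignment */,
                        true /* interruptible */, &bo);
    if (ret)
            return ret;
    /* ... */
    ttm_bo_put(bo);                 /* drop the creation reference */
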
bo               1794 drivers/gpu/drm/ttm/ttm_bo.c void ttm_bo_unmap_virtual_locked(struct ttm_buffer_object *bo)
bo               1796 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_bo_device *bdev = bo->bdev;
bo               1798 drivers/gpu/drm/ttm/ttm_bo.c 	drm_vma_node_unmap(&bo->base.vma_node, bdev->dev_mapping);
bo               1799 drivers/gpu/drm/ttm/ttm_bo.c 	ttm_mem_io_free_vm(bo);
bo               1802 drivers/gpu/drm/ttm/ttm_bo.c void ttm_bo_unmap_virtual(struct ttm_buffer_object *bo)
bo               1804 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_bo_device *bdev = bo->bdev;
bo               1805 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_mem_type_manager *man = &bdev->man[bo->mem.mem_type];
bo               1808 drivers/gpu/drm/ttm/ttm_bo.c 	ttm_bo_unmap_virtual_locked(bo);
bo               1815 drivers/gpu/drm/ttm/ttm_bo.c int ttm_bo_wait(struct ttm_buffer_object *bo,
bo               1821 drivers/gpu/drm/ttm/ttm_bo.c 		if (dma_resv_test_signaled_rcu(bo->base.resv, true))
bo               1827 drivers/gpu/drm/ttm/ttm_bo.c 	timeout = dma_resv_wait_timeout_rcu(bo->base.resv, true,
bo               1835 drivers/gpu/drm/ttm/ttm_bo.c 	dma_resv_add_excl_fence(bo->base.resv, NULL);
bo               1840 drivers/gpu/drm/ttm/ttm_bo.c int ttm_bo_synccpu_write_grab(struct ttm_buffer_object *bo, bool no_wait)
bo               1848 drivers/gpu/drm/ttm/ttm_bo.c 	ret = ttm_bo_reserve(bo, true, no_wait, NULL);
bo               1851 drivers/gpu/drm/ttm/ttm_bo.c 	ret = ttm_bo_wait(bo, true, no_wait);
bo               1853 drivers/gpu/drm/ttm/ttm_bo.c 		atomic_inc(&bo->cpu_writers);
bo               1854 drivers/gpu/drm/ttm/ttm_bo.c 	ttm_bo_unreserve(bo);
bo               1859 drivers/gpu/drm/ttm/ttm_bo.c void ttm_bo_synccpu_write_release(struct ttm_buffer_object *bo)
bo               1861 drivers/gpu/drm/ttm/ttm_bo.c 	atomic_dec(&bo->cpu_writers);
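
The synccpu pair above lets CPU code fence off concurrent GPU use and eviction while it writes through an existing mapping. Usage sketch:

    ret = ttm_bo_synccpu_write_grab(bo, false /* no_wait */);
    if (ret)        /* -ERESTARTSYS on signal; -EBUSY only with no_wait */
            return ret;
    /* ... CPU writes through an existing mapping ... */
    ttm_bo_synccpu_write_release(bo);
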
bo               1871 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_buffer_object *bo;
bo               1878 drivers/gpu/drm/ttm/ttm_bo.c 		list_for_each_entry(bo, &glob->swap_lru[i], swap) {
bo               1879 drivers/gpu/drm/ttm/ttm_bo.c 			if (ttm_bo_evict_swapout_allowable(bo, ctx, &locked,
bo               1894 drivers/gpu/drm/ttm/ttm_bo.c 	kref_get(&bo->list_kref);
bo               1896 drivers/gpu/drm/ttm/ttm_bo.c 	if (!list_empty(&bo->ddestroy)) {
bo               1897 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_bo_cleanup_refs(bo, false, false, locked);
bo               1898 drivers/gpu/drm/ttm/ttm_bo.c 		kref_put(&bo->list_kref, ttm_bo_release_list);
bo               1902 drivers/gpu/drm/ttm/ttm_bo.c 	ttm_bo_del_from_lru(bo);
bo               1909 drivers/gpu/drm/ttm/ttm_bo.c 	if (bo->mem.mem_type != TTM_PL_SYSTEM ||
bo               1910 drivers/gpu/drm/ttm/ttm_bo.c 	    bo->ttm->caching_state != tt_cached) {
bo               1914 drivers/gpu/drm/ttm/ttm_bo.c 		evict_mem = bo->mem;
bo               1919 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_bo_handle_move_mem(bo, &evict_mem, true, &ctx);
bo               1928 drivers/gpu/drm/ttm/ttm_bo.c 	ret = ttm_bo_wait(bo, false, false);
bo               1932 drivers/gpu/drm/ttm/ttm_bo.c 	ttm_bo_unmap_virtual(bo);
bo               1939 drivers/gpu/drm/ttm/ttm_bo.c 	if (bo->bdev->driver->swap_notify)
bo               1940 drivers/gpu/drm/ttm/ttm_bo.c 		bo->bdev->driver->swap_notify(bo);
bo               1942 drivers/gpu/drm/ttm/ttm_bo.c 	ret = ttm_tt_swapout(bo->ttm, bo->persistent_swap_storage);
bo               1951 drivers/gpu/drm/ttm/ttm_bo.c 		dma_resv_unlock(bo->base.resv);
bo               1952 drivers/gpu/drm/ttm/ttm_bo.c 	kref_put(&bo->list_kref, ttm_bo_release_list);
bo               1975 drivers/gpu/drm/ttm/ttm_bo.c int ttm_bo_wait_unreserved(struct ttm_buffer_object *bo)
bo               1986 drivers/gpu/drm/ttm/ttm_bo.c 	ret = mutex_lock_interruptible(&bo->wu_mutex);
bo               1989 drivers/gpu/drm/ttm/ttm_bo.c 	if (!dma_resv_is_locked(bo->base.resv))
bo               1991 drivers/gpu/drm/ttm/ttm_bo.c 	ret = dma_resv_lock_interruptible(bo->base.resv, NULL);
bo               1996 drivers/gpu/drm/ttm/ttm_bo.c 	dma_resv_unlock(bo->base.resv);
bo               1999 drivers/gpu/drm/ttm/ttm_bo.c 	mutex_unlock(&bo->wu_mutex);
bo                 52 drivers/gpu/drm/ttm/ttm_bo_manager.c 			       struct ttm_buffer_object *bo,
bo                 45 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_buffer_object *bo;
bo                 48 drivers/gpu/drm/ttm/ttm_bo_util.c void ttm_bo_free_old_node(struct ttm_buffer_object *bo)
bo                 50 drivers/gpu/drm/ttm/ttm_bo_util.c 	ttm_bo_mem_put(bo, &bo->mem);
bo                 53 drivers/gpu/drm/ttm/ttm_bo_util.c int ttm_bo_move_ttm(struct ttm_buffer_object *bo,
bo                 57 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_tt *ttm = bo->ttm;
bo                 58 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_mem_reg *old_mem = &bo->mem;
bo                 62 drivers/gpu/drm/ttm/ttm_bo_util.c 		ret = ttm_bo_wait(bo, ctx->interruptible, ctx->no_wait_gpu);
bo                 71 drivers/gpu/drm/ttm/ttm_bo_util.c 		ttm_bo_free_old_node(bo);
bo                118 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_buffer_object *bo;
bo                123 drivers/gpu/drm/ttm/ttm_bo_util.c 	bo = list_first_entry(&man->io_reserve_lru,
bo                126 drivers/gpu/drm/ttm/ttm_bo_util.c 	list_del_init(&bo->io_reserve_lru);
bo                127 drivers/gpu/drm/ttm/ttm_bo_util.c 	ttm_bo_unmap_virtual_locked(bo);
bo                174 drivers/gpu/drm/ttm/ttm_bo_util.c int ttm_mem_io_reserve_vm(struct ttm_buffer_object *bo)
bo                176 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_mem_reg *mem = &bo->mem;
bo                181 drivers/gpu/drm/ttm/ttm_bo_util.c 			&bo->bdev->man[mem->mem_type];
bo                183 drivers/gpu/drm/ttm/ttm_bo_util.c 		ret = ttm_mem_io_reserve(bo->bdev, mem);
bo                188 drivers/gpu/drm/ttm/ttm_bo_util.c 			list_add_tail(&bo->io_reserve_lru,
bo                194 drivers/gpu/drm/ttm/ttm_bo_util.c void ttm_mem_io_free_vm(struct ttm_buffer_object *bo)
bo                196 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_mem_reg *mem = &bo->mem;
bo                200 drivers/gpu/drm/ttm/ttm_bo_util.c 		list_del_init(&bo->io_reserve_lru);
bo                201 drivers/gpu/drm/ttm/ttm_bo_util.c 		ttm_mem_io_free(bo->bdev, mem);
bo                356 drivers/gpu/drm/ttm/ttm_bo_util.c int ttm_bo_move_memcpy(struct ttm_buffer_object *bo,
bo                360 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_bo_device *bdev = bo->bdev;
bo                362 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_tt *ttm = bo->ttm;
bo                363 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_mem_reg *old_mem = &bo->mem;
bo                373 drivers/gpu/drm/ttm/ttm_bo_util.c 	ret = ttm_bo_wait(bo, ctx->interruptible, ctx->no_wait_gpu);
bo                444 drivers/gpu/drm/ttm/ttm_bo_util.c 		bo->ttm = NULL;
bo                456 drivers/gpu/drm/ttm/ttm_bo_util.c 		ttm_bo_mem_put(bo, &old_copy);
bo                461 drivers/gpu/drm/ttm/ttm_bo_util.c static void ttm_transfered_destroy(struct ttm_buffer_object *bo)
bo                465 drivers/gpu/drm/ttm/ttm_bo_util.c 	fbo = container_of(bo, struct ttm_transfer_obj, base);
bo                466 drivers/gpu/drm/ttm/ttm_bo_util.c 	ttm_bo_put(fbo->bo);
bo                485 drivers/gpu/drm/ttm/ttm_bo_util.c static int ttm_buffer_object_transfer(struct ttm_buffer_object *bo,
bo                495 drivers/gpu/drm/ttm/ttm_bo_util.c 	fbo->base = *bo;
bo                498 drivers/gpu/drm/ttm/ttm_bo_util.c 	ttm_bo_get(bo);
bo                499 drivers/gpu/drm/ttm/ttm_bo_util.c 	fbo->bo = bo;
bo                506 drivers/gpu/drm/ttm/ttm_bo_util.c 	atomic_inc(&bo->bdev->glob->bo_count);
bo                555 drivers/gpu/drm/ttm/ttm_bo_util.c static int ttm_bo_ioremap(struct ttm_buffer_object *bo,
bo                560 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_mem_reg *mem = &bo->mem;
bo                562 drivers/gpu/drm/ttm/ttm_bo_util.c 	if (bo->mem.bus.addr) {
bo                564 drivers/gpu/drm/ttm/ttm_bo_util.c 		map->virtual = (void *)(((u8 *)bo->mem.bus.addr) + offset);
bo                568 drivers/gpu/drm/ttm/ttm_bo_util.c 			map->virtual = ioremap_wc(bo->mem.bus.base + bo->mem.bus.offset + offset,
bo                571 drivers/gpu/drm/ttm/ttm_bo_util.c 			map->virtual = ioremap_nocache(bo->mem.bus.base + bo->mem.bus.offset + offset,
bo                577 drivers/gpu/drm/ttm/ttm_bo_util.c static int ttm_bo_kmap_ttm(struct ttm_buffer_object *bo,
bo                582 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_mem_reg *mem = &bo->mem;
bo                587 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_tt *ttm = bo->ttm;
bo                619 drivers/gpu/drm/ttm/ttm_bo_util.c int ttm_bo_kmap(struct ttm_buffer_object *bo,
bo                624 drivers/gpu/drm/ttm/ttm_bo_util.c 		&bo->bdev->man[bo->mem.mem_type];
bo                629 drivers/gpu/drm/ttm/ttm_bo_util.c 	map->bo = bo;
bo                630 drivers/gpu/drm/ttm/ttm_bo_util.c 	if (num_pages > bo->num_pages)
bo                632 drivers/gpu/drm/ttm/ttm_bo_util.c 	if (start_page > bo->num_pages)
bo                636 drivers/gpu/drm/ttm/ttm_bo_util.c 	ret = ttm_mem_io_reserve(bo->bdev, &bo->mem);
bo                640 drivers/gpu/drm/ttm/ttm_bo_util.c 	if (!bo->mem.bus.is_iomem) {
bo                641 drivers/gpu/drm/ttm/ttm_bo_util.c 		return ttm_bo_kmap_ttm(bo, start_page, num_pages, map);
bo                645 drivers/gpu/drm/ttm/ttm_bo_util.c 		return ttm_bo_ioremap(bo, offset, size, map);
bo                652 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_buffer_object *bo = map->bo;
bo                654 drivers/gpu/drm/ttm/ttm_bo_util.c 		&bo->bdev->man[bo->mem.mem_type];
bo                674 drivers/gpu/drm/ttm/ttm_bo_util.c 	ttm_mem_io_free(map->bo->bdev, &map->bo->mem);
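
ttm_bo_kmap() hides whether the mapping came from ioremap (iomem apertures) or the TT pages, and ttm_kmap_obj_virtual() tells the caller which accessors are safe afterwards. Mapping a whole BO:

    struct ttm_bo_kmap_obj map;
    bool is_iomem;
    void *ptr;
    int ret;

    ret = ttm_bo_kmap(bo, 0, bo->num_pages, &map);
    if (ret)
            return ret;
    ptr = ttm_kmap_obj_virtual(&map, &is_iomem);
    /* if is_iomem, use memcpy_toio()/memcpy_fromio() style accessors */
    ttm_bo_kunmap(&map);
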
bo                681 drivers/gpu/drm/ttm/ttm_bo_util.c int ttm_bo_move_accel_cleanup(struct ttm_buffer_object *bo,
bo                686 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_bo_device *bdev = bo->bdev;
bo                688 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_mem_reg *old_mem = &bo->mem;
bo                692 drivers/gpu/drm/ttm/ttm_bo_util.c 	dma_resv_add_excl_fence(bo->base.resv, fence);
bo                694 drivers/gpu/drm/ttm/ttm_bo_util.c 		ret = ttm_bo_wait(bo, false, false);
bo                699 drivers/gpu/drm/ttm/ttm_bo_util.c 			ttm_tt_destroy(bo->ttm);
bo                700 drivers/gpu/drm/ttm/ttm_bo_util.c 			bo->ttm = NULL;
bo                702 drivers/gpu/drm/ttm/ttm_bo_util.c 		ttm_bo_free_old_node(bo);
bo                712 drivers/gpu/drm/ttm/ttm_bo_util.c 		dma_fence_put(bo->moving);
bo                713 drivers/gpu/drm/ttm/ttm_bo_util.c 		bo->moving = dma_fence_get(fence);
bo                715 drivers/gpu/drm/ttm/ttm_bo_util.c 		ret = ttm_buffer_object_transfer(bo, &ghost_obj);
bo                730 drivers/gpu/drm/ttm/ttm_bo_util.c 			bo->ttm = NULL;
bo                743 drivers/gpu/drm/ttm/ttm_bo_util.c int ttm_bo_pipeline_move(struct ttm_buffer_object *bo,
bo                747 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_bo_device *bdev = bo->bdev;
bo                748 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_mem_reg *old_mem = &bo->mem;
bo                755 drivers/gpu/drm/ttm/ttm_bo_util.c 	dma_resv_add_excl_fence(bo->base.resv, fence);
bo                768 drivers/gpu/drm/ttm/ttm_bo_util.c 		dma_fence_put(bo->moving);
bo                769 drivers/gpu/drm/ttm/ttm_bo_util.c 		bo->moving = dma_fence_get(fence);
bo                771 drivers/gpu/drm/ttm/ttm_bo_util.c 		ret = ttm_buffer_object_transfer(bo, &ghost_obj);
bo                786 drivers/gpu/drm/ttm/ttm_bo_util.c 			bo->ttm = NULL;
bo                805 drivers/gpu/drm/ttm/ttm_bo_util.c 		ttm_bo_free_old_node(bo);
bo                807 drivers/gpu/drm/ttm/ttm_bo_util.c 		dma_fence_put(bo->moving);
bo                808 drivers/gpu/drm/ttm/ttm_bo_util.c 		bo->moving = dma_fence_get(fence);
bo                817 drivers/gpu/drm/ttm/ttm_bo_util.c 		ret = ttm_bo_wait(bo, false, false);
bo                822 drivers/gpu/drm/ttm/ttm_bo_util.c 			ttm_tt_destroy(bo->ttm);
bo                823 drivers/gpu/drm/ttm/ttm_bo_util.c 			bo->ttm = NULL;
bo                825 drivers/gpu/drm/ttm/ttm_bo_util.c 		ttm_bo_free_old_node(bo);
bo                835 drivers/gpu/drm/ttm/ttm_bo_util.c int ttm_bo_pipeline_gutting(struct ttm_buffer_object *bo)
bo                840 drivers/gpu/drm/ttm/ttm_bo_util.c 	ret = ttm_buffer_object_transfer(bo, &ghost);
bo                844 drivers/gpu/drm/ttm/ttm_bo_util.c 	ret = dma_resv_copy_fences(ghost->base.resv, bo->base.resv);
bo                847 drivers/gpu/drm/ttm/ttm_bo_util.c 		ttm_bo_wait(bo, false, false);
bo                849 drivers/gpu/drm/ttm/ttm_bo_util.c 	memset(&bo->mem, 0, sizeof(bo->mem));
bo                850 drivers/gpu/drm/ttm/ttm_bo_util.c 	bo->mem.mem_type = TTM_PL_SYSTEM;
bo                851 drivers/gpu/drm/ttm/ttm_bo_util.c 	bo->ttm = NULL;
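
ttm_bo_move_accel_cleanup() and ttm_bo_pipeline_move() are the helpers a hardware-blit driver calls after queuing its copy; the ghost-object transfer above keeps the old backing store alive until the fence signals. A sketch of a driver->move built on them, with mydrv_copy_buffer() as a hypothetical helper returning the copy fence:

    static int mydrv_bo_move(struct ttm_buffer_object *bo, bool evict,
                             struct ttm_operation_ctx *ctx,
                             struct ttm_mem_reg *new_mem)
    {
            struct dma_fence *fence;
            int ret;

            fence = mydrv_copy_buffer(bo, new_mem); /* hypothetical blit */
            if (IS_ERR(fence))                      /* engine down: CPU copy */
                    return ttm_bo_move_memcpy(bo, ctx, new_mem);

            ret = ttm_bo_move_accel_cleanup(bo, fence, evict, new_mem);
            dma_fence_put(fence);
            return ret;
    }
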
bo                 47 drivers/gpu/drm/ttm/ttm_bo_vm.c static vm_fault_t ttm_bo_vm_fault_idle(struct ttm_buffer_object *bo,
bo                 53 drivers/gpu/drm/ttm/ttm_bo_vm.c 	if (likely(!bo->moving))
bo                 59 drivers/gpu/drm/ttm/ttm_bo_vm.c 	if (dma_fence_is_signaled(bo->moving))
bo                 71 drivers/gpu/drm/ttm/ttm_bo_vm.c 		ttm_bo_get(bo);
bo                 73 drivers/gpu/drm/ttm/ttm_bo_vm.c 		(void) dma_fence_wait(bo->moving, true);
bo                 74 drivers/gpu/drm/ttm/ttm_bo_vm.c 		dma_resv_unlock(bo->base.resv);
bo                 75 drivers/gpu/drm/ttm/ttm_bo_vm.c 		ttm_bo_put(bo);
bo                 82 drivers/gpu/drm/ttm/ttm_bo_vm.c 	err = dma_fence_wait(bo->moving, true);
bo                 90 drivers/gpu/drm/ttm/ttm_bo_vm.c 	dma_fence_put(bo->moving);
bo                 91 drivers/gpu/drm/ttm/ttm_bo_vm.c 	bo->moving = NULL;
bo                 97 drivers/gpu/drm/ttm/ttm_bo_vm.c static unsigned long ttm_bo_io_mem_pfn(struct ttm_buffer_object *bo,
bo                100 drivers/gpu/drm/ttm/ttm_bo_vm.c 	struct ttm_bo_device *bdev = bo->bdev;
bo                103 drivers/gpu/drm/ttm/ttm_bo_vm.c 		return bdev->driver->io_mem_pfn(bo, page_offset);
bo                105 drivers/gpu/drm/ttm/ttm_bo_vm.c 	return ((bo->mem.bus.base + bo->mem.bus.offset) >> PAGE_SHIFT)
bo                112 drivers/gpu/drm/ttm/ttm_bo_vm.c 	struct ttm_buffer_object *bo = (struct ttm_buffer_object *)
bo                114 drivers/gpu/drm/ttm/ttm_bo_vm.c 	struct ttm_bo_device *bdev = bo->bdev;
bo                125 drivers/gpu/drm/ttm/ttm_bo_vm.c 		&bdev->man[bo->mem.mem_type];
bo                134 drivers/gpu/drm/ttm/ttm_bo_vm.c 	if (unlikely(!dma_resv_trylock(bo->base.resv))) {
bo                137 drivers/gpu/drm/ttm/ttm_bo_vm.c 				ttm_bo_get(bo);
bo                139 drivers/gpu/drm/ttm/ttm_bo_vm.c 				(void) ttm_bo_wait_unreserved(bo);
bo                140 drivers/gpu/drm/ttm/ttm_bo_vm.c 				ttm_bo_put(bo);
bo                158 drivers/gpu/drm/ttm/ttm_bo_vm.c 	if (bo->ttm && (bo->ttm->page_flags & TTM_PAGE_FLAG_SG)) {
bo                164 drivers/gpu/drm/ttm/ttm_bo_vm.c 		struct dma_fence *moving = dma_fence_get(bo->moving);
bo                166 drivers/gpu/drm/ttm/ttm_bo_vm.c 		err = bdev->driver->fault_reserve_notify(bo);
bo                179 drivers/gpu/drm/ttm/ttm_bo_vm.c 		if (bo->moving != moving) {
bo                181 drivers/gpu/drm/ttm/ttm_bo_vm.c 			ttm_bo_move_to_lru_tail(bo, NULL);
bo                191 drivers/gpu/drm/ttm/ttm_bo_vm.c 	ret = ttm_bo_vm_fault_idle(bo, vmf);
bo                207 drivers/gpu/drm/ttm/ttm_bo_vm.c 	err = ttm_mem_io_reserve_vm(bo);
bo                214 drivers/gpu/drm/ttm/ttm_bo_vm.c 		vma->vm_pgoff - drm_vma_node_start(&bo->base.vma_node);
bo                216 drivers/gpu/drm/ttm/ttm_bo_vm.c 		drm_vma_node_start(&bo->base.vma_node);
bo                218 drivers/gpu/drm/ttm/ttm_bo_vm.c 	if (unlikely(page_offset >= bo->num_pages)) {
bo                231 drivers/gpu/drm/ttm/ttm_bo_vm.c 	if (bo->mem.bus.is_iomem) {
bo                232 drivers/gpu/drm/ttm/ttm_bo_vm.c 		cvma.vm_page_prot = ttm_io_prot(bo->mem.placement,
bo                242 drivers/gpu/drm/ttm/ttm_bo_vm.c 		ttm = bo->ttm;
bo                243 drivers/gpu/drm/ttm/ttm_bo_vm.c 		cvma.vm_page_prot = ttm_io_prot(bo->mem.placement,
bo                258 drivers/gpu/drm/ttm/ttm_bo_vm.c 		if (bo->mem.bus.is_iomem) {
bo                261 drivers/gpu/drm/ttm/ttm_bo_vm.c 			pfn = ttm_bo_io_mem_pfn(bo, page_offset);
bo                270 drivers/gpu/drm/ttm/ttm_bo_vm.c 			page->index = drm_vma_node_start(&bo->base.vma_node) +
bo                297 drivers/gpu/drm/ttm/ttm_bo_vm.c 	dma_resv_unlock(bo->base.resv);
bo                303 drivers/gpu/drm/ttm/ttm_bo_vm.c 	struct ttm_buffer_object *bo =
bo                306 drivers/gpu/drm/ttm/ttm_bo_vm.c 	WARN_ON(bo->bdev->dev_mapping != vma->vm_file->f_mapping);
bo                308 drivers/gpu/drm/ttm/ttm_bo_vm.c 	ttm_bo_get(bo);
bo                313 drivers/gpu/drm/ttm/ttm_bo_vm.c 	struct ttm_buffer_object *bo = (struct ttm_buffer_object *)vma->vm_private_data;
bo                315 drivers/gpu/drm/ttm/ttm_bo_vm.c 	ttm_bo_put(bo);
bo                319 drivers/gpu/drm/ttm/ttm_bo_vm.c static int ttm_bo_vm_access_kmap(struct ttm_buffer_object *bo,
bo                337 drivers/gpu/drm/ttm/ttm_bo_vm.c 		ret = ttm_bo_kmap(bo, page, 1, &map);
bo                362 drivers/gpu/drm/ttm/ttm_bo_vm.c 	struct ttm_buffer_object *bo = vma->vm_private_data;
bo                365 drivers/gpu/drm/ttm/ttm_bo_vm.c 	if (len < 1 || (offset + len) >> PAGE_SHIFT > bo->num_pages)
bo                368 drivers/gpu/drm/ttm/ttm_bo_vm.c 	ret = ttm_bo_reserve(bo, true, false, NULL);
bo                372 drivers/gpu/drm/ttm/ttm_bo_vm.c 	switch (bo->mem.mem_type) {
bo                374 drivers/gpu/drm/ttm/ttm_bo_vm.c 		if (unlikely(bo->ttm->page_flags & TTM_PAGE_FLAG_SWAPPED)) {
bo                375 drivers/gpu/drm/ttm/ttm_bo_vm.c 			ret = ttm_tt_swapin(bo->ttm);
bo                381 drivers/gpu/drm/ttm/ttm_bo_vm.c 		ret = ttm_bo_vm_access_kmap(bo, offset, buf, len, write);
bo                384 drivers/gpu/drm/ttm/ttm_bo_vm.c 		if (bo->bdev->driver->access_memory)
bo                385 drivers/gpu/drm/ttm/ttm_bo_vm.c 			ret = bo->bdev->driver->access_memory(
bo                386 drivers/gpu/drm/ttm/ttm_bo_vm.c 				bo, offset, buf, len, write);
bo                391 drivers/gpu/drm/ttm/ttm_bo_vm.c 	ttm_bo_unreserve(bo);
bo                408 drivers/gpu/drm/ttm/ttm_bo_vm.c 	struct ttm_buffer_object *bo = NULL;
bo                414 drivers/gpu/drm/ttm/ttm_bo_vm.c 		bo = container_of(node, struct ttm_buffer_object,
bo                416 drivers/gpu/drm/ttm/ttm_bo_vm.c 		bo = ttm_bo_get_unless_zero(bo);
bo                421 drivers/gpu/drm/ttm/ttm_bo_vm.c 	if (!bo)
bo                424 drivers/gpu/drm/ttm/ttm_bo_vm.c 	return bo;
bo                431 drivers/gpu/drm/ttm/ttm_bo_vm.c 	struct ttm_buffer_object *bo;
bo                437 drivers/gpu/drm/ttm/ttm_bo_vm.c 	bo = ttm_bo_vm_lookup(bdev, vma->vm_pgoff, vma_pages(vma));
bo                438 drivers/gpu/drm/ttm/ttm_bo_vm.c 	if (unlikely(!bo))
bo                441 drivers/gpu/drm/ttm/ttm_bo_vm.c 	driver = bo->bdev->driver;
bo                446 drivers/gpu/drm/ttm/ttm_bo_vm.c 	ret = driver->verify_access(bo, filp);
bo                457 drivers/gpu/drm/ttm/ttm_bo_vm.c 	vma->vm_private_data = bo;
bo                470 drivers/gpu/drm/ttm/ttm_bo_vm.c 	ttm_bo_put(bo);
bo                475 drivers/gpu/drm/ttm/ttm_bo_vm.c int ttm_fbdev_mmap(struct vm_area_struct *vma, struct ttm_buffer_object *bo)
bo                480 drivers/gpu/drm/ttm/ttm_bo_vm.c 	ttm_bo_get(bo);
bo                483 drivers/gpu/drm/ttm/ttm_bo_vm.c 	vma->vm_private_data = bo;
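
All of the fault machinery above is reached through ttm_bo_mmap(); a driver's file_operations.mmap typically just locates its device and forwards, roughly as follows (mydrv_device() is a hypothetical container accessor):

    static int mydrv_mmap(struct file *filp, struct vm_area_struct *vma)
    {
            struct drm_file *file_priv = filp->private_data;
            struct mydrv_device *mdev = mydrv_device(file_priv->minor->dev);

            return ttm_bo_mmap(filp, vma, &mdev->bdev);
    }
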
bo                 40 drivers/gpu/drm/ttm/ttm_execbuf_util.c 		struct ttm_buffer_object *bo = entry->bo;
bo                 42 drivers/gpu/drm/ttm/ttm_execbuf_util.c 		dma_resv_unlock(bo->base.resv);
bo                 51 drivers/gpu/drm/ttm/ttm_execbuf_util.c 		struct ttm_buffer_object *bo = entry->bo;
bo                 52 drivers/gpu/drm/ttm/ttm_execbuf_util.c 		ttm_bo_del_from_lru(bo);
bo                 66 drivers/gpu/drm/ttm/ttm_execbuf_util.c 	glob = entry->bo->bdev->glob;
bo                 70 drivers/gpu/drm/ttm/ttm_execbuf_util.c 		struct ttm_buffer_object *bo = entry->bo;
bo                 72 drivers/gpu/drm/ttm/ttm_execbuf_util.c 		if (list_empty(&bo->lru))
bo                 73 drivers/gpu/drm/ttm/ttm_execbuf_util.c 			ttm_bo_add_to_lru(bo);
bo                 74 drivers/gpu/drm/ttm/ttm_execbuf_util.c 		dma_resv_unlock(bo->base.resv);
bo                107 drivers/gpu/drm/ttm/ttm_execbuf_util.c 	glob = entry->bo->bdev->glob;
bo                113 drivers/gpu/drm/ttm/ttm_execbuf_util.c 		struct ttm_buffer_object *bo = entry->bo;
bo                115 drivers/gpu/drm/ttm/ttm_execbuf_util.c 		ret = __ttm_bo_reserve(bo, intr, (ticket == NULL), ticket);
bo                116 drivers/gpu/drm/ttm/ttm_execbuf_util.c 		if (!ret && unlikely(atomic_read(&bo->cpu_writers) > 0)) {
bo                117 drivers/gpu/drm/ttm/ttm_execbuf_util.c 			dma_resv_unlock(bo->base.resv);
bo                133 drivers/gpu/drm/ttm/ttm_execbuf_util.c 			ret = dma_resv_reserve_shared(bo->base.resv,
bo                147 drivers/gpu/drm/ttm/ttm_execbuf_util.c 				ret = dma_resv_lock_slow_interruptible(bo->base.resv,
bo                150 drivers/gpu/drm/ttm/ttm_execbuf_util.c 				dma_resv_lock_slow(bo->base.resv, ticket);
bo                156 drivers/gpu/drm/ttm/ttm_execbuf_util.c 			ret = dma_resv_reserve_shared(bo->base.resv,
bo                190 drivers/gpu/drm/ttm/ttm_execbuf_util.c 	struct ttm_buffer_object *bo;
bo                196 drivers/gpu/drm/ttm/ttm_execbuf_util.c 	bo = list_first_entry(list, struct ttm_validate_buffer, head)->bo;
bo                197 drivers/gpu/drm/ttm/ttm_execbuf_util.c 	glob = bo->bdev->glob;
bo                202 drivers/gpu/drm/ttm/ttm_execbuf_util.c 		bo = entry->bo;
bo                204 drivers/gpu/drm/ttm/ttm_execbuf_util.c 			dma_resv_add_shared_fence(bo->base.resv, fence);
bo                206 drivers/gpu/drm/ttm/ttm_execbuf_util.c 			dma_resv_add_excl_fence(bo->base.resv, fence);
bo                207 drivers/gpu/drm/ttm/ttm_execbuf_util.c 		if (list_empty(&bo->lru))
bo                208 drivers/gpu/drm/ttm/ttm_execbuf_util.c 			ttm_bo_add_to_lru(bo);
bo                210 drivers/gpu/drm/ttm/ttm_execbuf_util.c 			ttm_bo_move_to_lru_tail(bo, NULL);
bo                211 drivers/gpu/drm/ttm/ttm_execbuf_util.c 		dma_resv_unlock(bo->base.resv);
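
The execbuf helpers implement the reserve-many / fence-many pattern over ww_mutex. A single-BO sketch; note that the ttm_eu_reserve_buffers() argument list has shifted across kernel versions, so this is assumed to match the tree indexed here:

    struct ww_acquire_ctx ticket;
    struct ttm_validate_buffer entry = { .bo = bo, .num_shared = 1 };
    LIST_HEAD(list);
    int ret;

    list_add(&entry.head, &list);
    ret = ttm_eu_reserve_buffers(&ticket, &list, true /* intr */,
                                 NULL /* dups */, true /* del_lru */);
    if (ret)
            return ret;
    /* ... submit work that produces `fence` ... */
    ttm_eu_fence_buffer_objects(&ticket, &list, fence);
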
bo                 46 drivers/gpu/drm/ttm/ttm_tt.c int ttm_tt_create(struct ttm_buffer_object *bo, bool zero_alloc)
bo                 48 drivers/gpu/drm/ttm/ttm_tt.c 	struct ttm_bo_device *bdev = bo->bdev;
bo                 51 drivers/gpu/drm/ttm/ttm_tt.c 	dma_resv_assert_held(bo->base.resv);
bo                 59 drivers/gpu/drm/ttm/ttm_tt.c 	switch (bo->type) {
bo                 70 drivers/gpu/drm/ttm/ttm_tt.c 		bo->ttm = NULL;
bo                 75 drivers/gpu/drm/ttm/ttm_tt.c 	bo->ttm = bdev->driver->ttm_tt_create(bo, page_flags);
bo                 76 drivers/gpu/drm/ttm/ttm_tt.c 	if (unlikely(bo->ttm == NULL))
bo                226 drivers/gpu/drm/ttm/ttm_tt.c void ttm_tt_init_fields(struct ttm_tt *ttm, struct ttm_buffer_object *bo,
bo                229 drivers/gpu/drm/ttm/ttm_tt.c 	ttm->bdev = bo->bdev;
bo                230 drivers/gpu/drm/ttm/ttm_tt.c 	ttm->num_pages = bo->num_pages;
bo                235 drivers/gpu/drm/ttm/ttm_tt.c 	ttm->sg = bo->sg;
bo                238 drivers/gpu/drm/ttm/ttm_tt.c int ttm_tt_init(struct ttm_tt *ttm, struct ttm_buffer_object *bo,
bo                241 drivers/gpu/drm/ttm/ttm_tt.c 	ttm_tt_init_fields(ttm, bo, page_flags);
bo                259 drivers/gpu/drm/ttm/ttm_tt.c int ttm_dma_tt_init(struct ttm_dma_tt *ttm_dma, struct ttm_buffer_object *bo,
bo                264 drivers/gpu/drm/ttm/ttm_tt.c 	ttm_tt_init_fields(ttm, bo, page_flags);
bo                276 drivers/gpu/drm/ttm/ttm_tt.c int ttm_sg_tt_init(struct ttm_dma_tt *ttm_dma, struct ttm_buffer_object *bo,
bo                282 drivers/gpu/drm/ttm/ttm_tt.c 	ttm_tt_init_fields(ttm, bo, page_flags);
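
ttm_tt_create() above dispatches to driver->ttm_tt_create to obtain the backing-page container. A sketch of that hook using the DMA-aware variant initialized by ttm_dma_tt_init() as listed:

    static struct ttm_tt *mydrv_ttm_tt_create(struct ttm_buffer_object *bo,
                                              uint32_t page_flags)
    {
            struct ttm_dma_tt *dma_tt;

            dma_tt = kzalloc(sizeof(*dma_tt), GFP_KERNEL);
            if (!dma_tt)
                    return NULL;
            if (ttm_dma_tt_init(dma_tt, bo, page_flags)) {
                    kfree(dma_tt);
                    return NULL;
            }
            return &dma_tt->ttm;
    }
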
bo                 34 drivers/gpu/drm/v3d/v3d_bo.c 	struct v3d_bo *bo = to_v3d_bo(obj);
bo                 36 drivers/gpu/drm/v3d/v3d_bo.c 	v3d_mmu_remove_ptes(bo);
bo                 44 drivers/gpu/drm/v3d/v3d_bo.c 	drm_mm_remove_node(&bo->node);
bo                 48 drivers/gpu/drm/v3d/v3d_bo.c 	bo->base.pages_mark_dirty_on_put = true;
bo                 69 drivers/gpu/drm/v3d/v3d_bo.c 	struct v3d_bo *bo;
bo                 75 drivers/gpu/drm/v3d/v3d_bo.c 	bo = kzalloc(sizeof(*bo), GFP_KERNEL);
bo                 76 drivers/gpu/drm/v3d/v3d_bo.c 	if (!bo)
bo                 78 drivers/gpu/drm/v3d/v3d_bo.c 	obj = &bo->base.base;
bo                 82 drivers/gpu/drm/v3d/v3d_bo.c 	INIT_LIST_HEAD(&bo->unref_head);
bo                 84 drivers/gpu/drm/v3d/v3d_bo.c 	return &bo->base.base;
bo                 91 drivers/gpu/drm/v3d/v3d_bo.c 	struct v3d_bo *bo = to_v3d_bo(obj);
bo                 98 drivers/gpu/drm/v3d/v3d_bo.c 	sgt = drm_gem_shmem_get_pages_sgt(&bo->base.base);
bo                107 drivers/gpu/drm/v3d/v3d_bo.c 	ret = drm_mm_insert_node_generic(&v3d->mm, &bo->node,
bo                120 drivers/gpu/drm/v3d/v3d_bo.c 	v3d_mmu_insert_ptes(bo);
bo                129 drivers/gpu/drm/v3d/v3d_bo.c 	struct v3d_bo *bo;
bo                135 drivers/gpu/drm/v3d/v3d_bo.c 	bo = to_v3d_bo(&shmem_obj->base);
bo                141 drivers/gpu/drm/v3d/v3d_bo.c 	return bo;
bo                173 drivers/gpu/drm/v3d/v3d_bo.c 	struct v3d_bo *bo = NULL;
bo                181 drivers/gpu/drm/v3d/v3d_bo.c 	bo = v3d_bo_create(dev, file_priv, PAGE_ALIGN(args->size));
bo                182 drivers/gpu/drm/v3d/v3d_bo.c 	if (IS_ERR(bo))
bo                183 drivers/gpu/drm/v3d/v3d_bo.c 		return PTR_ERR(bo);
bo                185 drivers/gpu/drm/v3d/v3d_bo.c 	args->offset = bo->node.start << PAGE_SHIFT;
bo                187 drivers/gpu/drm/v3d/v3d_bo.c 	ret = drm_gem_handle_create(file_priv, &bo->base.base, &args->handle);
bo                188 drivers/gpu/drm/v3d/v3d_bo.c 	drm_gem_object_put_unlocked(&bo->base.base);
bo                221 drivers/gpu/drm/v3d/v3d_bo.c 	struct v3d_bo *bo;
bo                228 drivers/gpu/drm/v3d/v3d_bo.c 	bo = to_v3d_bo(gem_obj);
bo                230 drivers/gpu/drm/v3d/v3d_bo.c 	args->offset = bo->node.start << PAGE_SHIFT;
bo                152 drivers/gpu/drm/v3d/v3d_drv.h to_v3d_bo(struct drm_gem_object *bo)
bo                154 drivers/gpu/drm/v3d/v3d_drv.h 	return (struct v3d_bo *)bo;
bo                193 drivers/gpu/drm/v3d/v3d_drv.h 	struct drm_gem_object **bo;
bo                333 drivers/gpu/drm/v3d/v3d_drv.h int v3d_mmu_get_offset(struct drm_file *file_priv, struct v3d_bo *bo,
bo                336 drivers/gpu/drm/v3d/v3d_drv.h void v3d_mmu_insert_ptes(struct v3d_bo *bo);
bo                337 drivers/gpu/drm/v3d/v3d_drv.h void v3d_mmu_remove_ptes(struct v3d_bo *bo);
bo                255 drivers/gpu/drm/v3d/v3d_gem.c 	ret = drm_gem_lock_reservations(job->bo, job->bo_count, acquire_ctx);
bo                261 drivers/gpu/drm/v3d/v3d_gem.c 						       job->bo[i], true);
bo                263 drivers/gpu/drm/v3d/v3d_gem.c 			drm_gem_unlock_reservations(job->bo, job->bo_count,
bo                307 drivers/gpu/drm/v3d/v3d_gem.c 	job->bo = kvmalloc_array(job->bo_count,
bo                310 drivers/gpu/drm/v3d/v3d_gem.c 	if (!job->bo) {
bo                332 drivers/gpu/drm/v3d/v3d_gem.c 		struct drm_gem_object *bo = idr_find(&file_priv->object_idr,
bo                334 drivers/gpu/drm/v3d/v3d_gem.c 		if (!bo) {
bo                341 drivers/gpu/drm/v3d/v3d_gem.c 		drm_gem_object_get(bo);
bo                342 drivers/gpu/drm/v3d/v3d_gem.c 		job->bo[i] = bo;
bo                360 drivers/gpu/drm/v3d/v3d_gem.c 		if (job->bo[i])
bo                361 drivers/gpu/drm/v3d/v3d_gem.c 			drm_gem_object_put_unlocked(job->bo[i]);
bo                363 drivers/gpu/drm/v3d/v3d_gem.c 	kvfree(job->bo);
bo                384 drivers/gpu/drm/v3d/v3d_gem.c 	struct v3d_bo *bo, *save;
bo                386 drivers/gpu/drm/v3d/v3d_gem.c 	list_for_each_entry_safe(bo, save, &job->unref_list, unref_head) {
bo                387 drivers/gpu/drm/v3d/v3d_gem.c 		drm_gem_object_put_unlocked(&bo->base.base);
bo                498 drivers/gpu/drm/v3d/v3d_gem.c 		dma_resv_add_excl_fence(job->bo[i]->resv,
bo                502 drivers/gpu/drm/v3d/v3d_gem.c 	drm_gem_unlock_reservations(job->bo, job->bo_count, acquire_ctx);
bo                621 drivers/gpu/drm/v3d/v3d_gem.c 	drm_gem_unlock_reservations(render->base.bo,
bo                664 drivers/gpu/drm/v3d/v3d_gem.c 	job->base.bo = kcalloc(ARRAY_SIZE(args->bo_handles),
bo                665 drivers/gpu/drm/v3d/v3d_gem.c 			       sizeof(*job->base.bo), GFP_KERNEL);
bo                666 drivers/gpu/drm/v3d/v3d_gem.c 	if (!job->base.bo) {
bo                677 drivers/gpu/drm/v3d/v3d_gem.c 		struct drm_gem_object *bo;
bo                682 drivers/gpu/drm/v3d/v3d_gem.c 		bo = idr_find(&file_priv->object_idr,
bo                684 drivers/gpu/drm/v3d/v3d_gem.c 		if (!bo) {
bo                692 drivers/gpu/drm/v3d/v3d_gem.c 		drm_gem_object_get(bo);
bo                693 drivers/gpu/drm/v3d/v3d_gem.c 		job->base.bo[job->base.bo_count] = bo;
bo                718 drivers/gpu/drm/v3d/v3d_gem.c 	drm_gem_unlock_reservations(job->base.bo, job->base.bo_count,
bo                818 drivers/gpu/drm/v3d/v3d_gem.c 	drm_gem_unlock_reservations(clean_job->bo, clean_job->bo_count,
bo                 42 drivers/gpu/drm/v3d/v3d_irq.c 	struct v3d_bo *bo = v3d_bo_create(dev, NULL /* XXX: GMP */, 256 * 1024);
bo                 46 drivers/gpu/drm/v3d/v3d_irq.c 	if (IS_ERR(bo)) {
bo                 50 drivers/gpu/drm/v3d/v3d_irq.c 	obj = &bo->base.base;
bo                 68 drivers/gpu/drm/v3d/v3d_irq.c 	list_add_tail(&bo->unref_head, &v3d->bin_job->render->unref_list);
bo                 71 drivers/gpu/drm/v3d/v3d_irq.c 	V3D_CORE_WRITE(0, V3D_PTB_BPOA, bo->node.start << PAGE_SHIFT);
bo                 87 drivers/gpu/drm/v3d/v3d_mmu.c void v3d_mmu_insert_ptes(struct v3d_bo *bo)
bo                 89 drivers/gpu/drm/v3d/v3d_mmu.c 	struct drm_gem_shmem_object *shmem_obj = &bo->base;
bo                 91 drivers/gpu/drm/v3d/v3d_mmu.c 	u32 page = bo->node.start;
bo                108 drivers/gpu/drm/v3d/v3d_mmu.c 	WARN_ON_ONCE(page - bo->node.start !=
bo                115 drivers/gpu/drm/v3d/v3d_mmu.c void v3d_mmu_remove_ptes(struct v3d_bo *bo)
bo                117 drivers/gpu/drm/v3d/v3d_mmu.c 	struct v3d_dev *v3d = to_v3d_dev(bo->base.base.dev);
bo                118 drivers/gpu/drm/v3d/v3d_mmu.c 	u32 npages = bo->base.base.size >> V3D_MMU_PAGE_SHIFT;
bo                121 drivers/gpu/drm/v3d/v3d_mmu.c 	for (page = bo->node.start; page < bo->node.start + npages; page++)
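
v3d's job setup resolves userspace handles into pinned GEM pointers under the file's table_lock, as the v3d_gem.c entries show. A condensed sketch of that loop (`handles` is the hypothetical array copied from userspace; error unwinding omitted):

    spin_lock(&file_priv->table_lock);
    for (i = 0; i < job->bo_count; i++) {
            struct drm_gem_object *bo =
                    idr_find(&file_priv->object_idr, handles[i]);

            if (!bo) {
                    ret = -ENOENT;          /* stale or bogus handle */
                    break;
            }
            drm_gem_object_get(bo);         /* pin across the job lifetime */
            job->bo[i] = bo;
    }
    spin_unlock(&file_priv->table_lock);
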
bo                295 drivers/gpu/drm/vboxvideo/vbox_main.c 	*obj = &gbo->bo.base;
bo                130 drivers/gpu/drm/vc4/vc4_bo.c 	struct vc4_bo *bo = to_vc4_bo(gem_obj);
bo                140 drivers/gpu/drm/vc4/vc4_bo.c 	vc4->bo_labels[bo->label].num_allocated--;
bo                141 drivers/gpu/drm/vc4/vc4_bo.c 	vc4->bo_labels[bo->label].size_allocated -= gem_obj->size;
bo                143 drivers/gpu/drm/vc4/vc4_bo.c 	if (vc4->bo_labels[bo->label].num_allocated == 0 &&
bo                144 drivers/gpu/drm/vc4/vc4_bo.c 	    is_user_label(bo->label)) {
bo                150 drivers/gpu/drm/vc4/vc4_bo.c 		kfree(vc4->bo_labels[bo->label].name);
bo                151 drivers/gpu/drm/vc4/vc4_bo.c 		vc4->bo_labels[bo->label].name = NULL;
bo                154 drivers/gpu/drm/vc4/vc4_bo.c 	bo->label = label;
bo                162 drivers/gpu/drm/vc4/vc4_bo.c static void vc4_bo_destroy(struct vc4_bo *bo)
bo                164 drivers/gpu/drm/vc4/vc4_bo.c 	struct drm_gem_object *obj = &bo->base.base;
bo                171 drivers/gpu/drm/vc4/vc4_bo.c 	if (bo->validated_shader) {
bo                172 drivers/gpu/drm/vc4/vc4_bo.c 		kfree(bo->validated_shader->uniform_addr_offsets);
bo                173 drivers/gpu/drm/vc4/vc4_bo.c 		kfree(bo->validated_shader->texture_samples);
bo                174 drivers/gpu/drm/vc4/vc4_bo.c 		kfree(bo->validated_shader);
bo                175 drivers/gpu/drm/vc4/vc4_bo.c 		bo->validated_shader = NULL;
bo                181 drivers/gpu/drm/vc4/vc4_bo.c static void vc4_bo_remove_from_cache(struct vc4_bo *bo)
bo                183 drivers/gpu/drm/vc4/vc4_bo.c 	struct vc4_dev *vc4 = to_vc4_dev(bo->base.base.dev);
bo                186 drivers/gpu/drm/vc4/vc4_bo.c 	list_del(&bo->unref_head);
bo                187 drivers/gpu/drm/vc4/vc4_bo.c 	list_del(&bo->size_head);
bo                237 drivers/gpu/drm/vc4/vc4_bo.c 		struct vc4_bo *bo = list_last_entry(&vc4->bo_cache.time_list,
bo                239 drivers/gpu/drm/vc4/vc4_bo.c 		vc4_bo_remove_from_cache(bo);
bo                240 drivers/gpu/drm/vc4/vc4_bo.c 		vc4_bo_destroy(bo);
bo                245 drivers/gpu/drm/vc4/vc4_bo.c void vc4_bo_add_to_purgeable_pool(struct vc4_bo *bo)
bo                247 drivers/gpu/drm/vc4/vc4_bo.c 	struct vc4_dev *vc4 = to_vc4_dev(bo->base.base.dev);
bo                250 drivers/gpu/drm/vc4/vc4_bo.c 	list_add_tail(&bo->size_head, &vc4->purgeable.list);
bo                252 drivers/gpu/drm/vc4/vc4_bo.c 	vc4->purgeable.size += bo->base.base.size;
bo                256 drivers/gpu/drm/vc4/vc4_bo.c static void vc4_bo_remove_from_purgeable_pool_locked(struct vc4_bo *bo)
bo                258 drivers/gpu/drm/vc4/vc4_bo.c 	struct vc4_dev *vc4 = to_vc4_dev(bo->base.base.dev);
bo                272 drivers/gpu/drm/vc4/vc4_bo.c 	list_del_init(&bo->size_head);
bo                274 drivers/gpu/drm/vc4/vc4_bo.c 	vc4->purgeable.size -= bo->base.base.size;
bo                277 drivers/gpu/drm/vc4/vc4_bo.c void vc4_bo_remove_from_purgeable_pool(struct vc4_bo *bo)
bo                279 drivers/gpu/drm/vc4/vc4_bo.c 	struct vc4_dev *vc4 = to_vc4_dev(bo->base.base.dev);
bo                282 drivers/gpu/drm/vc4/vc4_bo.c 	vc4_bo_remove_from_purgeable_pool_locked(bo);
bo                288 drivers/gpu/drm/vc4/vc4_bo.c 	struct vc4_bo *bo = to_vc4_bo(obj);
bo                291 drivers/gpu/drm/vc4/vc4_bo.c 	WARN_ON(!mutex_is_locked(&bo->madv_lock));
bo                292 drivers/gpu/drm/vc4/vc4_bo.c 	WARN_ON(bo->madv != VC4_MADV_DONTNEED);
bo                296 drivers/gpu/drm/vc4/vc4_bo.c 	dma_free_wc(dev->dev, obj->size, bo->base.vaddr, bo->base.paddr);
bo                297 drivers/gpu/drm/vc4/vc4_bo.c 	bo->base.vaddr = NULL;
bo                298 drivers/gpu/drm/vc4/vc4_bo.c 	bo->madv = __VC4_MADV_PURGED;
bo                307 drivers/gpu/drm/vc4/vc4_bo.c 		struct vc4_bo *bo = list_first_entry(&vc4->purgeable.list,
bo                309 drivers/gpu/drm/vc4/vc4_bo.c 		struct drm_gem_object *obj = &bo->base.base;
bo                312 drivers/gpu/drm/vc4/vc4_bo.c 		vc4_bo_remove_from_purgeable_pool_locked(bo);
bo                320 drivers/gpu/drm/vc4/vc4_bo.c 		mutex_lock(&bo->madv_lock);
bo                331 drivers/gpu/drm/vc4/vc4_bo.c 		if (bo->madv == VC4_MADV_DONTNEED &&
bo                332 drivers/gpu/drm/vc4/vc4_bo.c 		    list_empty(&bo->size_head) &&
bo                333 drivers/gpu/drm/vc4/vc4_bo.c 		    !refcount_read(&bo->usecnt)) {
bo                334 drivers/gpu/drm/vc4/vc4_bo.c 			purged_size = bo->base.base.size;
bo                337 drivers/gpu/drm/vc4/vc4_bo.c 		mutex_unlock(&bo->madv_lock);
bo                354 drivers/gpu/drm/vc4/vc4_bo.c 	struct vc4_bo *bo = NULL;
bo                365 drivers/gpu/drm/vc4/vc4_bo.c 	bo = list_first_entry(&vc4->bo_cache.size_list[page_index],
bo                367 drivers/gpu/drm/vc4/vc4_bo.c 	vc4_bo_remove_from_cache(bo);
bo                368 drivers/gpu/drm/vc4/vc4_bo.c 	kref_init(&bo->base.base.refcount);
bo                371 drivers/gpu/drm/vc4/vc4_bo.c 	if (bo)
bo                372 drivers/gpu/drm/vc4/vc4_bo.c 		vc4_bo_set_label(&bo->base.base, type);
bo                374 drivers/gpu/drm/vc4/vc4_bo.c 	return bo;
bo                388 drivers/gpu/drm/vc4/vc4_bo.c 	struct vc4_bo *bo;
bo                390 drivers/gpu/drm/vc4/vc4_bo.c 	bo = kzalloc(sizeof(*bo), GFP_KERNEL);
bo                391 drivers/gpu/drm/vc4/vc4_bo.c 	if (!bo)
bo                394 drivers/gpu/drm/vc4/vc4_bo.c 	bo->madv = VC4_MADV_WILLNEED;
bo                395 drivers/gpu/drm/vc4/vc4_bo.c 	refcount_set(&bo->usecnt, 0);
bo                396 drivers/gpu/drm/vc4/vc4_bo.c 	mutex_init(&bo->madv_lock);
bo                398 drivers/gpu/drm/vc4/vc4_bo.c 	bo->label = VC4_BO_TYPE_KERNEL;
bo                403 drivers/gpu/drm/vc4/vc4_bo.c 	return &bo->base.base;
bo                412 drivers/gpu/drm/vc4/vc4_bo.c 	struct vc4_bo *bo;
bo                418 drivers/gpu/drm/vc4/vc4_bo.c 	bo = vc4_bo_get_from_cache(dev, size, type);
bo                419 drivers/gpu/drm/vc4/vc4_bo.c 	if (bo) {
bo                421 drivers/gpu/drm/vc4/vc4_bo.c 			memset(bo->base.vaddr, 0, bo->base.base.size);
bo                422 drivers/gpu/drm/vc4/vc4_bo.c 		return bo;
bo                457 drivers/gpu/drm/vc4/vc4_bo.c 	bo = to_vc4_bo(&cma_obj->base);
bo                463 drivers/gpu/drm/vc4/vc4_bo.c 	bo->madv = __VC4_MADV_NOTSUPP;
bo                469 drivers/gpu/drm/vc4/vc4_bo.c 	return bo;
bo                477 drivers/gpu/drm/vc4/vc4_bo.c 	struct vc4_bo *bo = NULL;
bo                486 drivers/gpu/drm/vc4/vc4_bo.c 	bo = vc4_bo_create(dev, args->size, false, VC4_BO_TYPE_DUMB);
bo                487 drivers/gpu/drm/vc4/vc4_bo.c 	if (IS_ERR(bo))
bo                488 drivers/gpu/drm/vc4/vc4_bo.c 		return PTR_ERR(bo);
bo                490 drivers/gpu/drm/vc4/vc4_bo.c 	bo->madv = VC4_MADV_WILLNEED;
bo                492 drivers/gpu/drm/vc4/vc4_bo.c 	ret = drm_gem_handle_create(file_priv, &bo->base.base, &args->handle);
bo                493 drivers/gpu/drm/vc4/vc4_bo.c 	drm_gem_object_put_unlocked(&bo->base.base);
bo                506 drivers/gpu/drm/vc4/vc4_bo.c 		struct vc4_bo *bo = list_last_entry(&vc4->bo_cache.time_list,
bo                508 drivers/gpu/drm/vc4/vc4_bo.c 		if (time_before(expire_time, bo->free_time)) {
bo                515 drivers/gpu/drm/vc4/vc4_bo.c 		vc4_bo_remove_from_cache(bo);
bo                516 drivers/gpu/drm/vc4/vc4_bo.c 		vc4_bo_destroy(bo);
bo                527 drivers/gpu/drm/vc4/vc4_bo.c 	struct vc4_bo *bo = to_vc4_bo(gem_bo);
bo                531 drivers/gpu/drm/vc4/vc4_bo.c 	mutex_lock(&bo->madv_lock);
bo                532 drivers/gpu/drm/vc4/vc4_bo.c 	if (bo->madv == VC4_MADV_DONTNEED && !refcount_read(&bo->usecnt))
bo                533 drivers/gpu/drm/vc4/vc4_bo.c 		vc4_bo_remove_from_purgeable_pool(bo);
bo                534 drivers/gpu/drm/vc4/vc4_bo.c 	mutex_unlock(&bo->madv_lock);
bo                540 drivers/gpu/drm/vc4/vc4_bo.c 		vc4_bo_destroy(bo);
bo                546 drivers/gpu/drm/vc4/vc4_bo.c 		vc4_bo_destroy(bo);
bo                554 drivers/gpu/drm/vc4/vc4_bo.c 	if (!bo->base.vaddr) {
bo                555 drivers/gpu/drm/vc4/vc4_bo.c 		vc4_bo_destroy(bo);
bo                561 drivers/gpu/drm/vc4/vc4_bo.c 		vc4_bo_destroy(bo);
bo                565 drivers/gpu/drm/vc4/vc4_bo.c 	if (bo->validated_shader) {
bo                566 drivers/gpu/drm/vc4/vc4_bo.c 		kfree(bo->validated_shader->uniform_addr_offsets);
bo                567 drivers/gpu/drm/vc4/vc4_bo.c 		kfree(bo->validated_shader->texture_samples);
bo                568 drivers/gpu/drm/vc4/vc4_bo.c 		kfree(bo->validated_shader);
bo                569 drivers/gpu/drm/vc4/vc4_bo.c 		bo->validated_shader = NULL;
bo                573 drivers/gpu/drm/vc4/vc4_bo.c 	bo->madv = __VC4_MADV_NOTSUPP;
bo                574 drivers/gpu/drm/vc4/vc4_bo.c 	refcount_set(&bo->usecnt, 0);
bo                576 drivers/gpu/drm/vc4/vc4_bo.c 	bo->t_format = false;
bo                577 drivers/gpu/drm/vc4/vc4_bo.c 	bo->free_time = jiffies;
bo                578 drivers/gpu/drm/vc4/vc4_bo.c 	list_add(&bo->size_head, cache_list);
bo                579 drivers/gpu/drm/vc4/vc4_bo.c 	list_add(&bo->unref_head, &vc4->bo_cache.time_list);
bo                581 drivers/gpu/drm/vc4/vc4_bo.c 	vc4_bo_set_label(&bo->base.base, VC4_BO_TYPE_KERNEL_CACHE);
bo                600 drivers/gpu/drm/vc4/vc4_bo.c int vc4_bo_inc_usecnt(struct vc4_bo *bo)
bo                607 drivers/gpu/drm/vc4/vc4_bo.c 	if (refcount_inc_not_zero(&bo->usecnt))
bo                610 drivers/gpu/drm/vc4/vc4_bo.c 	mutex_lock(&bo->madv_lock);
bo                611 drivers/gpu/drm/vc4/vc4_bo.c 	switch (bo->madv) {
bo                613 drivers/gpu/drm/vc4/vc4_bo.c 		if (!refcount_inc_not_zero(&bo->usecnt))
bo                614 drivers/gpu/drm/vc4/vc4_bo.c 			refcount_set(&bo->usecnt, 1);
bo                631 drivers/gpu/drm/vc4/vc4_bo.c 	mutex_unlock(&bo->madv_lock);
bo                636 drivers/gpu/drm/vc4/vc4_bo.c void vc4_bo_dec_usecnt(struct vc4_bo *bo)
bo                641 drivers/gpu/drm/vc4/vc4_bo.c 	if (refcount_dec_not_one(&bo->usecnt))
bo                644 drivers/gpu/drm/vc4/vc4_bo.c 	mutex_lock(&bo->madv_lock);
bo                645 drivers/gpu/drm/vc4/vc4_bo.c 	if (refcount_dec_and_test(&bo->usecnt) &&
bo                646 drivers/gpu/drm/vc4/vc4_bo.c 	    bo->madv == VC4_MADV_DONTNEED)
bo                647 drivers/gpu/drm/vc4/vc4_bo.c 		vc4_bo_add_to_purgeable_pool(bo);
bo                648 drivers/gpu/drm/vc4/vc4_bo.c 	mutex_unlock(&bo->madv_lock);
bo                660 drivers/gpu/drm/vc4/vc4_bo.c 	struct vc4_bo *bo = to_vc4_bo(obj);
bo                664 drivers/gpu/drm/vc4/vc4_bo.c 	if (bo->validated_shader) {
bo                674 drivers/gpu/drm/vc4/vc4_bo.c 	ret = vc4_bo_inc_usecnt(bo);
bo                682 drivers/gpu/drm/vc4/vc4_bo.c 		vc4_bo_dec_usecnt(bo);
bo                691 drivers/gpu/drm/vc4/vc4_bo.c 	struct vc4_bo *bo = to_vc4_bo(obj);
bo                696 drivers/gpu/drm/vc4/vc4_bo.c 	mutex_lock(&bo->madv_lock);
bo                697 drivers/gpu/drm/vc4/vc4_bo.c 	WARN_ON(bo->madv != __VC4_MADV_PURGED);
bo                698 drivers/gpu/drm/vc4/vc4_bo.c 	mutex_unlock(&bo->madv_lock);
bo                707 drivers/gpu/drm/vc4/vc4_bo.c 	struct vc4_bo *bo;
bo                715 drivers/gpu/drm/vc4/vc4_bo.c 	bo = to_vc4_bo(gem_obj);
bo                717 drivers/gpu/drm/vc4/vc4_bo.c 	if (bo->validated_shader && (vma->vm_flags & VM_WRITE)) {
bo                722 drivers/gpu/drm/vc4/vc4_bo.c 	if (bo->madv != VC4_MADV_WILLNEED) {
bo                724 drivers/gpu/drm/vc4/vc4_bo.c 			  bo->madv == VC4_MADV_DONTNEED ?
bo                748 drivers/gpu/drm/vc4/vc4_bo.c 	ret = dma_mmap_wc(bo->base.base.dev->dev, vma, bo->base.vaddr,
bo                749 drivers/gpu/drm/vc4/vc4_bo.c 			  bo->base.paddr, vma->vm_end - vma->vm_start);
bo                760 drivers/gpu/drm/vc4/vc4_bo.c 	struct vc4_bo *bo = to_vc4_bo(obj);
bo                762 drivers/gpu/drm/vc4/vc4_bo.c 	if (bo->validated_shader && (vma->vm_flags & VM_WRITE)) {
bo                772 drivers/gpu/drm/vc4/vc4_bo.c 	struct vc4_bo *bo = to_vc4_bo(obj);
bo                774 drivers/gpu/drm/vc4/vc4_bo.c 	if (bo->validated_shader) {
bo                819 drivers/gpu/drm/vc4/vc4_bo.c 	struct vc4_bo *bo = NULL;
bo                830 drivers/gpu/drm/vc4/vc4_bo.c 	bo = vc4_bo_create(dev, args->size, false, VC4_BO_TYPE_V3D);
bo                831 drivers/gpu/drm/vc4/vc4_bo.c 	if (IS_ERR(bo))
bo                832 drivers/gpu/drm/vc4/vc4_bo.c 		return PTR_ERR(bo);
bo                834 drivers/gpu/drm/vc4/vc4_bo.c 	bo->madv = VC4_MADV_WILLNEED;
bo                836 drivers/gpu/drm/vc4/vc4_bo.c 	ret = drm_gem_handle_create(file_priv, &bo->base.base, &args->handle);
bo                837 drivers/gpu/drm/vc4/vc4_bo.c 	drm_gem_object_put_unlocked(&bo->base.base);
bo                868 drivers/gpu/drm/vc4/vc4_bo.c 	struct vc4_bo *bo = NULL;
bo                891 drivers/gpu/drm/vc4/vc4_bo.c 	bo = vc4_bo_create(dev, args->size, true, VC4_BO_TYPE_V3D_SHADER);
bo                892 drivers/gpu/drm/vc4/vc4_bo.c 	if (IS_ERR(bo))
bo                893 drivers/gpu/drm/vc4/vc4_bo.c 		return PTR_ERR(bo);
bo                895 drivers/gpu/drm/vc4/vc4_bo.c 	bo->madv = VC4_MADV_WILLNEED;
bo                897 drivers/gpu/drm/vc4/vc4_bo.c 	if (copy_from_user(bo->base.vaddr,
bo                906 drivers/gpu/drm/vc4/vc4_bo.c 	memset(bo->base.vaddr + args->size, 0,
bo                907 drivers/gpu/drm/vc4/vc4_bo.c 	       bo->base.base.size - args->size);
bo                909 drivers/gpu/drm/vc4/vc4_bo.c 	bo->validated_shader = vc4_validate_shader(&bo->base);
bo                910 drivers/gpu/drm/vc4/vc4_bo.c 	if (!bo->validated_shader) {
bo                918 drivers/gpu/drm/vc4/vc4_bo.c 	ret = drm_gem_handle_create(file_priv, &bo->base.base, &args->handle);
bo                921 drivers/gpu/drm/vc4/vc4_bo.c 	drm_gem_object_put_unlocked(&bo->base.base);
bo                943 drivers/gpu/drm/vc4/vc4_bo.c 	struct vc4_bo *bo;
bo                965 drivers/gpu/drm/vc4/vc4_bo.c 	bo = to_vc4_bo(gem_obj);
bo                966 drivers/gpu/drm/vc4/vc4_bo.c 	bo->t_format = t_format;
bo                986 drivers/gpu/drm/vc4/vc4_bo.c 	struct vc4_bo *bo;
bo                996 drivers/gpu/drm/vc4/vc4_bo.c 	bo = to_vc4_bo(gem_obj);
bo                998 drivers/gpu/drm/vc4/vc4_bo.c 	if (bo->t_format)
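The vc4_bo.c hits above cluster around the driver's madvise-style purgeable pool: bo->madv records user intent, bo->usecnt counts active users, and a buffer is eligible for purging only while its use count is zero. A minimal sketch of that gate follows; "demo_bo" and its fields are illustrative stand-ins for the pattern visible in the vc4_bo_inc_usecnt()/vc4_bo_dec_usecnt() hits, not the driver's real layout, and the madv state checks are reduced to the zero/non-zero transition.

#include <linux/refcount.h>
#include <linux/mutex.h>
#include <linux/list.h>

struct demo_bo {
	refcount_t usecnt;           /* 0 while unused (purgeable) */
	struct mutex madv_lock;      /* serializes usecnt 0<->1 transitions */
	struct list_head purge_head; /* linkage on the purgeable pool */
};

static void demo_bo_get_use(struct demo_bo *bo)
{
	/* Fast path: someone else already holds a use reference. */
	if (refcount_inc_not_zero(&bo->usecnt))
		return;

	/* The 0 -> 1 transition happens under madv_lock so a concurrent
	 * purge cannot race between the check and the set. */
	mutex_lock(&bo->madv_lock);
	if (!refcount_inc_not_zero(&bo->usecnt))
		refcount_set(&bo->usecnt, 1);
	mutex_unlock(&bo->madv_lock);
}

static void demo_bo_put_use(struct demo_bo *bo, struct list_head *pool)
{
	/* Fast path: not the last user, no pool transition needed. */
	if (refcount_dec_not_one(&bo->usecnt))
		return;

	/* Last user: drop to zero under the lock and make the buffer
	 * visible to the purge scanner again. */
	mutex_lock(&bo->madv_lock);
	if (refcount_dec_and_test(&bo->usecnt))
		list_add_tail(&bo->purge_head, pool);
	mutex_unlock(&bo->madv_lock);
}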
bo                873 drivers/gpu/drm/vc4/vc4_crtc.c 		struct vc4_bo *bo;
bo                876 drivers/gpu/drm/vc4/vc4_crtc.c 		bo = to_vc4_bo(&cma_bo->base);
bo                877 drivers/gpu/drm/vc4/vc4_crtc.c 		vc4_bo_dec_usecnt(bo);
bo                903 drivers/gpu/drm/vc4/vc4_crtc.c 	struct vc4_bo *bo = to_vc4_bo(&cma_bo->base);
bo                912 drivers/gpu/drm/vc4/vc4_crtc.c 	ret = vc4_bo_inc_usecnt(bo);
bo                918 drivers/gpu/drm/vc4/vc4_crtc.c 		vc4_bo_dec_usecnt(bo);
bo                931 drivers/gpu/drm/vc4/vc4_crtc.c 		vc4_bo_dec_usecnt(bo);
bo                956 drivers/gpu/drm/vc4/vc4_crtc.c 	vc4_queue_seqno_cb(dev, &flip_state->cb, bo->seqno,
bo                284 drivers/gpu/drm/vc4/vc4_drv.h to_vc4_bo(struct drm_gem_object *bo)
bo                286 drivers/gpu/drm/vc4/vc4_drv.h 	return (struct vc4_bo *)bo;
bo                505 drivers/gpu/drm/vc4/vc4_drv.h 	struct drm_gem_cma_object **bo;
bo                739 drivers/gpu/drm/vc4/vc4_drv.h int vc4_bo_inc_usecnt(struct vc4_bo *bo);
bo                740 drivers/gpu/drm/vc4/vc4_drv.h void vc4_bo_dec_usecnt(struct vc4_bo *bo);
bo                741 drivers/gpu/drm/vc4/vc4_drv.h void vc4_bo_add_to_purgeable_pool(struct vc4_bo *bo);
bo                742 drivers/gpu/drm/vc4/vc4_drv.h void vc4_bo_remove_from_purgeable_pool(struct vc4_bo *bo);
bo                 52 drivers/gpu/drm/vc4/vc4_gem.c 	struct drm_gem_object **bo;
bo                 61 drivers/gpu/drm/vc4/vc4_gem.c 		drm_gem_object_put_unlocked(state->bo[i]);
bo                105 drivers/gpu/drm/vc4/vc4_gem.c 	state->bo = get_state->bo;
bo                115 drivers/gpu/drm/vc4/vc4_gem.c 		struct vc4_bo *vc4_bo = to_vc4_bo(kernel_state->bo[i]);
bo                118 drivers/gpu/drm/vc4/vc4_gem.c 		ret = drm_gem_handle_create(file_priv, kernel_state->bo[i],
bo                130 drivers/gpu/drm/vc4/vc4_gem.c 	if (copy_to_user(u64_to_user_ptr(get_state->bo),
bo                155 drivers/gpu/drm/vc4/vc4_gem.c 	struct vc4_bo *bo;
bo                180 drivers/gpu/drm/vc4/vc4_gem.c 		list_for_each_entry(bo, &exec[i]->unref_list, unref_head)
bo                185 drivers/gpu/drm/vc4/vc4_gem.c 	kernel_state->bo = kcalloc(state->bo_count,
bo                186 drivers/gpu/drm/vc4/vc4_gem.c 				   sizeof(*kernel_state->bo), GFP_ATOMIC);
bo                188 drivers/gpu/drm/vc4/vc4_gem.c 	if (!kernel_state->bo) {
bo                199 drivers/gpu/drm/vc4/vc4_gem.c 			bo = to_vc4_bo(&exec[i]->bo[j]->base);
bo                205 drivers/gpu/drm/vc4/vc4_gem.c 			WARN_ON(!refcount_read(&bo->usecnt));
bo                206 drivers/gpu/drm/vc4/vc4_gem.c 			refcount_inc(&bo->usecnt);
bo                207 drivers/gpu/drm/vc4/vc4_gem.c 			drm_gem_object_get(&exec[i]->bo[j]->base);
bo                208 drivers/gpu/drm/vc4/vc4_gem.c 			kernel_state->bo[k++] = &exec[i]->bo[j]->base;
bo                211 drivers/gpu/drm/vc4/vc4_gem.c 		list_for_each_entry(bo, &exec[i]->unref_list, unref_head) {
bo                215 drivers/gpu/drm/vc4/vc4_gem.c 			drm_gem_object_get(&bo->base.base);
bo                216 drivers/gpu/drm/vc4/vc4_gem.c 			kernel_state->bo[k++] = &bo->base.base;
bo                263 drivers/gpu/drm/vc4/vc4_gem.c 		struct vc4_bo *bo = to_vc4_bo(kernel_state->bo[i]);
bo                265 drivers/gpu/drm/vc4/vc4_gem.c 		if (bo->madv == __VC4_MADV_NOTSUPP)
bo                268 drivers/gpu/drm/vc4/vc4_gem.c 		mutex_lock(&bo->madv_lock);
bo                269 drivers/gpu/drm/vc4/vc4_gem.c 		if (!WARN_ON(bo->madv == __VC4_MADV_PURGED))
bo                270 drivers/gpu/drm/vc4/vc4_gem.c 			bo->madv = VC4_MADV_WILLNEED;
bo                271 drivers/gpu/drm/vc4/vc4_gem.c 		refcount_dec(&bo->usecnt);
bo                272 drivers/gpu/drm/vc4/vc4_gem.c 		mutex_unlock(&bo->madv_lock);
bo                539 drivers/gpu/drm/vc4/vc4_gem.c 	struct vc4_bo *bo;
bo                543 drivers/gpu/drm/vc4/vc4_gem.c 		bo = to_vc4_bo(&exec->bo[i]->base);
bo                544 drivers/gpu/drm/vc4/vc4_gem.c 		bo->seqno = seqno;
bo                546 drivers/gpu/drm/vc4/vc4_gem.c 		dma_resv_add_shared_fence(bo->base.base.resv, exec->fence);
bo                549 drivers/gpu/drm/vc4/vc4_gem.c 	list_for_each_entry(bo, &exec->unref_list, unref_head) {
bo                550 drivers/gpu/drm/vc4/vc4_gem.c 		bo->seqno = seqno;
bo                554 drivers/gpu/drm/vc4/vc4_gem.c 		bo = to_vc4_bo(&exec->rcl_write_bo[i]->base);
bo                555 drivers/gpu/drm/vc4/vc4_gem.c 		bo->write_seqno = seqno;
bo                557 drivers/gpu/drm/vc4/vc4_gem.c 		dma_resv_add_excl_fence(bo->base.base.resv, exec->fence);
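The vc4_gem.c hits just above show where a job's completion fence is published on each buffer's reservation object: read-only BOs take it as a shared fence, render targets take it as the exclusive fence. A hedged sketch of that split using the dma_resv API names of this tree; demo_publish_fence() is an illustrative wrapper, and shared slots must have been reserved beforehand with dma_resv_reserve_shared(), as a later hit in this file does.

#include <linux/dma-resv.h>
#include <linux/dma-fence.h>

static void demo_publish_fence(struct dma_resv *resv,
			       struct dma_fence *fence, bool written)
{
	if (written)
		dma_resv_add_excl_fence(resv, fence);   /* writers exclude */
	else
		dma_resv_add_shared_fence(resv, fence); /* readers share */
}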
bo                569 drivers/gpu/drm/vc4/vc4_gem.c 		struct drm_gem_object *bo = &exec->bo[i]->base;
bo                571 drivers/gpu/drm/vc4/vc4_gem.c 		ww_mutex_unlock(&bo->resv->lock);
bo                591 drivers/gpu/drm/vc4/vc4_gem.c 	struct drm_gem_object *bo;
bo                597 drivers/gpu/drm/vc4/vc4_gem.c 		bo = &exec->bo[contended_lock]->base;
bo                598 drivers/gpu/drm/vc4/vc4_gem.c 		ret = ww_mutex_lock_slow_interruptible(&bo->resv->lock,
bo                610 drivers/gpu/drm/vc4/vc4_gem.c 		bo = &exec->bo[i]->base;
bo                612 drivers/gpu/drm/vc4/vc4_gem.c 		ret = ww_mutex_lock_interruptible(&bo->resv->lock, acquire_ctx);
bo                617 drivers/gpu/drm/vc4/vc4_gem.c 				bo = &exec->bo[j]->base;
bo                618 drivers/gpu/drm/vc4/vc4_gem.c 				ww_mutex_unlock(&bo->resv->lock);
bo                622 drivers/gpu/drm/vc4/vc4_gem.c 				bo = &exec->bo[contended_lock]->base;
bo                624 drivers/gpu/drm/vc4/vc4_gem.c 				ww_mutex_unlock(&bo->resv->lock);
bo                643 drivers/gpu/drm/vc4/vc4_gem.c 		bo = &exec->bo[i]->base;
bo                645 drivers/gpu/drm/vc4/vc4_gem.c 		ret = dma_resv_reserve_shared(bo->resv, 1);
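The run of ww_mutex calls above is the classic wound/wait acquisition loop: lock every reservation in turn, and on -EDEADLK release everything held, take the contended lock with the _slow variant, then restart. A condensed sketch under the same API; demo_lock_all() and its parameters are illustrative, and the caller is assumed to have done ww_acquire_init() on ctx.

#include <linux/ww_mutex.h>

static int demo_lock_all(struct ww_mutex **locks, int count,
			 struct ww_acquire_ctx *ctx)
{
	int contended = -1;
	int i, j, ret;

retry:
	if (contended != -1) {
		/* The lock we lost to: wait for it without wounding. */
		ret = ww_mutex_lock_slow_interruptible(locks[contended], ctx);
		if (ret)
			return ret;
	}

	for (i = 0; i < count; i++) {
		if (i == contended)
			continue;

		ret = ww_mutex_lock_interruptible(locks[i], ctx);
		if (ret) {
			/* Back off: drop everything acquired so far. */
			for (j = 0; j < i; j++)
				ww_mutex_unlock(locks[j]);
			if (contended >= i)
				ww_mutex_unlock(locks[contended]);

			if (ret == -EDEADLK) {
				contended = i;
				goto retry;
			}
			return ret;
		}
	}

	ww_acquire_done(ctx);
	return 0;
}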
bo                747 drivers/gpu/drm/vc4/vc4_gem.c 	exec->bo = kvmalloc_array(exec->bo_count,
bo                750 drivers/gpu/drm/vc4/vc4_gem.c 	if (!exec->bo) {
bo                771 drivers/gpu/drm/vc4/vc4_gem.c 		struct drm_gem_object *bo = idr_find(&file_priv->object_idr,
bo                773 drivers/gpu/drm/vc4/vc4_gem.c 		if (!bo) {
bo                780 drivers/gpu/drm/vc4/vc4_gem.c 		drm_gem_object_get(bo);
bo                781 drivers/gpu/drm/vc4/vc4_gem.c 		exec->bo[i] = (struct drm_gem_cma_object *)bo;
bo                789 drivers/gpu/drm/vc4/vc4_gem.c 		ret = vc4_bo_inc_usecnt(to_vc4_bo(&exec->bo[i]->base));
bo                807 drivers/gpu/drm/vc4/vc4_gem.c 		vc4_bo_dec_usecnt(to_vc4_bo(&exec->bo[i]->base));
bo                811 drivers/gpu/drm/vc4/vc4_gem.c 	for (i = 0; i < exec->bo_count && exec->bo[i]; i++)
bo                812 drivers/gpu/drm/vc4/vc4_gem.c 		drm_gem_object_put_unlocked(&exec->bo[i]->base);
bo                816 drivers/gpu/drm/vc4/vc4_gem.c 	kvfree(exec->bo);
bo                817 drivers/gpu/drm/vc4/vc4_gem.c 	exec->bo = NULL;
bo                836 drivers/gpu/drm/vc4/vc4_gem.c 	struct vc4_bo *bo;
bo                890 drivers/gpu/drm/vc4/vc4_gem.c 	bo = vc4_bo_create(dev, exec_size, true, VC4_BO_TYPE_BCL);
bo                891 drivers/gpu/drm/vc4/vc4_gem.c 	if (IS_ERR(bo)) {
bo                893 drivers/gpu/drm/vc4/vc4_gem.c 		ret = PTR_ERR(bo);
bo                896 drivers/gpu/drm/vc4/vc4_gem.c 	exec->exec_bo = &bo->base;
bo                956 drivers/gpu/drm/vc4/vc4_gem.c 	if (exec->bo) {
bo                958 drivers/gpu/drm/vc4/vc4_gem.c 			struct vc4_bo *bo = to_vc4_bo(&exec->bo[i]->base);
bo                960 drivers/gpu/drm/vc4/vc4_gem.c 			vc4_bo_dec_usecnt(bo);
bo                961 drivers/gpu/drm/vc4/vc4_gem.c 			drm_gem_object_put_unlocked(&exec->bo[i]->base);
bo                963 drivers/gpu/drm/vc4/vc4_gem.c 		kvfree(exec->bo);
bo                967 drivers/gpu/drm/vc4/vc4_gem.c 		struct vc4_bo *bo = list_first_entry(&exec->unref_list,
bo                969 drivers/gpu/drm/vc4/vc4_gem.c 		list_del(&bo->unref_head);
bo                970 drivers/gpu/drm/vc4/vc4_gem.c 		drm_gem_object_put_unlocked(&bo->base.base);
bo               1096 drivers/gpu/drm/vc4/vc4_gem.c 	struct vc4_bo *bo;
bo               1106 drivers/gpu/drm/vc4/vc4_gem.c 	bo = to_vc4_bo(gem_obj);
bo               1108 drivers/gpu/drm/vc4/vc4_gem.c 	ret = vc4_wait_for_seqno_ioctl_helper(dev, bo->seqno,
bo               1318 drivers/gpu/drm/vc4/vc4_gem.c 	struct vc4_bo *bo;
bo               1338 drivers/gpu/drm/vc4/vc4_gem.c 	bo = to_vc4_bo(gem_obj);
bo               1341 drivers/gpu/drm/vc4/vc4_gem.c 	if (bo->madv == __VC4_MADV_NOTSUPP) {
bo               1356 drivers/gpu/drm/vc4/vc4_gem.c 	mutex_lock(&bo->madv_lock);
bo               1358 drivers/gpu/drm/vc4/vc4_gem.c 	if (args->madv == VC4_MADV_DONTNEED && bo->madv == VC4_MADV_WILLNEED &&
bo               1359 drivers/gpu/drm/vc4/vc4_gem.c 	    !refcount_read(&bo->usecnt)) {
bo               1364 drivers/gpu/drm/vc4/vc4_gem.c 		vc4_bo_add_to_purgeable_pool(bo);
bo               1366 drivers/gpu/drm/vc4/vc4_gem.c 		   bo->madv == VC4_MADV_DONTNEED &&
bo               1367 drivers/gpu/drm/vc4/vc4_gem.c 		   !refcount_read(&bo->usecnt)) {
bo               1371 drivers/gpu/drm/vc4/vc4_gem.c 		vc4_bo_remove_from_purgeable_pool(bo);
bo               1375 drivers/gpu/drm/vc4/vc4_gem.c 	args->retained = bo->madv != __VC4_MADV_PURGED;
bo               1378 drivers/gpu/drm/vc4/vc4_gem.c 	if (bo->madv != __VC4_MADV_PURGED)
bo               1379 drivers/gpu/drm/vc4/vc4_gem.c 		bo->madv = args->madv;
bo               1381 drivers/gpu/drm/vc4/vc4_gem.c 	mutex_unlock(&bo->madv_lock);
bo                 62 drivers/gpu/drm/vc4/vc4_irq.c 	struct vc4_bo *bo;
bo                 72 drivers/gpu/drm/vc4/vc4_irq.c 	bo = vc4->bin_bo;
bo                103 drivers/gpu/drm/vc4/vc4_irq.c 	V3D_WRITE(V3D_BPOA, bo->base.paddr + bin_bo_slot * vc4->bin_alloc_size);
bo                104 drivers/gpu/drm/vc4/vc4_irq.c 	V3D_WRITE(V3D_BPOS, bo->base.base.size);
bo                313 drivers/gpu/drm/vc4/vc4_kms.c 		struct vc4_bo *bo;
bo                322 drivers/gpu/drm/vc4/vc4_kms.c 		bo = to_vc4_bo(gem_obj);
bo                326 drivers/gpu/drm/vc4/vc4_kms.c 		if (bo->t_format) {
bo                311 drivers/gpu/drm/vc4/vc4_plane.c 	struct drm_gem_cma_object *bo = drm_fb_cma_get_gem_obj(fb, 0);
bo                332 drivers/gpu/drm/vc4/vc4_plane.c 		vc4_state->offsets[i] = bo->paddr + fb->offsets[i];
bo                996 drivers/gpu/drm/vc4/vc4_plane.c 	struct drm_gem_cma_object *bo = drm_fb_cma_get_gem_obj(fb, 0);
bo               1003 drivers/gpu/drm/vc4/vc4_plane.c 	addr = bo->paddr + fb->offsets[0];
bo               1128 drivers/gpu/drm/vc4/vc4_plane.c 	struct vc4_bo *bo;
bo               1134 drivers/gpu/drm/vc4/vc4_plane.c 	bo = to_vc4_bo(&drm_fb_cma_get_gem_obj(state->fb, 0)->base);
bo               1141 drivers/gpu/drm/vc4/vc4_plane.c 	ret = vc4_bo_inc_usecnt(bo);
bo               1151 drivers/gpu/drm/vc4/vc4_plane.c 	struct vc4_bo *bo;
bo               1156 drivers/gpu/drm/vc4/vc4_plane.c 	bo = to_vc4_bo(&drm_fb_cma_get_gem_obj(state->fb, 0)->base);
bo               1157 drivers/gpu/drm/vc4/vc4_plane.c 	vc4_bo_dec_usecnt(bo);
bo                100 drivers/gpu/drm/vc4/vc4_render_cl.c 				    struct drm_gem_cma_object *bo,
bo                104 drivers/gpu/drm/vc4/vc4_render_cl.c 	return bo->paddr + surf->offset + VC4_TILE_BUFFER_SIZE *
bo                249 drivers/gpu/drm/vc4/vc4_v3d.c 		struct vc4_bo *bo = vc4_bo_create(vc4->dev, size, true,
bo                252 drivers/gpu/drm/vc4/vc4_v3d.c 		if (IS_ERR(bo)) {
bo                253 drivers/gpu/drm/vc4/vc4_v3d.c 			ret = PTR_ERR(bo);
bo                264 drivers/gpu/drm/vc4/vc4_v3d.c 		if ((bo->base.paddr & 0xf0000000) ==
bo                265 drivers/gpu/drm/vc4/vc4_v3d.c 		    ((bo->base.paddr + bo->base.base.size - 1) & 0xf0000000)) {
bo                266 drivers/gpu/drm/vc4/vc4_v3d.c 			vc4->bin_bo = bo;
bo                288 drivers/gpu/drm/vc4/vc4_v3d.c 				     bo->base.base.size / vc4->bin_alloc_size);
bo                302 drivers/gpu/drm/vc4/vc4_v3d.c 		list_add(&bo->unref_head, &list);
bo                307 drivers/gpu/drm/vc4/vc4_v3d.c 		struct vc4_bo *bo = list_last_entry(&list,
bo                310 drivers/gpu/drm/vc4/vc4_v3d.c 		list_del(&bo->unref_head);
bo                311 drivers/gpu/drm/vc4/vc4_v3d.c 		drm_gem_object_put_unlocked(&bo->base.base);
bo                109 drivers/gpu/drm/vc4/vc4_validate.c 	struct vc4_bo *bo;
bo                116 drivers/gpu/drm/vc4/vc4_validate.c 	obj = exec->bo[hindex];
bo                117 drivers/gpu/drm/vc4/vc4_validate.c 	bo = to_vc4_bo(&obj->base);
bo                119 drivers/gpu/drm/vc4/vc4_validate.c 	if (bo->validated_shader) {
bo                756 drivers/gpu/drm/vc4/vc4_validate.c 	struct drm_gem_cma_object *bo[ARRAY_SIZE(shader_reloc_offsets) + 8];
bo                801 drivers/gpu/drm/vc4/vc4_validate.c 		bo[i] = exec->bo[src_handles[i]];
bo                802 drivers/gpu/drm/vc4/vc4_validate.c 		if (!bo[i])
bo                806 drivers/gpu/drm/vc4/vc4_validate.c 		bo[i] = vc4_use_bo(exec, src_handles[i]);
bo                807 drivers/gpu/drm/vc4/vc4_validate.c 		if (!bo[i])
bo                812 drivers/gpu/drm/vc4/vc4_validate.c 	    to_vc4_bo(&bo[0]->base)->validated_shader->is_threaded) {
bo                817 drivers/gpu/drm/vc4/vc4_validate.c 	if (to_vc4_bo(&bo[1]->base)->validated_shader->is_threaded ||
bo                818 drivers/gpu/drm/vc4/vc4_validate.c 	    to_vc4_bo(&bo[2]->base)->validated_shader->is_threaded) {
bo                831 drivers/gpu/drm/vc4/vc4_validate.c 		*(uint32_t *)(pkt_v + o) = bo[i]->paddr + src_offset;
bo                839 drivers/gpu/drm/vc4/vc4_validate.c 		validated_shader = to_vc4_bo(&bo[i]->base)->validated_shader;
bo                888 drivers/gpu/drm/vc4/vc4_validate.c 			bo[ARRAY_SIZE(shader_reloc_offsets) + i];
bo                290 drivers/gpu/drm/vgem/vgem_drv.c static struct page **vgem_pin_pages(struct drm_vgem_gem_object *bo)
bo                292 drivers/gpu/drm/vgem/vgem_drv.c 	mutex_lock(&bo->pages_lock);
bo                293 drivers/gpu/drm/vgem/vgem_drv.c 	if (bo->pages_pin_count++ == 0) {
bo                296 drivers/gpu/drm/vgem/vgem_drv.c 		pages = drm_gem_get_pages(&bo->base);
bo                298 drivers/gpu/drm/vgem/vgem_drv.c 			bo->pages_pin_count--;
bo                299 drivers/gpu/drm/vgem/vgem_drv.c 			mutex_unlock(&bo->pages_lock);
bo                303 drivers/gpu/drm/vgem/vgem_drv.c 		bo->pages = pages;
bo                305 drivers/gpu/drm/vgem/vgem_drv.c 	mutex_unlock(&bo->pages_lock);
bo                307 drivers/gpu/drm/vgem/vgem_drv.c 	return bo->pages;
bo                310 drivers/gpu/drm/vgem/vgem_drv.c static void vgem_unpin_pages(struct drm_vgem_gem_object *bo)
bo                312 drivers/gpu/drm/vgem/vgem_drv.c 	mutex_lock(&bo->pages_lock);
bo                313 drivers/gpu/drm/vgem/vgem_drv.c 	if (--bo->pages_pin_count == 0) {
bo                314 drivers/gpu/drm/vgem/vgem_drv.c 		drm_gem_put_pages(&bo->base, bo->pages, true, true);
bo                315 drivers/gpu/drm/vgem/vgem_drv.c 		bo->pages = NULL;
bo                317 drivers/gpu/drm/vgem/vgem_drv.c 	mutex_unlock(&bo->pages_lock);
bo                322 drivers/gpu/drm/vgem/vgem_drv.c 	struct drm_vgem_gem_object *bo = to_vgem_bo(obj);
bo                326 drivers/gpu/drm/vgem/vgem_drv.c 	pages = vgem_pin_pages(bo);
bo                340 drivers/gpu/drm/vgem/vgem_drv.c 	struct drm_vgem_gem_object *bo = to_vgem_bo(obj);
bo                342 drivers/gpu/drm/vgem/vgem_drv.c 	vgem_unpin_pages(bo);
bo                347 drivers/gpu/drm/vgem/vgem_drv.c 	struct drm_vgem_gem_object *bo = to_vgem_bo(obj);
bo                349 drivers/gpu/drm/vgem/vgem_drv.c 	return drm_prime_pages_to_sg(bo->pages, bo->base.size >> PAGE_SHIFT);
bo                387 drivers/gpu/drm/vgem/vgem_drv.c 	struct drm_vgem_gem_object *bo = to_vgem_bo(obj);
bo                391 drivers/gpu/drm/vgem/vgem_drv.c 	pages = vgem_pin_pages(bo);
bo                400 drivers/gpu/drm/vgem/vgem_drv.c 	struct drm_vgem_gem_object *bo = to_vgem_bo(obj);
bo                403 drivers/gpu/drm/vgem/vgem_drv.c 	vgem_unpin_pages(bo);
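The vgem hits above implement a pin-count idiom: the first pin populates the page array, the last unpin releases it, and a mutex makes the count and pointer transition atomic. A self-contained sketch of the same shape; "demo_gem" and the helper names are illustrative, the two drm_gem page calls are the ones the listed lines use.

#include <linux/err.h>
#include <linux/mutex.h>
#include <drm/drm_gem.h>

struct demo_gem {
	struct drm_gem_object base;
	struct mutex pages_lock;
	struct page **pages;
	unsigned int pages_pin_count;
};

static struct page **demo_pin_pages(struct demo_gem *obj)
{
	struct page **pages;

	mutex_lock(&obj->pages_lock);
	if (obj->pages_pin_count++ == 0) {
		pages = drm_gem_get_pages(&obj->base);
		if (IS_ERR(pages)) {
			obj->pages_pin_count--;
			mutex_unlock(&obj->pages_lock);
			return pages; /* ERR_PTR from drm_gem_get_pages() */
		}
		obj->pages = pages;
	}
	mutex_unlock(&obj->pages_lock);

	return obj->pages;
}

static void demo_unpin_pages(struct demo_gem *obj)
{
	mutex_lock(&obj->pages_lock);
	if (--obj->pages_pin_count == 0) {
		/* dirty=true, accessed=true: pages may have been written. */
		drm_gem_put_pages(&obj->base, obj->pages, true, true);
		obj->pages = NULL;
	}
	mutex_unlock(&obj->pages_lock);
}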
bo                258 drivers/gpu/drm/virtio/virtgpu_drv.h 				    struct virtio_gpu_object *bo,
bo                264 drivers/gpu/drm/virtio/virtgpu_drv.h 					struct virtio_gpu_object *bo,
bo                311 drivers/gpu/drm/virtio/virtgpu_drv.h 					struct virtio_gpu_object *bo,
bo                318 drivers/gpu/drm/virtio/virtgpu_drv.h 				  struct virtio_gpu_object *bo,
bo                362 drivers/gpu/drm/virtio/virtgpu_drv.h void virtio_gpu_object_kunmap(struct virtio_gpu_object *bo);
bo                363 drivers/gpu/drm/virtio/virtgpu_drv.h int virtio_gpu_object_kmap(struct virtio_gpu_object *bo);
bo                365 drivers/gpu/drm/virtio/virtgpu_drv.h 				   struct virtio_gpu_object *bo);
bo                366 drivers/gpu/drm/virtio/virtgpu_drv.h void virtio_gpu_object_free_sg_table(struct virtio_gpu_object *bo);
bo                367 drivers/gpu/drm/virtio/virtgpu_drv.h int virtio_gpu_object_wait(struct virtio_gpu_object *bo, bool no_wait);
bo                380 drivers/gpu/drm/virtio/virtgpu_drv.h virtio_gpu_object_ref(struct virtio_gpu_object *bo)
bo                382 drivers/gpu/drm/virtio/virtgpu_drv.h 	ttm_bo_get(&bo->tbo);
bo                383 drivers/gpu/drm/virtio/virtgpu_drv.h 	return bo;
bo                386 drivers/gpu/drm/virtio/virtgpu_drv.h static inline void virtio_gpu_object_unref(struct virtio_gpu_object **bo)
bo                390 drivers/gpu/drm/virtio/virtgpu_drv.h 	if ((*bo) == NULL)
bo                392 drivers/gpu/drm/virtio/virtgpu_drv.h 	tbo = &((*bo)->tbo);
bo                394 drivers/gpu/drm/virtio/virtgpu_drv.h 	*bo = NULL;
bo                397 drivers/gpu/drm/virtio/virtgpu_drv.h static inline u64 virtio_gpu_object_mmap_offset(struct virtio_gpu_object *bo)
bo                399 drivers/gpu/drm/virtio/virtgpu_drv.h 	return drm_vma_node_offset_addr(&bo->tbo.base.vma_node);
bo                402 drivers/gpu/drm/virtio/virtgpu_drv.h static inline int virtio_gpu_object_reserve(struct virtio_gpu_object *bo,
bo                407 drivers/gpu/drm/virtio/virtgpu_drv.h 	r = ttm_bo_reserve(&bo->tbo, true, no_wait, NULL);
bo                411 drivers/gpu/drm/virtio/virtgpu_drv.h 				bo->gem_base.dev->dev_private;
bo                412 drivers/gpu/drm/virtio/virtgpu_drv.h 			dev_err(qdev->dev, "%p reserve failed\n", bo);
bo                419 drivers/gpu/drm/virtio/virtgpu_drv.h static inline void virtio_gpu_object_unreserve(struct virtio_gpu_object *bo)
bo                421 drivers/gpu/drm/virtio/virtgpu_drv.h 	ttm_bo_unreserve(&bo->tbo);
bo                 64 drivers/gpu/drm/virtio/virtgpu_ioctl.c 	struct ttm_buffer_object *bo;
bo                 73 drivers/gpu/drm/virtio/virtgpu_ioctl.c 		bo = buf->bo;
bo                 74 drivers/gpu/drm/virtio/virtgpu_ioctl.c 		qobj = container_of(bo, struct virtio_gpu_object, tbo);
bo                 75 drivers/gpu/drm/virtio/virtgpu_ioctl.c 		ret = ttm_bo_validate(bo, &qobj->placement, &ctx);
bo                 87 drivers/gpu/drm/virtio/virtgpu_ioctl.c 	struct ttm_buffer_object *bo;
bo                 91 drivers/gpu/drm/virtio/virtgpu_ioctl.c 		bo = buf->bo;
bo                 92 drivers/gpu/drm/virtio/virtgpu_ioctl.c 		qobj = container_of(bo, struct virtio_gpu_object, tbo);
bo                188 drivers/gpu/drm/virtio/virtgpu_ioctl.c 			buflist[i].bo = &qobj->tbo;
bo                 68 drivers/gpu/drm/virtio/virtgpu_object.c 	struct virtio_gpu_object *bo;
bo                 71 drivers/gpu/drm/virtio/virtgpu_object.c 	bo = container_of(tbo, struct virtio_gpu_object, tbo);
bo                 72 drivers/gpu/drm/virtio/virtgpu_object.c 	vgdev = (struct virtio_gpu_device *)bo->gem_base.dev->dev_private;
bo                 74 drivers/gpu/drm/virtio/virtgpu_object.c 	if (bo->created)
bo                 75 drivers/gpu/drm/virtio/virtgpu_object.c 		virtio_gpu_cmd_unref_resource(vgdev, bo->hw_res_handle);
bo                 76 drivers/gpu/drm/virtio/virtgpu_object.c 	if (bo->pages)
bo                 77 drivers/gpu/drm/virtio/virtgpu_object.c 		virtio_gpu_object_free_sg_table(bo);
bo                 78 drivers/gpu/drm/virtio/virtgpu_object.c 	if (bo->vmap)
bo                 79 drivers/gpu/drm/virtio/virtgpu_object.c 		virtio_gpu_object_kunmap(bo);
bo                 80 drivers/gpu/drm/virtio/virtgpu_object.c 	drm_gem_object_release(&bo->gem_base);
bo                 81 drivers/gpu/drm/virtio/virtgpu_object.c 	virtio_gpu_resource_id_put(vgdev, bo->hw_res_handle);
bo                 82 drivers/gpu/drm/virtio/virtgpu_object.c 	kfree(bo);
bo                106 drivers/gpu/drm/virtio/virtgpu_object.c 	struct virtio_gpu_object *bo;
bo                115 drivers/gpu/drm/virtio/virtgpu_object.c 	bo = kzalloc(sizeof(struct virtio_gpu_object), GFP_KERNEL);
bo                116 drivers/gpu/drm/virtio/virtgpu_object.c 	if (bo == NULL)
bo                118 drivers/gpu/drm/virtio/virtgpu_object.c 	ret = virtio_gpu_resource_id_get(vgdev, &bo->hw_res_handle);
bo                120 drivers/gpu/drm/virtio/virtgpu_object.c 		kfree(bo);
bo                124 drivers/gpu/drm/virtio/virtgpu_object.c 	ret = drm_gem_object_init(vgdev->ddev, &bo->gem_base, params->size);
bo                126 drivers/gpu/drm/virtio/virtgpu_object.c 		virtio_gpu_resource_id_put(vgdev, bo->hw_res_handle);
bo                127 drivers/gpu/drm/virtio/virtgpu_object.c 		kfree(bo);
bo                130 drivers/gpu/drm/virtio/virtgpu_object.c 	bo->dumb = params->dumb;
bo                133 drivers/gpu/drm/virtio/virtgpu_object.c 		virtio_gpu_cmd_resource_create_3d(vgdev, bo, params, fence);
bo                135 drivers/gpu/drm/virtio/virtgpu_object.c 		virtio_gpu_cmd_create_resource(vgdev, bo, params, fence);
bo                138 drivers/gpu/drm/virtio/virtgpu_object.c 	virtio_gpu_init_ttm_placement(bo);
bo                139 drivers/gpu/drm/virtio/virtgpu_object.c 	ret = ttm_bo_init(&vgdev->mman.bdev, &bo->tbo, params->size,
bo                140 drivers/gpu/drm/virtio/virtgpu_object.c 			  ttm_bo_type_device, &bo->placement, 0,
bo                159 drivers/gpu/drm/virtio/virtgpu_object.c 		drm_gem_object_get(&bo->gem_base);
bo                160 drivers/gpu/drm/virtio/virtgpu_object.c 		mainbuf.bo = &bo->tbo;
bo                179 drivers/gpu/drm/virtio/virtgpu_object.c 	*bo_ptr = bo;
bo                183 drivers/gpu/drm/virtio/virtgpu_object.c void virtio_gpu_object_kunmap(struct virtio_gpu_object *bo)
bo                185 drivers/gpu/drm/virtio/virtgpu_object.c 	bo->vmap = NULL;
bo                186 drivers/gpu/drm/virtio/virtgpu_object.c 	ttm_bo_kunmap(&bo->kmap);
bo                189 drivers/gpu/drm/virtio/virtgpu_object.c int virtio_gpu_object_kmap(struct virtio_gpu_object *bo)
bo                194 drivers/gpu/drm/virtio/virtgpu_object.c 	WARN_ON(bo->vmap);
bo                196 drivers/gpu/drm/virtio/virtgpu_object.c 	r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap);
bo                199 drivers/gpu/drm/virtio/virtgpu_object.c 	bo->vmap = ttm_kmap_obj_virtual(&bo->kmap, &is_iomem);
bo                204 drivers/gpu/drm/virtio/virtgpu_object.c 				   struct virtio_gpu_object *bo)
bo                207 drivers/gpu/drm/virtio/virtgpu_object.c 	struct page **pages = bo->tbo.ttm->pages;
bo                208 drivers/gpu/drm/virtio/virtgpu_object.c 	int nr_pages = bo->tbo.num_pages;
bo                216 drivers/gpu/drm/virtio/virtgpu_object.c 	if (bo->pages)
bo                219 drivers/gpu/drm/virtio/virtgpu_object.c 	if (bo->tbo.ttm->state == tt_unpopulated)
bo                220 drivers/gpu/drm/virtio/virtgpu_object.c 		bo->tbo.ttm->bdev->driver->ttm_tt_populate(bo->tbo.ttm, &ctx);
bo                221 drivers/gpu/drm/virtio/virtgpu_object.c 	bo->pages = kmalloc(sizeof(struct sg_table), GFP_KERNEL);
bo                222 drivers/gpu/drm/virtio/virtgpu_object.c 	if (!bo->pages)
bo                229 drivers/gpu/drm/virtio/virtgpu_object.c 	ret = __sg_alloc_table_from_pages(bo->pages, pages, nr_pages, 0,
bo                236 drivers/gpu/drm/virtio/virtgpu_object.c 	kfree(bo->pages);
bo                237 drivers/gpu/drm/virtio/virtgpu_object.c 	bo->pages = NULL;
bo                241 drivers/gpu/drm/virtio/virtgpu_object.c void virtio_gpu_object_free_sg_table(struct virtio_gpu_object *bo)
bo                243 drivers/gpu/drm/virtio/virtgpu_object.c 	sg_free_table(bo->pages);
bo                244 drivers/gpu/drm/virtio/virtgpu_object.c 	kfree(bo->pages);
bo                245 drivers/gpu/drm/virtio/virtgpu_object.c 	bo->pages = NULL;
bo                248 drivers/gpu/drm/virtio/virtgpu_object.c int virtio_gpu_object_wait(struct virtio_gpu_object *bo, bool no_wait)
bo                252 drivers/gpu/drm/virtio/virtgpu_object.c 	r = ttm_bo_reserve(&bo->tbo, true, no_wait, NULL);
bo                255 drivers/gpu/drm/virtio/virtgpu_object.c 	r = ttm_bo_wait(&bo->tbo, true, no_wait);
bo                256 drivers/gpu/drm/virtio/virtgpu_object.c 	ttm_bo_unreserve(&bo->tbo);
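Most of the virtio and vmwgfx hits that follow bracket buffer access the same way: reserve the TTM object, operate, unreserve. The wait helper in the lines directly above is the smallest complete instance of it, restated here as a standalone sketch (the helper name is illustrative; the three TTM calls are the ones this tree uses):

#include <drm/ttm/ttm_bo_api.h>

static int demo_bo_wait_idle(struct ttm_buffer_object *tbo, bool no_wait)
{
	int r;

	/* Reservation makes the wait atomic vs. concurrent movers. */
	r = ttm_bo_reserve(tbo, true, no_wait, NULL);
	if (r)
		return r;
	r = ttm_bo_wait(tbo, true, no_wait);
	ttm_bo_unreserve(tbo);
	return r;
}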
bo                 97 drivers/gpu/drm/virtio/virtgpu_plane.c 	struct virtio_gpu_object *bo;
bo                109 drivers/gpu/drm/virtio/virtgpu_plane.c 		bo = gem_to_virtio_gpu_obj(vgfb->base.obj[0]);
bo                110 drivers/gpu/drm/virtio/virtgpu_plane.c 		handle = bo->hw_res_handle;
bo                111 drivers/gpu/drm/virtio/virtgpu_plane.c 		if (bo->dumb) {
bo                113 drivers/gpu/drm/virtio/virtgpu_plane.c 				(vgdev, bo, 0,
bo                149 drivers/gpu/drm/virtio/virtgpu_plane.c 	struct virtio_gpu_object *bo;
bo                155 drivers/gpu/drm/virtio/virtgpu_plane.c 	bo = gem_to_virtio_gpu_obj(vgfb->base.obj[0]);
bo                156 drivers/gpu/drm/virtio/virtgpu_plane.c 	if (bo && bo->dumb && (plane->state->fb != new_state->fb)) {
bo                187 drivers/gpu/drm/virtio/virtgpu_plane.c 	struct virtio_gpu_object *bo = NULL;
bo                200 drivers/gpu/drm/virtio/virtgpu_plane.c 		bo = gem_to_virtio_gpu_obj(vgfb->base.obj[0]);
bo                201 drivers/gpu/drm/virtio/virtgpu_plane.c 		handle = bo->hw_res_handle;
bo                206 drivers/gpu/drm/virtio/virtgpu_plane.c 	if (bo && bo->dumb && (plane->state->fb != old_state->fb)) {
bo                209 drivers/gpu/drm/virtio/virtgpu_plane.c 			(vgdev, bo, 0,
bo                213 drivers/gpu/drm/virtio/virtgpu_plane.c 		ret = virtio_gpu_object_reserve(bo, false);
bo                215 drivers/gpu/drm/virtio/virtgpu_plane.c 			dma_resv_add_excl_fence(bo->tbo.base.resv,
bo                219 drivers/gpu/drm/virtio/virtgpu_plane.c 			virtio_gpu_object_unreserve(bo);
bo                220 drivers/gpu/drm/virtio/virtgpu_plane.c 			virtio_gpu_object_wait(bo, false);
bo                 35 drivers/gpu/drm/virtio/virtgpu_prime.c 	struct virtio_gpu_object *bo = gem_to_virtio_gpu_obj(obj);
bo                 37 drivers/gpu/drm/virtio/virtgpu_prime.c 	if (!bo->tbo.ttm->pages || !bo->tbo.ttm->num_pages)
bo                 41 drivers/gpu/drm/virtio/virtgpu_prime.c 	return drm_prime_pages_to_sg(bo->tbo.ttm->pages,
bo                 42 drivers/gpu/drm/virtio/virtgpu_prime.c 				     bo->tbo.ttm->num_pages);
bo                 54 drivers/gpu/drm/virtio/virtgpu_prime.c 	struct virtio_gpu_object *bo = gem_to_virtio_gpu_obj(obj);
bo                 57 drivers/gpu/drm/virtio/virtgpu_prime.c 	ret = virtio_gpu_object_kmap(bo);
bo                 60 drivers/gpu/drm/virtio/virtgpu_prime.c 	return bo->vmap;
bo                 77 drivers/gpu/drm/virtio/virtgpu_ttm.c 			       struct ttm_buffer_object *bo,
bo                138 drivers/gpu/drm/virtio/virtgpu_ttm.c static void virtio_gpu_evict_flags(struct ttm_buffer_object *bo,
bo                153 drivers/gpu/drm/virtio/virtgpu_ttm.c static int virtio_gpu_verify_access(struct ttm_buffer_object *bo,
bo                233 drivers/gpu/drm/virtio/virtgpu_ttm.c static struct ttm_tt *virtio_gpu_ttm_tt_create(struct ttm_buffer_object *bo,
bo                239 drivers/gpu/drm/virtio/virtgpu_ttm.c 	vgdev = virtio_gpu_get_vgdev(bo->bdev);
bo                244 drivers/gpu/drm/virtio/virtgpu_ttm.c 	gtt->obj = container_of(bo, struct virtio_gpu_object, tbo);
bo                245 drivers/gpu/drm/virtio/virtgpu_ttm.c 	if (ttm_dma_tt_init(&gtt->ttm, bo, page_flags)) {
bo                254 drivers/gpu/drm/virtio/virtgpu_ttm.c 	struct virtio_gpu_object *bo;
bo                256 drivers/gpu/drm/virtio/virtgpu_ttm.c 	bo = container_of(tbo, struct virtio_gpu_object, tbo);
bo                258 drivers/gpu/drm/virtio/virtgpu_ttm.c 	if (bo->pages)
bo                259 drivers/gpu/drm/virtio/virtgpu_ttm.c 		virtio_gpu_object_free_sg_table(bo);
bo                389 drivers/gpu/drm/virtio/virtgpu_vq.c 				    struct virtio_gpu_object *bo,
bo                400 drivers/gpu/drm/virtio/virtgpu_vq.c 	cmd_p->resource_id = cpu_to_le32(bo->hw_res_handle);
bo                406 drivers/gpu/drm/virtio/virtgpu_vq.c 	bo->created = true;
bo                484 drivers/gpu/drm/virtio/virtgpu_vq.c 					struct virtio_gpu_object *bo,
bo                496 drivers/gpu/drm/virtio/virtgpu_vq.c 				       bo->pages->sgl, bo->pages->nents,
bo                503 drivers/gpu/drm/virtio/virtgpu_vq.c 	cmd_p->resource_id = cpu_to_le32(bo->hw_res_handle);
bo                862 drivers/gpu/drm/virtio/virtgpu_vq.c 				  struct virtio_gpu_object *bo,
bo                873 drivers/gpu/drm/virtio/virtgpu_vq.c 	cmd_p->resource_id = cpu_to_le32(bo->hw_res_handle);
bo                887 drivers/gpu/drm/virtio/virtgpu_vq.c 	bo->created = true;
bo                891 drivers/gpu/drm/virtio/virtgpu_vq.c 					struct virtio_gpu_object *bo,
bo                903 drivers/gpu/drm/virtio/virtgpu_vq.c 				       bo->pages->sgl, bo->pages->nents,
bo                911 drivers/gpu/drm/virtio/virtgpu_vq.c 	cmd_p->resource_id = cpu_to_le32(bo->hw_res_handle);
bo                 56 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c vmw_buffer_object(struct ttm_buffer_object *bo)
bo                 58 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	return container_of(bo, struct vmw_buffer_object, base);
bo                 71 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c vmw_user_buffer_object(struct ttm_buffer_object *bo)
bo                 73 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	struct vmw_buffer_object *vmw_bo = vmw_buffer_object(bo);
bo                 95 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	struct ttm_buffer_object *bo = &buf->base;
bo                105 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	ret = ttm_bo_reserve(bo, interruptible, false, NULL);
bo                110 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 		ret = ttm_bo_mem_compat(placement, &bo->mem,
bo                113 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 		ret = ttm_bo_validate(bo, placement, &ctx);
bo                118 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	ttm_bo_unreserve(bo);
bo                144 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	struct ttm_buffer_object *bo = &buf->base;
bo                154 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	ret = ttm_bo_reserve(bo, interruptible, false, NULL);
bo                159 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 		ret = ttm_bo_mem_compat(&vmw_vram_gmr_placement, &bo->mem,
bo                164 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	ret = ttm_bo_validate(bo, &vmw_vram_gmr_placement, &ctx);
bo                168 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	ret = ttm_bo_validate(bo, &vmw_vram_placement, &ctx);
bo                174 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	ttm_bo_unreserve(bo);
bo                219 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	struct ttm_buffer_object *bo = &buf->base;
bo                226 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	place.lpfn = bo->num_pages;
bo                237 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	ret = ttm_bo_reserve(bo, interruptible, false, NULL);
bo                246 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	if (bo->mem.mem_type == TTM_PL_VRAM &&
bo                247 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	    bo->mem.start < bo->num_pages &&
bo                248 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	    bo->mem.start > 0 &&
bo                251 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 		(void) ttm_bo_validate(bo, &vmw_sys_placement, &ctx);
bo                255 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 		ret = ttm_bo_mem_compat(&placement, &bo->mem,
bo                258 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 		ret = ttm_bo_validate(bo, &placement, &ctx);
bo                261 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	WARN_ON(ret == 0 && bo->offset != 0);
bo                265 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	ttm_bo_unreserve(bo);
bo                288 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	struct ttm_buffer_object *bo = &buf->base;
bo                295 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	ret = ttm_bo_reserve(bo, interruptible, false, NULL);
bo                301 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	ttm_bo_unreserve(bo);
bo                315 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c void vmw_bo_get_guest_ptr(const struct ttm_buffer_object *bo,
bo                318 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	if (bo->mem.mem_type == TTM_PL_VRAM) {
bo                320 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 		ptr->offset = bo->offset;
bo                322 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 		ptr->gmrId = bo->mem.start;
bo                340 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	struct ttm_buffer_object *bo = &vbo->base;
bo                341 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	uint32_t old_mem_type = bo->mem.mem_type;
bo                344 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	dma_resv_assert_held(bo->base.resv);
bo                366 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	ret = ttm_bo_validate(bo, &placement, &ctx);
bo                368 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	BUG_ON(ret != 0 || bo->mem.mem_type != old_mem_type);
bo                389 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	struct ttm_buffer_object *bo = &vbo->base;
bo                398 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	ret = ttm_bo_kmap(bo, 0, bo->num_pages, &vbo->map);
bo                416 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	if (vbo->map.bo == NULL)
bo                461 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c void vmw_bo_bo_free(struct ttm_buffer_object *bo)
bo                463 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	struct vmw_buffer_object *vmw_bo = vmw_buffer_object(bo);
bo                475 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c static void vmw_user_bo_destroy(struct ttm_buffer_object *bo)
bo                477 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	struct vmw_user_buffer_object *vmw_user_bo = vmw_user_buffer_object(bo);
bo                501 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 		void (*bo_free)(struct ttm_buffer_object *bo))
bo                647 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c int vmw_user_bo_verify_access(struct ttm_buffer_object *bo,
bo                652 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	if (unlikely(bo->destroy != vmw_user_bo_destroy))
bo                655 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	vmw_user_bo = vmw_user_buffer_object(bo);
bo                685 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	struct ttm_buffer_object *bo = &user_bo->vbo.base;
bo                694 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 			(bo->base.resv, true, true,
bo                704 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 		(bo, !!(flags & drm_vmw_synccpu_dontblock));
bo               1001 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c void vmw_bo_fence_single(struct ttm_buffer_object *bo,
bo               1004 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	struct ttm_bo_device *bdev = bo->bdev;
bo               1011 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 		dma_resv_add_excl_fence(bo->base.resv, &fence->base);
bo               1014 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 		dma_resv_add_excl_fence(bo->base.resv, &fence->base);
bo               1111 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c void vmw_bo_swap_notify(struct ttm_buffer_object *bo)
bo               1114 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	if (bo->destroy != vmw_bo_bo_free &&
bo               1115 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	    bo->destroy != vmw_user_bo_destroy)
bo               1119 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	vmw_bo_unmap(vmw_buffer_object(bo));
bo               1133 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c void vmw_bo_move_notify(struct ttm_buffer_object *bo,
bo               1142 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	if (bo->destroy != vmw_bo_bo_free &&
bo               1143 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	    bo->destroy != vmw_user_bo_destroy)
bo               1146 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	vbo = container_of(bo, struct vmw_buffer_object, base);
bo               1153 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	if (mem->mem_type == TTM_PL_VRAM || bo->mem.mem_type == TTM_PL_VRAM)
bo               1161 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	if (mem->mem_type != VMW_PL_MOB && bo->mem.mem_type == VMW_PL_MOB)
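Several vmwgfx hits above, and the cotable ones below, rely on the TTM kernel-mapping bracket: ttm_bo_kmap() fills a ttm_bo_kmap_obj, ttm_kmap_obj_virtual() hands back the CPU pointer, and ttm_bo_kunmap() undoes it. A hedged sketch; demo_bo_map() is illustrative, and the caller is assumed to hold the buffer's reservation, as the listed code does.

#include <linux/err.h>
#include <drm/ttm/ttm_bo_api.h>

static void *demo_bo_map(struct ttm_buffer_object *tbo,
			 struct ttm_bo_kmap_obj *map)
{
	bool is_iomem;
	int ret;

	/* Map the whole object; partial maps take a page range instead. */
	ret = ttm_bo_kmap(tbo, 0, tbo->num_pages, map);
	if (ret)
		return ERR_PTR(ret);

	/* is_iomem tells the caller whether I/O-memory accessors apply. */
	return ttm_kmap_obj_virtual(map, &is_iomem);
}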
bo                342 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	struct ttm_buffer_object *bo = val_buf->bo;
bo                344 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	BUG_ON(bo->mem.mem_type != VMW_PL_MOB);
bo                353 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	cmd->body.mobid = bo->mem.start;
bo                366 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	struct ttm_buffer_object *bo = val_buf->bo;
bo                383 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	BUG_ON(bo->mem.mem_type != VMW_PL_MOB);
bo                419 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	vmw_bo_fence_single(bo, fence);
bo                509 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	struct ttm_buffer_object *bo = val_buf->bo;
bo                511 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	BUG_ON(bo->mem.mem_type != VMW_PL_MOB);
bo                520 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	cmd->body.mobid = bo->mem.start;
bo                573 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	struct ttm_buffer_object *bo = val_buf->bo;
bo                590 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	BUG_ON(bo->mem.mem_type != VMW_PL_MOB);
bo                633 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	vmw_bo_fence_single(bo, fence);
bo                167 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 	struct ttm_buffer_object *bo = &res->backup->base;
bo                173 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 	WARN_ON_ONCE(bo->mem.mem_type != VMW_PL_MOB);
bo                174 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 	dma_resv_assert_held(bo->base.resv);
bo                181 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 	WARN_ON(bo->mem.mem_type != VMW_PL_MOB);
bo                186 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 	cmd->body.mobid = bo->mem.start;
bo                215 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 	val_buf->bo = &res->backup->base;
bo                309 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 	struct ttm_buffer_object *bo = val_buf->bo;
bo                315 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 	WARN_ON_ONCE(bo->mem.mem_type != VMW_PL_MOB);
bo                316 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 	dma_resv_assert_held(bo->base.resv);
bo                323 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 	vmw_bo_fence_single(bo, fence);
bo                387 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 	struct ttm_buffer_object *bo, *old_bo = &res->backup->base;
bo                418 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 	bo = &buf->base;
bo                419 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 	WARN_ON_ONCE(ttm_bo_reserve(bo, false, true, NULL));
bo                439 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 		ret = ttm_bo_kmap(bo, i, 1, &new_map);
bo                452 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 	ret = ttm_bo_validate(bo, &vmw_mob_placement, &ctx);
bo                487 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 	ttm_bo_unreserve(bo);
bo                712 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h extern void vmw_query_move_notify(struct ttm_buffer_object *bo,
bo                744 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h 				   struct vmw_buffer_object *bo,
bo                754 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h 				       struct vmw_buffer_object *bo,
bo                757 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h 			struct vmw_buffer_object *bo,
bo                761 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h extern void vmw_bo_pin_reserved(struct vmw_buffer_object *bo, bool pin);
bo                762 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h extern void vmw_bo_bo_free(struct ttm_buffer_object *bo);
bo                767 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h 		       void (*bo_free)(struct ttm_buffer_object *bo));
bo                768 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h extern int vmw_user_bo_verify_access(struct ttm_buffer_object *bo,
bo                789 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h extern void vmw_bo_fence_single(struct ttm_buffer_object *bo,
bo                793 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h extern void vmw_bo_move_notify(struct ttm_buffer_object *bo,
bo                795 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h extern void vmw_bo_swap_notify(struct ttm_buffer_object *bo);
bo                936 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h extern int vmw_bo_map_dma(struct ttm_buffer_object *bo);
bo                937 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h extern void vmw_bo_unmap_dma(struct ttm_buffer_object *bo);
bo                939 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h vmw_bo_sg_table(struct ttm_buffer_object *bo);
bo               1080 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h 			  struct ttm_buffer_object *bo,
bo               3302 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	struct ttm_buffer_object *bo;
bo               3305 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 		bo = &reloc->vbo->base;
bo               3306 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 		switch (bo->mem.mem_type) {
bo               3308 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 			reloc->location->offset += bo->offset;
bo               3312 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 			reloc->location->gmrId = bo->mem.start;
bo               3315 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 			*reloc->mob_loc = bo->mem.start;
bo                598 drivers/gpu/drm/vmwgfx/vmwgfx_fifo.c 	struct ttm_buffer_object *bo = &dev_priv->dummy_query_bo->base;
bo                613 drivers/gpu/drm/vmwgfx/vmwgfx_fifo.c 	if (bo->mem.mem_type == TTM_PL_VRAM) {
bo                615 drivers/gpu/drm/vmwgfx/vmwgfx_fifo.c 		cmd->body.guestResult.offset = bo->offset;
bo                617 drivers/gpu/drm/vmwgfx/vmwgfx_fifo.c 		cmd->body.guestResult.gmrId = bo->mem.start;
bo                644 drivers/gpu/drm/vmwgfx/vmwgfx_fifo.c 	struct ttm_buffer_object *bo = &dev_priv->dummy_query_bo->base;
bo                658 drivers/gpu/drm/vmwgfx/vmwgfx_fifo.c 	BUG_ON(bo->mem.mem_type != VMW_PL_MOB);
bo                659 drivers/gpu/drm/vmwgfx/vmwgfx_fifo.c 	cmd->body.mobid = bo->mem.start;
bo                 48 drivers/gpu/drm/vmwgfx/vmwgfx_gmrid_manager.c 				  struct ttm_buffer_object *bo,
bo                 65 drivers/gpu/drm/vmwgfx/vmwgfx_gmrid_manager.c 		gman->used_gmr_pages += bo->num_pages;
bo                 72 drivers/gpu/drm/vmwgfx/vmwgfx_gmrid_manager.c 	mem->num_pages = bo->num_pages;
bo                 78 drivers/gpu/drm/vmwgfx/vmwgfx_gmrid_manager.c 	gman->used_gmr_pages -= bo->num_pages;
bo                383 drivers/gpu/drm/vmwgfx/vmwgfx_ioctl.c 	if (!vfb->bo) {
bo                 92 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 				struct vmw_buffer_object *bo,
bo                106 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	ret = ttm_bo_reserve(&bo->base, true, false, NULL);
bo                112 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	ret = ttm_bo_kmap(&bo->base, kmap_offset, kmap_num, &map);
bo                122 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	ttm_bo_unreserve(&bo->base);
bo                146 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 			  struct ttm_buffer_object *bo,
bo                201 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	ret = ttm_bo_reserve(bo, true, false, NULL);
bo                207 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	ret = ttm_bo_kmap(bo, kmap_offset, kmap_num, &map);
bo                227 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	ttm_bo_unreserve(bo);
bo                358 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	if (vps->bo)
bo                359 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 		vmw_bo_unreference(&vps->bo);
bo                362 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 		if (vmw_framebuffer_to_vfb(fb)->bo) {
bo                363 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 			vps->bo = vmw_framebuffer_to_vfbd(fb)->buffer;
bo                364 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 			vmw_bo_reference(vps->bo);
bo                396 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	du->cursor_bo = vps->bo;
bo                405 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	} else if (vps->bo) {
bo                406 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 		ret = vmw_cursor_update_bo(dev_priv, vps->bo,
bo                512 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	if (!vmw_framebuffer_to_vfb(fb)->bo)
bo                680 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	if (vps->bo)
bo                681 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 		(void) vmw_bo_reference(vps->bo);
bo                736 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	if (vps->bo)
bo                737 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 		vmw_bo_unreference(&vps->bo);
bo               1075 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	buf = vfb->bo ?  vmw_framebuffer_to_vfbd(&vfb->base)->buffer :
bo               1089 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 		if (vfb->bo) {
bo               1118 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	buf = vfb->bo ?  vmw_framebuffer_to_vfbd(&vfb->base)->buffer :
bo               1216 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 				      struct vmw_buffer_object *bo,
bo               1229 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	if (unlikely(requested_size > bo->base.num_pages * PAGE_SIZE)) {
bo               1258 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	vfbd->base.bo = true;
bo               1259 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	vfbd->buffer = vmw_bo_reference(bo);
bo               1271 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	vmw_bo_unreference(&bo);
bo               1310 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 			struct vmw_buffer_object *bo,
bo               1325 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	    bo && only_2d &&
bo               1329 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 					  bo, &surface);
bo               1348 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	} else if (bo) {
bo               1349 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 		ret = vmw_kms_new_framebuffer_bo(dev_priv, bo, &vfb,
bo               1376 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	struct vmw_buffer_object *bo = NULL;
bo               1402 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 				     &surface, &bo);
bo               1407 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	if (!bo &&
bo               1416 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	vfb = vmw_kms_new_framebuffer(dev_priv, bo, surface,
bo               1426 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	if (bo)
bo               1427 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 		vmw_bo_unreference(&bo);
bo               2821 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	if (update->vfb->bo) {
bo                221 drivers/gpu/drm/vmwgfx/vmwgfx_kms.h 	bool bo;
bo                288 drivers/gpu/drm/vmwgfx/vmwgfx_kms.h 	struct vmw_buffer_object *bo;
bo                438 drivers/gpu/drm/vmwgfx/vmwgfx_kms.h 			struct vmw_buffer_object *bo,
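
The vmwgfx_kms.c entries above all follow TTM's CPU-access discipline: reserve the buffer object, kmap the pages you need, do the copy, then unmap and unreserve. A minimal sketch of that sequence under the signatures visible in this index (the helper name and the single-page mapping are invented for illustration; ttm_kmap_obj_virtual() is the usual accessor from ttm_bo_api.h):

	/* Hypothetical helper: copy CPU data into the first page of a bo. */
	static int example_copy_to_bo(struct ttm_buffer_object *bo,
				      const void *src, size_t size)
	{
		struct ttm_bo_kmap_obj map;
		bool is_iomem;
		int ret;

		ret = ttm_bo_reserve(bo, true, false, NULL); /* lock the object */
		if (ret)
			return ret;

		ret = ttm_bo_kmap(bo, 0, 1, &map); /* map page 0 */
		if (ret)
			goto out_unreserve;

		/* A real caller would use memcpy_toio() when is_iomem is set. */
		memcpy(ttm_kmap_obj_virtual(&map, &is_iomem), src, size);
		ttm_bo_kunmap(&map);

	out_unreserve:
		ttm_bo_unreserve(bo);
		return ret;
	}
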
bo                200 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	struct ttm_buffer_object *bo;
bo                205 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	bo = otable->page_table->pt_bo;
bo                220 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	if (bo) {
bo                223 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 		ret = ttm_bo_reserve(bo, false, true, NULL);
bo                226 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 		vmw_bo_fence_single(bo, NULL);
bo                227 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 		ttm_bo_unreserve(bo);
bo                353 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	struct ttm_buffer_object *bo = batch->otable_bo;
bo                361 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	ret = ttm_bo_reserve(bo, false, true, NULL);
bo                364 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	vmw_bo_fence_single(bo, NULL);
bo                365 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	ttm_bo_unreserve(bo);
bo                550 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	struct ttm_buffer_object *bo = mob->pt_bo;
bo                556 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	ret = ttm_bo_reserve(bo, false, true, NULL);
bo                559 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	vsgt = vmw_bo_sg_table(bo);
bo                574 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	ttm_bo_unreserve(bo);
bo                605 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	struct ttm_buffer_object *bo = mob->pt_bo;
bo                607 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	if (bo) {
bo                608 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 		ret = ttm_bo_reserve(bo, false, true, NULL);
bo                623 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	if (bo) {
bo                624 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 		vmw_bo_fence_single(bo, NULL);
bo                625 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 		ttm_bo_unreserve(bo);
bo                109 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c 		struct ttm_buffer_object *bo = &res->backup->base;
bo                111 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c 		ttm_bo_reserve(bo, false, false, NULL);
bo                116 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c 			val_buf.bo = bo;
bo                122 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c 		ttm_bo_unreserve(bo);
bo                389 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c 	      val_buf->bo != NULL) ||
bo                390 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c 	     (!func->needs_backup && val_buf->bo != NULL))) {
bo                492 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c 	val_buf->bo = &res->backup->base;
bo                516 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c 	ttm_bo_put(val_buf->bo);
bo                517 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c 	val_buf->bo = NULL;
bo                571 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c 	if (likely(val_buf->bo == NULL))
bo                577 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c 	ttm_bo_put(val_buf->bo);
bo                578 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c 	val_buf->bo = NULL;
bo                598 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c 	val_buf.bo = NULL;
bo                647 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c 	val_buf.bo = NULL;
bo                650 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c 		val_buf.bo = &res->backup->base;
bo                718 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c 		.bo = &vbo->base,
bo                789 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c void vmw_query_move_notify(struct ttm_buffer_object *bo,
bo                793 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c 	struct ttm_bo_device *bdev = bo->bdev;
bo                801 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c 	dx_query_mob = container_of(bo, struct vmw_buffer_object, base);
bo                808 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c 	if (mem->mem_type == TTM_PL_SYSTEM && bo->mem.mem_type == VMW_PL_MOB) {
bo                816 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c 		vmw_bo_fence_single(bo, fence);
bo                821 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c 		(void) ttm_bo_wait(bo, false, false);
bo                243 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c 		sou->buffer = vps->bo;
bo                385 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c 	if (vps->bo)
bo                386 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c 		vmw_bo_unpin(vmw_priv(crtc->dev), vps->bo, false);
bo                387 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c 	vmw_bo_unreference(&vps->bo);
bo                417 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c 		vmw_bo_unreference(&vps->bo);
bo                426 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c 	if (vps->bo) {
bo                432 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c 			return vmw_bo_pin_in_vram(dev_priv, vps->bo,
bo                436 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c 		vmw_bo_unreference(&vps->bo);
bo                440 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c 	vps->bo = kzalloc(sizeof(*vps->bo), GFP_KERNEL);
bo                441 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c 	if (!vps->bo)
bo                450 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c 	ret = vmw_bo_init(dev_priv, vps->bo, size,
bo                455 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c 		vps->bo = NULL; /* vmw_bo_init frees on error */
bo                465 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c 	return vmw_bo_pin_in_vram(dev_priv, vps->bo, true);
bo                741 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c 		if (vfb->bo)
bo                255 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c 	struct ttm_buffer_object *bo = val_buf->bo;
bo                257 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c 	BUG_ON(bo->mem.mem_type != VMW_PL_MOB);
bo                266 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c 	cmd->body.mobid = bo->mem.start;
bo                305 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c 	vmw_bo_fence_single(val_buf->bo, fence);
bo                451 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c 	struct ttm_buffer_object *bo = val_buf->bo;
bo                453 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c 	BUG_ON(bo->mem.mem_type != VMW_PL_MOB);
bo                527 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c 	vmw_bo_fence_single(val_buf->bo, fence);
bo               1056 drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c 	new_vfbs = (vfb->bo) ? NULL : vmw_framebuffer_to_vfbs(new_fb);
bo               1061 drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c 	else if (vfb->bo)
bo               1615 drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c 		if (vfb->bo)
bo                468 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c 	BUG_ON(!val_buf->bo);
bo                474 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c 	vmw_bo_get_guest_ptr(val_buf->bo, &ptr);
bo                486 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c 	vmw_bo_fence_single(val_buf->bo, fence);
bo               1152 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c 	struct ttm_buffer_object *bo = val_buf->bo;
bo               1154 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c 	BUG_ON(bo->mem.mem_type != VMW_PL_MOB);
bo               1165 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c 	cmd1->body.mobid = bo->mem.start;
bo               1183 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c 	struct ttm_buffer_object *bo = val_buf->bo;
bo               1202 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c 	BUG_ON(bo->mem.mem_type != VMW_PL_MOB);
bo               1237 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c 	vmw_bo_fence_single(val_buf->bo, fence);
bo                533 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c int vmw_bo_map_dma(struct ttm_buffer_object *bo)
bo                536 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		container_of(bo->ttm, struct vmw_ttm_tt, dma_ttm.ttm);
bo                550 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c void vmw_bo_unmap_dma(struct ttm_buffer_object *bo)
bo                553 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		container_of(bo->ttm, struct vmw_ttm_tt, dma_ttm.ttm);
bo                570 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c const struct vmw_sg_table *vmw_bo_sg_table(struct ttm_buffer_object *bo)
bo                573 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		container_of(bo->ttm, struct vmw_ttm_tt, dma_ttm.ttm);
bo                712 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c static struct ttm_tt *vmw_ttm_tt_create(struct ttm_buffer_object *bo,
bo                723 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	vmw_be->dev_priv = container_of(bo->bdev, struct vmw_private, bdev);
bo                727 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		ret = ttm_dma_tt_init(&vmw_be->dma_ttm, bo, page_flags);
bo                729 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		ret = ttm_tt_init(&vmw_be->dma_ttm.ttm, bo, page_flags);
bo                783 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c static void vmw_evict_flags(struct ttm_buffer_object *bo,
bo                789 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c static int vmw_verify_access(struct ttm_buffer_object *bo, struct file *filp)
bo                794 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	return vmw_user_bo_verify_access(bo, tfile);
bo                829 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c static int vmw_ttm_fault_reserve_notify(struct ttm_buffer_object *bo)
bo                844 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c static void vmw_move_notify(struct ttm_buffer_object *bo,
bo                848 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	vmw_bo_move_notify(bo, mem);
bo                849 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	vmw_query_move_notify(bo, mem);
bo                858 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c static void vmw_swap_notify(struct ttm_buffer_object *bo)
bo                860 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	vmw_bo_swap_notify(bo);
bo                861 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	(void) ttm_bo_wait(bo, false, false);
bo                191 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 			if (entry->base.bo == &vbo->base) {
bo                287 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		val_buf->bo = ttm_bo_get_unless_zero(&vbo->base);
bo                288 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		if (!val_buf->bo)
bo                512 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c int vmw_validation_bo_validate_single(struct ttm_buffer_object *bo,
bo                517 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		container_of(bo, struct vmw_buffer_object, base);
bo                528 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		return ttm_bo_validate(bo, &vmw_mob_placement, &ctx);
bo                537 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	ret = ttm_bo_validate(bo, &vmw_vram_gmr_placement, &ctx);
bo                546 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	ret = ttm_bo_validate(bo, &vmw_vram_placement, &ctx);
bo                571 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 			ret = ttm_bo_validate(entry->base.bo,
bo                575 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 			(entry->base.bo, intr, entry->as_mob);
bo                671 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		ttm_bo_put(entry->base.bo);
bo                672 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		entry->base.bo = NULL;
bo                237 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h int vmw_validation_bo_validate_single(struct ttm_buffer_object *bo,
bo                 21 drivers/gpu/host1x/hw/channel_hw.c static void trace_write_gather(struct host1x_cdma *cdma, struct host1x_bo *bo,
bo                 28 drivers/gpu/host1x/hw/channel_hw.c 		mem = host1x_bo_mmap(bo);
bo                 41 drivers/gpu/host1x/hw/channel_hw.c 			trace_host1x_cdma_push_gather(dev_name(dev), bo,
bo                 46 drivers/gpu/host1x/hw/channel_hw.c 		host1x_bo_munmap(bo, mem);
bo                 66 drivers/gpu/host1x/hw/channel_hw.c 		trace_write_gather(cdma, g->bo, g->offset, g->words);
bo                212 drivers/gpu/host1x/hw/debug_hw.c 				mapped = host1x_bo_mmap(g->bo);
bo                226 drivers/gpu/host1x/hw/debug_hw.c 				host1x_bo_munmap(g->bo, mapped);
bo                 87 drivers/gpu/host1x/job.c void host1x_job_add_gather(struct host1x_job *job, struct host1x_bo *bo,
bo                 93 drivers/gpu/host1x/job.c 	gather->bo = bo;
bo                112 drivers/gpu/host1x/job.c 		reloc->target.bo = host1x_bo_get(reloc->target.bo);
bo                113 drivers/gpu/host1x/job.c 		if (!reloc->target.bo) {
bo                118 drivers/gpu/host1x/job.c 		phys_addr = host1x_bo_pin(reloc->target.bo, &sgt);
bo                121 drivers/gpu/host1x/job.c 		job->unpins[job->num_unpins].bo = reloc->target.bo;
bo                136 drivers/gpu/host1x/job.c 		g->bo = host1x_bo_get(g->bo);
bo                137 drivers/gpu/host1x/job.c 		if (!g->bo) {
bo                142 drivers/gpu/host1x/job.c 		phys_addr = host1x_bo_pin(g->bo, &sgt);
bo                175 drivers/gpu/host1x/job.c 		job->unpins[job->num_unpins].bo = g->bo;
bo                191 drivers/gpu/host1x/job.c 	struct host1x_bo *cmdbuf = g->bo;
bo                202 drivers/gpu/host1x/job.c 		if (cmdbuf != reloc->cmdbuf.bo)
bo                243 drivers/gpu/host1x/job.c 	if (reloc->cmdbuf.bo != cmdbuf || reloc->cmdbuf.offset != offset)
bo                379 drivers/gpu/host1x/job.c 	fw->cmdbuf = g->bo;
bo                481 drivers/gpu/host1x/job.c 		gather = host1x_bo_mmap(g->bo);
bo                484 drivers/gpu/host1x/job.c 		host1x_bo_munmap(g->bo, gather);
bo                534 drivers/gpu/host1x/job.c 			if (job->gathers[j].bo == g->bo) {
bo                570 drivers/gpu/host1x/job.c 		host1x_bo_unpin(unpin->bo, unpin->sgt);
bo                571 drivers/gpu/host1x/job.c 		host1x_bo_put(unpin->bo);
bo                 14 drivers/gpu/host1x/job.h 	struct host1x_bo *bo;
bo                 20 drivers/gpu/host1x/job.h 	struct host1x_bo *bo;
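
The job.c entries show each gather's bo being referenced (host1x_bo_get), pinned for DMA, and recorded in job->unpins for later release. From the submitter's side the entry point is host1x_job_add_gather(); a one-line usage sketch (the cmdbuf variable and num_words value are invented, and the trailing words/offset parameters are completed from this kernel's header rather than the lines above):

	/* Queue num_words words of cmdbuf, starting at offset 0, as a gather. */
	host1x_job_add_gather(job, &cmdbuf->base, num_words, 0);
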
bo                370 drivers/gpu/ipu-v3/ipu-cpmem.c 	int bpp = 0, npb = 0, ro, go, bo, to;
bo                374 drivers/gpu/ipu-v3/ipu-cpmem.c 	bo = rgb->bits_per_pixel - rgb->blue.length - rgb->blue.offset;
bo                382 drivers/gpu/ipu-v3/ipu-cpmem.c 	ipu_ch_param_write_field(ch, IPU_FIELD_OFS2, bo);
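
The ipu-cpmem.c lines compute each color component's offset from the most significant end of the pixel rather than from bit 0. A worked example (the numbers are mine, not from the source):

	/*
	 * RGB565: bits_per_pixel = 16, blue.offset = 0, blue.length = 5,
	 * so bo = 16 - 5 - 0 = 11 -- blue begins 11 bits from the MSB,
	 * which is the form the CPMEM IPU_FIELD_OFS* fields expect.
	 */
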
bo               2206 drivers/hid/hid-wiimote-modules.c 	__u8 sx, sy, tb, wb, bd, bm, bp, bo, br, bb, bg, by, bu;
bo               2246 drivers/hid/hid-wiimote-modules.c 	bo = !(ext[5] & 0x80);
bo               2278 drivers/hid/hid-wiimote-modules.c 			 bo);
bo                419 drivers/media/usb/dvb-usb/technisat-usb2.c 	u8 bo = offset & 0xff;
bo                423 drivers/media/usb/dvb-usb/technisat-usb2.c 			.buf = &bo,
bo               4099 drivers/usb/host/oxu210hp-hcd.c 	static const char * const bo[] = {
bo               4114 drivers/usb/host/oxu210hp-hcd.c 		bo[(id & OXU_BO_MASK) >> OXU_BO_SHIFT],
bo                 39 include/drm/drm_gem_vram_helper.h 	struct ttm_buffer_object bo;
bo                 56 include/drm/drm_gem_vram_helper.h 	struct ttm_buffer_object *bo)
bo                 58 include/drm/drm_gem_vram_helper.h 	return container_of(bo, struct drm_gem_vram_object, bo);
bo                 70 include/drm/drm_gem_vram_helper.h 	return container_of(gem, struct drm_gem_vram_object, bo.base);
bo                 98 include/drm/drm_gem_vram_helper.h void drm_gem_vram_bo_driver_evict_flags(struct ttm_buffer_object *bo,
bo                101 include/drm/drm_gem_vram_helper.h int drm_gem_vram_bo_driver_verify_access(struct ttm_buffer_object *bo,
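
drm_gem_vram_object embeds its ttm_buffer_object and recovers the containing structure with container_of(), the standard kernel upcast idiom. A sketch of the same pattern in a hypothetical driver (my_object and my_evict_flags are invented names; the evict_flags signature is the one listed under ttm_bo_driver.h below):

	struct my_object {
		struct ttm_buffer_object bo;	/* embedded base object */
		u32 placement_flags;		/* driver-private state */
	};

	static void my_evict_flags(struct ttm_buffer_object *bo,
				   struct ttm_placement *pl)
	{
		/* Recover the wrapper from the embedded base. */
		struct my_object *obj = container_of(bo, struct my_object, bo);

		/* ... pick *pl based on obj->placement_flags ... */
	}
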
bo                 23 include/drm/drm_vram_mm_helper.h 	void (*evict_flags)(struct ttm_buffer_object *bo,
bo                 25 include/drm/drm_vram_mm_helper.h 	int (*verify_access)(struct ttm_buffer_object *bo, struct file *filp);
bo                259 include/drm/ttm/ttm_bo_api.h 	struct ttm_buffer_object *bo;
bo                291 include/drm/ttm/ttm_bo_api.h static inline void ttm_bo_get(struct ttm_buffer_object *bo)
bo                293 include/drm/ttm/ttm_bo_api.h 	kref_get(&bo->kref);
bo                307 include/drm/ttm/ttm_bo_api.h ttm_bo_get_unless_zero(struct ttm_buffer_object *bo)
bo                309 include/drm/ttm/ttm_bo_api.h 	if (!kref_get_unless_zero(&bo->kref))
bo                311 include/drm/ttm/ttm_bo_api.h 	return bo;
bo                328 include/drm/ttm/ttm_bo_api.h int ttm_bo_wait(struct ttm_buffer_object *bo, bool interruptible, bool no_wait);
bo                357 include/drm/ttm/ttm_bo_api.h int ttm_bo_validate(struct ttm_buffer_object *bo,
bo                368 include/drm/ttm/ttm_bo_api.h void ttm_bo_put(struct ttm_buffer_object *bo);
bo                380 include/drm/ttm/ttm_bo_api.h void ttm_bo_add_to_lru(struct ttm_buffer_object *bo);
bo                392 include/drm/ttm/ttm_bo_api.h void ttm_bo_del_from_lru(struct ttm_buffer_object *bo);
bo                404 include/drm/ttm/ttm_bo_api.h void ttm_bo_move_to_lru_tail(struct ttm_buffer_object *bo,
bo                441 include/drm/ttm/ttm_bo_api.h bool ttm_bo_eviction_valuable(struct ttm_buffer_object *bo,
bo                458 include/drm/ttm/ttm_bo_api.h int ttm_bo_synccpu_write_grab(struct ttm_buffer_object *bo, bool no_wait);
bo                467 include/drm/ttm/ttm_bo_api.h void ttm_bo_synccpu_write_release(struct ttm_buffer_object *bo);
bo                521 include/drm/ttm/ttm_bo_api.h 			 struct ttm_buffer_object *bo,
bo                569 include/drm/ttm/ttm_bo_api.h int ttm_bo_init(struct ttm_bo_device *bdev, struct ttm_buffer_object *bo,
bo                700 include/drm/ttm/ttm_bo_api.h int ttm_bo_kmap(struct ttm_buffer_object *bo, unsigned long start_page,
bo                722 include/drm/ttm/ttm_bo_api.h int ttm_fbdev_mmap(struct vm_area_struct *vma, struct ttm_buffer_object *bo);
bo                768 include/drm/ttm/ttm_bo_api.h int ttm_bo_wait_unreserved(struct ttm_buffer_object *bo);
bo                784 include/drm/ttm/ttm_bo_api.h static inline bool ttm_bo_uses_embedded_gem_object(struct ttm_buffer_object *bo)
bo                786 include/drm/ttm/ttm_bo_api.h 	return bo->base.dev != NULL;
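
ttm_bo_get_unless_zero() returns NULL once the final reference is gone, which lets lookup paths (see the vmwgfx_validation.c entries above) take a reference without racing destruction. A condensed sketch of that idiom (the error code is illustrative):

	struct ttm_buffer_object *bo = ttm_bo_get_unless_zero(&vbo->base);

	if (!bo)
		return -ESRCH;	/* object already being torn down */
	/* ... use bo ... */
	ttm_bo_put(bo);		/* drop the reference when done */
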
bo                107 include/drm/ttm/ttm_bo_driver.h 			 struct ttm_buffer_object *bo,
bo                235 include/drm/ttm/ttm_bo_driver.h 	struct ttm_tt *(*ttm_tt_create)(struct ttm_buffer_object *bo,
bo                284 include/drm/ttm/ttm_bo_driver.h 	bool (*eviction_valuable)(struct ttm_buffer_object *bo,
bo                296 include/drm/ttm/ttm_bo_driver.h 	void (*evict_flags)(struct ttm_buffer_object *bo,
bo                310 include/drm/ttm/ttm_bo_driver.h 	int (*move)(struct ttm_buffer_object *bo, bool evict,
bo                326 include/drm/ttm/ttm_bo_driver.h 	int (*verify_access)(struct ttm_buffer_object *bo,
bo                336 include/drm/ttm/ttm_bo_driver.h 	void (*move_notify)(struct ttm_buffer_object *bo,
bo                341 include/drm/ttm/ttm_bo_driver.h 	int (*fault_reserve_notify)(struct ttm_buffer_object *bo);
bo                346 include/drm/ttm/ttm_bo_driver.h 	void (*swap_notify)(struct ttm_buffer_object *bo);
bo                365 include/drm/ttm/ttm_bo_driver.h 	unsigned long (*io_mem_pfn)(struct ttm_buffer_object *bo,
bo                382 include/drm/ttm/ttm_bo_driver.h 	int (*access_memory)(struct ttm_buffer_object *bo, unsigned long offset,
bo                392 include/drm/ttm/ttm_bo_driver.h 	void (*del_from_lru_notify)(struct ttm_buffer_object *bo);
bo                402 include/drm/ttm/ttm_bo_driver.h 	void (*release_notify)(struct ttm_buffer_object *bo);
bo                580 include/drm/ttm/ttm_bo_driver.h int ttm_bo_mem_space(struct ttm_buffer_object *bo,
bo                585 include/drm/ttm/ttm_bo_driver.h void ttm_bo_mem_put(struct ttm_buffer_object *bo, struct ttm_mem_reg *mem);
bo                586 include/drm/ttm/ttm_bo_driver.h void ttm_bo_mem_put_locked(struct ttm_buffer_object *bo,
bo                616 include/drm/ttm/ttm_bo_driver.h void ttm_bo_unmap_virtual(struct ttm_buffer_object *bo);
bo                625 include/drm/ttm/ttm_bo_driver.h void ttm_bo_unmap_virtual_locked(struct ttm_buffer_object *bo);
bo                627 include/drm/ttm/ttm_bo_driver.h int ttm_mem_io_reserve_vm(struct ttm_buffer_object *bo);
bo                628 include/drm/ttm/ttm_bo_driver.h void ttm_mem_io_free_vm(struct ttm_buffer_object *bo);
bo                632 include/drm/ttm/ttm_bo_driver.h void ttm_bo_del_sub_from_lru(struct ttm_buffer_object *bo);
bo                633 include/drm/ttm/ttm_bo_driver.h void ttm_bo_add_to_lru(struct ttm_buffer_object *bo);
bo                656 include/drm/ttm/ttm_bo_driver.h static inline int __ttm_bo_reserve(struct ttm_buffer_object *bo,
bo                667 include/drm/ttm/ttm_bo_driver.h 		success = dma_resv_trylock(bo->base.resv);
bo                672 include/drm/ttm/ttm_bo_driver.h 		ret = dma_resv_lock_interruptible(bo->base.resv, ticket);
bo                674 include/drm/ttm/ttm_bo_driver.h 		ret = dma_resv_lock(bo->base.resv, ticket);
bo                724 include/drm/ttm/ttm_bo_driver.h static inline int ttm_bo_reserve(struct ttm_buffer_object *bo,
bo                730 include/drm/ttm/ttm_bo_driver.h 	WARN_ON(!kref_read(&bo->kref));
bo                732 include/drm/ttm/ttm_bo_driver.h 	ret = __ttm_bo_reserve(bo, interruptible, no_wait, ticket);
bo                734 include/drm/ttm/ttm_bo_driver.h 		ttm_bo_del_sub_from_lru(bo);
bo                749 include/drm/ttm/ttm_bo_driver.h static inline int ttm_bo_reserve_slowpath(struct ttm_buffer_object *bo,
bo                755 include/drm/ttm/ttm_bo_driver.h 	WARN_ON(!kref_read(&bo->kref));
bo                758 include/drm/ttm/ttm_bo_driver.h 		ret = dma_resv_lock_slow_interruptible(bo->base.resv,
bo                761 include/drm/ttm/ttm_bo_driver.h 		dma_resv_lock_slow(bo->base.resv, ticket);
bo                764 include/drm/ttm/ttm_bo_driver.h 		ttm_bo_del_sub_from_lru(bo);
bo                778 include/drm/ttm/ttm_bo_driver.h static inline void ttm_bo_unreserve(struct ttm_buffer_object *bo)
bo                780 include/drm/ttm/ttm_bo_driver.h 	spin_lock(&bo->bdev->glob->lru_lock);
bo                781 include/drm/ttm/ttm_bo_driver.h 	if (list_empty(&bo->lru))
bo                782 include/drm/ttm/ttm_bo_driver.h 		ttm_bo_add_to_lru(bo);
bo                784 include/drm/ttm/ttm_bo_driver.h 		ttm_bo_move_to_lru_tail(bo, NULL);
bo                785 include/drm/ttm/ttm_bo_driver.h 	spin_unlock(&bo->bdev->glob->lru_lock);
bo                786 include/drm/ttm/ttm_bo_driver.h 	dma_resv_unlock(bo->base.resv);
bo                815 include/drm/ttm/ttm_bo_driver.h int ttm_bo_move_ttm(struct ttm_buffer_object *bo,
bo                837 include/drm/ttm/ttm_bo_driver.h int ttm_bo_move_memcpy(struct ttm_buffer_object *bo,
bo                848 include/drm/ttm/ttm_bo_driver.h void ttm_bo_free_old_node(struct ttm_buffer_object *bo);
bo                865 include/drm/ttm/ttm_bo_driver.h int ttm_bo_move_accel_cleanup(struct ttm_buffer_object *bo,
bo                880 include/drm/ttm/ttm_bo_driver.h int ttm_bo_pipeline_move(struct ttm_buffer_object *bo,
bo                891 include/drm/ttm/ttm_bo_driver.h int ttm_bo_pipeline_gutting(struct ttm_buffer_object *bo);
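
The ttm_bo_driver.h block above is the placement machinery: ttm_bo_reserve() takes the object lock, ttm_bo_validate() migrates the buffer until it satisfies a placement, and ttm_bo_unreserve() returns it to the LRU. A minimal sketch combining the three (the helper name is invented; the ttm_operation_ctx fields assume this kernel's layout):

	static int example_move_to(struct ttm_buffer_object *bo,
				   struct ttm_placement *placement,
				   bool interruptible)
	{
		struct ttm_operation_ctx ctx = {
			.interruptible = interruptible,
			.no_wait_gpu = false,
		};
		int ret;

		ret = ttm_bo_reserve(bo, interruptible, false, NULL);
		if (ret)
			return ret;

		ret = ttm_bo_validate(bo, placement, &ctx);	/* migrate */

		ttm_bo_unreserve(bo);
		return ret;
	}
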
bo                 48 include/drm/ttm/ttm_execbuf_util.h 	struct ttm_buffer_object *bo;
bo                147 include/drm/ttm/ttm_tt.h int ttm_tt_create(struct ttm_buffer_object *bo, bool zero_alloc);
bo                161 include/drm/ttm/ttm_tt.h int ttm_tt_init(struct ttm_tt *ttm, struct ttm_buffer_object *bo,
bo                163 include/drm/ttm/ttm_tt.h int ttm_dma_tt_init(struct ttm_dma_tt *ttm_dma, struct ttm_buffer_object *bo,
bo                165 include/drm/ttm/ttm_tt.h int ttm_sg_tt_init(struct ttm_dma_tt *ttm_dma, struct ttm_buffer_object *bo,
bo                265 include/drm/ttm/ttm_tt.h struct ttm_tt *ttm_agp_tt_create(struct ttm_buffer_object *bo,
bo                 65 include/linux/host1x.h 	struct host1x_bo *(*get)(struct host1x_bo *bo);
bo                 66 include/linux/host1x.h 	void (*put)(struct host1x_bo *bo);
bo                 67 include/linux/host1x.h 	dma_addr_t (*pin)(struct host1x_bo *bo, struct sg_table **sgt);
bo                 68 include/linux/host1x.h 	void (*unpin)(struct host1x_bo *bo, struct sg_table *sgt);
bo                 69 include/linux/host1x.h 	void *(*mmap)(struct host1x_bo *bo);
bo                 70 include/linux/host1x.h 	void (*munmap)(struct host1x_bo *bo, void *addr);
bo                 71 include/linux/host1x.h 	void *(*kmap)(struct host1x_bo *bo, unsigned int pagenum);
bo                 72 include/linux/host1x.h 	void (*kunmap)(struct host1x_bo *bo, unsigned int pagenum, void *addr);
bo                 79 include/linux/host1x.h static inline void host1x_bo_init(struct host1x_bo *bo,
bo                 82 include/linux/host1x.h 	bo->ops = ops;
bo                 85 include/linux/host1x.h static inline struct host1x_bo *host1x_bo_get(struct host1x_bo *bo)
bo                 87 include/linux/host1x.h 	return bo->ops->get(bo);
bo                 90 include/linux/host1x.h static inline void host1x_bo_put(struct host1x_bo *bo)
bo                 92 include/linux/host1x.h 	bo->ops->put(bo);
bo                 95 include/linux/host1x.h static inline dma_addr_t host1x_bo_pin(struct host1x_bo *bo,
bo                 98 include/linux/host1x.h 	return bo->ops->pin(bo, sgt);
bo                101 include/linux/host1x.h static inline void host1x_bo_unpin(struct host1x_bo *bo, struct sg_table *sgt)
bo                103 include/linux/host1x.h 	bo->ops->unpin(bo, sgt);
bo                106 include/linux/host1x.h static inline void *host1x_bo_mmap(struct host1x_bo *bo)
bo                108 include/linux/host1x.h 	return bo->ops->mmap(bo);
bo                111 include/linux/host1x.h static inline void host1x_bo_munmap(struct host1x_bo *bo, void *addr)
bo                113 include/linux/host1x.h 	bo->ops->munmap(bo, addr);
bo                116 include/linux/host1x.h static inline void *host1x_bo_kmap(struct host1x_bo *bo, unsigned int pagenum)
bo                118 include/linux/host1x.h 	return bo->ops->kmap(bo, pagenum);
bo                121 include/linux/host1x.h static inline void host1x_bo_kunmap(struct host1x_bo *bo,
bo                124 include/linux/host1x.h 	bo->ops->kunmap(bo, pagenum, addr);
bo                172 include/linux/host1x.h 		struct host1x_bo *bo;
bo                176 include/linux/host1x.h 		struct host1x_bo *bo;
bo                241 include/linux/host1x.h void host1x_job_add_gather(struct host1x_job *job, struct host1x_bo *bo,
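
host1x_bo is a pure vtable abstraction: every inline wrapper above just dispatches through bo->ops. A sketch of how a buffer implementation plugs in (the my_* callbacks are hypothetical; the ops structure name is taken from the same header):

	static const struct host1x_bo_ops my_bo_ops = {
		.get	= my_bo_get,
		.put	= my_bo_put,
		.pin	= my_bo_pin,
		.unpin	= my_bo_unpin,
		.mmap	= my_bo_mmap,
		.munmap	= my_bo_munmap,
		.kmap	= my_bo_kmap,
		.kunmap	= my_bo_kunmap,
	};

	host1x_bo_init(&my_bo->base, &my_bo_ops);	/* attach the vtable */

	/* Consumers then go through the wrappers, e.g. to pin for DMA: */
	struct sg_table *sgt;
	dma_addr_t iova = host1x_bo_pin(&my_bo->base, &sgt);
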
bo                412 include/net/flow_offload.h 			  struct flow_block_offload *bo,
bo                 97 include/trace/events/host1x.h 	TP_PROTO(const char *name, struct host1x_bo *bo,
bo                100 include/trace/events/host1x.h 	TP_ARGS(name, bo, words, offset, cmdbuf),
bo                104 include/trace/events/host1x.h 		__field(struct host1x_bo *, bo)
bo                118 include/trace/events/host1x.h 		__entry->bo = bo;
bo                124 include/trace/events/host1x.h 	  __entry->name, __entry->bo,
bo                237 include/trace/events/host1x.h 	TP_PROTO(struct host1x_bo *bo, u32 offset, u32 syncpt_id, u32 thresh,
bo                240 include/trace/events/host1x.h 	TP_ARGS(bo, offset, syncpt_id, thresh, min),
bo                243 include/trace/events/host1x.h 		__field(struct host1x_bo *, bo)
bo                251 include/trace/events/host1x.h 		__entry->bo = bo;
bo                259 include/trace/events/host1x.h 		__entry->bo, __entry->offset,
bo                295 include/uapi/drm/vc4_drm.h 	__u64 bo;
bo                513 kernel/trace/trace_probe.c 	unsigned long bw, bo;
bo                525 kernel/trace/trace_probe.c 	bo = simple_strtoul(bf, &tail, 0);
bo                535 kernel/trace/trace_probe.c 	code->lshift = BYTES_TO_BITS(t->size) - (bw + bo);
bo                539 kernel/trace/trace_probe.c 	return (BYTES_TO_BITS(t->size) < (bw + bo)) ? -EINVAL : 0;
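
In the trace_probe.c lines, bw is the bitfield width and bo its offset from bit 0; the left shift pushes the field against the most significant bit of the type. A worked example (values mine):

	/*
	 * Spec b4@3/32: bw = 4, bo = 3, BYTES_TO_BITS(t->size) = 32,
	 * so lshift = 32 - (4 + 3) = 25. Shifting left by 25 and then
	 * right by the companion shift (BYTES_TO_BITS(t->size) - bw = 28,
	 * set alongside lshift in the same function but not matched by
	 * this index) isolates the 4-bit field. The final check rejects
	 * specs where bw + bo overflows the type, e.g. b30@8/32.
	 */
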
bo                193 net/can/bcm.c  	struct bcm_sock *bo = bcm_sk(sk);
bo                198 net/can/bcm.c  	seq_printf(m, " / bo %pK", bo);
bo                199 net/can/bcm.c  	seq_printf(m, " / dropped %lu", bo->dropped_usr_msgs);
bo                200 net/can/bcm.c  	seq_printf(m, " / bound %s", bcm_proc_getifname(net, ifname, bo->ifindex));
bo                203 net/can/bcm.c  	list_for_each_entry(op, &bo->rx_ops, list) {
bo                238 net/can/bcm.c  	list_for_each_entry(op, &bo->tx_ops, list) {
bo                365 net/can/bcm.c  		struct bcm_sock *bo = bcm_sk(sk);
bo                369 net/can/bcm.c  		bo->dropped_usr_msgs++;
bo                838 net/can/bcm.c  	struct bcm_sock *bo = bcm_sk(sk);
bo                857 net/can/bcm.c  	op = bcm_find_op(&bo->tx_ops, msg_head, ifindex);
bo                959 net/can/bcm.c  		list_add(&op->list, &bo->tx_ops);
bo               1013 net/can/bcm.c  	struct bcm_sock *bo = bcm_sk(sk);
bo               1039 net/can/bcm.c  	op = bcm_find_op(&bo->rx_ops, msg_head, ifindex);
bo               1134 net/can/bcm.c  		list_add(&op->list, &bo->rx_ops);
bo               1269 net/can/bcm.c  	struct bcm_sock *bo = bcm_sk(sk);
bo               1270 net/can/bcm.c  	int ifindex = bo->ifindex; /* default ifindex for this bcm_op */
bo               1275 net/can/bcm.c  	if (!bo->bound)
bo               1335 net/can/bcm.c  		if (bcm_delete_tx_op(&bo->tx_ops, &msg_head, ifindex))
bo               1342 net/can/bcm.c  		if (bcm_delete_rx_op(&bo->rx_ops, &msg_head, ifindex))
bo               1351 net/can/bcm.c  		ret = bcm_read_op(&bo->tx_ops, &msg_head, ifindex);
bo               1357 net/can/bcm.c  		ret = bcm_read_op(&bo->rx_ops, &msg_head, ifindex);
bo               1385 net/can/bcm.c  	struct bcm_sock *bo = container_of(nb, struct bcm_sock, notifier);
bo               1386 net/can/bcm.c  	struct sock *sk = &bo->sk;
bo               1402 net/can/bcm.c  		list_for_each_entry(op, &bo->rx_ops, list)
bo               1407 net/can/bcm.c  		if (bo->bound && bo->ifindex == dev->ifindex) {
bo               1408 net/can/bcm.c  			bo->bound   = 0;
bo               1409 net/can/bcm.c  			bo->ifindex = 0;
bo               1423 net/can/bcm.c  		if (bo->bound && bo->ifindex == dev->ifindex) {
bo               1438 net/can/bcm.c  	struct bcm_sock *bo = bcm_sk(sk);
bo               1440 net/can/bcm.c  	bo->bound            = 0;
bo               1441 net/can/bcm.c  	bo->ifindex          = 0;
bo               1442 net/can/bcm.c  	bo->dropped_usr_msgs = 0;
bo               1443 net/can/bcm.c  	bo->bcm_proc_read    = NULL;
bo               1445 net/can/bcm.c  	INIT_LIST_HEAD(&bo->tx_ops);
bo               1446 net/can/bcm.c  	INIT_LIST_HEAD(&bo->rx_ops);
bo               1449 net/can/bcm.c  	bo->notifier.notifier_call = bcm_notifier;
bo               1451 net/can/bcm.c  	register_netdevice_notifier(&bo->notifier);
bo               1463 net/can/bcm.c  	struct bcm_sock *bo;
bo               1470 net/can/bcm.c  	bo = bcm_sk(sk);
bo               1474 net/can/bcm.c  	unregister_netdevice_notifier(&bo->notifier);
bo               1478 net/can/bcm.c  	list_for_each_entry_safe(op, next, &bo->tx_ops, list)
bo               1481 net/can/bcm.c  	list_for_each_entry_safe(op, next, &bo->rx_ops, list) {
bo               1511 net/can/bcm.c  	if (net->can.bcmproc_dir && bo->bcm_proc_read)
bo               1512 net/can/bcm.c  		remove_proc_entry(bo->procname, net->can.bcmproc_dir);
bo               1516 net/can/bcm.c  	if (bo->bound) {
bo               1517 net/can/bcm.c  		bo->bound   = 0;
bo               1518 net/can/bcm.c  		bo->ifindex = 0;
bo               1535 net/can/bcm.c  	struct bcm_sock *bo = bcm_sk(sk);
bo               1544 net/can/bcm.c  	if (bo->bound) {
bo               1564 net/can/bcm.c  		bo->ifindex = dev->ifindex;
bo               1569 net/can/bcm.c  		bo->ifindex = 0;
bo               1575 net/can/bcm.c  		sprintf(bo->procname, "%lu", sock_i_ino(sk));
bo               1576 net/can/bcm.c  		bo->bcm_proc_read = proc_create_net_single(bo->procname, 0644,
bo               1579 net/can/bcm.c  		if (!bo->bcm_proc_read) {
bo               1586 net/can/bcm.c  	bo->bound = 1;
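
The bcm.c state above (bo->bound, bo->ifindex) is driven from userspace by connect()ing a CAN_BCM socket to an interface. A minimal userspace sketch (standard CAN broadcast-manager usage; "can0" is an example name and error handling is omitted):

	#include <string.h>
	#include <sys/socket.h>
	#include <net/if.h>
	#include <linux/can.h>
	#include <linux/can/bcm.h>

	int s = socket(PF_CAN, SOCK_DGRAM, CAN_BCM);
	struct sockaddr_can addr = { .can_family = AF_CAN };

	addr.can_ifindex = if_nametoindex("can0");	/* 0 leaves bo->ifindex unset */
	connect(s, (struct sockaddr *)&addr, sizeof(addr));	/* sets bo->bound = 1 */
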
bo                485 net/core/flow_offload.c 			  struct flow_block_offload *bo,
bo                497 net/core/flow_offload.c 				  bo);
bo                 38 net/ieee802154/trace.h #define BOOL_TO_STR(bo) (bo) ? "true" : "false"
bo                 32 net/mac802154/trace.h #define BOOL_TO_STR(bo) (bo) ? "true" : "false"
bo                189 net/netfilter/nf_tables_offload.c static int nft_flow_offload_bind(struct flow_block_offload *bo,
bo                192 net/netfilter/nf_tables_offload.c 	list_splice(&bo->cb_list, &basechain->flow_block.cb_list);
bo                196 net/netfilter/nf_tables_offload.c static int nft_flow_offload_unbind(struct flow_block_offload *bo,
bo                201 net/netfilter/nf_tables_offload.c 	list_for_each_entry_safe(block_cb, next, &bo->cb_list, list) {
bo                210 net/netfilter/nf_tables_offload.c 			   struct flow_block_offload *bo,
bo                217 net/netfilter/nf_tables_offload.c 		err = nft_flow_offload_bind(bo, basechain);
bo                220 net/netfilter/nf_tables_offload.c 		err = nft_flow_offload_unbind(bo, basechain);
bo                235 net/netfilter/nf_tables_offload.c 	struct flow_block_offload bo = {};
bo                238 net/netfilter/nf_tables_offload.c 	bo.net = dev_net(dev);
bo                239 net/netfilter/nf_tables_offload.c 	bo.block = &chain->flow_block;
bo                240 net/netfilter/nf_tables_offload.c 	bo.command = cmd;
bo                241 net/netfilter/nf_tables_offload.c 	bo.binder_type = FLOW_BLOCK_BINDER_TYPE_CLSACT_INGRESS;
bo                242 net/netfilter/nf_tables_offload.c 	bo.extack = &extack;
bo                243 net/netfilter/nf_tables_offload.c 	INIT_LIST_HEAD(&bo.cb_list);
bo                245 net/netfilter/nf_tables_offload.c 	err = dev->netdev_ops->ndo_setup_tc(dev, TC_SETUP_BLOCK, &bo);
bo                249 net/netfilter/nf_tables_offload.c 	return nft_block_setup(chain, &bo, cmd);
bo                259 net/netfilter/nf_tables_offload.c 	struct flow_block_offload bo = {};
bo                264 net/netfilter/nf_tables_offload.c 	bo.net = dev_net(dev);
bo                265 net/netfilter/nf_tables_offload.c 	bo.block = &chain->flow_block;
bo                266 net/netfilter/nf_tables_offload.c 	bo.command = cmd;
bo                267 net/netfilter/nf_tables_offload.c 	bo.binder_type = FLOW_BLOCK_BINDER_TYPE_CLSACT_INGRESS;
bo                268 net/netfilter/nf_tables_offload.c 	bo.extack = &extack;
bo                269 net/netfilter/nf_tables_offload.c 	INIT_LIST_HEAD(&bo.cb_list);
bo                271 net/netfilter/nf_tables_offload.c 	cb(dev, cb_priv, TC_SETUP_BLOCK, &bo);
bo                273 net/netfilter/nf_tables_offload.c 	nft_block_setup(chain, &bo, cmd);
bo                280 net/netfilter/nf_tables_offload.c 	struct flow_block_offload bo = {};
bo                283 net/netfilter/nf_tables_offload.c 	bo.net = dev_net(dev);
bo                284 net/netfilter/nf_tables_offload.c 	bo.block = &chain->flow_block;
bo                285 net/netfilter/nf_tables_offload.c 	bo.command = cmd;
bo                286 net/netfilter/nf_tables_offload.c 	bo.binder_type = FLOW_BLOCK_BINDER_TYPE_CLSACT_INGRESS;
bo                287 net/netfilter/nf_tables_offload.c 	bo.extack = &extack;
bo                288 net/netfilter/nf_tables_offload.c 	INIT_LIST_HEAD(&bo.cb_list);
bo                290 net/netfilter/nf_tables_offload.c 	flow_indr_block_call(dev, &bo, cmd);
bo                292 net/netfilter/nf_tables_offload.c 	if (list_empty(&bo.cb_list))
bo                295 net/netfilter/nf_tables_offload.c 	return nft_block_setup(chain, &bo, cmd);
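
All three nf_tables_offload.c call sites fill a stack-allocated flow_block_offload the same way before handing it off; only the delivery differs (ndo_setup_tc directly, a stashed indirect callback, or flow_indr_block_call). The shared pattern, condensed from the lines above:

	struct flow_block_offload bo = {};

	bo.net		= dev_net(dev);
	bo.block	= &chain->flow_block;
	bo.command	= cmd;
	bo.binder_type	= FLOW_BLOCK_BINDER_TYPE_CLSACT_INGRESS;
	bo.extack	= &extack;
	INIT_LIST_HEAD(&bo.cb_list);	/* drivers append their flow_block_cb here */

	err = dev->netdev_ops->ndo_setup_tc(dev, TC_SETUP_BLOCK, &bo);
	if (err < 0)
		return err;

	return nft_block_setup(chain, &bo, cmd);
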
bo                 38 net/netfilter/xt_set.c #define ADT_OPT(n, f, d, fs, cfs, t, p, b, po, bo)	\
bo                 48 net/netfilter/xt_set.c 	.ext.bytes_op = bo,				\
bo                606 net/sched/cls_api.c 			   struct flow_block_offload *bo);
bo                612 net/sched/cls_api.c 	struct flow_block_offload bo = {
bo                620 net/sched/cls_api.c 	INIT_LIST_HEAD(&bo.cb_list);
bo                625 net/sched/cls_api.c 	bo.block = &block->flow_block;
bo                628 net/sched/cls_api.c 	cb(dev, cb_priv, TC_SETUP_BLOCK, &bo);
bo                630 net/sched/cls_api.c 	tcf_block_setup(block, &bo);
bo                686 net/sched/cls_api.c 	struct flow_block_offload bo = {
bo                694 net/sched/cls_api.c 	INIT_LIST_HEAD(&bo.cb_list);
bo                696 net/sched/cls_api.c 	flow_indr_block_call(dev, &bo, command);
bo                697 net/sched/cls_api.c 	tcf_block_setup(block, &bo);
bo                711 net/sched/cls_api.c 	struct flow_block_offload bo = {};
bo                714 net/sched/cls_api.c 	bo.net = dev_net(dev);
bo                715 net/sched/cls_api.c 	bo.command = command;
bo                716 net/sched/cls_api.c 	bo.binder_type = ei->binder_type;
bo                717 net/sched/cls_api.c 	bo.block = &block->flow_block;
bo                718 net/sched/cls_api.c 	bo.block_shared = tcf_block_shared(block);
bo                719 net/sched/cls_api.c 	bo.extack = extack;
bo                720 net/sched/cls_api.c 	INIT_LIST_HEAD(&bo.cb_list);
bo                722 net/sched/cls_api.c 	err = dev->netdev_ops->ndo_setup_tc(dev, TC_SETUP_BLOCK, &bo);
bo                726 net/sched/cls_api.c 	return tcf_block_setup(block, &bo);
bo               1478 net/sched/cls_api.c 			  struct flow_block_offload *bo)
bo               1485 net/sched/cls_api.c 	list_for_each_entry(block_cb, &bo->cb_list, list) {
bo               1489 net/sched/cls_api.c 						  bo->extack);
bo               1492 net/sched/cls_api.c 		if (!bo->unlocked_driver_cb)
bo               1497 net/sched/cls_api.c 	list_splice(&bo->cb_list, &block->flow_block.cb_list);
bo               1502 net/sched/cls_api.c 	list_for_each_entry_safe(block_cb, next, &bo->cb_list, list) {
bo               1509 net/sched/cls_api.c 			if (!bo->unlocked_driver_cb)
bo               1519 net/sched/cls_api.c 			     struct flow_block_offload *bo)
bo               1525 net/sched/cls_api.c 	list_for_each_entry_safe(block_cb, next, &bo->cb_list, list) {
bo               1532 net/sched/cls_api.c 		if (!bo->unlocked_driver_cb)
bo               1538 net/sched/cls_api.c 			   struct flow_block_offload *bo)
bo               1542 net/sched/cls_api.c 	switch (bo->command) {
bo               1544 net/sched/cls_api.c 		err = tcf_block_bind(block, bo);
bo               1548 net/sched/cls_api.c 		tcf_block_unbind(block, bo);
bo                189 net/wireless/trace.h #define BOOL_TO_STR(bo) (bo) ? "true" : "false"