Lines Matching refs:ttm
306 r = ttm_tt_set_placement_caching(bo->ttm, tmp_mem.placement); in amdgpu_move_vram_ram()
311 r = ttm_tt_bind(bo->ttm, &tmp_mem); in amdgpu_move_vram_ram()
375 if (old_mem->mem_type == TTM_PL_SYSTEM && bo->ttm == NULL) { in amdgpu_bo_move()
482 struct ttm_dma_tt ttm; member
491 static int amdgpu_ttm_tt_pin_userptr(struct ttm_tt *ttm) in amdgpu_ttm_tt_pin_userptr() argument
493 struct amdgpu_device *adev = amdgpu_get_adev(ttm->bdev); in amdgpu_ttm_tt_pin_userptr()
494 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_tt_pin_userptr()
508 unsigned long end = gtt->userptr + ttm->num_pages * PAGE_SIZE; in amdgpu_ttm_tt_pin_userptr()
517 unsigned num_pages = ttm->num_pages - pinned; in amdgpu_ttm_tt_pin_userptr()
519 struct page **pages = ttm->pages + pinned; in amdgpu_ttm_tt_pin_userptr()
528 } while (pinned < ttm->num_pages); in amdgpu_ttm_tt_pin_userptr()
530 r = sg_alloc_table_from_pages(ttm->sg, ttm->pages, ttm->num_pages, 0, in amdgpu_ttm_tt_pin_userptr()
531 ttm->num_pages << PAGE_SHIFT, in amdgpu_ttm_tt_pin_userptr()
537 nents = dma_map_sg(adev->dev, ttm->sg->sgl, ttm->sg->nents, direction); in amdgpu_ttm_tt_pin_userptr()
538 if (nents != ttm->sg->nents) in amdgpu_ttm_tt_pin_userptr()
541 drm_prime_sg_to_page_addr_arrays(ttm->sg, ttm->pages, in amdgpu_ttm_tt_pin_userptr()
542 gtt->ttm.dma_address, ttm->num_pages); in amdgpu_ttm_tt_pin_userptr()
547 kfree(ttm->sg); in amdgpu_ttm_tt_pin_userptr()
550 release_pages(ttm->pages, pinned, 0); in amdgpu_ttm_tt_pin_userptr()
554 static void amdgpu_ttm_tt_unpin_userptr(struct ttm_tt *ttm) in amdgpu_ttm_tt_unpin_userptr() argument
556 struct amdgpu_device *adev = amdgpu_get_adev(ttm->bdev); in amdgpu_ttm_tt_unpin_userptr()
557 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_tt_unpin_userptr()
565 if (!ttm->sg->sgl) in amdgpu_ttm_tt_unpin_userptr()
569 dma_unmap_sg(adev->dev, ttm->sg->sgl, ttm->sg->nents, direction); in amdgpu_ttm_tt_unpin_userptr()
571 for_each_sg_page(ttm->sg->sgl, &sg_iter, ttm->sg->nents, 0) { in amdgpu_ttm_tt_unpin_userptr()
580 sg_free_table(ttm->sg); in amdgpu_ttm_tt_unpin_userptr()
583 static int amdgpu_ttm_backend_bind(struct ttm_tt *ttm, in amdgpu_ttm_backend_bind() argument
586 struct amdgpu_ttm_tt *gtt = (void*)ttm; in amdgpu_ttm_backend_bind()
587 uint32_t flags = amdgpu_ttm_tt_pte_flags(gtt->adev, ttm, bo_mem); in amdgpu_ttm_backend_bind()
591 r = amdgpu_ttm_tt_pin_userptr(ttm); in amdgpu_ttm_backend_bind()
598 if (!ttm->num_pages) { in amdgpu_ttm_backend_bind()
600 ttm->num_pages, bo_mem, ttm); in amdgpu_ttm_backend_bind()
608 r = amdgpu_gart_bind(gtt->adev, gtt->offset, ttm->num_pages, in amdgpu_ttm_backend_bind()
609 ttm->pages, gtt->ttm.dma_address, flags); in amdgpu_ttm_backend_bind()
613 ttm->num_pages, (unsigned)gtt->offset); in amdgpu_ttm_backend_bind()
619 static int amdgpu_ttm_backend_unbind(struct ttm_tt *ttm) in amdgpu_ttm_backend_unbind() argument
621 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_backend_unbind()
625 amdgpu_gart_unbind(gtt->adev, gtt->offset, ttm->num_pages); in amdgpu_ttm_backend_unbind()
628 amdgpu_ttm_tt_unpin_userptr(ttm); in amdgpu_ttm_backend_unbind()
633 static void amdgpu_ttm_backend_destroy(struct ttm_tt *ttm) in amdgpu_ttm_backend_destroy() argument
635 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_backend_destroy()
635 ttm_dma_tt_fini(&gtt->ttm); in amdgpu_ttm_backend_destroy()
660 gtt->ttm.ttm.func = &amdgpu_backend_func; in amdgpu_ttm_tt_create()
662 if (ttm_dma_tt_init(&gtt->ttm, bdev, size, page_flags, dummy_read_page)) { in amdgpu_ttm_tt_create()
666 return &gtt->ttm.ttm; in amdgpu_ttm_tt_create()
669 static int amdgpu_ttm_tt_populate(struct ttm_tt *ttm) in amdgpu_ttm_tt_populate() argument
672 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_tt_populate()
675 bool slave = !!(ttm->page_flags & TTM_PAGE_FLAG_SG); in amdgpu_ttm_tt_populate()
677 if (ttm->state != tt_unpopulated) in amdgpu_ttm_tt_populate()
681 ttm->sg = kzalloc(sizeof(struct sg_table), GFP_KERNEL); in amdgpu_ttm_tt_populate()
682 if (!ttm->sg) in amdgpu_ttm_tt_populate()
685 ttm->page_flags |= TTM_PAGE_FLAG_SG; in amdgpu_ttm_tt_populate()
686 ttm->state = tt_unbound; in amdgpu_ttm_tt_populate()
690 if (slave && ttm->sg) { in amdgpu_ttm_tt_populate()
691 drm_prime_sg_to_page_addr_arrays(ttm->sg, ttm->pages, in amdgpu_ttm_tt_populate()
692 gtt->ttm.dma_address, ttm->num_pages); in amdgpu_ttm_tt_populate()
693 ttm->state = tt_unbound; in amdgpu_ttm_tt_populate()
697 adev = amdgpu_get_adev(ttm->bdev); in amdgpu_ttm_tt_populate()
701 return ttm_dma_populate(&gtt->ttm, adev->dev); in amdgpu_ttm_tt_populate()
705 r = ttm_pool_populate(ttm); in amdgpu_ttm_tt_populate()
710 for (i = 0; i < ttm->num_pages; i++) { in amdgpu_ttm_tt_populate()
711 gtt->ttm.dma_address[i] = pci_map_page(adev->pdev, ttm->pages[i], in amdgpu_ttm_tt_populate()
714 if (pci_dma_mapping_error(adev->pdev, gtt->ttm.dma_address[i])) { in amdgpu_ttm_tt_populate()
716 pci_unmap_page(adev->pdev, gtt->ttm.dma_address[i], in amdgpu_ttm_tt_populate()
718 gtt->ttm.dma_address[i] = 0; in amdgpu_ttm_tt_populate()
720 ttm_pool_unpopulate(ttm); in amdgpu_ttm_tt_populate()
727 static void amdgpu_ttm_tt_unpopulate(struct ttm_tt *ttm) in amdgpu_ttm_tt_unpopulate() argument
730 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_tt_unpopulate()
732 bool slave = !!(ttm->page_flags & TTM_PAGE_FLAG_SG); in amdgpu_ttm_tt_unpopulate()
735 kfree(ttm->sg); in amdgpu_ttm_tt_unpopulate()
736 ttm->page_flags &= ~TTM_PAGE_FLAG_SG; in amdgpu_ttm_tt_unpopulate()
743 adev = amdgpu_get_adev(ttm->bdev); in amdgpu_ttm_tt_unpopulate()
747 ttm_dma_unpopulate(&gtt->ttm, adev->dev); in amdgpu_ttm_tt_unpopulate()
752 for (i = 0; i < ttm->num_pages; i++) { in amdgpu_ttm_tt_unpopulate()
753 if (gtt->ttm.dma_address[i]) { in amdgpu_ttm_tt_unpopulate()
754 pci_unmap_page(adev->pdev, gtt->ttm.dma_address[i], in amdgpu_ttm_tt_unpopulate()
759 ttm_pool_unpopulate(ttm); in amdgpu_ttm_tt_unpopulate()
762 int amdgpu_ttm_tt_set_userptr(struct ttm_tt *ttm, uint64_t addr, in amdgpu_ttm_tt_set_userptr() argument
765 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_tt_set_userptr()
776 bool amdgpu_ttm_tt_has_userptr(struct ttm_tt *ttm) in amdgpu_ttm_tt_has_userptr() argument
778 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_tt_has_userptr()
786 bool amdgpu_ttm_tt_affect_userptr(struct ttm_tt *ttm, unsigned long start, in amdgpu_ttm_tt_affect_userptr() argument
789 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_tt_affect_userptr()
795 if (gtt->ttm.ttm.state != tt_bound || !gtt->userptr) in amdgpu_ttm_tt_affect_userptr()
798 size = (unsigned long)gtt->ttm.ttm.num_pages * PAGE_SIZE; in amdgpu_ttm_tt_affect_userptr()
805 bool amdgpu_ttm_tt_is_readonly(struct ttm_tt *ttm) in amdgpu_ttm_tt_is_readonly() argument
807 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_tt_is_readonly()
815 uint32_t amdgpu_ttm_tt_pte_flags(struct amdgpu_device *adev, struct ttm_tt *ttm, in amdgpu_ttm_tt_pte_flags() argument
826 if (ttm->caching_state == tt_cached) in amdgpu_ttm_tt_pte_flags()
835 if (!amdgpu_ttm_tt_is_readonly(ttm)) in amdgpu_ttm_tt_pte_flags()