msm_obj           797 drivers/gpu/drm/msm/msm_drv.c 	struct msm_gem_object *msm_obj;
msm_obj           821 drivers/gpu/drm/msm/msm_drv.c 	msm_obj = to_msm_bo(obj);
msm_obj           832 drivers/gpu/drm/msm/msm_drv.c 		if (args->len >= sizeof(msm_obj->name)) {
msm_obj           836 drivers/gpu/drm/msm/msm_drv.c 		if (copy_from_user(msm_obj->name, u64_to_user_ptr(args->value),
msm_obj           838 drivers/gpu/drm/msm/msm_drv.c 			msm_obj->name[0] = '\0';
msm_obj           842 drivers/gpu/drm/msm/msm_drv.c 		msm_obj->name[args->len] = '\0';
msm_obj           844 drivers/gpu/drm/msm/msm_drv.c 			if (!isprint(msm_obj->name[i])) {
msm_obj           845 drivers/gpu/drm/msm/msm_drv.c 				msm_obj->name[i] = '\0';
msm_obj           851 drivers/gpu/drm/msm/msm_drv.c 		if (args->value && (args->len < strlen(msm_obj->name))) {
msm_obj           855 drivers/gpu/drm/msm/msm_drv.c 		args->len = strlen(msm_obj->name);
msm_obj           858 drivers/gpu/drm/msm/msm_drv.c 					 msm_obj->name, args->len))
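
Note: the msm_drv.c hits above all land in the MSM_INFO_SET_NAME and MSM_INFO_GET_NAME cases of the GEM info ioctl. The sketch below shows how the listed fragments fit together; the enclosing switch, the ret/i locals, and the exact error paths are reconstructed from context rather than quoted from the listing.

	case MSM_INFO_SET_NAME:
		/* length check should leave room for terminating null: */
		if (args->len >= sizeof(msm_obj->name)) {
			ret = -EINVAL;
			break;
		}
		if (copy_from_user(msm_obj->name, u64_to_user_ptr(args->value),
				   args->len)) {
			msm_obj->name[0] = '\0';
			ret = -EFAULT;
			break;
		}
		msm_obj->name[args->len] = '\0';
		for (i = 0; i < args->len; i++) {
			/* truncate at the first unprintable byte: */
			if (!isprint(msm_obj->name[i])) {
				msm_obj->name[i] = '\0';
				break;
			}
		}
		break;
	case MSM_INFO_GET_NAME:
		/* with a NULL value pointer this just reports the length: */
		if (args->value && (args->len < strlen(msm_obj->name))) {
			ret = -EINVAL;
			break;
		}
		args->len = strlen(msm_obj->name);
		if (args->value) {
			if (copy_to_user(u64_to_user_ptr(args->value),
					 msm_obj->name, args->len))
				ret = -EFAULT;
		}
		break;
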
msm_obj            25 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj            27 drivers/gpu/drm/msm/msm_gem.c 	return (((dma_addr_t)msm_obj->vram_node->start) << PAGE_SHIFT) +
msm_obj            33 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj            34 drivers/gpu/drm/msm/msm_gem.c 	return !msm_obj->vram_node;
msm_obj            51 drivers/gpu/drm/msm/msm_gem.c static void sync_for_device(struct msm_gem_object *msm_obj)
msm_obj            53 drivers/gpu/drm/msm/msm_gem.c 	struct device *dev = msm_obj->base.dev->dev;
msm_obj            56 drivers/gpu/drm/msm/msm_gem.c 		dma_sync_sg_for_device(dev, msm_obj->sgt->sgl,
msm_obj            57 drivers/gpu/drm/msm/msm_gem.c 			msm_obj->sgt->nents, DMA_BIDIRECTIONAL);
msm_obj            59 drivers/gpu/drm/msm/msm_gem.c 		dma_map_sg(dev, msm_obj->sgt->sgl,
msm_obj            60 drivers/gpu/drm/msm/msm_gem.c 			msm_obj->sgt->nents, DMA_BIDIRECTIONAL);
msm_obj            64 drivers/gpu/drm/msm/msm_gem.c static void sync_for_cpu(struct msm_gem_object *msm_obj)
msm_obj            66 drivers/gpu/drm/msm/msm_gem.c 	struct device *dev = msm_obj->base.dev->dev;
msm_obj            69 drivers/gpu/drm/msm/msm_gem.c 		dma_sync_sg_for_cpu(dev, msm_obj->sgt->sgl,
msm_obj            70 drivers/gpu/drm/msm/msm_gem.c 			msm_obj->sgt->nents, DMA_BIDIRECTIONAL);
msm_obj            72 drivers/gpu/drm/msm/msm_gem.c 		dma_unmap_sg(dev, msm_obj->sgt->sgl,
msm_obj            73 drivers/gpu/drm/msm/msm_gem.c 			msm_obj->sgt->nents, DMA_BIDIRECTIONAL);
msm_obj            80 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj            91 drivers/gpu/drm/msm/msm_gem.c 	ret = drm_mm_insert_node(&priv->vram.mm, msm_obj->vram_node, npages);
msm_obj           109 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj           111 drivers/gpu/drm/msm/msm_gem.c 	if (!msm_obj->pages) {
msm_obj           127 drivers/gpu/drm/msm/msm_gem.c 		msm_obj->pages = p;
msm_obj           129 drivers/gpu/drm/msm/msm_gem.c 		msm_obj->sgt = drm_prime_pages_to_sg(p, npages);
msm_obj           130 drivers/gpu/drm/msm/msm_gem.c 		if (IS_ERR(msm_obj->sgt)) {
msm_obj           131 drivers/gpu/drm/msm/msm_gem.c 			void *ptr = ERR_CAST(msm_obj->sgt);
msm_obj           134 drivers/gpu/drm/msm/msm_gem.c 			msm_obj->sgt = NULL;
msm_obj           141 drivers/gpu/drm/msm/msm_gem.c 		if (msm_obj->flags & (MSM_BO_WC|MSM_BO_UNCACHED))
msm_obj           142 drivers/gpu/drm/msm/msm_gem.c 			sync_for_device(msm_obj);
msm_obj           145 drivers/gpu/drm/msm/msm_gem.c 	return msm_obj->pages;
msm_obj           150 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj           154 drivers/gpu/drm/msm/msm_gem.c 	drm_mm_remove_node(msm_obj->vram_node);
msm_obj           157 drivers/gpu/drm/msm/msm_gem.c 	kvfree(msm_obj->pages);
msm_obj           162 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj           164 drivers/gpu/drm/msm/msm_gem.c 	if (msm_obj->pages) {
msm_obj           165 drivers/gpu/drm/msm/msm_gem.c 		if (msm_obj->sgt) {
msm_obj           170 drivers/gpu/drm/msm/msm_gem.c 			if (msm_obj->flags & (MSM_BO_WC|MSM_BO_UNCACHED))
msm_obj           171 drivers/gpu/drm/msm/msm_gem.c 				sync_for_cpu(msm_obj);
msm_obj           173 drivers/gpu/drm/msm/msm_gem.c 			sg_free_table(msm_obj->sgt);
msm_obj           174 drivers/gpu/drm/msm/msm_gem.c 			kfree(msm_obj->sgt);
msm_obj           178 drivers/gpu/drm/msm/msm_gem.c 			drm_gem_put_pages(obj, msm_obj->pages, true, false);
msm_obj           182 drivers/gpu/drm/msm/msm_gem.c 		msm_obj->pages = NULL;
msm_obj           188 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj           191 drivers/gpu/drm/msm/msm_gem.c 	mutex_lock(&msm_obj->lock);
msm_obj           193 drivers/gpu/drm/msm/msm_gem.c 	if (WARN_ON(msm_obj->madv != MSM_MADV_WILLNEED)) {
msm_obj           194 drivers/gpu/drm/msm/msm_gem.c 		mutex_unlock(&msm_obj->lock);
msm_obj           199 drivers/gpu/drm/msm/msm_gem.c 	mutex_unlock(&msm_obj->lock);
msm_obj           211 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj           216 drivers/gpu/drm/msm/msm_gem.c 	if (msm_obj->flags & MSM_BO_WC) {
msm_obj           218 drivers/gpu/drm/msm/msm_gem.c 	} else if (msm_obj->flags & MSM_BO_UNCACHED) {
msm_obj           254 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj           265 drivers/gpu/drm/msm/msm_gem.c 	err = mutex_lock_interruptible(&msm_obj->lock);
msm_obj           271 drivers/gpu/drm/msm/msm_gem.c 	if (WARN_ON(msm_obj->madv != MSM_MADV_WILLNEED)) {
msm_obj           272 drivers/gpu/drm/msm/msm_gem.c 		mutex_unlock(&msm_obj->lock);
msm_obj           293 drivers/gpu/drm/msm/msm_gem.c 	mutex_unlock(&msm_obj->lock);
msm_obj           302 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj           305 drivers/gpu/drm/msm/msm_gem.c 	WARN_ON(!mutex_is_locked(&msm_obj->lock));
msm_obj           321 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj           323 drivers/gpu/drm/msm/msm_gem.c 	mutex_lock(&msm_obj->lock);
msm_obj           325 drivers/gpu/drm/msm/msm_gem.c 	mutex_unlock(&msm_obj->lock);
msm_obj           332 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj           335 drivers/gpu/drm/msm/msm_gem.c 	WARN_ON(!mutex_is_locked(&msm_obj->lock));
msm_obj           343 drivers/gpu/drm/msm/msm_gem.c 	list_add_tail(&vma->list, &msm_obj->vmas);
msm_obj           351 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj           354 drivers/gpu/drm/msm/msm_gem.c 	WARN_ON(!mutex_is_locked(&msm_obj->lock));
msm_obj           356 drivers/gpu/drm/msm/msm_gem.c 	list_for_each_entry(vma, &msm_obj->vmas, list) {
msm_obj           377 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj           380 drivers/gpu/drm/msm/msm_gem.c 	WARN_ON(!mutex_is_locked(&msm_obj->lock));
msm_obj           382 drivers/gpu/drm/msm/msm_gem.c 	list_for_each_entry_safe(vma, tmp, &msm_obj->vmas, list) {
msm_obj           394 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj           398 drivers/gpu/drm/msm/msm_gem.c 	WARN_ON(!mutex_is_locked(&msm_obj->lock));
msm_obj           421 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj           426 drivers/gpu/drm/msm/msm_gem.c 	if (!(msm_obj->flags & MSM_BO_GPU_READONLY))
msm_obj           429 drivers/gpu/drm/msm/msm_gem.c 	WARN_ON(!mutex_is_locked(&msm_obj->lock));
msm_obj           431 drivers/gpu/drm/msm/msm_gem.c 	if (WARN_ON(msm_obj->madv != MSM_MADV_WILLNEED))
msm_obj           443 drivers/gpu/drm/msm/msm_gem.c 			msm_obj->sgt, obj->size >> PAGE_SHIFT);
msm_obj           450 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj           454 drivers/gpu/drm/msm/msm_gem.c 	mutex_lock(&msm_obj->lock);
msm_obj           464 drivers/gpu/drm/msm/msm_gem.c 	mutex_unlock(&msm_obj->lock);
msm_obj           475 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj           478 drivers/gpu/drm/msm/msm_gem.c 	mutex_lock(&msm_obj->lock);
msm_obj           480 drivers/gpu/drm/msm/msm_gem.c 	mutex_unlock(&msm_obj->lock);
msm_obj           491 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj           494 drivers/gpu/drm/msm/msm_gem.c 	mutex_lock(&msm_obj->lock);
msm_obj           496 drivers/gpu/drm/msm/msm_gem.c 	mutex_unlock(&msm_obj->lock);
msm_obj           510 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj           513 drivers/gpu/drm/msm/msm_gem.c 	mutex_lock(&msm_obj->lock);
msm_obj           519 drivers/gpu/drm/msm/msm_gem.c 	mutex_unlock(&msm_obj->lock);
msm_obj           554 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj           557 drivers/gpu/drm/msm/msm_gem.c 	mutex_lock(&msm_obj->lock);
msm_obj           559 drivers/gpu/drm/msm/msm_gem.c 	if (WARN_ON(msm_obj->madv > madv)) {
msm_obj           561 drivers/gpu/drm/msm/msm_gem.c 			msm_obj->madv, madv);
msm_obj           562 drivers/gpu/drm/msm/msm_gem.c 		mutex_unlock(&msm_obj->lock);
msm_obj           572 drivers/gpu/drm/msm/msm_gem.c 	msm_obj->vmap_count++;
msm_obj           574 drivers/gpu/drm/msm/msm_gem.c 	if (!msm_obj->vaddr) {
msm_obj           580 drivers/gpu/drm/msm/msm_gem.c 		msm_obj->vaddr = vmap(pages, obj->size >> PAGE_SHIFT,
msm_obj           582 drivers/gpu/drm/msm/msm_gem.c 		if (msm_obj->vaddr == NULL) {
msm_obj           588 drivers/gpu/drm/msm/msm_gem.c 	mutex_unlock(&msm_obj->lock);
msm_obj           589 drivers/gpu/drm/msm/msm_gem.c 	return msm_obj->vaddr;
msm_obj           592 drivers/gpu/drm/msm/msm_gem.c 	msm_obj->vmap_count--;
msm_obj           593 drivers/gpu/drm/msm/msm_gem.c 	mutex_unlock(&msm_obj->lock);
msm_obj           615 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj           617 drivers/gpu/drm/msm/msm_gem.c 	mutex_lock(&msm_obj->lock);
msm_obj           618 drivers/gpu/drm/msm/msm_gem.c 	WARN_ON(msm_obj->vmap_count < 1);
msm_obj           619 drivers/gpu/drm/msm/msm_gem.c 	msm_obj->vmap_count--;
msm_obj           620 drivers/gpu/drm/msm/msm_gem.c 	mutex_unlock(&msm_obj->lock);
msm_obj           628 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj           630 drivers/gpu/drm/msm/msm_gem.c 	mutex_lock(&msm_obj->lock);
msm_obj           634 drivers/gpu/drm/msm/msm_gem.c 	if (msm_obj->madv != __MSM_MADV_PURGED)
msm_obj           635 drivers/gpu/drm/msm/msm_gem.c 		msm_obj->madv = madv;
msm_obj           637 drivers/gpu/drm/msm/msm_gem.c 	madv = msm_obj->madv;
msm_obj           639 drivers/gpu/drm/msm/msm_gem.c 	mutex_unlock(&msm_obj->lock);
msm_obj           647 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj           650 drivers/gpu/drm/msm/msm_gem.c 	WARN_ON(!is_purgeable(msm_obj));
msm_obj           653 drivers/gpu/drm/msm/msm_gem.c 	mutex_lock_nested(&msm_obj->lock, subclass);
msm_obj           661 drivers/gpu/drm/msm/msm_gem.c 	msm_obj->madv = __MSM_MADV_PURGED;
msm_obj           676 drivers/gpu/drm/msm/msm_gem.c 	mutex_unlock(&msm_obj->lock);
msm_obj           681 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj           683 drivers/gpu/drm/msm/msm_gem.c 	WARN_ON(!mutex_is_locked(&msm_obj->lock));
msm_obj           685 drivers/gpu/drm/msm/msm_gem.c 	if (!msm_obj->vaddr || WARN_ON(!is_vunmapable(msm_obj)))
msm_obj           688 drivers/gpu/drm/msm/msm_gem.c 	vunmap(msm_obj->vaddr);
msm_obj           689 drivers/gpu/drm/msm/msm_gem.c 	msm_obj->vaddr = NULL;
msm_obj           694 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj           696 drivers/gpu/drm/msm/msm_gem.c 	mutex_lock_nested(&msm_obj->lock, subclass);
msm_obj           698 drivers/gpu/drm/msm/msm_gem.c 	mutex_unlock(&msm_obj->lock);
msm_obj           739 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj           740 drivers/gpu/drm/msm/msm_gem.c 	WARN_ON(msm_obj->madv != MSM_MADV_WILLNEED);
msm_obj           741 drivers/gpu/drm/msm/msm_gem.c 	msm_obj->gpu = gpu;
msm_obj           746 drivers/gpu/drm/msm/msm_gem.c 	list_del_init(&msm_obj->mm_list);
msm_obj           747 drivers/gpu/drm/msm/msm_gem.c 	list_add_tail(&msm_obj->mm_list, &gpu->active_list);
msm_obj           754 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj           758 drivers/gpu/drm/msm/msm_gem.c 	msm_obj->gpu = NULL;
msm_obj           759 drivers/gpu/drm/msm/msm_gem.c 	list_del_init(&msm_obj->mm_list);
msm_obj           760 drivers/gpu/drm/msm/msm_gem.c 	list_add_tail(&msm_obj->mm_list, &priv->inactive_list);
msm_obj           801 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj           809 drivers/gpu/drm/msm/msm_gem.c 	mutex_lock(&msm_obj->lock);
msm_obj           811 drivers/gpu/drm/msm/msm_gem.c 	switch (msm_obj->madv) {
msm_obj           825 drivers/gpu/drm/msm/msm_gem.c 			msm_obj->flags, is_active(msm_obj) ? 'A' : 'I',
msm_obj           827 drivers/gpu/drm/msm/msm_gem.c 			off, msm_obj->vaddr);
msm_obj           829 drivers/gpu/drm/msm/msm_gem.c 	seq_printf(m, " %08zu %9s %-32s\n", obj->size, madv, msm_obj->name);
msm_obj           831 drivers/gpu/drm/msm/msm_gem.c 	if (!list_empty(&msm_obj->vmas)) {
msm_obj           835 drivers/gpu/drm/msm/msm_gem.c 		list_for_each_entry(vma, &msm_obj->vmas, list)
msm_obj           860 drivers/gpu/drm/msm/msm_gem.c 	mutex_unlock(&msm_obj->lock);
msm_obj           865 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj;
msm_obj           870 drivers/gpu/drm/msm/msm_gem.c 	list_for_each_entry(msm_obj, list, mm_list) {
msm_obj           871 drivers/gpu/drm/msm/msm_gem.c 		struct drm_gem_object *obj = &msm_obj->base;
msm_obj           885 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj           889 drivers/gpu/drm/msm/msm_gem.c 	if (llist_add(&msm_obj->freed, &priv->free_list))
msm_obj           893 drivers/gpu/drm/msm/msm_gem.c static void free_object(struct msm_gem_object *msm_obj)
msm_obj           895 drivers/gpu/drm/msm/msm_gem.c 	struct drm_gem_object *obj = &msm_obj->base;
msm_obj           901 drivers/gpu/drm/msm/msm_gem.c 	WARN_ON(is_active(msm_obj));
msm_obj           903 drivers/gpu/drm/msm/msm_gem.c 	list_del(&msm_obj->mm_list);
msm_obj           905 drivers/gpu/drm/msm/msm_gem.c 	mutex_lock(&msm_obj->lock);
msm_obj           910 drivers/gpu/drm/msm/msm_gem.c 		if (msm_obj->vaddr)
msm_obj           911 drivers/gpu/drm/msm/msm_gem.c 			dma_buf_vunmap(obj->import_attach->dmabuf, msm_obj->vaddr);
msm_obj           916 drivers/gpu/drm/msm/msm_gem.c 		if (msm_obj->pages)
msm_obj           917 drivers/gpu/drm/msm/msm_gem.c 			kvfree(msm_obj->pages);
msm_obj           919 drivers/gpu/drm/msm/msm_gem.c 		drm_prime_gem_destroy(obj, msm_obj->sgt);
msm_obj           927 drivers/gpu/drm/msm/msm_gem.c 	mutex_unlock(&msm_obj->lock);
msm_obj           928 drivers/gpu/drm/msm/msm_gem.c 	kfree(msm_obj);
msm_obj           937 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj, *next;
msm_obj           943 drivers/gpu/drm/msm/msm_gem.c 		llist_for_each_entry_safe(msm_obj, next,
msm_obj           945 drivers/gpu/drm/msm/msm_gem.c 			free_object(msm_obj);
msm_obj           984 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj;
msm_obj           997 drivers/gpu/drm/msm/msm_gem.c 	msm_obj = kzalloc(sizeof(*msm_obj), GFP_KERNEL);
msm_obj           998 drivers/gpu/drm/msm/msm_gem.c 	if (!msm_obj)
msm_obj          1001 drivers/gpu/drm/msm/msm_gem.c 	mutex_init(&msm_obj->lock);
msm_obj          1003 drivers/gpu/drm/msm/msm_gem.c 	msm_obj->flags = flags;
msm_obj          1004 drivers/gpu/drm/msm/msm_gem.c 	msm_obj->madv = MSM_MADV_WILLNEED;
msm_obj          1006 drivers/gpu/drm/msm/msm_gem.c 	INIT_LIST_HEAD(&msm_obj->submit_entry);
msm_obj          1007 drivers/gpu/drm/msm/msm_gem.c 	INIT_LIST_HEAD(&msm_obj->vmas);
msm_obj          1011 drivers/gpu/drm/msm/msm_gem.c 		list_add_tail(&msm_obj->mm_list, &priv->inactive_list);
msm_obj          1014 drivers/gpu/drm/msm/msm_gem.c 		list_add_tail(&msm_obj->mm_list, &priv->inactive_list);
msm_obj          1018 drivers/gpu/drm/msm/msm_gem.c 	*obj = &msm_obj->base;
msm_obj          1054 drivers/gpu/drm/msm/msm_gem.c 		struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj          1056 drivers/gpu/drm/msm/msm_gem.c 		mutex_lock(&msm_obj->lock);
msm_obj          1059 drivers/gpu/drm/msm/msm_gem.c 		mutex_unlock(&msm_obj->lock);
msm_obj          1111 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj;
msm_obj          1132 drivers/gpu/drm/msm/msm_gem.c 	msm_obj = to_msm_bo(obj);
msm_obj          1133 drivers/gpu/drm/msm/msm_gem.c 	mutex_lock(&msm_obj->lock);
msm_obj          1134 drivers/gpu/drm/msm/msm_gem.c 	msm_obj->sgt = sgt;
msm_obj          1135 drivers/gpu/drm/msm/msm_gem.c 	msm_obj->pages = kvmalloc_array(npages, sizeof(struct page *), GFP_KERNEL);
msm_obj          1136 drivers/gpu/drm/msm/msm_gem.c 	if (!msm_obj->pages) {
msm_obj          1137 drivers/gpu/drm/msm/msm_gem.c 		mutex_unlock(&msm_obj->lock);
msm_obj          1142 drivers/gpu/drm/msm/msm_gem.c 	ret = drm_prime_sg_to_page_addr_arrays(sgt, msm_obj->pages, NULL, npages);
msm_obj          1144 drivers/gpu/drm/msm/msm_gem.c 		mutex_unlock(&msm_obj->lock);
msm_obj          1148 drivers/gpu/drm/msm/msm_gem.c 	mutex_unlock(&msm_obj->lock);
msm_obj          1225 drivers/gpu/drm/msm/msm_gem.c 	struct msm_gem_object *msm_obj = to_msm_bo(bo);
msm_obj          1232 drivers/gpu/drm/msm/msm_gem.c 	vsnprintf(msm_obj->name, sizeof(msm_obj->name), fmt, ap);
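
The msm_gem.c hits span the whole object lifecycle: page attach/detach (get_pages/put_pages plus the sync_for_device/sync_for_cpu helpers for non-coherent buffers), per-aspace VMA management, vmap refcounting, madvise/purge, the active/inactive lists, and deferred free. As one representative path, here is a sketch of the kernel-mapping helper behind the hits at lines 554-593; the function name get_vaddr(), the vmap() protection flags, and the error labels are assumptions filled in around the listed lines.

	static void *get_vaddr(struct drm_gem_object *obj, unsigned madv)
	{
		struct msm_gem_object *msm_obj = to_msm_bo(obj);
		int ret = 0;

		mutex_lock(&msm_obj->lock);

		if (WARN_ON(msm_obj->madv > madv)) {
			DRM_DEV_ERROR(obj->dev->dev, "Invalid madv state: %u vs %u\n",
				msm_obj->madv, madv);
			mutex_unlock(&msm_obj->lock);
			return ERR_PTR(-EBUSY);
		}

		/* bump vmap_count before vmap() so a concurrent shrinker pass
		 * sees the object as not vunmapable (is_vunmapable()) while
		 * the mapping is still being built:
		 */
		msm_obj->vmap_count++;

		if (!msm_obj->vaddr) {
			struct page **pages = get_pages(obj);

			if (IS_ERR(pages)) {
				ret = PTR_ERR(pages);
				goto fail;
			}
			msm_obj->vaddr = vmap(pages, obj->size >> PAGE_SHIFT,
					VM_MAP, pgprot_writecombine(PAGE_KERNEL));
			if (msm_obj->vaddr == NULL) {
				ret = -ENOMEM;
				goto fail;
			}
		}

		mutex_unlock(&msm_obj->lock);
		return msm_obj->vaddr;

	fail:
		msm_obj->vmap_count--;
		mutex_unlock(&msm_obj->lock);
		return ERR_PTR(ret);
	}
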
msm_obj            88 drivers/gpu/drm/msm/msm_gem.h static inline bool is_active(struct msm_gem_object *msm_obj)
msm_obj            90 drivers/gpu/drm/msm/msm_gem.h 	return msm_obj->gpu != NULL;
msm_obj            93 drivers/gpu/drm/msm/msm_gem.h static inline bool is_purgeable(struct msm_gem_object *msm_obj)
msm_obj            95 drivers/gpu/drm/msm/msm_gem.h 	WARN_ON(!mutex_is_locked(&msm_obj->base.dev->struct_mutex));
msm_obj            96 drivers/gpu/drm/msm/msm_gem.h 	return (msm_obj->madv == MSM_MADV_DONTNEED) && msm_obj->sgt &&
msm_obj            97 drivers/gpu/drm/msm/msm_gem.h 			!msm_obj->base.dma_buf && !msm_obj->base.import_attach;
msm_obj           100 drivers/gpu/drm/msm/msm_gem.h static inline bool is_vunmapable(struct msm_gem_object *msm_obj)
msm_obj           102 drivers/gpu/drm/msm/msm_gem.h 	return (msm_obj->vmap_count == 0) && msm_obj->vaddr;
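
These three msm_gem.h helpers are short enough that the listing nearly contains them whole; restored here with their braces, nothing else added:

	static inline bool is_active(struct msm_gem_object *msm_obj)
	{
		return msm_obj->gpu != NULL;
	}

	static inline bool is_purgeable(struct msm_gem_object *msm_obj)
	{
		WARN_ON(!mutex_is_locked(&msm_obj->base.dev->struct_mutex));
		return (msm_obj->madv == MSM_MADV_DONTNEED) && msm_obj->sgt &&
				!msm_obj->base.dma_buf && !msm_obj->base.import_attach;
	}

	static inline bool is_vunmapable(struct msm_gem_object *msm_obj)
	{
		return (msm_obj->vmap_count == 0) && msm_obj->vaddr;
	}
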
msm_obj            16 drivers/gpu/drm/msm/msm_gem_prime.c 	struct msm_gem_object *msm_obj = to_msm_bo(obj);
msm_obj            19 drivers/gpu/drm/msm/msm_gem_prime.c 	if (WARN_ON(!msm_obj->pages))  /* should have already pinned! */
msm_obj            22 drivers/gpu/drm/msm/msm_gem_prime.c 	return drm_prime_pages_to_sg(msm_obj->pages, npages);
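
The three msm_gem_prime.c hits are essentially the whole body of the driver's PRIME get_sg_table hook. A sketch with the elided pieces filled in; the function name and the npages computation (the obj->size >> PAGE_SHIFT idiom used throughout msm_gem.c) are reconstructions:

	struct sg_table *msm_gem_prime_get_sg_table(struct drm_gem_object *obj)
	{
		struct msm_gem_object *msm_obj = to_msm_bo(obj);
		int npages = obj->size >> PAGE_SHIFT;

		if (WARN_ON(!msm_obj->pages))  /* should have already pinned! */
			return NULL;

		return drm_prime_pages_to_sg(msm_obj->pages, npages);
	}
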
msm_obj            46 drivers/gpu/drm/msm/msm_gem_shrinker.c 	struct msm_gem_object *msm_obj;
msm_obj            53 drivers/gpu/drm/msm/msm_gem_shrinker.c 	list_for_each_entry(msm_obj, &priv->inactive_list, mm_list) {
msm_obj            54 drivers/gpu/drm/msm/msm_gem_shrinker.c 		if (is_purgeable(msm_obj))
msm_obj            55 drivers/gpu/drm/msm/msm_gem_shrinker.c 			count += msm_obj->base.size >> PAGE_SHIFT;
msm_obj            70 drivers/gpu/drm/msm/msm_gem_shrinker.c 	struct msm_gem_object *msm_obj;
msm_obj            77 drivers/gpu/drm/msm/msm_gem_shrinker.c 	list_for_each_entry(msm_obj, &priv->inactive_list, mm_list) {
msm_obj            80 drivers/gpu/drm/msm/msm_gem_shrinker.c 		if (is_purgeable(msm_obj)) {
msm_obj            81 drivers/gpu/drm/msm/msm_gem_shrinker.c 			msm_gem_purge(&msm_obj->base, OBJ_LOCK_SHRINKER);
msm_obj            82 drivers/gpu/drm/msm/msm_gem_shrinker.c 			freed += msm_obj->base.size >> PAGE_SHIFT;
msm_obj           101 drivers/gpu/drm/msm/msm_gem_shrinker.c 	struct msm_gem_object *msm_obj;
msm_obj           108 drivers/gpu/drm/msm/msm_gem_shrinker.c 	list_for_each_entry(msm_obj, &priv->inactive_list, mm_list) {
msm_obj           109 drivers/gpu/drm/msm/msm_gem_shrinker.c 		if (is_vunmapable(msm_obj)) {
msm_obj           110 drivers/gpu/drm/msm/msm_gem_shrinker.c 			msm_gem_vunmap(&msm_obj->base, OBJ_LOCK_SHRINKER);
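
The shrinker walks priv->inactive_list three times over: count_objects() sums purgeable pages, scan_objects() purges them, and a vmap shrink notifier reclaims idle kernel mappings via is_vunmapable(). A sketch of the scan path; the trylock helper (called msm_gem_shrinker_lock() here) and the surrounding locking details are assumptions:

	static unsigned long
	msm_gem_shrinker_scan(struct shrinker *shrinker, struct shrink_control *sc)
	{
		struct msm_drm_private *priv =
			container_of(shrinker, struct msm_drm_private, shrinker);
		struct msm_gem_object *msm_obj;
		unsigned long freed = 0;
		bool unlock;

		/* is_purgeable() asserts struct_mutex is held, so the list walk
		 * happens under a trylock to avoid deadlocking against reclaim
		 * triggered while struct_mutex is already held:
		 */
		if (!msm_gem_shrinker_lock(priv->dev, &unlock))
			return SHRINK_STOP;

		list_for_each_entry(msm_obj, &priv->inactive_list, mm_list) {
			if (freed >= sc->nr_to_scan)
				break;
			if (is_purgeable(msm_obj)) {
				msm_gem_purge(&msm_obj->base, OBJ_LOCK_SHRINKER);
				freed += msm_obj->base.size >> PAGE_SHIFT;
			}
		}

		if (unlock)
			mutex_unlock(&priv->dev->struct_mutex);

		return freed;
	}
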
msm_obj           115 drivers/gpu/drm/msm/msm_gem_submit.c 		struct msm_gem_object *msm_obj;
msm_obj           127 drivers/gpu/drm/msm/msm_gem_submit.c 		msm_obj = to_msm_bo(obj);
msm_obj           129 drivers/gpu/drm/msm/msm_gem_submit.c 		if (!list_empty(&msm_obj->submit_entry)) {
msm_obj           138 drivers/gpu/drm/msm/msm_gem_submit.c 		submit->bos[i].obj = msm_obj;
msm_obj           140 drivers/gpu/drm/msm/msm_gem_submit.c 		list_add_tail(&msm_obj->submit_entry, &submit->bo_list);
msm_obj           155 drivers/gpu/drm/msm/msm_gem_submit.c 	struct msm_gem_object *msm_obj = submit->bos[i].obj;
msm_obj           158 drivers/gpu/drm/msm/msm_gem_submit.c 		msm_gem_unpin_iova(&msm_obj->base, submit->aspace);
msm_obj           161 drivers/gpu/drm/msm/msm_gem_submit.c 		ww_mutex_unlock(&msm_obj->base.resv->lock);
msm_obj           176 drivers/gpu/drm/msm/msm_gem_submit.c 		struct msm_gem_object *msm_obj = submit->bos[i].obj;
msm_obj           184 drivers/gpu/drm/msm/msm_gem_submit.c 			ret = ww_mutex_lock_interruptible(&msm_obj->base.resv->lock,
msm_obj           204 drivers/gpu/drm/msm/msm_gem_submit.c 		struct msm_gem_object *msm_obj = submit->bos[contended].obj;
msm_obj           206 drivers/gpu/drm/msm/msm_gem_submit.c 		ret = ww_mutex_lock_slow_interruptible(&msm_obj->base.resv->lock,
msm_obj           223 drivers/gpu/drm/msm/msm_gem_submit.c 		struct msm_gem_object *msm_obj = submit->bos[i].obj;
msm_obj           232 drivers/gpu/drm/msm/msm_gem_submit.c 			ret = dma_resv_reserve_shared(msm_obj->base.resv,
msm_obj           241 drivers/gpu/drm/msm/msm_gem_submit.c 		ret = msm_gem_sync_object(&msm_obj->base, submit->ring->fctx,
msm_obj           257 drivers/gpu/drm/msm/msm_gem_submit.c 		struct msm_gem_object *msm_obj = submit->bos[i].obj;
msm_obj           261 drivers/gpu/drm/msm/msm_gem_submit.c 		ret = msm_gem_get_and_pin_iova(&msm_obj->base,
msm_obj           388 drivers/gpu/drm/msm/msm_gem_submit.c 		struct msm_gem_object *msm_obj = submit->bos[i].obj;
msm_obj           390 drivers/gpu/drm/msm/msm_gem_submit.c 		list_del_init(&msm_obj->submit_entry);
msm_obj           391 drivers/gpu/drm/msm/msm_gem_submit.c 		drm_gem_object_put(&msm_obj->base);
msm_obj           508 drivers/gpu/drm/msm/msm_gem_submit.c 		struct msm_gem_object *msm_obj;
msm_obj           530 drivers/gpu/drm/msm/msm_gem_submit.c 				&msm_obj, &iova, NULL);
msm_obj           543 drivers/gpu/drm/msm/msm_gem_submit.c 				msm_obj->base.size)) {
msm_obj           557 drivers/gpu/drm/msm/msm_gem_submit.c 		ret = submit_reloc(submit, msm_obj, submit_cmd.submit_offset,
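
The msm_gem_submit.c hits around lines 176-206 are the classic ww_mutex acquire/backoff dance over every BO in a submit. A sketch of that loop; the BO_LOCKED flag handling and the unwind helper's exact signature follow the fragments at lines 155-161 but are partly reconstructed:

	static int submit_lock_objects(struct msm_gem_submit *submit)
	{
		int contended, slow_locked = -1, i, ret = 0;

	retry:
		for (i = 0; i < submit->nr_bos; i++) {
			struct msm_gem_object *msm_obj = submit->bos[i].obj;

			if (slow_locked == i)
				slow_locked = -1;

			contended = i;

			if (!(submit->bos[i].flags & BO_LOCKED)) {
				ret = ww_mutex_lock_interruptible(&msm_obj->base.resv->lock,
						&submit->ticket);
				if (ret)
					goto fail;
				submit->bos[i].flags |= BO_LOCKED;
			}
		}

		ww_acquire_done(&submit->ticket);
		return 0;

	fail:
		/* unwind in reverse, unpinning and unlocking what we took: */
		for (; i >= 0; i--)
			submit_unlock_unpin_bo(submit, i, true);

		if (slow_locked > 0)
			submit_unlock_unpin_bo(submit, slow_locked, true);

		if (ret == -EDEADLK) {
			struct msm_gem_object *msm_obj = submit->bos[contended].obj;

			/* lost a ww-acquire race: take the contended lock in
			 * slow-path mode, then retry the whole list:
			 */
			ret = ww_mutex_lock_slow_interruptible(&msm_obj->base.resv->lock,
					&submit->ticket);
			if (!ret) {
				submit->bos[contended].flags |= BO_LOCKED;
				slow_locked = contended;
				goto retry;
			}
		}

		return ret;
	}
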
msm_obj           675 drivers/gpu/drm/msm/msm_gpu.c 		struct msm_gem_object *msm_obj = submit->bos[i].obj;
msm_obj           677 drivers/gpu/drm/msm/msm_gpu.c 		msm_gem_move_to_inactive(&msm_obj->base);
msm_obj           678 drivers/gpu/drm/msm/msm_gpu.c 		msm_gem_unpin_iova(&msm_obj->base, submit->aspace);
msm_obj           679 drivers/gpu/drm/msm/msm_gpu.c 		drm_gem_object_put(&msm_obj->base);
msm_obj           752 drivers/gpu/drm/msm/msm_gpu.c 		struct msm_gem_object *msm_obj = submit->bos[i].obj;
msm_obj           758 drivers/gpu/drm/msm/msm_gpu.c 		WARN_ON(is_active(msm_obj) && (msm_obj->gpu != gpu));
msm_obj           761 drivers/gpu/drm/msm/msm_gpu.c 		drm_gem_object_get(&msm_obj->base);
msm_obj           762 drivers/gpu/drm/msm/msm_gpu.c 		msm_gem_get_and_pin_iova(&msm_obj->base, submit->aspace, &iova);
msm_obj           765 drivers/gpu/drm/msm/msm_gpu.c 			msm_gem_move_to_active(&msm_obj->base, gpu, true, submit->fence);
msm_obj           767 drivers/gpu/drm/msm/msm_gpu.c 			msm_gem_move_to_active(&msm_obj->base, gpu, false, submit->fence);
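
Finally, the msm_gpu.c hits show the two halves of a BO's lifetime across a submit: msm_gpu_submit() takes a reference, pins the iova, and moves each BO to the GPU's active list, and retire_submit() undoes all three. A sketch stitched from the listed lines; the MSM_SUBMIT_BO_READ/WRITE flag tests are reconstructed from context:

	/* in msm_gpu_submit(): each BO is referenced and pinned for the
	 * lifetime of the submit, then moved to the gpu's active list:
	 */
	for (i = 0; i < submit->nr_bos; i++) {
		struct msm_gem_object *msm_obj = submit->bos[i].obj;
		uint64_t iova;

		/* cross-ring synchronization is not handled yet: */
		WARN_ON(is_active(msm_obj) && (msm_obj->gpu != gpu));

		/* submit takes a reference to the bo and iova until retired: */
		drm_gem_object_get(&msm_obj->base);
		msm_gem_get_and_pin_iova(&msm_obj->base, submit->aspace, &iova);

		if (submit->bos[i].flags & MSM_SUBMIT_BO_WRITE)
			msm_gem_move_to_active(&msm_obj->base, gpu, true, submit->fence);
		else if (submit->bos[i].flags & MSM_SUBMIT_BO_READ)
			msm_gem_move_to_active(&msm_obj->base, gpu, false, submit->fence);
	}

	/* ... and in retire_submit(), the mirror-image teardown: */
	for (i = 0; i < submit->nr_bos; i++) {
		struct msm_gem_object *msm_obj = submit->bos[i].obj;

		/* move to inactive, drop the iova pin, drop the submit ref: */
		msm_gem_move_to_inactive(&msm_obj->base);
		msm_gem_unpin_iova(&msm_obj->base, submit->aspace);
		drm_gem_object_put(&msm_obj->base);
	}
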