Lines matching refs:mm (cross-reference hits; the functions below all belong to the i915 userptr code, drivers/gpu/drm/i915/i915_gem_userptr.c, in an older kernel judging by the pre-4.6 get_user_pages() call form)

36 struct mm_struct *mm; member
81 was_interruptible = dev_priv->mm.interruptible; in __cancel_userptr__worker()
82 dev_priv->mm.interruptible = false; in __cancel_userptr__worker()
90 dev_priv->mm.interruptible = was_interruptible; in __cancel_userptr__worker()
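
The hits at lines 81-90 show __cancel_userptr__worker() saving dev_priv->mm.interruptible, forcing it to false around work that must not be aborted by a signal, and restoring the saved value afterwards. A minimal sketch of that save/restore pattern; the helper name is hypothetical and the wait/unbind in the middle is assumed, since those lines are not in the listing:

    /* Hypothetical helper illustrating the pattern at lines 81-90. */
    static void cancel_userptr_noninterruptible(struct drm_i915_private *dev_priv)
    {
        bool was_interruptible = dev_priv->mm.interruptible;

        /* Force waits to be non-interruptible for the duration. */
        dev_priv->mm.interruptible = false;

        /* ... wait for outstanding rendering and unbind the object (elided) ... */

        /* Restore whatever mode the caller had configured. */
        dev_priv->mm.interruptible = was_interruptible;
    }
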
118 struct mm_struct *mm, in i915_gem_userptr_mn_invalidate_range_start() argument
155 i915_mmu_notifier_create(struct mm_struct *mm) in i915_mmu_notifier_create() argument
171 ret = __mmu_notifier_register(&mn->mn, mm); in i915_mmu_notifier_create()
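
Lines 118-171 are the notifier side: i915_gem_userptr_mn_invalidate_range_start() is the callback the core MM invokes before it unmaps or migrates user pages, and i915_mmu_notifier_create() allocates a wrapper around struct mmu_notifier and registers it with __mmu_notifier_register(), the variant for callers that already hold mmap_sem for write. A sketch of that registration, assuming the pre-4.19 callback signature implied by the mm argument at line 118; the wrapper layout and the ops-table name are guesses, not the driver's actual definitions:

    #include <linux/mmu_notifier.h>
    #include <linux/slab.h>
    #include <linux/err.h>

    struct i915_mmu_notifier {
        struct mmu_notifier mn;
        /* per-mm tracking of userptr objects (tree, lock, ...) elided */
    };

    static void
    i915_gem_userptr_mn_invalidate_range_start(struct mmu_notifier *_mn,
                                               struct mm_struct *mm,
                                               unsigned long start,
                                               unsigned long end)
    {
        /* Cancel GPU access to any userptr object overlapping [start, end). */
    }

    static const struct mmu_notifier_ops i915_gem_userptr_notifier_ops = {
        .invalidate_range_start = i915_gem_userptr_mn_invalidate_range_start,
    };

    static struct i915_mmu_notifier *
    i915_mmu_notifier_create(struct mm_struct *mm)
    {
        struct i915_mmu_notifier *mn;
        int ret;

        mn = kmalloc(sizeof(*mn), GFP_KERNEL);
        if (mn == NULL)
            return ERR_PTR(-ENOMEM);

        mn->mn.ops = &i915_gem_userptr_notifier_ops;

        /* The caller holds mm->mmap_sem for write, see i915_mmu_notifier_find(). */
        ret = __mmu_notifier_register(&mn->mn, mm);
        if (ret) {
            kfree(mn);
            return ERR_PTR(ret);
        }

        return mn;
    }
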
275 i915_mmu_notifier_find(struct i915_mm_struct *mm) in i915_mmu_notifier_find() argument
277 struct i915_mmu_notifier *mn = mm->mn; in i915_mmu_notifier_find()
279 mn = mm->mn; in i915_mmu_notifier_find()
283 down_write(&mm->mm->mmap_sem); in i915_mmu_notifier_find()
284 mutex_lock(&to_i915(mm->dev)->mm_lock); in i915_mmu_notifier_find()
285 if ((mn = mm->mn) == NULL) { in i915_mmu_notifier_find()
286 mn = i915_mmu_notifier_create(mm->mm); in i915_mmu_notifier_find()
288 mm->mn = mn; in i915_mmu_notifier_find()
290 mutex_unlock(&to_i915(mm->dev)->mm_lock); in i915_mmu_notifier_find()
291 up_write(&mm->mm->mmap_sem); in i915_mmu_notifier_find()
307 if (WARN_ON(obj->userptr.mm == NULL)) in i915_gem_userptr_init__mmu_notifier()
310 mn = i915_mmu_notifier_find(obj->userptr.mm); in i915_gem_userptr_init__mmu_notifier()
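
Lines 275-291 show i915_mmu_notifier_find() creating that notifier lazily: a lockless peek at mm->mn, then a slow path that takes the task's mmap_sem for write plus the driver-wide mm_lock and re-checks before creating, so concurrent callers share a single notifier per mm. The hits at lines 307 and 310 are the caller, i915_gem_userptr_init__mmu_notifier(), which warns if the object has no userptr.mm and otherwise looks the notifier up. A reconstruction of the double-checked pattern from the listed lines; the repeated load at lines 277/279 is collapsed, and the IS_ERR handling around line 287 is assumed since it is not in the listing:

    static struct i915_mmu_notifier *
    i915_mmu_notifier_find(struct i915_mm_struct *mm)
    {
        struct i915_mmu_notifier *mn = mm->mn;

        if (mn)
            return mn;              /* fast path: already registered */

        /* __mmu_notifier_register() wants mmap_sem held for write, and
         * mm->mn is published under the driver's mm_lock. */
        down_write(&mm->mm->mmap_sem);
        mutex_lock(&to_i915(mm->dev)->mm_lock);
        if ((mn = mm->mn) == NULL) {
            mn = i915_mmu_notifier_create(mm->mm);
            if (!IS_ERR(mn))        /* assumed error handling */
                mm->mn = mn;
        }
        mutex_unlock(&to_i915(mm->dev)->mm_lock);
        up_write(&mm->mm->mmap_sem);

        return mn;
    }
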
336 struct mm_struct *mm) in i915_mmu_notifier_free() argument
341 mmu_notifier_unregister(&mn->mn, mm); in i915_mmu_notifier_free()
367 struct mm_struct *mm) in i915_mmu_notifier_free() argument
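
i915_mmu_notifier_free() appears twice, at lines 336 and 367, which is the usual shape of a CONFIG_MMU_NOTIFIER conditional: a real body that unregisters and frees when notifier support is built in, and an empty stub otherwise so callers need no #ifdefs. Presumed structure:

    #if defined(CONFIG_MMU_NOTIFIER)
    static void
    i915_mmu_notifier_free(struct i915_mmu_notifier *mn,
                           struct mm_struct *mm)
    {
        if (mn == NULL)
            return;

        mmu_notifier_unregister(&mn->mn, mm);
        kfree(mn);
    }
    #else
    static void
    i915_mmu_notifier_free(struct i915_mmu_notifier *mn,
                           struct mm_struct *mm)
    {
    }
    #endif
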
376 struct i915_mm_struct *mm; in __i915_mm_struct_find() local
379 hash_for_each_possible(dev_priv->mm_structs, mm, node, (unsigned long)real) in __i915_mm_struct_find()
380 if (mm->mm == real) in __i915_mm_struct_find()
381 return mm; in __i915_mm_struct_find()
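
Lines 376-381 are the lookup side of the per-process bookkeeping: i915_mm_struct entries sit in a driver hashtable keyed on the raw mm_struct pointer, and __i915_mm_struct_find() walks only the bucket that key hashes to, comparing the stored pointer. Reconstructed from the listed lines; the note about holding dev_priv->mm_lock is inferred from the other mm_lock hits (lines 284, 290, 448):

    #include <linux/hashtable.h>

    static struct i915_mm_struct *
    __i915_mm_struct_find(struct drm_i915_private *dev_priv, struct mm_struct *real)
    {
        struct i915_mm_struct *mm;

        /* Callers hold dev_priv->mm_lock, so the table cannot change under us. */
        hash_for_each_possible(dev_priv->mm_structs, mm, node, (unsigned long)real)
            if (mm->mm == real)
                return mm;

        return NULL;
    }
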
390 struct i915_mm_struct *mm; in i915_gem_userptr_init__mm_struct() local
404 mm = __i915_mm_struct_find(dev_priv, current->mm); in i915_gem_userptr_init__mm_struct()
405 if (mm == NULL) { in i915_gem_userptr_init__mm_struct()
406 mm = kmalloc(sizeof(*mm), GFP_KERNEL); in i915_gem_userptr_init__mm_struct()
407 if (mm == NULL) { in i915_gem_userptr_init__mm_struct()
412 kref_init(&mm->kref); in i915_gem_userptr_init__mm_struct()
413 mm->dev = obj->base.dev; in i915_gem_userptr_init__mm_struct()
415 mm->mm = current->mm; in i915_gem_userptr_init__mm_struct()
416 atomic_inc(&current->mm->mm_count); in i915_gem_userptr_init__mm_struct()
418 mm->mn = NULL; in i915_gem_userptr_init__mm_struct()
422 &mm->node, (unsigned long)mm->mm); in i915_gem_userptr_init__mm_struct()
424 kref_get(&mm->kref); in i915_gem_userptr_init__mm_struct()
426 obj->userptr.mm = mm; in i915_gem_userptr_init__mm_struct()
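
Lines 390-426 show the matching find-or-create in i915_gem_userptr_init__mm_struct(): either reuse an existing i915_mm_struct for current->mm (taking a kref at line 424) or allocate one, pin the mm_struct itself by bumping mm_count (the older spelling of what is now mmgrab()), and publish it in the hashtable keyed on the mm pointer. A reconstruction of the listed lines; the mm_lock locking around the lookup and the error path are not in the listing and are filled in as assumptions:

    static int
    i915_gem_userptr_init__mm_struct(struct drm_i915_gem_object *obj)
    {
        struct drm_i915_private *dev_priv = to_i915(obj->base.dev);
        struct i915_mm_struct *mm;
        int ret = 0;

        mutex_lock(&dev_priv->mm_lock);         /* assumed serialisation */
        mm = __i915_mm_struct_find(dev_priv, current->mm);
        if (mm == NULL) {
            mm = kmalloc(sizeof(*mm), GFP_KERNEL);
            if (mm == NULL) {
                ret = -ENOMEM;
                goto out;
            }

            kref_init(&mm->kref);
            mm->dev = obj->base.dev;

            /* Keep the mm_struct allocation alive (mm_count, not mm_users);
             * newer kernels spell this mmgrab(current->mm). */
            mm->mm = current->mm;
            atomic_inc(&current->mm->mm_count);

            mm->mn = NULL;          /* notifier is created lazily later */

            hash_add(dev_priv->mm_structs,
                     &mm->node, (unsigned long)mm->mm);
        } else {
            kref_get(&mm->kref);
        }

        obj->userptr.mm = mm;
    out:
        mutex_unlock(&dev_priv->mm_lock);
        return ret;
    }
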
435 struct i915_mm_struct *mm = container_of(work, typeof(*mm), work); in __i915_mm_struct_free__worker() local
436 i915_mmu_notifier_free(mm->mn, mm->mm); in __i915_mm_struct_free__worker()
437 mmdrop(mm->mm); in __i915_mm_struct_free__worker()
438 kfree(mm); in __i915_mm_struct_free__worker()
444 struct i915_mm_struct *mm = container_of(kref, typeof(*mm), kref); in __i915_mm_struct_free() local
447 hash_del(&mm->node); in __i915_mm_struct_free()
448 mutex_unlock(&to_i915(mm->dev)->mm_lock); in __i915_mm_struct_free()
450 INIT_WORK(&mm->work, __i915_mm_struct_free__worker); in __i915_mm_struct_free()
451 schedule_work(&mm->work); in __i915_mm_struct_free()
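
Lines 435-451 are the teardown. The kref release handler __i915_mm_struct_free() runs with mm_lock held (it is invoked through kref_put_mutex(), see the next group), unhashes the entry, drops the lock, and defers the rest to a workqueue, presumably because mmu_notifier_unregister() can sleep and should not run under the locks held on the release path. Reconstruction from the listed lines:

    static void
    __i915_mm_struct_free__worker(struct work_struct *work)
    {
        struct i915_mm_struct *mm = container_of(work, typeof(*mm), work);

        /* Process context, safe to sleep: unregister the notifier, drop the
         * mm_count reference taken at init, and free the wrapper. */
        i915_mmu_notifier_free(mm->mn, mm->mm);
        mmdrop(mm->mm);
        kfree(mm);
    }

    static void
    __i915_mm_struct_free(struct kref *kref)
    {
        struct i915_mm_struct *mm = container_of(kref, typeof(*mm), kref);

        /* Called with mm_lock held; the release callback is the one
         * responsible for dropping it (see kref_put_mutex() below). */
        hash_del(&mm->node);
        mutex_unlock(&to_i915(mm->dev)->mm_lock);

        INIT_WORK(&mm->work, __i915_mm_struct_free__worker);
        schedule_work(&mm->work);
    }
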
457 if (obj->userptr.mm == NULL) in i915_gem_userptr_release__mm_struct()
460 kref_put_mutex(&obj->userptr.mm->kref, in i915_gem_userptr_release__mm_struct()
463 obj->userptr.mm = NULL; in i915_gem_userptr_release__mm_struct()
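
The release path at lines 457-463 drops the object's reference with kref_put_mutex(), which takes the given mutex only when the count actually reaches zero and then calls the release function with that mutex held; that is why __i915_mm_struct_free() above ends in mutex_unlock(). The middle arguments of the call (lines 461-462) are not in the listing, so they are assumed here:

    static void
    i915_gem_userptr_release__mm_struct(struct drm_i915_gem_object *obj)
    {
        if (obj->userptr.mm == NULL)
            return;

        kref_put_mutex(&obj->userptr.mm->kref,
                       __i915_mm_struct_free,             /* assumed */
                       &to_i915(obj->base.dev)->mm_lock); /* assumed */
        obj->userptr.mm = NULL;
    }
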
583 struct mm_struct *mm = obj->userptr.mm->mm; in __i915_gem_userptr_get_pages_worker() local
585 down_read(&mm->mmap_sem); in __i915_gem_userptr_get_pages_worker()
587 ret = get_user_pages(work->task, mm, in __i915_gem_userptr_get_pages_worker()
597 up_read(&mm->mmap_sem); in __i915_gem_userptr_get_pages_worker()
606 &to_i915(dev)->mm.unbound_list); in __i915_gem_userptr_get_pages_worker()
717 if (obj->userptr.mm->mm == current->mm) { in i915_gem_userptr_get_pages()
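
Finally, lines 583-606 show the worker that actually pins the pages: it takes the target mm's mmap_sem for read and calls the old get_user_pages(task, mm, ...) form (modern kernels would use mmap_read_lock() with get_user_pages_remote() or pin_user_pages_remote()), and line 606 suggests the populated object is then placed on dev_priv->mm.unbound_list. Line 717 is the fast path in i915_gem_userptr_get_pages(): when the object's mm is current->mm, the pages can be pinned directly from the calling context without the task/mm plumbing. A sketch of the pinning loop under the pre-4.6 signature; the loop structure and the obj->userptr.ptr / obj->userptr.read_only fields are assumptions, only the call shape at line 587 comes from the listing:

    /* Hypothetical helper: pin num_pages of the userptr range into pvec. */
    static int
    userptr_pin_pages(struct drm_i915_gem_object *obj,
                      struct task_struct *task,
                      struct page **pvec, int num_pages)
    {
        struct mm_struct *mm = obj->userptr.mm->mm;
        int pinned = 0, ret = 0;

        down_read(&mm->mmap_sem);
        while (pinned < num_pages) {
            /* Pre-4.6 form: (tsk, mm, start, nr_pages, write, force, pages, vmas). */
            ret = get_user_pages(task, mm,
                                 obj->userptr.ptr + pinned * PAGE_SIZE,
                                 num_pages - pinned,
                                 !obj->userptr.read_only, 0,
                                 pvec + pinned, NULL);
            if (ret < 0)
                break;

            pinned += ret;
        }
        up_read(&mm->mmap_sem);

        return pinned ? pinned : ret;
    }
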