Lines matching refs:mm in mm/mmu_notifier.c (every reference to the mm argument), grouped by function:

In __mmu_notifier_release():
     56  void __mmu_notifier_release(struct mm_struct *mm)
     66      hlist_for_each_entry_rcu(mn, &mm->mmu_notifier_mm->list, hlist)
     74          mn->ops->release(mn, mm);
     76      spin_lock(&mm->mmu_notifier_mm->lock);
     77      while (unlikely(!hlist_empty(&mm->mmu_notifier_mm->list))) {
     78          mn = hlist_entry(mm->mmu_notifier_mm->list.first,
     89      spin_unlock(&mm->mmu_notifier_mm->lock);
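
Taken together, these hits trace the teardown path: when the address space dies, __mmu_notifier_release() walks the notifier list under RCU, invokes each registered ->release() callback with the dying mm, and then unhooks any remaining entries under mmu_notifier_mm->lock. On the subscriber side, a minimal sketch of a ->release() handler might look like this (my_ctx and mm_is_dead are illustrative names, not from the source):

    #include <linux/kernel.h>        /* container_of() */
    #include <linux/mmu_notifier.h>

    struct my_ctx {
            struct mmu_notifier mn;  /* embedded notifier, registered on an mm */
            bool mm_is_dead;         /* set once the mm is gone */
    };

    /* Invoked by __mmu_notifier_release() while the mm is being torn down;
     * after it returns, the secondary MMU must not touch this mm again. */
    static void my_release(struct mmu_notifier *mn, struct mm_struct *mm)
    {
            struct my_ctx *ctx = container_of(mn, struct my_ctx, mn);

            WRITE_ONCE(ctx->mm_is_dead, true);
    }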

In __mmu_notifier_clear_flush_young():
    109  int __mmu_notifier_clear_flush_young(struct mm_struct *mm,
    117      hlist_for_each_entry_rcu(mn, &mm->mmu_notifier_mm->list, hlist) {
    119          young |= mn->ops->clear_flush_young(mn, mm, start, end);

In __mmu_notifier_clear_young():
    126  int __mmu_notifier_clear_young(struct mm_struct *mm,
    134      hlist_for_each_entry_rcu(mn, &mm->mmu_notifier_mm->list, hlist) {
    136          young |= mn->ops->clear_young(mn, mm, start, end);

In __mmu_notifier_test_young():
    143  int __mmu_notifier_test_young(struct mm_struct *mm,
    150      hlist_for_each_entry_rcu(mn, &mm->mmu_notifier_mm->list, hlist) {
    152          young = mn->ops->test_young(mn, mm, address);
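
The three *_young hooks feed page aging in reclaim: clear_flush_young() clears the accessed state and flushes the secondary TLB, clear_young() clears it without flushing, and test_young() only queries it. Each returns nonzero when the range (or single page, for test_young) was referenced, which the callers above OR into young. A hedged subscriber sketch, where my_test_and_clear_accessed() and my_is_accessed() are invented helpers standing in for driver-specific page-table walks:

    static int my_clear_flush_young(struct mmu_notifier *mn, struct mm_struct *mm,
                                    unsigned long start, unsigned long end)
    {
            /* Clear accessed bits in the secondary MMU and flush its TLB. */
            return my_test_and_clear_accessed(mn, start, end, true);
    }

    static int my_clear_young(struct mmu_notifier *mn, struct mm_struct *mm,
                              unsigned long start, unsigned long end)
    {
            /* The caller tolerates a stale secondary TLB, so skip the flush. */
            return my_test_and_clear_accessed(mn, start, end, false);
    }

    static int my_test_young(struct mmu_notifier *mn, struct mm_struct *mm,
                             unsigned long address)
    {
            /* Report, without clearing, whether the page was referenced. */
            return my_is_accessed(mn, address);
    }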

In __mmu_notifier_change_pte():
    162  void __mmu_notifier_change_pte(struct mm_struct *mm, unsigned long address,
    169      hlist_for_each_entry_rcu(mn, &mm->mmu_notifier_mm->list, hlist) {
    171          mn->ops->change_pte(mn, mm, address, pte);

In __mmu_notifier_invalidate_page():
    176  void __mmu_notifier_invalidate_page(struct mm_struct *mm,
    183      hlist_for_each_entry_rcu(mn, &mm->mmu_notifier_mm->list, hlist) {
    185          mn->ops->invalidate_page(mn, mm, address);
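
change_pte() reports that a PTE now points at a different page, possibly with different permissions (KSM merging is the classic caller), and invalidate_page() reports a single-page invalidation; both hand the notifier one address. (The invalidate_page() callback was removed upstream in Linux 4.13, which dates this listing.) A sketch with invented my_update_spte()/my_zap_spte() helpers:

    static void my_change_pte(struct mmu_notifier *mn, struct mm_struct *mm,
                              unsigned long address, pte_t pte)
    {
            /* Rewrite the secondary mapping in place to match the new PTE. */
            my_update_spte(mn, address, pte);
    }

    static void my_invalidate_page(struct mmu_notifier *mn, struct mm_struct *mm,
                                   unsigned long address)
    {
            /* Drop the secondary mapping for this single page. */
            my_zap_spte(mn, address);
    }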

In __mmu_notifier_invalidate_range_start():
    190  void __mmu_notifier_invalidate_range_start(struct mm_struct *mm,
    197      hlist_for_each_entry_rcu(mn, &mm->mmu_notifier_mm->list, hlist) {
    199          mn->ops->invalidate_range_start(mn, mm, start, end);

In __mmu_notifier_invalidate_range_end():
    205  void __mmu_notifier_invalidate_range_end(struct mm_struct *mm,
    212      hlist_for_each_entry_rcu(mn, &mm->mmu_notifier_mm->list, hlist) {
    222          mn->ops->invalidate_range(mn, mm, start, end);
    224          mn->ops->invalidate_range_end(mn, mm, start, end);

In __mmu_notifier_invalidate_range():
    230  void __mmu_notifier_invalidate_range(struct mm_struct *mm,
    237      hlist_for_each_entry_rcu(mn, &mm->mmu_notifier_mm->list, hlist) {
    239          mn->ops->invalidate_range(mn, mm, start, end);
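
Note the asymmetry visible at lines 222/224: __mmu_notifier_invalidate_range_end() calls ->invalidate_range() before ->invalidate_range_end(), so a notifier that implements only the lighter ->invalidate_range() hook still observes every start/end bracket. On the primary-MMU side the contract is that every start is paired with an end around the actual page-table change; a caller-side sketch (my_zap_range() is invented, the two notifier calls are the real wrappers around the __ functions above):

    static void my_zap_range(struct mm_struct *mm,
                             unsigned long start, unsigned long end)
    {
            mmu_notifier_invalidate_range_start(mm, start, end);
            /* ... clear the primary page tables and flush the TLB ... */
            mmu_notifier_invalidate_range_end(mm, start, end);
    }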

In do_mmu_notifier_register():
    246      struct mm_struct *mm,
    252      BUG_ON(atomic_read(&mm->mm_users) <= 0);
    266      down_write(&mm->mmap_sem);
    267      ret = mm_take_all_locks(mm);
    271      if (!mm_has_notifiers(mm)) {
    275          mm->mmu_notifier_mm = mmu_notifier_mm;
    278      atomic_inc(&mm->mm_count);
    288      spin_lock(&mm->mmu_notifier_mm->lock);
    289      hlist_add_head(&mn->hlist, &mm->mmu_notifier_mm->list);
    290      spin_unlock(&mm->mmu_notifier_mm->lock);
    292      mm_drop_all_locks(mm);
    295      up_write(&mm->mmap_sem);
    298      BUG_ON(atomic_read(&mm->mm_users) <= 0);
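
Stitched into order, the registration hits trace this locking sequence; the following is a condensed sketch of the flow only (error handling and the branch that skips taking mmap_sem are elided):

    down_write(&mm->mmap_sem);        /* 266: exclude concurrent faults/unmaps */
    ret = mm_take_all_locks(mm);      /* 267: quiesce every VMA of the mm      */
    if (!mm_has_notifiers(mm))        /* 271: first notifier for this mm?      */
            mm->mmu_notifier_mm = mmu_notifier_mm;  /* 275: publish list head */
    atomic_inc(&mm->mm_count);        /* 278: pin the mm until unregister      */
    spin_lock(&mm->mmu_notifier_mm->lock);
    hlist_add_head(&mn->hlist, &mm->mmu_notifier_mm->list);  /* 289 */
    spin_unlock(&mm->mmu_notifier_mm->lock);
    mm_drop_all_locks(mm);            /* 292 */
    up_write(&mm->mmap_sem);          /* 295 */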

In mmu_notifier_register():
    315  int mmu_notifier_register(struct mmu_notifier *mn, struct mm_struct *mm)
    317      return do_mmu_notifier_register(mn, mm, 1);

In __mmu_notifier_register():
    325  int __mmu_notifier_register(struct mmu_notifier *mn, struct mm_struct *mm)
    327      return do_mmu_notifier_register(mn, mm, 0);
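
The two public entry points differ only in the flag passed to do_mmu_notifier_register(): mmu_notifier_register() (flag 1) acquires mmap_sem itself, while __mmu_notifier_register() (flag 0) is for callers that already hold it for write. A hypothetical registration path reusing the callbacks sketched above (my_ops and my_attach are invented):

    static const struct mmu_notifier_ops my_ops = {
            .release           = my_release,
            .clear_flush_young = my_clear_flush_young,
            .clear_young       = my_clear_young,
            .test_young        = my_test_young,
            .change_pte        = my_change_pte,
            .invalidate_page   = my_invalidate_page,
    };

    static int my_attach(struct my_ctx *ctx)
    {
            ctx->mn.ops = &my_ops;
            /* The caller must hold a reference on the mm (mm_users > 0),
             * as the BUG_ON()s at lines 252 and 298 assert. */
            return mmu_notifier_register(&ctx->mn, current->mm);
    }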

In __mmu_notifier_mm_destroy():
    332  void __mmu_notifier_mm_destroy(struct mm_struct *mm)
    334      BUG_ON(!hlist_empty(&mm->mmu_notifier_mm->list));
    335      kfree(mm->mmu_notifier_mm);
    336      mm->mmu_notifier_mm = LIST_POISON1; /* debug */
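
__mmu_notifier_mm_destroy() is the last step of the mm's own lifetime: once the mm_count pin taken at line 278 (and every other reference) is dropped, freeing the mm frees the notifier list head, and the LIST_POISON1 store makes any late dereference crash loudly. The overall lifetime, as a comment-only sketch consistent with the lines above:

    /*
     * mmu_notifier_register()     atomic_inc(&mm->mm_count): pins the mm
     * exit_mmap()                 __mmu_notifier_release(): ->release() runs
     * mmu_notifier_unregister()   mmdrop(mm): drops the registration pin
     * last mmdrop()               __mmu_notifier_mm_destroy(): kfree + poison
     */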

In mmu_notifier_unregister():
    349  void mmu_notifier_unregister(struct mmu_notifier *mn, struct mm_struct *mm)
    351      BUG_ON(atomic_read(&mm->mm_count) <= 0);
    366          mn->ops->release(mn, mm);
    369      spin_lock(&mm->mmu_notifier_mm->lock);
    375      spin_unlock(&mm->mmu_notifier_mm->lock);
    384      BUG_ON(atomic_read(&mm->mm_count) <= 0);
    386      mmdrop(mm);
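
mmu_notifier_unregister() is the live-mm counterpart to exit-time release: if the mm is still in use it invokes ->release() itself (line 366), unlinks the notifier under the spinlock, and finally mmdrop()s the mm_count reference taken at registration (line 386). A hypothetical detach helper (my_detach is invented):

    static void my_detach(struct my_ctx *ctx, struct mm_struct *mm)
    {
            /* After this returns, no callback on ctx->mn can still be
             * running, and the registration's pin on the mm is dropped. */
            mmu_notifier_unregister(&ctx->mn, mm);
    }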

In mmu_notifier_unregister_no_release():
    394      struct mm_struct *mm)
    396      spin_lock(&mm->mmu_notifier_mm->lock);
    402      spin_unlock(&mm->mmu_notifier_mm->lock);
    404      BUG_ON(atomic_read(&mm->mm_count) <= 0);
    405      mmdrop(mm);
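
mmu_notifier_unregister_no_release() is the stripped-down variant: it only unlinks the entry under the spinlock and mmdrop()s the mm, with no ->release() callback and no wait for in-flight readers, so it is safe only when the caller already guarantees its callbacks are quiesced. A hedged sketch (my_detach_quiesced is invented):

    static void my_detach_quiesced(struct my_ctx *ctx, struct mm_struct *mm)
    {
            /* Caller guarantees no notifier callback is running or will run;
             * unlike mmu_notifier_unregister() this neither calls ->release()
             * nor synchronizes with readers. */
            mmu_notifier_unregister_no_release(&ctx->mn, mm);
    }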