Lines matching refs: cache

125 static int mmu_topup_memory_cache(struct kvm_mmu_memory_cache *cache,  in mmu_topup_memory_cache()  argument
131 if (cache->nobjs >= min) in mmu_topup_memory_cache()
133 while (cache->nobjs < max) { in mmu_topup_memory_cache()
137 cache->objects[cache->nobjs++] = page; in mmu_topup_memory_cache()
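The lines above (125-137) are the cache top-up pattern: if the cache already holds at least min objects the call is a no-op, otherwise it is refilled up to max with freshly allocated pages, so the page-table code can later pull pages out of it without allocating (or sleeping) on the spot. A minimal sketch of that function, assuming a plain __get_free_page() backing allocation; the PGALLOC_GFP and KVM_NR_MEM_OBJS names are assumptions, while the nobjs/objects handling mirrors the listed lines.

/*
 * Sketch of the top-up loop shown above.  PGALLOC_GFP and KVM_NR_MEM_OBJS
 * are assumed names, not part of the listing.
 */
static int mmu_topup_memory_cache(struct kvm_mmu_memory_cache *cache,
                                  int min, int max)
{
        void *page;

        BUG_ON(max > KVM_NR_MEM_OBJS);
        if (cache->nobjs >= min)        /* already enough pre-allocated objects */
                return 0;
        while (cache->nobjs < max) {    /* refill up to the high watermark */
                page = (void *)__get_free_page(PGALLOC_GFP);
                if (!page)
                        return -ENOMEM;
                cache->objects[cache->nobjs++] = page;
        }
        return 0;
}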
833 static pud_t *stage2_get_pud(struct kvm *kvm, struct kvm_mmu_memory_cache *cache, in stage2_get_pud() argument
841 if (!cache) in stage2_get_pud()
843 pud = mmu_memory_cache_alloc(cache); in stage2_get_pud()
851 static pmd_t *stage2_get_pmd(struct kvm *kvm, struct kvm_mmu_memory_cache *cache, in stage2_get_pmd() argument
857 pud = stage2_get_pud(kvm, cache, addr); in stage2_get_pmd()
859 if (!cache) in stage2_get_pmd()
861 pmd = mmu_memory_cache_alloc(cache); in stage2_get_pmd()
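Lines 833-861 show the stage-2 walk allocating missing intermediate tables out of that cache: when a level is empty and no cache was handed in, the walker returns NULL instead of allocating, so callers that must not allocate simply pass a NULL cache. A minimal sketch of stage2_get_pmd() along those lines; pud_none(), pud_populate(), pmd_offset() and get_page() are assumed helper choices, only the !cache bail-out and the mmu_memory_cache_alloc() call come from the listing.

/*
 * Sketch of the cache-backed walk shown above.  The helpers used for the
 * table install are assumptions; the cache handling mirrors lines 841-843
 * and 859-861.
 */
static pmd_t *stage2_get_pmd(struct kvm *kvm, struct kvm_mmu_memory_cache *cache,
                             phys_addr_t addr)
{
        pud_t *pud;
        pmd_t *pmd;

        pud = stage2_get_pud(kvm, cache, addr); /* may itself allocate from the cache */
        if (pud_none(*pud)) {
                if (!cache)                     /* lookup-only caller: never allocate */
                        return NULL;
                pmd = mmu_memory_cache_alloc(cache);
                pud_populate(NULL, pud, pmd);   /* install the new PMD table */
                get_page(virt_to_page(pud));    /* account the entry on the PUD table page */
        }

        return pmd_offset(pud, addr);
}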
870 *cache, phys_addr_t addr, const pmd_t *new_pmd) in stage2_set_pmd_huge()
874 pmd = stage2_get_pmd(kvm, cache, addr); in stage2_set_pmd_huge()
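Line 874 shows the huge-mapping path reusing the same walk; since its caller tops the cache up beforehand, the walk is not expected to fail here. A rough sketch of how the new block entry would then be installed, assuming the usual pmd helpers (pmd_present(), pmd_clear(), kvm_set_pmd()) and a per-IPA TLB flush; only the stage2_get_pmd(kvm, cache, addr) call is taken from the listing.

/*
 * Rough sketch: everything past the stage2_get_pmd() call is an assumption
 * about how the new huge PMD gets installed.
 */
static int stage2_set_pmd_huge(struct kvm *kvm, struct kvm_mmu_memory_cache
                               *cache, phys_addr_t addr, const pmd_t *new_pmd)
{
        pmd_t *pmd, old_pmd;

        pmd = stage2_get_pmd(kvm, cache, addr);
        VM_BUG_ON(!pmd);                        /* cache was topped up, the walk cannot fail */

        old_pmd = *pmd;
        if (pmd_present(old_pmd)) {
                pmd_clear(pmd);                 /* replacing an existing mapping */
                kvm_tlb_flush_vmid_ipa(kvm, addr);
        } else {
                get_page(virt_to_page(pmd));    /* first entry on this table page */
        }
        kvm_set_pmd(pmd, *new_pmd);
        return 0;
}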
900 static int stage2_set_pte(struct kvm *kvm, struct kvm_mmu_memory_cache *cache, in stage2_set_pte() argument
909 VM_BUG_ON(logging_active && !cache); in stage2_set_pte()
912 pmd = stage2_get_pmd(kvm, cache, addr); in stage2_set_pte()
930 if (!cache) in stage2_set_pte()
932 pte = mmu_memory_cache_alloc(cache); in stage2_set_pte()
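Lines 900-932 apply the same rule at the PTE level: the VM_BUG_ON(logging_active && !cache) on line 909 documents that dirty-logging faults must always arrive with a topped-up cache, and the leaf table on line 932 is only allocated when a cache is present. A condensed sketch of that cache handling; the flag and helper names (KVM_S2_FLAG_LOGGING_ACTIVE, pmd_none(), pmd_populate_kernel(), pte_offset_kernel(), kvm_set_pte()) are assumptions, and the huge-page handling is elided.

/*
 * Condensed sketch of the PTE-level cache handling; flag and helper names
 * are assumptions, the cache checks mirror lines 909-932.
 */
static int stage2_set_pte(struct kvm *kvm, struct kvm_mmu_memory_cache *cache,
                          phys_addr_t addr, const pte_t *new_pte,
                          unsigned long flags)
{
        pmd_t *pmd;
        pte_t *pte;
        bool logging_active = flags & KVM_S2_FLAG_LOGGING_ACTIVE;

        VM_BUG_ON(logging_active && !cache);    /* logging faults must carry a cache */

        pmd = stage2_get_pmd(kvm, cache, addr);
        if (!pmd)                               /* unallocated range and no cache: nothing to do */
                return 0;

        if (pmd_none(*pmd)) {
                if (!cache)                     /* again, never allocate without a cache */
                        return 0;
                pte = mmu_memory_cache_alloc(cache);
                pmd_populate_kernel(NULL, pmd, pte);
                get_page(virt_to_page(pmd));
        }

        pte = pte_offset_kernel(pmd, addr);
        if (pte_present(*pte)) {
                kvm_set_pte(pte, __pte(0));     /* clear, then flush the stale translation */
                kvm_tlb_flush_vmid_ipa(kvm, addr);
        } else {
                get_page(virt_to_page(pte));
        }
        kvm_set_pte(pte, *new_pte);
        return 0;
}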
970 struct kvm_mmu_memory_cache cache = { 0, }; in kvm_phys_addr_ioremap() local
981 ret = mmu_topup_memory_cache(&cache, KVM_MMU_CACHE_MIN_PAGES, in kvm_phys_addr_ioremap()
986 ret = stage2_set_pte(kvm, &cache, addr, &pte, in kvm_phys_addr_ioremap()
996 mmu_free_memory_cache(&cache); in kvm_phys_addr_ioremap()
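Lines 970-996 show the caller-side pattern end to end: an empty cache is declared on the stack, topped up to KVM_MMU_CACHE_MIN_PAGES before every stage2_set_pte() call, and whatever is left unused is freed at the end. Topping up outside the mmu_lock and only consuming inside it is presumably what lets the mapping path run under a spinlock without sleeping allocations. A sketch of that loop reconstructed around the listed lines; the pte construction, the locking, and names such as KVM_S2PTE_FLAG_IS_IOMAP, KVM_NR_MEM_OBJS and PAGE_S2_DEVICE are assumptions filled in from context.

/*
 * Caller-side sketch built around lines 970-996: top up, map one page under
 * the lock, repeat, then free the leftovers.  Everything not on those lines
 * is assumed.
 */
int kvm_phys_addr_ioremap(struct kvm *kvm, phys_addr_t guest_ipa,
                          phys_addr_t pa, unsigned long size, bool writable)
{
        phys_addr_t addr, end;
        int ret = 0;
        unsigned long pfn;
        struct kvm_mmu_memory_cache cache = { 0, };     /* starts empty */

        end = (guest_ipa + size + PAGE_SIZE - 1) & PAGE_MASK;
        pfn = __phys_to_pfn(pa);

        for (addr = guest_ipa; addr < end; addr += PAGE_SIZE) {
                pte_t pte = pfn_pte(pfn, PAGE_S2_DEVICE);

                if (writable)
                        pte = kvm_s2pte_mkwrite(pte);

                /* refill outside the lock so the allocation may sleep */
                ret = mmu_topup_memory_cache(&cache, KVM_MMU_CACHE_MIN_PAGES,
                                             KVM_NR_MEM_OBJS);
                if (ret)
                        goto out;
                spin_lock(&kvm->mmu_lock);
                ret = stage2_set_pte(kvm, &cache, addr, &pte,
                                     KVM_S2PTE_FLAG_IS_IOMAP);
                spin_unlock(&kvm->mmu_lock);
                if (ret)
                        goto out;

                pfn++;
        }

out:
        mmu_free_memory_cache(&cache);
        return ret;
}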