Searched refs:old_spte (Results 1 - 2 of 2) sorted by relevance
/linux-4.4.14/arch/x86/kvm/
mmutrace.h:
    250   u64 *sptep, u64 old_spte, bool retry),
    251   TP_ARGS(vcpu, gva, error_code, sptep, old_spte, retry),
    258   __field(u64, old_spte)
    268   __entry->old_spte = old_spte;
    277   __entry->old_spte, __entry->new_spte,
    278   __spte_satisfied(old_spte), __spte_satisfied(new_spte)
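For orientation, the mmutrace.h matches come from a KVM MMU trace event on the fast page fault path that snapshots the SPTE before and after the update. The sketch below only mirrors the fields visible above (old_spte, new_spte, retry) in a plain user-space struct to illustrate the assignment pattern __entry->old_spte = old_spte; the real event is generated by the TRACE_EVENT/TP_STRUCT__entry machinery, and the struct and helper names here are hypothetical.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical mirror of the trace-entry fields visible in the excerpt.
 * The kernel builds the real layout via TP_STRUCT__entry/__field(). */
struct fast_pf_trace_entry {
        uint64_t old_spte;
        uint64_t new_spte;
        bool     retry;
};

/* Mirrors the TP_fast_assign pattern shown above:
 * __entry->old_spte = old_spte; and so on. */
static void record_fast_pf(struct fast_pf_trace_entry *e,
                           uint64_t old_spte, uint64_t new_spte, bool retry)
{
        e->old_spte = old_spte;
        e->new_spte = new_spte;
        e->retry    = retry;
}

int main(void)
{
        struct fast_pf_trace_entry e;

        record_fast_pf(&e, 0x25ULL, 0x67ULL, true);
        printf("old_spte=%#llx new_spte=%#llx retry=%d\n",
               (unsigned long long)e.old_spte,
               (unsigned long long)e.new_spte, e.retry);
        return 0;
}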
mmu.c:
    507   static bool spte_is_bit_cleared(u64 old_spte, u64 new_spte, u64 bit_mask)   [spte_is_bit_cleared() argument]
    509   return (old_spte & bit_mask) && !(new_spte & bit_mask);                     [spte_is_bit_cleared()]
    512   static bool spte_is_bit_changed(u64 old_spte, u64 new_spte, u64 bit_mask)   [spte_is_bit_changed() argument]
    514   return (old_spte & bit_mask) != (new_spte & bit_mask);                      [spte_is_bit_changed()]
    540   u64 old_spte = *sptep;                                                      [mmu_spte_update() local]
    545   if (!is_shadow_present_pte(old_spte)) {                                     [mmu_spte_update()]
    550   if (!spte_has_volatile_bits(old_spte))                                      [mmu_spte_update()]
    553   old_spte = __update_clear_spte_slow(sptep, new_spte);                       [mmu_spte_update()]
    560   if (spte_is_locklessly_modifiable(old_spte) &&                              [mmu_spte_update()]
    571   if (spte_is_bit_changed(old_spte, new_spte,                                 [mmu_spte_update()]
    575   if (spte_is_bit_cleared(old_spte, new_spte, shadow_accessed_mask))          [mmu_spte_update()]
    576   kvm_set_pfn_accessed(spte_to_pfn(old_spte));                                [mmu_spte_update()]
    577   if (spte_is_bit_cleared(old_spte, new_spte, shadow_dirty_mask))             [mmu_spte_update()]
    578   kvm_set_pfn_dirty(spte_to_pfn(old_spte));                                   [mmu_spte_update()]
    591   u64 old_spte = *sptep;                                                      [mmu_spte_clear_track_bits() local]
    593   if (!spte_has_volatile_bits(old_spte))                                      [mmu_spte_clear_track_bits()]
    596   old_spte = __update_clear_spte_slow(sptep, 0ull);                           [mmu_spte_clear_track_bits()]
    598   if (!is_rmap_spte(old_spte))                                                [mmu_spte_clear_track_bits()]
    601   pfn = spte_to_pfn(old_spte);                                                [mmu_spte_clear_track_bits()]
    610   if (!shadow_accessed_mask || old_spte & shadow_accessed_mask)               [mmu_spte_clear_track_bits()]
    612   if (!shadow_dirty_mask || (old_spte & shadow_dirty_mask))                   [mmu_spte_clear_track_bits()]
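The mmu.c matches center on two small predicates, spte_is_bit_cleared() and spte_is_bit_changed(), which mmu_spte_update() and mmu_spte_clear_track_bits() use to decide whether an update dropped the accessed or dirty bit of the old SPTE. Below is a minimal, self-contained user-space sketch that reproduces those two helpers as shown in the excerpt and exercises them; the mask values are illustrative placeholders, not the kernel's actual shadow_accessed_mask/shadow_dirty_mask, which are configured by the kernel and differ between EPT and shadow paging.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Placeholder masks for the demo only; the real shadow_accessed_mask and
 * shadow_dirty_mask are set up by the kernel at initialization. */
#define ACCESSED_MASK (1ULL << 5)
#define DIRTY_MASK    (1ULL << 6)

/* Same logic as the mmu.c helper: the bit was set in the old SPTE
 * and is no longer set in the new one. */
static bool spte_is_bit_cleared(uint64_t old_spte, uint64_t new_spte,
                                uint64_t bit_mask)
{
        return (old_spte & bit_mask) && !(new_spte & bit_mask);
}

/* Same logic as the mmu.c helper: the bit's value differs between
 * the old and new SPTE. */
static bool spte_is_bit_changed(uint64_t old_spte, uint64_t new_spte,
                                uint64_t bit_mask)
{
        return (old_spte & bit_mask) != (new_spte & bit_mask);
}

int main(void)
{
        uint64_t old_spte = ACCESSED_MASK | DIRTY_MASK;
        uint64_t new_spte = DIRTY_MASK;   /* accessed bit dropped, dirty kept */

        /* Cleared: accessed went 1 -> 0, dirty stayed 1. */
        printf("accessed cleared: %d\n",
               spte_is_bit_cleared(old_spte, new_spte, ACCESSED_MASK)); /* 1 */
        printf("dirty cleared:    %d\n",
               spte_is_bit_cleared(old_spte, new_spte, DIRTY_MASK));    /* 0 */

        /* Changed: accessed differs between old and new, dirty does not. */
        printf("accessed changed: %d\n",
               spte_is_bit_changed(old_spte, new_spte, ACCESSED_MASK)); /* 1 */
        printf("dirty changed:    %d\n",
               spte_is_bit_changed(old_spte, new_spte, DIRTY_MASK));    /* 0 */
        return 0;
}

This is the same pattern mmu_spte_update() follows at lines 575-578: when the accessed or dirty bit is cleared by an update, it propagates that state to the backing page via kvm_set_pfn_accessed()/kvm_set_pfn_dirty().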
Completed in 105 milliseconds