#ifndef __KVM_X86_MMU_H
#define __KVM_X86_MMU_H

#include "kvm.h"

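/*
 * Reclaim shadow pages when the VM's pool of free MMU pages drops below
 * the KVM_MIN_FREE_MMU_PAGES watermark.
 */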
static inline void kvm_mmu_free_some_pages(struct kvm_vcpu *vcpu)
{
	if (unlikely(vcpu->kvm->n_free_mmu_pages < KVM_MIN_FREE_MMU_PAGES))
		__kvm_mmu_free_some_pages(vcpu);
}

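/*
 * Load a new shadow root if the current one is invalid; cheap no-op
 * while arch.mmu.root_hpa still points at a valid root.
 */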
static inline int kvm_mmu_reload(struct kvm_vcpu *vcpu)
{
	if (likely(vcpu->arch.mmu.root_hpa != INVALID_PAGE))
		return 0;

	return kvm_mmu_load(vcpu);
}

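/* Guest has enabled long mode (EFER.LME); always 0 without CONFIG_X86_64. */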
static inline int is_long_mode(struct kvm_vcpu *vcpu)
{
#ifdef CONFIG_X86_64
	return vcpu->arch.shadow_efer & EFER_LME;
#else
	return 0;
#endif
}

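/* Guest has Physical Address Extension enabled (CR4.PAE). */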
static inline int is_pae(struct kvm_vcpu *vcpu)
{
	return vcpu->arch.cr4 & X86_CR4_PAE;
}

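/* Guest has Page Size Extensions enabled (CR4.PSE). */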
static inline int is_pse(struct kvm_vcpu *vcpu)
{
	return vcpu->arch.cr4 & X86_CR4_PSE;
}

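/* Guest paging is enabled (CR0.PG). */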
static inline int is_paging(struct kvm_vcpu *vcpu)
{
	return vcpu->arch.cr0 & X86_CR0_PG;
}

#endif