return rc;
}
+#ifdef __x86_64__
static int hvm_memory_event_traps(long p, uint32_t reason,
unsigned long value, unsigned long old,
bool_t gla_valid, unsigned long gla)
MEM_EVENT_REASON_INT3,
gfn, 0, 1, gla);
}
+#endif /* __x86_64__ */
/*
* Local variables:
__trace_var(TRC_HVM_NPF, 0, sizeof(_d), &_d);
}
- if ( hvm_hap_nested_page_fault(gpa, 0, ~0ull, 0, 0, 0, 0) )
+ if ( hvm_hap_nested_page_fault(gpa, 0, ~0ul, 0, 0, 0, 0) )
return;
/* Everything else is an error. */
/* Unpause any domains that were paused because the ring was full */
mem_event_unpause_vcpus(d);
}
-#endif /* __x86_64__ */
void p2m_mem_access_check(unsigned long gpa, bool_t gla_valid, unsigned long gla,
bool_t access_r, bool_t access_w, bool_t access_x)
* was available */
mem_event_unpause_vcpus(d);
}
+#endif /* __x86_64__ */
/*
* Local variables:
int hvm_x2apic_msr_read(struct vcpu *v, unsigned int msr, uint64_t *msr_content);
int hvm_x2apic_msr_write(struct vcpu *v, unsigned int msr, uint64_t msr_content);
+#ifdef __x86_64__
/* Called for current VCPU on crX changes by guest */
void hvm_memory_event_cr0(unsigned long value, unsigned long old);
void hvm_memory_event_cr3(unsigned long value, unsigned long old);
/* Called for current VCPU on int3: returns -1 if no listener */
int hvm_memory_event_int3(unsigned long gla);
+#else
+/* Fallback stubs for non-__x86_64__ builds (the #else arm of the
+ * __x86_64__ guard added by this patch): memory-event notification is
+ * x86_64-only, so these compile to nothing on other targets. */
+static inline void hvm_memory_event_cr0(unsigned long value, unsigned long old)
+{ }
+static inline void hvm_memory_event_cr3(unsigned long value, unsigned long old)
+{ }
+static inline void hvm_memory_event_cr4(unsigned long value, unsigned long old)
+{ }
+/* NOTE(review): the real hvm_memory_event_int3() is documented above as
+ * returning -1 when there is no listener; this stub returns 0 instead —
+ * presumably "continue normal int3 handling". Confirm callers treat 0
+ * (not -1) as the no-op path. */
+static inline int hvm_memory_event_int3(unsigned long gla)
+{ return 0; }
+#endif
#endif /* __ASM_X86_HVM_HVM_H__ */
{ }
#endif
+#ifdef __x86_64__
/* Send mem event based on the access (gla is -1ull if not available). Handles
* the rw2rx conversion */
void p2m_mem_access_check(unsigned long gpa, bool_t gla_valid, unsigned long gla,
bool_t access_r, bool_t access_w, bool_t access_x);
/* Resumes the running of the VCPU, restarting the last instruction */
void p2m_mem_access_resume(struct p2m_domain *p2m);
+#else
+/* Fallback stub for non-__x86_64__ builds: mem_access checking is only
+ * implemented on x86_64, so this no-op lets common callers compile
+ * unconditionally on other targets. */
+static inline void p2m_mem_access_check(unsigned long gpa, bool_t gla_valid,
+ unsigned long gla, bool_t access_r,
+ bool_t access_w, bool_t access_x)
+{ }
+#endif
struct page_info *p2m_alloc_ptp(struct p2m_domain *p2m, unsigned long type);