return tsc;
}
/*
 * NOTE(review): this span is a unified-diff fragment, not plain C — the
 * '-'/'+' prefixed lines are patch markers (the function is being made
 * static, its prototype dropped from the header).  The interior that
 * should derive delta_tsc from guest_tsc/at_tsc and update
 * cache_tsc_offset appears lost in extraction: 'tsc' and 'delta_tsc'
 * are declared but never used before set_tsc_offset is called — TODO
 * recover the elided lines from the upstream hunk.
 */
-void hvm_set_guest_tsc_fixed(struct vcpu *v, u64 guest_tsc, u64 at_tsc)
+static void hvm_set_guest_tsc_fixed(struct vcpu *v, u64 guest_tsc, u64 at_tsc)
{
uint64_t tsc;
uint64_t delta_tsc;
/* Pushes the (cached) offset to hardware at reference time at_tsc. */
hvm_funcs.set_tsc_offset(v, v->arch.hvm_vcpu.cache_tsc_offset, at_tsc);
}
-void hvm_set_guest_tsc_adjust(struct vcpu *v, u64 tsc_adjust)
+#define hvm_set_guest_tsc(v, t) hvm_set_guest_tsc_fixed(v, t, 0)
+
+static void hvm_set_guest_tsc_adjust(struct vcpu *v, u64 tsc_adjust)
{
v->arch.hvm_vcpu.cache_tsc_offset += tsc_adjust
- v->arch.hvm_vcpu.msr_tsc_adjust;
return tsc + v->arch.hvm_vcpu.cache_tsc_offset;
}
/*
 * NOTE(review): unified-diff deletion — hvm_get_guest_tsc_adjust() is
 * removed by this patch; its caller in the MSR-read intercept now reads
 * v->arch.hvm_vcpu.msr_tsc_adjust directly (see the MSR_IA32_TSC_ADJUST
 * hunk elsewhere in this patch).
 */
-u64 hvm_get_guest_tsc_adjust(struct vcpu *v)
-{
-    return v->arch.hvm_vcpu.msr_tsc_adjust;
-}
-
void hvm_migrate_timers(struct vcpu *v)
{
/* PVH doesn't use rtc and emulated timers, it uses pvclock mechanism. */
break;
case MSR_IA32_TSC_ADJUST:
- *msr_content = hvm_get_guest_tsc_adjust(v);
+ *msr_content = v->arch.hvm_vcpu.msr_tsc_adjust;
break;
case MSR_TSC_AUX:
void hvm_get_guest_pat(struct vcpu *v, u64 *guest_pat);
int hvm_set_guest_pat(struct vcpu *v, u64 guest_pat);
/*
 * NOTE(review): diff deletion in the public header — the set_tsc
 * prototype and its convenience macro are dropped here because the
 * function becomes static in hvm.c and the macro moves there with it;
 * the get_tsc declarations below are unchanged.
 */
-void hvm_set_guest_tsc_fixed(struct vcpu *v, u64 guest_tsc, u64 at_tsc);
-#define hvm_set_guest_tsc(v, t) hvm_set_guest_tsc_fixed(v, t, 0)
u64 hvm_get_guest_tsc_fixed(struct vcpu *v, u64 at_tsc);
#define hvm_get_guest_tsc(v) hvm_get_guest_tsc_fixed(v, 0)