#define QHZ_FROM_HZ(_hz) (((_hz) << 10)/ 1000000000)
#define ADDR_SPACE_BITS 48
-#define DEFAULT_SAMPLE_SIZE 10240
+#define DEFAULT_SAMPLE_SIZE 1024
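+/* Hypothetical clarifying comment: cap on each summary's sample buffer,
+ * 32M entries (about 256MiB of long long samples at the default). */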
+#define DEFAULT_SAMPLE_MAX (1024*1024*32)
#define DEFAULT_INTERVAL_LENGTH 1000
struct array_struct {
unsigned long long histogram_interrupt_increment;
int interrupt_eip_enumeration_vector;
int default_guest_paging_levels;
- int sample_size;
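+    /* sample_size: initial buffer size (0 disables sampling);
+     * sample_max: cap on buffer growth (0 means no limit) */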
+ int sample_size, sample_max;
enum error_level tolerance; /* Tolerate up to this level of error */
struct {
tsc_t cycles;
.cpu_qhz = QHZ_FROM_HZ(DEFAULT_CPU_HZ),
.default_guest_paging_levels = 2,
.sample_size = DEFAULT_SAMPLE_SIZE,
+ .sample_max = DEFAULT_SAMPLE_MAX,
.tolerance = ERR_SANITY,
.interval = { .msec = DEFAULT_INTERVAL_LENGTH },
};
};
struct cycle_summary {
- int event_count, count;
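+    /* sample_size: currently allocated length of sample[], grown on demand */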
+ int event_count, count, sample_size;
unsigned long long cycles;
long long *sample;
struct interval_element interval;
};
static inline void update_cycles(struct cycle_summary *s, long long c) {
-/* We don't know ahead of time how many samples there are, and working
- * with dynamic stuff is a pain, and unnecessary. This algorithm will
- * generate a sample set that approximates an even sample. We can
- * then take the percentiles on this, and get an approximate value. */
s->event_count++;
if (!c)
return;
if(opt.sample_size) {
- int lap = (s->count/opt.sample_size)+1,
- index =s->count % opt.sample_size;
- if((index - (lap/3))%lap == 0) {
- if(!s->sample) {
- s->sample = malloc(sizeof(*s->sample) * opt.sample_size);
- if(!s->sample) {
- fprintf(stderr, "%s: malloc failed!\n", __func__);
- error(ERR_SYSTEM, NULL);
- }
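+        /*
+         * Allocate the sample buffer on first use and double it whenever
+         * it fills up, up to opt.sample_max entries (0 means no limit).
+         */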
+ if (s->count >= s->sample_size
+ && (s->count == 0
+ || opt.sample_max == 0
+ || s->sample_size < opt.sample_max)) {
+ int new_size;
+ void * new_sample = NULL;
+
+ new_size = s->sample_size << 1;
+
+ if (new_size == 0)
+ new_size = opt.sample_size;
+
+ if (opt.sample_max != 0 && new_size > opt.sample_max)
+ new_size = opt.sample_max;
+
+ new_sample = realloc(s->sample, sizeof(*s->sample) * new_size);
+
+            if (new_sample) {
+                s->sample = new_sample;
+                s->sample_size = new_size;
+            } else if (!s->sample) {
+                /* The very first allocation must succeed; otherwise the
+                 * sampling code below would divide by a zero sample_size. */
+                fprintf(stderr, "%s: realloc failed!\n", __func__);
+                error(ERR_SYSTEM, NULL);
}
- s->sample[index]=c;
}
+
+ if (s->count < s->sample_size) {
+ s->sample[s->count]=c;
+ } else {
+ /*
+ * If we run out of space for samples, start taking only a
+ * subset of samples.
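+             * On the lap-th pass over the buffer, only one in every lap
+             * incoming values is stored (at an offset that shifts with
+             * lap), so the kept samples approximate an even sample of
+             * the whole run.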
+ */
+ int lap, index;
+ lap = (s->count/s->sample_size)+1;
+ index =s->count % s->sample_size;
+ if((index - (lap/3))%lap == 0) {
+ s->sample[index]=c;
+ }
+ }
}
s->count++;
s->cycles += c;
if ( opt.sample_size ) {
long long p5, p50, p95;
int data_size = s->count;
- if(data_size > opt.sample_size)
- data_size = opt.sample_size;
+ if(data_size > s->sample_size)
+ data_size = s->sample_size;
p50 = percentile(s->sample, data_size, 50);
p5 = percentile(s->sample, data_size, 5);
long long p5, p50, p95;
int data_size = s->count;
- if(data_size > opt.sample_size)
- data_size = opt.sample_size;
+ if(data_size > s->sample_size)
+ data_size = s->sample_size;
p50 = self_weighted_percentile(s->sample, data_size, 50);
p5 = self_weighted_percentile(s->sample, data_size, 5);
long long p5, p50, p95;
int data_size = s->count;
- if(data_size > opt.sample_size)
- data_size = opt.sample_size;
+ if(data_size > s->sample_size)
+ data_size = s->sample_size;
p50 = self_weighted_percentile(s->sample, data_size, 50);
p5 = self_weighted_percentile(s->sample, data_size, 5);
if ( opt.sample_size ) { \
unsigned long long p5, p50, p95; \
int data_size=(_s).count; \
- if(data_size > opt.sample_size) \
- data_size=opt.sample_size; \
+ if(data_size > (_s).sample_size) \
+ data_size=(_s).sample_size; \
p50=percentile((_s).sample, data_size, 50); \
p5=percentile((_s).sample, data_size, 5); \
p95=percentile((_s).sample, data_size, 95); \
OPT_SHOW_DEFAULT_DOMAIN_SUMMARY,
OPT_MMIO_ENUMERATION_SKIP_VGA,
OPT_SAMPLE_SIZE,
+ OPT_SAMPLE_MAX,
OPT_REPORT_PCPU,
/* Guest info */
OPT_DEFAULT_GUEST_PAGING_LEVELS,
argp_usage(state);
break;
}
+ case OPT_SAMPLE_MAX:
+ {
+ char * inval;
+ opt.sample_max = (int)strtol(arg, &inval, 0);
+ if( inval == arg )
+ argp_usage(state);
+ break;
+ }
case OPT_MMIO_ENUMERATION_SKIP_VGA:
{
char * inval;
.key = OPT_SAMPLE_SIZE,
.arg = "size",
.group = OPT_GROUP_SUMMARY,
- .doc = "Keep [size] samples for percentile purposes. Enter 0 to " \
- "disable. Default 10240.", },
+      .doc = "Start with [size] samples for percentile purposes. Enter 0 to " \
+      "disable. Default 1024.", },
+
+ { .name = "sample-max",
+ .key = OPT_SAMPLE_MAX,
+ .arg = "size",
+ .group = OPT_GROUP_SUMMARY,
+      .doc = "Do not allow the sample buffer to grow beyond [size] samples " \
+      "for percentile purposes. Enter 0 for no limit.", },
{ .name = "summary",
.key = OPT_SUMMARY,