/* Initialise L3 xen-map page directory entry. */
mov $(sym_phys(l2_xenmap)+7),%eax
mov %eax,sym_phys(l3_xenmap) + (50*8)
- /* Hook indentity-map and xen-map L3 tables into PML4. */
+ /* Hook identity-map and xen-map L3 tables into PML4. */
mov $(sym_phys(l3_identmap)+7),%eax
mov %eax,sym_phys(idle_pg_table) + ( 0*8) /* PML4[ 0]: 1:1 map */
mov %eax,sym_phys(idle_pg_table) + (262*8) /* PML4[262]: 1:1 map */
jne 1b
#endif
+ /* Initialize 4kB mappings of first 2MB or 4MB of memory. */
+ mov $sym_phys(l1_identmap),%edi
+ mov $0x263,%eax /* PRESENT+RW+A+D+SMALL_PAGES */
+#if defined(__x86_64__)
+ or $0x100,%eax /* GLOBAL */
+#endif
+ xor %ecx,%ecx
+ /* %eax carries pfn+flags for the next PTE; %ecx counts entries written. */
+1: stosl
+#if CONFIG_PAGING_LEVELS >= 3
+ /* PTEs are 8 bytes wide with 3+ paging levels: skip the zero upper word. */
+ add $4,%edi
+#endif
+ add $PAGE_SIZE,%eax
+ inc %ecx
+ /* VGA hole (0xa0000-0xc0000) should be mapped UC. */
+ cmp $0xa0,%ecx
+ jne 2f
+ or $0x10,%eax /* +PCD */
+2: cmp $0xc0,%ecx
+ jne 2f
+ and $~0x10,%eax /* -PCD */
+2: cmp $L1_PAGETABLE_ENTRIES,%ecx
+ jne 1b
+ /* %edi is now PAGE_SIZE past l1_identmap: rewind and OR in 0x63
+  * (PRESENT+RW+A+D) to form an L2 entry referencing the new L1 table. */
+ sub $(PAGE_SIZE-0x63),%edi
+ /* Hook the L1 table into entry 0 of the identity map and into the
+  * L2 entry covering Xen's own virtual address range. */
+#if defined(__x86_64__)
+ mov %edi,sym_phys(l2_identmap)
+ mov %edi,sym_phys(l2_xenmap)
+#elif defined(CONFIG_X86_PAE)
+ mov %edi,sym_phys(idle_pg_table_l2)
+ mov %edi,sym_phys(idle_pg_table_l2) + (__PAGE_OFFSET>>18)
+#else
+ mov %edi,sym_phys(idle_pg_table)
+ mov %edi,sym_phys(idle_pg_table) + (__PAGE_OFFSET>>20)
+#endif
+
/* Copy bootstrap trampoline to low memory, below 1MB. */
mov $sym_phys(trampoline_start),%esi
mov $bootsym_phys(trampoline_start),%edi
#include <xsm/xsm.h>
#include <xen/trace.h>
+/*
+ * Mapping of first 2 or 4 megabytes of memory. This is mapped with 4kB
+ * mappings to avoid type conflicts with fixed-range MTRRs covering the
+ * lowest megabyte of physical memory. In any case the VGA hole should be
+ * mapped with type UC.
+ */
+/* .bss.page_aligned guarantees the table itself is page-aligned. */
+l1_pgentry_t __attribute__ ((__section__ (".bss.page_aligned")))
+ l1_identmap[L1_PAGETABLE_ENTRIES];
+
#define MEM_LOG(_f, _a...) gdprintk(XENLOG_WARNING , _f "\n" , ## _a)
/*
void memguard_init(void)
{
+ /* Never guard below 1MB: the low megabyte stays covered by the shared
+  * 4kB l1_identmap mappings — presumably; TODO confirm against setup. */
+ unsigned long start = max_t(unsigned long, xen_phys_start, 1UL << 20);
map_pages_to_xen(
- (unsigned long)__va(xen_phys_start),
- xen_phys_start >> PAGE_SHIFT,
- (xenheap_phys_end - xen_phys_start) >> PAGE_SHIFT,
+ (unsigned long)__va(start),
+ start >> PAGE_SHIFT,
+ (xenheap_phys_end - start) >> PAGE_SHIFT,
__PAGE_HYPERVISOR|MAP_SMALL_PAGES);
#ifdef __x86_64__
+ /* Expect xen_phys_start >= 1MB on x86_64, making the clamp a no-op. */
+ BUG_ON(start != xen_phys_start);
map_pages_to_xen(
XEN_VIRT_START,
- xen_phys_start >> PAGE_SHIFT,
- (__pa(&_end) + PAGE_SIZE - 1 - xen_phys_start) >> PAGE_SHIFT,
+ start >> PAGE_SHIFT,
+ (__pa(&_end) + PAGE_SIZE - 1 - start) >> PAGE_SHIFT,
__PAGE_HYPERVISOR|MAP_SMALL_PAGES);
#endif
}
static void __init bootstrap_map(unsigned long start, unsigned long end)
{
unsigned long mask = (1UL << L2_PAGETABLE_SHIFT) - 1;
- start = start & ~mask;
+ /* Clamp the range start to 16MB: lower memory is assumed to stay
+  * permanently mapped by the boot page tables — TODO confirm. */
+ start = max_t(unsigned long, start & ~mask, 16UL << 20);
end = (end + mask) & ~mask;
+ /* The clamp can empty the range entirely; nothing to map then. */
+ if ( start >= end )
+ return;
if ( end > BOOTSTRAP_DIRECTMAP_END )
panic("Cannot access memory beyond end of "
"bootstrap direct-map area\n");
l4_pgentry_t *pl4e;
l3_pgentry_t *pl3e;
l2_pgentry_t *pl2e;
- int i, j;
+ int i, j, k;
/* Select relocation address. */
e = (e - (opt_xenheap_megabytes << 20)) & ~mask;
continue;
*pl3e = l3e_from_intpte(l3e_get_intpte(*pl3e) +
xen_phys_start);
+ /* Also walk the L2 table below this L3 entry and rebase any
+  * 4kB-granular (non-PSE) entries. */
+ pl2e = l3e_to_l2e(*pl3e);
+ for ( k = 0; k < L2_PAGETABLE_ENTRIES; k++, pl2e++ )
+ {
+ /* Not present, PSE, or already relocated? */
+ /* pfn > 0x1000 (i.e. above 16MB) is taken to mean the entry was
+  * already rebased by xen_phys_start — TODO confirm threshold. */
+ if ( !(l2e_get_flags(*pl2e) & _PAGE_PRESENT) ||
+ (l2e_get_flags(*pl2e) & _PAGE_PSE) ||
+ (l2e_get_pfn(*pl2e) > 0x1000) )
+ continue;
+ *pl2e = l2e_from_intpte(l2e_get_intpte(*pl2e) +
+ xen_phys_start);
+ }
}
}
/* The only data mappings to be relocated are in the Xen area. */
pl2e = __va(__pa(l2_xenmap));
- for ( i = 0; i < L2_PAGETABLE_ENTRIES; i++, pl2e++ )
+ /* Entry 0: map the first 2MB of the relocated image with a single
+  * PSE superpage at xen_phys_start, then fix up the remaining entries. */
+ *pl2e++ = l2e_from_pfn(xen_phys_start >> PAGE_SHIFT,
+ PAGE_HYPERVISOR | _PAGE_PSE);
+ for ( i = 1; i < L2_PAGETABLE_ENTRIES; i++, pl2e++ )
{
if ( !(l2e_get_flags(*pl2e) & _PAGE_PRESENT) )
continue;
idle_pg_table_l2[L2_PAGETABLE_ENTRIES];
#endif
+/* 4kB identity mapping of low memory, defined in the x86 boot/setup code. */
+extern l1_pgentry_t l1_identmap[L1_PAGETABLE_ENTRIES];
+
unsigned int PAGE_HYPERVISOR = __PAGE_HYPERVISOR;
unsigned int PAGE_HYPERVISOR_NOCACHE = __PAGE_HYPERVISOR_NOCACHE;
(_PAGE_PSE|_PAGE_PRESENT)) == (_PAGE_PSE|_PAGE_PRESENT) )
l2e_add_flags(idle_pg_table_l2[l2_linear_offset(v)],
_PAGE_GLOBAL);
+ /* Mirror the PSE path above for the 4kB l1_identmap entries. */
+ for ( i = 0; i < L1_PAGETABLE_ENTRIES; i++ )
+ l1e_add_flags(l1_identmap[i], _PAGE_GLOBAL);
}
/*
l2e_write(&dom0_l2[i], l2e_empty());
/* Now zap mappings in the idle pagetables. */
+ /* Slot 0 must still reference l1_identmap (hooked in at boot) before
+  * we clear the low identity mapping. */
+ BUG_ON(l2e_get_pfn(idle_pg_table_l2[0]) != virt_to_mfn(l1_identmap));
+ l2e_write_atomic(&idle_pg_table_l2[0], l2e_empty());
destroy_xen_mappings(0, HYPERVISOR_VIRT_START);
flush_all(FLUSH_TLB_GLOBAL);