Always enable caching on transition32; backup/restore cr0 on call32

Always enable caching at the start of 32bit code and always make sure
the paging flag is off.  Because this alters the cr0 register, back it
up and restore it when using call32().

Also, rename get/setcr0() to cr0_read/write() to more closely match
other register access functions.

Signed-off-by: Kevin O'Connor <kevin@koconnor.net>
Author: Kevin O'Connor
Date:   2015-09-22 12:35:00 -04:00
Parent: 4c599ef396
Commit: 62de31b4fe
5 changed files with 35 additions and 26 deletions
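
For orientation before the diffs: the change makes transition32 clear CR0_PG, CR0_CD, and CR0_NW while setting CR0_PE, so 32bit code always runs with caching on and paging off. Below is a minimal, self-contained C sketch of that CR0 computation (an editorial illustration, not part of the commit; cr0_for_32bit_entry() is a hypothetical name, and the CR0_* values are the standard architectural bit positions):

    #include <assert.h>
    #include <stdint.h>

    typedef uint32_t u32;

    // Architectural CR0 bit positions referenced by the commit.
    #define CR0_PE (1u << 0)    // protection enable (protected mode)
    #define CR0_NW (1u << 29)   // not write-through
    #define CR0_CD (1u << 30)   // cache disable
    #define CR0_PG (1u << 31)   // paging

    // Hypothetical helper: the CR0 value transition32 now writes --
    // caching forced on (CD/NW cleared), paging off, protected mode on.
    static u32 cr0_for_32bit_entry(u32 cr0)
    {
        cr0 &= ~(CR0_PG | CR0_CD | CR0_NW);
        cr0 |= CR0_PE;
        return cr0;
    }

    int main(void)
    {
        // e.g. entering from real mode with caching left disabled
        u32 old_cr0 = CR0_CD | CR0_NW;
        assert(cr0_for_32bit_entry(old_cr0) == CR0_PE);
        return 0;
    }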


@@ -52,9 +52,6 @@ handle_smp(void)
     if (!CONFIG_QEMU)
         return;
 
-    // Enable CPU caching
-    setcr0(getcr0() & ~(CR0_CD|CR0_NW));
-
     // Detect apic_id
     u32 eax, ebx, ecx, cpuid_features;
     cpuid(1, &eax, &ebx, &ecx, &cpuid_features);


@@ -158,9 +158,6 @@ device_hardware_setup(void)
 static void
 platform_hardware_setup(void)
 {
-    // Enable CPU caching
-    setcr0(getcr0() & ~(CR0_CD|CR0_NW));
-
     // Make sure legacy DMA isn't running.
     dma_setup();


@@ -47,6 +47,7 @@ transition32_nmi_off:
         // Enable protected mode
         movl %cr0, %ecx
+        andl $~(CR0_PG|CR0_CD|CR0_NW), %ecx
         orl $CR0_PE, %ecx
         movl %ecx, %cr0


@@ -1,6 +1,6 @@
 // Code for manipulating stack locations.
 //
-// Copyright (C) 2009-2014 Kevin O'Connor <kevin@koconnor.net>
+// Copyright (C) 2009-2015 Kevin O'Connor <kevin@koconnor.net>
 //
 // This file may be distributed under the terms of the GNU LGPLv3 license.
@@ -28,6 +28,7 @@ struct {
     u8 cmosindex;
     u8 a20;
     u16 ss, fs, gs;
+    u32 cr0;
     struct descloc_s gdt;
 } Call16Data VARLOW;
@@ -37,19 +38,17 @@ struct {
 int HaveSmmCall32 VARFSEG;
 
 // Backup state in preparation for call32
-static void
+static int
 call32_prep(u8 method)
 {
-    // Backup cmos index register and disable nmi
-    u8 cmosindex = inb(PORT_CMOS_INDEX);
-    outb(cmosindex | NMI_DISABLE_BIT, PORT_CMOS_INDEX);
-    inb(PORT_CMOS_DATA);
-    SET_LOW(Call16Data.cmosindex, cmosindex);
-
-    // Backup ss
-    SET_LOW(Call16Data.ss, GET_SEG(SS));
-
     if (!CONFIG_CALL32_SMM || method != C16_SMM) {
+        // Backup cr0
+        u32 cr0 = cr0_read();
+        if (cr0 & CR0_PE)
+            // Called in 16bit protected mode?!
+            return -1;
+        SET_LOW(Call16Data.cr0, cr0);
+
         // Backup fs/gs and gdt
         SET_LOW(Call16Data.fs, GET_SEG(FS));
         SET_LOW(Call16Data.gs, GET_SEG(GS));
@@ -62,7 +61,17 @@ call32_prep(u8 method)
         SET_LOW(Call16Data.a20, set_a20(1));
     }
 
+    // Backup ss
+    SET_LOW(Call16Data.ss, GET_SEG(SS));
+
+    // Backup cmos index register and disable nmi
+    u8 cmosindex = inb(PORT_CMOS_INDEX);
+    outb(cmosindex | NMI_DISABLE_BIT, PORT_CMOS_INDEX);
+    inb(PORT_CMOS_DATA);
+    SET_LOW(Call16Data.cmosindex, cmosindex);
+
     SET_LOW(Call16Data.method, method);
+    return 0;
 }
 
 // Restore state backed up during call32
@@ -84,6 +93,11 @@ call32_post(void)
         lgdt(&gdt);
         SET_SEG(FS, GET_LOW(Call16Data.fs));
         SET_SEG(GS, GET_LOW(Call16Data.gs));
+
+        // Restore cr0
+        u32 cr0_caching = GET_LOW(Call16Data.cr0) & (CR0_CD|CR0_NW);
+        if (cr0_caching)
+            cr0_mask(CR0_CD|CR0_NW, cr0_caching);
     }
 
     // Restore cmos index register
@@ -220,14 +234,11 @@ call32(void *func, u32 eax, u32 errret)
     ASSERT16();
     if (CONFIG_CALL32_SMM && GET_GLOBAL(HaveSmmCall32))
         return call32_smm(func, eax);
-    u32 cr0 = getcr0();
-    if (cr0 & CR0_PE)
-        // Called in 16bit protected mode?!
-        return errret;
-
     // Jump direclty to 32bit mode - this clobbers the 16bit segment
     // selector registers.
-    call32_prep(C16_BIG);
+    int ret = call32_prep(C16_BIG);
+    if (ret)
+        return errret;
     u32 bkup_ss, bkup_esp;
     asm volatile(
         // Backup ss/esp / set esp to flat stack location


@@ -75,15 +75,18 @@ static inline void __cpuid(u32 index, u32 *eax, u32 *ebx, u32 *ecx, u32 *edx)
         : "0" (index));
 }
 
-static inline u32 getcr0(void) {
+static inline u32 cr0_read(void) {
     u32 cr0;
     asm("movl %%cr0, %0" : "=r"(cr0));
     return cr0;
 }
-static inline void setcr0(u32 cr0) {
+static inline void cr0_write(u32 cr0) {
     asm("movl %0, %%cr0" : : "r"(cr0));
 }
-static inline u16 getcr0_vm86(void) {
+static inline void cr0_mask(u32 off, u32 on) {
+    cr0_write((cr0_read() & ~off) | on);
+}
+static inline u16 cr0_vm86_read(void) {
     u16 cr0;
     asm("smsww %0" : "=r"(cr0));
     return cr0;
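
The new cr0_mask() helper is a read-modify-write built from cr0_read()/cr0_write(); call32_post() above uses it to put back whatever CR0_CD/CR0_NW state call32_prep() saved. A small host-side sketch of that restore logic (editorial illustration; fake_cr0 and the CR0_* defines stand in for the real register and constants):

    #include <assert.h>
    #include <stdint.h>

    typedef uint32_t u32;

    #define CR0_NW (1u << 29)   // not write-through
    #define CR0_CD (1u << 30)   // cache disable

    // Stand-in for the real control register so the helper logic can be
    // exercised off-target; the real accessors use mov to/from %cr0.
    static u32 fake_cr0;
    static u32 cr0_read(void) { return fake_cr0; }
    static void cr0_write(u32 cr0) { fake_cr0 = cr0; }
    static void cr0_mask(u32 off, u32 on) { cr0_write((cr0_read() & ~off) | on); }

    int main(void)
    {
        u32 saved = CR0_CD | CR0_NW;  // CD/NW bits saved by call32_prep()
        fake_cr0 = 0;                 // caching enabled while in 32bit mode
        // call32_post()-style restore of the caller's caching state, if any.
        u32 cr0_caching = saved & (CR0_CD | CR0_NW);
        if (cr0_caching)
            cr0_mask(CR0_CD | CR0_NW, cr0_caching);
        assert(fake_cr0 == (CR0_CD | CR0_NW));
        return 0;
    }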