Kernel: Fix SSE state saving
This was broken when I added SMP support. This patch makes SSE handling somewhat naive — state is saved and restored on every interrupt — but it now at least works correctly. I'll have to look into how SSE handling can be optimized properly under SMP. A simple approach would be to pin each thread to a specific processor and do roughly what I had before, but with the last-SSE-owner thread tracked per processor instead of in a static global.
This commit is contained in:
parent
7a0fb9a57f
commit
4b917390ac
|
@ -8,8 +8,9 @@
|
|||
"${workspaceFolder}/userspace/libraries/*/include"
|
||||
],
|
||||
"defines": [
|
||||
"__arch=x86_64"
|
||||
],
|
||||
"__arch=x86_64",
|
||||
"__enable_sse=1"
|
||||
],
|
||||
"compilerPath": "${workspaceFolder}/toolchain/local/bin/x86_64-banan_os-gcc",
|
||||
"cStandard": "c17",
|
||||
"cppStandard": "gnu++20",
|
||||
|
@ -23,9 +24,10 @@
|
|||
"${workspaceFolder}/userspace/libraries/*/include"
|
||||
],
|
||||
"defines": [
|
||||
"__arch=x86_64",
|
||||
"__is_kernel"
|
||||
],
|
||||
"__arch=x86_64",
|
||||
"__is_kernel",
|
||||
"__enable_sse=1"
|
||||
],
|
||||
"compilerPath": "${workspaceFolder}/toolchain/local/bin/x86_64-banan_os-gcc",
|
||||
"cStandard": "c17",
|
||||
"cppStandard": "gnu++20",
|
||||
|
@ -33,4 +35,4 @@
|
|||
}
|
||||
],
|
||||
"version": 4
|
||||
}
|
||||
}
|
||||
|
|
|
@ -82,7 +82,6 @@ namespace Kernel
|
|||
#if __enable_sse
|
||||
void save_sse();
|
||||
void load_sse();
|
||||
static Thread* sse_thread();
|
||||
#endif
|
||||
|
||||
void add_mutex() { m_mutex_count++; }
|
||||
|
|
|
@ -173,6 +173,10 @@ namespace Kernel
|
|||
|
||||
if (tid)
|
||||
{
|
||||
#if __enable_sse
|
||||
Thread::current().save_sse();
|
||||
#endif
|
||||
|
||||
if (isr == ISR::PageFault)
|
||||
{
|
||||
// Check if stack is OOB
|
||||
|
@ -218,31 +222,6 @@ namespace Kernel
|
|||
}
|
||||
}
|
||||
}
|
||||
#if __enable_sse
|
||||
else if (isr == ISR::DeviceNotAvailable)
|
||||
{
|
||||
#if ARCH(x86_64)
|
||||
asm volatile(
|
||||
"movq %cr0, %rax;"
|
||||
"andq $~(1 << 3), %rax;"
|
||||
"movq %rax, %cr0;"
|
||||
);
|
||||
#elif ARCH(i686)
|
||||
asm volatile(
|
||||
"movl %cr0, %eax;"
|
||||
"andl $~(1 << 3), %eax;"
|
||||
"movl %eax, %cr0;"
|
||||
);
|
||||
#endif
|
||||
if (auto* current = &Thread::current(); current != Thread::sse_thread())
|
||||
{
|
||||
if (auto* sse = Thread::sse_thread())
|
||||
sse->save_sse();
|
||||
current->load_sse();
|
||||
}
|
||||
goto done;
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
Debug::s_debug_lock.lock();
|
||||
|
@ -334,7 +313,11 @@ namespace Kernel
|
|||
ASSERT(Thread::current().state() != Thread::State::Terminated);
|
||||
|
||||
done:
|
||||
#if __enable_sse
|
||||
Thread::current().load_sse();
|
||||
#else
|
||||
return;
|
||||
#endif
|
||||
}
|
||||
|
||||
extern "C" void cpp_yield_handler(InterruptStack* interrupt_stack, InterruptRegisters* interrupt_registers)
|
||||
|
@ -357,6 +340,10 @@ done:
|
|||
asm volatile("cli; 1: hlt; jmp 1b");
|
||||
}
|
||||
|
||||
#if __enable_sse
|
||||
Thread::current().save_sse();
|
||||
#endif
|
||||
|
||||
if (!InterruptController::get().is_in_service(irq))
|
||||
dprintln("spurious irq 0x{2H}", irq);
|
||||
else
|
||||
|
@ -377,6 +364,10 @@ done:
|
|||
Scheduler::get().reschedule_if_idling();
|
||||
|
||||
ASSERT(Thread::current().state() != Thread::State::Terminated);
|
||||
|
||||
#if __enable_sse
|
||||
Thread::current().load_sse();
|
||||
#endif
|
||||
}
|
||||
|
||||
void IDT::register_interrupt_handler(uint8_t index, void (*handler)())
|
||||
|
|
|
@ -113,33 +113,8 @@ namespace Kernel
|
|||
: m_tid(tid), m_process(process)
|
||||
{
|
||||
#if __enable_sse
|
||||
#if ARCH(x86_64)
|
||||
uintptr_t cr0;
|
||||
asm volatile(
|
||||
"movq %%cr0, %%rax;"
|
||||
"movq %%rax, %[cr0];"
|
||||
"andq $~(1 << 3), %%rax;"
|
||||
"movq %%rax, %%cr0;"
|
||||
: [cr0]"=r"(cr0)
|
||||
:: "rax"
|
||||
);
|
||||
// initializes sse storage to valid state
|
||||
save_sse();
|
||||
asm volatile("movq %0, %%cr0" :: "r"(cr0));
|
||||
#elif ARCH(i686)
|
||||
uintptr_t cr0;
|
||||
asm volatile(
|
||||
"movl %%cr0, %%eax;"
|
||||
"movl %%eax, %[cr0];"
|
||||
"andl $~(1 << 3), %%eax;"
|
||||
"movl %%eax, %%cr0;"
|
||||
: [cr0]"=r"(cr0)
|
||||
:: "eax"
|
||||
);
|
||||
save_sse();
|
||||
asm volatile("movl %0, %%cr0" :: "r"(cr0));
|
||||
#else
|
||||
#error
|
||||
#endif
|
||||
#endif
|
||||
}
|
||||
|
||||
|
@ -482,8 +457,6 @@ namespace Kernel
|
|||
}
|
||||
|
||||
#if __enable_sse
|
||||
static Thread* s_sse_thread = nullptr;
|
||||
|
||||
void Thread::save_sse()
|
||||
{
|
||||
asm volatile("fxsave %0" :: "m"(m_sse_storage));
|
||||
|
@ -492,12 +465,6 @@ namespace Kernel
|
|||
void Thread::load_sse()
|
||||
{
|
||||
asm volatile("fxrstor %0" :: "m"(m_sse_storage));
|
||||
s_sse_thread = this;
|
||||
}
|
||||
|
||||
Thread* Thread::sse_thread()
|
||||
{
|
||||
return s_sse_thread;
|
||||
}
|
||||
#endif
|
||||
|
||||
|
|
Loading…
Reference in New Issue