Discard va-to-pa conversion helpers. Higher half kernel complete.

master
WangRunji 7 years ago
parent a4988195ce
commit d5cd4673fb
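
The as_ref_in_real / as_mut_in_real helpers removed below translated a kernel reference back to its physical ("real") address, which appears to have been needed only while the descriptor tables (GDT/TSS/IDT) were still loaded through the identity mapping. With the higher half kernel complete, those structures are used at their kernel virtual addresses directly, so plain references suffice and the helpers can go. A rough sketch of the translation they performed (KERNEL_OFFSET is a placeholder constant for illustration, not this kernel's actual value):

    // Illustration only: a higher-half kernel maps physical memory at a fixed
    // offset, so kernel-data virtual <-> physical translation is arithmetic.
    const KERNEL_OFFSET: usize = 0xffff_8000_0000_0000; // assumed placeholder

    /// Physical address -> kernel ("higher half") virtual address.
    fn to_kernel_virtual(pa: usize) -> usize {
        pa + KERNEL_OFFSET
    }

    /// Kernel virtual address -> physical address; this is the conversion the
    /// removed as_ref_in_real / as_mut_in_real helpers relied on.
    fn from_kernel_virtual(va: usize) -> usize {
        va - KERNEL_OFFSET
    }

Once the higher-half mapping is active, to_kernel_virtual is still used when turning a physical frame into a usable page (see the guard-page hunk below), but references such as &tss and the handler functions need no conversion at all.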

@@ -1,7 +1,7 @@
 use x86_64::VirtualAddress;
 use x86_64::structures::idt::{Idt, ExceptionStackFrame, PageFaultErrorCode};
 use x86_64::structures::tss::TaskStateSegment;
-use memory::{MemoryController, as_ref_in_real};
+use memory::MemoryController;
 use spin::Once;
 
 mod gdt;
@@ -32,7 +32,7 @@ pub fn init(memory_controller: &mut MemoryController) {
     let gdt = GDT.call_once(|| {
         let mut gdt = gdt::Gdt::new();
         code_selector = gdt.add_entry(gdt::Descriptor::kernel_code_segment());
-        tss_selector = gdt.add_entry(gdt::Descriptor::tss_segment(unsafe{as_ref_in_real(&tss)}));
+        tss_selector = gdt.add_entry(gdt::Descriptor::tss_segment(&tss));
         gdt
     });
     gdt.load();
@@ -44,17 +44,18 @@ pub fn init(memory_controller: &mut MemoryController) {
         load_tss(tss_selector);
     }
 
-    unsafe {
     let idt = IDT.call_once(|| {
         let mut idt = Idt::new();
-        idt.breakpoint.set_handler_fn(*as_ref_in_real(&breakpoint_handler));
-        idt.double_fault.set_handler_fn(*as_ref_in_real(&double_fault_handler));
-        idt.page_fault.set_handler_fn(*as_ref_in_real(&page_fault_handler))
+        idt.breakpoint.set_handler_fn(breakpoint_handler);
+        idt.double_fault.set_handler_fn(double_fault_handler);
+        unsafe {
+            idt.page_fault.set_handler_fn(page_fault_handler)
                 .set_stack_index(DOUBLE_FAULT_IST_INDEX as u16);
+        }
         idt
     });
-    as_ref_in_real(&idt).load();
-    }
+
+    idt.load();
 }
 
 extern "x86-interrupt" fn breakpoint_handler(

@@ -129,7 +129,6 @@ pub fn remap_the_kernel<A>(allocator: &mut A, boot_info: &BootInformation)
         for frame in Frame::range_inclusive(multiboot_start, multiboot_end) {
             mapper.identity_map(frame, PRESENT, allocator);
         }
-        debug!("{:?}", mapper);
     });
 
     let old_table = active_table.switch(new_table);
@@ -137,7 +136,7 @@ pub fn remap_the_kernel<A>(allocator: &mut A, boot_info: &BootInformation)
 
     // turn the old p4 page into a guard page
     let old_p4_page = Page::containing_address(
-        old_table.p4_frame.start_address().to_identity_virtual()
+        old_table.p4_frame.start_address().to_kernel_virtual()
     );
     active_table.unmap(old_p4_page, allocator);
     println!("guard page at {:#x}", old_p4_page.start_address());
@@ -210,16 +209,3 @@ impl MemoryController {
                         size_in_pages)
     }
 }
-
-pub unsafe fn as_mut_in_real<T> (obj: &mut T) -> &mut T {
-    let va = obj as *mut T as VirtualAddress;
-    let pa = PhysicalAddress::from_kernel_virtual(va).0;
-    &mut *(pa as *mut T)
-}
-
-pub unsafe fn as_ref_in_real<T> (obj: &T) -> &T {
-    let va = obj as *const T as VirtualAddress;
-    let pa = PhysicalAddress::from_kernel_virtual(va).0;
-    &*(pa as *const T)
-}
-