parent 60ed3a2ed4
commit f4b9308f2c
@@ -1,51 +0,0 @@
use memory::Frame;

// A page-table entry is a single u64: bits 12-51 hold the physical frame
// address, the remaining bits are flags.
#[derive(Copy, Clone)]
pub struct Entry(u64);

impl Entry {
    pub fn is_unused(&self) -> bool {
        self.0 == 0
    }

    pub fn set_unused(&mut self) {
        self.0 = 0;
    }

    pub fn flags(&self) -> EntryFlags {
        EntryFlags::from_bits_truncate(self.0)
    }

    pub fn pointed_frame(&self) -> Option<Frame> {
        if self.flags().contains(EntryFlags::PRESENT) {
            Some(Frame::of_addr(
                self.0 as usize & 0x000fffff_fffff000
            ))
        } else {
            None
        }
    }

    pub fn set(&mut self, frame: Frame, flags: EntryFlags) {
        // The frame address must be page aligned and fit in bits 12-51.
        assert_eq!(frame.start_address().as_u64() & !0x000fffff_fffff000, 0);
        self.0 = frame.start_address().as_u64() | flags.bits();
    }
}

bitflags! {
    pub struct EntryFlags: u64 {
        const PRESENT =         1 << 0;
        const WRITABLE =        1 << 1;
        const USER_ACCESSIBLE = 1 << 2;
        const WRITE_THROUGH =   1 << 3;
        const NO_CACHE =        1 << 4;
        const ACCESSED =        1 << 5;
        const DIRTY =           1 << 6;
        const HUGE_PAGE =       1 << 7;
        const GLOBAL =          1 << 8;
        const NO_EXECUTE =      1 << 63;
        // Software-defined page types at bits 9-11
        const SHARED =          1 << 9;
        const COW =             2 << 9;
    }
}
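Not part of the commit — a standalone sketch of the bit layout the Entry type above relies on: the frame's physical address sits in bits 12-51 (the 0x000fffff_fffff000 mask), everything else is flag bits. ADDR_MASK, PRESENT, and WRITABLE are local stand-ins for the commit's constants.

// Editorial sketch: how a 64-bit entry packs a frame address and flags.
// ADDR_MASK mirrors the mask used by Entry::pointed_frame and Entry::set.
const ADDR_MASK: u64 = 0x000f_ffff_ffff_f000;
const PRESENT: u64 = 1 << 0;
const WRITABLE: u64 = 1 << 1;

fn main() {
    let frame_addr: u64 = 0x1234 * 4096;          // page-aligned physical address
    let entry = frame_addr | PRESENT | WRITABLE;  // what Entry::set stores

    // what Entry::pointed_frame recovers
    assert_eq!(entry & ADDR_MASK, frame_addr);
    // what Entry::flags sees
    assert_eq!(entry & PRESENT, PRESENT);
    println!("entry = {:#018x}", entry);
}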
@@ -1,115 +0,0 @@
use core::ptr::Unique;
use memory::*;
use super::{ENTRY_COUNT, EntryFlags, Page};
use super::table::{self, Level1, Level4, Table};

pub struct Mapper {
    p4: Unique<Table<Level4>>,
}

impl Mapper {
    pub const unsafe fn new() -> Mapper {
        Mapper {
            p4: Unique::new_unchecked(table::P4),
        }
    }

    pub fn p4(&self) -> &Table<Level4> {
        unsafe { self.p4.as_ref() }
    }

    pub fn p4_mut(&mut self) -> &mut Table<Level4> {
        unsafe { self.p4.as_mut() }
    }

    /// Translates a virtual address to the physical address it is mapped to.
    pub fn translate(&self, virtual_address: VirtAddr) -> Option<PhysAddr> {
        let offset = virtual_address % PAGE_SIZE;
        self.translate_page(Page::of_addr(virtual_address))
            .map(|frame| PhysAddr::new((frame.start_address().get() + offset) as u64))
    }

    /// Walks the page tables for `page`, handling 1GiB and 2MiB huge pages.
    pub fn translate_page(&self, page: Page) -> Option<Frame> {
        let p3 = self.p4().next_table(page.p4_index());

        let huge_page = || {
            p3.and_then(|p3| {
                let p3_entry = &p3[page.p3_index()];
                // 1GiB page?
                if let Some(start_frame) = p3_entry.pointed_frame() {
                    if p3_entry.flags().contains(EntryFlags::HUGE_PAGE) {
                        // address must be 1GiB aligned
                        assert_eq!(start_frame.start_address().get()
                                   % (ENTRY_COUNT * ENTRY_COUNT * PAGE_SIZE), 0);
                        return Some(Frame::of_addr(
                            start_frame.start_address().get() +
                            (page.p2_index() * ENTRY_COUNT + page.p1_index()) * PAGE_SIZE
                        ));
                    }
                }
                if let Some(p2) = p3.next_table(page.p3_index()) {
                    let p2_entry = &p2[page.p2_index()];
                    // 2MiB page?
                    if let Some(start_frame) = p2_entry.pointed_frame() {
                        if p2_entry.flags().contains(EntryFlags::HUGE_PAGE) {
                            // address must be 2MiB aligned
                            assert_eq!(start_frame.start_address().get()
                                       % (ENTRY_COUNT * PAGE_SIZE), 0);
                            return Some(Frame::of_addr(
                                start_frame.start_address().get() + page.p1_index() * PAGE_SIZE
                            ));
                        }
                    }
                }
                None
            })
        };

        p3.and_then(|p3| p3.next_table(page.p3_index()))
            .and_then(|p2| p2.next_table(page.p2_index()))
            .and_then(|p1| p1[page.p1_index()].pointed_frame())
            .or_else(huge_page)
    }

    /// Returns the P1 entry for `page`, creating intermediate tables as needed.
    pub(super) fn entry_mut(&mut self, page: Page) -> &mut Entry {
        use core::ops::IndexMut;
        let p4 = self.p4_mut();
        let p3 = p4.next_table_create(page.p4_index());
        let p2 = p3.next_table_create(page.p3_index());
        let p1 = p2.next_table_create(page.p2_index());
        p1.index_mut(page.p1_index())
    }

    /// Maps `page` to `frame`, panicking if the page is already mapped.
    pub fn map_to(&mut self, page: Page, frame: Frame, flags: EntryFlags) {
        let entry = self.entry_mut(page);
        assert!(entry.is_unused());
        entry.set(frame, flags | EntryFlags::PRESENT);
    }

    /// Maps `page` to a freshly allocated frame.
    pub fn map(&mut self, page: Page, flags: EntryFlags) {
        self.map_to(page, alloc_frame(), flags)
    }

    /// Identity-maps `frame`, i.e. maps the page with the same address.
    pub fn identity_map(&mut self, frame: Frame, flags: EntryFlags) {
        let page = Page::of_addr(frame.start_address().to_identity_virtual());
        self.map_to(page, frame, flags)
    }

    /// Unmaps `page`, returning the frame it was mapped to.
    pub fn unmap(&mut self, page: Page) -> Frame {
        use x86_64::instructions::tlb;
        use x86_64::VirtAddr;

        assert!(self.translate(page.start_address()).is_some());

        let p1 = self.p4_mut()
            .next_table_mut(page.p4_index())
            .and_then(|p3| p3.next_table_mut(page.p3_index()))
            .and_then(|p2| p2.next_table_mut(page.p2_index()))
            .expect("mapping code does not support huge pages");
        let frame = p1[page.p1_index()].pointed_frame().unwrap();
        p1[page.p1_index()].set_unused();
        tlb::flush(VirtAddr::new(page.start_address() as u64));
        // TODO free p(1,2,3) table if empty
        frame
    }
}
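Not part of the commit — translate_page above walks P4→P3→P2→P1 using the standard x86_64 split of a virtual address into four 9-bit indices plus a 12-bit page offset. A standalone sketch of that split (the indices function is a hypothetical stand-in for Page::p4_index() and friends), assuming 4KiB pages and 512-entry tables as in this code:

// Editorial sketch: how a virtual address decomposes into the p4/p3/p2/p1
// indices and page offset that Mapper::translate_page relies on.
const PAGE_SIZE: usize = 4096;   // matches the PAGE_SIZE constant in this commit
const ENTRY_COUNT: usize = 512;  // entries per page table

fn indices(addr: usize) -> (usize, usize, usize, usize, usize) {
    let page_number = addr / PAGE_SIZE;
    (
        (page_number >> 27) & 0o777, // p4 index (address bits 39-47)
        (page_number >> 18) & 0o777, // p3 index (address bits 30-38)
        (page_number >> 9) & 0o777,  // p2 index (address bits 21-29)
        page_number & 0o777,         // p1 index (address bits 12-20)
        addr % PAGE_SIZE,            // offset inside the 4KiB page
    )
}

fn main() {
    // build an address from known indices, then recover them
    let addr = (1usize << 30) + (2 << 21) + (3 << 12) + 4;
    assert_eq!(indices(addr), (0, 1, 2, 3, 4));
    println!("{:?}", indices(addr));
}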
@@ -1,97 +0,0 @@
use core::marker::PhantomData;
use core::ops::{Index, IndexMut};
use memory::alloc_frame;
use super::entry::*;
use super::ENTRY_COUNT;

// Address of the P4 table through the recursive mapping (the last P4 entry
// points back at the P4 table itself).
pub const P4: *mut Table<Level4> = 0xffffffff_fffff000 as *mut _;

pub struct Table<L: TableLevel> {
    entries: [Entry; ENTRY_COUNT],
    level: PhantomData<L>,
}

impl<L> Table<L> where L: TableLevel {
    pub fn zero(&mut self) {
        for entry in self.entries.iter_mut() {
            entry.set_unused();
        }
    }
}

impl<L> Table<L> where L: HierarchicalLevel {
    fn next_table_address(&self, index: usize) -> Option<usize> {
        let entry_flags = self[index].flags();
        if entry_flags.contains(EntryFlags::PRESENT) && !entry_flags.contains(EntryFlags::HUGE_PAGE) {
            // With the recursive mapping, the next table's virtual address is
            // obtained by shifting this table's address left by 9 bits and
            // splicing the entry index into bits 12-20.
            let table_address = self as *const _ as usize;
            Some((table_address << 9) | (index << 12))
        } else {
            None
        }
    }

    pub fn next_table(&self, index: usize) -> Option<&Table<L::NextLevel>> {
        self.next_table_address(index)
            .map(|address| unsafe { &*(address as *const _) })
    }

    pub fn next_table_mut(&mut self, index: usize) -> Option<&mut Table<L::NextLevel>> {
        self.next_table_address(index)
            .map(|address| unsafe { &mut *(address as *mut _) })
    }

    pub fn next_table_create(&mut self, index: usize) -> &mut Table<L::NextLevel> {
        if self.next_table(index).is_none() {
            assert!(!self.entries[index].flags().contains(EntryFlags::HUGE_PAGE),
                    "mapping code does not support huge pages");
            let frame = alloc_frame();
            // TODO: Remove USER_ACCESSIBLE
            self.entries[index].set(frame, EntryFlags::PRESENT | EntryFlags::WRITABLE | EntryFlags::USER_ACCESSIBLE);
            self.next_table_mut(index).unwrap().zero();
        }
        self.next_table_mut(index).unwrap()
    }
}

impl<L> Index<usize> for Table<L> where L: TableLevel {
    type Output = Entry;

    fn index(&self, index: usize) -> &Entry {
        &self.entries[index]
    }
}

impl<L> IndexMut<usize> for Table<L> where L: TableLevel {
    fn index_mut(&mut self, index: usize) -> &mut Entry {
        &mut self.entries[index]
    }
}

pub trait TableLevel {}

pub enum Level4 {}
pub enum Level3 {}
pub enum Level2 {}
pub enum Level1 {}

impl TableLevel for Level4 {}
impl TableLevel for Level3 {}
impl TableLevel for Level2 {}
impl TableLevel for Level1 {}

// Levels that have a next lower table level (P4 -> P3 -> P2 -> P1).
pub trait HierarchicalLevel: TableLevel {
    type NextLevel: TableLevel;
}

impl HierarchicalLevel for Level4 {
    type NextLevel = Level3;
}

impl HierarchicalLevel for Level3 {
    type NextLevel = Level2;
}

impl HierarchicalLevel for Level2 {
    type NextLevel = Level1;
}
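Not part of the commit — a standalone sketch of the recursive-mapping arithmetic that next_table_address above depends on: starting from the P4 address 0xffffffff_fffff000, each step shifts the table address left by 9 bits (removing one pass through the recursive entry) and splices the chosen index into bits 12-20. next_table_addr is a hypothetical free-function version of the same expression, written over u64 so it runs on any host:

// Editorial sketch: the address computation behind Table::next_table_address.
const P4_ADDR: u64 = 0xffff_ffff_ffff_f000; // recursive address of the P4 table

fn next_table_addr(table_addr: u64, index: u64) -> u64 {
    // shift one level down the recursive loop, splice the index into bits 12-20
    (table_addr << 9) | (index << 12)
}

fn main() {
    // P3 table reached through P4 entry 0
    assert_eq!(next_table_addr(P4_ADDR, 0), 0xffff_ffff_ffe0_0000);
    // P3 table reached through P4 entry 42 is 42 pages further up
    assert_eq!(next_table_addr(P4_ADDR, 42), 0xffff_ffff_ffe0_0000 + 42 * 0x1000);
    // going one level further gives the corresponding P2 table
    assert_eq!(
        next_table_addr(next_table_addr(P4_ADDR, 0), 1),
        0xffff_ffff_c000_1000
    );
    println!("{:#x}", next_table_addr(P4_ADDR, 42));
}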
@@ -1,34 +0,0 @@
use super::*;
use super::table::{Level1, Table};

pub struct TemporaryPage {
    page: Page,
}

impl TemporaryPage {
    pub fn new() -> TemporaryPage {
        TemporaryPage { page: Page::of_addr(0xcafebabe) }
    }

    /// Maps the temporary page to the given frame in the active table.
    /// Returns the start address of the temporary page.
    pub fn map(&self, frame: Frame, active_table: &mut ActivePageTable) -> VirtAddr {
        use super::entry::EntryFlags;

        assert!(active_table.translate_page(self.page).is_none(),
                "temporary page is already mapped");
        active_table.map_to(self.page, frame, EntryFlags::WRITABLE);
        self.page.start_address()
    }

    /// Unmaps the temporary page in the active table.
    pub fn unmap(&self, active_table: &mut ActivePageTable) -> Frame {
        active_table.unmap(self.page)
    }

    /// Maps the temporary page to the given page table frame in the active
    /// table. Returns a reference to the now mapped table.
    pub fn map_table_frame(&self, frame: Frame, active_table: &mut ActivePageTable) -> &mut Table<Level1> {
        unsafe { &mut *(self.map(frame, active_table) as *mut Table<Level1>) }
    }
}
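Not part of the commit — a usage sketch of the intended TemporaryPage flow: temporarily map a freshly allocated frame, zero it through the mapping, then drop the mapping. It assumes the ActivePageTable and alloc_frame() referenced elsewhere in this commit are in scope, and zero_new_table_frame is a hypothetical name, so it is illustrative rather than standalone.

// Editorial usage sketch; relies on kernel types, does not compile on its own.
fn zero_new_table_frame(active_table: &mut ActivePageTable) -> Frame {
    let frame = alloc_frame();                 // pick a fresh physical frame
    let temp = TemporaryPage::new();
    {
        // temporarily map the frame so the CPU can write to it
        let table = temp.map_table_frame(frame.clone(), active_table);
        table.zero();                          // clear all 512 entries
    }
    temp.unmap(active_table);                  // drop the temporary mapping
    frame
}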
@@ -1,27 +0,0 @@
use super::address::PhysAddr;

pub const PAGE_SIZE: usize = 4096;

#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct Frame {
    pub(super) number: usize,
}

impl Frame {
    pub fn of_addr(address: usize) -> Frame {
        Frame { number: address / PAGE_SIZE }
    }

    // TODO: Set private
    pub fn start_address(&self) -> PhysAddr {
        PhysAddr::new((self.number * PAGE_SIZE) as u64)
    }

    pub fn clone(&self) -> Frame {
        Frame { number: self.number }
    }
}

pub trait FrameAllocator {
    fn allocate_frame(&mut self) -> Option<Frame>;
    fn deallocate_frame(&mut self, frame: Frame);
}
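Not part of the commit — a minimal sketch of one possible FrameAllocator implementation: a bump allocator over a fixed physical range. Frame, PAGE_SIZE, and the trait are restated here in simplified form so the example compiles on its own; BumpFrameAllocator and its fields are hypothetical names, not the kernel's actual allocator.

// Editorial sketch: a bump allocator implementing the FrameAllocator trait above.
const PAGE_SIZE: usize = 4096;

#[derive(Debug, PartialEq, Eq)]
struct Frame { number: usize }

trait FrameAllocator {
    fn allocate_frame(&mut self) -> Option<Frame>;
    fn deallocate_frame(&mut self, frame: Frame);
}

/// Hands out frames from a fixed physical range and never reuses them.
struct BumpFrameAllocator { next: usize, end: usize }

impl BumpFrameAllocator {
    fn new(start_addr: usize, end_addr: usize) -> BumpFrameAllocator {
        BumpFrameAllocator { next: start_addr / PAGE_SIZE, end: end_addr / PAGE_SIZE }
    }
}

impl FrameAllocator for BumpFrameAllocator {
    fn allocate_frame(&mut self) -> Option<Frame> {
        if self.next < self.end {
            let frame = Frame { number: self.next };
            self.next += 1;
            Some(frame)
        } else {
            None
        }
    }

    fn deallocate_frame(&mut self, _frame: Frame) {
        // a bump allocator cannot reclaim frames
    }
}

fn main() {
    let mut allocator = BumpFrameAllocator::new(0x100000, 0x102000); // two frames
    assert_eq!(allocator.allocate_frame(), Some(Frame { number: 0x100 }));
    assert_eq!(allocator.allocate_frame(), Some(Frame { number: 0x101 }));
    assert_eq!(allocator.allocate_frame(), None);
}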