From 9dc998222aef8d1ff75b2d0baf8db872b6d70d65 Mon Sep 17 00:00:00 2001
From: Philipp Oppermann
Date: Sun, 19 Nov 2017 12:54:47 +0100
Subject: [PATCH] Make the bump allocator lock free and impl Alloc for shared reference

---
 src/lib.rs                   |  2 +-
 src/memory/heap_allocator.rs | 33 ++++++++++++++++++++++-----------
 2 files changed, 23 insertions(+), 12 deletions(-)

diff --git a/src/lib.rs b/src/lib.rs
index 93529aa..bb0dbab 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,7 +1,7 @@
 #![feature(lang_items)]
 #![feature(const_fn)]
 #![feature(alloc)]
-#![feature(const_unique_new)]
+#![feature(const_unique_new, const_atomic_usize_new)]
 #![feature(unique)]
 #![feature(allocator_api)]
 #![no_std]
diff --git a/src/memory/heap_allocator.rs b/src/memory/heap_allocator.rs
index 990a2ea..0a232f9 100644
--- a/src/memory/heap_allocator.rs
+++ b/src/memory/heap_allocator.rs
@@ -1,29 +1,40 @@
 use alloc::heap::{Alloc, AllocErr, Layout};
+use core::sync::atomic::{AtomicUsize, Ordering};
+
 /// A simple allocator that allocates memory linearly and ignores freed memory.
 #[derive(Debug)]
 pub struct BumpAllocator {
     heap_start: usize,
     heap_end: usize,
-    next: usize,
+    next: AtomicUsize,
 }
 
 impl BumpAllocator {
     pub const fn new(heap_start: usize, heap_end: usize) -> Self {
-        Self { heap_start, heap_end, next: heap_start }
+        Self { heap_start, heap_end, next: AtomicUsize::new(heap_start) }
     }
 }
 
-unsafe impl Alloc for BumpAllocator {
+unsafe impl<'a> Alloc for &'a BumpAllocator {
     unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
-        let alloc_start = align_up(self.next, layout.align());
-        let alloc_end = alloc_start.saturating_add(layout.size());
-
-        if alloc_end <= self.heap_end {
-            self.next = alloc_end;
-            Ok(alloc_start as *mut u8)
-        } else {
-            Err(AllocErr::Exhausted{ request: layout })
+        loop {
+            // load current state of the `next` field
+            let current_next = self.next.load(Ordering::Relaxed);
+            let alloc_start = align_up(current_next, layout.align());
+            let alloc_end = alloc_start.saturating_add(layout.size());
+
+            if alloc_end <= self.heap_end {
+                // update the `next` pointer if it still has the value `current_next`
+                let next_now = self.next.compare_and_swap(current_next, alloc_end,
+                    Ordering::Relaxed);
+                if next_now == current_next {
+                    // next address was successfully updated, allocation succeeded
+                    return Ok(alloc_start as *mut u8);
+                }
+            } else {
+                return Err(AllocErr::Exhausted{ request: layout })
+            }
         }
     }
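
Note (not part of the patch above): the sketch below is a minimal, stand-alone model of the same compare-and-swap retry loop, operating on raw addresses only. It assumes current stable Rust, so it uses `compare_exchange` instead of the now-deprecated `compare_and_swap` and leaves out the unstable `Alloc` trait; the `Bump` type, the `align_up` helper, and the addresses in `main` are illustrative assumptions, not code from the repository.

use std::sync::atomic::{AtomicUsize, Ordering};

/// Align `addr` upwards to the next multiple of `align` (must be a power of two).
fn align_up(addr: usize, align: usize) -> usize {
    (addr + align - 1) & !(align - 1)
}

/// Stand-alone model of the lock-free bump allocator: it hands out address
/// ranges starting at `heap_start` and never reuses freed memory.
struct Bump {
    heap_end: usize,
    next: AtomicUsize,
}

impl Bump {
    const fn new(heap_start: usize, heap_end: usize) -> Self {
        Self { heap_end, next: AtomicUsize::new(heap_start) }
    }

    /// Returns the start address of a `size`-byte allocation, or `None` if
    /// the heap is exhausted. Callable concurrently through `&self`.
    fn alloc(&self, size: usize, align: usize) -> Option<usize> {
        loop {
            // load the current bump pointer and compute the candidate range
            let current_next = self.next.load(Ordering::Relaxed);
            let alloc_start = align_up(current_next, align);
            let alloc_end = alloc_start.checked_add(size)?;

            if alloc_end > self.heap_end {
                return None; // out of heap memory
            }
            // publish the new bump pointer only if no other caller raced us;
            // on failure someone else moved `next`, so retry with the new value
            if self
                .next
                .compare_exchange(current_next, alloc_end, Ordering::Relaxed, Ordering::Relaxed)
                .is_ok()
            {
                return Some(alloc_start);
            }
        }
    }
}

fn main() {
    let bump = Bump::new(0x1000, 0x2000);
    assert_eq!(bump.alloc(64, 16), Some(0x1000)); // `next` is now 0x1040
    assert_eq!(bump.alloc(8, 8), Some(0x1040));
    assert_eq!(bump.alloc(0x10000, 8), None); // exhausted
}

The retry loop is what makes allocation possible through a shared reference: instead of mutating `next` in place, each caller proposes a new value and simply retries if another thread won the race.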