
Use new nightly GlobalAlloc API #11


Merged. 3 commits merged on Apr 18, 2018.
Changes from 1 commit
16 changes: 8 additions & 8 deletions src/hole.rs
@@ -1,5 +1,5 @@
use core::mem::size_of;
use alloc::allocator::{Layout, AllocErr};
use core::alloc::{Layout, Opaque};

use super::align_up;

@@ -42,17 +42,17 @@ impl HoleList {
/// block is returned.
/// This function uses the “first fit” strategy, so it uses the first hole that is big
/// enough. Thus the runtime is in O(n) but it should be reasonably fast for small allocations.
pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
pub fn allocate_first_fit(&mut self, layout: Layout) -> *mut Opaque {
assert!(layout.size() >= Self::min_size());

allocate_first_fit(&mut self.first, layout).map(|allocation| {
allocate_first_fit(&mut self.first, layout).map_or(0 as *mut Opaque, |allocation| {
if let Some(padding) = allocation.front_padding {
deallocate(&mut self.first, padding.addr, padding.size);
}
if let Some(padding) = allocation.back_padding {
deallocate(&mut self.first, padding.addr, padding.size);
}
allocation.info.addr as *mut u8
allocation.info.addr as *mut Opaque
})
}

@@ -62,7 +62,7 @@ impl HoleList {
/// This function walks the list and inserts the given block at the correct place. If the freed
/// block is adjacent to another free block, the blocks are merged again.
/// This operation is in `O(n)` since the list needs to be sorted by address.
pub unsafe fn deallocate(&mut self, ptr: *mut u8, layout: Layout) {
pub unsafe fn deallocate(&mut self, ptr: *mut Opaque, layout: Layout) {
deallocate(&mut self.first, ptr as usize, layout.size())
}

@@ -182,7 +182,7 @@ fn split_hole(hole: HoleInfo, required_layout: Layout) -> Option<Allocation> {
/// care of freeing it again.
/// This function uses the “first fit” strategy, so it breaks as soon as a big enough hole is
/// found (and returns it).
fn allocate_first_fit(mut previous: &mut Hole, layout: Layout) -> Result<Allocation, AllocErr> {
fn allocate_first_fit(mut previous: &mut Hole, layout: Layout) -> Option<Allocation> {
loop {
let allocation: Option<Allocation> = previous.next.as_mut().and_then(|current| {
split_hole(current.info(), layout.clone())
@@ -191,15 +191,15 @@ fn allocate_first_fit(mut previous: &mut Hole, layout: Layout) -> Result<Allocat
Some(allocation) => {
// hole is big enough, so remove it from the list by updating the previous pointer
previous.next = previous.next.as_mut().unwrap().next.take();
return Ok(allocation);
return Some(allocation);
}
None if previous.next.is_some() => {
// try next hole
previous = move_helper(previous).next.as_mut().unwrap();
}
None => {
// this was the last hole, so no hole is big enough -> allocation not possible
return Err(AllocErr::Exhausted { request: layout });
return None;
}
}
}
37 changes: 21 additions & 16 deletions src/lib.rs
@@ -16,14 +16,19 @@ use hole::{Hole, HoleList};
use core::mem;
#[cfg(feature = "use_spin")]
use core::ops::Deref;
use alloc::allocator::{Alloc, Layout, AllocErr};
use core::alloc::{GlobalAlloc, Layout, Opaque};
#[cfg(feature = "use_spin")]
use spin::Mutex;

mod hole;
#[cfg(test)]
mod test;

#[cfg(feature = "use_spin")]
pub static mut LOCKED_ALLOCATOR: LockedHeap = LockedHeap::empty();

pub static mut ALLOCATOR: Heap = Heap::empty();

/// A fixed size heap backed by a linked list of free memory blocks.
pub struct Heap {
bottom: usize,
@@ -70,7 +75,7 @@ impl Heap {
/// This function scans the list of free memory blocks and uses the first block that is big
/// enough. The runtime is in O(n) where n is the number of free blocks, but it should be
/// reasonably fast for small allocations.
pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
pub fn allocate_first_fit(&mut self, layout: Layout) -> *mut Opaque {
let mut size = layout.size();
if size < HoleList::min_size() {
size = HoleList::min_size();
@@ -88,7 +93,7 @@ impl Heap {
/// This function walks the list of free memory blocks and inserts the freed block at the
/// correct place. If the freed block is adjacent to another free block, the blocks are merged
/// again. This operation is in `O(n)` since the list needs to be sorted by address.
pub unsafe fn deallocate(&mut self, ptr: *mut u8, layout: Layout) {
pub unsafe fn deallocate(&mut self, ptr: *mut Opaque, layout: Layout) {
let mut size = layout.size();
if size < HoleList::min_size() {
size = HoleList::min_size();
@@ -122,21 +127,21 @@ impl Heap {
pub unsafe fn extend(&mut self, by: usize) {
let top = self.top();
let layout = Layout::from_size_align(by, 1).unwrap();
self.holes.deallocate(top as *mut u8, layout);
self.holes.deallocate(top as *mut Opaque, layout);
self.size += by;
}
}

unsafe impl Alloc for Heap {
unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
self.allocate_first_fit(layout)
unsafe impl GlobalAlloc for Heap {
unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
ALLOCATOR.allocate_first_fit(layout)
Member: Why the new ALLOCATOR static?

Contributor Author: GlobalAlloc takes &self instead of &mut self, so as far as I know a static and RefCell are the only choices here. There's also the new Alloc trait, but it can't be used with #[global_allocator]; in that case anyone using this library would have to implement GlobalAlloc on their own to be able to use it with #[global_allocator].
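For context, here is a minimal sketch of the lock-based alternative, written against the same nightly core::alloc API this PR targets (GlobalAlloc and Opaque) and using the crate's existing Heap and spin::Mutex. It is illustrative only, not code from this PR: because the Mutex supplies interior mutability, alloc and dealloc can work through &self without routing calls through a static mut.

```rust
use core::alloc::{GlobalAlloc, Layout, Opaque};
use spin::Mutex;

// `Heap` is the allocator type defined in this crate's lib.rs.
pub struct LockedHeap(Mutex<Heap>);

unsafe impl GlobalAlloc for LockedHeap {
    unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
        // The Mutex provides the mutation that `&self` alone cannot,
        // so no `static mut ALLOCATOR` indirection is needed.
        self.0.lock().allocate_first_fit(layout)
    }

    unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
        self.0.lock().deallocate(ptr, layout)
    }
}
```

A user would then register a `static` LockedHeap with `#[global_allocator]`, which only requires shared access.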

}

unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
self.deallocate(ptr, layout)
unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
ALLOCATOR.deallocate(ptr, layout)
}

fn oom(&mut self, _: AllocErr) -> ! {
fn oom(&self) -> ! {
panic!("Out of memory");
}
}
@@ -174,16 +179,16 @@ impl Deref for LockedHeap {
}

#[cfg(feature = "use_spin")]
unsafe impl<'a> Alloc for &'a LockedHeap {
unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
self.0.lock().allocate_first_fit(layout)
unsafe impl<'a> GlobalAlloc for &'a LockedHeap {
unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
LOCKED_ALLOCATOR.0.lock().allocate_first_fit(layout)
Member: Why the new LOCKED_ALLOCATOR static?

Contributor Author: Same as with ALLOCATOR.

}

unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
self.0.lock().deallocate(ptr, layout)
unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
LOCKED_ALLOCATOR.0.lock().deallocate(ptr, layout)
}

fn oom(&mut self, _: AllocErr) -> ! {
fn oom(&self) -> ! {
panic!("Out of memory");
}
}
74 changes: 36 additions & 38 deletions src/test.rs
@@ -28,15 +28,15 @@ fn new_max_heap() -> Heap {
fn empty() {
let mut heap = Heap::empty();
let layout = Layout::from_size_align(1, 1).unwrap();
assert!(heap.allocate_first_fit(layout.clone()).is_err());
assert!(heap.allocate_first_fit(layout.clone()) as *mut u8 == core::ptr::null_mut());
}

#[test]
fn oom() {
let mut heap = new_heap();
let layout = Layout::from_size_align(heap.size() + 1, align_of::<usize>());
let addr = heap.allocate_first_fit(layout.unwrap());
assert!(addr.is_err());
assert!(addr as *mut u8 == core::ptr::null_mut());
Member: Maybe we can encapsulate the == core::ptr::null_mut() check in some is_err / is_ok functions to avoid the boilerplate? It might also make sense to move all the tests into an inline tests submodule.

Contributor Author: That's a good idea.
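One possible shape for those helpers, as a sketch only (the AllocResult trait name is a placeholder, not something from this PR); it simply centralizes the same null-pointer comparison the tests below perform inline:

```rust
use core::alloc::Opaque;

// Extension trait so the tests can keep writing `.is_ok()` / `.is_err()`
// even though allocate_first_fit now returns a raw `*mut Opaque`.
trait AllocResult {
    fn is_ok(&self) -> bool;
    fn is_err(&self) -> bool {
        !self.is_ok()
    }
}

impl AllocResult for *mut Opaque {
    fn is_ok(&self) -> bool {
        // Same check as the inline asserts, written once.
        (*self as *mut u8) != core::ptr::null_mut()
    }
}
```

With that in the test module, assertions like `assert!(heap.allocate_first_fit(layout.clone()).is_ok());` read the same as the old Result-based tests.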

}

#[test]
@@ -45,15 +45,14 @@ fn allocate_double_usize() {
let size = size_of::<usize>() * 2;
let layout = Layout::from_size_align(size, align_of::<usize>());
let addr = heap.allocate_first_fit(layout.unwrap());
assert!(addr.is_ok());
let addr = addr.unwrap() as usize;
assert!(addr == heap.bottom);
assert!(addr as *mut u8 != core::ptr::null_mut());
assert!(addr as usize == heap.bottom);
let (hole_addr, hole_size) = heap.holes.first_hole().expect("ERROR: no hole left");
assert!(hole_addr == heap.bottom + size);
assert!(hole_size == heap.size - size);

unsafe {
assert_eq!((*((addr + size) as *const Hole)).size, heap.size - size);
assert_eq!((*((addr as usize + size) as *const Hole)).size, heap.size - size);
}
}

@@ -62,7 +61,7 @@ fn allocate_and_free_double_usize() {
let mut heap = new_heap();

let layout = Layout::from_size_align(size_of::<usize>() * 2, align_of::<usize>()).unwrap();
let x = heap.allocate_first_fit(layout.clone()).unwrap();
Member: We shouldn't just remove the unwrap, because it also checked that the allocation succeeded. When we don't check it, we risk deallocating a null pointer. This also applies to the other places where unwraps are removed.

Contributor Author: Looks like I'll have to add asserts here, or write a function that panics when the pointer is null.
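A sketch of that panicking helper (the name require_alloc is made up for illustration; it is not part of the PR):

```rust
use core::alloc::Opaque;

// Panics if the allocation failed, preserving the check that `.unwrap()`
// used to perform, and otherwise returns the pointer for use in the test.
fn require_alloc(ptr: *mut Opaque) -> *mut Opaque {
    assert!(
        ptr as *mut u8 != core::ptr::null_mut(),
        "allocation unexpectedly returned a null pointer"
    );
    ptr
}
```

A call site would then read `let x = require_alloc(heap.allocate_first_fit(layout.clone()));`.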

let x = heap.allocate_first_fit(layout.clone());
unsafe {
*(x as *mut (usize, usize)) = (0xdeafdeadbeafbabe, 0xdeafdeadbeafbabe);

@@ -77,9 +76,9 @@ fn deallocate_right_before() {
let mut heap = new_heap();
let layout = Layout::from_size_align(size_of::<usize>() * 5, 1).unwrap();

let x = heap.allocate_first_fit(layout.clone()).unwrap();
let y = heap.allocate_first_fit(layout.clone()).unwrap();
let z = heap.allocate_first_fit(layout.clone()).unwrap();
let x = heap.allocate_first_fit(layout.clone());
let y = heap.allocate_first_fit(layout.clone());
let z = heap.allocate_first_fit(layout.clone());

unsafe {
heap.deallocate(y, layout.clone());
@@ -97,9 +96,9 @@ fn deallocate_right_behind() {
let size = size_of::<usize>() * 5;
let layout = Layout::from_size_align(size, 1).unwrap();

let x = heap.allocate_first_fit(layout.clone()).unwrap();
let y = heap.allocate_first_fit(layout.clone()).unwrap();
let z = heap.allocate_first_fit(layout.clone()).unwrap();
let x = heap.allocate_first_fit(layout.clone());
let y = heap.allocate_first_fit(layout.clone());
let z = heap.allocate_first_fit(layout.clone());

unsafe {
heap.deallocate(x, layout.clone());
@@ -117,10 +116,10 @@ fn deallocate_middle() {
let size = size_of::<usize>() * 5;
let layout = Layout::from_size_align(size, 1).unwrap();

let x = heap.allocate_first_fit(layout.clone()).unwrap();
let y = heap.allocate_first_fit(layout.clone()).unwrap();
let z = heap.allocate_first_fit(layout.clone()).unwrap();
let a = heap.allocate_first_fit(layout.clone()).unwrap();
let x = heap.allocate_first_fit(layout.clone());
let y = heap.allocate_first_fit(layout.clone());
let z = heap.allocate_first_fit(layout.clone());
let a = heap.allocate_first_fit(layout.clone());

unsafe {
heap.deallocate(x, layout.clone());
@@ -141,12 +140,12 @@ fn reallocate_double_usize() {

let layout = Layout::from_size_align(size_of::<usize>() * 2, align_of::<usize>()).unwrap();

let x = heap.allocate_first_fit(layout.clone()).unwrap();
let x = heap.allocate_first_fit(layout.clone());
unsafe {
heap.deallocate(x, layout.clone());
}

let y = heap.allocate_first_fit(layout.clone()).unwrap();
let y = heap.allocate_first_fit(layout.clone());
unsafe {
heap.deallocate(y, layout.clone());
}
@@ -165,18 +164,18 @@ fn allocate_multiple_sizes() {
let layout_3 = Layout::from_size_align(base_size * 3, base_align * 4).unwrap();
let layout_4 = Layout::from_size_align(base_size * 4, base_align).unwrap();

let x = heap.allocate_first_fit(layout_1.clone()).unwrap();
let y = heap.allocate_first_fit(layout_2.clone()).unwrap();
let x = heap.allocate_first_fit(layout_1.clone());
let y = heap.allocate_first_fit(layout_2.clone());
assert_eq!(y as usize, x as usize + base_size * 2);
let z = heap.allocate_first_fit(layout_3.clone()).unwrap();
let z = heap.allocate_first_fit(layout_3.clone());
assert_eq!(z as usize % (base_size * 4), 0);

unsafe {
heap.deallocate(x, layout_1.clone());
}

let a = heap.allocate_first_fit(layout_4.clone()).unwrap();
let b = heap.allocate_first_fit(layout_1.clone()).unwrap();
let a = heap.allocate_first_fit(layout_4.clone());
let b = heap.allocate_first_fit(layout_1.clone());
assert_eq!(b, x);

unsafe {
@@ -193,7 +192,7 @@ fn allocate_usize() {

let layout = Layout::from_size_align(size_of::<usize>(), 1).unwrap();

assert!(heap.allocate_first_fit(layout.clone()).is_ok());
assert!(heap.allocate_first_fit(layout.clone()) as *mut u8 != core::ptr::null_mut());
}

#[test]
@@ -203,15 +202,14 @@ fn allocate_usize_in_bigger_block() {
let layout_1 = Layout::from_size_align(size_of::<usize>() * 2, 1).unwrap();
let layout_2 = Layout::from_size_align(size_of::<usize>(), 1).unwrap();

let x = heap.allocate_first_fit(layout_1.clone()).unwrap();
let y = heap.allocate_first_fit(layout_1.clone()).unwrap();
let x = heap.allocate_first_fit(layout_1.clone());
let y = heap.allocate_first_fit(layout_1.clone());
unsafe {
heap.deallocate(x, layout_1.clone());
}

let z = heap.allocate_first_fit(layout_2.clone());
assert!(z.is_ok());
let z = z.unwrap();
assert!(z as *mut u8 != core::ptr::null_mut());
assert_eq!(x, z);

unsafe {
@@ -229,9 +227,9 @@ fn align_from_small_to_big() {
let layout_2 = Layout::from_size_align(8, 8).unwrap();

// allocate 28 bytes so that the heap end is only 4 byte aligned
assert!(heap.allocate_first_fit(layout_1.clone()).is_ok());
assert!(heap.allocate_first_fit(layout_1.clone()) as *mut u8 != core::ptr::null_mut());
// try to allocate a 8 byte aligned block
assert!(heap.allocate_first_fit(layout_2.clone()).is_ok());
assert!(heap.allocate_first_fit(layout_2.clone()) as *mut u8 != core::ptr::null_mut());
}

#[test]
@@ -244,7 +242,7 @@ fn extend_empty_heap() {

// Try to allocate full heap after extend
let layout = Layout::from_size_align(2048, 1).unwrap();
assert!(heap.allocate_first_fit(layout.clone()).is_ok());
assert!(heap.allocate_first_fit(layout.clone()) as *mut u8 != core::ptr::null_mut());
}

#[test]
@@ -254,11 +252,11 @@ fn extend_full_heap() {
let layout = Layout::from_size_align(1024, 1).unwrap();

// Allocate full heap, extend and allocate again to the max
assert!(heap.allocate_first_fit(layout.clone()).is_ok());
assert!(heap.allocate_first_fit(layout.clone()) as *mut u8 != core::ptr::null_mut());
unsafe {
heap.extend(1024);
}
assert!(heap.allocate_first_fit(layout.clone()).is_ok());
assert!(heap.allocate_first_fit(layout.clone()) as *mut u8 != core::ptr::null_mut());
}

#[test]
@@ -271,12 +269,12 @@ fn extend_fragmented_heap() {
let alloc1 = heap.allocate_first_fit(layout_1.clone());
let alloc2 = heap.allocate_first_fit(layout_1.clone());

assert!(alloc1.is_ok());
assert!(alloc2.is_ok());
assert!(alloc1 as *mut u8 != core::ptr::null_mut());
assert!(alloc2 as *mut u8 != core::ptr::null_mut());

unsafe {
// Create a hole at the beginning of the heap
heap.deallocate(alloc1.unwrap(), layout_1.clone());
heap.deallocate(alloc1, layout_1.clone());
}

unsafe {
@@ -285,5 +283,5 @@ fn extend_fragmented_heap() {

// We got additional 1024 bytes hole at the end of the heap
// Try to allocate there
assert!(heap.allocate_first_fit(layout_2.clone()).is_ok());
assert!(heap.allocate_first_fit(layout_2.clone()) as *mut u8 != core::ptr::null_mut());
}