Use new nightly GlobalAlloc API #11

Merged · 3 commits · Apr 18, 2018
Changes from 2 commits
13 changes: 7 additions & 6 deletions src/hole.rs
@@ -1,5 +1,6 @@
use core::ptr::NonNull;
use core::mem::size_of;
use alloc::allocator::{Layout, AllocErr};
use core::alloc::{Layout, Opaque, AllocErr};

use super::align_up;

@@ -42,7 +43,7 @@ impl HoleList {
/// block is returned.
/// This function uses the “first fit” strategy, so it uses the first hole that is big
/// enough. Thus the runtime is in O(n) but it should be reasonably fast for small allocations.
pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
assert!(layout.size() >= Self::min_size());

allocate_first_fit(&mut self.first, layout).map(|allocation| {
@@ -52,7 +53,7 @@ impl HoleList {
if let Some(padding) = allocation.back_padding {
deallocate(&mut self.first, padding.addr, padding.size);
}
allocation.info.addr as *mut u8
NonNull::new(allocation.info.addr as *mut Opaque).unwrap()
})
}

@@ -62,8 +63,8 @@
/// This function walks the list and inserts the given block at the correct place. If the freed
/// block is adjacent to another free block, the blocks are merged again.
/// This operation is in `O(n)` since the list needs to be sorted by address.
pub unsafe fn deallocate(&mut self, ptr: *mut u8, layout: Layout) {
deallocate(&mut self.first, ptr as usize, layout.size())
pub unsafe fn deallocate(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
deallocate(&mut self.first, ptr.as_ptr() as usize, layout.size())
}

/// Returns the minimal allocation size. Smaller allocations or deallocations are not allowed.
@@ -199,7 +200,7 @@ fn allocate_first_fit(mut previous: &mut Hole, layout: Layout) -> Result<Allocat
}
None => {
// this was the last hole, so no hole is big enough -> allocation not possible
return Err(AllocErr::Exhausted { request: layout });
return Err(AllocErr);
}
}
}
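For context (not part of the diff): a minimal sketch of how the changed signatures are used after this PR. It goes through the public Heap type from src/lib.rs, since the hole module is private, and assumes the early-2018 nightly core::alloc API where allocate_first_fit returns Result<NonNull<Opaque>, AllocErr>; the function name demo is illustrative only.

use core::alloc::Layout;

fn demo(heap: &mut Heap) {
    // 16 bytes with 8-byte alignment; Layout was not yet Copy on this
    // nightly, hence the clone() below (the tests in this PR do the same).
    let layout = Layout::from_size_align(16, 8).unwrap();
    // The Ok value is now NonNull<Opaque> rather than *mut u8.
    if let Ok(ptr) = heap.allocate_first_fit(layout.clone()) {
        // Cast the opaque pointer to a typed one before touching memory.
        let raw = ptr.as_ptr() as *mut u8;
        unsafe {
            *raw = 42;
            // deallocate takes the NonNull<Opaque> back, not a *mut u8.
            heap.deallocate(ptr, layout);
        }
    }
}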
34 changes: 19 additions & 15 deletions src/lib.rs
@@ -3,8 +3,6 @@
#![feature(pointer_methods)]
#![no_std]

extern crate alloc;

#[cfg(test)]
#[macro_use]
extern crate std;
@@ -14,16 +12,20 @@ extern crate spin;

use hole::{Hole, HoleList};
use core::mem;
use core::ptr::NonNull;
#[cfg(feature = "use_spin")]
use core::ops::Deref;
use alloc::allocator::{Alloc, Layout, AllocErr};
use core::alloc::{Alloc, GlobalAlloc, AllocErr, Layout, Opaque};
#[cfg(feature = "use_spin")]
use spin::Mutex;

mod hole;
#[cfg(test)]
mod test;

#[cfg(feature = "use_spin")]
pub static mut LOCKED_ALLOCATOR: LockedHeap = LockedHeap::empty();

/// A fixed size heap backed by a linked list of free memory blocks.
pub struct Heap {
bottom: usize,
@@ -70,7 +72,7 @@ impl Heap {
/// This function scans the list of free memory blocks and uses the first block that is big
/// enough. The runtime is in O(n) where n is the number of free blocks, but it should be
/// reasonably fast for small allocations.
pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
let mut size = layout.size();
if size < HoleList::min_size() {
size = HoleList::min_size();
@@ -88,7 +90,7 @@ impl Heap {
/// This function walks the list of free memory blocks and inserts the freed block at the
/// correct place. If the freed block is adjacent to another free block, the blocks are merged
/// again. This operation is in `O(n)` since the list needs to be sorted by address.
pub unsafe fn deallocate(&mut self, ptr: *mut u8, layout: Layout) {
pub unsafe fn deallocate(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
let mut size = layout.size();
if size < HoleList::min_size() {
size = HoleList::min_size();
@@ -122,21 +124,21 @@ impl Heap {
pub unsafe fn extend(&mut self, by: usize) {
let top = self.top();
let layout = Layout::from_size_align(by, 1).unwrap();
self.holes.deallocate(top as *mut u8, layout);
self.holes.deallocate(NonNull::new_unchecked(top as *mut Opaque), layout);
self.size += by;
}
}

unsafe impl Alloc for Heap {
unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
self.allocate_first_fit(layout)
}

unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
unsafe fn dealloc(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
self.deallocate(ptr, layout)
}

fn oom(&mut self, _: AllocErr) -> ! {
fn oom(&mut self) -> ! {
panic!("Out of memory");
}
}
@@ -174,16 +176,18 @@ impl Deref for LockedHeap {
}

#[cfg(feature = "use_spin")]
unsafe impl<'a> Alloc for &'a LockedHeap {
unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
self.0.lock().allocate_first_fit(layout)
unsafe impl<'a> GlobalAlloc for &'a LockedHeap {
unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
LOCKED_ALLOCATOR.0.lock().allocate_first_fit(layout).ok().map_or(0 as *mut Opaque, |allocation| {
Member:
I think we should be able to do just self.0.lock() without the static LOCKED_ALLOCATOR.

Contributor (author):

Yep, Mutex lock() uses &self and returns a MutexGuard, so the static is not needed here (see the sketch after this diff).

Contributor (author):

Is the &'a needed in the impl? It works without it.

allocation.as_ptr()
})
}

unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
self.0.lock().deallocate(ptr, layout)
unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
LOCKED_ALLOCATOR.0.lock().deallocate(NonNull::new_unchecked(ptr), layout)
}

fn oom(&mut self, _: AllocErr) -> ! {
fn oom(&self) -> ! {
panic!("Out of memory");
}
}
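For reference, a sketch of the impl the review thread points toward (not the code merged in this commit): GlobalAlloc's methods take &self, and spin::Mutex::lock also takes &self, so locking through self works without the LOCKED_ALLOCATOR static, and putting the impl on LockedHeap itself drops the questioned &'a. It assumes the imports already at the top of src/lib.rs.

#[cfg(feature = "use_spin")]
unsafe impl GlobalAlloc for LockedHeap {
    unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
        // Mutex::lock takes &self and returns a MutexGuard, so it can be
        // called from a &self method; no global state required.
        self.0
            .lock()
            .allocate_first_fit(layout)
            .ok()
            .map_or(0 as *mut Opaque, |allocation| allocation.as_ptr())
    }

    unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
        self.0.lock().deallocate(NonNull::new_unchecked(ptr), layout)
    }

    fn oom(&self) -> ! {
        panic!("Out of memory");
    }
}

One concrete payoff: #[global_allocator] requires the static's own type to implement GlobalAlloc, so with the impl on LockedHeap (rather than &'a LockedHeap) a downstream crate can register the allocator directly (static name illustrative):

#[global_allocator]
static ALLOCATOR: LockedHeap = LockedHeap::empty();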
32 changes: 16 additions & 16 deletions src/test.rs
@@ -1,6 +1,6 @@
use std::prelude::v1::*;
use std::mem::{size_of, align_of};
use alloc::allocator::Layout;
use core::alloc::Layout;
use super::*;

fn new_heap() -> Heap {
@@ -46,7 +46,7 @@ fn allocate_double_usize() {
let layout = Layout::from_size_align(size, align_of::<usize>());
let addr = heap.allocate_first_fit(layout.unwrap());
assert!(addr.is_ok());
let addr = addr.unwrap() as usize;
let addr = addr.unwrap().as_ptr() as usize;
assert!(addr == heap.bottom);
let (hole_addr, hole_size) = heap.holes.first_hole().expect("ERROR: no hole left");
assert!(hole_addr == heap.bottom + size);
@@ -64,7 +64,7 @@ fn allocate_and_free_double_usize() {
let layout = Layout::from_size_align(size_of::<usize>() * 2, align_of::<usize>()).unwrap();
let x = heap.allocate_first_fit(layout.clone()).unwrap();
Member:
We shouldn't just remove the unwrap, because it also checked that the allocation succeeded. When we don't check it, we risk deallocating a null pointer. This also applies to the other cases where unwraps are removed.

Contributor (author):
Looks like I'll have to add asserts here, or write a function that panics when the pointer is null (see the sketch at the end of this diff).

unsafe {
*(x as *mut (usize, usize)) = (0xdeafdeadbeafbabe, 0xdeafdeadbeafbabe);
*(x.as_ptr() as *mut (usize, usize)) = (0xdeafdeadbeafbabe, 0xdeafdeadbeafbabe);

heap.deallocate(x, layout.clone());
assert_eq!((*(heap.bottom as *const Hole)).size, heap.size);
@@ -83,11 +83,11 @@ fn deallocate_right_before() {

unsafe {
heap.deallocate(y, layout.clone());
assert_eq!((*(y as *const Hole)).size, layout.size());
assert_eq!((*(y.as_ptr() as *const Hole)).size, layout.size());
heap.deallocate(x, layout.clone());
assert_eq!((*(x as *const Hole)).size, layout.size() * 2);
assert_eq!((*(x.as_ptr() as *const Hole)).size, layout.size() * 2);
heap.deallocate(z, layout.clone());
assert_eq!((*(x as *const Hole)).size, heap.size);
assert_eq!((*(x.as_ptr() as *const Hole)).size, heap.size);
}
}

@@ -103,11 +103,11 @@ fn deallocate_right_behind() {

unsafe {
heap.deallocate(x, layout.clone());
assert_eq!((*(x as *const Hole)).size, size);
assert_eq!((*(x.as_ptr() as *const Hole)).size, size);
heap.deallocate(y, layout.clone());
assert_eq!((*(x as *const Hole)).size, size * 2);
assert_eq!((*(x.as_ptr() as *const Hole)).size, size * 2);
heap.deallocate(z, layout.clone());
assert_eq!((*(x as *const Hole)).size, heap.size);
assert_eq!((*(x.as_ptr() as *const Hole)).size, heap.size);
}
}

@@ -124,14 +124,14 @@ fn deallocate_middle() {

unsafe {
heap.deallocate(x, layout.clone());
assert_eq!((*(x as *const Hole)).size, size);
assert_eq!((*(x.as_ptr() as *const Hole)).size, size);
heap.deallocate(z, layout.clone());
assert_eq!((*(x as *const Hole)).size, size);
assert_eq!((*(z as *const Hole)).size, size);
assert_eq!((*(x.as_ptr() as *const Hole)).size, size);
assert_eq!((*(z.as_ptr() as *const Hole)).size, size);
heap.deallocate(y, layout.clone());
assert_eq!((*(x as *const Hole)).size, size * 3);
assert_eq!((*(x.as_ptr() as *const Hole)).size, size * 3);
heap.deallocate(a, layout.clone());
assert_eq!((*(x as *const Hole)).size, heap.size);
assert_eq!((*(x.as_ptr() as *const Hole)).size, heap.size);
}
}

@@ -167,9 +167,9 @@ fn allocate_multiple_sizes() {

let x = heap.allocate_first_fit(layout_1.clone()).unwrap();
let y = heap.allocate_first_fit(layout_2.clone()).unwrap();
assert_eq!(y as usize, x as usize + base_size * 2);
assert_eq!(y.as_ptr() as usize, x.as_ptr() as usize + base_size * 2);
let z = heap.allocate_first_fit(layout_3.clone()).unwrap();
assert_eq!(z as usize % (base_size * 4), 0);
assert_eq!(z.as_ptr() as usize % (base_size * 4), 0);

unsafe {
heap.deallocate(x, layout_1.clone());
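Following up on the review thread about the removed unwraps, here is one possible shape for the helper mentioned there, under a hypothetical name expect_alloc (not part of this PR). It both unwraps the Result and fails with a clear message when an allocation does not succeed, so a test can never hand a garbage pointer to deallocate. This assumes AllocErr implements Debug on this nightly, which .expect needs.

use core::alloc::{Layout, Opaque};
use core::ptr::NonNull;

fn expect_alloc(heap: &mut Heap, layout: Layout) -> NonNull<Opaque> {
    // Panics with a useful message if the heap cannot satisfy `layout`.
    heap.allocate_first_fit(layout)
        .expect("test allocation unexpectedly failed")
}

A call site then reads let x = expect_alloc(&mut heap, layout.clone()); and the later heap.deallocate(x, layout.clone()) is guaranteed to receive a real allocation.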