extract mutable and staticable parts

Corwin 2023-04-06 17:31:02 +01:00
parent af168f709b
commit 7030d65d9f
2 changed files with 163 additions and 129 deletions
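
In both files the change is the same: the fields that need mutation move into a new Inner type whose methods take &mut self, while the original type keeps only a Mutex<RefCell<Inner>> and enters the critical section once per public call. The sketch below shows the shape of the pattern with stand-in types (the names and the free/Mutex stubs are simplifications for illustration, not code from this commit):

use core::cell::RefCell;

// Stand-ins so the sketch compiles on its own; in agb, `free` disables
// interrupts and `Mutex` comes from the bare_metal crate.
pub struct CriticalSection(());

pub fn free<T>(f: impl FnOnce(CriticalSection) -> T) -> T {
    f(CriticalSection(()))
}

pub struct Mutex<T>(T);

impl<T> Mutex<T> {
    pub const fn new(value: T) -> Self {
        Mutex(value)
    }
    pub fn borrow(&self, _key: CriticalSection) -> &T {
        &self.0
    }
}

// The mutable core: ordinary `&mut self` methods, no locking in sight.
struct Inner {
    bytes_used: usize, // stands in for the allocator's real state
}

impl Inner {
    fn alloc(&mut self, size: usize) -> usize {
        let offset = self.bytes_used;
        self.bytes_used += size;
        offset
    }
}

// The "staticable" shell: shared, interior-mutable, fit for a `static`.
pub struct Allocator {
    inner: Mutex<RefCell<Inner>>,
}

impl Allocator {
    pub const fn new() -> Self {
        Self {
            inner: Mutex::new(RefCell::new(Inner { bytes_used: 0 })),
        }
    }
    pub fn alloc(&self, size: usize) -> usize {
        // One critical section per public call, entered at the boundary.
        free(|key| self.inner.borrow(key).borrow_mut().alloc(size))
    }
}

fn main() {
    let allocator = Allocator::new();
    assert_eq!(allocator.alloc(8), 0);
    assert_eq!(allocator.alloc(4), 8);
}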

block_allocator.rs

@@ -10,9 +10,9 @@ use core::convert::TryInto;
 use core::ptr::NonNull;
 
 use crate::interrupt::free;
-use bare_metal::{CriticalSection, Mutex};
+use bare_metal::Mutex;
 
-use super::bump_allocator::{BumpAllocator, StartEnd};
+use super::bump_allocator::{BumpAllocatorInner, StartEnd};
 use super::SendNonNull;
 
 struct Block {
@@ -43,77 +43,104 @@ struct BlockAllocatorState {
     first_free_block: Option<SendNonNull<Block>>,
 }
 
+struct BlockAllocatorInner {
+    inner_allocator: BumpAllocatorInner,
+    state: BlockAllocatorState,
+}
+
 pub struct BlockAllocator {
-    inner_allocator: BumpAllocator,
-    state: Mutex<RefCell<BlockAllocatorState>>,
+    inner: Mutex<RefCell<BlockAllocatorInner>>,
 }
 
 impl BlockAllocator {
     pub(crate) const unsafe fn new(start: StartEnd) -> Self {
         Self {
-            inner_allocator: BumpAllocator::new(start),
-            state: Mutex::new(RefCell::new(BlockAllocatorState {
-                first_free_block: None,
-            })),
+            inner: Mutex::new(RefCell::new(BlockAllocatorInner::new(start))),
         }
     }
 
     #[doc(hidden)]
     #[cfg(any(test, feature = "testing"))]
     pub unsafe fn number_of_blocks(&self) -> u32 {
-        free(|key| {
-            let mut state = self.state.borrow(key).borrow_mut();
-            let mut count = 0;
-            let mut list_ptr = &mut state.first_free_block;
-            while let Some(mut current) = list_ptr {
-                count += 1;
-                list_ptr = &mut current.as_mut().next;
-            }
-            count
-        })
-    }
-
-    /// Requests a brand new block from the inner bump allocator
-    fn new_block(&self, layout: Layout, cs: CriticalSection) -> Option<NonNull<u8>> {
-        let overall_layout = Block::either_layout(layout);
-        self.inner_allocator.alloc_critical(overall_layout, cs)
-    }
-
-    /// Merges blocks together to create a normalised list
-    unsafe fn normalise(&self) {
-        free(|key| {
-            let mut state = self.state.borrow(key).borrow_mut();
-            let mut list_ptr = &mut state.first_free_block;
-            while let Some(mut current) = list_ptr {
-                if let Some(next_elem) = current.as_mut().next {
-                    let difference = next_elem
-                        .as_ptr()
-                        .cast::<u8>()
-                        .offset_from(current.as_ptr().cast::<u8>());
-                    let usize_difference: usize = difference
-                        .try_into()
-                        .expect("distances in alloc'd blocks must be positive");
-
-                    if usize_difference == current.as_mut().size {
-                        let current = current.as_mut();
-                        let next = next_elem.as_ref();
-
-                        current.size += next.size;
-                        current.next = next.next;
-                        continue;
-                    }
-                }
-                list_ptr = &mut current.as_mut().next;
-            }
-        });
+        free(|key| self.inner.borrow(key).borrow_mut().number_of_blocks())
     }
 
     pub unsafe fn alloc(&self, layout: Layout) -> Option<NonNull<u8>> {
+        free(|key| self.inner.borrow(key).borrow_mut().alloc(layout))
+    }
+
+    pub unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
+        free(|key| self.inner.borrow(key).borrow_mut().dealloc(ptr, layout))
+    }
+
+    pub unsafe fn dealloc_no_normalise(&self, ptr: *mut u8, layout: Layout) {
+        free(|key| {
+            self.inner
+                .borrow(key)
+                .borrow_mut()
+                .dealloc_no_normalise(ptr, layout)
+        })
+    }
+}
+
+impl BlockAllocatorInner {
+    pub(crate) const unsafe fn new(start: StartEnd) -> Self {
+        Self {
+            inner_allocator: BumpAllocatorInner::new(start),
+            state: BlockAllocatorState {
+                first_free_block: None,
+            },
+        }
+    }
+
+    #[doc(hidden)]
+    #[cfg(any(test, feature = "testing"))]
+    pub unsafe fn number_of_blocks(&mut self) -> u32 {
+        let mut count = 0;
+        let mut list_ptr = &mut self.state.first_free_block;
+        while let Some(mut current) = list_ptr {
+            count += 1;
+            list_ptr = &mut current.as_mut().next;
+        }
+        count
+    }
+
+    /// Requests a brand new block from the inner bump allocator
+    fn new_block(&mut self, layout: Layout) -> Option<NonNull<u8>> {
+        let overall_layout = Block::either_layout(layout);
+        self.inner_allocator.alloc(overall_layout)
+    }
+
+    /// Merges blocks together to create a normalised list
+    unsafe fn normalise(&mut self) {
+        let mut list_ptr = &mut self.state.first_free_block;
+        while let Some(mut current) = list_ptr {
+            if let Some(next_elem) = current.as_mut().next {
+                let difference = next_elem
+                    .as_ptr()
+                    .cast::<u8>()
+                    .offset_from(current.as_ptr().cast::<u8>());
+                let usize_difference: usize = difference
+                    .try_into()
+                    .expect("distances in alloc'd blocks must be positive");
+
+                if usize_difference == current.as_mut().size {
+                    let current = current.as_mut();
+                    let next = next_elem.as_ref();
+
+                    current.size += next.size;
+                    current.next = next.next;
+                    continue;
+                }
+            }
+            list_ptr = &mut current.as_mut().next;
+        }
+    }
+
+    pub unsafe fn alloc(&mut self, layout: Layout) -> Option<NonNull<u8>> {
         // find a block that this current request fits in
         let full_layout = Block::either_layout(layout);
@@ -121,86 +148,80 @@ impl BlockAllocator {
             .extend(Layout::new::<Block>().align_to(8).unwrap().pad_to_align())
             .unwrap();
 
-        free(|key| {
-            let mut state = self.state.borrow(key).borrow_mut();
-            let mut current_block = state.first_free_block;
-            let mut list_ptr = &mut state.first_free_block;
-            // This iterates the free list until it either finds a block that
-            // is the exact size requested or a block that can be split into
-            // one with the desired size and another block header.
-            while let Some(mut current) = current_block {
-                let block_to_examine = current.as_mut();
-                if block_to_examine.size == full_layout.size() {
-                    *list_ptr = block_to_examine.next;
-                    return Some(current.cast());
-                } else if block_to_examine.size >= block_after_layout.size() {
-                    // can split block
-                    let split_block = Block {
-                        size: block_to_examine.size - block_after_layout_offset,
-                        next: block_to_examine.next,
-                    };
-                    let split_ptr = current
-                        .as_ptr()
-                        .cast::<u8>()
-                        .add(block_after_layout_offset)
-                        .cast();
-                    *split_ptr = split_block;
-                    *list_ptr = NonNull::new(split_ptr).map(SendNonNull);
-                    return Some(current.cast());
-                }
-                current_block = block_to_examine.next;
-                list_ptr = &mut block_to_examine.next;
-            }
-            self.new_block(layout, key)
-        })
+        let mut current_block = self.state.first_free_block;
+        let mut list_ptr = &mut self.state.first_free_block;
+        // This iterates the free list until it either finds a block that
+        // is the exact size requested or a block that can be split into
+        // one with the desired size and another block header.
+        while let Some(mut current) = current_block {
+            let block_to_examine = current.as_mut();
+            if block_to_examine.size == full_layout.size() {
+                *list_ptr = block_to_examine.next;
+                return Some(current.cast());
+            } else if block_to_examine.size >= block_after_layout.size() {
+                // can split block
+                let split_block = Block {
+                    size: block_to_examine.size - block_after_layout_offset,
+                    next: block_to_examine.next,
+                };
+                let split_ptr = current
+                    .as_ptr()
+                    .cast::<u8>()
+                    .add(block_after_layout_offset)
+                    .cast();
+                *split_ptr = split_block;
+                *list_ptr = NonNull::new(split_ptr).map(SendNonNull);
+                return Some(current.cast());
+            }
+            current_block = block_to_examine.next;
+            list_ptr = &mut block_to_examine.next;
+        }
+        self.new_block(layout)
     }
 
-    pub unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
+    pub unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
         self.dealloc_no_normalise(ptr, layout);
         self.normalise();
     }
 
-    pub unsafe fn dealloc_no_normalise(&self, ptr: *mut u8, layout: Layout) {
+    pub unsafe fn dealloc_no_normalise(&mut self, ptr: *mut u8, layout: Layout) {
         let new_layout = Block::either_layout(layout).pad_to_align();
-        free(|key| {
-            let mut state = self.state.borrow(key).borrow_mut();
 
-            // note that this is a reference to a pointer
-            let mut list_ptr = &mut state.first_free_block;
+        // note that this is a reference to a pointer
+        let mut list_ptr = &mut self.state.first_free_block;
 
-            // This searches the free list until it finds a block further along
-            // than the block that is being freed. The newly freed block is then
-            // inserted before this block. If the end of the list is reached
-            // then the block is placed at the end with no new block after it.
-            loop {
-                match list_ptr {
-                    Some(mut current_block) => {
-                        if current_block.as_ptr().cast() > ptr {
-                            let new_block_content = Block {
-                                size: new_layout.size(),
-                                next: Some(current_block),
-                            };
-                            *ptr.cast() = new_block_content;
-                            *list_ptr = NonNull::new(ptr.cast()).map(SendNonNull);
-                            break;
-                        }
-                        list_ptr = &mut current_block.as_mut().next;
-                    }
-                    None => {
-                        // reached the end of the list without finding a place to insert the value
-                        let new_block_content = Block {
-                            size: new_layout.size(),
-                            next: None,
-                        };
-                        *ptr.cast() = new_block_content;
-                        *list_ptr = NonNull::new(ptr.cast()).map(SendNonNull);
-                        break;
-                    }
-                }
-            }
-        });
+        // This searches the free list until it finds a block further along
+        // than the block that is being freed. The newly freed block is then
+        // inserted before this block. If the end of the list is reached
+        // then the block is placed at the end with no new block after it.
+        loop {
+            match list_ptr {
+                Some(mut current_block) => {
+                    if current_block.as_ptr().cast() > ptr {
+                        let new_block_content = Block {
+                            size: new_layout.size(),
+                            next: Some(current_block),
+                        };
+                        *ptr.cast() = new_block_content;
+                        *list_ptr = NonNull::new(ptr.cast()).map(SendNonNull);
+                        break;
+                    }
+                    list_ptr = &mut current_block.as_mut().next;
+                }
+                None => {
+                    // reached the end of the list without finding a place to insert the value
+                    let new_block_content = Block {
+                        size: new_layout.size(),
+                        next: None,
+                    };
+                    *ptr.cast() = new_block_content;
+                    *list_ptr = NonNull::new(ptr.cast()).map(SendNonNull);
+                    break;
+                }
+            }
+        }
     }
 }
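
For reference, the coalescing rule that normalise relies on: two free blocks merge exactly when the byte distance between their headers (what offset_from computes) equals the first block's recorded size. A small self-contained illustration (the function and names here are invented for this note, not part of the commit):

// Hypothetical illustration of the adjacency test used by `normalise`.
fn are_adjacent(first: *const u8, first_size: usize, second: *const u8) -> bool {
    // In the real allocator both pointers come from the same heap region.
    (second as usize).wrapping_sub(first as usize) == first_size
}

fn main() {
    let buffer = [0u8; 64];
    let first = buffer.as_ptr();
    let second = unsafe { first.add(32) }; // a 32-byte block ends exactly here
    assert!(are_adjacent(first, 32, second)); // so the two blocks would merge
    let third = unsafe { first.add(40) }; // 8 bytes of gap in between
    assert!(!are_adjacent(first, 32, third)); // no merge across a gap
}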

bump_allocator.rs

@@ -4,35 +4,45 @@ use core::ptr::NonNull;
 use super::SendNonNull;
 use crate::interrupt::free;
-use bare_metal::{CriticalSection, Mutex};
+use bare_metal::Mutex;
 
 pub(crate) struct StartEnd {
     pub start: fn() -> usize,
     pub end: fn() -> usize,
 }
 
+pub(crate) struct BumpAllocatorInner {
+    current_ptr: Option<SendNonNull<u8>>,
+    start_end: StartEnd,
+}
+
 pub(crate) struct BumpAllocator {
-    current_ptr: Mutex<RefCell<Option<SendNonNull<u8>>>>,
-    start_end: Mutex<StartEnd>,
+    inner: Mutex<RefCell<BumpAllocatorInner>>,
 }
 
 impl BumpAllocator {
     pub const fn new(start_end: StartEnd) -> Self {
         Self {
-            current_ptr: Mutex::new(RefCell::new(None)),
-            start_end: Mutex::new(start_end),
+            inner: Mutex::new(RefCell::new(BumpAllocatorInner::new(start_end))),
         }
     }
 }
 
-impl BumpAllocator {
-    pub fn alloc_critical(&self, layout: Layout, cs: CriticalSection) -> Option<NonNull<u8>> {
-        let mut current_ptr = self.current_ptr.borrow(cs).borrow_mut();
+impl BumpAllocatorInner {
+    pub const fn new(start_end: StartEnd) -> Self {
+        Self {
+            current_ptr: None,
+            start_end,
+        }
+    }
+
+    pub fn alloc(&mut self, layout: Layout) -> Option<NonNull<u8>> {
+        let current_ptr = &mut self.current_ptr;
         let ptr = if let Some(c) = *current_ptr {
             c.as_ptr() as usize
         } else {
-            (self.start_end.borrow(cs).start)()
+            (self.start_end.start)()
         };
 
         let alignment_bitmask = layout.align() - 1;
@@ -43,7 +53,7 @@ impl BumpAllocator {
         let resulting_ptr = ptr + amount_to_add;
         let new_current_ptr = resulting_ptr + layout.size();
 
-        if new_current_ptr >= (self.start_end.borrow(cs).end)() {
+        if new_current_ptr >= (self.start_end.end)() {
             return None;
         }
 
@@ -51,8 +61,11 @@ impl BumpAllocator {
         NonNull::new(resulting_ptr as *mut _)
     }
+}
 
-    pub fn alloc_safe(&self, layout: Layout) -> Option<NonNull<u8>> {
-        free(|key| self.alloc_critical(layout, key))
+impl BumpAllocator {
+    fn alloc_safe(&self, layout: Layout) -> Option<NonNull<u8>> {
+        free(|key| self.inner.borrow(key).borrow_mut().alloc(layout))
     }
 }
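
The bump arithmetic itself is unchanged context in these hunks. The alignment step it performs with alignment_bitmask is the usual power-of-two round-up; a minimal sketch follows (the line computing amount_to_add sits outside the hunks, so this shows the standard formula as an assumption, not the crate's exact expression):

// Round `ptr` up to the next multiple of `align` (a power of two).
fn align_up(ptr: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    let alignment_bitmask = align - 1;
    (ptr + alignment_bitmask) & !alignment_bitmask
}

fn main() {
    assert_eq!(align_up(0x1001, 8), 0x1008); // 7 bytes of padding added
    assert_eq!(align_up(0x1008, 8), 0x1008); // already aligned, no padding
}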