From a65108703fc2c71c56386f7d31bc68497a3896f5 Mon Sep 17 00:00:00 2001
From: Blindspot22
Date: Wed, 7 Aug 2024 10:57:56 +0100
Subject: [PATCH] fixed size block allocator

---
 src/allocator.rs                  |  8 +--
 src/allocator/fixed_size_block.rs | 90 +++++++++++++++++++++++++++++++
 2 files changed, 95 insertions(+), 3 deletions(-)
 create mode 100644 src/allocator/fixed_size_block.rs

diff --git a/src/allocator.rs b/src/allocator.rs
index 8b6a1f2..91d6575 100644
--- a/src/allocator.rs
+++ b/src/allocator.rs
@@ -1,3 +1,4 @@
+pub mod fixed_size_block;
 pub mod linked_list;
 pub mod bump;
 use x86_64::{
@@ -7,15 +8,16 @@ use x86_64::{
     VirtAddr,
 };
 use alloc::alloc::{GlobalAlloc, Layout};
-use linked_list::LinkedListAllocator;
 use core::ptr::null_mut;
+use fixed_size_block::FixedSizeBlockAllocator;
+
 pub const HEAP_START: usize = 0x_4444_4444_0000;
 pub const HEAP_SIZE: usize = 100 * 1024; // 100 KiB
 
 #[global_allocator]
-static ALLOCATOR: Locked<LinkedListAllocator> =
-    Locked::new(LinkedListAllocator::new());
+static ALLOCATOR: Locked<FixedSizeBlockAllocator> = Locked::new(
+    FixedSizeBlockAllocator::new());
 
 /// A wrapper around spin::Mutex to permit trait implementations.
 pub struct Locked<A> {
     inner: spin::Mutex<A>,
diff --git a/src/allocator/fixed_size_block.rs b/src/allocator/fixed_size_block.rs
new file mode 100644
index 0000000..6d2a015
--- /dev/null
+++ b/src/allocator/fixed_size_block.rs
@@ -0,0 +1,90 @@
+use super::Locked;
+use alloc::alloc::GlobalAlloc;
+use alloc::alloc::Layout;
+use core::{mem, ptr, ptr::NonNull};
+struct ListNode {
+    next: Option<&'static mut ListNode>,
+}
+
+const BLOCK_SIZES: &[usize] = &[8, 16, 32, 64, 128, 256, 512, 1024, 2048];
+
+pub struct FixedSizeBlockAllocator {
+    list_heads: [Option<&'static mut ListNode>; BLOCK_SIZES.len()],
+    fallback_allocator: linked_list_allocator::Heap,
+}
+
+unsafe impl GlobalAlloc for Locked<FixedSizeBlockAllocator> {
+    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
+        let mut allocator = self.lock();
+        match FixedSizeBlockAllocator::list_index(&layout) {
+            Some(index) => {
+                match allocator.list_heads[index].take() {
+                    Some(node) => {
+                        allocator.list_heads[index] = node.next.take();
+                        node as *mut ListNode as *mut u8
+                    }
+                    None => {
+                        // no block exists in list => allocate new block
+                        let block_size = BLOCK_SIZES[index];
+                        // only works if all block sizes are a power of 2
+                        let block_align = block_size;
+                        let layout = Layout::from_size_align(block_size, block_align)
+                            .unwrap();
+                        allocator.fallback_alloc(layout)
+                    }
+                }
+            }
+            None => allocator.fallback_alloc(layout),
+        }
+    }
+
+    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
+        let mut allocator = self.lock();
+        match FixedSizeBlockAllocator::list_index(&layout) {
+            Some(index) => {
+                let new_node = ListNode {
+                    next: allocator.list_heads[index].take(),
+                };
+                // verify that block has size and alignment required for storing node
+                assert!(mem::size_of::<ListNode>() <= BLOCK_SIZES[index]);
+                assert!(mem::align_of::<ListNode>() <= BLOCK_SIZES[index]);
+                let new_node_ptr = ptr as *mut ListNode;
+                new_node_ptr.write(new_node);
+                allocator.list_heads[index] = Some(&mut *new_node_ptr);
+            }
+            None => {
+                let ptr = NonNull::new(ptr).unwrap();
+                allocator.fallback_allocator.deallocate(ptr, layout);
+            }
+        }
+    }
+}
+
+impl FixedSizeBlockAllocator {
+    /// Creates an empty FixedSizeBlockAllocator.
+    pub const fn new() -> Self {
+        const EMPTY: Option<&'static mut ListNode> = None;
+        FixedSizeBlockAllocator {
+            list_heads: [EMPTY; BLOCK_SIZES.len()],
+            fallback_allocator: linked_list_allocator::Heap::empty(),
+        }
+    }
+
+    pub unsafe fn init(&mut self, heap_start: usize, heap_size: usize) {
+        self.fallback_allocator.init(heap_start, heap_size);
+    }
+
+    /// Allocates using the fallback allocator.
+    fn fallback_alloc(&mut self, layout: Layout) -> *mut u8 {
+        match self.fallback_allocator.allocate_first_fit(layout) {
+            Ok(ptr) => ptr.as_ptr(),
+            Err(_) => ptr::null_mut(),
+        }
+    }
+
+    /// Returns an index into the `BLOCK_SIZES` array.
+    fn list_index(layout: &Layout) -> Option<usize> {
+        let required_block_size = layout.size().max(layout.align());
+        BLOCK_SIZES.iter().position(|&s| s >= required_block_size)
+    }
+}
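
---

Note (not part of the patch): a minimal smoke-test sketch showing how the new
allocator behaves once installed as the global allocator. It assumes the heap
has already been mapped and initialized (as in the crate's existing heap setup);
the function name and the concrete sizes are illustrative only.

    use alloc::{boxed::Box, vec::Vec};

    fn allocator_smoke_test() {
        // 8-byte, 8-aligned allocation: list_index picks BLOCK_SIZES[0],
        // so this is served from the smallest fixed-size block list.
        let x = Box::new(41u64);
        assert_eq!(*x, 41);

        // 4096-byte allocation: larger than the largest block size (2048),
        // so list_index returns None and the request goes through the
        // linked_list_allocator fallback instead.
        let big = Vec::<u8>::with_capacity(4096);
        assert!(big.capacity() >= 4096);

        // Dropping `x` pushes its block onto the matching free list, so the
        // next same-sized allocation can reuse it without the fallback.
        drop(x);
        let y = Box::new(13u64);
        assert_eq!(*y, 13);
    }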