Added a global allocator and heap support
Carter Reeb committed Jun 28, 2021
1 parent 75ae594 commit 3301764
Showing 9 changed files with 196 additions and 1 deletion.
2 changes: 1 addition & 1 deletion .cargo/config.toml
@@ -1,5 +1,5 @@
[unstable]
build-std = ["core"]
build-std = ["core", "compiler_builtins", "alloc"]
build-std-features = ["compiler-builtins-mem"]

[build]
1 change: 1 addition & 0 deletions Cargo.toml
@@ -4,6 +4,7 @@ name = "lateral"
version = "0.1.0"

[dependencies]
linked_list_allocator = "0.9.0"
pc-keyboard = "0.5.0"
pic8259 = "0.10.1"
spin = "0.9.1"
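The new linked_list_allocator dependency provides the fallback heap that src/alloc/block.rs wires in below. As a quick orientation, this is roughly how the calls the commit relies on fit together. Sketch only, not code from the commit; the function name demo_fallback_heap is made up for illustration.

// Sketch only: the linked_list_allocator calls used later in block.rs.
use core::alloc::Layout;
use linked_list_allocator::Heap;

// `start`/`size` must describe a valid, unused memory region (the same
// contract as FixedSizeBlockAllocator::init below).
unsafe fn demo_fallback_heap(start: usize, size: usize) {
    let mut heap = Heap::empty();
    heap.init(start, size);
    let layout = Layout::from_size_align(64, 8).unwrap();
    if let Ok(ptr) = heap.allocate_first_fit(layout) {
        // ptr is a NonNull<u8>; give the block back when done
        heap.deallocate(ptr, layout);
    }
}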
96 changes: 96 additions & 0 deletions src/alloc/block.rs
@@ -0,0 +1,96 @@
use core::alloc::GlobalAlloc;
use core::alloc::Layout;
use core::mem;
use core::ptr;
use core::ptr::NonNull;

use super::lock::Locked;

const BLOCK_SIZES: &[usize] = &[8, 16, 32, 64, 128, 256, 512, 1024, 2048];

struct ListNode {
    next: Option<&'static mut ListNode>,
}

pub struct FixedSizeBlockAllocator {
    list_heads: [Option<&'static mut ListNode>; BLOCK_SIZES.len()],
    fallback_allocator: linked_list_allocator::Heap,
}

impl FixedSizeBlockAllocator {
    /// Creates an empty FixedSizeBlockAllocator.
    pub const fn new() -> Self {
        const EMPTY: Option<&'static mut ListNode> = None;
        FixedSizeBlockAllocator {
            list_heads: [EMPTY; BLOCK_SIZES.len()],
            fallback_allocator: linked_list_allocator::Heap::empty(),
        }
    }

    /// Initialize the allocator with the given heap bounds.
    ///
    /// This function is unsafe because the caller must guarantee that the given
    /// heap bounds are valid and that the heap is unused. This method must be
    /// called only once.
    pub unsafe fn init(&mut self, heap_start: usize, heap_size: usize) {
        self.fallback_allocator.init(heap_start, heap_size);
    }

    /// Allocates from the fallback linked-list allocator, returning a null
    /// pointer on failure.
    fn fallback_alloc(&mut self, layout: Layout) -> *mut u8 {
        match self.fallback_allocator.allocate_first_fit(layout) {
            Ok(ptr) => ptr.as_ptr(),
            Err(_) => ptr::null_mut(),
        }
    }
}

/// Returns the index of the smallest block size in `BLOCK_SIZES` that fits the
/// given layout, or `None` if the request is too large for any fixed block.
fn list_index(layout: &Layout) -> Option<usize> {
    let required_block_size = layout.size().max(layout.align());
    BLOCK_SIZES.iter().position(|&s| s >= required_block_size)
}

unsafe impl GlobalAlloc for Locked<FixedSizeBlockAllocator> {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        let mut allocator = self.lock();
        match list_index(&layout) {
            Some(index) => {
                match allocator.list_heads[index].take() {
                    Some(node) => {
                        // pop a ready-made block from this size class's free list
                        allocator.list_heads[index] = node.next.take();
                        node as *mut ListNode as *mut u8
                    }
                    None => {
                        // no block exists in list => allocate new block
                        let block_size = BLOCK_SIZES[index];
                        // only works if all block sizes are a power of 2
                        let block_align = block_size;
                        let layout = Layout::from_size_align(block_size, block_align).unwrap();
                        allocator.fallback_alloc(layout)
                    }
                }
            }
            None => allocator.fallback_alloc(layout),
        }
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        let mut allocator = self.lock();
        match list_index(&layout) {
            Some(index) => {
                // push the freed block onto the head of its size-class list
                let new_node = ListNode {
                    next: allocator.list_heads[index].take(),
                };
                // verify that block has size and alignment required for storing node
                assert!(mem::size_of::<ListNode>() <= BLOCK_SIZES[index]);
                assert!(mem::align_of::<ListNode>() <= BLOCK_SIZES[index]);
                let new_node_ptr = ptr as *mut ListNode;
                new_node_ptr.write(new_node);
                allocator.list_heads[index] = Some(&mut *new_node_ptr);
            }
            None => {
                // this allocation came from the fallback allocator
                let ptr = NonNull::new(ptr).unwrap();
                allocator.fallback_allocator.deallocate(ptr, layout);
            }
        }
    }
}
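For reference, the size-class selection above is easiest to see with concrete layouts. The snippet below is illustrative only; it is not part of the commit and assumes it sits inside src/alloc/block.rs, since list_index and BLOCK_SIZES are private.

// Sketch only: how layouts map to size classes.
fn list_index_examples() {
    // max(size, align) picks the class: a 20-byte, 8-aligned request
    // rounds up to the 32-byte class, which is index 2 in BLOCK_SIZES.
    let small = Layout::from_size_align(20, 8).unwrap();
    assert_eq!(list_index(&small), Some(2));

    // Requests larger than 2048 bytes return None and therefore go to
    // the linked_list_allocator fallback in alloc()/dealloc().
    let big = Layout::from_size_align(4096, 8).unwrap();
    assert_eq!(list_index(&big), None);
}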
23 changes: 23 additions & 0 deletions src/alloc/global.rs
@@ -0,0 +1,23 @@
extern crate alloc as rust_alloc;

use core::ptr::null_mut;
use rust_alloc::alloc::{GlobalAlloc, Layout};

use super::block::FixedSizeBlockAllocator;
use super::lock::Locked;

#[global_allocator]
pub(super) static ALLOCATOR: Locked<FixedSizeBlockAllocator> =
    Locked::new(FixedSizeBlockAllocator::new());

/// Placeholder allocator that always fails: alloc returns a null pointer and
/// dealloc can therefore never be reached. It is not registered as the global
/// allocator; ALLOCATOR above is.
pub struct Dummy;

unsafe impl GlobalAlloc for Dummy {
    unsafe fn alloc(&self, _layout: Layout) -> *mut u8 {
        null_mut()
    }

    unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {
        panic!("dealloc should never be called")
    }
}
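With ALLOCATOR registered via #[global_allocator], the alloc crate's collections allocate through the fixed-size block allocator automatically. A minimal illustration follows; it is not part of the commit, mirrors the extern crate alias used above, and assumes init_heap from src/alloc/heap.rs has already run.

// Sketch only: allocation works through the registered global allocator
// without naming ALLOCATOR explicitly.
extern crate alloc as rust_alloc;
use rust_alloc::{boxed::Box, vec::Vec};

fn allocator_in_action() {
    let answer = Box::new(42u64); // 8-byte layout => served from the first block list
    let mut squares: Vec<u64> = Vec::new();
    squares.extend((0u64..10).map(|i| i * i)); // growth reallocates through larger classes
    assert_eq!(*answer + squares[3], 51);
}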
38 changes: 38 additions & 0 deletions src/alloc/heap.rs
@@ -0,0 +1,38 @@
use crate::alloc::global::ALLOCATOR;

const KIB: usize = 0x400;
pub const HEAP_START: usize = 0x_4444_4444_0000;
pub const HEAP_SIZE: usize = 100 * KIB;

use x86_64::{
    structures::paging::{
        mapper::MapToError, FrameAllocator, Mapper, Page, PageTableFlags, Size4KiB,
    },
    VirtAddr,
};

/// Maps the heap's virtual page range to freshly allocated frames and then
/// hands the region to the global allocator.
pub fn init_heap(
    mapper: &mut impl Mapper<Size4KiB>,
    frame_allocator: &mut impl FrameAllocator<Size4KiB>,
) -> Result<(), MapToError<Size4KiB>> {
    let page_range = {
        let heap_start = VirtAddr::new(HEAP_START as u64);
        let heap_end = heap_start + HEAP_SIZE - 1u64;
        let heap_start_page = Page::containing_address(heap_start);
        let heap_end_page = Page::containing_address(heap_end);
        Page::range_inclusive(heap_start_page, heap_end_page)
    };

    for page in page_range {
        let frame = frame_allocator
            .allocate_frame()
            .ok_or(MapToError::FrameAllocationFailed)?;
        let flags = PageTableFlags::PRESENT | PageTableFlags::WRITABLE;
        unsafe { mapper.map_to(page, frame, flags, frame_allocator)?.flush() };
    }

    unsafe {
        ALLOCATOR.lock().init(HEAP_START, HEAP_SIZE);
    }

    Ok(())
}
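Since HEAP_START is 4 KiB aligned and HEAP_SIZE is 100 KiB, the range_inclusive loop above maps exactly 25 pages before handing the region to ALLOCATOR. A back-of-envelope sketch, not part of the commit, assuming it sits next to the constants in src/alloc/heap.rs:

// Sketch only: how many 4 KiB pages init_heap maps for the current constants.
const PAGE_SIZE: usize = 4096;

fn mapped_page_count() -> usize {
    // 100 KiB / 4 KiB = 25; the +PAGE_SIZE-1 guards against non-multiple sizes.
    (HEAP_SIZE + PAGE_SIZE - 1) / PAGE_SIZE
}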
15 changes: 15 additions & 0 deletions src/alloc/lock.rs
@@ -0,0 +1,15 @@
/// A wrapper around spin::Mutex that allows trait implementations (such as
/// GlobalAlloc in block.rs) on the locked type.
pub struct Locked<A> {
    inner: spin::Mutex<A>,
}

impl<A> Locked<A> {
    pub const fn new(inner: A) -> Self {
        Locked {
            inner: spin::Mutex::new(inner),
        }
    }

    pub fn lock(&self) -> spin::MutexGuard<A> {
        self.inner.lock()
    }
}
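Locked exists because Rust's orphan rule forbids implementing the foreign GlobalAlloc trait directly on the foreign type spin::Mutex<FixedSizeBlockAllocator>; wrapping the mutex in a local newtype makes the impl in block.rs legal. Usage is otherwise the same as a plain spinlock. Sketch only, not part of the commit:

// Sketch only: Locked wraps any value and hands out the spin::MutexGuard.
fn locked_counter() {
    let counter = Locked::new(0u32);
    *counter.lock() += 1;
    assert_eq!(*counter.lock(), 1);
}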
4 changes: 4 additions & 0 deletions src/alloc/mod.rs
@@ -0,0 +1,4 @@
pub mod block;
pub mod global;
pub mod heap;
pub mod lock;
10 changes: 10 additions & 0 deletions src/lib.rs
@@ -5,7 +5,12 @@
#![feature(custom_test_frameworks)]
#![test_runner(crate::test::runner)]
#![reexport_test_harness_main = "test_harness"]
#![feature(alloc_error_handler)]
#![feature(const_mut_refs)]

extern crate alloc as rust_alloc;

pub mod alloc;
pub mod cpu;
pub mod io;
pub mod mem;
@@ -40,3 +45,8 @@ pub fn init() {
    unsafe { cpu::interrupt::PICS.lock().initialize() };
    x86_64::instructions::interrupts::enable();
}

#[alloc_error_handler]
fn alloc_error_handler(layout: rust_alloc::alloc::Layout) -> ! {
panic!("allocation error: {:?}", layout)
}
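The alloc_error_handler fires whenever the global allocator returns a null pointer, for example if the 100 KiB heap is exhausted. Below is a stress sketch that would exercise block reuse; it is not part of the commit, many_boxes_reuse_blocks is a hypothetical name, and it assumes init_heap has already succeeded.

// Sketch only: allocating and dropping far more boxes than the 100 KiB heap
// could hold at once only works because dealloc returns blocks to the free lists.
fn many_boxes_reuse_blocks() {
    use rust_alloc::boxed::Box;
    for i in 0..crate::alloc::heap::HEAP_SIZE {
        let x = Box::new(i);
        assert_eq!(*x, i);
    } // x is dropped each iteration, freeing its 8-byte block
}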
8 changes: 8 additions & 0 deletions src/main.rs
@@ -11,9 +11,11 @@ bootloader::entry_point!(tests::main);

#[cfg(not(test))]
mod kernel {
    extern crate alloc as rust_alloc;
    use lateral::mem::frame::BootInfoFrameAllocator;
    use lateral::mem::paging;
    use lateral::println;
    use rust_alloc::boxed::Box;
    use x86_64::structures::paging::Page;
    use x86_64::VirtAddr;

@@ -32,6 +34,12 @@ mod kernel {
        let page_ptr: *mut u64 = page.start_address().as_mut_ptr();
        unsafe { page_ptr.offset(400).write_volatile(0x_f021_f077_f065_f04e) };

        lateral::alloc::heap::init_heap(&mut mapper, &mut frame_allocator)
            .expect("heap initialization failed");

        let x = Box::new(100);
        println!("{}", x);

        println!("Hello World!");
        lateral::halt_loop();
    }
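Beyond the single Box above, any alloc collection now works in kernel code once init_heap has returned. The lines below are a sketch only, not part of the commit; they could follow the Box demo inside the same function, where println! and the rust_alloc alias are already in scope.

// Sketch only: a heap-backed Vec growing through several reallocations.
let mut values = rust_alloc::vec::Vec::new();
for i in 0..10u64 {
    values.push(i * i);
}
println!("heap-backed Vec holds {} squares, sum = {}", values.len(), values.iter().sum::<u64>());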
