Add linked list allocator with automatic resizing
Fix memory leaks in exec; remove warnings
This commit is contained in:
70
src/allocator/linked_list.rs
Normal file
70
src/allocator/linked_list.rs
Normal file
@@ -0,0 +1,70 @@
|
||||
use alloc::heap::{Alloc, AllocErr, Layout};
|
||||
use linked_list_allocator::Heap;
|
||||
use spin::Mutex;
|
||||
|
||||
use paging::ActivePageTable;
|
||||
|
||||
// Global heap state behind a spinlock; `None` until `Allocator::init` runs.
static HEAP: Mutex<Option<Heap>> = Mutex::new(None);

// Zero-sized handle through which the kernel performs heap allocation.
pub struct Allocator;
|
||||
|
||||
impl Allocator {
|
||||
pub unsafe fn init(offset: usize, size: usize) {
|
||||
*HEAP.lock() = Some(Heap::new(offset, size));
|
||||
}
|
||||
}
|
||||
|
||||
unsafe impl<'a> Alloc for &'a Allocator {
    /// Allocate memory for `layout`, growing the kernel heap on demand.
    ///
    /// On `AllocErr::Exhausted` the heap is extended by `KERNEL_HEAP_SIZE`
    /// bytes (new pages are mapped directly after the current heap) and the
    /// allocation is retried; any other result — success or a different
    /// error — is returned unchanged.
    unsafe fn alloc(&mut self, mut layout: Layout) -> Result<*mut u8, AllocErr> {
        loop {
            // Hold the HEAP lock only for the allocation attempt itself. It
            // must be released before `map_heap` below, because the paging
            // code can itself allocate and would otherwise deadlock on HEAP.
            let res = if let Some(ref mut heap) = *HEAP.lock() {
                heap.allocate_first_fit(layout)
            } else {
                panic!("__rust_allocate: heap not initialized");
            };

            match res {
                Err(AllocErr::Exhausted { request }) => {
                    // Retry with the layout the allocator reported back.
                    layout = request;

                    // Re-acquire the lock briefly: the current heap size tells
                    // us where the next mapping must start.
                    let size = if let Some(ref heap) = *HEAP.lock() {
                        heap.size()
                    } else {
                        panic!("__rust_allocate: heap not initialized");
                    };

                    println!("Expand heap at {} MB by {} MB", size/1024/1024, ::KERNEL_HEAP_SIZE/1024/1024);

                    // Map a fresh KERNEL_HEAP_SIZE region immediately after the
                    // existing heap. NOTE(review): the lock is not held across
                    // this call, so this assumes only one context expands the
                    // heap at a time — confirm against the kernel's locking
                    // model.
                    super::map_heap(&mut ActivePageTable::new(), ::KERNEL_HEAP_OFFSET + size, ::KERNEL_HEAP_SIZE);

                    // Tell the allocator about the newly mapped memory, then
                    // loop and retry the allocation.
                    if let Some(ref mut heap) = *HEAP.lock() {
                        heap.extend(::KERNEL_HEAP_SIZE);
                    } else {
                        panic!("__rust_allocate: heap not initialized");
                    }
                },
                other => return other,
            }
        }
    }

    /// Return `ptr` (allocated with `layout`) to the heap's free list.
    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
        if let Some(ref mut heap) = *HEAP.lock() {
            heap.deallocate(ptr, layout)
        } else {
            panic!("__rust_deallocate: heap not initialized");
        }
    }

    /// Unrecoverable allocation failure: abort the kernel.
    fn oom(&mut self, error: AllocErr) -> ! {
        panic!("Out of memory: {:?}", error);
    }

    /// Report the (min, max) usable block size for `layout`, as determined by
    /// the underlying heap implementation.
    fn usable_size(&self, layout: &Layout) -> (usize, usize) {
        if let Some(ref mut heap) = *HEAP.lock() {
            heap.usable_size(layout)
        } else {
            panic!("__rust_usable_size: heap not initialized");
        }
    }
}
|
||||
36
src/allocator/mod.rs
Normal file
36
src/allocator/mod.rs
Normal file
@@ -0,0 +1,36 @@
|
||||
use paging::{ActivePageTable, Page, VirtualAddress};
|
||||
use paging::entry::EntryFlags;
|
||||
use paging::mapper::MapperFlushAll;
|
||||
|
||||
// Select the heap implementation at compile time: the slab allocator when the
// "slab" cargo feature is enabled, the linked-list allocator otherwise. Both
// modules export an `Allocator` type with the same interface.
#[cfg(not(feature="slab"))]
pub use self::linked_list::Allocator;

#[cfg(feature="slab")]
pub use self::slab::Allocator;

mod linked_list;
mod slab;
|
||||
|
||||
unsafe fn map_heap(active_table: &mut ActivePageTable, offset: usize, size: usize) {
|
||||
let mut flush_all = MapperFlushAll::new();
|
||||
|
||||
let heap_start_page = Page::containing_address(VirtualAddress::new(offset));
|
||||
let heap_end_page = Page::containing_address(VirtualAddress::new(offset + size-1));
|
||||
for page in Page::range_inclusive(heap_start_page, heap_end_page) {
|
||||
let result = active_table.map(page, EntryFlags::PRESENT | EntryFlags::GLOBAL | EntryFlags::WRITABLE | EntryFlags::NO_EXECUTE);
|
||||
flush_all.consume(result);
|
||||
}
|
||||
|
||||
flush_all.flush(active_table);
|
||||
}
|
||||
|
||||
pub unsafe fn init(active_table: &mut ActivePageTable) {
|
||||
let offset = ::KERNEL_HEAP_OFFSET;
|
||||
let size = ::KERNEL_HEAP_SIZE;
|
||||
|
||||
// Map heap pages
|
||||
map_heap(active_table, offset, size);
|
||||
|
||||
// Initialize global heap
|
||||
Allocator::init(offset, size);
|
||||
}
|
||||
43
src/allocator/slab.rs
Normal file
43
src/allocator/slab.rs
Normal file
@@ -0,0 +1,43 @@
|
||||
use alloc::heap::{Alloc, AllocErr, Layout};
|
||||
use spin::Mutex;
|
||||
use slab_allocator::Heap;
|
||||
|
||||
// Global slab-heap state behind a spinlock; `None` until `Allocator::init`.
static HEAP: Mutex<Option<Heap>> = Mutex::new(None);

// Zero-sized handle through which the kernel performs heap allocation.
pub struct Allocator;
|
||||
|
||||
impl Allocator {
|
||||
pub unsafe fn init(offset: usize, size: usize) {
|
||||
*HEAP.lock() = Some(Heap::new(offset, size));
|
||||
}
|
||||
}
|
||||
|
||||
unsafe impl<'a> Alloc for &'a Allocator {
|
||||
unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
|
||||
if let Some(ref mut heap) = *HEAP.lock() {
|
||||
heap.allocate(layout)
|
||||
} else {
|
||||
panic!("__rust_allocate: heap not initialized");
|
||||
}
|
||||
}
|
||||
|
||||
unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
|
||||
if let Some(ref mut heap) = *HEAP.lock() {
|
||||
heap.deallocate(ptr, layout)
|
||||
} else {
|
||||
panic!("__rust_deallocate: heap not initialized");
|
||||
}
|
||||
}
|
||||
|
||||
fn oom(&mut self, error: AllocErr) -> ! {
|
||||
panic!("Out of memory: {:?}", error);
|
||||
}
|
||||
|
||||
fn usable_size(&self, layout: &Layout) -> (usize, usize) {
|
||||
if let Some(ref mut heap) = *HEAP.lock() {
|
||||
heap.usable_size(layout)
|
||||
} else {
|
||||
panic!("__rust_usable_size: heap not initialized");
|
||||
}
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user