diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0085f6a..cc72680 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -56,7 +56,8 @@ jobs: sudo apt update sudo apt install qemu-system-arm - run: qemu-system-arm --version - - run: cargo run --target thumbv7m-none-eabi --example integration_test --all-features + - run: cargo run --target thumbv7m-none-eabi --example llff_integration_test --all-features + - run: cargo run --target thumbv7m-none-eabi --example tlsf_integration_test --all-features clippy: name: Clippy diff --git a/Cargo.toml b/Cargo.toml index febf07f..247517c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -24,14 +24,20 @@ name = "embedded-alloc" version = "0.5.1" [features] +default = ["llff", "tlsf"] allocator_api = [] +# Use the Two-Level Segregated Fit allocator +tlsf = ["rlsf", "const-default"] + # Use the LinkedList first-fit allocator +llff = ["linked_list_allocator"] + [dependencies] critical-section = "1.0" +linked_list_allocator = { version = "0.10.5", default-features = false, optional = true } +rlsf = { version = "0.2.1", default-features = false, optional = true } +const-default = { version = "1.0.0", default-features = false, optional = true } -[dependencies.linked_list_allocator] -default-features = false -version = "0.10.5" [dev-dependencies] cortex-m = { version = "0.7.6", features = ["critical-section-single-core"] } diff --git a/README.md b/README.md index 5fd93d7..a34cb15 100644 --- a/README.md +++ b/README.md @@ -23,7 +23,7 @@ Starting with Rust 1.68, this crate can be used as a global allocator on stable extern crate alloc; use cortex_m_rt::entry; -use embedded_alloc::Heap; +use embedded_alloc::LlffHeap as Heap; #[global_allocator] static HEAP: Heap = Heap::empty(); diff --git a/examples/allocator_api.rs b/examples/allocator_api.rs index 95ab702..39d54cf 100644 --- a/examples/allocator_api.rs +++ b/examples/allocator_api.rs @@ -8,7 +8,7 @@ use alloc::vec::Vec; use core::mem::MaybeUninit; use 
core::panic::PanicInfo; use cortex_m_rt::entry; -use embedded_alloc::Heap; +use embedded_alloc::LlffHeap as Heap; // This is not used, but as of 2023-10-29 allocator_api cannot be used without // a global heap diff --git a/examples/global_alloc.rs b/examples/global_alloc.rs index 812c9f1..d41debd 100644 --- a/examples/global_alloc.rs +++ b/examples/global_alloc.rs @@ -6,7 +6,10 @@ extern crate alloc; use alloc::vec::Vec; use core::panic::PanicInfo; use cortex_m_rt::entry; -use embedded_alloc::Heap; +// Linked-List First Fit Heap allocator (feature = "llff") +use embedded_alloc::LlffHeap as Heap; +// Two-Level Segregated Fit Heap allocator (feature = "tlsf") +// use embedded_alloc::TlsfHeap as Heap; #[global_allocator] static HEAP: Heap = Heap::empty(); diff --git a/examples/integration_test.rs b/examples/llff_integration_test.rs similarity index 94% rename from examples/integration_test.rs rename to examples/llff_integration_test.rs index a45ba87..af006be 100644 --- a/examples/integration_test.rs +++ b/examples/llff_integration_test.rs @@ -7,7 +7,7 @@ //! After toolchain installation this test can be run with: //! //! ```bash -//! cargo +nightly run --target thumbv7m-none-eabi --example integration_test --all-features +//! cargo +nightly run --target thumbv7m-none-eabi --example llff_integration_test --all-features //! ``` //! //! [Embedded Rust Book]: https://docs.rust-embedded.org/book/intro/index.html @@ -23,7 +23,7 @@ use alloc::vec::Vec; use core::mem::{size_of, MaybeUninit}; use cortex_m_rt::entry; use cortex_m_semihosting::{debug, hprintln}; -use embedded_alloc::Heap; +use embedded_alloc::LlffHeap as Heap; #[global_allocator] static HEAP: Heap = Heap::empty(); diff --git a/examples/tlsf_integration_test.rs b/examples/tlsf_integration_test.rs new file mode 100644 index 0000000..574497b --- /dev/null +++ b/examples/tlsf_integration_test.rs @@ -0,0 +1,105 @@ +//! This is a very basic smoke test that runs in QEMU +//! 
Reference the QEMU section of the [Embedded Rust Book] for more information +//! +//! This only tests integration of the allocator on an embedded target. +//! Comprehensive allocator tests are located in the allocator dependency. +//! +//! After toolchain installation this test can be run with: +//! +//! ```bash +//! cargo +nightly run --target thumbv7m-none-eabi --example tlsf_integration_test --all-features +//! ``` +//! +//! [Embedded Rust Book]: https://docs.rust-embedded.org/book/intro/index.html + +#![feature(allocator_api)] +#![no_main] +#![no_std] + +extern crate alloc; +extern crate panic_semihosting; + +use alloc::collections::LinkedList; +use core::mem::MaybeUninit; +use cortex_m_rt::entry; +use cortex_m_semihosting::{debug, hprintln}; +use embedded_alloc::TlsfHeap as Heap; + +#[global_allocator] +static HEAP: Heap = Heap::empty(); +const HEAP_SIZE: usize = 30 * 1024; + +fn test_global_heap() { + const ELEMS: usize = 250; + + let mut allocated = LinkedList::new(); + for _ in 0..ELEMS { + allocated.push_back(0); + } + for i in 0..ELEMS { + allocated.push_back(i as i32); + } + + assert_eq!(allocated.len(), 2 * ELEMS); + + for _ in 0..ELEMS { + allocated.pop_front(); + } + + for i in 0..ELEMS { + assert_eq!(allocated.pop_front().unwrap(), i as i32); + } +} + +fn test_allocator_api() { + // small local heap + const HEAP_SIZE: usize = 256; + let heap_mem: [MaybeUninit<u8>; HEAP_SIZE] = [MaybeUninit::uninit(); HEAP_SIZE]; + let local_heap: Heap = Heap::empty(); + unsafe { local_heap.init(heap_mem.as_ptr() as usize, HEAP_SIZE) } + + const ELEMS: usize = 2; + + let mut allocated = LinkedList::new_in(local_heap); + for _ in 0..ELEMS { + allocated.push_back(0); + } + for i in 0..ELEMS { + allocated.push_back(i as i32); + } + + assert_eq!(allocated.len(), 2 * ELEMS); + + for _ in 0..ELEMS { + allocated.pop_front(); + } + + for i in 0..ELEMS { + assert_eq!(allocated.pop_front().unwrap(), i as i32); + } +} + +#[entry] +fn main() -> ! 
{ + { + static mut HEAP_MEM: [MaybeUninit<u8>; HEAP_SIZE] = [MaybeUninit::uninit(); HEAP_SIZE]; + unsafe { HEAP.init(HEAP_MEM.as_ptr() as usize, HEAP_SIZE) } + } + + #[allow(clippy::type_complexity)] + let tests: &[(fn() -> (), &'static str)] = &[ + (test_global_heap, "test_global_heap"), + (test_allocator_api, "test_allocator_api"), + ]; + + for (test_fn, test_name) in tests { + hprintln!("{}: start", test_name); + test_fn(); + hprintln!("{}: pass", test_name); + } + + // exit QEMU with a success status + debug::exit(debug::EXIT_SUCCESS); + #[allow(clippy::empty_loop)] + loop {} +} diff --git a/src/lib.rs b/src/lib.rs index 999308c..33ac654 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -7,107 +7,237 @@ use core::cell::RefCell; use core::ptr::{self, NonNull}; use critical_section::Mutex; -use linked_list_allocator::Heap as LLHeap; -pub struct Heap { - heap: Mutex<RefCell<LLHeap>>, -} +#[cfg(feature = "llff")] +pub use llff::Heap as LlffHeap; +#[cfg(feature = "tlsf")] +pub use tlsf::Heap as TlsfHeap; -impl Heap { - /// Create a new UNINITIALIZED heap allocator - /// - /// You must initialize this heap using the - /// [`init`](Self::init) method before using the allocator. - pub const fn empty() -> Heap { - Heap { - heap: Mutex::new(RefCell::new(LLHeap::empty())), - } - } +#[cfg(feature = "llff")] +mod llff { + use super::*; + use linked_list_allocator::Heap as LLHeap; - /// Initializes the heap - /// - /// This function must be called BEFORE you run any code that makes use of the - /// allocator. - /// - /// `start_addr` is the address where the heap will be located. - /// - /// `size` is the size of the heap in bytes. - /// - /// Note that: - /// - /// - The heap grows "upwards", towards larger addresses. Thus `start_addr` will - /// be the smallest address used. - /// - /// - The largest address used is `start_addr + size - 1`, so if `start_addr` is - /// `0x1000` and `size` is `0x30000` then the allocator won't use memory at - /// addresses `0x31000` and larger. 
- /// - /// # Safety - /// - /// Obey these or Bad Stuff will happen. - /// - /// - This function must be called exactly ONCE. - /// - `size > 0` - pub unsafe fn init(&self, start_addr: usize, size: usize) { - critical_section::with(|cs| { - self.heap - .borrow(cs) - .borrow_mut() - .init(start_addr as *mut u8, size); - }); + pub struct Heap { + heap: Mutex<RefCell<LLHeap>>, } - /// Returns an estimate of the amount of bytes in use. - pub fn used(&self) -> usize { - critical_section::with(|cs| self.heap.borrow(cs).borrow_mut().used()) + impl Heap { + /// Create a new UNINITIALIZED heap allocator + /// + /// You must initialize this heap using the + /// [`init`](Self::init) method before using the allocator. + pub const fn empty() -> Heap { + Heap { + heap: Mutex::new(RefCell::new(LLHeap::empty())), + } + } + + /// Initializes the heap + /// + /// This function must be called BEFORE you run any code that makes use of the + /// allocator. + /// + /// `start_addr` is the address where the heap will be located. + /// + /// `size` is the size of the heap in bytes. + /// + /// Note that: + /// + /// - The heap grows "upwards", towards larger addresses. Thus `start_addr` will + /// be the smallest address used. + /// + /// - The largest address used is `start_addr + size - 1`, so if `start_addr` is + /// `0x1000` and `size` is `0x30000` then the allocator won't use memory at + /// addresses `0x31000` and larger. + /// + /// # Safety + /// + /// Obey these or Bad Stuff will happen. + /// + /// - This function must be called exactly ONCE. + /// - `size > 0` + pub unsafe fn init(&self, start_addr: usize, size: usize) { + critical_section::with(|cs| { + self.heap + .borrow(cs) + .borrow_mut() + .init(start_addr as *mut u8, size); + }); + } + + /// Returns an estimate of the amount of bytes in use. + pub fn used(&self) -> usize { + critical_section::with(|cs| self.heap.borrow(cs).borrow_mut().used()) + } + + /// Returns an estimate of the amount of bytes available. 
+ pub fn free(&self) -> usize { + critical_section::with(|cs| self.heap.borrow(cs).borrow_mut().free()) + } + + fn alloc(&self, layout: Layout) -> Option<NonNull<u8>> { + critical_section::with(|cs| { + self.heap + .borrow(cs) + .borrow_mut() + .allocate_first_fit(layout) + .ok() + }) + } + + unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) { + critical_section::with(|cs| { + self.heap + .borrow(cs) + .borrow_mut() + .deallocate(NonNull::new_unchecked(ptr), layout) + }); + } } - /// Returns an estimate of the amount of bytes available. - pub fn free(&self) -> usize { - critical_section::with(|cs| self.heap.borrow(cs).borrow_mut().free()) + unsafe impl GlobalAlloc for Heap { + unsafe fn alloc(&self, layout: Layout) -> *mut u8 { + self.alloc(layout) + .map_or(ptr::null_mut(), |allocation| allocation.as_ptr()) + } + + unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) { + self.dealloc(ptr, layout); + } } - fn alloc_first_fit(&self, layout: Layout) -> Result<NonNull<u8>, ()> { - critical_section::with(|cs| self.heap.borrow(cs).borrow_mut().allocate_first_fit(layout)) + #[cfg(feature = "allocator_api")] + mod allocator_api { + use super::*; + use core::{ + alloc::{AllocError, Allocator, Layout}, + ptr::NonNull, + }; + + unsafe impl Allocator for Heap { + fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> { + match layout.size() { + 0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)), + size => self.alloc(layout).map_or(Err(AllocError), |allocation| { + Ok(NonNull::slice_from_raw_parts(allocation, size)) + }), + } + } + + unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) { + if layout.size() != 0 { + self.dealloc(ptr.as_ptr(), layout); + } + } + } + } } -unsafe impl GlobalAlloc for Heap { - unsafe fn alloc(&self, layout: Layout) -> *mut u8 { - self.alloc_first_fit(layout) - .ok() - .map_or(ptr::null_mut(), |allocation| allocation.as_ptr()) - } +#[cfg(feature = "tlsf")] +mod tlsf { + use super::*; + use const_default::ConstDefault; + use rlsf::Tlsf; - unsafe fn 
dealloc(&self, ptr: *mut u8, layout: Layout) { - critical_section::with(|cs| { - self.heap - .borrow(cs) - .borrow_mut() - .deallocate(NonNull::new_unchecked(ptr), layout) - }); + type TlsfHeap = Tlsf<'static, usize, usize, { usize::BITS as usize }, { usize::BITS as usize }>; + + pub struct Heap { + heap: Mutex<RefCell<TlsfHeap>>, } -} -#[cfg(feature = "allocator_api")] -mod allocator_api { - use core::alloc::{AllocError, Allocator, GlobalAlloc, Layout}; - use core::ptr::NonNull; - - unsafe impl Allocator for crate::Heap { - fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> { - match layout.size() { - 0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)), - size => self - .alloc_first_fit(layout) - .map(|allocation| NonNull::slice_from_raw_parts(allocation, size)) - .map_err(|_| AllocError), + impl Heap { + /// Create a new UNINITIALIZED heap allocator + /// + /// You must initialize this heap using the + /// [`init`](Self::init) method before using the allocator. + pub const fn empty() -> Heap { + Heap { + heap: Mutex::new(RefCell::new(ConstDefault::DEFAULT)), + } + } - unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) { - if layout.size() != 0 { - self.dealloc(ptr.as_ptr(), layout); + /// Initializes the heap + /// + /// This function must be called BEFORE you run any code that makes use of the + /// allocator. + /// + /// `start_addr` is the address where the heap will be located. + /// + /// `size` is the size of the heap in bytes. + /// + /// Note that: + /// + /// - The heap grows "upwards", towards larger addresses. Thus `start_addr` will + /// be the smallest address used. + /// + /// - The largest address used is `start_addr + size - 1`, so if `start_addr` is + /// `0x1000` and `size` is `0x30000` then the allocator won't use memory at + /// addresses `0x31000` and larger. + /// + /// # Safety + /// + /// Obey these or Bad Stuff will happen. + /// + /// - This function must be called exactly ONCE. 
+ /// - `size > 0` + pub unsafe fn init(&self, start_addr: usize, size: usize) { + critical_section::with(|cs| { + let block: &[u8] = core::slice::from_raw_parts(start_addr as *const u8, size); + self.heap + .borrow(cs) + .borrow_mut() + .insert_free_block_ptr(block.into()); + }); + } + + fn alloc(&self, layout: Layout) -> Option<NonNull<u8>> { + critical_section::with(|cs| self.heap.borrow(cs).borrow_mut().allocate(layout)) + } + + unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) { + critical_section::with(|cs| { + self.heap + .borrow(cs) + .borrow_mut() + .deallocate(NonNull::new_unchecked(ptr), layout.align()) + }) + } + } + + unsafe impl GlobalAlloc for Heap { + unsafe fn alloc(&self, layout: Layout) -> *mut u8 { + self.alloc(layout) + .map_or(ptr::null_mut(), |allocation| allocation.as_ptr()) + } + + unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) { + self.dealloc(ptr, layout) + } + } + + #[cfg(feature = "allocator_api")] + mod allocator_api { + use super::*; + use core::{ + alloc::{AllocError, Allocator, Layout}, + ptr::NonNull, + }; + + unsafe impl Allocator for Heap { + fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> { + match layout.size() { + 0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)), + size => self.alloc(layout).map_or(Err(AllocError), |allocation| { + Ok(NonNull::slice_from_raw_parts(allocation, size)) + }), + } + } + + unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) { + if layout.size() != 0 { + self.dealloc(ptr.as_ptr(), layout); + } } } }