From 63df552bac78cf17b147a83264ed655957843685 Mon Sep 17 00:00:00 2001 From: James Munns Date: Sat, 2 Jul 2022 03:00:47 +0200 Subject: [PATCH 1/3] Add logic to merge to start/end of alloc region --- src/hole.rs | 170 ++++++++++++++++++++++++++++++++++++++-------------- src/lib.rs | 27 +++------ src/test.rs | 156 ++++++++++++++++++++++++++++++++++++++++++----- 3 files changed, 272 insertions(+), 81 deletions(-) diff --git a/src/hole.rs b/src/hole.rs index 5c25649..3ec4510 100644 --- a/src/hole.rs +++ b/src/hole.rs @@ -1,7 +1,7 @@ use core::alloc::Layout; -use core::convert::TryInto; use core::mem; use core::mem::{align_of, size_of}; +use core::ptr::null_mut; use core::ptr::NonNull; use crate::align_up_size; @@ -11,11 +11,27 @@ use super::align_up; /// A sorted list of holes. It uses the the holes itself to store its nodes. pub struct HoleList { pub(crate) first: Hole, // dummy + pub(crate) bottom: *mut u8, + pub(crate) top: *mut u8, } pub(crate) struct Cursor { prev: NonNull, hole: NonNull, + top: *mut u8, +} + +/// A block containing free memory. It points to the next hole and thus forms a linked list. +pub(crate) struct Hole { + pub size: usize, + pub next: Option>, +} + +/// Basic information about a hole. +#[derive(Debug, Clone, Copy)] +struct HoleInfo { + addr: *mut u8, + size: usize, } impl Cursor { @@ -24,6 +40,7 @@ impl Cursor { self.hole.as_mut().next.map(|nhole| Cursor { prev: self.hole, hole: nhole, + top: self.top, }) } } @@ -133,7 +150,9 @@ impl Cursor { //////////////////////////////////////////////////////////////////////////// // This is where we actually perform surgery on the linked list. //////////////////////////////////////////////////////////////////////////// - let Cursor { mut prev, mut hole } = self; + let Cursor { + mut prev, mut hole, .. + } = self; // Remove the current location from the previous node unsafe { prev.as_mut().next = None; @@ -200,6 +219,42 @@ impl Cursor { } } +// See if we can extend this hole towards the end of the allocation region +// If so: increase the size of the node. If no: keep the node as-is +fn check_merge_top(mut node: NonNull, top: *mut u8) { + let node_u8 = node.as_ptr().cast::(); + let node_sz = unsafe { node.as_ref().size }; + + // If this is the last node, we need to see if we need to merge to the end + // TODO(AJM): We MIGHT need this for merging ALL holes. + let end = node_u8.wrapping_add(node_sz); + let hole_layout = Layout::new::(); + if end < top { + let next_hole_end = align_up(end, hole_layout.align()).wrapping_add(hole_layout.size()); + + if next_hole_end > top { + let offset = (top as usize) - (end as usize); + unsafe { + node.as_mut().size += offset; + } + } + } +} + +// See if we can scoot this hole back to the bottom of the allocation region +// If so: create and return the new hole. If not: return the existing hole +fn check_merge_bottom(node: NonNull, bottom: *mut u8) -> NonNull { + debug_assert_eq!(bottom as usize % align_of::(), 0); + + if bottom.wrapping_add(core::mem::size_of::()) > node.as_ptr().cast::() { + let offset = (node.as_ptr() as usize) - (bottom as usize); + let size = unsafe { node.as_ref() }.size + offset; + unsafe { make_hole(bottom, size) } + } else { + node + } +} + impl HoleList { /// Creates an empty `HoleList`. 
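The two new helpers exist because a freed region can end so close to `top` (or begin so close to `bottom`) that the leftover gap could never hold a `Hole` node; unless an adjacent hole absorbs it, that sliver is stranded forever. Below is a minimal standalone sketch of the `check_merge_top` rule, not the patch's code: addresses are modeled as plain `usize` so it runs without `unsafe`, and `HOLE_SIZE`/`HOLE_ALIGN` are illustrative stand-ins for `size_of::<Hole>()` and `align_of::<Hole>()`.

```rust
// Standalone sketch, not the patch's code: addresses are plain usizes so this
// runs without unsafe. HOLE_SIZE and HOLE_ALIGN are illustrative stand-ins
// for size_of::<Hole>() and align_of::<Hole>().
const HOLE_SIZE: usize = 16;
const HOLE_ALIGN: usize = 8;

fn align_up(addr: usize, align: usize) -> usize {
    // `align` must be a power of two, as it is for real type alignments.
    (addr + align - 1) & !(align - 1)
}

/// Mirrors `check_merge_top`: if the space between the end of the last hole
/// and `top` could never hold another `Hole` node, grow the last hole to
/// swallow that gap instead of stranding it.
fn merged_size(hole_addr: usize, hole_size: usize, top: usize) -> usize {
    let end = hole_addr + hole_size;
    if end < top {
        let next_hole_end = align_up(end, HOLE_ALIGN) + HOLE_SIZE;
        if next_hole_end > top {
            // The trailing sliver can never become its own node; absorb it.
            return hole_size + (top - end);
        }
    }
    hole_size
}

fn main() {
    // A 24-byte hole ending 8 bytes short of `top`: 8 bytes cannot hold a
    // Hole node, so the hole is extended to reach `top` exactly.
    assert_eq!(merged_size(0x1000, 24, 0x1020), 32);
    // Plenty of room left below `top` for a future hole: size is unchanged.
    assert_eq!(merged_size(0x1000, 24, 0x1100), 24);
}
```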
#[cfg(not(feature = "const_mut_refs"))] @@ -209,6 +264,8 @@ impl HoleList { size: 0, next: None, }, + bottom: null_mut(), + top: null_mut(), } } @@ -220,6 +277,8 @@ impl HoleList { size: 0, next: None, }, + bottom: null_mut(), + top: null_mut(), } } @@ -228,6 +287,7 @@ impl HoleList { Some(Cursor { hole, prev: NonNull::new(&mut self.first)?, + top: self.top, }) } else { None @@ -274,8 +334,7 @@ impl HoleList { let aligned_hole_addr = align_up(hole_addr, align_of::()); let ptr = aligned_hole_addr as *mut Hole; ptr.write(Hole { - size: hole_size - .saturating_sub(aligned_hole_addr.offset_from(hole_addr).try_into().unwrap()), + size: hole_size - ((aligned_hole_addr as usize) - (hole_addr as usize)), next: None, }); @@ -284,6 +343,8 @@ impl HoleList { size: 0, next: Some(NonNull::new_unchecked(ptr)), }, + bottom: aligned_hole_addr, + top: hole_addr.wrapping_add(hole_size), } } @@ -370,19 +431,6 @@ impl HoleList { } } -/// A block containing free memory. It points to the next hole and thus forms a linked list. -pub(crate) struct Hole { - pub size: usize, - pub next: Option>, -} - -/// Basic information about a hole. -#[derive(Debug, Clone, Copy)] -struct HoleInfo { - addr: *mut u8, - size: usize, -} - unsafe fn make_hole(addr: *mut u8, size: usize) -> NonNull { let hole_addr = addr.cast::(); debug_assert_eq!( @@ -395,7 +443,7 @@ unsafe fn make_hole(addr: *mut u8, size: usize) -> NonNull { } impl Cursor { - fn try_insert_back(self, mut node: NonNull) -> Result { + fn try_insert_back(self, node: NonNull, bottom: *mut u8) -> Result { // Covers the case where the new hole exists BEFORE the current pointer, // which only happens when previous is the stub pointer if node < self.hole { @@ -409,59 +457,86 @@ impl Cursor { ); debug_assert_eq!(self.previous().size, 0); - let Cursor { mut prev, hole } = self; + let Cursor { + mut prev, + hole, + top, + } = self; unsafe { + let mut node = check_merge_bottom(node, bottom); prev.as_mut().next = Some(node); node.as_mut().next = Some(hole); } - Ok(Cursor { prev, hole: node }) + Ok(Cursor { + prev, + hole: node, + top, + }) } else { Err(self) } } fn try_insert_after(&mut self, mut node: NonNull) -> Result<(), ()> { - if self.hole < node { - let node_u8 = node.as_ptr().cast::(); - let node_size = unsafe { node.as_ref().size }; - let hole_u8 = self.hole.as_ptr().cast::(); - let hole_size = self.current().size; + let node_u8 = node.as_ptr().cast::(); + let node_size = unsafe { node.as_ref().size }; - // Does hole overlap node? - assert!( - hole_u8.wrapping_add(hole_size) <= node_u8, - "Freed node aliases existing hole! Bad free?", - ); - - // If we have a next, does the node overlap next? - if let Some(next) = self.current().next.as_ref() { + // If we have a next, does the node overlap next? + if let Some(next) = self.current().next.as_ref() { + if node < *next { let node_u8 = node_u8 as *const u8; assert!( node_u8.wrapping_add(node_size) <= next.as_ptr().cast::(), "Freed node aliases existing hole! Bad free?", ); + } else { + // The new hole isn't between current and next. + return Err(()); } + } - // All good! Let's insert that after. - unsafe { - let maybe_next = self.hole.as_mut().next.replace(node); - node.as_mut().next = maybe_next; - } - Ok(()) - } else { - Err(()) + // At this point, we either have no "next" pointer, or the hole is + // between current and "next". The following assert can only trigger + // if we've gotten our list out of order. 
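`try_insert_back` is the path taken when a freed node lands in front of every existing hole, and it is where `check_merge_bottom` gets applied. A standalone sketch of that rule follows, again with addresses as plain `usize` and `HOLE_SIZE` standing in for `size_of::<Hole>()`; the real code additionally requires `bottom` to already be `Hole`-aligned.

```rust
// Standalone sketch of the rule applied by check_merge_bottom; addresses are
// plain usizes and HOLE_SIZE stands in for size_of::<Hole>().
const HOLE_SIZE: usize = 16;

/// If the gap between `bottom` and the freed node could never hold another
/// `Hole` node, move the node down to `bottom` and let it absorb the gap.
fn scoot_to_bottom(node_addr: usize, node_size: usize, bottom: usize) -> (usize, usize) {
    if bottom + HOLE_SIZE > node_addr {
        let offset = node_addr - bottom;
        (bottom, node_size + offset)
    } else {
        (node_addr, node_size)
    }
}

fn main() {
    // An 8-byte gap below the node is too small for a Hole, so the node is
    // moved down to `bottom` and grows by 8 bytes.
    assert_eq!(scoot_to_bottom(0x1008, 24, 0x1000), (0x1000, 32));
    // A 32-byte gap could still hold a future hole, so nothing changes.
    assert_eq!(scoot_to_bottom(0x1020, 24, 0x1000), (0x1020, 24));
}
```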
+ debug_assert!(self.hole < node, "Hole list out of order?"); + + let hole_u8 = self.hole.as_ptr().cast::(); + let hole_size = self.current().size; + + // Does hole overlap node? + assert!( + hole_u8.wrapping_add(hole_size) <= node_u8, + "Freed node aliases existing hole! Bad free?", + ); + + // All good! Let's insert that after. + unsafe { + let maybe_next = self.hole.as_mut().next.replace(node); + node.as_mut().next = maybe_next; } + + Ok(()) } // Merge the current node with up to n following nodes fn try_merge_next_n(self, max: usize) { - let Cursor { prev: _, mut hole } = self; + let Cursor { + prev: _, + mut hole, + top, + .. + } = self; for _ in 0..max { // Is there a next node? let mut next = if let Some(next) = unsafe { hole.as_mut() }.next.as_ref() { *next } else { + // Since there is no NEXT node, we need to check whether the current + // hole SHOULD extend to the end, but doesn't. This would happen when + // there isn't enough remaining space to place a hole after the current + // node's placement. + check_merge_top(hole, top); return; }; @@ -515,7 +590,10 @@ fn deallocate(list: &mut HoleList, addr: *mut u8, size: usize) { cursor } else { // Oh hey, there are no "real" holes at all. That means this just - // becomes the only "real" hole! + // becomes the only "real" hole! Check if this is touching the end + // or the beginning of the allocation range + let hole = check_merge_bottom(hole, list.bottom); + check_merge_top(hole, list.top); list.first.next = Some(hole); return; }; @@ -525,7 +603,7 @@ fn deallocate(list: &mut HoleList, addr: *mut u8, size: usize) { // previous location the cursor was pointing to. // // Otherwise, our cursor will point at the current non-"dummy" head of the list - let (cursor, n) = match cursor.try_insert_back(hole) { + let (cursor, n) = match cursor.try_insert_back(hole, list.bottom) { Ok(cursor) => { // Yup! It lives at the front of the list. Hooray! Attempt to merge // it with just ONE next node, since it is at the front of the list @@ -578,8 +656,8 @@ pub mod test { let assumed_location = data.as_mut_ptr().cast(); let heap = Heap::from_slice(data); - assert!(heap.bottom == assumed_location); - assert!(heap.size == HEAP_SIZE); + assert!(heap.bottom() == assumed_location); + assert!(heap.size() == HEAP_SIZE); heap } diff --git a/src/lib.rs b/src/lib.rs index d257629..aea9fcf 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -17,7 +17,6 @@ use core::alloc::GlobalAlloc; use core::alloc::Layout; #[cfg(feature = "alloc_ref")] use core::alloc::{AllocError, Allocator}; -use core::convert::TryInto; use core::mem::MaybeUninit; #[cfg(feature = "use_spin")] use core::ops::Deref; @@ -34,8 +33,6 @@ mod test; /// A fixed size heap backed by a linked list of free memory blocks. pub struct Heap { - bottom: *mut u8, - size: usize, used: usize, holes: HoleList, } @@ -47,8 +44,6 @@ impl Heap { #[cfg(not(feature = "const_mut_refs"))] pub fn empty() -> Heap { Heap { - bottom: core::ptr::null_mut(), - size: 0, used: 0, holes: HoleList::empty(), } @@ -57,8 +52,6 @@ impl Heap { #[cfg(feature = "const_mut_refs")] pub const fn empty() -> Heap { Heap { - bottom: core::ptr::null_mut(), - size: 0, used: 0, holes: HoleList::empty(), } @@ -78,8 +71,6 @@ impl Heap { /// /// The provided memory range must be valid for the `'static` lifetime. 
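`try_merge_next_n` walks forward from the cursor and fuses holes that sit back to back, and only when it runs off the end of the list does it fall back to `check_merge_top`. Here is a self-contained sketch of just the fusing rule, reworked over a sorted `Vec<(addr, size)>` rather than the intrusive linked list the crate actually uses.

```rust
// Self-contained sketch of the fusing rule only; the crate stores holes in an
// intrusive linked list, not a Vec. Each entry is (address, size), sorted by
// address.
fn merge_adjacent(holes: &mut Vec<(usize, usize)>) {
    let mut i = 0;
    while i + 1 < holes.len() {
        let (addr, size) = holes[i];
        let (next_addr, next_size) = holes[i + 1];
        if addr + size == next_addr {
            // The current hole ends exactly where the next one begins: fuse
            // them and check the (new) successor on the next iteration.
            holes[i].1 = size + next_size;
            holes.remove(i + 1);
        } else {
            i += 1;
        }
    }
}

fn main() {
    // Two touching holes followed by one detached hole.
    let mut holes = vec![(0x1000, 32), (0x1020, 64), (0x2000, 16)];
    merge_adjacent(&mut holes);
    assert_eq!(holes, vec![(0x1000, 96), (0x2000, 16)]);
}
```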
pub unsafe fn init(&mut self, heap_bottom: *mut u8, heap_size: usize) { - self.bottom = heap_bottom; - self.size = heap_size; self.used = 0; self.holes = HoleList::new(heap_bottom, heap_size); } @@ -100,7 +91,7 @@ impl Heap { /// This method panics if the heap is already initialized. pub fn init_from_slice(&mut self, mem: &'static mut [MaybeUninit]) { assert!( - self.bottom.is_null(), + self.bottom().is_null(), "The heap has already been initialized." ); let size = mem.len(); @@ -128,8 +119,6 @@ impl Heap { Self::empty() } else { Heap { - bottom: heap_bottom, - size: heap_size, used: 0, holes: HoleList::new(heap_bottom, heap_size), } @@ -184,17 +173,17 @@ impl Heap { /// Returns the bottom address of the heap. pub fn bottom(&self) -> *mut u8 { - self.bottom + self.holes.bottom } /// Returns the size of the heap. pub fn size(&self) -> usize { - self.size + (self.top() as usize) - (self.bottom() as usize) } /// Return the top address of the heap pub fn top(&self) -> *mut u8 { - self.bottom.wrapping_add(self.size) + self.holes.top } /// Returns the size of the used part of the heap @@ -204,7 +193,7 @@ impl Heap { /// Returns the size of the free part of the heap pub fn free(&self) -> usize { - self.size - self.used + self.size() - self.used } /// Extends the size of the heap by creating a new hole at the end @@ -219,7 +208,7 @@ impl Heap { let layout = Layout::from_size_align(by, 1).unwrap(); self.holes .deallocate(NonNull::new_unchecked(top as *mut u8), layout); - self.size += by; + self.holes.top = self.holes.top.add(by); } } @@ -271,8 +260,6 @@ impl LockedHeap { /// The provided memory range must be valid for the `'static` lifetime. pub unsafe fn new(heap_bottom: *mut u8, heap_size: usize) -> LockedHeap { LockedHeap(Spinlock::new(Heap { - bottom: heap_bottom, - size: heap_size, used: 0, holes: HoleList::new(heap_bottom, heap_size), })) @@ -325,5 +312,5 @@ pub fn align_up_size(size: usize, align: usize) -> usize { /// so that x >= addr. The alignment must be a power of 2. pub fn align_up(addr: *mut u8, align: usize) -> *mut u8 { let offset = addr.align_offset(align); - addr.wrapping_offset(offset.try_into().unwrap()) + addr.wrapping_add(offset) } diff --git a/src/test.rs b/src/test.rs index f61bc10..d293625 100644 --- a/src/test.rs +++ b/src/test.rs @@ -23,8 +23,8 @@ fn new_heap() -> Heap { let assumed_location = data.as_mut_ptr().cast(); let heap = Heap::from_slice(data); - assert!(heap.bottom == assumed_location); - assert!(heap.size == HEAP_SIZE); + assert!(heap.bottom() == assumed_location); + assert!(heap.size() == HEAP_SIZE); heap } @@ -37,8 +37,8 @@ fn new_max_heap() -> Heap { // Unsafe so that we have provenance over the whole allocation. 
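With `bottom` and `size` removed from `Heap`, the region bounds now live only in the `HoleList`, and every `Heap` accessor is derived from them, so the two can no longer drift apart; note how `extend` now just bumps `holes.top`. A compressed sketch of that shape, with simplified stand-in types rather than the crate's actual ones:

```rust
// Compressed sketch of the refactor's shape; `Holes` and `Heap` here are
// simplified stand-ins, not the crate's actual types.
struct Holes {
    bottom: *mut u8,
    top: *mut u8,
}

struct Heap {
    used: usize,
    holes: Holes,
}

impl Heap {
    fn bottom(&self) -> *mut u8 {
        self.holes.bottom
    }
    fn top(&self) -> *mut u8 {
        self.holes.top
    }
    // The size is no longer stored; it is always top - bottom.
    fn size(&self) -> usize {
        self.top() as usize - self.bottom() as usize
    }
    fn free(&self) -> usize {
        self.size() - self.used
    }
}

fn main() {
    let mut region = [0u8; 64];
    let bottom = region.as_mut_ptr();
    let heap = Heap {
        used: 16,
        holes: Holes {
            bottom,
            top: bottom.wrapping_add(region.len()),
        },
    };
    assert_eq!(heap.size(), 64);
    assert_eq!(heap.free(), 48);
}
```

Because the size is recomputed on every call, growing the region by moving `holes.top` is automatically reflected in `size()` and `free()`.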
let heap = unsafe { Heap::new(start_ptr, HEAP_SIZE) }; - assert!(heap.bottom == start_ptr); - assert!(heap.size == HEAP_SIZE); + assert!(heap.bottom() == start_ptr); + assert!(heap.size() == HEAP_SIZE); heap } @@ -65,15 +65,15 @@ fn allocate_double_usize() { let addr = heap.allocate_first_fit(layout.unwrap()); assert!(addr.is_ok()); let addr = addr.unwrap().as_ptr(); - assert!(addr == heap.bottom); + assert!(addr == heap.bottom()); let (hole_addr, hole_size) = heap.holes.first_hole().expect("ERROR: no hole left"); - assert!(hole_addr == heap.bottom.wrapping_add(size)); - assert!(hole_size == heap.size - size); + assert!(hole_addr == heap.bottom().wrapping_add(size)); + assert!(hole_size == heap.size() - size); unsafe { assert_eq!( - (*((addr.wrapping_offset(size.try_into().unwrap())) as *const Hole)).size, - heap.size - size + (*((addr.wrapping_add(size)) as *const Hole)).size, + heap.size() - size ); } } @@ -90,7 +90,7 @@ fn allocate_and_free_double_usize() { heap.deallocate(x, layout.clone()); let real_first = heap.holes.first.next.as_ref().unwrap().as_ref(); - assert_eq!(real_first.size, heap.size); + assert_eq!(real_first.size, heap.size()); assert!(real_first.next.is_none()); } } @@ -110,7 +110,7 @@ fn deallocate_right_before() { heap.deallocate(x, layout.clone()); assert_eq!((*(x.as_ptr() as *const Hole)).size, layout.size() * 2); heap.deallocate(z, layout.clone()); - assert_eq!((*(x.as_ptr() as *const Hole)).size, heap.size); + assert_eq!((*(x.as_ptr() as *const Hole)).size, heap.size()); } } @@ -130,7 +130,7 @@ fn deallocate_right_behind() { heap.deallocate(y, layout.clone()); assert_eq!((*(x.as_ptr() as *const Hole)).size, size * 2); heap.deallocate(z, layout.clone()); - assert_eq!((*(x.as_ptr() as *const Hole)).size, heap.size); + assert_eq!((*(x.as_ptr() as *const Hole)).size, heap.size()); } } @@ -154,7 +154,7 @@ fn deallocate_middle() { heap.deallocate(y, layout.clone()); assert_eq!((*(x.as_ptr() as *const Hole)).size, size * 3); heap.deallocate(a, layout.clone()); - assert_eq!((*(x.as_ptr() as *const Hole)).size, heap.size); + assert_eq!((*(x.as_ptr() as *const Hole)).size, heap.size()); } } @@ -177,6 +177,134 @@ fn reallocate_double_usize() { assert_eq!(x, y); } +#[test] +fn allocate_many_size_aligns() { + use core::ops::{Range, RangeInclusive}; + + #[cfg(not(miri))] + const SIZE: RangeInclusive = 1..=512; + + #[cfg(miri)] + const SIZE: RangeInclusive = 256..=(256 + core::mem::size_of::()); + + #[cfg(not(miri))] + const ALIGN: Range = 0..10; + + #[cfg(miri)] + const ALIGN: Range = 1..4; + + #[cfg(not(miri))] + const STRATS: Range = 0..4; + + #[cfg(miri)] + const STRATS: Range = 0..2; + + let mut heap = new_heap(); + assert_eq!(heap.size(), 1000); + + heap.holes.debug(); + + let max_alloc = Layout::from_size_align(1000, 1).unwrap(); + let full = heap.allocate_first_fit(max_alloc).unwrap(); + unsafe { + heap.deallocate(full, max_alloc); + } + + heap.holes.debug(); + + struct Alloc { + alloc: NonNull, + layout: Layout, + } + + for strat in STRATS { + for align in ALIGN { + for size in SIZE { + println!("========================================================="); + println!("Align: {}", 1 << align); + println!("Size: {}", size); + println!("Free Pattern: {}/0..4", strat); + println!(); + let mut allocs = vec![]; + + let layout = Layout::from_size_align(size, 1 << align).unwrap(); + while let Ok(alloc) = heap.allocate_first_fit(layout) { + heap.holes.debug(); + allocs.push(Alloc { alloc, layout }); + } + + println!("Allocs: {} - {} bytes", allocs.len(), allocs.len() * size); + + 
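`allocate_many_size_aligns` sweeps every size in `SIZE` against every power-of-two alignment `1 << align` and every free pattern in `STRATS`, exhausting the heap each time. A tiny self-contained sketch of how that size-by-align grid is formed, with shortened ranges standing in for the test's:

```rust
use core::alloc::Layout;
use core::ops::{Range, RangeInclusive};

// Shortened stand-ins for the test's SIZE and ALIGN ranges.
const SIZE: RangeInclusive<usize> = 1..=32;
const ALIGN: Range<usize> = 0..4;

fn main() {
    for align_pow in ALIGN {
        // Alignments are always powers of two: 1, 2, 4, 8, ...
        let align = 1usize << align_pow;
        for size in SIZE {
            let layout = Layout::from_size_align(size, align)
                .expect("a power-of-two alignment always forms a valid layout");
            assert_eq!(layout.align(), align);
            assert_eq!(layout.size(), size);
        }
    }
}
```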
match strat { + 0 => { + // Forward + allocs.drain(..).for_each(|a| unsafe { + heap.deallocate(a.alloc, a.layout); + heap.holes.debug(); + }); + } + 1 => { + // Backwards + allocs.drain(..).rev().for_each(|a| unsafe { + heap.deallocate(a.alloc, a.layout); + heap.holes.debug(); + }); + } + 2 => { + // Interleaved forwards + let mut a = Vec::new(); + let mut b = Vec::new(); + for (i, alloc) in allocs.drain(..).enumerate() { + if (i % 2) == 0 { + a.push(alloc); + } else { + b.push(alloc); + } + } + a.drain(..).for_each(|a| unsafe { + heap.deallocate(a.alloc, a.layout); + heap.holes.debug(); + }); + b.drain(..).for_each(|a| unsafe { + heap.deallocate(a.alloc, a.layout); + heap.holes.debug(); + }); + } + 3 => { + // Interleaved backwards + let mut a = Vec::new(); + let mut b = Vec::new(); + for (i, alloc) in allocs.drain(..).rev().enumerate() { + if (i % 2) == 0 { + a.push(alloc); + } else { + b.push(alloc); + } + } + a.drain(..).for_each(|a| unsafe { + heap.deallocate(a.alloc, a.layout); + heap.holes.debug(); + }); + b.drain(..).for_each(|a| unsafe { + heap.deallocate(a.alloc, a.layout); + heap.holes.debug(); + }); + } + _ => panic!(), + } + + println!("MAX CHECK"); + + let full = heap.allocate_first_fit(max_alloc).unwrap(); + unsafe { + heap.deallocate(full, max_alloc); + } + println!(); + } + } + } +} + #[test] fn allocate_multiple_sizes() { let mut heap = new_heap(); @@ -252,8 +380,6 @@ fn new_heap_skip(ct: usize) -> Heap { let heap_space = Box::leak(Box::new(Chonk::::new())); let data = &mut heap_space.data[ct..]; let heap = Heap::from_slice(data); - // assert!(heap.bottom == assumed_location); - // assert!(heap.size == HEAP_SIZE); heap } From 1bf1b9af83bc075a41a48fc8d399a588bed0dae0 Mon Sep 17 00:00:00 2001 From: James Munns Date: Sat, 2 Jul 2022 03:08:34 +0200 Subject: [PATCH 2/3] Update changelog, remove out of date TODO --- Changelog.md | 4 ++++ src/hole.rs | 1 - 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/Changelog.md b/Changelog.md index 8828f6b..e4454ce 100644 --- a/Changelog.md +++ b/Changelog.md @@ -1,5 +1,9 @@ # Unreleased +- Fixed logic for freeing nodes ([#64]) + +[#64]: https://github.com/rust-osdev/linked-list-allocator/pull/64 + # 0.10.0 – 2022-06-27 - Changed constructor to take `*mut u8` instead of `usize` ([#62]) diff --git a/src/hole.rs b/src/hole.rs index 3ec4510..8284e0c 100644 --- a/src/hole.rs +++ b/src/hole.rs @@ -226,7 +226,6 @@ fn check_merge_top(mut node: NonNull, top: *mut u8) { let node_sz = unsafe { node.as_ref().size }; // If this is the last node, we need to see if we need to merge to the end - // TODO(AJM): We MIGHT need this for merging ALL holes. let end = node_u8.wrapping_add(node_sz); let hole_layout = Layout::new::(); if end < top { From 2878c054ad94f28a50aa74aa4ae2d2e0a7e448ee Mon Sep 17 00:00:00 2001 From: James Munns Date: Sat, 2 Jul 2022 03:17:19 +0200 Subject: [PATCH 3/3] Test more in miri, but don't print to the console --- src/test.rs | 30 +++++++++++++++++++++--------- 1 file changed, 21 insertions(+), 9 deletions(-) diff --git a/src/test.rs b/src/test.rs index d293625..0a3b08c 100644 --- a/src/test.rs +++ b/src/test.rs @@ -193,12 +193,8 @@ fn allocate_many_size_aligns() { #[cfg(miri)] const ALIGN: Range = 1..4; - #[cfg(not(miri))] const STRATS: Range = 0..4; - #[cfg(miri)] - const STRATS: Range = 0..2; - let mut heap = new_heap(); assert_eq!(heap.size(), 1000); @@ -217,22 +213,29 @@ fn allocate_many_size_aligns() { layout: Layout, } + // NOTE: Printing to the console SIGNIFICANTLY slows down miri. 
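Free patterns 2 and 3 split the allocations by index parity and release the two halves one after the other, so neighbouring blocks are rarely freed back to back and the merge logic gets exercised from the middle of the list as well as from its ends. A standalone sketch of that split, with plain integers in place of the test's `Alloc` handles:

```rust
// Standalone sketch; plain integers stand in for the test's Alloc handles.
fn split_interleaved<T>(items: impl Iterator<Item = T>) -> (Vec<T>, Vec<T>) {
    let mut even = Vec::new();
    let mut odd = Vec::new();
    for (i, item) in items.enumerate() {
        if i % 2 == 0 {
            even.push(item);
        } else {
            odd.push(item);
        }
    }
    (even, odd)
}

fn main() {
    // Strategy 2: walk the allocations forwards, free `even` then `odd`.
    let (even, odd) = split_interleaved(0..6);
    assert_eq!(even, vec![0, 2, 4]);
    assert_eq!(odd, vec![1, 3, 5]);

    // Strategy 3: same split, but the allocations are walked in reverse first.
    let (even, odd) = split_interleaved((0..6).rev());
    assert_eq!(even, vec![5, 3, 1]);
    assert_eq!(odd, vec![4, 2, 0]);
}
```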
+ for strat in STRATS { for align in ALIGN { for size in SIZE { - println!("========================================================="); - println!("Align: {}", 1 << align); - println!("Size: {}", size); - println!("Free Pattern: {}/0..4", strat); - println!(); + #[cfg(not(miri))] + { + println!("========================================================="); + println!("Align: {}", 1 << align); + println!("Size: {}", size); + println!("Free Pattern: {}/0..4", strat); + println!(); + } let mut allocs = vec![]; let layout = Layout::from_size_align(size, 1 << align).unwrap(); while let Ok(alloc) = heap.allocate_first_fit(layout) { + #[cfg(not(miri))] heap.holes.debug(); allocs.push(Alloc { alloc, layout }); } + #[cfg(not(miri))] println!("Allocs: {} - {} bytes", allocs.len(), allocs.len() * size); match strat { @@ -240,6 +243,7 @@ fn allocate_many_size_aligns() { // Forward allocs.drain(..).for_each(|a| unsafe { heap.deallocate(a.alloc, a.layout); + #[cfg(not(miri))] heap.holes.debug(); }); } @@ -247,6 +251,7 @@ fn allocate_many_size_aligns() { // Backwards allocs.drain(..).rev().for_each(|a| unsafe { heap.deallocate(a.alloc, a.layout); + #[cfg(not(miri))] heap.holes.debug(); }); } @@ -263,10 +268,12 @@ fn allocate_many_size_aligns() { } a.drain(..).for_each(|a| unsafe { heap.deallocate(a.alloc, a.layout); + #[cfg(not(miri))] heap.holes.debug(); }); b.drain(..).for_each(|a| unsafe { heap.deallocate(a.alloc, a.layout); + #[cfg(not(miri))] heap.holes.debug(); }); } @@ -283,22 +290,27 @@ fn allocate_many_size_aligns() { } a.drain(..).for_each(|a| unsafe { heap.deallocate(a.alloc, a.layout); + #[cfg(not(miri))] heap.holes.debug(); }); b.drain(..).for_each(|a| unsafe { heap.deallocate(a.alloc, a.layout); + #[cfg(not(miri))] heap.holes.debug(); }); } _ => panic!(), } + #[cfg(not(miri))] println!("MAX CHECK"); let full = heap.allocate_first_fit(max_alloc).unwrap(); unsafe { heap.deallocate(full, max_alloc); } + + #[cfg(not(miri))] println!(); } }
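Patch 3 keeps all four free patterns running under Miri but compiles the console output out, since printing dominates Miri's runtime while the allocator operations themselves stay cheap. A minimal sketch of that gating pattern, not tied to the test's code:

```rust
// Minimal sketch of the gating pattern; `noisy_step` is a made-up stand-in
// for the test body, which gates its println! and heap.holes.debug() calls
// the same way.
fn noisy_step(i: usize) {
    // Compiled out entirely under Miri, where console output is very slow.
    #[cfg(not(miri))]
    println!("step {i}");

    // The actual work under test still runs in both configurations.
    let _ = i;
}

fn main() {
    for i in 0..3 {
        noisy_step(i);
    }
}
```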