diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 0604fa8..42f8b5a 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -59,3 +59,35 @@ jobs:
       # Test
       - name: Test
         run: cargo test ${{ matrix.profile }} ${{ matrix.features }}
+
+  miri_test:
+    name: Miri Test
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v3
+      - name: Set up Rust
+        uses: actions-rs/toolchain@v1
+        with:
+          profile: minimal
+          toolchain: nightly
+      - name: Install miri
+        run: rustup +nightly component add miri
+      - name: Run miri tests
+        run: cargo miri test
+
+  asan_test:
+    name: Address Sanitizer Test
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v3
+      - name: Set up Rust
+        uses: actions-rs/toolchain@v1
+        with:
+          profile: minimal
+          toolchain: nightly
+      - name: Install miri
+        run: rustup +nightly component add miri
+      - name: Run ASan tests
+        run: RUSTFLAGS="-Z sanitizer=address" cargo test
diff --git a/src/lib.rs b/src/lib.rs
index 9bb94ee..c42c6b0 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,4 +1,4 @@
-#![cfg_attr(feature = "no_std", no_std)]
+#![cfg_attr(not(test), cfg_attr(feature = "no_std", no_std))]
 #![feature(allocator_api)]
 #![cfg_attr(not(doctest), doc = include_str!("../README.md"))]
 
@@ -8,3 +8,11 @@ extern crate core;
 pub mod claim;
 pub mod try_clone;
 pub mod vec;
+pub mod vec_deque;
+
+#[cfg(test)]
+pub(crate) mod testing;
+
+#[cfg(test)]
+#[global_allocator]
+static GLOBAL: testing::GlobalAllocTestGuardAllocator = testing::GlobalAllocTestGuardAllocator;
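A note on the `lib.rs` change above: the nested `cfg_attr` gates `no_std` on both conditions at once, so test builds always have `std` available, which the test-only `#[global_allocator]` registered below relies on. An equivalent, flatter spelling (not part of the diff, shown only for orientation):

```rust
// Equivalent to the nested form: `no_std` applies only when we are NOT
// compiling tests AND the "no_std" feature is enabled.
#![cfg_attr(all(not(test), feature = "no_std"), no_std)]
```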
diff --git a/src/testing.rs b/src/testing.rs
new file mode 100644
index 0000000..7998b27
--- /dev/null
+++ b/src/testing.rs
@@ -0,0 +1,199 @@
+use crate::claim::Claim;
+use alloc::sync::Arc;
+use core::ptr::NonNull;
+use core::sync::atomic::{AtomicUsize, Ordering};
+use std::alloc::{AllocError, Allocator, Global, GlobalAlloc, Layout, System};
+
+thread_local! {
+    static GLOBAL_ALLOC_ALLOWED: std::cell::RefCell<bool> = const { std::cell::RefCell::new(true) };
+}
+
+struct NoPubCtor;
+
+/// A guard that makes the test panic if it performs global allocation on the
+/// current thread while the guard is held.
+pub struct NoGlobalAllocGuard(NoPubCtor);
+
+impl NoGlobalAllocGuard {
+    pub fn new() -> Self {
+        GLOBAL_ALLOC_ALLOWED.with(|alloc_allowed| {
+            let mut alloc_allowed = alloc_allowed.borrow_mut();
+            if !*alloc_allowed {
+                panic!("NoGlobalAllocGuard is not re-entrant.");
+            }
+            *alloc_allowed = false; // Disable global allocation
+        });
+
+        Self(NoPubCtor)
+    }
+}
+
+impl Drop for NoGlobalAllocGuard {
+    fn drop(&mut self) {
+        GLOBAL_ALLOC_ALLOWED.with(|alloc_allowed| {
+            let mut alloc_allowed = alloc_allowed.borrow_mut();
+            *alloc_allowed = true;
+        });
+    }
+}
+
+pub struct AllowGlobalAllocGuard {
+    was_allowed: bool,
+}
+
+impl AllowGlobalAllocGuard {
+    pub fn new() -> Self {
+        let was_allowed = GLOBAL_ALLOC_ALLOWED.with(|alloc_allowed| {
+            let was_allowed = *alloc_allowed.borrow();
+            if !was_allowed {
+                let mut alloc_allowed = alloc_allowed.borrow_mut();
+                *alloc_allowed = true;
+            }
+            was_allowed
+        });
+
+        Self { was_allowed }
+    }
+}
+
+impl Drop for AllowGlobalAllocGuard {
+    fn drop(&mut self) {
+        GLOBAL_ALLOC_ALLOWED.with(|alloc_allowed| {
+            let mut alloc_allowed = alloc_allowed.borrow_mut();
+            *alloc_allowed = self.was_allowed;
+        });
+    }
+}
+
+/// Enforces the `NoGlobalAllocGuard` by acting as the global allocator in tests.
+pub struct GlobalAllocTestGuardAllocator;
+
+impl GlobalAllocTestGuardAllocator {
+    fn is_allowed(&self) -> bool {
+        GLOBAL_ALLOC_ALLOWED.with(|alloc_allowed| {
+            *alloc_allowed.borrow() // Check if allocation is allowed for the current thread
+        })
+    }
+
+    fn guard(&self) {
+        if !self.is_allowed() {
+            panic!("Caught unexpected global allocation with the NoGlobalAllocGuard. Run tests under debugger.");
+        }
+    }
+}
+
+unsafe impl GlobalAlloc for GlobalAllocTestGuardAllocator {
+    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
+        self.guard();
+        System.alloc(layout)
+    }
+
+    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
+        self.guard();
+        System.dealloc(ptr, layout)
+    }
+
+    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
+        self.guard();
+        System.alloc_zeroed(layout)
+    }
+
+    unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
+        self.guard();
+        System.realloc(ptr, layout, new_size)
+    }
+}
+
+#[derive(Clone)]
+pub struct WatermarkAllocator {
+    watermark: usize,
+    in_use: Option<Arc<AtomicUsize>>,
+}
+
+impl Drop for WatermarkAllocator {
+    fn drop(&mut self) {
+        let in_use = self.in_use.take().unwrap();
+        let _g = AllowGlobalAllocGuard::new();
+        drop(in_use);
+    }
+}
+
+impl WatermarkAllocator {
+    pub fn new(watermark: usize) -> Self {
+        let in_use = Some({
+            let _g = AllowGlobalAllocGuard::new();
+            AtomicUsize::new(0).into()
+        });
+        Self { watermark, in_use }
+    }
+
+    pub fn in_use(&self) -> usize {
+        self.in_use.as_ref().unwrap().load(Ordering::SeqCst)
+    }
+}
+
+impl Claim for WatermarkAllocator {}
+
+unsafe impl Allocator for WatermarkAllocator {
+    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+        let current_in_use = self.in_use.as_ref().unwrap().load(Ordering::SeqCst);
+        let new_in_use = current_in_use + layout.size();
+        if new_in_use > self.watermark {
+            return Err(AllocError);
+        }
+        let allocated = {
+            let _g = AllowGlobalAllocGuard::new();
+            Global.allocate(layout)?
+        };
+        let true_new_in_use = self
+            .in_use
+            .as_ref()
+            .unwrap()
+            .fetch_add(allocated.len(), Ordering::SeqCst);
+        unsafe {
+            if true_new_in_use > self.watermark {
+                let ptr = allocated.as_ptr() as *mut u8;
+                let to_dealloc = NonNull::new_unchecked(ptr);
+                {
+                    let _g = AllowGlobalAllocGuard::new();
+                    Global.deallocate(to_dealloc, layout);
+                }
+                Err(AllocError)
+            } else {
+                Ok(allocated)
+            }
+        }
+    }
+
+    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+        let _g = AllowGlobalAllocGuard::new();
+        Global.deallocate(ptr, layout);
+        self.in_use
+            .as_ref()
+            .unwrap()
+            .fetch_sub(layout.size(), Ordering::SeqCst);
+    }
+}
+
+/// A second watermark allocator. It exists only to test cases where generic types
+/// need to interoperate even though their allocators differ,
+/// e.g. `lhs: Vec<T, A1> == rhs: Vec<T, A2>`.
+#[derive(Clone)]
+pub struct WatermarkAllocator2(WatermarkAllocator);
+
+impl WatermarkAllocator2 {
+    pub fn new(watermark: usize) -> Self {
+        let inner = WatermarkAllocator::new(watermark);
+        Self(inner)
+    }
+}
+
+impl Claim for WatermarkAllocator2 {}
+
+unsafe impl Allocator for WatermarkAllocator2 {
+    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+        self.0.allocate(layout)
+    }
+
+    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+        self.0.deallocate(ptr, layout)
+    }
+}
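Taken together, the pieces in `src/testing.rs` are meant to compose like this inside a test. This is a minimal sketch that mirrors the tests later in this diff; the test name is illustrative and not part of the change:

```rust
#[test]
fn sketch_guarded_test() {
    // From here on, any global allocation panics via GlobalAllocTestGuardAllocator.
    let _no_global_alloc_guard = NoGlobalAllocGuard::new();

    // A 32-byte budget; requests beyond it fail with AllocError.
    let wma = WatermarkAllocator::new(32);
    let mut vec = crate::vec::Vec::new_in(wma.clone());
    vec.push(1).unwrap();
    assert_eq!(wma.in_use(), vec.capacity() * size_of::<i32>());

    {
        // `format!` allocates through the global allocator, so opt back in locally.
        let _allow_global_alloc = AllowGlobalAllocGuard::new();
        assert_eq!(format!("{:?}", vec), "[1]");
    }
}
```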
diff --git a/src/vec.rs b/src/vec.rs
index 7d86c58..802b1e3 100644
--- a/src/vec.rs
+++ b/src/vec.rs
@@ -146,6 +146,10 @@ impl Vec {
         }
         Ok(())
     }
+
+    pub(crate) fn into_inner(self) -> InnerVec<T, A> {
+        self.inner
+    }
 }
 
 impl Vec {
@@ -290,67 +294,15 @@ impl AsMut<[T]> for Vec {
 mod tests {
     use super::*;
     use crate::claim::Claim;
-    use alloc::alloc::Global;
+    use crate::testing::{AllowGlobalAllocGuard, NoGlobalAllocGuard};
+    use crate::testing::{WatermarkAllocator, WatermarkAllocator2};
     use alloc::boxed::Box;
     use alloc::collections::TryReserveError;
-    use alloc::sync::Arc;
     use alloc::{format, vec};
-    use core::alloc::{AllocError, Layout};
-    use core::ptr::NonNull;
-    use core::sync::atomic::{AtomicUsize, Ordering};
-
-    #[derive(Clone)]
-    struct WatermarkAllocator {
-        watermark: usize,
-        in_use: Arc<AtomicUsize>,
-    }
-
-    impl Claim for WatermarkAllocator {}
-
-    impl WatermarkAllocator {
-        pub(crate) fn in_use(&self) -> usize {
-            self.in_use.load(Ordering::SeqCst)
-        }
-    }
-
-    impl WatermarkAllocator {
-        fn new(watermark: usize) -> Self {
-            Self {
-                watermark,
-                in_use: AtomicUsize::new(0).into(),
-            }
-        }
-    }
-
-    unsafe impl Allocator for WatermarkAllocator {
-        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
-            let current_in_use = self.in_use.load(Ordering::SeqCst);
-            let new_in_use = current_in_use + layout.size();
-            if new_in_use > self.watermark {
-                return Err(AllocError);
-            }
-            let allocated = Global.allocate(layout)?;
-            let true_new_in_use = self.in_use.fetch_add(allocated.len(), Ordering::SeqCst);
-            unsafe {
-                if true_new_in_use > self.watermark {
-                    let ptr = allocated.as_ptr() as *mut u8;
-                    let to_dealloc = NonNull::new_unchecked(ptr);
-                    Global.deallocate(to_dealloc, layout);
-                    Err(AllocError)
-                } else {
-                    Ok(allocated)
-                }
-            }
-        }
-
-        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
-            Global.deallocate(ptr, layout);
-            self.in_use.fetch_sub(layout.size(), Ordering::SeqCst);
-        }
-    }
 
     #[test]
     fn test_basics() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
         let wma = WatermarkAllocator::new(32);
         let mut vec = Vec::new_in(wma.clone());
         assert_eq!(vec.len(), 0);
@@ -364,14 +316,8 @@ mod tests {
         vec.push(4).unwrap();
         assert_eq!(vec.len(), 4);
         assert_eq!(vec.capacity(), 4);
-        assert_eq!(
-            wma.in_use.load(Ordering::SeqCst),
-            vec.capacity() * size_of::<i32>()
-        );
-        assert_eq!(
-            vec.allocator().in_use.load(Ordering::SeqCst),
-            vec.capacity() * size_of::<i32>()
-        );
+        assert_eq!(wma.in_use(), vec.capacity() * size_of::<i32>());
+        assert_eq!(vec.allocator().in_use(), vec.capacity() * size_of::<i32>());
         let _err: TryReserveError = vec.push(5).unwrap_err();
         assert_eq!(vec.as_slice(), &[1, 2, 3, 4]);
         assert_eq!(vec.len(), 4);
@@ -383,6 +329,7 @@
 
     #[test]
     fn test_with_capacity_in() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
         let wma = WatermarkAllocator::new(32);
         let vec: Vec = Vec::with_capacity_in(4, wma.clone()).unwrap();
         assert_eq!(vec.len(), 0);
@@ -395,6 +342,7 @@
 
     #[test]
     fn test_reserve() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
         let wma = WatermarkAllocator::new(32);
         let mut vec: Vec = Vec::new_in(wma);
         vec.reserve(32).unwrap();
@@ -405,17 +353,22 @@
 
     #[test]
     fn test_fmt_debug() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
         let wma = WatermarkAllocator::new(32);
         let mut vec = Vec::new_in(wma);
         vec.push(1).unwrap();
         vec.push(2).unwrap();
         vec.push(3).unwrap();
         vec.push(4).unwrap();
-        assert_eq!(format!("{:?}", vec), "[1, 2, 3, 4]");
+        {
+            let _allow_global_alloc = AllowGlobalAllocGuard::new();
+            assert_eq!(format!("{:?}", vec), "[1, 2, 3, 4]");
+        }
     }
 
     #[test]
     fn test_iter() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
         let wma = WatermarkAllocator::new(32);
         let mut vec = Vec::new_in(wma);
         vec.push(1).unwrap();
@@ -432,6 +385,7 @@
 
     #[test]
     fn test_iter_mut() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
         let wma = WatermarkAllocator::new(32);
         let mut vec = Vec::new_in(wma);
         vec.push(1).unwrap();
@@ -448,6 +402,7 @@
 
     #[test]
     fn test_as_ptr() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
         let wma = WatermarkAllocator::new(32);
         let mut vec = Vec::new_in(wma.clone());
         assert_eq!(wma.in_use(), 0);
@@ -466,6 +421,7 @@
 
     #[test]
     fn test_as_mut_ptr() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
         let wma = WatermarkAllocator::new(64);
         let mut vec = Vec::new_in(wma.clone());
         assert_eq!(wma.in_use(), 0);
@@ -488,6 +444,7 @@
 
     #[test]
     fn test_index() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
         let wma = WatermarkAllocator::new(32);
         let mut vec = Vec::new_in(wma);
         vec.push(1).unwrap();
@@ -508,6 +465,7 @@
     #[test]
     fn test_extend_from_slice_clone() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
         let wma = WatermarkAllocator::new(32);
         let mut vec = Vec::new_in(wma);
         vec.extend_from_slice(&[Claimable(1), Claimable(2), Claimable(3), Claimable(4)])
             .unwrap();
@@ -516,6 +474,7 @@
 
     #[test]
     fn test_extend_from_slice_copy() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
         let wma = WatermarkAllocator::new(32);
         let mut vec = Vec::new_in(wma);
         vec.extend_from_slice(&[1, 2, 3, 4]).unwrap();
@@ -528,6 +487,7 @@
 
     #[test]
     fn test_deref() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
         let wma = WatermarkAllocator::new(32);
         let mut vec = Vec::new_in(wma);
         vec.push(1).unwrap();
@@ -539,6 +499,7 @@
 
     #[test]
     fn test_deref_mut() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
         let wma = WatermarkAllocator::new(32);
         let mut vec = Vec::new_in(wma);
         vec.push(1).unwrap();
@@ -590,6 +551,8 @@
 
     #[test]
     fn test_extend() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
+        // Test the optimised extend paths, mixing pre-reserved and dynamically allocated capacity.
         let wma = WatermarkAllocator::new(32 * size_of::());
         {
             let mut vec = Vec::new_in(wma.clone());
@@ -627,6 +590,7 @@
 
     #[test]
     fn test_truncate() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
         let wma = WatermarkAllocator::new(32);
         let mut vec = Vec::new_in(wma);
         vec.push(1).unwrap();
@@ -642,6 +606,7 @@
 
     #[test]
     fn test_extend_with() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
         let wma = WatermarkAllocator::new(32);
         let mut vec = Vec::new_in(wma);
         vec.extend_with(3, 1).unwrap();
@@ -650,6 +615,7 @@
 
     #[test]
     fn test_resize() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
         let wma = WatermarkAllocator::new(64);
         let mut vec = Vec::new_in(wma);
         vec.resize(3, 1).unwrap();
@@ -662,6 +628,7 @@
 
     #[test]
     fn test_resize_with() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
         let wma = WatermarkAllocator::new(64);
         let mut vec = Vec::new_in(wma);
         vec.resize_with(3, || 1).unwrap();
@@ -693,15 +660,17 @@
 
     #[test]
     fn test_eq() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
         let wma = WatermarkAllocator::new(64);
+        let wma2 = WatermarkAllocator2::new(1024);
 
         // __impl_slice_eq1! { [A1: Allocator, A2: Allocator] Vec<T, A1>, Vec<U, A2> }
         {
             let mut lhs = Vec::new_in(wma.clone());
-            let mut rhs = Vec::new_in(Global);
+            let mut rhs = Vec::new_in(wma2);
 
-            lhs.extend(vec![1, 2, 3]).unwrap();
-            rhs.extend(vec![w(1), w(2), w(3)]).unwrap();
+            lhs.extend([1, 2, 3]).unwrap();
+            rhs.extend([w(1), w(2), w(3)]).unwrap();
 
             assert_eq!(lhs, rhs);
             assert_eq!(rhs, lhs);
@@ -714,7 +683,7 @@
         // __impl_slice_eq1! { [A: Allocator] &[T], Vec<U, A> }
         {
             let mut lhs = Vec::new_in(wma.clone());
-            lhs.extend(vec![1, 2, 3]).unwrap();
+            lhs.extend([1, 2, 3]).unwrap();
             let rhs: &[IntWrapper] = &[w(1), w(2), w(3)];
             assert_eq!(lhs, rhs);
             assert_eq!(rhs, lhs);
@@ -728,9 +697,14 @@
         // __impl_slice_eq1! { [A: Allocator] &mut [T], Vec<U, A> }
         {
             let mut lhs = Vec::new_in(wma.clone());
-            lhs.extend(vec![1, 2, 3]).unwrap();
+            lhs.extend([1, 2, 3]).unwrap();
 
-            let mut rhs_vec = vec![w(1), w(2), w(3)];
+            let mut rhs_vec = {
+                let _allow_global_alloc = AllowGlobalAllocGuard::new();
+                let mut v = vec![w(1), w(2), w(3)];
+                v.reserve(1);
+                v
+            };
             let rhs: &mut [IntWrapper] = &mut rhs_vec;
 
             assert_eq!(lhs, rhs);
@@ -740,28 +714,44 @@
             let rhs2: &mut [IntWrapper] = &mut rhs_vec;
             assert_ne!(lhs, rhs2);
             assert_ne!(rhs2, lhs);
+
+            {
+                let _allow_global_alloc = AllowGlobalAllocGuard::new();
+                drop(rhs_vec)
+            }
         }
 
         // __impl_slice_eq1! { [A: Allocator] Vec<T, A>, [U] }
        // __impl_slice_eq1! { [A: Allocator] [T], Vec<U, A> }
         {
             let mut lhs = Vec::new_in(wma.clone());
-            lhs.extend(vec![1, 2, 3]).unwrap();
-
-            let rhs: Box<[IntWrapper]> = Box::new([w(1), w(2), w(3)]);
+            lhs.extend([1, 2, 3]).unwrap();
+
+            // We explicitly elide the `len` part here by using a box.
+            let (rhs, rhs2) = {
+                let _allow_global_alloc = AllowGlobalAllocGuard::new();
+                let rhs: Box<[IntWrapper]> = Box::new([w(1), w(2), w(3)]);
+                let rhs2: Box<[IntWrapper]> = Box::new([w(1), w(2), w(3), w(4)]);
+                (rhs, rhs2)
+            };
 
             assert_eq!(lhs, *rhs);
             assert_eq!(*rhs, lhs);
 
-            let rhs2: Box<[IntWrapper]> = Box::new([w(1), w(2), w(3), w(4)]);
             assert_ne!(lhs, *rhs2);
             assert_ne!(*rhs2, lhs);
+
+            {
+                let _allow_global_alloc = AllowGlobalAllocGuard::new();
+                drop(rhs);
+                drop(rhs2);
+            }
         }
 
         // __impl_slice_eq1! { [A: Allocator, const N: usize] Vec<T, A>, [U; N] }
         // __impl_slice_eq1! { [A: Allocator, const N: usize] [T; N], Vec<U, A> }
         {
             let mut lhs = Vec::new_in(wma.clone());
-            lhs.extend(vec![1, 2, 3]).unwrap();
+            lhs.extend([1, 2, 3]).unwrap();
             let rhs: [IntWrapper; 3] = [w(1), w(2), w(3)];
             assert_eq!(lhs, rhs); // Compare Vec with fixed-size array
@@ -776,7 +766,7 @@
         // __impl_slice_eq1! { [A: Allocator, const N: usize] &[T; N], Vec<U, A> }
         {
             let mut lhs = Vec::new_in(wma.clone());
-            lhs.extend(vec![1, 2, 3]).unwrap();
+            lhs.extend([1, 2, 3]).unwrap();
             let rhs_arr: [IntWrapper; 3] = [w(1), w(2), w(3)];
             let rhs: &[IntWrapper; 3] = &rhs_arr;
 
@@ -801,9 +791,10 @@
 
     #[test]
     fn test_as_ref() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
         let wma = WatermarkAllocator::new(128);
         let mut vec1 = Vec::new_in(wma);
-        vec1.extend(vec![1, 2, 3]).unwrap();
+        vec1.extend([1, 2, 3]).unwrap();
 
         let vec2 = vec1.try_clone().unwrap();
         assert_eq!(vec1, vec2);
@@ -827,9 +818,10 @@
 
     #[test]
     fn test_as_mut() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
         let wma = WatermarkAllocator::new(128);
         let mut vec1 = Vec::new_in(wma);
-        vec1.extend(vec![1, 2, 3]).unwrap();
+        vec1.extend([1, 2, 3]).unwrap();
 
         let vec2 = vec1.try_clone().unwrap();
         assert_eq!(vec1, vec2);
@@ -842,6 +834,7 @@
 
     #[test]
     fn test_try_clone() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
         let wma = WatermarkAllocator::new(64);
         let mut vec1 = Vec::new_in(wma.clone());
         vec1.extend([1usize, 2, 3, 4, 5, 6, 7, 8]).unwrap();
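A pattern worth calling out in the `vec.rs` test changes above: `extend(vec![...])` becomes `extend([...])` because `vec!` allocates through the global allocator, which now panics under `NoGlobalAllocGuard`; plain arrays involve no allocation at all. Where a global allocation is genuinely required (boxes, `vec!`, `format!`), it is scoped explicitly. A condensed sketch of the two cases, using the same names as the tests:

```rust
// Allowed under the guard: no allocation happens here.
lhs.extend([1, 2, 3]).unwrap();

// Also allowed: global allocation, but explicitly opted into and scoped.
let rhs_vec = {
    let _allow_global_alloc = AllowGlobalAllocGuard::new();
    vec![w(1), w(2), w(3)]
};
```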
diff --git a/src/vec_deque.rs b/src/vec_deque.rs
new file mode 100644
index 0000000..22c16f6
--- /dev/null
+++ b/src/vec_deque.rs
@@ -0,0 +1,673 @@
+use crate::claim::Claim;
+use crate::try_clone::TryClone;
+use alloc::collections::vec_deque::{Drain, VecDeque as InnerVecDeque};
+use alloc::collections::vec_deque::{Iter, IterMut};
+use alloc::collections::TryReserveError;
+use core::alloc::Allocator;
+use core::ops::RangeBounds;
+
+pub struct VecDeque<T, A: Allocator> {
+    inner: InnerVecDeque<T, A>,
+}
+
+impl<T, A: Allocator> VecDeque<T, A> {
+    #[inline]
+    pub fn new_in(alloc: A) -> Self {
+        Self {
+            inner: InnerVecDeque::new_in(alloc),
+        }
+    }
+
+    #[inline]
+    pub fn with_capacity_in(capacity: usize, alloc: A) -> Result<Self, TryReserveError> {
+        Ok(crate::vec::Vec::with_capacity_in(capacity, alloc)?.into())
+    }
+
+    #[inline]
+    pub fn get(&self, index: usize) -> Option<&T> {
+        self.inner.get(index)
+    }
+
+    #[inline]
+    pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
+        self.inner.get_mut(index)
+    }
+
+    #[inline]
+    pub fn capacity(&self) -> usize {
+        self.inner.capacity()
+    }
+
+    #[inline]
+    pub fn allocator(&self) -> &A {
+        self.inner.allocator()
+    }
+
+    #[inline]
+    pub fn iter(&self) -> Iter<'_, T> {
+        self.inner.iter()
+    }
+
+    #[inline]
+    pub fn iter_mut(&mut self) -> IterMut<'_, T> {
+        self.inner.iter_mut()
+    }
+
+    #[inline]
+    pub fn len(&self) -> usize {
+        self.inner.len()
+    }
+
+    #[inline]
+    pub fn is_empty(&self) -> bool {
+        self.inner.is_empty()
+    }
+
+    #[inline]
+    pub fn range<R>(&self, range: R) -> Iter<'_, T>
+    where
+        R: RangeBounds<usize>,
+    {
+        self.inner.range(range)
+    }
+
+    #[inline]
+    pub fn range_mut<R>(&mut self, range: R) -> IterMut<'_, T>
+    where
+        R: RangeBounds<usize>,
+    {
+        self.inner.range_mut(range)
+    }
+
+    #[inline]
+    pub fn reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
+        self.inner.try_reserve(additional)
+    }
+
+    #[inline]
+    pub fn drain<R>(&mut self, range: R) -> Drain<'_, T, A>
+    where
+        R: RangeBounds<usize>,
+    {
+        self.inner.drain(range)
+    }
+
+    #[inline]
+    pub fn clear(&mut self) {
+        self.inner.clear()
+    }
+
+    #[inline]
+    pub fn contains(&self, x: &T) -> bool
+    where
+        T: PartialEq<T>,
+    {
+        self.inner.contains(x)
+    }
+
+    #[inline]
+    pub fn front(&self) -> Option<&T> {
+        self.inner.front()
+    }
+
+    #[inline]
+    pub fn front_mut(&mut self) -> Option<&mut T> {
+        self.inner.front_mut()
+    }
+
+    #[inline]
+    pub fn back(&self) -> Option<&T> {
+        self.inner.back()
+    }
+
+    #[inline]
+    pub fn back_mut(&mut self) -> Option<&mut T> {
+        self.inner.back_mut()
+    }
+
+    #[inline]
+    pub fn pop_front(&mut self) -> Option<T> {
+        self.inner.pop_front()
+    }
+
+    #[inline]
+    pub fn pop_back(&mut self) -> Option<T> {
+        self.inner.pop_back()
+    }
+
+    #[inline]
+    pub fn push_front(&mut self, item: T) -> Result<(), TryReserveError> {
+        self.reserve(1)?;
+        self.inner.push_front(item);
+        Ok(())
+    }
+
+    #[inline]
+    pub fn push_back(&mut self, item: T) -> Result<(), TryReserveError> {
+        self.reserve(1)?;
+        self.inner.push_back(item);
+        Ok(())
+    }
+
+    #[inline]
+    pub fn insert(&mut self, index: usize, item: T) -> Result<(), TryReserveError> {
+        self.reserve(1)?;
+        self.inner.insert(index, item);
+        Ok(())
+    }
+
+    #[inline]
+    pub fn remove(&mut self, index: usize) -> Option<T> {
+        self.inner.remove(index)
+    }
+
+    #[inline]
+    pub fn append(&mut self, other: &mut Self) -> Result<(), TryReserveError> {
+        self.reserve(other.len())?;
+        self.inner.append(&mut other.inner);
+        Ok(())
+    }
+
+    #[inline]
+    pub fn make_contiguous(&mut self) -> &mut [T] {
+        self.inner.make_contiguous()
+    }
+}
+
+impl<T: Claim, A: Allocator + Claim> TryClone for VecDeque<T, A> {
+    type Error = TryReserveError;
+
+    fn try_clone(&self) -> Result<Self, Self::Error> {
+        let mut cloned = Self::with_capacity_in(self.len(), self.allocator().clone())?;
+        cloned.inner.extend(self.iter().cloned());
+        Ok(cloned)
+    }
+}
+
+impl<T, A: Allocator> From<crate::vec::Vec<T, A>> for VecDeque<T, A> {
+    fn from(vec: crate::vec::Vec<T, A>) -> Self {
+        let vec_inner = vec.into_inner();
+        let inner = vec_inner.into();
+        Self { inner }
+    }
+}
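Before the tests, here is how the wrapper's fallible surface is meant to be used end to end. This is a sketch under the assumption that the types above and `WatermarkAllocator` from `src/testing.rs` are in scope; the function name is illustrative:

```rust
fn sketch_deque_usage() -> Result<(), TryReserveError> {
    let wma = WatermarkAllocator::new(64);

    // Every growing operation returns Result instead of aborting on OOM.
    let mut deque = VecDeque::with_capacity_in(4, wma.clone())?;
    deque.push_back(1)?;
    deque.push_front(0)?;
    deque.insert(1, 5)?;

    // Cloning is fallible as well.
    let copy = deque.try_clone()?;
    assert_eq!(copy.len(), deque.len());

    // A fallible Vec converts into a VecDeque reusing its buffer, which is
    // what the new crate-private Vec::into_inner() enables.
    let mut vec = crate::vec::Vec::new_in(wma.clone());
    vec.push(7)?;
    let from_vec: VecDeque<_, _> = vec.into();
    assert_eq!(from_vec.front(), Some(&7));
    Ok(())
}
```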
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::testing::{AllowGlobalAllocGuard, NoGlobalAllocGuard, WatermarkAllocator};
+    use alloc::vec::Vec as InnerVec;
+
+    #[test]
+    fn test_new_in() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
+        let wma = WatermarkAllocator::new(1024);
+        let deque: VecDeque<i32, _> = VecDeque::new_in(wma.clone());
+        assert!(deque.is_empty());
+        assert_eq!(deque.len(), 0);
+        assert_eq!(wma.in_use(), 0);
+    }
+
+    #[test]
+    fn test_with_capacity_in_success() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
+        let wma = WatermarkAllocator::new(128);
+        let deque: Result<VecDeque<i32, _>, _> = VecDeque::with_capacity_in(10, wma.clone());
+        assert!(deque.is_ok());
+        assert_eq!(wma.in_use(), deque.unwrap().capacity() * size_of::<i32>());
+    }
+
+    #[test]
+    fn test_with_capacity_in_failure() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
+        let wma = WatermarkAllocator::new(4); // Set a low watermark to trigger failure
+        let deque = VecDeque::<i32, _>::with_capacity_in(10, wma.clone());
+        assert!(deque.is_err());
+        assert_eq!(wma.in_use(), 0);
+    }
+
+    #[test]
+    fn test_push_front_back() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
+        let wma = WatermarkAllocator::new(128);
+        let mut deque = VecDeque::new_in(wma.clone());
+
+        // Push elements to the front and back
+        assert!(deque.push_back(1).is_ok());
+        assert!(deque.push_front(2).is_ok());
+        assert_eq!(deque.len(), 2);
+        assert_eq!(deque.front(), Some(&2));
+        assert_eq!(deque.back(), Some(&1));
+    }
+
+    #[test]
+    fn test_push_front_back_allocation_failure() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
+        let wma = WatermarkAllocator::new(16); // Small watermark to limit allocations
+        let mut deque = VecDeque::with_capacity_in(1, wma.clone()).expect("should allocate");
+        assert_eq!(deque.capacity(), 1); // overallocated by default.
+
+        // Push first element should work
+        assert!(deque.push_back(1).is_ok());
+        // Second push should fail due to allocation error
+        assert!(deque.push_back(2).is_err());
+    }
+
+    #[test]
+    fn test_insert_remove() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
+        let wma = WatermarkAllocator::new(128);
+        let mut deque = VecDeque::new_in(wma.clone());
+
+        // Insert elements
+        assert!(deque.push_back(1).is_ok());
+        assert!(deque.push_back(3).is_ok());
+        assert!(deque.insert(1, 2).is_ok());
+        assert_eq!(deque.len(), 3);
+
+        // Check order after insertion
+        assert_eq!(deque.get(0), Some(&1));
+        assert_eq!(deque.get(1), Some(&2));
+        assert_eq!(deque.get(2), Some(&3));
+
+        // Remove an element and check results
+        assert_eq!(deque.remove(1), Some(2));
+        assert_eq!(deque.len(), 2);
+        assert_eq!(deque.get(1), Some(&3));
+    }
+
+    #[test]
+    fn test_insert_allocation_failure() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
+        let wma = WatermarkAllocator::new(16); // Limited allocation capacity
+        let mut deque = VecDeque::with_capacity_in(1, wma.clone()).expect("should allocate");
+
+        // First insert should succeed
+        assert!(deque.push_back(1).is_ok());
+        // Second insert should fail due to an allocation error
+        assert!(deque.insert(1, 2).is_err());
+    }
+
+    #[test]
+    fn test_append() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
+        let wma = WatermarkAllocator::new(128);
+        let mut deque1 = VecDeque::new_in(wma.clone());
+        let mut deque2 = VecDeque::new_in(wma.clone());
+
+        // Fill both deques
+        assert!(deque1.push_back(1).is_ok());
+        assert!(deque1.push_back(2).is_ok());
+        assert!(deque2.push_back(3).is_ok());
+
+        // Append deque2 into deque1
+        assert!(deque1.append(&mut deque2).is_ok());
+        assert_eq!(deque1.len(), 3);
+        assert!(deque2.is_empty());
+        assert_eq!(deque1.get(2), Some(&3));
+    }
+
+    #[test]
+    fn test_append_allocation_failure() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
+        let wma = WatermarkAllocator::new(16);
+        let mut deque1 = VecDeque::with_capacity_in(1, wma.clone()).expect("should allocate");
+        assert_eq!(deque1.capacity(), 1);
+        assert_eq!(wma.in_use(), deque1.capacity() * size_of::<i32>());
+        assert_eq!(wma.in_use(), 4);
+        let mut deque2 = VecDeque::with_capacity_in(2, wma.clone()).expect("should allocate");
+        assert_eq!(deque2.capacity(), 2);
+        assert_eq!(
+            wma.in_use(),
+            deque1.capacity() * size_of::<i32>() + deque2.capacity() * size_of::<i32>()
+        );
+        assert_eq!(wma.in_use(), 12);
+
+        // Push items into deque2
+        assert!(deque2.push_back(1).is_ok());
+        assert!(deque2.push_back(2).is_ok());
+
+        // Append should fail due to insufficient allocation capacity in deque1
+        assert!(deque1.append(&mut deque2).is_err());
+        assert!(!deque2.is_empty()); // deque2 should remain intact
+    }
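To spell out the watermark arithmetic in `test_append_allocation_failure`: with 4-byte `i32` elements, `deque1` holds one slot (4 bytes in use) and `deque2` two more (12 bytes in use) against a 16-byte watermark. Growing `deque1` to make room for the two appended elements needs a fresh allocation of at least 8 more bytes while the existing 12 bytes are still live, which exceeds the budget, so `append` returns an error and `deque2` is left untouched.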
+
+    #[test]
+    fn test_try_clone() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
+        let wma = WatermarkAllocator::new(128);
+        let mut deque = VecDeque::new_in(wma.clone());
+        deque.push_back(1).unwrap();
+        deque.push_back(2).unwrap();
+
+        let cloned = deque.try_clone();
+        assert!(cloned.is_ok());
+        let cloned = cloned.unwrap();
+        assert_eq!(cloned.len(), deque.len());
+        assert_eq!(cloned.get(0), Some(&1));
+        assert_eq!(cloned.get(1), Some(&2));
+    }
+
+    #[test]
+    fn test_try_clone_allocation_failure() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
+        let wma = WatermarkAllocator::new(16); // Low watermark for testing allocation failure
+        let mut deque = VecDeque::new_in(wma.clone());
+        deque.push_back(1).unwrap();
+
+        // Cloning should fail due to allocation constraints
+        let cloned = deque.try_clone();
+        assert!(cloned.is_err());
+    }
+
+    #[test]
+    fn test_get_mut() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
+        let wma = WatermarkAllocator::new(128);
+        let mut deque = VecDeque::new_in(wma.clone());
+        deque.push_back(1).unwrap();
+        deque.push_back(2).unwrap();
+
+        if let Some(value) = deque.get_mut(1) {
+            *value = 3;
+        }
+        assert_eq!(deque.get(1), Some(&3));
+    }
+
+    #[test]
+    fn test_iter() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
+        let wma = WatermarkAllocator::new(128);
+        let mut deque = VecDeque::new_in(wma.clone());
+        deque.push_back(1).unwrap();
+        deque.push_back(2).unwrap();
+        deque.push_back(3).unwrap();
+
+        let mut values = {
+            let _allow_global_alloc_guard = AllowGlobalAllocGuard::new();
+            InnerVec::with_capacity(deque.len())
+        };
+        values.extend(deque.iter().cloned());
+        assert_eq!(values, [1, 2, 3]);
+
+        {
+            let _allow_global_alloc_guard = AllowGlobalAllocGuard::new();
+            drop(values);
+        }
+    }
+
+    #[test]
+    fn test_iter_mut() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
+        let wma = WatermarkAllocator::new(128);
+        let mut deque = VecDeque::new_in(wma.clone());
+        deque.push_back(1).unwrap();
+        deque.push_back(2).unwrap();
+        deque.push_back(3).unwrap();
+
+        for value in deque.iter_mut() {
+            *value *= 2;
+        }
+
+        let mut values = {
+            let _allow_global_alloc_guard = AllowGlobalAllocGuard::new();
+            InnerVec::with_capacity(deque.len())
+        };
+        values.extend(deque.iter().cloned());
+        assert_eq!(values, [2, 4, 6]);
+        {
+            let _allow_global_alloc_guard = AllowGlobalAllocGuard::new();
+            drop(values);
+        }
+    }
+
+    #[test]
+    fn test_range() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
+        let wma = WatermarkAllocator::new(128);
+        let mut deque = VecDeque::new_in(wma.clone());
+        deque.push_back(10).unwrap();
+        deque.push_back(20).unwrap();
+        deque.push_back(30).unwrap();
+        deque.push_back(40).unwrap();
+
+        let mut values = {
+            let _allow_global_alloc_guard = AllowGlobalAllocGuard::new();
+            InnerVec::with_capacity(deque.len())
+        };
+        values.extend(deque.range(1..3).cloned());
+        assert_eq!(values, [20, 30]);
+        {
+            let _allow_global_alloc_guard = AllowGlobalAllocGuard::new();
+            drop(values);
+        }
+    }
+
+    #[test]
+    fn test_range_mut() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
+        let wma = WatermarkAllocator::new(128);
+        let mut deque = VecDeque::new_in(wma.clone());
+        deque.push_back(5).unwrap();
+        deque.push_back(10).unwrap();
+        deque.push_back(15).unwrap();
+
+        for value in deque.range_mut(1..3) {
+            *value += 10;
+        }
+
+        let mut values = {
+            let _allow_global_alloc_guard = AllowGlobalAllocGuard::new();
+            InnerVec::with_capacity(deque.len())
+        };
+        values.extend(deque.iter().cloned());
+        assert_eq!(values, [5, 20, 25]);
+        {
+            let _allow_global_alloc_guard = AllowGlobalAllocGuard::new();
+            drop(values);
+        }
+    }
+
+    #[test]
+    fn test_drain() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
+        let wma = WatermarkAllocator::new(128);
+        let mut deque = VecDeque::new_in(wma.clone());
+        deque.push_back(1).unwrap();
+        deque.push_back(2).unwrap();
+        deque.push_back(3).unwrap();
+        deque.push_back(4).unwrap();
+
+        let mut drained = {
+            let _allow_alloc_guard = AllowGlobalAllocGuard::new();
+            InnerVec::with_capacity(deque.len())
+        };
+
+        drained.extend(deque.drain(1..3));
+        assert_eq!(drained, [2, 3]);
+        assert_eq!(deque.len(), 2);
+        assert_eq!(deque.get(1), Some(&4));
+
+        {
+            let _allow_alloc_guard = AllowGlobalAllocGuard::new();
+            drop(drained);
+        }
+    }
+
+    #[test]
+    fn test_clear() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
+        let wma = WatermarkAllocator::new(128);
+        let mut deque = VecDeque::new_in(wma.clone());
+        deque.push_back(1).unwrap();
+        deque.push_back(2).unwrap();
+
+        deque.clear();
+        assert!(deque.is_empty());
+        assert_eq!(deque.len(), 0);
+    }
+
+    #[test]
+    fn test_contains() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
+        let wma = WatermarkAllocator::new(128);
+        let mut deque = VecDeque::new_in(wma.clone());
+        deque.push_back(42).unwrap();
+        deque.push_back(99).unwrap();
+
+        assert!(deque.contains(&42));
+        assert!(!deque.contains(&1));
+    }
+
+    #[test]
+    fn test_front_mut() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
+        let wma = WatermarkAllocator::new(128);
+        let mut deque = VecDeque::new_in(wma.clone());
+        deque.push_back(5).unwrap();
+        deque.push_back(10).unwrap();
+
+        if let Some(value) = deque.front_mut() {
+            *value = 7;
+        }
+        assert_eq!(deque.front(), Some(&7));
+    }
+
+    #[test]
+    fn test_back_mut() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
+        let wma = WatermarkAllocator::new(128);
+        let mut deque = VecDeque::new_in(wma.clone());
+        deque.push_back(5).unwrap();
+        deque.push_back(10).unwrap();
+
+        if let Some(value) = deque.back_mut() {
+            *value = 15;
+        }
+        assert_eq!(deque.back(), Some(&15));
+    }
+
+    #[test]
+    fn test_pop_front() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
+        let wma = WatermarkAllocator::new(128);
+        let mut deque = VecDeque::new_in(wma.clone());
+        deque.push_back(1).unwrap();
+        deque.push_back(2).unwrap();
+
+        assert_eq!(deque.pop_front(), Some(1));
+        assert_eq!(deque.pop_front(), Some(2));
+        assert!(deque.is_empty());
+    }
+
+    #[test]
+    fn test_pop_back() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
+        let wma = WatermarkAllocator::new(128);
+        let mut deque = VecDeque::new_in(wma.clone());
+        deque.push_back(3).unwrap();
+        deque.push_back(4).unwrap();
+
+        assert_eq!(deque.pop_back(), Some(4));
+        assert_eq!(deque.pop_back(), Some(3));
+        assert!(deque.is_empty());
+    }
+
+    #[test]
+    fn test_make_contiguous() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
+        let wma = WatermarkAllocator::new(128);
+        let mut deque = VecDeque::new_in(wma.clone());
+
+        // Alternate between front and back pushes to create a discontinuous buffer.
+        deque.push_back(1).unwrap();
+        deque.push_front(2).unwrap();
+        deque.push_back(3).unwrap();
+        deque.push_front(4).unwrap();
+        deque.push_back(5).unwrap();
+
+        // Calling make_contiguous should arrange elements in a contiguous slice.
+        let slice = deque.make_contiguous();
+
+        // Verify the order matches the intended sequence as if the buffer were contiguous.
+        assert_eq!(slice, &[4, 2, 1, 3, 5]);
+    }
+
+    #[test]
+    fn test_try_clone_success() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
+        let wma = WatermarkAllocator::new(128);
+        let mut deque = VecDeque::new_in(wma.clone());
+
+        // Populate the deque with some elements.
+        deque.push_back(1).unwrap();
+        deque.push_back(2).unwrap();
+        deque.push_back(3).unwrap();
+
+        // Attempt to clone the deque.
+        let cloned = deque.try_clone();
+
+        // Verify the clone was successful and matches the original.
+        assert!(cloned.is_ok());
+        let cloned = cloned.unwrap();
+        assert_eq!(cloned.len(), deque.len());
+        {
+            let _allow_alloc_guard = AllowGlobalAllocGuard::new();
+            assert_eq!(
+                cloned.iter().collect::<InnerVec<_>>(),
+                deque.iter().collect::<InnerVec<_>>()
+            );
+        }
+    }
+
+    #[test]
+    fn test_try_clone_failure() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
+        // Set a low watermark to trigger allocation failure during cloning.
+        let wma = WatermarkAllocator::new(16); // Low watermark for small allocations.
+        let mut deque = VecDeque::new_in(wma.clone());
+
+        // Fill deque so it requires more allocation on cloning.
+        deque.push_back(1).unwrap();
+        deque.push_back(2).unwrap();
+        deque.push_back(3).unwrap();
+        deque.push_back(4).unwrap();
+
+        // Attempt to clone the deque. Expect an error due to allocation limit.
+        let cloned = deque.try_clone();
+        assert!(cloned.is_err());
+    }
+
+    #[test]
+    fn test_try_clone_from_success() {
+        let _no_global_alloc_guard = NoGlobalAllocGuard::new();
+        let wma = WatermarkAllocator::new(128);
+        let mut original = VecDeque::new_in(wma.clone());
+
+        // Populate the original deque with some elements.
+        original.push_back(1).unwrap();
+        original.push_back(2).unwrap();
+        original.push_back(3).unwrap();
+
+        // Create a target deque with different contents to clone into.
+        let mut target = VecDeque::new_in(wma.clone());
+        target.push_back(10).unwrap();
+        target.push_back(20).unwrap();
+
+        // Use try_clone_from to clone from the original deque into the target.
+        let result = target.try_clone_from(&original);
+
+        // Verify that the clone was successful.
+        assert!(result.is_ok());
+
+        // Check that the target now matches the original.
+        assert_eq!(target.len(), original.len());
+        {
+            let _allow_global_alloc_guard = AllowGlobalAllocGuard::new();
+            assert_eq!(
+                target.iter().collect::<InnerVec<_>>(),
+                original.iter().collect::<InnerVec<_>>()
+            );
+        }
+    }
+}
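One thing a reviewer may note about the last test: `try_clone_from` is never defined for `VecDeque` in this diff, so it presumably comes as a provided method on the `TryClone` trait in `try_clone.rs`, which is not shown here. A hypothetical sketch of such a provided method, purely for orientation and not part of the change:

```rust
// Hypothetical default method on the TryClone trait; not part of this diff.
fn try_clone_from(&mut self, source: &Self) -> Result<(), Self::Error>
where
    Self: Sized,
{
    *self = source.try_clone()?;
    Ok(())
}
```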