#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
#[cfg(feature = "std")]
mod rstd {
	pub use std::*;
}
#[cfg(not(feature = "std"))]
mod rstd {
	pub use core::*;
	pub mod collections {
		pub use alloc::collections::*;
		pub use vec_deque::VecDeque;
	}
}
#[cfg(feature = "std")]
use std::sync::Arc;
#[cfg(not(feature = "std"))]
pub use alloc::boxed::Box;
#[cfg(not(feature = "std"))]
use core::ffi::c_void;
#[cfg(feature = "std")]
use rstd::hash::Hash;
use rstd::marker::PhantomData;
use rstd::mem::size_of;
use rstd::ops::Range;
use rstd::ops::{Deref, DerefMut};
#[cfg(feature = "std")]
use std::hash::BuildHasher;
#[cfg(feature = "std")]
use std::os::raw::c_void;
pub type VoidPtrToSizeFn = unsafe extern "C" fn(ptr: *const c_void) -> usize;
pub type VoidPtrToBoolFnMut = dyn FnMut(*const c_void) -> bool;
pub struct MallocSizeOfOps {
	
	size_of_op: VoidPtrToSizeFn,
	
	
	
	
	enclosing_size_of_op: Option<VoidPtrToSizeFn>,
	
	
	
	have_seen_ptr_op: Option<Box<VoidPtrToBoolFnMut>>,
}
impl MallocSizeOfOps {
	pub fn new(
		size_of: VoidPtrToSizeFn,
		malloc_enclosing_size_of: Option<VoidPtrToSizeFn>,
		have_seen_ptr: Option<Box<VoidPtrToBoolFnMut>>,
	) -> Self {
		MallocSizeOfOps {
			size_of_op: size_of,
			enclosing_size_of_op: malloc_enclosing_size_of,
			have_seen_ptr_op: have_seen_ptr,
		}
	}
	
	
	fn is_empty<T: ?Sized>(ptr: *const T) -> bool {
		
		
		
		
		
		
		
		return ptr as *const usize as usize <= 256;
	}
	
	
	pub unsafe fn malloc_size_of<T: ?Sized>(&self, ptr: *const T) -> usize {
		if MallocSizeOfOps::is_empty(ptr) {
			0
		} else {
			(self.size_of_op)(ptr as *const c_void)
		}
	}
	
	pub fn has_malloc_enclosing_size_of(&self) -> bool {
		self.enclosing_size_of_op.is_some()
	}
	
	
	pub unsafe fn malloc_enclosing_size_of<T>(&self, ptr: *const T) -> usize {
		assert!(!MallocSizeOfOps::is_empty(ptr));
		(self.enclosing_size_of_op.unwrap())(ptr as *const c_void)
	}
	
	pub fn have_seen_ptr<T>(&mut self, ptr: *const T) -> bool {
		let have_seen_ptr_op = self.have_seen_ptr_op.as_mut().expect("missing have_seen_ptr_op");
		have_seen_ptr_op(ptr as *const c_void)
	}
}
pub trait MallocSizeOf {
	
	
	
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
	
	
	
	fn constant_size() -> Option<usize>
	where
		Self: Sized,
	{
		None
	}
}
pub trait MallocShallowSizeOf {
	
	
	
	
	fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
pub trait MallocUnconditionalSizeOf {
	
	
	fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
pub trait MallocUnconditionalShallowSizeOf {
	
	fn unconditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
pub trait MallocConditionalSizeOf {
	
	
	
	fn conditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
pub trait MallocConditionalShallowSizeOf {
	
	fn conditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
#[cfg(not(any(all(target_os = "macos", not(feature = "jemalloc-global"),), feature = "estimate-heapsize")))]
pub mod inner_allocator_use {
	use super::*;
	#[cfg(not(feature = "std"))]
	use alloc::string::String;
	impl<T: ?Sized> MallocShallowSizeOf for Box<T> {
		fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
			unsafe { ops.malloc_size_of(&**self) }
		}
	}
	impl<T> MallocShallowSizeOf for Vec<T> {
		fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
			unsafe { ops.malloc_size_of(self.as_ptr()) }
		}
	}
	
	#[cfg(feature = "std")]
	#[cfg(all(feature = "jemalloc-global", not(target_os = "windows")))]
	impl<T> MallocUnconditionalShallowSizeOf for Arc<T> {
		fn unconditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
			unsafe { ops.malloc_size_of(arc_ptr(self)) }
		}
	}
	#[cfg(feature = "std")]
	#[cfg(not(all(feature = "jemalloc-global", not(target_os = "windows"))))]
	impl<T> MallocUnconditionalShallowSizeOf for Arc<T> {
		fn unconditional_shallow_size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
			size_of::<T>()
		}
	}
	impl MallocSizeOf for String {
		fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
			unsafe { ops.malloc_size_of(self.as_ptr()) }
		}
	}
}
impl<'a, T: ?Sized> MallocSizeOf for &'a T {
	fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
		
		0
	}
	fn constant_size() -> Option<usize> {
		Some(0)
	}
}
impl<T: MallocSizeOf + ?Sized> MallocSizeOf for Box<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.shallow_size_of(ops) + (**self).size_of(ops)
	}
}
#[impl_trait_for_tuples::impl_for_tuples(12)]
impl MallocSizeOf for Tuple {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut result = 0;
		for_tuples!( #( result += Tuple.size_of(ops); )* );
		result
	}
	fn constant_size() -> Option<usize> {
		let mut result = Some(0);
		for_tuples!( #( result = result.and_then(|s| Tuple::constant_size().map(|t| s + t)); )* );
		result
	}
}
impl<T: MallocSizeOf> MallocSizeOf for Option<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		if let Some(val) = self.as_ref() {
			val.size_of(ops)
		} else {
			0
		}
	}
	fn constant_size() -> Option<usize> {
		T::constant_size().filter(|s| *s == 0)
	}
}
impl<T: MallocSizeOf, E: MallocSizeOf> MallocSizeOf for Result<T, E> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		match *self {
			Ok(ref x) => x.size_of(ops),
			Err(ref e) => e.size_of(ops),
		}
	}
	fn constant_size() -> Option<usize> {
		
		T::constant_size().and_then(|t| E::constant_size().filter(|e| *e == t))
	}
}
impl<T: MallocSizeOf + Copy> MallocSizeOf for rstd::cell::Cell<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.get().size_of(ops)
	}
	fn constant_size() -> Option<usize> {
		T::constant_size()
	}
}
impl<T: MallocSizeOf> MallocSizeOf for rstd::cell::RefCell<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.borrow().size_of(ops)
	}
	fn constant_size() -> Option<usize> {
		T::constant_size()
	}
}
#[cfg(feature = "std")]
impl<'a, B: ?Sized + ToOwned> MallocSizeOf for std::borrow::Cow<'a, B>
where
	B::Owned: MallocSizeOf,
{
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		match *self {
			std::borrow::Cow::Borrowed(_) => 0,
			std::borrow::Cow::Owned(ref b) => b.size_of(ops),
		}
	}
}
impl<T: MallocSizeOf> MallocSizeOf for [T] {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = 0;
		if let Some(t) = T::constant_size() {
			n += self.len() * t;
		} else {
			n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
		}
		n
	}
}
impl<T: MallocSizeOf> MallocSizeOf for Vec<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = self.shallow_size_of(ops);
		if let Some(t) = T::constant_size() {
			n += self.len() * t;
		} else {
			n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
		}
		n
	}
}
impl<T> MallocShallowSizeOf for rstd::collections::VecDeque<T> {
	fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		if ops.has_malloc_enclosing_size_of() {
			if let Some(front) = self.front() {
				
				unsafe { ops.malloc_enclosing_size_of(&*front) }
			} else {
				
				0
			}
		} else {
			
			self.capacity() * size_of::<T>()
		}
	}
}
impl<T: MallocSizeOf> MallocSizeOf for rstd::collections::VecDeque<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = self.shallow_size_of(ops);
		if let Some(t) = T::constant_size() {
			n += self.len() * t;
		} else {
			n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
		}
		n
	}
}
#[cfg(feature = "std")]
impl<T, S> MallocShallowSizeOf for std::collections::HashSet<T, S>
where
	T: Eq + Hash,
	S: BuildHasher,
{
	fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		if ops.has_malloc_enclosing_size_of() {
			
			
			
			
			self.iter().next().map_or(0, |t| unsafe { ops.malloc_enclosing_size_of(t) })
		} else {
			
			self.capacity() * (size_of::<T>() + size_of::<usize>())
		}
	}
}
#[cfg(feature = "std")]
impl<T, S> MallocSizeOf for std::collections::HashSet<T, S>
where
	T: Eq + Hash + MallocSizeOf,
	S: BuildHasher,
{
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = self.shallow_size_of(ops);
		if let Some(t) = T::constant_size() {
			n += self.len() * t;
		} else {
			n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
		}
		n
	}
}
impl<I: MallocSizeOf> MallocSizeOf for rstd::cmp::Reverse<I> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.0.size_of(ops)
	}
	fn constant_size() -> Option<usize> {
		I::constant_size()
	}
}
#[cfg(feature = "std")]
impl<K, V, S> MallocShallowSizeOf for std::collections::HashMap<K, V, S> {
	fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		
		if ops.has_malloc_enclosing_size_of() {
			self.values().next().map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
		} else {
			self.capacity() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
		}
	}
}
#[cfg(feature = "std")]
impl<K, V, S> MallocSizeOf for std::collections::HashMap<K, V, S>
where
	K: MallocSizeOf,
	V: MallocSizeOf,
{
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = self.shallow_size_of(ops);
		if let (Some(k), Some(v)) = (K::constant_size(), V::constant_size()) {
			n += self.len() * (k + v)
		} else {
			n = self.iter().fold(n, |acc, (k, v)| acc + k.size_of(ops) + v.size_of(ops))
		}
		n
	}
}
impl<K, V> MallocShallowSizeOf for rstd::collections::BTreeMap<K, V> {
	fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		if ops.has_malloc_enclosing_size_of() {
			self.values().next().map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
		} else {
			self.len() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
		}
	}
}
impl<K, V> MallocSizeOf for rstd::collections::BTreeMap<K, V>
where
	K: MallocSizeOf,
	V: MallocSizeOf,
{
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = self.shallow_size_of(ops);
		if let (Some(k), Some(v)) = (K::constant_size(), V::constant_size()) {
			n += self.len() * (k + v)
		} else {
			n = self.iter().fold(n, |acc, (k, v)| acc + k.size_of(ops) + v.size_of(ops))
		}
		n
	}
}
impl<T> MallocShallowSizeOf for rstd::collections::BTreeSet<T> {
	fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		if ops.has_malloc_enclosing_size_of() {
			
			self.iter().next().map_or(0, |t| unsafe { ops.malloc_enclosing_size_of(t) })
		} else {
			
			self.len() * (size_of::<T>() + size_of::<usize>())
		}
	}
}
impl<T> MallocSizeOf for rstd::collections::BTreeSet<T>
where
	T: MallocSizeOf,
{
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = self.shallow_size_of(ops);
		if let Some(t) = T::constant_size() {
			n += self.len() * t;
		} else {
			n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
		}
		n
	}
}
#[cfg(feature = "std")]
fn arc_ptr<T>(s: &Arc<T>) -> *const T {
	&(**s) as *const T
}
#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocUnconditionalSizeOf for Arc<T> {
	fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.unconditional_shallow_size_of(ops) + (**self).size_of(ops)
	}
}
#[cfg(feature = "std")]
impl<T> MallocConditionalShallowSizeOf for Arc<T> {
	fn conditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		if ops.have_seen_ptr(arc_ptr(self)) {
			0
		} else {
			self.unconditional_shallow_size_of(ops)
		}
	}
}
#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocConditionalSizeOf for Arc<T> {
	fn conditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		if ops.have_seen_ptr(arc_ptr(self)) {
			0
		} else {
			self.unconditional_size_of(ops)
		}
	}
}
#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for std::sync::Mutex<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.lock().unwrap().size_of(ops)
	}
}
#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for parking_lot::Mutex<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.lock().size_of(ops)
	}
}
#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for std::sync::RwLock<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.read().unwrap().size_of(ops)
	}
}
#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for parking_lot::RwLock<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.read().size_of(ops)
	}
}
#[macro_export]
macro_rules! malloc_size_of_is_0(
	($($ty:ty),+) => (
		$(
			impl $crate::MallocSizeOf for $ty {
				#[inline(always)]
				fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
					0
				}
				#[inline(always)]
				fn constant_size() -> Option<usize> { Some(0) }
			}
		)+
	);
	(any: $($ty:ident<$($gen:ident),+>),+) => (
		$(
			impl<$($gen),+> $crate::MallocSizeOf for $ty<$($gen),+> {
				#[inline(always)]
				fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
					0
				}
				#[inline(always)]
				fn constant_size() -> Option<usize> { Some(0) }
			}
		)+
	);
	($($ty:ident<$($gen:ident),+>),+) => (
		$(
			impl<$($gen: $crate::MallocSizeOf),+> $crate::MallocSizeOf for $ty<$($gen),+> {
				#[inline(always)]
				fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
					0
				}
				#[inline(always)]
				fn constant_size() -> Option<usize> { Some(0) }
			}
		)+
	);
);
malloc_size_of_is_0!(bool, char, str);
malloc_size_of_is_0!(u8, u16, u32, u64, u128, usize);
malloc_size_of_is_0!(i8, i16, i32, i64, i128, isize);
malloc_size_of_is_0!(f32, f64);
malloc_size_of_is_0!(rstd::sync::atomic::AtomicBool);
malloc_size_of_is_0!(rstd::sync::atomic::AtomicIsize);
malloc_size_of_is_0!(rstd::sync::atomic::AtomicUsize);
malloc_size_of_is_0!(Range<u8>, Range<u16>, Range<u32>, Range<u64>, Range<usize>);
malloc_size_of_is_0!(Range<i8>, Range<i16>, Range<i32>, Range<i64>, Range<isize>);
malloc_size_of_is_0!(Range<f32>, Range<f64>);
malloc_size_of_is_0!(any: PhantomData<T>);
#[derive(Clone)]
pub struct Measurable<T: MallocSizeOf>(pub T);
impl<T: MallocSizeOf> Deref for Measurable<T> {
	type Target = T;
	fn deref(&self) -> &T {
		&self.0
	}
}
impl<T: MallocSizeOf> DerefMut for Measurable<T> {
	fn deref_mut(&mut self) -> &mut T {
		&mut self.0
	}
}
#[cfg(feature = "hashbrown")]
impl<K, V, S> MallocShallowSizeOf for hashbrown::HashMap<K, V, S> {
	fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		
		if ops.has_malloc_enclosing_size_of() {
			self.values().next().map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
		} else {
			self.capacity() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
		}
	}
}
#[cfg(feature = "hashbrown")]
impl<K, V, S> MallocSizeOf for hashbrown::HashMap<K, V, S>
where
	K: MallocSizeOf,
	V: MallocSizeOf,
{
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = self.shallow_size_of(ops);
		if let (Some(k), Some(v)) = (K::constant_size(), V::constant_size()) {
			n += self.len() * (k + v)
		} else {
			n = self.iter().fold(n, |acc, (k, v)| acc + k.size_of(ops) + v.size_of(ops))
		}
		n
	}
}
#[cfg(feature = "lru")]
impl<K, V, S> MallocSizeOf for lru::LruCache<K, V, S>
where
	K: MallocSizeOf + rstd::cmp::Eq + rstd::hash::Hash,
	V: MallocSizeOf,
	S: rstd::hash::BuildHasher,
{
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = 0;
		if let (Some(k), Some(v)) = (K::constant_size(), V::constant_size()) {
			n += self.len() * (k + v)
		} else {
			n = self.iter().fold(n, |acc, (k, v)| acc + k.size_of(ops) + v.size_of(ops))
		}
		n
	}
}
malloc_size_of_is_0!(
	[u8; 1], [u8; 2], [u8; 3], [u8; 4], [u8; 5], [u8; 6], [u8; 7], [u8; 8], [u8; 9], [u8; 10], [u8; 11], [u8; 12],
	[u8; 13], [u8; 14], [u8; 15], [u8; 16], [u8; 17], [u8; 18], [u8; 19], [u8; 20], [u8; 21], [u8; 22], [u8; 23],
	[u8; 24], [u8; 25], [u8; 26], [u8; 27], [u8; 28], [u8; 29], [u8; 30], [u8; 31], [u8; 32]
);
macro_rules! impl_smallvec {
	($size: expr) => {
		#[cfg(feature = "smallvec")]
		impl<T> MallocSizeOf for smallvec::SmallVec<[T; $size]>
		where
			T: MallocSizeOf,
		{
			fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
				let mut n = if self.spilled() { self.capacity() * core::mem::size_of::<T>() } else { 0 };
				if let Some(t) = T::constant_size() {
					n += self.len() * t;
				} else {
					n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
				}
				n
			}
		}
	};
}
impl_smallvec!(32); 
impl_smallvec!(36); 
#[cfg(feature = "std")]
malloc_size_of_is_0!(std::time::Instant);
#[cfg(feature = "std")]
malloc_size_of_is_0!(std::time::Duration);
#[cfg(all(test, feature = "std"))] 
mod tests {
	use crate::{allocators::new_malloc_size_ops, MallocSizeOf, MallocSizeOfOps};
	use smallvec::SmallVec;
	use std::collections::BTreeSet;
	use std::mem;
	impl_smallvec!(3);
	#[test]
	fn test_smallvec_stack_allocated_type() {
		let mut v: SmallVec<[u8; 3]> = SmallVec::new();
		let mut ops = new_malloc_size_ops();
		assert_eq!(v.size_of(&mut ops), 0);
		v.push(1);
		v.push(2);
		v.push(3);
		assert_eq!(v.size_of(&mut ops), 0);
		assert!(!v.spilled());
		v.push(4);
		assert!(v.spilled(), "SmallVec spills when going beyond the capacity of the inner backing array");
		assert_eq!(v.size_of(&mut ops), 4); 
	}
	#[test]
	fn test_smallvec_boxed_stack_allocated_type() {
		let mut v: SmallVec<[Box<u8>; 3]> = SmallVec::new();
		let mut ops = new_malloc_size_ops();
		assert_eq!(v.size_of(&mut ops), 0);
		v.push(Box::new(1u8));
		v.push(Box::new(2u8));
		v.push(Box::new(3u8));
		assert!(v.size_of(&mut ops) >= 3);
		assert!(!v.spilled());
		v.push(Box::new(4u8));
		assert!(v.spilled(), "SmallVec spills when going beyond the capacity of the inner backing array");
		let mut ops = new_malloc_size_ops();
		let expected_min_allocs = mem::size_of::<Box<u8>>() * 4 + 4;
		assert!(v.size_of(&mut ops) >= expected_min_allocs);
	}
	#[test]
	fn test_smallvec_heap_allocated_type() {
		let mut v: SmallVec<[String; 3]> = SmallVec::new();
		let mut ops = new_malloc_size_ops();
		assert_eq!(v.size_of(&mut ops), 0);
		v.push("COW".into());
		v.push("PIG".into());
		v.push("DUCK".into());
		assert!(!v.spilled());
		assert!(v.size_of(&mut ops) >= "COW".len() + "PIG".len() + "DUCK".len());
		v.push("ÖWL".into());
		assert!(v.spilled());
		let mut ops = new_malloc_size_ops();
		let expected_min_allocs = mem::size_of::<String>() * 4 + "ÖWL".len() + "COW".len() + "PIG".len() + "DUCK".len();
		assert!(v.size_of(&mut ops) >= expected_min_allocs);
	}
	#[test]
	fn test_large_vec() {
		const N: usize = 128 * 1024 * 1024;
		let val = vec![1u8; N];
		let mut ops = new_malloc_size_ops();
		assert!(val.size_of(&mut ops) >= N);
		assert!(val.size_of(&mut ops) < 2 * N);
	}
	#[test]
	fn btree_set() {
		let mut set = BTreeSet::new();
		for t in 0..100 {
			set.insert(vec![t]);
		}
		
		assert!(crate::malloc_size(&set) > 3000);
	}
	#[test]
	fn special_malloc_size_of_0() {
		struct Data<P> {
			phantom: std::marker::PhantomData<P>,
		}
		malloc_size_of_is_0!(any: Data<P>);
		
		assert_eq!(crate::malloc_size(&Data::<[u8; 333]> { phantom: std::marker::PhantomData }), 0);
	}
	#[test]
	fn constant_size() {
		struct AlwaysTwo(Vec<u8>);
		impl MallocSizeOf for AlwaysTwo {
			fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
				self.0.size_of(ops)
			}
			fn constant_size() -> Option<usize> {
				Some(2)
			}
		}
		assert_eq!(AlwaysTwo::constant_size(), Some(2));
		assert_eq!(std::cmp::Reverse::<u8>::constant_size(), Some(0));
		assert_eq!(std::cell::RefCell::<u8>::constant_size(), Some(0));
		assert_eq!(std::cell::Cell::<u8>::constant_size(), Some(0));
		assert_eq!(Result::<(), ()>::constant_size(), Some(0));
		assert_eq!(<(AlwaysTwo, (), [u8; 32], AlwaysTwo)>::constant_size(), Some(2 + 2));
		assert_eq!(Option::<u8>::constant_size(), Some(0));
		assert_eq!(<&String>::constant_size(), Some(0));
		assert_eq!(<String>::constant_size(), None);
		assert_eq!(std::borrow::Cow::<String>::constant_size(), None);
		assert_eq!(Result::<(), String>::constant_size(), None);
		assert_eq!(Option::<AlwaysTwo>::constant_size(), None);
	}
}