From 44b4ce1d61206dcb5351644ceeac2490f60fc512 Mon Sep 17 00:00:00 2001
From: Scott McMurray
Date: Thu, 22 Sep 2022 23:12:29 -0700
Subject: [PATCH 1/4] Make ZST checks in core/alloc more readable

There's a bunch of these checks because of special handling for ZSTs in
various unsafe implementations of stuff.

This lets them be `T::IS_ZST` instead of `mem::size_of::<T>() == 0` every
time, making them both more readable and more terse.

*Not* proposed for stabilization at this time. Would be `pub(crate)` except
`alloc` wants to use it too.

(And while it doesn't matter now, if we ever get something like 85836,
making it a const can help codegen be simpler.)
---
 library/alloc/src/collections/vec_deque/mod.rs |  8 ++++----
 library/alloc/src/lib.rs                       |  1 +
 library/alloc/src/raw_vec.rs                   | 12 ++++++------
 library/alloc/src/slice.rs                     |  6 ++----
 library/alloc/src/vec/drain.rs                 |  4 ++--
 library/alloc/src/vec/in_place_collect.rs      |  4 ++--
 library/alloc/src/vec/into_iter.rs             | 16 ++++++++--------
 library/alloc/src/vec/mod.rs                   |  6 +++---
 library/core/src/mem/mod.rs                    | 15 +++++++++++++++
 library/core/src/slice/iter.rs                 |  6 +++---
 library/core/src/slice/iter/macros.rs          |  8 ++++----
 library/core/src/slice/mod.rs                  | 10 +++++-----
 library/core/src/slice/rotate.rs               |  4 ++--
 library/core/src/slice/sort.rs                 |  6 +++---
 14 files changed, 60 insertions(+), 46 deletions(-)

diff --git a/library/alloc/src/collections/vec_deque/mod.rs b/library/alloc/src/collections/vec_deque/mod.rs
index e3f4deb0875b9..5546c9383de9c 100644
--- a/library/alloc/src/collections/vec_deque/mod.rs
+++ b/library/alloc/src/collections/vec_deque/mod.rs
@@ -12,7 +12,7 @@ use core::fmt;
 use core::hash::{Hash, Hasher};
 use core::iter::{repeat_with, FromIterator};
 use core::marker::PhantomData;
-use core::mem::{self, ManuallyDrop, MaybeUninit};
+use core::mem::{ManuallyDrop, MaybeUninit, SizedTypeProperties};
 use core::ops::{Index, IndexMut, Range, RangeBounds};
 use core::ptr::{self, NonNull};
 use core::slice;
@@ -177,7 +177,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
     /// Marginally more convenient
     #[inline]
     fn cap(&self) -> usize {
-        if mem::size_of::<T>() == 0 {
+        if T::IS_ZST {
             // For zero sized types, we are always at maximum capacity
             MAXIMUM_ZST_CAPACITY
         } else {
@@ -3038,7 +3038,7 @@ impl<T, A: Allocator> From<Vec<T, A>> for VecDeque<T, A> {
     /// `Vec<T>` came from `From<VecDeque<T>>` and hasn't been reallocated.
     fn from(mut other: Vec<T, A>) -> Self {
         let len = other.len();
-        if mem::size_of::<T>() == 0 {
+        if T::IS_ZST {
             // There's no actual allocation for ZSTs to worry about capacity,
             // but `VecDeque` can't handle as much length as `Vec`.
             assert!(len < MAXIMUM_ZST_CAPACITY, "capacity overflow");
@@ -3124,7 +3124,7 @@ impl<T, const N: usize> From<[T; N]> for VecDeque<T> {
     fn from(arr: [T; N]) -> Self {
         let mut deq = VecDeque::with_capacity(N);
         let arr = ManuallyDrop::new(arr);
-        if mem::size_of::<T>() != 0 {
+        if !<T>::IS_ZST {
             // SAFETY: VecDeque::with_capacity ensures that there is enough capacity.
             unsafe {
                 ptr::copy_nonoverlapping(arr.as_ptr(), deq.ptr(), N);
diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs
index 8619467c2d928..2641942f962ce 100644
--- a/library/alloc/src/lib.rs
+++ b/library/alloc/src/lib.rs
@@ -136,6 +136,7 @@
 #![feature(receiver_trait)]
 #![feature(saturating_int_impl)]
 #![feature(set_ptr_value)]
+#![feature(sized_type_properties)]
 #![feature(slice_from_ptr_range)]
 #![feature(slice_group_by)]
 #![feature(slice_ptr_get)]
diff --git a/library/alloc/src/raw_vec.rs b/library/alloc/src/raw_vec.rs
index b0f4529abdfa5..5a10121bbbe4b 100644
--- a/library/alloc/src/raw_vec.rs
+++ b/library/alloc/src/raw_vec.rs
@@ -3,7 +3,7 @@
 use core::alloc::LayoutError;
 use core::cmp;
 use core::intrinsics;
-use core::mem::{self, ManuallyDrop, MaybeUninit};
+use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties};
 use core::ops::Drop;
 use core::ptr::{self, NonNull, Unique};
 use core::slice;
@@ -168,7 +168,7 @@ impl<T, A: Allocator> RawVec<T, A> {
     #[cfg(not(no_global_oom_handling))]
     fn allocate_in(capacity: usize, init: AllocInit, alloc: A) -> Self {
         // Don't allocate here because `Drop` will not deallocate when `capacity` is 0.
-        if mem::size_of::<T>() == 0 || capacity == 0 {
+        if T::IS_ZST || capacity == 0 {
             Self::new_in(alloc)
         } else {
             // We avoid `unwrap_or_else` here because it bloats the amount of
@@ -229,7 +229,7 @@ impl<T, A: Allocator> RawVec<T, A> {
     /// This will always be `usize::MAX` if `T` is zero-sized.
     #[inline(always)]
     pub fn capacity(&self) -> usize {
-        if mem::size_of::<T>() == 0 { usize::MAX } else { self.cap }
+        if T::IS_ZST { usize::MAX } else { self.cap }
     }
 
     /// Returns a shared reference to the allocator backing this `RawVec`.
@@ -238,7 +238,7 @@ impl<T, A: Allocator> RawVec<T, A> {
     }
 
     fn current_memory(&self) -> Option<(NonNull<u8>, Layout)> {
-        if mem::size_of::<T>() == 0 || self.cap == 0 {
+        if T::IS_ZST || self.cap == 0 {
             None
         } else {
             // We have an allocated chunk of memory, so we can bypass runtime
@@ -380,7 +380,7 @@ impl<T, A: Allocator> RawVec<T, A> {
         // This is ensured by the calling contexts.
        debug_assert!(additional > 0);
 
-        if mem::size_of::<T>() == 0 {
+        if T::IS_ZST {
             // Since we return a capacity of `usize::MAX` when `elem_size` is
             // 0, getting to here necessarily means the `RawVec` is overfull.
             return Err(CapacityOverflow.into());
@@ -406,7 +406,7 @@
     // `grow_amortized`, but this method is usually instantiated less often so
     // it's less critical.
     fn grow_exact(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> {
-        if mem::size_of::<T>() == 0 {
+        if T::IS_ZST {
             // Since we return a capacity of `usize::MAX` when the type size is
             // 0, getting to here necessarily means the `RawVec` is overfull.
             return Err(CapacityOverflow.into());
diff --git a/library/alloc/src/slice.rs b/library/alloc/src/slice.rs
index bcd3f49e20892..2e24c3c652d11 100644
--- a/library/alloc/src/slice.rs
+++ b/library/alloc/src/slice.rs
@@ -16,9 +16,7 @@ use core::borrow::{Borrow, BorrowMut};
 #[cfg(not(no_global_oom_handling))]
 use core::cmp::Ordering::{self, Less};
 #[cfg(not(no_global_oom_handling))]
-use core::mem;
-#[cfg(not(no_global_oom_handling))]
-use core::mem::size_of;
+use core::mem::{self, SizedTypeProperties};
 #[cfg(not(no_global_oom_handling))]
 use core::ptr;
 
@@ -1018,7 +1016,7 @@ where
     const MIN_RUN: usize = 10;
 
     // Sorting has no meaningful behavior on zero-sized types.
-    if size_of::<T>() == 0 {
+    if T::IS_ZST {
         return;
     }
 
diff --git a/library/alloc/src/vec/drain.rs b/library/alloc/src/vec/drain.rs
index 5b73906a1c97c..541f99bcfaba4 100644
--- a/library/alloc/src/vec/drain.rs
+++ b/library/alloc/src/vec/drain.rs
@@ -1,7 +1,7 @@
 use crate::alloc::{Allocator, Global};
 use core::fmt;
 use core::iter::{FusedIterator, TrustedLen};
-use core::mem::{self, ManuallyDrop};
+use core::mem::{self, ManuallyDrop, SizedTypeProperties};
 use core::ptr::{self, NonNull};
 use core::slice::{self};
 
@@ -202,7 +202,7 @@ impl<T, A: Allocator> Drop for Drain<'_, T, A> {
 
         let mut vec = self.vec;
 
-        if mem::size_of::<T>() == 0 {
+        if T::IS_ZST {
             // ZSTs have no identity, so we don't need to move them around, we only need to drop the correct amount.
             // this can be achieved by manipulating the Vec length instead of moving values out from `iter`.
             unsafe {
diff --git a/library/alloc/src/vec/in_place_collect.rs b/library/alloc/src/vec/in_place_collect.rs
index b211421b20270..a3f8fe40fd5ce 100644
--- a/library/alloc/src/vec/in_place_collect.rs
+++ b/library/alloc/src/vec/in_place_collect.rs
@@ -135,7 +135,7 @@
 //! vec.truncate(write_idx);
 //! ```
 use core::iter::{InPlaceIterable, SourceIter, TrustedRandomAccessNoCoerce};
-use core::mem::{self, ManuallyDrop};
+use core::mem::{self, ManuallyDrop, SizedTypeProperties};
 use core::ptr::{self};
 
 use super::{InPlaceDrop, SpecFromIter, SpecFromIterNested, Vec};
@@ -154,7 +154,7 @@ where
     default fn from_iter(mut iterator: I) -> Self {
         // See "Layout constraints" section in the module documentation. We rely on const
         // optimization here since these conditions currently cannot be expressed as trait bounds
-        if mem::size_of::<T>() == 0
+        if T::IS_ZST
             || mem::size_of::<T>()
                 != mem::size_of::<<<I as SourceIter>::Source as AsVecIntoIter>::Item>()
             || mem::align_of::<T>()
diff --git a/library/alloc/src/vec/into_iter.rs b/library/alloc/src/vec/into_iter.rs
index b4157fd589541..d74e77637bdc4 100644
--- a/library/alloc/src/vec/into_iter.rs
+++ b/library/alloc/src/vec/into_iter.rs
@@ -8,7 +8,7 @@ use core::iter::{
     FusedIterator, InPlaceIterable, SourceIter, TrustedLen, TrustedRandomAccessNoCoerce,
 };
 use core::marker::PhantomData;
-use core::mem::{self, ManuallyDrop, MaybeUninit};
+use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties};
 #[cfg(not(no_global_oom_handling))]
 use core::ops::Deref;
 use core::ptr::{self, NonNull};
@@ -149,7 +149,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
     fn next(&mut self) -> Option<T> {
         if self.ptr == self.end {
             None
-        } else if mem::size_of::<T>() == 0 {
+        } else if T::IS_ZST {
             // purposefully don't use 'ptr.offset' because for
             // vectors with 0-size elements this would return the
             // same pointer.
@@ -167,7 +167,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
 
     #[inline]
     fn size_hint(&self) -> (usize, Option<usize>) {
-        let exact = if mem::size_of::<T>() == 0 {
+        let exact = if T::IS_ZST {
             self.end.addr().wrapping_sub(self.ptr.addr())
         } else {
             unsafe { self.end.sub_ptr(self.ptr) }
@@ -179,7 +179,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
     fn advance_by(&mut self, n: usize) -> Result<(), usize> {
         let step_size = self.len().min(n);
         let to_drop = ptr::slice_from_raw_parts_mut(self.ptr as *mut T, step_size);
-        if mem::size_of::<T>() == 0 {
+        if T::IS_ZST {
             // SAFETY: due to unchecked casts of unsigned amounts to signed offsets the wraparound
             // effectively results in unsigned pointers representing positions 0..usize::MAX,
             // which is valid for ZSTs.
@@ -209,7 +209,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
 
         let len = self.len();
 
-        if mem::size_of::<T>() == 0 {
+        if T::IS_ZST {
             if len < N {
                 self.forget_remaining_elements();
                 // Safety: ZSTs can be conjured ex nihilo, only the amount has to be correct
@@ -253,7 +253,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
         // that `T: Copy` so reading elements from the buffer doesn't invalidate
         // them for `Drop`.
         unsafe {
-            if mem::size_of::<T>() == 0 { mem::zeroed() } else { ptr::read(self.ptr.add(i)) }
+            if T::IS_ZST { mem::zeroed() } else { ptr::read(self.ptr.add(i)) }
         }
     }
 }
@@ -264,7 +264,7 @@ impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
     fn next_back(&mut self) -> Option<T> {
         if self.end == self.ptr {
             None
-        } else if mem::size_of::<T>() == 0 {
+        } else if T::IS_ZST {
             // See above for why 'ptr.offset' isn't used
             self.end = self.end.wrapping_byte_sub(1);
 
@@ -280,7 +280,7 @@ impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
     #[inline]
     fn advance_back_by(&mut self, n: usize) -> Result<(), usize> {
         let step_size = self.len().min(n);
-        if mem::size_of::<T>() == 0 {
+        if T::IS_ZST {
             // SAFETY: same as for advance_by()
             self.end = self.end.wrapping_byte_sub(step_size);
         } else {
diff --git a/library/alloc/src/vec/mod.rs b/library/alloc/src/vec/mod.rs
index 60b36af5e67fc..d6d986905e6c1 100644
--- a/library/alloc/src/vec/mod.rs
+++ b/library/alloc/src/vec/mod.rs
@@ -64,7 +64,7 @@ use core::iter;
 #[cfg(not(no_global_oom_handling))]
 use core::iter::FromIterator;
 use core::marker::PhantomData;
-use core::mem::{self, ManuallyDrop, MaybeUninit};
+use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties};
 use core::ops::{self, Index, IndexMut, Range, RangeBounds};
 use core::ptr::{self, NonNull};
 use core::slice::{self, SliceIndex};
@@ -2347,7 +2347,7 @@ impl<T, A: Allocator, const N: usize> Vec<[T; N], A> {
     #[unstable(feature = "slice_flatten", issue = "95629")]
     pub fn into_flattened(self) -> Vec<T, A> {
         let (ptr, len, cap, alloc) = self.into_raw_parts_with_alloc();
-        let (new_len, new_cap) = if mem::size_of::<T>() == 0 {
+        let (new_len, new_cap) = if T::IS_ZST {
             (len.checked_mul(N).expect("vec len overflow"), usize::MAX)
         } else {
             // SAFETY:
@@ -2677,7 +2677,7 @@ impl<T, A: Allocator> IntoIterator for Vec<T, A> {
             let mut me = ManuallyDrop::new(self);
             let alloc = ManuallyDrop::new(ptr::read(me.allocator()));
             let begin = me.as_mut_ptr();
-            let end = if mem::size_of::<T>() == 0 {
+            let end = if T::IS_ZST {
                 begin.wrapping_byte_add(me.len())
             } else {
                 begin.add(me.len()) as *const T
diff --git a/library/core/src/mem/mod.rs b/library/core/src/mem/mod.rs
index d2dd2941d590f..cd92ea24b6171 100644
--- a/library/core/src/mem/mod.rs
+++ b/library/core/src/mem/mod.rs
@@ -1178,3 +1178,18 @@ pub const fn discriminant<T>(v: &T) -> Discriminant<T> {
 pub const fn variant_count<T>() -> usize {
     intrinsics::variant_count::<T>()
 }
+
+/// This is here only to simplify all the ZST checks we need in the library.
+/// It's not on a stabilization track right now.
+#[doc(hidden)]
+#[unstable(feature = "sized_type_properties", issue = "none")]
+pub trait SizedTypeProperties: Sized {
+    /// `true` if this type requires no storage.
+    /// `false` if its [size](size_of) is greater than zero.
+    #[doc(hidden)]
+    #[unstable(feature = "sized_type_properties", issue = "none")]
+    const IS_ZST: bool = size_of::<Self>() == 0;
+}
+#[doc(hidden)]
+#[unstable(feature = "sized_type_properties", issue = "none")]
+impl<T> SizedTypeProperties for T {}
diff --git a/library/core/src/slice/iter.rs b/library/core/src/slice/iter.rs
index 395c5678451cd..d9dfc56ab6347 100644
--- a/library/core/src/slice/iter.rs
+++ b/library/core/src/slice/iter.rs
@@ -9,7 +9,7 @@ use crate::fmt;
 use crate::intrinsics::{assume, exact_div, unchecked_sub};
 use crate::iter::{FusedIterator, TrustedLen, TrustedRandomAccess, TrustedRandomAccessNoCoerce};
 use crate::marker::{PhantomData, Send, Sized, Sync};
-use crate::mem;
+use crate::mem::{self, SizedTypeProperties};
 use crate::num::NonZeroUsize;
 use crate::ptr::NonNull;
 
@@ -91,7 +91,7 @@ impl<'a, T> Iter<'a, T> {
         unsafe {
             assume(!ptr.is_null());
 
-            let end = if mem::size_of::<T>() == 0 {
+            let end = if T::IS_ZST {
                 ptr.wrapping_byte_add(slice.len())
             } else {
                 ptr.add(slice.len())
@@ -227,7 +227,7 @@ impl<'a, T> IterMut<'a, T> {
         unsafe {
             assume(!ptr.is_null());
 
-            let end = if mem::size_of::<T>() == 0 {
+            let end = if T::IS_ZST {
                 ptr.wrapping_byte_add(slice.len())
             } else {
                 ptr.add(slice.len())
diff --git a/library/core/src/slice/iter/macros.rs b/library/core/src/slice/iter/macros.rs
index 6c9e7574e1746..ce51d48e3e551 100644
--- a/library/core/src/slice/iter/macros.rs
+++ b/library/core/src/slice/iter/macros.rs
@@ -100,7 +100,7 @@ macro_rules! iterator {
             // Unsafe because the offset must not exceed `self.len()`.
             #[inline(always)]
             unsafe fn pre_dec_end(&mut self, offset: usize) -> * $raw_mut T {
-                if mem::size_of::<T>() == 0 {
+                if T::IS_ZST {
                     zst_shrink!(self, offset);
                     self.ptr.as_ptr()
                 } else {
@@ -140,7 +140,7 @@ macro_rules! iterator {
                 // since we check if the iterator is empty first.
                 unsafe {
                     assume(!self.ptr.as_ptr().is_null());
-                    if mem::size_of::<T>() != 0 {
+                    if !<T>::IS_ZST {
                         assume(!self.end.is_null());
                     }
                     if is_empty!(self) {
@@ -166,7 +166,7 @@ macro_rules! iterator {
             fn nth(&mut self, n: usize) -> Option<$elem> {
                 if n >= len!(self) {
                     // This iterator is now empty.
-                    if mem::size_of::<T>() == 0 {
+                    if T::IS_ZST {
                         // We have to do it this way as `ptr` may never be 0, but `end`
                         // could be (due to wrapping).
                         self.end = self.ptr.as_ptr();
@@ -355,7 +355,7 @@ macro_rules! iterator {
                 // empty first.
                 unsafe {
                     assume(!self.ptr.as_ptr().is_null());
-                    if mem::size_of::<T>() != 0 {
+                    if !<T>::IS_ZST {
                         assume(!self.end.is_null());
                     }
                     if is_empty!(self) {
diff --git a/library/core/src/slice/mod.rs b/library/core/src/slice/mod.rs
index 6a7150d2986ed..bf5c0cb46ae46 100644
--- a/library/core/src/slice/mod.rs
+++ b/library/core/src/slice/mod.rs
@@ -9,7 +9,7 @@
 use crate::cmp::Ordering::{self, Greater, Less};
 use crate::intrinsics::{assert_unsafe_precondition, exact_div};
 use crate::marker::Copy;
-use crate::mem;
+use crate::mem::{self, SizedTypeProperties};
 use crate::num::NonZeroUsize;
 use crate::ops::{Bound, FnMut, OneSidedRange, Range, RangeBounds};
 use crate::option::Option;
@@ -3459,7 +3459,7 @@ impl<T> [T] {
     #[must_use]
     pub unsafe fn align_to<U>(&self) -> (&[T], &[U], &[T]) {
         // Note that most of this function will be constant-evaluated,
-        if mem::size_of::<U>() == 0 || mem::size_of::<T>() == 0 {
+        if U::IS_ZST || T::IS_ZST {
             // handle ZSTs specially, which is – don't handle them at all.
             return (self, &[], &[]);
         }
@@ -3520,7 +3520,7 @@ impl<T> [T] {
     #[must_use]
     pub unsafe fn align_to_mut<U>(&mut self) -> (&mut [T], &mut [U], &mut [T]) {
         // Note that most of this function will be constant-evaluated,
-        if mem::size_of::<U>() == 0 || mem::size_of::<T>() == 0 {
+        if U::IS_ZST || T::IS_ZST {
             // handle ZSTs specially, which is – don't handle them at all.
             return (self, &mut [], &mut []);
         }
@@ -4066,7 +4066,7 @@ impl<T, const N: usize> [[T; N]] {
     /// ```
     #[unstable(feature = "slice_flatten", issue = "95629")]
     pub fn flatten(&self) -> &[T] {
-        let len = if crate::mem::size_of::<T>() == 0 {
+        let len = if T::IS_ZST {
             self.len().checked_mul(N).expect("slice len overflow")
         } else {
             // SAFETY: `self.len() * N` cannot overflow because `self` is
@@ -4104,7 +4104,7 @@ impl<T, const N: usize> [[T; N]] {
     /// ```
     #[unstable(feature = "slice_flatten", issue = "95629")]
     pub fn flatten_mut(&mut self) -> &mut [T] {
-        let len = if crate::mem::size_of::<T>() == 0 {
+        let len = if T::IS_ZST {
             self.len().checked_mul(N).expect("slice len overflow")
         } else {
             // SAFETY: `self.len() * N` cannot overflow because `self` is
diff --git a/library/core/src/slice/rotate.rs b/library/core/src/slice/rotate.rs
index 4589c6c0f04a5..fa8c238f8e7a2 100644
--- a/library/core/src/slice/rotate.rs
+++ b/library/core/src/slice/rotate.rs
@@ -1,5 +1,5 @@
 use crate::cmp;
-use crate::mem::{self, MaybeUninit};
+use crate::mem::{self, MaybeUninit, SizedTypeProperties};
 use crate::ptr;
 
 /// Rotates the range `[mid-left, mid+right)` such that the element at `mid` becomes the first
@@ -63,7 +63,7 @@ use crate::ptr;
 /// when `left < right` the swapping happens from the left instead.
 pub unsafe fn ptr_rotate<T>(mut left: usize, mut mid: *mut T, mut right: usize) {
     type BufType = [usize; 32];
-    if mem::size_of::<T>() == 0 {
+    if T::IS_ZST {
         return;
     }
     loop {
diff --git a/library/core/src/slice/sort.rs b/library/core/src/slice/sort.rs
index c6c03c0b0db96..87f77b7f21d62 100644
--- a/library/core/src/slice/sort.rs
+++ b/library/core/src/slice/sort.rs
@@ -7,7 +7,7 @@
 //! stable sorting implementation.
 
 use crate::cmp;
-use crate::mem::{self, MaybeUninit};
+use crate::mem::{self, MaybeUninit, SizedTypeProperties};
 use crate::ptr;
 
 /// When dropped, copies from `src` into `dest`.
@@ -813,7 +813,7 @@ where
     F: FnMut(&T, &T) -> bool,
 {
     // Sorting has no meaningful behavior on zero-sized types.
-    if mem::size_of::<T>() == 0 {
+    if T::IS_ZST {
         return;
     }
 
@@ -898,7 +898,7 @@ where
         panic!("partition_at_index index {} greater than length of slice {}", index, v.len());
     }
 
-    if mem::size_of::<T>() == 0 {
+    if T::IS_ZST {
         // Sorting has no meaningful behavior on zero-sized types. Do nothing.
     } else if index == v.len() - 1 {
         // Find max element and place it in the last position of the array. We're free to use
From cbbcd9f52ceef47a2855379c9b476667d0e81429 Mon Sep 17 00:00:00 2001
From: Scott McMurray
Date: Thu, 22 Sep 2022 23:13:12 -0700
Subject: [PATCH 2/4] rustfmt

---
 library/core/src/slice/iter.rs | 14 ++++----------
 1 file changed, 4 insertions(+), 10 deletions(-)

diff --git a/library/core/src/slice/iter.rs b/library/core/src/slice/iter.rs
index d9dfc56ab6347..ad39ce38319dd 100644
--- a/library/core/src/slice/iter.rs
+++ b/library/core/src/slice/iter.rs
@@ -91,11 +91,8 @@ impl<'a, T> Iter<'a, T> {
         unsafe {
             assume(!ptr.is_null());
 
-            let end = if T::IS_ZST {
-                ptr.wrapping_byte_add(slice.len())
-            } else {
-                ptr.add(slice.len())
-            };
+            let end =
+                if T::IS_ZST { ptr.wrapping_byte_add(slice.len()) } else { ptr.add(slice.len()) };
 
             Self { ptr: NonNull::new_unchecked(ptr as *mut T), end, _marker: PhantomData }
         }
@@ -227,11 +224,8 @@ impl<'a, T> IterMut<'a, T> {
         unsafe {
             assume(!ptr.is_null());
 
-            let end = if T::IS_ZST {
-                ptr.wrapping_byte_add(slice.len())
-            } else {
-                ptr.add(slice.len())
-            };
+            let end =
+                if T::IS_ZST { ptr.wrapping_byte_add(slice.len()) } else { ptr.add(slice.len()) };
 
             Self { ptr: NonNull::new_unchecked(ptr), end, _marker: PhantomData }
         }

From f0dc35927becebf20730117a0bf00840ac1dd4ff Mon Sep 17 00:00:00 2001
From: Scott McMurray
Date: Fri, 23 Sep 2022 00:02:49 -0700
Subject: [PATCH 3/4] Put back one of the `use`s for intra-doc mentions

---
 library/alloc/src/collections/vec_deque/mod.rs | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/library/alloc/src/collections/vec_deque/mod.rs b/library/alloc/src/collections/vec_deque/mod.rs
index 5546c9383de9c..2a57dad89a770 100644
--- a/library/alloc/src/collections/vec_deque/mod.rs
+++ b/library/alloc/src/collections/vec_deque/mod.rs
@@ -17,6 +17,12 @@ use core::ops::{Index, IndexMut, Range, RangeBounds};
 use core::ptr::{self, NonNull};
 use core::slice;
 
+// This is used in a bunch of intra-doc links.
+// FIXME: For some reason, `#[cfg(doc)]` wasn't sufficient, resulting in
+// failures in linkchecker even though rustdoc built the docs just fine.
+#[allow(unused_imports)]
+use core::mem;
+
 use crate::alloc::{Allocator, Global};
 use crate::collections::TryReserveError;
 use crate::collections::TryReserveErrorKind;

From ed16dbf65e55c54a6cb5eb2596a46a6639428370 Mon Sep 17 00:00:00 2001
From: Scott McMurray
Date: Sat, 24 Sep 2022 12:12:41 -0700
Subject: [PATCH 4/4] Add some more documentation

---
 library/core/src/mem/mod.rs | 26 ++++++++++++++++++++++++++
 1 file changed, 26 insertions(+)

diff --git a/library/core/src/mem/mod.rs b/library/core/src/mem/mod.rs
index cd92ea24b6171..66fca2fd28141 100644
--- a/library/core/src/mem/mod.rs
+++ b/library/core/src/mem/mod.rs
@@ -1179,6 +1179,9 @@ pub const fn variant_count<T>() -> usize {
     intrinsics::variant_count::<T>()
 }
 
+/// Provides associated constants for various useful properties of types,
+/// to give them a canonical form in our code and make them easier to read.
+///
 /// This is here only to simplify all the ZST checks we need in the library.
 /// It's not on a stabilization track right now.
 #[doc(hidden)]
@@ -1186,6 +1189,29 @@ pub const fn variant_count<T>() -> usize {
 pub trait SizedTypeProperties: Sized {
     /// `true` if this type requires no storage.
     /// `false` if its [size](size_of) is greater than zero.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(sized_type_properties)]
+    /// use core::mem::SizedTypeProperties;
+    ///
+    /// fn do_something_with<T>() {
+    ///     if T::IS_ZST {
+    ///         // ... special approach ...
+    ///     } else {
+    ///         // ... the normal thing ...
+    ///     }
+    /// }
+    ///
+    /// struct MyUnit;
+    /// assert!(MyUnit::IS_ZST);
+    ///
+    /// // For negative checks, consider using UFCS to emphasize the negation
+    /// assert!(!<&MyUnit>::IS_ZST);
+    /// // As it can sometimes hide in the type otherwise
+    /// assert!(!String::IS_ZST);
+    /// ```
     #[doc(hidden)]
     #[unstable(feature = "sized_type_properties", issue = "none")]
     const IS_ZST: bool = size_of::<Self>() == 0;
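
---
Editor's note (not part of the patch series): a minimal sketch of the before/after spelling as seen from code outside `core`/`alloc`. The `describe` function and its return strings are invented for illustration, and it assumes a nightly toolchain, since `sized_type_properties` is unstable and deliberately `#[doc(hidden)]`; stable code still has to write `mem::size_of::<T>() == 0`.

```
#![feature(sized_type_properties)]

use core::mem::{self, SizedTypeProperties};

// The blanket `impl<T> SizedTypeProperties for T` means any sized type
// exposes the `IS_ZST` associated constant once the trait is in scope.
fn describe<T>() -> &'static str {
    // Old spelling, used throughout core/alloc before this series:
    let old = mem::size_of::<T>() == 0;
    // New spelling introduced by the patch:
    let new = T::IS_ZST;
    assert_eq!(old, new);

    if T::IS_ZST { "zero-sized: needs no storage" } else { "occupies storage" }
}

fn main() {
    assert_eq!(describe::<()>(), "zero-sized: needs no storage");
    assert_eq!(describe::<[u8; 0]>(), "zero-sized: needs no storage");
    assert_eq!(describe::<u64>(), "occupies storage");
}
```

Inside the standard library the trait comes into scope through the `use` changes in the hunks above, which is why the call sites collapse to a bare `T::IS_ZST` (or the UFCS form `!<T>::IS_ZST` in a few places).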