diff --git a/compiler/rustc_arena/src/lib.rs b/compiler/rustc_arena/src/lib.rs index 4fae5ef845f7..ecd254cb4da2 100644 --- a/compiler/rustc_arena/src/lib.rs +++ b/compiler/rustc_arena/src/lib.rs @@ -11,10 +11,12 @@ html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/", test(no_crate_inject, attr(deny(warnings))) )] +#![allow(incomplete_features)] #![feature(dropck_eyepatch)] #![feature(new_uninit)] #![feature(maybe_uninit_slice)] -#![feature(min_specialization)] +//#![feature(min_specialization)] +#![feature(specialization)] #![feature(decl_macro)] #![feature(pointer_byte_offsets)] #![feature(rustc_attrs)] diff --git a/compiler/rustc_ast/src/ast.rs b/compiler/rustc_ast/src/ast.rs index f2258fecfeaf..aecb0bf24b86 100644 --- a/compiler/rustc_ast/src/ast.rs +++ b/compiler/rustc_ast/src/ast.rs @@ -34,10 +34,11 @@ use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; use rustc_span::source_map::{respan, Spanned}; use rustc_span::symbol::{kw, sym, Ident, Symbol}; use rustc_span::{Span, DUMMY_SP}; +use std::alloc::Allocator; +use std::alloc::Global; use std::fmt; use std::mem; use thin_vec::{thin_vec, ThinVec}; - /// A "Label" is an identifier of some point in sources, /// e.g. 
in the following code: /// @@ -3112,26 +3113,26 @@ mod size_asserts { static_assert_size!(AssocItem, 104); static_assert_size!(AssocItemKind, 32); static_assert_size!(Attribute, 32); - static_assert_size!(Block, 48); - static_assert_size!(Expr, 72); - static_assert_size!(ExprKind, 40); - static_assert_size!(Fn, 184); + static_assert_size!(Block, 48 + mem::size_of::<::CoAllocMeta>()); + static_assert_size!(Expr, 72 + mem::size_of::<::CoAllocMeta>()); + static_assert_size!(ExprKind, 40 + mem::size_of::<::CoAllocMeta>()); + static_assert_size!(Fn, 184 + 2 * mem::size_of::<::CoAllocMeta>()); static_assert_size!(ForeignItem, 96); static_assert_size!(ForeignItemKind, 24); static_assert_size!(GenericArg, 24); - static_assert_size!(GenericBound, 72); - static_assert_size!(Generics, 72); - static_assert_size!(Impl, 184); - static_assert_size!(Item, 184); - static_assert_size!(ItemKind, 112); + static_assert_size!(GenericBound, 72 + mem::size_of::<::CoAllocMeta>()); + static_assert_size!(Generics, 72 + 2 * mem::size_of::<::CoAllocMeta>()); + static_assert_size!(Impl, 184 + 3 * mem::size_of::<::CoAllocMeta>()); + static_assert_size!(Item, 184 + 3 * mem::size_of::<::CoAllocMeta>()); + static_assert_size!(ItemKind, 112 + 3 * mem::size_of::<::CoAllocMeta>()); static_assert_size!(LitKind, 24); static_assert_size!(Local, 72); static_assert_size!(MetaItemLit, 40); static_assert_size!(Param, 40); - static_assert_size!(Pat, 88); + static_assert_size!(Pat, 88 + mem::size_of::<::CoAllocMeta>()); static_assert_size!(Path, 24); static_assert_size!(PathSegment, 24); - static_assert_size!(PatKind, 64); + static_assert_size!(PatKind, 64 + mem::size_of::<::CoAllocMeta>()); static_assert_size!(Stmt, 32); static_assert_size!(StmtKind, 16); static_assert_size!(Ty, 64); diff --git a/compiler/rustc_ast/src/lib.rs b/compiler/rustc_ast/src/lib.rs index 23c32fa96ca4..3baaf2c9b63b 100644 --- a/compiler/rustc_ast/src/lib.rs +++ b/compiler/rustc_ast/src/lib.rs @@ -8,10 +8,12 @@ html_root_url = 
"https://doc.rust-lang.org/nightly/nightly-rustc/", test(attr(deny(warnings))) )] +#![feature(allocator_api)] #![feature(associated_type_bounds)] #![feature(box_patterns)] #![feature(const_default_impls)] #![feature(const_trait_impl)] +#![feature(global_co_alloc_meta)] #![feature(if_let_guard)] #![feature(let_chains)] #![feature(min_specialization)] diff --git a/compiler/rustc_middle/src/lib.rs b/compiler/rustc_middle/src/lib.rs index e6cd38c0f158..14c059d9db9a 100644 --- a/compiler/rustc_middle/src/lib.rs +++ b/compiler/rustc_middle/src/lib.rs @@ -32,6 +32,7 @@ #![feature(exhaustive_patterns)] #![feature(generators)] #![feature(get_mut_unchecked)] +#![feature(global_co_alloc_meta)] #![feature(if_let_guard)] #![feature(iter_from_generator)] #![feature(local_key_cell_methods)] diff --git a/compiler/rustc_middle/src/mir/mod.rs b/compiler/rustc_middle/src/mir/mod.rs index 46184cddd51f..b9fd6b135ba6 100644 --- a/compiler/rustc_middle/src/mir/mod.rs +++ b/compiler/rustc_middle/src/mir/mod.rs @@ -34,6 +34,7 @@ use rustc_span::{Span, DUMMY_SP}; use either::Either; +use std::alloc::{Allocator, Global}; use std::borrow::Cow; use std::fmt::{self, Debug, Display, Formatter, Write}; use std::ops::{ControlFlow, Index, IndexMut}; @@ -3077,7 +3078,10 @@ mod size_asserts { use super::*; use rustc_data_structures::static_assert_size; // tidy-alphabetical-start - static_assert_size!(BasicBlockData<'_>, 144); + static_assert_size!( + BasicBlockData<'_>, + 144 + mem::size_of::<::CoAllocMeta>() + ); static_assert_size!(LocalDecl<'_>, 56); static_assert_size!(Statement<'_>, 32); static_assert_size!(StatementKind<'_>, 16); diff --git a/compiler/rustc_middle/src/mir/syntax.rs b/compiler/rustc_middle/src/mir/syntax.rs index ae09562a85e9..9557b0d9cd08 100644 --- a/compiler/rustc_middle/src/mir/syntax.rs +++ b/compiler/rustc_middle/src/mir/syntax.rs @@ -4,6 +4,7 @@ //! The intention is that this file only contains datatype declarations, no code. 
use super::{BasicBlock, Constant, Field, Local, SwitchTargets, UserTypeProjection}; +use core::mem; use crate::mir::coverage::{CodeRegion, CoverageKind}; use crate::traits::Reveal; @@ -24,6 +25,8 @@ use rustc_span::symbol::Symbol; use rustc_span::Span; use rustc_target::asm::InlineAsmRegOrRegClass; +use std::alloc::{Allocator, Global}; + /// Represents the "flavors" of MIR. /// /// All flavors of MIR use the same data structure, but there are some important differences. These @@ -1284,6 +1287,6 @@ mod size_asserts { static_assert_size!(Operand<'_>, 24); static_assert_size!(Place<'_>, 16); static_assert_size!(PlaceElem<'_>, 24); - static_assert_size!(Rvalue<'_>, 40); + static_assert_size!(Rvalue<'_>, 40 + mem::size_of::<::CoAllocMeta>()); // tidy-alphabetical-end } diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs index b49a01d75ed5..b2968280e12a 100644 --- a/compiler/rustc_parse/src/lib.rs +++ b/compiler/rustc_parse/src/lib.rs @@ -1,7 +1,9 @@ //! The main parser interface. 
+#![feature(allocator_api)] #![feature(array_windows)] #![feature(box_patterns)] +#![feature(global_co_alloc_meta)] #![feature(if_let_guard)] #![feature(iter_intersperse)] #![feature(let_chains)] diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs index dbd3b76786f4..0f5cbed92309 100644 --- a/compiler/rustc_parse/src/parser/attr_wrapper.rs +++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs @@ -1,4 +1,5 @@ use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor, TrailingToken}; +use core::mem; use rustc_ast::token::{self, Delimiter, Token, TokenKind}; use rustc_ast::tokenstream::{AttrTokenStream, AttributesData, ToAttrTokenStream}; use rustc_ast::tokenstream::{AttrTokenTree, DelimSpan, LazyAttrTokenStream, Spacing}; @@ -8,6 +9,7 @@ use rustc_errors::PResult; use rustc_session::parse::ParseSess; use rustc_span::{sym, Span, DUMMY_SP}; +use std::alloc::{Allocator, Global}; use std::ops::Range; /// A wrapper type to ensure that the parser handles outer attributes correctly. @@ -469,6 +471,9 @@ mod size_asserts { use rustc_data_structures::static_assert_size; // tidy-alphabetical-start static_assert_size!(AttrWrapper, 16); - static_assert_size!(LazyAttrTokenStreamImpl, 120); + static_assert_size!( + LazyAttrTokenStreamImpl, + 120 + mem::size_of::<::CoAllocMeta>() + ); // tidy-alphabetical-end } diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index a74f408d7741..0ea735e51610 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -37,6 +37,7 @@ use rustc_session::parse::ParseSess; use rustc_span::source_map::{Span, DUMMY_SP}; use rustc_span::symbol::{kw, sym, Ident, Symbol}; +use std::alloc::{Allocator, Global}; use std::ops::Range; use std::{cmp, mem, slice}; @@ -167,7 +168,10 @@ pub struct Parser<'a> { // This type is used a lot, e.g. 
it's cloned when matching many declarative macro rules with nonterminals. Make sure // it doesn't unintentionally get bigger. #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))] -rustc_data_structures::static_assert_size!(Parser<'_>, 312); +rustc_data_structures::static_assert_size!( + Parser<'_>, + 312 + 4 * mem::size_of::<::CoAllocMeta>() +); /// Stores span information about a closure. #[derive(Clone)] diff --git a/compiler/rustc_trait_selection/src/lib.rs b/compiler/rustc_trait_selection/src/lib.rs index 6fa094103639..8d855b62fa80 100644 --- a/compiler/rustc_trait_selection/src/lib.rs +++ b/compiler/rustc_trait_selection/src/lib.rs @@ -11,10 +11,12 @@ //! This API is completely unstable and subject to change. #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] +#![feature(allocator_api)] #![feature(associated_type_bounds)] #![feature(box_patterns)] #![feature(control_flow_enum)] #![feature(drain_filter)] +#![feature(global_co_alloc_meta)] #![feature(hash_drain_filter)] #![feature(let_chains)] #![feature(if_let_guard)] diff --git a/compiler/rustc_trait_selection/src/traits/fulfill.rs b/compiler/rustc_trait_selection/src/traits/fulfill.rs index deeed930e50e..30b923ad6833 100644 --- a/compiler/rustc_trait_selection/src/traits/fulfill.rs +++ b/compiler/rustc_trait_selection/src/traits/fulfill.rs @@ -1,4 +1,6 @@ use crate::infer::{InferCtxt, TyOrConstInferVar}; +use core::mem; +// use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::obligation_forest::ProcessResult; use rustc_data_structures::obligation_forest::{Error, ForestObligation, Outcome}; use rustc_data_structures::obligation_forest::{ObligationForest, ObligationProcessor}; @@ -9,6 +11,7 @@ use rustc_middle::ty::abstract_const::NotConstEvaluatable; use rustc_middle::ty::error::{ExpectedFound, TypeError}; use rustc_middle::ty::subst::SubstsRef; use rustc_middle::ty::{self, Binder, Const, TypeVisitable}; +use std::alloc::{Allocator, Global}; use 
std::marker::PhantomData; use super::const_evaluatable; @@ -77,7 +80,10 @@ pub struct PendingPredicateObligation<'tcx> { // `PendingPredicateObligation` is used a lot. Make sure it doesn't unintentionally get bigger. #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))] -static_assert_size!(PendingPredicateObligation<'_>, 72); +static_assert_size!( + PendingPredicateObligation<'_>, + 72 + mem::size_of::<::CoAllocMeta>() +); impl<'a, 'tcx> FulfillmentContext<'tcx> { /// Creates a new fulfillment context. diff --git a/library/alloc/src/boxed.rs b/library/alloc/src/boxed.rs index a563b2587236..439e2a800167 100644 --- a/library/alloc/src/boxed.rs +++ b/library/alloc/src/boxed.rs @@ -146,6 +146,7 @@ #![stable(feature = "rust1", since = "1.0.0")] +use crate::co_alloc::CoAllocPref; use core::any::Any; use core::async_iter::AsyncIterator; use core::borrow; @@ -641,7 +642,10 @@ impl Box<[T]> { #[unstable(feature = "new_uninit", issue = "63291")] #[must_use] pub fn new_uninit_slice(len: usize) -> Box<[mem::MaybeUninit]> { - unsafe { RawVec::with_capacity(len).into_box(len) } + // false = no need for co-alloc metadata, since it would get lost once converted to Box. + unsafe { + RawVec::::with_capacity(len).into_box(len) + } } /// Constructs a new boxed slice with uninitialized contents, with the memory @@ -666,7 +670,11 @@ impl Box<[T]> { #[unstable(feature = "new_uninit", issue = "63291")] #[must_use] pub fn new_zeroed_slice(len: usize) -> Box<[mem::MaybeUninit]> { - unsafe { RawVec::with_capacity_zeroed(len).into_box(len) } + // false = no need for co-alloc metadata, since it would get lost once converted to Box. + unsafe { + RawVec::::with_capacity_zeroed(len) + .into_box(len) + } } /// Constructs a new boxed slice with uninitialized contents. 
Returns an error if @@ -698,7 +706,12 @@ impl Box<[T]> { Err(_) => return Err(AllocError), }; let ptr = Global.allocate(layout)?; - Ok(RawVec::from_raw_parts_in(ptr.as_mut_ptr() as *mut _, len, Global).into_box(len)) + Ok(RawVec::::from_raw_parts_in( + ptr.as_mut_ptr() as *mut _, + len, + Global, + ) + .into_box(len)) } } @@ -730,12 +743,21 @@ impl Box<[T]> { Err(_) => return Err(AllocError), }; let ptr = Global.allocate_zeroed(layout)?; - Ok(RawVec::from_raw_parts_in(ptr.as_mut_ptr() as *mut _, len, Global).into_box(len)) + Ok(RawVec::::from_raw_parts_in( + ptr.as_mut_ptr() as *mut _, + len, + Global, + ) + .into_box(len)) } } } -impl Box<[T], A> { +#[allow(unused_braces)] +impl Box<[T], A> +where + [(); { crate::meta_num_slots!(A, crate::CO_ALLOC_PREF_META_NO!()) }]:, +{ /// Constructs a new boxed slice with uninitialized contents in the provided allocator. /// /// # Examples @@ -762,8 +784,11 @@ impl Box<[T], A> { #[unstable(feature = "allocator_api", issue = "32838")] // #[unstable(feature = "new_uninit", issue = "63291")] #[must_use] + #[allow(unused_braces)] pub fn new_uninit_slice_in(len: usize, alloc: A) -> Box<[mem::MaybeUninit], A> { - unsafe { RawVec::with_capacity_in(len, alloc).into_box(len) } + unsafe { + RawVec::::with_capacity_in(len, alloc).into_box(len) + } } /// Constructs a new boxed slice with uninitialized contents in the provided allocator, @@ -790,8 +815,12 @@ impl Box<[T], A> { #[unstable(feature = "allocator_api", issue = "32838")] // #[unstable(feature = "new_uninit", issue = "63291")] #[must_use] + #[allow(unused_braces)] pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Box<[mem::MaybeUninit], A> { - unsafe { RawVec::with_capacity_zeroed_in(len, alloc).into_box(len) } + unsafe { + RawVec::::with_capacity_zeroed_in(len, alloc) + .into_box(len) + } } } @@ -1496,7 +1525,8 @@ impl From<&[T]> for Box<[T]> { /// ``` fn from(slice: &[T]) -> Box<[T]> { let len = slice.len(); - let buf = RawVec::with_capacity(len); + // false = no need for 
co-alloc metadata, since it would get lost once converted to Box. + let buf = RawVec::::with_capacity(len); unsafe { ptr::copy_nonoverlapping(slice.as_ptr(), buf.ptr(), len); buf.into_box(slice.len()).assume_init() @@ -1661,8 +1691,13 @@ impl TryFrom> for Box<[T; N]> { #[cfg(not(no_global_oom_handling))] #[stable(feature = "boxed_array_try_from_vec", since = "1.66.0")] -impl TryFrom> for Box<[T; N]> { - type Error = Vec; +#[allow(unused_braces)] +impl TryFrom> + for Box<[T; N]> +where + [(); { meta_num_slots_global!(CO_ALLOC_PREF) }]:, +{ + type Error = Vec; /// Attempts to convert a `Vec` into a `Box<[T; N]>`. /// @@ -1682,7 +1717,7 @@ impl TryFrom> for Box<[T; N]> { /// let state: Box<[f32; 100]> = vec![1.0; 100].try_into().unwrap(); /// assert_eq!(state.len(), 100); /// ``` - fn try_from(vec: Vec) -> Result { + fn try_from(vec: Vec) -> Result { if vec.len() == N { let boxed_slice = vec.into_boxed_slice(); Ok(unsafe { boxed_slice_as_array_unchecked(boxed_slice) }) @@ -2019,10 +2054,15 @@ impl FromIterator for Box<[I]> { #[cfg(not(no_global_oom_handling))] #[stable(feature = "box_slice_clone", since = "1.3.0")] -impl Clone for Box<[T], A> { +#[allow(unused_braces)] +impl Clone for Box<[T], A> +where + [(); { crate::meta_num_slots!(A, crate::CO_ALLOC_PREF_META_NO!()) }]:, +{ fn clone(&self) -> Self { let alloc = Box::allocator(self).clone(); - self.to_vec_in(alloc).into_boxed_slice() + // false = no need for co-alloc metadata, since it would get lost once converted to the boxed slice. + self.to_vec_in_co::(alloc).into_boxed_slice() } fn clone_from(&mut self, other: &Self) { diff --git a/library/alloc/src/co_alloc.rs b/library/alloc/src/co_alloc.rs new file mode 100644 index 000000000000..23b0598cf4ee --- /dev/null +++ b/library/alloc/src/co_alloc.rs @@ -0,0 +1,47 @@ +//! CoAllocation-specific types that only apply in heap-based applications (hence not a part of +//! [::core]). +//! +//! Types here have names with `CoAlloc` prefix.
Yes, when using a qualified path (like +//! ::alloc::co_alloc::CoAllocPref), that involves "stuttering", which is not recommended. +//! +//! However, as per Rust Book the common practice is to import type names fully and access them just +//! with their name (except for cases of conflict). And we don't want the type names any shorter +//! (such as `Pref`), because those would be vague/confusing. + +/// `CoAllocPref` values indicate a type's preference for coallocation (in either user space, or +/// `std` space). Used as a `const` generic parameter type (usually called `CO_ALLOC_PREF`). +/// +/// The actual value may be overridden by the allocator. See also `CoAllocMetaNumSlotsPref` and +/// `co_alloc_pref` macro. +/// +/// This type WILL CHANGE (once `#![feature(generic_const_exprs)]` and +/// `#![feature(adt_const_params)]` are stable) to a dedicated struct/enum. Hence: +/// - DO NOT construct instances, but use `co_alloc_pref` macro together with constants +/// `CO_ALLOC_PREF_META_YES` and `CO_ALLOC_PREF_META_NO`; +/// - DO NOT hard code any values; and +/// - DO NOT mix this/cast this with/to `u8`, `u16`, `usize` (nor any other integer). +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +pub type CoAllocPref = usize; //u8; + +/// `CoAllocMetaNumSlotsPref` values indicate that a type (but not necessarily an allocator) prefers +/// to coallocate by carrying metadata, or not. (In either user space, or `std` or `alloc` space). +/// Used as an argument to macro call of `co_alloc_pref`, which generates a `CoAllocPref` value. +/// +/// Currently this indicates only the (preferred) number of `CoAllocMetaBase` slots being used +/// (either 1 = coallocation, or 0 = no coallocation). However, in the future this type may have +/// other properties (serving as extra hints to the allocator). +/// +/// The actual value may be overridden by the allocator.
For example, if the allocator doesn't +/// support coallocation, then whether this value prefers to coallocate or not makes no difference. +/// +/// This type WILL CHANGE (once `#![feature(generic_const_exprs)]` and +/// `#![feature(adt_const_params)]` are stable) to a dedicated struct/enum. Hence: +/// - DO NOT mix this/cast this with/to `u8`, `u16`, (nor any other integer); and +/// - DO NOT hard code any values, but use `CO_ALLOC_PREF_META_YES` and `CO_ALLOC_PREF_META_NO`. +/// +/// This type is intentionally not `u16`, `u32`, nor `usize`. Why? This helps to prevent mistakes +/// when one would use `CO_ALLOC_PREF_META_YES` or `CO_ALLOC_PREF_META_NO` in place of `CoAllocPref` +/// values, or in place of a result of `meta_num_slots` macro. That also prevents mixing up with +/// [core::alloc::CoAllocatorMetaNumSlots]. +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +pub type CoAllocMetaNumSlotsPref = u16; diff --git a/library/alloc/src/collections/binary_heap/mod.rs b/library/alloc/src/collections/binary_heap/mod.rs index 0b73b1af4eb3..3f08c31fb673 100644 --- a/library/alloc/src/collections/binary_heap/mod.rs +++ b/library/alloc/src/collections/binary_heap/mod.rs @@ -143,6 +143,7 @@ #![allow(missing_docs)] #![stable(feature = "rust1", since = "1.0.0")] +use crate::co_alloc::CoAllocPref; use core::fmt; use core::iter::{FromIterator, FusedIterator, InPlaceIterable, SourceIter, TrustedLen}; use core::mem::{self, swap, ManuallyDrop}; @@ -150,9 +151,12 @@ use core::num::NonZeroUsize; use core::ops::{Deref, DerefMut}; use core::ptr; +use crate::alloc::Global; + use crate::collections::TryReserveError; use crate::slice; use crate::vec::{self, AsVecIntoIter, Vec}; +use crate::CO_ALLOC_PREF_DEFAULT; use super::SpecExtend; @@ -1241,7 +1245,8 @@ impl BinaryHeap { /// ``` #[inline] #[stable(feature = "drain", since = "1.6.0")] - pub fn drain(&mut self) -> Drain<'_, T> { + #[allow(unused_braces)] + pub fn drain(&mut self) -> Drain<'_, T, { 
SHORT_TERM_VEC_CO_ALLOC_PREF!() }> { Drain { iter: self.data.drain(..) } } @@ -1521,12 +1526,20 @@ unsafe impl TrustedLen for IntoIterSorted {} /// [`drain`]: BinaryHeap::drain #[stable(feature = "drain", since = "1.6.0")] #[derive(Debug)] -pub struct Drain<'a, T: 'a> { - iter: vec::Drain<'a, T>, +#[allow(unused_braces)] +pub struct Drain<'a, T: 'a, const CO_ALLOC_PREF: CoAllocPref> +where + [(); { meta_num_slots_global!(CO_ALLOC_PREF) }]:, +{ + iter: vec::Drain<'a, T, Global, CO_ALLOC_PREF>, } #[stable(feature = "drain", since = "1.6.0")] -impl Iterator for Drain<'_, T> { +#[allow(unused_braces)] +impl Iterator for Drain<'_, T, CO_ALLOC_PREF> +where + [(); { meta_num_slots_global!(CO_ALLOC_PREF) }]:, +{ type Item = T; #[inline] @@ -1541,7 +1554,11 @@ impl Iterator for Drain<'_, T> { } #[stable(feature = "drain", since = "1.6.0")] -impl DoubleEndedIterator for Drain<'_, T> { +#[allow(unused_braces)] +impl DoubleEndedIterator for Drain<'_, T, CO_ALLOC_PREF> +where + [(); { meta_num_slots_global!(CO_ALLOC_PREF) }]:, +{ #[inline] fn next_back(&mut self) -> Option { self.iter.next_back() @@ -1549,14 +1566,22 @@ impl DoubleEndedIterator for Drain<'_, T> { } #[stable(feature = "drain", since = "1.6.0")] -impl ExactSizeIterator for Drain<'_, T> { +#[allow(unused_braces)] +impl ExactSizeIterator for Drain<'_, T, CO_ALLOC_PREF> +where + [(); { meta_num_slots_global!(CO_ALLOC_PREF) }]:, +{ fn is_empty(&self) -> bool { self.iter.is_empty() } } #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for Drain<'_, T> {} +#[allow(unused_braces)] +impl FusedIterator for Drain<'_, T, CO_ALLOC_PREF> where + [(); { meta_num_slots_global!(CO_ALLOC_PREF) }]: +{ +} /// A draining iterator over the elements of a `BinaryHeap`. /// @@ -1644,7 +1669,8 @@ impl From<[T; N]> for BinaryHeap { } #[stable(feature = "binary_heap_extras_15", since = "1.5.0")] -impl From> for Vec { +#[allow(unused_braces)] +impl From> for Vec { /// Converts a `BinaryHeap` into a `Vec`. 
/// /// This conversion requires no data movement or allocation, and has diff --git a/library/alloc/src/collections/binary_heap/tests.rs b/library/alloc/src/collections/binary_heap/tests.rs index ffbb6c80ac01..4f46bc5385aa 100644 --- a/library/alloc/src/collections/binary_heap/tests.rs +++ b/library/alloc/src/collections/binary_heap/tests.rs @@ -1,6 +1,7 @@ use super::*; use crate::boxed::Box; use crate::testing::crash_test::{CrashTestDummy, Panic}; +use crate::{CO_ALLOC_PREF_META_NO, CO_ALLOC_PREF_META_YES}; use core::mem; use std::iter::TrustedLen; use std::panic::{catch_unwind, AssertUnwindSafe}; @@ -448,7 +449,14 @@ fn test_extend_specialization() { #[allow(dead_code)] fn assert_covariance() { - fn drain<'new>(d: Drain<'static, &'static str>) -> Drain<'new, &'new str> { + fn drain<'new>( + d: Drain<'static, &'static str, { CO_ALLOC_PREF_META_NO!() }>, + ) -> Drain<'new, &'new str, { CO_ALLOC_PREF_META_NO!() }> { + d + } + fn drain_co<'new>( + d: Drain<'static, &'static str, { CO_ALLOC_PREF_META_YES!() }>, + ) -> Drain<'new, &'new str, { CO_ALLOC_PREF_META_YES!() }> { d } } diff --git a/library/alloc/src/collections/btree/node.rs b/library/alloc/src/collections/btree/node.rs index 3233a575ecf2..2e43ce937f06 100644 --- a/library/alloc/src/collections/btree/node.rs +++ b/library/alloc/src/collections/btree/node.rs @@ -319,7 +319,8 @@ impl NodeRef self, ) -> Result, marker::Edge>, Self> { const { - assert!(BorrowType::TRAVERSAL_PERMIT); + //@FIXME uncomment once compilable + //assert!(BorrowType::TRAVERSAL_PERMIT); } // We need to use raw pointers to nodes because, if BorrowType is marker::ValMut, @@ -1063,7 +1064,8 @@ impl /// both, upon success, do nothing. 
pub fn descend(self) -> NodeRef { const { - assert!(BorrowType::TRAVERSAL_PERMIT); + // @FIXME uncomment once compilable + //assert!(BorrowType::TRAVERSAL_PERMIT); } // We need to use raw pointers to nodes because, if BorrowType is diff --git a/library/alloc/src/collections/vec_deque/drain.rs b/library/alloc/src/collections/vec_deque/drain.rs index 89feb361ddc1..4dd85edac6ea 100644 --- a/library/alloc/src/collections/vec_deque/drain.rs +++ b/library/alloc/src/collections/vec_deque/drain.rs @@ -1,3 +1,4 @@ +use crate::co_alloc::CoAllocPref; use core::iter::FusedIterator; use core::marker::PhantomData; use core::mem::{self, SizedTypeProperties}; @@ -15,14 +16,16 @@ use super::VecDeque; /// /// [`drain`]: VecDeque::drain #[stable(feature = "drain", since = "1.6.0")] +#[allow(unused_braces)] pub struct Drain< 'a, T: 'a, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, -> { - // We can't just use a &mut VecDeque, as that would make Drain invariant over T - // and we want it to be covariant instead - deque: NonNull>, + const CO_ALLOC_PREF: CoAllocPref = { SHORT_TERM_VEC_CO_ALLOC_PREF!() }, +> where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ + deque: NonNull>, // drain_start is stored in deque.len drain_len: usize, // index into the logical array, not the physical one (always lies in [0..deque.len)) @@ -34,9 +37,13 @@ pub struct Drain< _marker: PhantomData<&'a T>, } -impl<'a, T, A: Allocator> Drain<'a, T, A> { +#[allow(unused_braces)] +impl<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Drain<'a, T, A, CO_ALLOC_PREF> +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ pub(super) unsafe fn new( - deque: &'a mut VecDeque, + deque: &'a mut VecDeque, drain_start: usize, drain_len: usize, ) -> Self { @@ -88,7 +95,12 @@ impl<'a, T, A: Allocator> Drain<'a, T, A> { } #[stable(feature = "collection_debug", since = "1.17.0")] -impl fmt::Debug for Drain<'_, T, A> { +#[allow(unused_braces)] +impl fmt::Debug + for 
Drain<'_, T, A, CO_ALLOC_PREF> +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("Drain") .field(&self.drain_len) @@ -100,16 +112,40 @@ impl fmt::Debug for Drain<'_, T, A> { } #[stable(feature = "drain", since = "1.6.0")] -unsafe impl Sync for Drain<'_, T, A> {} +#[allow(unused_braces)] +unsafe impl Sync + for Drain<'_, T, A, CO_ALLOC_PREF> +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ +} #[stable(feature = "drain", since = "1.6.0")] -unsafe impl Send for Drain<'_, T, A> {} +#[allow(unused_braces)] +unsafe impl Send + for Drain<'_, T, A, CO_ALLOC_PREF> +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ +} #[stable(feature = "drain", since = "1.6.0")] -impl Drop for Drain<'_, T, A> { +#[allow(unused_braces)] +impl Drop for Drain<'_, T, A, CO_ALLOC_PREF> +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn drop(&mut self) { - struct DropGuard<'r, 'a, T, A: Allocator>(&'r mut Drain<'a, T, A>); + struct DropGuard<'r, 'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref>( + &'r mut Drain<'a, T, A, CO_ALLOC_PREF>, + ) + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:; - impl<'r, 'a, T, A: Allocator> Drop for DropGuard<'r, 'a, T, A> { + impl<'r, 'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Drop + for DropGuard<'r, 'a, T, A, CO_ALLOC_PREF> + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + { fn drop(&mut self) { if self.0.remaining != 0 { unsafe { @@ -190,7 +226,11 @@ impl Drop for Drain<'_, T, A> { } #[stable(feature = "drain", since = "1.6.0")] -impl Iterator for Drain<'_, T, A> { +#[allow(unused_braces)] +impl Iterator for Drain<'_, T, A, CO_ALLOC_PREF> +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ type Item = T; #[inline] @@ -212,7 +252,12 @@ impl Iterator for Drain<'_, T, A> { } #[stable(feature = "drain", since = "1.6.0")] -impl DoubleEndedIterator for Drain<'_, T, A> { 
+#[allow(unused_braces)] +impl DoubleEndedIterator + for Drain<'_, T, A, CO_ALLOC_PREF> +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ #[inline] fn next_back(&mut self) -> Option { if self.remaining == 0 { @@ -225,7 +270,19 @@ impl DoubleEndedIterator for Drain<'_, T, A> { } #[stable(feature = "drain", since = "1.6.0")] -impl ExactSizeIterator for Drain<'_, T, A> {} +#[allow(unused_braces)] +impl ExactSizeIterator + for Drain<'_, T, A, CO_ALLOC_PREF> +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ +} #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for Drain<'_, T, A> {} +#[allow(unused_braces)] +impl FusedIterator + for Drain<'_, T, A, CO_ALLOC_PREF> +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ +} diff --git a/library/alloc/src/collections/vec_deque/into_iter.rs b/library/alloc/src/collections/vec_deque/into_iter.rs index 34bc0ce9177c..0615290f7a56 100644 --- a/library/alloc/src/collections/vec_deque/into_iter.rs +++ b/library/alloc/src/collections/vec_deque/into_iter.rs @@ -1,3 +1,4 @@ +use crate::co_alloc::CoAllocPref; use core::iter::{FusedIterator, TrustedLen}; use core::{array, fmt, mem::MaybeUninit, ops::Try, ptr}; @@ -14,32 +15,49 @@ use super::VecDeque; /// [`IntoIterator`]: core::iter::IntoIterator #[derive(Clone)] #[stable(feature = "rust1", since = "1.0.0")] +#[allow(unused_braces)] pub struct IntoIter< T, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, -> { - inner: VecDeque, + const CO_ALLOC_PREF: CoAllocPref = { CO_ALLOC_PREF_DEFAULT!() }, +> where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ + inner: VecDeque, } -impl IntoIter { - pub(super) fn new(inner: VecDeque) -> Self { +#[allow(unused_braces)] +impl IntoIter +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ + pub(super) fn new(inner: VecDeque) -> Self { IntoIter { inner } } - pub(super) fn into_vecdeque(self) -> VecDeque { + pub(super) fn into_vecdeque(self) -> 
VecDeque { self.inner } } #[stable(feature = "collection_debug", since = "1.17.0")] -impl fmt::Debug for IntoIter { +#[allow(unused_braces)] +impl fmt::Debug + for IntoIter +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("IntoIter").field(&self.inner).finish() } } #[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for IntoIter { +#[allow(unused_braces)] +impl Iterator for IntoIter +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ type Item = T; #[inline] @@ -75,13 +93,19 @@ impl Iterator for IntoIter { F: FnMut(B, Self::Item) -> R, R: Try, { - struct Guard<'a, T, A: Allocator> { - deque: &'a mut VecDeque, + struct Guard<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + { + deque: &'a mut VecDeque, // `consumed <= deque.len` always holds. consumed: usize, } - impl<'a, T, A: Allocator> Drop for Guard<'a, T, A> { + impl<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Drop for Guard<'a, T, A, CO_ALLOC_PREF> + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + { fn drop(&mut self) { self.deque.len -= self.consumed; self.deque.head = self.deque.to_physical_idx(self.consumed); @@ -175,7 +199,12 @@ impl Iterator for IntoIter { } #[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator for IntoIter { +#[allow(unused_braces)] +impl DoubleEndedIterator + for IntoIter +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ #[inline] fn next_back(&mut self) -> Option { self.inner.pop_back() @@ -198,13 +227,19 @@ impl DoubleEndedIterator for IntoIter { F: FnMut(B, Self::Item) -> R, R: Try, { - struct Guard<'a, T, A: Allocator> { - deque: &'a mut VecDeque, + struct Guard<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + { + deque: &'a mut VecDeque, // `consumed <= deque.len` always 
holds. consumed: usize, } - impl<'a, T, A: Allocator> Drop for Guard<'a, T, A> { + impl<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Drop for Guard<'a, T, A, CO_ALLOC_PREF> + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + { fn drop(&mut self) { self.deque.len -= self.consumed; } @@ -245,7 +280,12 @@ impl DoubleEndedIterator for IntoIter { } #[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator for IntoIter { +#[allow(unused_braces)] +impl ExactSizeIterator + for IntoIter +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ #[inline] fn is_empty(&self) -> bool { self.inner.is_empty() @@ -253,7 +293,19 @@ impl ExactSizeIterator for IntoIter { } #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for IntoIter {} +#[allow(unused_braces)] +impl FusedIterator + for IntoIter +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ +} #[unstable(feature = "trusted_len", issue = "37572")] -unsafe impl TrustedLen for IntoIter {} +#[allow(unused_braces)] +unsafe impl TrustedLen + for IntoIter +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ +} diff --git a/library/alloc/src/collections/vec_deque/macros.rs b/library/alloc/src/collections/vec_deque/macros.rs index 5c7913073fe8..e92fc6a8fa3b 100644 --- a/library/alloc/src/collections/vec_deque/macros.rs +++ b/library/alloc/src/collections/vec_deque/macros.rs @@ -1,9 +1,10 @@ macro_rules! 
__impl_slice_eq1 { ([$($vars:tt)*] $lhs:ty, $rhs:ty, $($constraints:tt)*) => { #[stable(feature = "vec_deque_partial_eq_slice", since = "1.17.0")] - impl PartialEq<$rhs> for $lhs + impl PartialEq<$rhs> for $lhs where T: PartialEq, + [(); {crate::meta_num_slots!(A, CO_ALLOC_PREF)}]:, $($constraints)* { fn eq(&self, other: &$rhs) -> bool { diff --git a/library/alloc/src/collections/vec_deque/mod.rs b/library/alloc/src/collections/vec_deque/mod.rs index 1573b3d77dc1..eb5229d484aa 100644 --- a/library/alloc/src/collections/vec_deque/mod.rs +++ b/library/alloc/src/collections/vec_deque/mod.rs @@ -5,8 +5,10 @@ //! are not required to be copyable, and the queue will be sendable if the //! contained type is sendable. +#![feature(global_co_alloc)] #![stable(feature = "rust1", since = "1.0.0")] - +use crate::co_alloc::CoAllocPref; +use crate::CO_ALLOC_PREF_DEFAULT; use core::cmp::{self, Ordering}; use core::fmt; use core::hash::{Hash, Hasher}; @@ -55,7 +57,7 @@ use self::spec_extend::SpecExtend; mod spec_extend; -use self::spec_from_iter::SpecFromIter; +use self::spec_from_iter::SpecFromIterCo; mod spec_from_iter; @@ -91,22 +93,29 @@ mod tests; #[cfg_attr(not(test), rustc_diagnostic_item = "VecDeque")] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_insignificant_dtor] +#[allow(unused_braces)] pub struct VecDeque< T, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, -> { - // `self[0]`, if it exists, is `buf[head]`. - // `head < buf.capacity()`, unless `buf.capacity() == 0` when `head == 0`. + const CO_ALLOC_PREF: CoAllocPref = { CO_ALLOC_PREF_DEFAULT!() }, +> where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ head: usize, // the number of initialized elements, starting from the one at `head` and potentially wrapping around. // if `len == 0`, the exact value of `head` is unimportant. // if `T` is zero-Sized, then `self.len <= usize::MAX`, otherwise `self.len <= isize::MAX as usize`. 
len: usize, - buf: RawVec, + buf: RawVec, } #[stable(feature = "rust1", since = "1.0.0")] -impl Clone for VecDeque { +#[allow(unused_braces)] +impl Clone + for VecDeque +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn clone(&self) -> Self { let mut deq = Self::with_capacity_in(self.len(), self.allocator().clone()); deq.extend(self.iter().cloned()); @@ -120,7 +129,12 @@ impl Clone for VecDeque { } #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl<#[may_dangle] T, A: Allocator> Drop for VecDeque { +#[allow(unused_braces)] +unsafe impl<#[may_dangle] T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Drop + for VecDeque +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn drop(&mut self) { /// Runs the destructor for all items in the slice when it gets dropped (normally or /// during unwinding). @@ -145,15 +159,33 @@ unsafe impl<#[may_dangle] T, A: Allocator> Drop for VecDeque { } #[stable(feature = "rust1", since = "1.0.0")] +#[allow(unused_braces)] +impl Default for VecDeque +where + [(); { meta_num_slots_global!(CO_ALLOC_PREF) }]:, +{ + /// Creates an empty deque. + #[inline] + default fn default() -> VecDeque { + VecDeque::::new_co() + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +#[allow(unused_braces)] impl Default for VecDeque { /// Creates an empty deque. 
#[inline] fn default() -> VecDeque { - VecDeque::new() + VecDeque::::new() } } -impl VecDeque { +#[allow(unused_braces)] +impl VecDeque +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ /// Marginally more convenient #[inline] fn ptr(&self) -> *mut T { @@ -442,12 +474,19 @@ impl VecDeque { mut iter: impl Iterator, len: usize, ) -> usize { - struct Guard<'a, T, A: Allocator> { - deque: &'a mut VecDeque, + struct Guard<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + { + deque: &'a mut VecDeque, written: usize, } - impl<'a, T, A: Allocator> Drop for Guard<'a, T, A> { + #[allow(unused_braces)] + impl<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Drop for Guard<'a, T, A, CO_ALLOC_PREF> + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + { fn drop(&mut self) { self.deque.len += self.written; } @@ -539,7 +578,8 @@ impl VecDeque { #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_vec_deque_new", since = "1.68.0")] #[must_use] - pub const fn new() -> VecDeque { + #[allow(unused_braces)] + pub const fn new() -> VecDeque { // FIXME: This should just be `VecDeque::new_in(Global)` once that hits stable. VecDeque { head: 0, len: 0, buf: RawVec::NEW } } @@ -556,12 +596,42 @@ impl VecDeque { #[inline] #[stable(feature = "rust1", since = "1.0.0")] #[must_use] - pub fn with_capacity(capacity: usize) -> VecDeque { - Self::with_capacity_in(capacity, Global) + #[allow(unused_braces)] + pub fn with_capacity(capacity: usize) -> VecDeque { + VecDeque::::with_capacity_in(capacity, Global) + } +} + +#[allow(unused_braces)] +impl VecDeque +where + [(); { crate::meta_num_slots_global!(CO_ALLOC_PREF) }]:, +{ + /// Coallocation-aware version of `new`. 
+ #[inline] + #[unstable(feature = "co_alloc_global", issue = "none")] + #[must_use] + #[allow(unused_braces)] + pub const fn new_co() -> VecDeque { + // FIXME: This should just be `VecDeque::new_in(Global)` once that hits stable. + VecDeque { head: 0, len: 0, buf: RawVec::NEW } + } + + /// Coallocation-aware version of `with_capacity`. + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + #[must_use] + #[allow(unused_braces)] + pub fn with_capacity_co(capacity: usize) -> VecDeque { + VecDeque::::with_capacity_in(capacity, Global) } } -impl VecDeque { +#[allow(unused_braces)] +impl VecDeque +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ /// Creates an empty deque. /// /// # Examples @@ -573,7 +643,7 @@ impl VecDeque { /// ``` #[inline] #[unstable(feature = "allocator_api", issue = "32838")] - pub const fn new_in(alloc: A) -> VecDeque { + pub const fn new_in(alloc: A) -> VecDeque { VecDeque { head: 0, len: 0, buf: RawVec::new_in(alloc) } } @@ -587,7 +657,7 @@ impl VecDeque { /// let deque: VecDeque = VecDeque::with_capacity(10); /// ``` #[unstable(feature = "allocator_api", issue = "32838")] - pub fn with_capacity_in(capacity: usize, alloc: A) -> VecDeque { + pub fn with_capacity_in(capacity: usize, alloc: A) -> VecDeque { VecDeque { head: 0, len: 0, buf: RawVec::with_capacity_in(capacity, alloc) } } @@ -1368,7 +1438,7 @@ impl VecDeque { /// ``` #[inline] #[stable(feature = "drain", since = "1.6.0")] - pub fn drain(&mut self, range: R) -> Drain<'_, T, A> + pub fn drain(&mut self, range: R) -> Drain<'_, T, A, CO_ALLOC_PREF> where R: RangeBounds, { @@ -2596,7 +2666,11 @@ impl VecDeque { } } -impl VecDeque { +#[allow(unused_braces)] +impl VecDeque +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ /// Modifies the deque in-place so that `len()` is equal to new_len, /// either by removing excess elements from the back or by appending clones of `value` /// to the back. 
@@ -2640,8 +2714,13 @@ fn wrap_index(logical_index: usize, capacity: usize) -> usize { if logical_index >= capacity { logical_index - capacity } else { logical_index } } +#[allow(unused_braces)] #[stable(feature = "rust1", since = "1.0.0")] -impl PartialEq for VecDeque { +impl PartialEq + for VecDeque +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn eq(&self, other: &Self) -> bool { if self.len != other.len() { return false; @@ -2679,25 +2758,38 @@ impl PartialEq for VecDeque { } } +#[allow(unused_braces)] #[stable(feature = "rust1", since = "1.0.0")] -impl Eq for VecDeque {} +impl Eq for VecDeque where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]: +{ +} -__impl_slice_eq1! { [] VecDeque, Vec, } -__impl_slice_eq1! { [] VecDeque, &[U], } -__impl_slice_eq1! { [] VecDeque, &mut [U], } -__impl_slice_eq1! { [const N: usize] VecDeque, [U; N], } -__impl_slice_eq1! { [const N: usize] VecDeque, &[U; N], } -__impl_slice_eq1! { [const N: usize] VecDeque, &mut [U; N], } +__impl_slice_eq1! { [] VecDeque, Vec, } +__impl_slice_eq1! { [] VecDeque, &[U], } +__impl_slice_eq1! { [] VecDeque, &mut [U], } +__impl_slice_eq1! { [const N: usize] VecDeque, [U; N], } +__impl_slice_eq1! { [const N: usize] VecDeque, &[U; N], } +__impl_slice_eq1! 
{ [const N: usize] VecDeque, &mut [U; N], } #[stable(feature = "rust1", since = "1.0.0")] -impl PartialOrd for VecDeque { +#[allow(unused_braces)] +impl PartialOrd + for VecDeque +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn partial_cmp(&self, other: &Self) -> Option { self.iter().partial_cmp(other.iter()) } } #[stable(feature = "rust1", since = "1.0.0")] -impl Ord for VecDeque { +#[allow(unused_braces)] +impl Ord for VecDeque +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ #[inline] fn cmp(&self, other: &Self) -> Ordering { self.iter().cmp(other.iter()) @@ -2705,7 +2797,11 @@ impl Ord for VecDeque { } #[stable(feature = "rust1", since = "1.0.0")] -impl Hash for VecDeque { +#[allow(unused_braces)] +impl Hash for VecDeque +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn hash(&self, state: &mut H) { state.write_length_prefix(self.len); // It's not possible to use Hash::hash_slice on slices @@ -2719,7 +2815,12 @@ impl Hash for VecDeque { } #[stable(feature = "rust1", since = "1.0.0")] -impl Index for VecDeque { +#[allow(unused_braces)] +impl Index + for VecDeque +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ type Output = T; #[inline] @@ -2729,7 +2830,12 @@ impl Index for VecDeque { } #[stable(feature = "rust1", since = "1.0.0")] -impl IndexMut for VecDeque { +#[allow(unused_braces)] +impl IndexMut + for VecDeque +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ #[inline] fn index_mut(&mut self, index: usize) -> &mut T { self.get_mut(index).expect("Out of bounds access") @@ -2737,26 +2843,49 @@ impl IndexMut for VecDeque { } #[stable(feature = "rust1", since = "1.0.0")] +#[allow(unused_braces)] impl FromIterator for VecDeque { fn from_iter>(iter: I) -> VecDeque { - SpecFromIter::spec_from_iter(iter.into_iter()) + SpecFromIterCo::spec_from_iter_co(iter.into_iter()) + } +} + +#[unstable(feature = "global_co_alloc", issue = "none")] +#[allow(unused_braces)] +impl VecDeque +where + 
[(); { crate::meta_num_slots_global!(CO_ALLOC_PREF) }]:, +{ + /// Like [from_iter], but coallocation-aware. + pub fn from_iter_co>(iter: I) -> VecDeque { + SpecFromIterCo::spec_from_iter_co(iter.into_iter()) } } #[stable(feature = "rust1", since = "1.0.0")] -impl IntoIterator for VecDeque { +#[allow(unused_braces)] +impl IntoIterator + for VecDeque +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ type Item = T; - type IntoIter = IntoIter; + type IntoIter = IntoIter; /// Consumes the deque into a front-to-back iterator yielding elements by /// value. - fn into_iter(self) -> IntoIter { + fn into_iter(self) -> IntoIter { IntoIter::new(self) } } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T, A: Allocator> IntoIterator for &'a VecDeque { +#[allow(unused_braces)] +impl<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> IntoIterator + for &'a VecDeque +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ type Item = &'a T; type IntoIter = Iter<'a, T>; @@ -2766,7 +2895,12 @@ impl<'a, T, A: Allocator> IntoIterator for &'a VecDeque { } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T, A: Allocator> IntoIterator for &'a mut VecDeque { +#[allow(unused_braces)] +impl<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> IntoIterator + for &'a mut VecDeque +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ type Item = &'a mut T; type IntoIter = IterMut<'a, T>; @@ -2776,7 +2910,11 @@ impl<'a, T, A: Allocator> IntoIterator for &'a mut VecDeque { } #[stable(feature = "rust1", since = "1.0.0")] -impl Extend for VecDeque { +#[allow(unused_braces)] +impl Extend for VecDeque +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn extend>(&mut self, iter: I) { >::spec_extend(self, iter.into_iter()); } @@ -2793,7 +2931,12 @@ impl Extend for VecDeque { } #[stable(feature = "extend_ref", since = "1.2.0")] -impl<'a, T: 'a + Copy, A: Allocator> Extend<&'a T> for VecDeque { +#[allow(unused_braces)] +impl<'a, T: 'a 
+ Copy, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Extend<&'a T> + for VecDeque +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn extend>(&mut self, iter: I) { self.spec_extend(iter.into_iter()); } @@ -2810,14 +2953,58 @@ impl<'a, T: 'a + Copy, A: Allocator> Extend<&'a T> for VecDeque { } #[stable(feature = "rust1", since = "1.0.0")] -impl fmt::Debug for VecDeque { +#[allow(unused_braces)] +impl fmt::Debug + for VecDeque +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.iter()).finish() } } #[stable(feature = "vecdeque_vec_conversions", since = "1.10.0")] -impl From> for VecDeque { +#[allow(unused_braces)] +impl< + T, + A: Allocator, + /*const CO_ALLOC_PREF: CoAllocPref,*/ const OTHER_CO_ALLOC_PREF: CoAllocPref, +> From> for VecDeque +//, CO_ALLOC_PREF> +where + //[(); {crate::meta_num_slots!(A, CO_ALLOC_PREF)}]:, + [(); { crate::meta_num_slots!(A, OTHER_CO_ALLOC_PREF) }]:, +{ + /// Turn a [`Vec`] into a [`VecDeque`]. + /// + /// [`Vec`]: crate::vec::Vec + /// [`VecDeque`]: crate::collections::VecDeque + /// + /// This conversion is guaranteed to run in *O*(1) time + /// and to not re-allocate the `Vec`'s buffer or allocate + /// any additional memory. + #[inline] + default fn from(other: Vec) -> Self { + let (ptr, len, cap, alloc) = other.into_raw_parts_with_alloc(); + Self { + head: 0, + len, + buf: unsafe { + RawVec::::from_raw_parts_in(ptr, cap, alloc) + }, + } + } +} + +#[stable(feature = "vecdeque_vec_conversions", since = "1.10.0")] +#[allow(unused_braces)] +impl + From> for VecDeque +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + [(); { crate::meta_num_slots!(A, OTHER_CO_ALLOC_PREF) }]:, +{ /// Turn a [`Vec`] into a [`VecDeque`]. /// /// [`Vec`]: crate::vec::Vec @@ -2827,14 +3014,24 @@ impl From> for VecDeque { /// and to not re-allocate the `Vec`'s buffer or allocate /// any additional memory. 
#[inline] - fn from(other: Vec) -> Self { + default fn from(other: Vec) -> Self { let (ptr, len, cap, alloc) = other.into_raw_parts_with_alloc(); - Self { head: 0, len, buf: unsafe { RawVec::from_raw_parts_in(ptr, cap, alloc) } } + Self { + head: 0, + len, + buf: unsafe { RawVec::::from_raw_parts_in(ptr, cap, alloc) }, + } } } #[stable(feature = "vecdeque_vec_conversions", since = "1.10.0")] -impl From> for Vec { +#[allow(unused_braces)] +impl + From> for Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + [(); { crate::meta_num_slots!(A, VECDEQUE_CO_ALLOC_PREF) }]:, +{ /// Turn a [`VecDeque`] into a [`Vec`]. /// /// [`Vec`]: crate::vec::Vec @@ -2864,7 +3061,10 @@ impl From> for Vec { /// assert_eq!(vec, [8, 9, 1, 2, 3, 4]); /// assert_eq!(vec.as_ptr(), ptr); /// ``` - fn from(mut other: VecDeque) -> Self { + fn from(mut other: VecDeque) -> Self + where + [(); { crate::meta_num_slots!(A, VECDEQUE_CO_ALLOC_PREF) }]:, + { other.make_contiguous(); unsafe { @@ -2877,12 +3077,14 @@ impl From> for Vec { if other.head != 0 { ptr::copy(buf.add(other.head), buf, len); } - Vec::from_raw_parts_in(buf, len, cap, alloc) + // @FIXME: COOP + Vec::::from_raw_parts_in(buf, len, cap, alloc) } } } #[stable(feature = "std_collections_from_array", since = "1.56.0")] +#[allow(unused_braces)] impl From<[T; N]> for VecDeque { /// Converts a `[T; N]` into a `VecDeque`. /// @@ -2894,11 +3096,12 @@ impl From<[T; N]> for VecDeque { /// assert_eq!(deq1, deq2); /// ``` fn from(arr: [T; N]) -> Self { - let mut deq = VecDeque::with_capacity(N); + let mut deq = VecDeque::::with_capacity(N); let arr = ManuallyDrop::new(arr); if !::IS_ZST { // SAFETY: VecDeque::with_capacity ensures that there is enough capacity. 
unsafe { + // @FIXME for CO_ALLOC_PREF: ptr::copy_nonoverlapping(arr.as_ptr(), deq.ptr(), N); } } diff --git a/library/alloc/src/collections/vec_deque/spec_extend.rs b/library/alloc/src/collections/vec_deque/spec_extend.rs index dccf40ccb38a..0c5d9cf7a537 100644 --- a/library/alloc/src/collections/vec_deque/spec_extend.rs +++ b/library/alloc/src/collections/vec_deque/spec_extend.rs @@ -1,4 +1,7 @@ +#![feature(min_specialization)] + use crate::alloc::Allocator; +use crate::co_alloc::CoAllocPref; use crate::vec; use core::iter::TrustedLen; use core::slice; @@ -10,9 +13,12 @@ pub(super) trait SpecExtend { fn spec_extend(&mut self, iter: I); } -impl SpecExtend for VecDeque +#[allow(unused_braces)] +impl SpecExtend + for VecDeque where I: Iterator, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { default fn spec_extend(&mut self, mut iter: I) { // This function should be the moral equivalent of: @@ -22,7 +28,12 @@ where // } // May only be called if `deque.len() < deque.capacity()` - unsafe fn push_unchecked(deque: &mut VecDeque, element: T) { + unsafe fn push_unchecked( + deque: &mut VecDeque, + element: T, + ) where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + { // SAFETY: Because of the precondition, it's guaranteed that there is space // in the logical array after the last element. unsafe { deque.buffer_write(deque.to_physical_idx(deque.len), element) }; @@ -49,9 +60,12 @@ where } } -impl SpecExtend for VecDeque +#[allow(unused_braces)] +impl SpecExtend + for VecDeque where I: TrustedLen, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { default fn spec_extend(&mut self, iter: I) { // This is the case for a TrustedLen iterator. 
@@ -84,7 +98,12 @@ where } } -impl SpecExtend> for VecDeque { +#[allow(unused_braces)] +impl SpecExtend> + for VecDeque +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn spec_extend(&mut self, mut iterator: vec::IntoIter) { let slice = iterator.as_slice(); self.reserve(slice.len()); @@ -97,19 +116,25 @@ impl SpecExtend> for VecDeque { } } -impl<'a, T: 'a, I, A: Allocator> SpecExtend<&'a T, I> for VecDeque +#[allow(unused_braces)] +impl<'a, T: 'a, I, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> SpecExtend<&'a T, I> + for VecDeque where I: Iterator, T: Copy, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { default fn spec_extend(&mut self, iterator: I) { self.spec_extend(iterator.copied()) } } -impl<'a, T: 'a, A: Allocator> SpecExtend<&'a T, slice::Iter<'a, T>> for VecDeque +#[allow(unused_braces)] +impl<'a, T: 'a, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> + SpecExtend<&'a T, slice::Iter<'a, T>> for VecDeque where T: Copy, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) { let slice = iterator.as_slice(); diff --git a/library/alloc/src/collections/vec_deque/spec_from_iter.rs b/library/alloc/src/collections/vec_deque/spec_from_iter.rs index 7650492ebdad..1ef5150dda3b 100644 --- a/library/alloc/src/collections/vec_deque/spec_from_iter.rs +++ b/library/alloc/src/collections/vec_deque/spec_from_iter.rs @@ -1,10 +1,17 @@ use super::{IntoIter, VecDeque}; +use crate::alloc::Global; +use crate::co_alloc::CoAllocPref; /// Specialization trait used for `VecDeque::from_iter` pub(super) trait SpecFromIter { fn spec_from_iter(iter: I) -> Self; } +/// Specialization trait used for `VecDeque::from_iter_co` +pub(super) trait SpecFromIterCo { + fn spec_from_iter_co(iter: I) -> Self; +} + impl SpecFromIter for VecDeque where I: Iterator, @@ -31,3 +38,45 @@ impl SpecFromIter> for VecDeque { iterator.into_vecdeque() } } +// ---- + +#[allow(unused_braces)] +impl SpecFromIterCo + for 
VecDeque +where + I: Iterator, + [(); { crate::meta_num_slots_global!(CO_ALLOC_PREF) }]:, +{ + default fn spec_from_iter_co(iterator: I) -> Self { + // Since converting is O(1) now, just re-use the `Vec` logic for + // anything where we can't do something extra-special for `VecDeque`, + // especially as that could save us some monomorphiziation work + // if one uses the same iterators (like slice ones) with both. + crate::vec::Vec::::from_iter_co(iterator).into() + } +} + +#[allow(unused_braces)] +impl + SpecFromIterCo> + for VecDeque +where + [(); { crate::meta_num_slots_global!(CO_ALLOC_PREF) }]:, +{ + #[inline] + fn spec_from_iter_co(iterator: crate::vec::IntoIter) -> Self { + iterator.into_vecdeque() + } +} + +#[allow(unused_braces)] +impl SpecFromIterCo> + for VecDeque +where + [(); { crate::meta_num_slots_global!(CO_ALLOC_PREF) }]:, +{ + #[inline] + fn spec_from_iter_co(iterator: IntoIter) -> Self { + iterator.into_vecdeque() + } +} diff --git a/library/alloc/src/ffi/c_str.rs b/library/alloc/src/ffi/c_str.rs index f99395c72aa0..9d7321270d83 100644 --- a/library/alloc/src/ffi/c_str.rs +++ b/library/alloc/src/ffi/c_str.rs @@ -1,12 +1,14 @@ #[cfg(test)] mod tests; +use crate::alloc::Global; use crate::borrow::{Cow, ToOwned}; use crate::boxed::Box; use crate::rc::Rc; use crate::slice::hack::into_vec; use crate::string::String; use crate::vec::Vec; +use crate::CO_ALLOC_PREF_DEFAULT; use core::borrow::Borrow; use core::ffi::{c_char, CStr}; use core::fmt; @@ -723,7 +725,8 @@ impl fmt::Debug for CString { } #[stable(feature = "cstring_into", since = "1.7.0")] -impl From for Vec { +#[allow(unused_braces)] +impl From for Vec { /// Converts a [`CString`] into a [Vec]<[u8]>. /// /// The conversion consumes the [`CString`], and removes the terminating NUL byte. 
diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs index e9cc3875f683..ad0e529f53ea 100644 --- a/library/alloc/src/lib.rs +++ b/library/alloc/src/lib.rs @@ -86,6 +86,7 @@ #![warn(deprecated_in_future)] #![warn(missing_debug_implementations)] #![warn(missing_docs)] +#![allow(incomplete_features)] #![allow(explicit_outlives_requirements)] #![cfg_attr(not(bootstrap), warn(multiple_supertrait_upcastable))] // @@ -123,6 +124,8 @@ #![feature(extend_one)] #![feature(fmt_internals)] #![feature(fn_traits)] +#![feature(generic_const_exprs)] +#![feature(global_co_alloc_meta)] #![feature(hasher_prefixfree_extras)] #![feature(inline_const)] #![feature(inplace_iteration)] @@ -178,9 +181,17 @@ #![feature(exclusive_range_pattern)] #![feature(fundamental)] #![cfg_attr(not(test), feature(generator_trait))] +#![feature(global_co_alloc)] #![feature(hashmap_internals)] #![feature(lang_items)] -#![feature(min_specialization)] +#![feature(global_co_alloc_default)] +// When we used min_specialization instead of specialization, library/alloc/src/vec/mod.rs was failing with: +// - cannot specialize on predicate `the constant `core::alloc::co_alloc_metadata_num_slots::()` can be evaluated` +// - cannot specialize on predicate `[(); _] well-formed` +// - cannot specialize on predicate `the constant `core::alloc::co_alloc_metadata_num_slots::()` can be evaluated` +//#![feature(min_specialization)] +#![feature(associated_type_defaults)] +#![feature(specialization)] #![feature(negative_impls)] #![feature(never_type)] #![feature(rustc_allow_const_fn_unstable)] @@ -237,6 +248,9 @@ mod boxed { pub use std::boxed::Box; } pub mod borrow; +#[macro_use] +#[unstable(feature = "global_co_alloc", issue = "none")] +pub mod co_alloc; pub mod collections; #[cfg(all(not(no_rc), not(no_sync), not(no_global_oom_handling)))] pub mod ffi; diff --git a/library/alloc/src/macros.rs b/library/alloc/src/macros.rs index 5198bf297d92..29ec2799d47a 100644 --- a/library/alloc/src/macros.rs +++ 
b/library/alloc/src/macros.rs @@ -1,7 +1,7 @@ /// Creates a [`Vec`] containing the arguments. /// -/// `vec!` allows `Vec`s to be defined with the same syntax as array expressions. -/// There are two forms of this macro: +/// `vec!` allows `Vec`s to be defined with the same syntax as array expressions. There are two +/// forms of this macro: /// /// - Create a [`Vec`] containing a given list of elements: /// @@ -19,19 +19,17 @@ /// assert_eq!(v, [1, 1, 1]); /// ``` /// -/// Note that unlike array expressions this syntax supports all elements -/// which implement [`Clone`] and the number of elements doesn't have to be -/// a constant. +/// Note that unlike array expressions this syntax supports all elements which implement [`Clone`] +/// and the number of elements doesn't have to be a constant. /// -/// This will use `clone` to duplicate an expression, so one should be careful -/// using this with types having a nonstandard `Clone` implementation. For -/// example, `vec![Rc::new(1); 5]` will create a vector of five references -/// to the same boxed integer value, not five references pointing to independently -/// boxed integers. +/// This will use `clone` to duplicate an expression, so one should be careful using this with types +/// having a nonstandard `Clone` implementation. For example, `vec![Rc::new(1); 5]` will create a +/// vector of five references to the same boxed integer value, not five references pointing to +/// independently boxed integers. /// -/// Also, note that `vec![expr; 0]` is allowed, and produces an empty vector. -/// This will still evaluate `expr`, however, and immediately drop the resulting value, so -/// be mindful of side effects. +/// Also, note that `vec![expr; 0]` is allowed, and produces an empty vector. This will still +/// evaluate `expr`, however, and immediately drop the resulting value, so be mindful of side +/// effects. 
/// /// [`Vec`]: crate::vec::Vec #[cfg(all(not(no_global_oom_handling), not(test)))] @@ -54,10 +52,9 @@ macro_rules! vec { ); } -// HACK(japaric): with cfg(test) the inherent `[T]::into_vec` method, which is -// required for this macro definition, is not available. Instead use the -// `slice::into_vec` function which is only available with cfg(test) -// NB see the slice::hack module in slice.rs for more information +// HACK(japaric): with cfg(test) the inherent `[T]::into_vec` method, which is required for this +// macro definition, is not available. Instead use the `slice::into_vec` function which is only +// available with cfg(test) NB see the slice::hack module in slice.rs for more information #[cfg(all(not(no_global_oom_handling), test))] #[allow(unused_macro_rules)] macro_rules! vec { @@ -75,19 +72,18 @@ macro_rules! vec { /// Creates a `String` using interpolation of runtime expressions. /// -/// The first argument `format!` receives is a format string. This must be a string -/// literal. The power of the formatting string is in the `{}`s contained. +/// The first argument `format!` receives is a format string. This must be a string literal. The +/// power of the formatting string is in the `{}`s contained. /// -/// Additional parameters passed to `format!` replace the `{}`s within the -/// formatting string in the order given unless named or positional parameters -/// are used; see [`std::fmt`] for more information. +/// Additional parameters passed to `format!` replace the `{}`s within the formatting string in the +/// order given unless named or positional parameters are used; see [`std::fmt`] for more +/// information. /// -/// A common use for `format!` is concatenation and interpolation of strings. -/// The same convention is used with [`print!`] and [`write!`] macros, -/// depending on the intended destination of the string. +/// A common use for `format!` is concatenation and interpolation of strings. 
The same convention is +/// used with [`print!`] and [`write!`] macros, depending on the intended destination of the string. /// -/// To convert a single value to a string, use the [`to_string`] method. This -/// will use the [`Display`] formatting trait. +/// To convert a single value to a string, use the [`to_string`] method. This will use the +/// [`Display`] formatting trait. /// /// [`std::fmt`]: ../std/fmt/index.html /// [`print!`]: ../std/macro.print.html @@ -97,9 +93,8 @@ macro_rules! vec { /// /// # Panics /// -/// `format!` panics if a formatting trait implementation returns an error. -/// This indicates an incorrect implementation -/// since `fmt::Write for String` never returns an error itself. +/// `format!` panics if a formatting trait implementation returns an error. This indicates an +/// incorrect implementation since `fmt::Write for String` never returns an error itself. /// /// # Examples /// @@ -129,3 +124,246 @@ macro_rules! __rust_force_expr { $e }; } + +// ----- CoAlloc ICE workaround macro: +/// This "validates" type of a given `const` expression, and it casts it. That helps to prevent mix ups with macros/integer constant values. +#[doc(hidden)] +#[macro_export] +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +macro_rules! check_type_and_cast { + // Use the following for compile-time/build check only. And use it + // with a hard-coded `0` version of `meta_num_slots` - otherwise you get an ICE. + // + /*($e:expr, $t_check:ty, $t_cast:ty) => { + ($e + 0 as $t_check) as $t_cast + }*/ + // Use the following to build for testing/using, while rustc causes an ICE with the above and + // with a full version of `meta_num_slots`. + ($e:expr, $t_check:ty, $t_cast:ty) => { + $e + }; +} + +// ----- CoAlloc constant-like macros: +/// Coallocation option/parameter about using metadata that does prefer to use meta data. This is of type [::alloc::co_alloc::CoAllocMetaNumSlotsPref] (but not a whole []::alloc::co_alloc::CoAllocPref]). 
+#[doc(hidden)] +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +#[macro_export] +macro_rules! CO_ALLOC_PREF_NUM_META_SLOTS_ONE { + () => { + $crate::check_type_and_cast!(1, i32, $crate::co_alloc::CoAllocMetaNumSlotsPref) + }; +} + +/// Coallocation option/parameter about using metadata that prefers NOT to use meta data. This is of type [::alloc::co_alloc::CoAllocMetaNumSlotsPref] (but not a whole []::alloc::co_alloc::CoAllocPref]). +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +#[macro_export] +macro_rules! CO_ALLOC_PREF_NUM_META_SLOTS_ZERO { + () => { + $crate::check_type_and_cast!(0, i32, $crate::co_alloc::CoAllocMetaNumSlotsPref) + }; +} + +/// Default coallocation option/parameter about using metadata (whether to use meta data, or not). This is of type [::alloc::co_alloc::CoAllocMetaNumSlotsPref] (but not a whole []::alloc::co_alloc::CoAllocPref]). +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +#[macro_export] +macro_rules! CO_ALLOC_PREF_NUM_META_SLOTS_DEFAULT { + () => { + $crate::check_type_and_cast!(0, i32, $crate::co_alloc::CoAllocMetaNumSlotsPref) + }; +} + +/// "Yes" as a type's preference for coallocation using metadata (in either user space, or `alloc` +/// or `std` space). +/// +/// It may be overriden by the allocator. For example, if the allocator doesn't support +/// coallocation, then this value makes no difference. +/// +/// This constant and its type WILL CHANGE (once ``#![feature(generic_const_exprs)]` and +/// `#![feature(adt_const_params)]` are stable) to a dedicated struct/enum. Hence DO NOT hard +/// code/replace/mix this any other values/parameters. +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +#[macro_export] +macro_rules! CO_ALLOC_PREF_META_YES { + () => { + //1usize + $crate::co_alloc_pref!($crate::CO_ALLOC_PREF_NUM_META_SLOTS_ONE!()) + }; +} + +/// "No" as a type's preference for coallocation using metadata (in either user space, or `alloc` or +/// `std` space). 
+/// +/// Any allocator is required to respect this. Even if the allocator does support coallocation, it +/// will not coallocate types that use this value. +/// +/// This constant and its type WILL CHANGE (once ``#![feature(generic_const_exprs)]` and +/// `#![feature(adt_const_params)]` are stable) to a dedicated struct/enum. Hence DO NOT hard +/// code/replace/mix this any other values/parameters. +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +#[macro_export] +macro_rules! CO_ALLOC_PREF_META_NO { + () => { + //0usize + $crate::co_alloc_pref!($crate::CO_ALLOC_PREF_NUM_META_SLOTS_ZERO!()) + }; +} + +/// "Default" as a type's preference for coallocation using metadata (in either user space, or +/// `alloc` or `std` space). +/// +/// This value and its type WILL CHANGE (once ``#![feature(generic_const_exprs)]` and +/// `#![feature(adt_const_params)]` are stable) to a dedicated struct/enum. Hence DO NOT hard +/// code/replace/mix this any other values/parameters. +/// +/// (@FIXME) This WILL BE BECOME OBSOLETE and it WILL BE REPLACED with a `const` (and/or some kind +/// of compile time preference) once a related ICE is fixed (@FIXME add the ICE link here). Then +/// consider moving such a `const` to a submodule, for example `::alloc::co_alloc`. +#[unstable(feature = "global_co_alloc_default", issue = "none")] +#[macro_export] +macro_rules! CO_ALLOC_PREF_META_DEFAULT { + () => { + //0usize + $crate::co_alloc_pref!($crate::CO_ALLOC_PREF_NUM_META_SLOTS_DEFAULT!()) + }; +} + +/// Default [::alloc::CoAllocPref] value/config, based on `CO_ALLOC_PREF_META_DEFAULT`. +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +#[macro_export] +macro_rules! CO_ALLOC_PREF_DEFAULT { + () => { + //0usize + $crate::CO_ALLOC_PREF_META_DEFAULT!() + }; +} + +/// Coallocation preference for (internal) short term vectors. +#[unstable(feature = "global_co_alloc", issue = "none")] +//pub const SHORT_TERM_VEC_CO_ALLOC_PREF: bool = true; +#[macro_export] +macro_rules! 
SHORT_TERM_VEC_CO_ALLOC_PREF { + () => { + //0usize + $crate::CO_ALLOC_PREF_META_NO!() + }; +} + +// ------ CoAlloc preference/config conversion macros: + +/// Create a `CoAllocPref` value based on the given parameter(s). For now, only one parameter is +/// supported, and it's required: `meta_pref`. +/// +/// @param `meta_pref` is one of: `CO_ALLOC_PREF_META_YES, CO_ALLOC_PREF_META_NO`, or +/// `CO_ALLOC_PREF_META_DEFAULT`. +/// +/// @return `CoAllocPref` value +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +#[macro_export] +macro_rules! co_alloc_pref { + // ($meta_pref + (0 as CoAllocMetaNumSlotsPref)) ensures that $meta_pref is of type + // `CoAllocMetaNumSlotsPref`. Otherwise the casting of the result to `CoAllocPref` would not + // report the incorrect type of $meta_pref (if $meta_pref were some other integer, casting would + // compile, and we would not be notified). + ($meta_pref:expr) => { + $crate::check_type_and_cast!( + $meta_pref, + $crate::co_alloc::CoAllocMetaNumSlotsPref, + $crate::co_alloc::CoAllocPref + ) + }; +} + +/// Return 0 or 1, indicating whether to use coallocation metadata (or not) with the given allocator +/// type `alloc` and cooperation preference `co_alloc_pref`. +/// +/// NOT for public use. Param `co_alloc_pref` - can override the allocator's default preference for +/// cooperation, or can make the type not cooperative, regardless of whether allocator `A` is +/// cooperative. +/// +/// @param `alloc` Allocator (implementation) type. @param `co_alloc_pref` The heap-based type's +/// preference for coallocation, as an [::alloc::CoAllocPref] value. +/// +/// The type of second parameter `co_alloc_pref` WILL CHANGE. DO NOT hardcode/cast/mix that type. +/// Instead, use [::alloc::CoAllocPref]. +/// +// FIXME replace the macro with an (updated version of the below) `const` function). Only once +// generic_const_exprs is stable (that is, when consumer crates don't need to declare +// generic_const_exprs feature anymore). 
Then consider moving the function to a submodule, for +// example ::alloc::co_alloc. +#[unstable(feature = "global_co_alloc", issue = "none")] +#[macro_export] +macro_rules! meta_num_slots { + // @FIXME Use this only + // - once the ICE gets fixed, or + // - (until the ICE is fixed) with a related change in `check_type_and_cast` that makes it pass + // the given expression (parameter) unchecked & uncast. + /*($alloc:ty, $co_alloc_pref:expr) => { + $crate::check_type_and_cast!(<$alloc as ::core::alloc::Allocator>::CO_ALLOC_META_NUM_SLOTS,::core::alloc::CoAllocatorMetaNumSlots, + usize) * + $crate::check_type_and_cast!($co_alloc_pref, $crate::co_alloc::CoAllocPref, usize) + };*/ + // Use for testing & production, until ICE gets fixed. (Regardless of $co_alloc_pref.) + // + // Why still ICE?! + ($alloc:ty, $co_alloc_pref:expr) => { + // The following fails here - even if not used from meta_num_slots_default nor from meta_num_slots_global! + //<$alloc as ::core::alloc::Allocator>::CO_ALLOC_META_NUM_SLOTS + //<$crate::alloc::Global as ::core::alloc::Allocator>::CO_ALLOC_META_NUM_SLOTS + //1usize + $co_alloc_pref + } + // Use for testing & production as enforcing no meta. + /*($alloc:ty, $co_alloc_pref:expr) => { + 0usize // compiles + }*/ +} +// -\---> replace with something like: +/* +#[unstable(feature = "global_co_alloc", issue = "none")] +pub const fn meta_num_slots( + CO_ALLOC_PREF: bool, +) -> usize { + if A::CO_ALLOC_META_NUM_SLOTS && CO_ALLOC_PREF { 1 } else { 0 } +} +*/ + +/// Like `meta_num_slots`, but for the default coallocation preference (`DEFAULT_CO_ALLOC_PREF`). +/// +/// Return 0 or 1, indicating whether to use coallocation metadata (or not) with the given allocator +/// type `alloc` and the default coallocation preference (`DEFAULT_CO_ALLOC_PREF()!`). +/// +// FIXME replace the macro with a `const` function. Only once generic_const_exprs is stable (that +// is, when consumer crates don't need to declare generic_const_exprs feature anymore). 
Then +// consider moving the function to a submodule, for example ::alloc::co_alloc. +#[unstable(feature = "global_co_alloc", issue = "none")] +#[macro_export] +macro_rules! meta_num_slots_default { + // Can't generate if ... {1} else {0} + // because it's "overly complex generic constant". + ($alloc:ty) => { + // EITHER of the following are OK here + $crate::meta_num_slots!($alloc, $crate::CO_ALLOC_PREF_DEFAULT!()) + //<$alloc as ::core::alloc::Allocator>::CO_ALLOC_META_NUM_SLOTS + //<$crate::alloc::Global as ::core::alloc::Allocator>::CO_ALLOC_META_NUM_SLOTS + }; +} + +/// Like `meta_num_slots`, but for the default coallocation preference (`DEFAULT_CO_ALLOC_PREF`). +/// +/// Return 0 or 1, indicating whether to use coallocation metadata (or not) with the global allocator +/// type `alloc` and the given coallocation preference `co_alloc_`. +/// +// FIXME replace the macro with a `const` function. Only once generic_const_exprs is stable (that +// is, when consumer crates don't need to declare `generic_const_exprs` feature anymore). Then +// consider moving the function to a submodule, for example ::alloc::co_alloc. See above. +#[unstable(feature = "global_co_alloc", issue = "none")] +#[macro_export] +macro_rules! 
meta_num_slots_global { + ($co_alloc_pref:expr) => { + // EITHER of the following are OK here + $crate::meta_num_slots!($crate::alloc::Global, $co_alloc_pref) + // The following is OK here: + //<$crate::alloc::Global as ::core::alloc::Allocator>::CO_ALLOC_META_NUM_SLOTS + }; +} diff --git a/library/alloc/src/raw_vec.rs b/library/alloc/src/raw_vec.rs index 3751f2a24545..39827052b75c 100644 --- a/library/alloc/src/raw_vec.rs +++ b/library/alloc/src/raw_vec.rs @@ -1,6 +1,9 @@ #![unstable(feature = "raw_vec_internals", reason = "unstable const warnings", issue = "none")] -use core::alloc::LayoutError; +use crate::co_alloc::CoAllocPref; +use crate::meta_num_slots_default; +use core::alloc::CoAllocMetaBase; +use core::alloc::{LayoutError, PtrAndMeta}; use core::cmp; use core::intrinsics; use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties}; @@ -14,6 +17,7 @@ use crate::alloc::{Allocator, Global, Layout}; use crate::boxed::Box; use crate::collections::TryReserveError; use crate::collections::TryReserveErrorKind::*; +use crate::CO_ALLOC_PREF_DEFAULT; #[cfg(test)] mod tests; @@ -49,13 +53,28 @@ enum AllocInit { /// `usize::MAX`. This means that you need to be careful when round-tripping this type with a /// `Box<[T]>`, since `capacity()` won't yield the length. 
#[allow(missing_debug_implementations)] -pub(crate) struct RawVec { +#[allow(unused_braces)] //@FIXME remove #[allow(unused_braces)] once that false positive warning fix is included on stable +pub(crate) struct RawVec< + T, + A: Allocator = Global, + const CO_ALLOC_PREF: CoAllocPref = { CO_ALLOC_PREF_DEFAULT!() }, +> where + [(); { meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ ptr: Unique, cap: usize, alloc: A, + // As of v1.67.0, `cmp` for `TypeId` is not `const`, unfortunately: + //pub(crate) meta: [GlobalCoAllocMeta; {if core::any::TypeId::of::()==core::any::TypeId::of::() {1} else {0}}], + //pub(crate) meta: [GlobalCoAllocMeta; mem::size_of::()], + pub(crate) metas: [A::CoAllocMeta; { crate::meta_num_slots!(A, CO_ALLOC_PREF) }], } -impl RawVec { +#[allow(unused_braces)] +impl RawVec +where + [(); { meta_num_slots_global!(CO_ALLOC_PREF) }]:, +{ /// HACK(Centril): This exists because stable `const fn` can only call stable `const fn`, so /// they cannot call `Self::new()`. /// @@ -102,7 +121,16 @@ impl RawVec { } } -impl RawVec { +#[allow(unused_braces)] +impl RawVec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ + #[allow(dead_code)] + const fn new_plain_metas() -> [A::CoAllocMeta; { meta_num_slots_default!(A) }] { + loop {} + } + // Tiny Vecs are dumb. Skip to: // - 8 if the element size is 1, because any heap allocators is likely // to round up a request of less than 8 bytes to at least 8 bytes. @@ -120,7 +148,13 @@ impl RawVec { /// the returned `RawVec`. pub const fn new_in(alloc: A) -> Self { // `cap: 0` means "unallocated". zero-sized types are ignored. 
- Self { ptr: Unique::dangling(), cap: 0, alloc } + Self { + ptr: Unique::dangling(), + cap: 0, + alloc, + metas: [A::CoAllocMeta::new_plain(); // @FIXME CoAlloc + {crate::meta_num_slots!(A, CO_ALLOC_PREF)}], + } } /// Like `with_capacity`, but parameterized over the choice of @@ -193,10 +227,13 @@ impl RawVec { // Allocators currently return a `NonNull<[u8]>` whose length // matches the size requested. If that ever changes, the capacity // here should change to `ptr.len() / mem::size_of::()`. + #[allow(unreachable_code)] // @FIXME CoAlloc Self { ptr: unsafe { Unique::new_unchecked(ptr.cast().as_ptr()) }, cap: capacity, alloc, + metas: [A::CoAllocMeta::new_plain(); // @FIXME CoAlloc + {crate::meta_num_slots!(A, CO_ALLOC_PREF)}], } } } @@ -213,7 +250,14 @@ impl RawVec { /// guaranteed. #[inline] pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, alloc: A) -> Self { - Self { ptr: unsafe { Unique::new_unchecked(ptr) }, cap: capacity, alloc } + #[allow(unreachable_code)] //@FIXME CoAlloc + Self { + ptr: unsafe { Unique::new_unchecked(ptr) }, + cap: capacity, + alloc, + metas: [A::CoAllocMeta::new_plain(); //@FIXME CoAlloc + {crate::meta_num_slots!(A, CO_ALLOC_PREF)}], + } } /// Gets a raw pointer to the start of the allocation. Note that this is @@ -237,6 +281,12 @@ impl RawVec { &self.alloc } + // @FIXME + #[inline] + const fn assert_alignment() { + assert!(mem::size_of::() % mem::align_of::() == 0); + } + fn current_memory(&self) -> Option<(NonNull, Layout)> { if T::IS_ZST || self.cap == 0 { None @@ -245,7 +295,8 @@ impl RawVec { // and could hypothetically handle differences between stride and size, but this memory // has already been allocated so we know it can't overflow and currently rust does not // support such types. So we can do better by skipping some checks and avoid an unwrap. 
- let _: () = const { assert!(mem::size_of::() % mem::align_of::() == 0) }; + let _: () = Self::assert_alignment(); + //let _: () = const { assert!(mem::size_of::() % mem::align_of::() == 0) }; unsafe { let align = mem::align_of::(); let size = mem::size_of::().unchecked_mul(self.cap); @@ -276,17 +327,20 @@ impl RawVec { /// Aborts on OOM. #[cfg(not(no_global_oom_handling))] #[inline] + #[allow(unused_braces)] pub fn reserve(&mut self, len: usize, additional: usize) { // Callers expect this function to be very cheap when there is already sufficient capacity. // Therefore, we move all the resizing and error-handling logic from grow_amortized and // handle_reserve behind a call, while making sure that this function is likely to be // inlined as just a comparison and a call if the comparison fails. #[cold] - fn do_reserve_and_handle( - slf: &mut RawVec, + fn do_reserve_and_handle( + slf: &mut RawVec, len: usize, additional: usize, - ) { + ) where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + { handle_reserve(slf.grow_amortized(len, additional)); } @@ -359,7 +413,11 @@ impl RawVec { } } -impl RawVec { +#[allow(unused_braces)] +impl RawVec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ /// Returns if the buffer needs to grow to fulfill the needed extra capacity. /// Mainly used to make inlining reserve-calls possible without inlining `grow`. fn needs_to_grow(&self, len: usize, additional: usize) -> bool { @@ -432,7 +490,8 @@ impl RawVec { let (ptr, layout) = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) }; // See current_memory() why this assert is here - let _: () = const { assert!(mem::size_of::() % mem::align_of::() == 0) }; + let _: () = Self::assert_alignment(); + //let _: () = const { assert!(mem::size_of::() % mem::align_of::() == 0) }; let ptr = unsafe { // `Layout::array` cannot overflow here because it would have // overflowed earlier when capacity was larger. 
@@ -479,11 +538,27 @@ where memory.map_err(|_| AllocError { layout: new_layout, non_exhaustive: () }.into()) } -unsafe impl<#[may_dangle] T, A: Allocator> Drop for RawVec { +#[allow(unused_braces)] +unsafe impl<#[may_dangle] T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Drop + for RawVec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ /// Frees the memory owned by the `RawVec` *without* trying to drop its contents. - fn drop(&mut self) { + default fn drop(&mut self) { if let Some((ptr, layout)) = self.current_memory() { - unsafe { self.alloc.deallocate(ptr, layout) } + let meta_num_slots = crate::meta_num_slots!(A, CO_ALLOC_PREF); + if meta_num_slots != 0 { + debug_assert!( + meta_num_slots == 1, + "Number of coallocation meta slots can be only 0 or 1, but it is {}!", + meta_num_slots + ); + let meta = self.metas[0]; + unsafe { self.alloc.co_deallocate(PtrAndMeta { ptr, meta }, layout) } + } else { + unsafe { self.alloc.deallocate(ptr, layout) } + } } } } diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index fd1e3e0f75b0..52af085bc470 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -275,6 +275,7 @@ use crate::alloc::handle_alloc_error; use crate::alloc::{box_free, WriteCloneIntoRaw}; use crate::alloc::{AllocError, Allocator, Global, Layout}; use crate::borrow::{Cow, ToOwned}; +use crate::co_alloc::CoAllocPref; #[cfg(not(no_global_oom_handling))] use crate::string::String; #[cfg(not(no_global_oom_handling))] @@ -1987,7 +1988,11 @@ impl From> for Rc { #[cfg(not(no_global_oom_handling))] #[stable(feature = "shared_from_slice", since = "1.21.0")] -impl From> for Rc<[T]> { +#[allow(unused_braces)] +impl From> for Rc<[T]> +where + [(); { meta_num_slots_global!(CO_ALLOC_PREF) }]:, +{ /// Allocate a reference-counted slice and move `v`'s items into it. 
/// /// # Example @@ -1999,7 +2004,11 @@ impl From> for Rc<[T]> { /// assert_eq!(vec![1, 2, 3], *shared); /// ``` #[inline] - fn from(mut v: Vec) -> Rc<[T]> { + #[allow(unused_braces)] + fn from(mut v: Vec) -> Rc<[T]> + where + [(); { meta_num_slots_global!(CO_ALLOC_PREF) }]:, + { unsafe { let rc = Rc::copy_from_slice(&v); // Allow the Vec to free its memory, but not destroy its contents @@ -2120,6 +2129,7 @@ trait ToRcSlice: Iterator + Sized { fn to_rc_slice(self) -> Rc<[T]>; } +// COOP_NOT_POSSIBLE #[cfg(not(no_global_oom_handling))] impl> ToRcSlice for I { default fn to_rc_slice(self) -> Rc<[T]> { diff --git a/library/alloc/src/slice.rs b/library/alloc/src/slice.rs index 093dcbbe8bf7..0fecde66d9f1 100644 --- a/library/alloc/src/slice.rs +++ b/library/alloc/src/slice.rs @@ -28,6 +28,7 @@ use crate::alloc::{self, Global}; #[cfg(not(no_global_oom_handling))] use crate::borrow::ToOwned; use crate::boxed::Box; +use crate::co_alloc::CoAllocPref; use crate::vec::Vec; #[cfg(test)] @@ -84,6 +85,9 @@ pub use hack::into_vec; #[cfg(test)] pub use hack::to_vec; +#[cfg(test)] +pub use hack::to_vec_co; + // HACK(japaric): With cfg(test) `impl [T]` is not available, these three // functions are actually methods that are in `impl [T]` but not in // `core::slice::SliceExt` - we need to supply these functions for the @@ -92,12 +96,27 @@ pub(crate) mod hack { use core::alloc::Allocator; use crate::boxed::Box; + use crate::co_alloc::CoAllocPref; use crate::vec::Vec; // We shouldn't add inline attribute to this since this is used in // `vec!` macro mostly and causes perf regression. See #71204 for // discussion and perf results. 
- pub fn into_vec(b: Box<[T], A>) -> Vec { + #[allow(unused_braces)] + pub fn into_vec(b: Box<[T], A>) -> Vec + where + [(); { crate::meta_num_slots_default!(A) }]:, + { + into_vec_co::(b) + } + + #[allow(unused_braces)] + pub fn into_vec_co( + b: Box<[T], A>, + ) -> Vec + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + { unsafe { let len = b.len(); let (b, alloc) = Box::into_raw_with_allocator(b); @@ -107,26 +126,68 @@ pub(crate) mod hack { #[cfg(not(no_global_oom_handling))] #[inline] - pub fn to_vec(s: &[T], alloc: A) -> Vec { + #[allow(unused_braces)] + pub fn to_vec(s: &[T], alloc: A) -> Vec + where + [(); { crate::meta_num_slots_default!(A) }]:, + { T::to_vec(s, alloc) } #[cfg(not(no_global_oom_handling))] + #[inline] + #[allow(unused_braces)] + pub fn to_vec_co( + s: &[T], + alloc: A, + ) -> Vec + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + { + T::to_vec_co(s, alloc) + } + + #[cfg(not(no_global_oom_handling))] + #[allow(unused_braces)] pub trait ConvertVec { fn to_vec(s: &[Self], alloc: A) -> Vec where - Self: Sized; + Self: Sized, + [(); { crate::meta_num_slots_default!(A) }]:; + } + + #[allow(unused_braces)] + pub trait ConvertVecCo { + fn to_vec_co( + s: &[Self], + alloc: A, + ) -> Vec + where + Self: Sized, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:; } #[cfg(not(no_global_oom_handling))] + #[allow(unused_braces)] impl ConvertVec for T { #[inline] - default fn to_vec(s: &[Self], alloc: A) -> Vec { - struct DropGuard<'a, T, A: Allocator> { + #[allow(unused_braces)] + default fn to_vec(s: &[Self], alloc: A) -> Vec + where + [(); { crate::meta_num_slots_default!(A) }]:, + { + #[allow(unused_braces)] + struct DropGuard<'a, T, A: Allocator> + where + [(); { crate::meta_num_slots_default!(A) }]:, + { vec: &'a mut Vec, num_init: usize, } - impl<'a, T, A: Allocator> Drop for DropGuard<'a, T, A> { + impl<'a, T, A: Allocator> Drop for DropGuard<'a, T, A> + where + [(); { crate::meta_num_slots_default!(A) }]:, + { 
#[inline] fn drop(&mut self) { // SAFETY: @@ -158,7 +219,87 @@ pub(crate) mod hack { #[cfg(not(no_global_oom_handling))] impl ConvertVec for T { #[inline] - fn to_vec(s: &[Self], alloc: A) -> Vec { + #[allow(unused_braces)] + fn to_vec(s: &[Self], alloc: A) -> Vec + where + [(); { crate::meta_num_slots_default!(A) }]:, + { + let mut v = Vec::with_capacity_in(s.len(), alloc); + // SAFETY: + // allocated above with the capacity of `s`, and initialize to `s.len()` in + // ptr::copy_to_non_overlapping below. + unsafe { + s.as_ptr().copy_to_nonoverlapping(v.as_mut_ptr(), s.len()); + v.set_len(s.len()); + } + v + } + } + + #[cfg(not(no_global_oom_handling))] + #[allow(unused_braces)] + impl ConvertVecCo for T { + #[inline] + #[allow(unused_braces)] + default fn to_vec_co( + s: &[Self], + alloc: A, + ) -> Vec + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + { + #[allow(unused_braces)] + struct DropGuard<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + { + vec: &'a mut Vec, + num_init: usize, + } + impl<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Drop + for DropGuard<'a, T, A, CO_ALLOC_PREF> + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + { + #[inline] + fn drop(&mut self) { + // SAFETY: + // items were marked initialized in the loop below + unsafe { + self.vec.set_len(self.num_init); + } + } + } + let mut vec = Vec::with_capacity_in(s.len(), alloc); + let mut guard = DropGuard { vec: &mut vec, num_init: 0 }; + let slots = guard.vec.spare_capacity_mut(); + // .take(slots.len()) is necessary for LLVM to remove bounds checks + // and has better codegen than zip. + for (i, b) in s.iter().enumerate().take(slots.len()) { + guard.num_init = i; + slots[i].write(b.clone()); + } + core::mem::forget(guard); + // SAFETY: + // the vec was allocated and initialized above to at least this length. 
+ unsafe { + vec.set_len(s.len()); + } + vec + } + } + + #[cfg(not(no_global_oom_handling))] + impl ConvertVecCo for T { + #[inline] + #[allow(unused_braces)] + fn to_vec_co( + s: &[Self], + alloc: A, + ) -> Vec + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + { let mut v = Vec::with_capacity_in(s.len(), alloc); // SAFETY: // allocated above with the capacity of `s`, and initialize to `s.len()` in @@ -406,6 +547,7 @@ impl [T] { /// ``` #[cfg(not(no_global_oom_handling))] #[rustc_allow_incoherent_impl] + #[allow(unused_braces)] #[rustc_conversion_suggestion] #[stable(feature = "rust1", since = "1.0.0")] #[inline] @@ -413,7 +555,22 @@ impl [T] { where T: Clone, { - self.to_vec_in(Global) + self.to_vec_in::(Global) + } + + /// Coallocation-aware alternative to `to_vec`. + #[cfg(not(no_global_oom_handling))] + #[rustc_allow_incoherent_impl] + #[allow(unused_braces)] + #[rustc_conversion_suggestion] + #[unstable(feature = "global_co_alloc", issue = "none")] + #[inline] + pub fn to_vec_co(&self) -> Vec + where + T: Clone, + [(); { meta_num_slots_global!(CO_ALLOC_PREF) }]:, + { + self.to_vec_in_co::(Global) } /// Copies `self` into a new `Vec` with an allocator. @@ -433,14 +590,34 @@ impl [T] { #[rustc_allow_incoherent_impl] #[inline] #[unstable(feature = "allocator_api", issue = "32838")] + #[allow(unused_braces)] pub fn to_vec_in(&self, alloc: A) -> Vec where T: Clone, + [(); { crate::meta_num_slots_default!(A) }]:, { // N.B., see the `hack` module in this file for more details. hack::to_vec(self, alloc) } + /// Coallocation-aware version of `to_vec_in`. + #[cfg(not(no_global_oom_handling))] + #[rustc_allow_incoherent_impl] + #[inline] + #[unstable(feature = "global_co_alloc", issue = "none")] + #[allow(unused_braces)] + pub fn to_vec_in_co( + &self, + alloc: A, + ) -> Vec + where + T: Clone, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + { + // N.B., see the `hack` module in this file for more details. 
+ hack::to_vec_co(self, alloc) + } + /// Converts `self` into a vector without clones or allocation. /// /// The resulting vector can be converted back into a box via @@ -458,11 +635,30 @@ impl [T] { #[rustc_allow_incoherent_impl] #[stable(feature = "rust1", since = "1.0.0")] #[inline] - pub fn into_vec(self: Box) -> Vec { + #[allow(unused_braces)] + pub fn into_vec(self: Box) -> Vec + where + [(); { crate::meta_num_slots_default!(A) }]:, + { // N.B., see the `hack` module in this file for more details. hack::into_vec(self) } + /// Coallocation-aware version of [into_vec]. + #[rustc_allow_incoherent_impl] + #[unstable(feature = "global_co_alloc", issue = "none")] + #[inline] + #[allow(unused_braces)] + pub fn into_vec_co( + self: Box, + ) -> Vec + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + { + // N.B., see the `hack` module in this file for more details. + hack::into_vec_co(self) + } + /// Creates a vector by copying a slice `n` times. /// /// # Panics @@ -702,6 +898,7 @@ pub trait Join { fn join(slice: &Self, sep: Separator) -> Self::Output; } +// COOP_NOT_POSSIBLE #[cfg(not(no_global_oom_handling))] #[unstable(feature = "slice_concat_ext", issue = "27747")] impl> Concat for [V] { @@ -717,6 +914,7 @@ impl> Concat for [V] { } } +// COOP_NOT_POSSIBLE #[cfg(not(no_global_oom_handling))] #[unstable(feature = "slice_concat_ext", issue = "27747")] impl> Join<&T> for [V] { @@ -740,10 +938,11 @@ impl> Join<&T> for [V] { } } +// COOP_NOT_POSSIBLE #[cfg(not(no_global_oom_handling))] #[unstable(feature = "slice_concat_ext", issue = "27747")] impl> Join<&[T]> for [V] { - type Output = Vec; + type Output = Vec; fn join(slice: &Self, sep: &[T]) -> Vec { let mut iter = slice.iter(); @@ -769,14 +968,22 @@ impl> Join<&[T]> for [V] { //////////////////////////////////////////////////////////////////////////////// #[stable(feature = "rust1", since = "1.0.0")] -impl Borrow<[T]> for Vec { +#[allow(unused_braces)] +impl Borrow<[T]> for Vec +where + [(); { 
crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn borrow(&self) -> &[T] { &self[..] } } #[stable(feature = "rust1", since = "1.0.0")] -impl BorrowMut<[T]> for Vec { +#[allow(unused_braces)] +impl BorrowMut<[T]> for Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn borrow_mut(&mut self) -> &mut [T] { &mut self[..] } @@ -786,12 +993,20 @@ impl BorrowMut<[T]> for Vec { // public in the crate and has the Allocator parameter so that // vec::clone_from use it too. #[cfg(not(no_global_oom_handling))] -pub(crate) trait SpecCloneIntoVec { +#[allow(unused_braces)] +pub(crate) trait SpecCloneIntoVec +where + [(); { crate::meta_num_slots_default!(A) }]:, +{ fn clone_into(&self, target: &mut Vec); } #[cfg(not(no_global_oom_handling))] -impl SpecCloneIntoVec for [T] { +#[allow(unused_braces)] +impl SpecCloneIntoVec for [T] +where + [(); { crate::meta_num_slots_default!(A) }]:, +{ default fn clone_into(&self, target: &mut Vec) { // drop anything in target that will not be overwritten target.truncate(self.len()); @@ -807,13 +1022,61 @@ impl SpecCloneIntoVec for [T] { } #[cfg(not(no_global_oom_handling))] -impl SpecCloneIntoVec for [T] { +#[allow(unused_braces)] +impl SpecCloneIntoVec for [T] +where + [(); { crate::meta_num_slots_default!(A) }]:, +{ fn clone_into(&self, target: &mut Vec) { target.clear(); target.extend_from_slice(self); } } +/// Coallocation-aware version of `SpecCloneIntoVec`. 
+#[cfg(not(no_global_oom_handling))] +#[allow(unused_braces)] +pub(crate) trait SpecCloneIntoVecCo +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ + fn clone_into_co(&self, target: &mut Vec); +} + +#[cfg(not(no_global_oom_handling))] +#[allow(unused_braces)] +impl + SpecCloneIntoVecCo for [T] +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ + default fn clone_into_co(&self, target: &mut Vec) { + // drop anything in target that will not be overwritten + target.truncate(self.len()); + + // target.len <= self.len due to the truncate above, so the + // slices here are always in-bounds. + let (init, tail) = self.split_at(target.len()); + + // reuse the contained values' allocations/resources. + target.clone_from_slice(init); + target.extend_from_slice(tail); + } +} + +#[cfg(not(no_global_oom_handling))] +#[allow(unused_braces)] +impl + SpecCloneIntoVecCo for [T] +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ + fn clone_into_co(&self, target: &mut Vec) { + target.clear(); + target.extend_from_slice(self); + } +} + #[cfg(not(no_global_oom_handling))] #[stable(feature = "rust1", since = "1.0.0")] impl ToOwned for [T] { diff --git a/library/alloc/src/str.rs b/library/alloc/src/str.rs index afbe5cfaf8ef..3c900c116453 100644 --- a/library/alloc/src/str.rs +++ b/library/alloc/src/str.rs @@ -14,8 +14,11 @@ use core::ptr; use core::str::pattern::{DoubleEndedSearcher, Pattern, ReverseSearcher, Searcher}; use core::unicode::conversions; +use crate::alloc; +use crate::alloc::Global; use crate::borrow::ToOwned; use crate::boxed::Box; +use crate::co_alloc::CoAllocPref; use crate::slice::{Concat, Join, SliceIndex}; use crate::string::String; use crate::vec::Vec; @@ -126,11 +129,16 @@ macro_rules! 
copy_slice_and_advance { // [T] and str both impl AsRef<[T]> for some T // => s.borrow().as_ref() and we always have slices #[cfg(not(no_global_oom_handling))] -fn join_generic_copy(slice: &[S], sep: &[T]) -> Vec +#[allow(unused_braces)] +fn join_generic_copy( + slice: &[S], + sep: &[T], +) -> Vec where T: Copy, B: AsRef<[T]> + ?Sized, S: Borrow, + [(); { crate::meta_num_slots_global!(CO_ALLOC_PREF) }]:, { let sep_len = sep.len(); let mut iter = slice.iter(); @@ -138,7 +146,7 @@ where // the first slice is the only one without a separator preceding it let first = match iter.next() { Some(first) => first, - None => return vec![], + None => return Vec::new_co(), }; // compute the exact total length of the joined Vec @@ -153,7 +161,7 @@ where .expect("attempt to join into collection with len > usize::MAX"); // prepare an uninitialized buffer - let mut result = Vec::with_capacity(reserved_len); + let mut result = Vec::with_capacity_co(reserved_len); debug_assert!(result.capacity() >= reserved_len); result.extend_from_slice(first.borrow().as_ref()); diff --git a/library/alloc/src/vec/drain.rs b/library/alloc/src/vec/drain.rs index 2b1a787cc549..eaea943baecd 100644 --- a/library/alloc/src/vec/drain.rs +++ b/library/alloc/src/vec/drain.rs @@ -1,9 +1,10 @@ use crate::alloc::{Allocator, Global}; +use crate::co_alloc::CoAllocPref; use core::fmt; use core::iter::{FusedIterator, TrustedLen}; use core::mem::{self, ManuallyDrop, SizedTypeProperties}; use core::ptr::{self, NonNull}; -use core::slice::{self}; +use core::slice; use super::Vec; @@ -19,28 +20,41 @@ use super::Vec; /// let iter: std::vec::Drain<_> = v.drain(..); /// ``` #[stable(feature = "drain", since = "1.6.0")] +#[allow(unused_braces)] pub struct Drain< 'a, T: 'a, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + 'a = Global, -> { + const CO_ALLOC_PREF: CoAllocPref = { SHORT_TERM_VEC_CO_ALLOC_PREF!() }, +> where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ /// Index of tail to 
preserve pub(super) tail_start: usize, /// Length of tail pub(super) tail_len: usize, /// Current remaining range to remove pub(super) iter: slice::Iter<'a, T>, - pub(super) vec: NonNull>, + pub(super) vec: NonNull>, } #[stable(feature = "collection_debug", since = "1.17.0")] -impl fmt::Debug for Drain<'_, T, A> { +#[allow(unused_braces)] +impl fmt::Debug + for Drain<'_, T, A, CO_ALLOC_PREF> +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("Drain").field(&self.iter.as_slice()).finish() } } -impl<'a, T, A: Allocator> Drain<'a, T, A> { +#[allow(unused_braces)] +impl<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Drain<'a, T, A, CO_ALLOC_PREF> +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ /// Returns the remaining items of this iterator as a slice. /// /// # Examples @@ -139,19 +153,40 @@ impl<'a, T, A: Allocator> Drain<'a, T, A> { } #[stable(feature = "vec_drain_as_slice", since = "1.46.0")] -impl<'a, T, A: Allocator> AsRef<[T]> for Drain<'a, T, A> { +#[allow(unused_braces)] +impl<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> AsRef<[T]> + for Drain<'a, T, A, CO_ALLOC_PREF> +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn as_ref(&self) -> &[T] { self.as_slice() } } #[stable(feature = "drain", since = "1.6.0")] -unsafe impl Sync for Drain<'_, T, A> {} +#[allow(unused_braces)] +unsafe impl Sync + for Drain<'_, T, A, CO_ALLOC_PREF> +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ +} #[stable(feature = "drain", since = "1.6.0")] -unsafe impl Send for Drain<'_, T, A> {} +#[allow(unused_braces)] +unsafe impl Send + for Drain<'_, T, A, CO_ALLOC_PREF> +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ +} #[stable(feature = "drain", since = "1.6.0")] -impl Iterator for Drain<'_, T, A> { +#[allow(unused_braces)] +impl Iterator for Drain<'_, T, A, CO_ALLOC_PREF> +where + [(); { crate::meta_num_slots!(A, 
CO_ALLOC_PREF) }]:, +{ type Item = T; #[inline] @@ -165,7 +200,12 @@ impl Iterator for Drain<'_, T, A> { } #[stable(feature = "drain", since = "1.6.0")] -impl DoubleEndedIterator for Drain<'_, T, A> { +#[allow(unused_braces)] +impl DoubleEndedIterator + for Drain<'_, T, A, CO_ALLOC_PREF> +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ #[inline] fn next_back(&mut self) -> Option { self.iter.next_back().map(|elt| unsafe { ptr::read(elt as *const _) }) @@ -173,12 +213,25 @@ impl DoubleEndedIterator for Drain<'_, T, A> { } #[stable(feature = "drain", since = "1.6.0")] -impl Drop for Drain<'_, T, A> { +#[allow(unused_braces)] +impl Drop for Drain<'_, T, A, CO_ALLOC_PREF> +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn drop(&mut self) { /// Moves back the un-`Drain`ed elements to restore the original `Vec`. - struct DropGuard<'r, 'a, T, A: Allocator>(&'r mut Drain<'a, T, A>); - - impl<'r, 'a, T, A: Allocator> Drop for DropGuard<'r, 'a, T, A> { + #[allow(unused_braces)] + struct DropGuard<'r, 'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref>( + &'r mut Drain<'a, T, A, CO_ALLOC_PREF>, + ) + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:; + + impl<'r, 'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Drop + for DropGuard<'r, 'a, T, A, CO_ALLOC_PREF> + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + { fn drop(&mut self) { if self.0.tail_len > 0 { unsafe { @@ -242,14 +295,31 @@ impl Drop for Drain<'_, T, A> { } #[stable(feature = "drain", since = "1.6.0")] -impl ExactSizeIterator for Drain<'_, T, A> { +#[allow(unused_braces)] +impl ExactSizeIterator + for Drain<'_, T, A, CO_ALLOC_PREF> +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn is_empty(&self) -> bool { self.iter.is_empty() } } #[unstable(feature = "trusted_len", issue = "37572")] -unsafe impl TrustedLen for Drain<'_, T, A> {} +#[allow(unused_braces)] +unsafe impl TrustedLen + for Drain<'_, T, A, CO_ALLOC_PREF> +where + [(); { 
crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ +} #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for Drain<'_, T, A> {} +#[allow(unused_braces)] +impl FusedIterator + for Drain<'_, T, A, CO_ALLOC_PREF> +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ +} diff --git a/library/alloc/src/vec/drain_filter.rs b/library/alloc/src/vec/drain_filter.rs index 8c03f1692d94..c47d21733cd9 100644 --- a/library/alloc/src/vec/drain_filter.rs +++ b/library/alloc/src/vec/drain_filter.rs @@ -1,7 +1,7 @@ use crate::alloc::{Allocator, Global}; +use crate::co_alloc::CoAllocPref; use core::mem::{self, ManuallyDrop}; -use core::ptr; -use core::slice; +use core::{ptr, slice}; use super::Vec; @@ -20,15 +20,18 @@ use super::Vec; /// ``` #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] #[derive(Debug)] +#[allow(unused_braces)] pub struct DrainFilter< 'a, T, F, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, + const CO_ALLOC_PREF: CoAllocPref = { CO_ALLOC_PREF_DEFAULT!() }, > where F: FnMut(&mut T) -> bool, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { - pub(super) vec: &'a mut Vec, + pub(super) vec: &'a mut Vec, /// The index of the item that will be inspected by the next call to `next`. pub(super) idx: usize, /// The number of items that have been drained (removed) thus far. @@ -45,9 +48,11 @@ pub struct DrainFilter< pub(super) panic_flag: bool, } -impl DrainFilter<'_, T, F, A> +#[allow(unused_braces)] +impl DrainFilter<'_, T, F, A, CO_ALLOC_PREF> where F: FnMut(&mut T) -> bool, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { /// Returns a reference to the underlying allocator. 
#[unstable(feature = "allocator_api", issue = "32838")] @@ -113,9 +118,12 @@ where } #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] -impl Iterator for DrainFilter<'_, T, F, A> +#[allow(unused_braces)] +impl Iterator + for DrainFilter<'_, T, F, A, CO_ALLOC_PREF> where F: FnMut(&mut T) -> bool, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { type Item = T; @@ -151,21 +159,29 @@ where } #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] -impl Drop for DrainFilter<'_, T, F, A> +#[allow(unused_braces)] +impl Drop + for DrainFilter<'_, T, F, A, CO_ALLOC_PREF> where F: FnMut(&mut T) -> bool, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { + #[allow(unused_braces)] fn drop(&mut self) { - struct BackshiftOnDrop<'a, 'b, T, F, A: Allocator> + struct BackshiftOnDrop<'a, 'b, T, F, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> where F: FnMut(&mut T) -> bool, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { - drain: &'b mut DrainFilter<'a, T, F, A>, + drain: &'b mut DrainFilter<'a, T, F, A, CO_ALLOC_PREF>, } - impl<'a, 'b, T, F, A: Allocator> Drop for BackshiftOnDrop<'a, 'b, T, F, A> + #[allow(unused_braces)] + impl<'a, 'b, T, F, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Drop + for BackshiftOnDrop<'a, 'b, T, F, A, CO_ALLOC_PREF> where F: FnMut(&mut T) -> bool, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn drop(&mut self) { unsafe { diff --git a/library/alloc/src/vec/in_place_collect.rs b/library/alloc/src/vec/in_place_collect.rs index 87d61deb1eb2..f2d9579c2027 100644 --- a/library/alloc/src/vec/in_place_collect.rs +++ b/library/alloc/src/vec/in_place_collect.rs @@ -137,6 +137,8 @@ //! } //! vec.truncate(write_idx); //! 
``` +use crate::alloc::Global; +use crate::co_alloc::CoAllocPref; use core::iter::{InPlaceIterable, SourceIter, TrustedRandomAccessNoCoerce}; use core::mem::{self, ManuallyDrop, SizedTypeProperties}; use core::ptr::{self}; @@ -150,9 +152,11 @@ pub(super) trait InPlaceIterableMarker {} impl InPlaceIterableMarker for T where T: InPlaceIterable {} -impl SpecFromIter for Vec +#[allow(unused_braces)] +impl SpecFromIter for Vec where I: Iterator + SourceIter + InPlaceIterableMarker, + [(); { crate::meta_num_slots_global!(CO_ALLOC_PREF) }]:, { default fn from_iter(mut iterator: I) -> Self { // See "Layout constraints" section in the module documentation. We rely on const @@ -206,7 +210,7 @@ where src.forget_allocation_drop_remaining(); mem::forget(dst_guard); - let vec = unsafe { Vec::from_raw_parts(dst_buf, len, cap) }; + let vec = unsafe { Vec::from_raw_parts_co(dst_buf, len, cap) }; vec } diff --git a/library/alloc/src/vec/in_place_drop.rs b/library/alloc/src/vec/in_place_drop.rs index 25ca33c6a7bf..42ebb6ca395d 100644 --- a/library/alloc/src/vec/in_place_drop.rs +++ b/library/alloc/src/vec/in_place_drop.rs @@ -1,3 +1,4 @@ +use crate::alloc::Global; use core::ptr::{self}; use core::slice::{self}; @@ -34,6 +35,11 @@ pub(super) struct InPlaceDstBufDrop { impl Drop for InPlaceDstBufDrop { #[inline] fn drop(&mut self) { - unsafe { super::Vec::from_raw_parts(self.ptr, self.len, self.cap) }; + // false = no need for co-alloc metadata, since it would get lost once converted to Box. 
+ unsafe { + super::Vec::::from_raw_parts( + self.ptr, self.len, self.cap, + ) + }; } } diff --git a/library/alloc/src/vec/into_iter.rs b/library/alloc/src/vec/into_iter.rs index 37966007eb7e..9bba9b992e60 100644 --- a/library/alloc/src/vec/into_iter.rs +++ b/library/alloc/src/vec/into_iter.rs @@ -1,11 +1,10 @@ #[cfg(not(no_global_oom_handling))] use super::AsVecIntoIter; use crate::alloc::{Allocator, Global}; +use crate::co_alloc::CoAllocPref; #[cfg(not(no_global_oom_handling))] use crate::collections::VecDeque; use crate::raw_vec::RawVec; -use core::array; -use core::fmt; use core::iter::{ FusedIterator, InPlaceIterable, SourceIter, TrustedLen, TrustedRandomAccessNoCoerce, }; @@ -15,6 +14,7 @@ use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties}; use core::ops::Deref; use core::ptr::{self, NonNull}; use core::slice::{self}; +use core::{array, fmt}; /// An iterator that moves out of a vector. /// @@ -29,10 +29,14 @@ use core::slice::{self}; /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[rustc_insignificant_dtor] +#[allow(unused_braces)] pub struct IntoIter< T, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, -> { + const CO_ALLOC_PREF: CoAllocPref = { SHORT_TERM_VEC_CO_ALLOC_PREF!() }, +> where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ pub(super) buf: NonNull, pub(super) phantom: PhantomData, pub(super) cap: usize, @@ -46,13 +50,22 @@ pub struct IntoIter< } #[stable(feature = "vec_intoiter_debug", since = "1.13.0")] -impl fmt::Debug for IntoIter { +#[allow(unused_braces)] +impl fmt::Debug + for IntoIter +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("IntoIter").field(&self.as_slice()).finish() } } -impl IntoIter { +#[allow(unused_braces)] +impl IntoIter +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ /// Returns the remaining items of this iterator as a slice. 
/// /// # Examples @@ -121,7 +134,25 @@ impl IntoIter { // struct and then overwriting &mut self. // this creates less assembly self.cap = 0; - self.buf = unsafe { NonNull::new_unchecked(RawVec::NEW.ptr()) }; + self.buf = unsafe { + // @FIXME The below if .. {..} else {..} + // branching exists, because the following fails. Otherwise we'd have a snowball effect of wide spread of where...Global... bounds. + // + //NonNull::new_unchecked(RawVec::::NEW.ptr()); + let meta_num_slots = crate::meta_num_slots!(A, CO_ALLOC_PREF); + if meta_num_slots > 0 { + debug_assert!( + meta_num_slots == 1, + "Number of coallocation meta slots can be only 0 or 1, but it is {}!", + meta_num_slots + ); + NonNull::new_unchecked( + RawVec::::NEW.ptr(), + ) + } else { + NonNull::new_unchecked(RawVec::::NEW.ptr()) + } + }; self.ptr = self.buf.as_ptr(); self.end = self.buf.as_ptr(); @@ -141,7 +172,7 @@ impl IntoIter { #[cfg(not(no_global_oom_handling))] #[inline] - pub(crate) fn into_vecdeque(self) -> VecDeque { + pub(crate) fn into_vecdeque(self) -> VecDeque { // Keep our `Drop` impl from dropping the elements and the allocator let mut this = ManuallyDrop::new(self); @@ -168,19 +199,39 @@ impl IntoIter { } #[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")] -impl AsRef<[T]> for IntoIter { +#[allow(unused_braces)] +impl AsRef<[T]> for IntoIter +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn as_ref(&self) -> &[T] { self.as_slice() } } #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl Send for IntoIter {} +#[allow(unused_braces)] +unsafe impl Send + for IntoIter +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ +} #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl Sync for IntoIter {} +#[allow(unused_braces)] +unsafe impl Sync + for IntoIter +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ +} #[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for IntoIter { +#[allow(unused_braces)] +impl Iterator for IntoIter 
+where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ type Item = T; #[inline] @@ -294,7 +345,12 @@ impl Iterator for IntoIter { } #[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator for IntoIter { +#[allow(unused_braces)] +impl DoubleEndedIterator + for IntoIter +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ #[inline] fn next_back(&mut self) -> Option { if self.end == self.ptr { @@ -335,17 +391,34 @@ impl DoubleEndedIterator for IntoIter { } #[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator for IntoIter { +#[allow(unused_braces)] +impl ExactSizeIterator + for IntoIter +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn is_empty(&self) -> bool { self.ptr == self.end } } #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for IntoIter {} +#[allow(unused_braces)] +impl FusedIterator + for IntoIter +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ +} #[unstable(feature = "trusted_len", issue = "37572")] -unsafe impl TrustedLen for IntoIter {} +#[allow(unused_braces)] +unsafe impl TrustedLen + for IntoIter +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ +} #[doc(hidden)] #[unstable(issue = "none", feature = "std_internals")] @@ -359,40 +432,72 @@ impl NonDrop for T {} #[doc(hidden)] #[unstable(issue = "none", feature = "std_internals")] +#[allow(unused_braces)] // TrustedRandomAccess (without NoCoerce) must not be implemented because // subtypes/supertypes of `T` might not be `NonDrop` -unsafe impl TrustedRandomAccessNoCoerce for IntoIter +unsafe impl TrustedRandomAccessNoCoerce + for IntoIter where T: NonDrop, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { const MAY_HAVE_SIDE_EFFECT: bool = false; } #[cfg(not(no_global_oom_handling))] #[stable(feature = "vec_into_iter_clone", since = "1.8.0")] -impl Clone for IntoIter { +#[allow(unused_braces)] +impl Clone + for IntoIter +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) 
}]:, +{ #[cfg(not(test))] fn clone(&self) -> Self { - self.as_slice().to_vec_in(self.alloc.deref().clone()).into_iter() + // @FIXME Remove the following extras - used for type checks only + let slice = self.as_slice(); + let vec: crate::vec::Vec = + slice.to_vec_in_co::(self.alloc.deref().clone()); + let _iter: IntoIter = vec.into_iter(); + + //self.as_slice().to_vec_in::(self.alloc.deref().clone()).into_iter() + loop {} } #[cfg(test)] fn clone(&self) -> Self { - crate::slice::to_vec(self.as_slice(), self.alloc.deref().clone()).into_iter() + crate::slice::to_vec_co(self.as_slice(), self.alloc.deref().clone()).into_iter() } } #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter { +#[allow(unused_braces)] +unsafe impl<#[may_dangle] T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Drop + for IntoIter +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn drop(&mut self) { - struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter); - - impl Drop for DropGuard<'_, T, A> { + struct DropGuard<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref>( + &'a mut IntoIter, + ) + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:; + + impl Drop for DropGuard<'_, T, A, CO_ALLOC_PREF> + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + { fn drop(&mut self) { unsafe { // `IntoIter::alloc` is not used anymore after this and will be dropped by RawVec let alloc = ManuallyDrop::take(&mut self.0.alloc); // RawVec handles deallocation - let _ = RawVec::from_raw_parts_in(self.0.buf.as_ptr(), self.0.cap, alloc); + // @FIXME pass true instead of CO_ALLOC_PREF - use e.g.: if CO_ALLOC_PREF {let _ = RawVec::::from_raw_parts_in(..) 
} else { let _ = from_raw_parts_in_coop(...)} } + let _ = RawVec::::from_raw_parts_in( + self.0.buf.as_ptr(), + self.0.cap, + alloc, + ); } } } @@ -410,11 +515,22 @@ unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter { // also refer to the vec::in_place_collect module documentation to get an overview #[unstable(issue = "none", feature = "inplace_iteration")] #[doc(hidden)] -unsafe impl InPlaceIterable for IntoIter {} +#[allow(unused_braces)] +unsafe impl InPlaceIterable + for IntoIter +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ +} #[unstable(issue = "none", feature = "inplace_iteration")] #[doc(hidden)] -unsafe impl SourceIter for IntoIter { +#[allow(unused_braces)] +unsafe impl SourceIter + for IntoIter +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ type Source = Self; #[inline] diff --git a/library/alloc/src/vec/mod.rs b/library/alloc/src/vec/mod.rs index a07f3da78d33..23c90d83d03c 100644 --- a/library/alloc/src/vec/mod.rs +++ b/library/alloc/src/vec/mod.rs @@ -72,6 +72,7 @@ use core::slice::{self, SliceIndex}; use crate::alloc::{Allocator, Global}; use crate::borrow::{Cow, ToOwned}; use crate::boxed::Box; +use crate::co_alloc::CoAllocPref; use crate::collections::TryReserveError; use crate::raw_vec::RawVec; @@ -397,16 +398,40 @@ mod spec_extend; #[stable(feature = "rust1", since = "1.0.0")] #[cfg_attr(not(test), rustc_diagnostic_item = "Vec")] #[rustc_insignificant_dtor] -pub struct Vec { - buf: RawVec, +#[allow(unused_braces)] +pub struct Vec< + T, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, + //@FIXME: #[unstable(feature ="global_co_alloc_vec", issue="none")] + const CO_ALLOC_PREF: CoAllocPref = { CO_ALLOC_PREF_DEFAULT!() }, +> where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ + buf: RawVec, len: usize, } -//////////////////////////////////////////////////////////////////////////////// -// Inherent methods 
-//////////////////////////////////////////////////////////////////////////////// +/// "Cooperative" Vector. Preferring co-alloc API (if Global alloc supports it). +#[unstable(feature = "global_co_alloc_covec", issue = "none")] +pub type CoVec = Vec; + +/// "Plain" Vec. Not "cooperative" - not carrying extra data to assist the allocator. +/// FIXME after cleanup, see if we still use this in core:: and/or alloc:: +#[unstable(feature = "global_co_alloc_plvec", issue = "none")] +pub type PlVec = Vec; + +/// "Default" Vec. Either "cooperative" or not - as specified by `DEFAULT_CO_ALLOC_PREF`. The +/// difference to `Vec` (used without specifying `CO_ALLOC_PREF`): `DefVec` indicates that the +/// author considered using `CoVec` or `PlVec`, but left it to default instead. +#[unstable(feature = "global_co_alloc_defvec", issue = "none")] +#[allow(unused_braces)] +pub type DefVec = Vec; impl Vec { + /*impl Vec + where + [(); {meta_num_slots_global!(CO_ALLOC_PREF)}]:, + {*/ /// Constructs a new, empty `Vec`. /// /// The vector will not allocate until elements are pushed onto it. @@ -422,9 +447,89 @@ impl Vec { #[stable(feature = "rust1", since = "1.0.0")] #[must_use] pub const fn new() -> Self { + #[allow(unused_braces)] + Vec::::new_co() + //Self::new_co() + } + + /// Constructs a new, empty `Vec` with at least the specified capacity. + /// + /// The vector will be able to hold at least `capacity` elements without + /// reallocating. This method is allowed to allocate for more elements than + /// `capacity`. If `capacity` is 0, the vector will not allocate. + /// + /// It is important to note that although the returned vector has the + /// minimum *capacity* specified, the vector will have a zero *length*. For + /// an explanation of the difference between length and capacity, see + /// *[Capacity and reallocation]*. + /// + /// If it is important to know the exact allocated capacity of a `Vec`, + /// always use the [`capacity`] method after construction. 
+ /// + /// For `Vec` where `T` is a zero-sized type, there will be no allocation + /// and the capacity will always be `usize::MAX`. + /// + /// [Capacity and reallocation]: #capacity-and-reallocation + /// [`capacity`]: Vec::capacity + /// + /// # Panics + /// + /// Panics if the new capacity exceeds `isize::MAX` bytes. + /// + /// # Examples + /// + /// ``` + /// let mut vec = Vec::with_capacity(10); + /// + /// // The vector contains no items, even though it has capacity for more + /// assert_eq!(vec.len(), 0); + /// assert!(vec.capacity() >= 10); + /// + /// // These are all done without reallocating... + /// for i in 0..10 { + /// vec.push(i); + /// } + /// assert_eq!(vec.len(), 10); + /// assert!(vec.capacity() >= 10); + /// + /// // ...but this may make the vector reallocate + /// vec.push(11); + /// assert_eq!(vec.len(), 11); + /// assert!(vec.capacity() >= 11); + /// + /// // A vector of a zero-sized type will always over-allocate, since no + /// // allocation is necessary + /// let vec_units = Vec::<()>::with_capacity(10); + /// assert_eq!(vec_units.capacity(), usize::MAX); + /// ``` #[cfg(not(no_global_oom_handling))] + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + #[must_use] + pub fn with_capacity(capacity: usize) -> Self { + Self::with_capacity_in(capacity, Global) + } +} + +//////////////////////////////////////////////////////////////////////////////// +// Inherent methods +//////////////////////////////////////////////////////////////////////////////// + +/**/ +#[allow(unused_braces)] +impl Vec +where + [(); { meta_num_slots_global!(CO_ALLOC_PREF) }]:, +{ + /// Like new(), but it respects CO_ALLOC_PREF. + #[inline] + #[rustc_const_stable(feature = "const_vec_new_co", since = "1.60.0")] //@FIXME This is `rustc_const_stable`, so that String::new() can be const and can call this. 
+ #[unstable(feature = "vec_new_co", reason = "confirm_or_fix_the_function_name", issue = "none")] + #[must_use] + pub const fn new_co() -> Self { Vec { buf: RawVec::NEW, len: 0 } } + // @FIXME document co-allocation /// Constructs a new, empty `Vec` with at least the specified capacity. /// /// The vector will be able to hold at least `capacity` elements without @@ -477,12 +582,21 @@ impl Vec { /// ``` #[cfg(not(no_global_oom_handling))] #[inline] - #[stable(feature = "rust1", since = "1.0.0")] + #[unstable(feature = "vec_new_co", reason = "confirm_or_fix_the_function_name", issue = "none")] #[must_use] - pub fn with_capacity(capacity: usize) -> Self { + pub fn with_capacity_co(capacity: usize) -> Self { Self::with_capacity_in(capacity, Global) } + /// Coallocation-aware alternative to `from_row_parts`. + #[inline] + #[unstable(feature = "global_co_alloc", issue = "none")] + pub unsafe fn from_raw_parts_co(ptr: *mut T, length: usize, capacity: usize) -> Self { + unsafe { Self::from_raw_parts_in(ptr, length, capacity, Global) } + } +} + +impl Vec { /// Creates a `Vec` directly from a pointer, a capacity, and a length. /// /// # Safety @@ -593,7 +707,11 @@ impl Vec { } } -impl Vec { +#[allow(unused_braces)] +impl Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ /// Constructs a new, empty `Vec`. /// /// The vector will not allocate until elements are pushed onto it. @@ -1606,14 +1724,27 @@ impl Vec { // This drop guard will be invoked when predicate or `drop` of element panicked. // It shifts unchecked elements to cover holes and `set_len` to the correct length. // In cases when predicate and `drop` never panick, it will be optimized out. 
- struct BackshiftOnDrop<'a, T, A: Allocator> { - v: &'a mut Vec, + struct BackshiftOnDrop< + 'a, + T, + A: Allocator, + const VEC_CO_ALLOC_PREF: CoAllocPref = { CO_ALLOC_PREF_META_DEFAULT!() }, + > + where + [(); { crate::meta_num_slots!(A, VEC_CO_ALLOC_PREF) }]:, + { + v: &'a mut Vec, processed_len: usize, deleted_cnt: usize, original_len: usize, } - impl Drop for BackshiftOnDrop<'_, T, A> { + #[allow(unused_braces)] + impl Drop + for BackshiftOnDrop<'_, T, A, VEC_CO_ALLOC_PREF> + where + [(); { crate::meta_num_slots!(A, VEC_CO_ALLOC_PREF) }]:, + { fn drop(&mut self) { if self.deleted_cnt > 0 { // SAFETY: Trailing unchecked items must be valid since we never touch them. @@ -1632,14 +1763,26 @@ impl Vec { } } - let mut g = BackshiftOnDrop { v: self, processed_len: 0, deleted_cnt: 0, original_len }; + let mut g = BackshiftOnDrop:: { + v: self, + processed_len: 0, + deleted_cnt: 0, + original_len, + }; - fn process_loop( + fn process_loop< + F, + T, + A: Allocator, + const DELETED: bool, + const VEC_CO_ALLOC_PREF: CoAllocPref, + >( original_len: usize, f: &mut F, - g: &mut BackshiftOnDrop<'_, T, A>, + g: &mut BackshiftOnDrop<'_, T, A, VEC_CO_ALLOC_PREF>, ) where F: FnMut(&mut T) -> bool, + [(); { crate::meta_num_slots!(A, VEC_CO_ALLOC_PREF) }]:, { while g.processed_len != original_len { // SAFETY: Unchecked element must be valid. @@ -1670,10 +1813,10 @@ impl Vec { } // Stage 1: Nothing was deleted. - process_loop::(original_len, &mut f, &mut g); + process_loop::(original_len, &mut f, &mut g); // Stage 2: Some elements were deleted. - process_loop::(original_len, &mut f, &mut g); + process_loop::(original_len, &mut f, &mut g); // All item are processed. This can be optimized to `set_len` by LLVM. 
drop(g); @@ -1732,7 +1875,12 @@ impl Vec { } /* INVARIANT: vec.len() > read >= write > write-1 >= 0 */ - struct FillGapOnDrop<'a, T, A: core::alloc::Allocator> { + #[allow(unused_braces)] + struct FillGapOnDrop<'a, T, A: core::alloc::Allocator, const CO_ALLOC_PREF: CoAllocPref> + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + { + /// @FIXME This doc-comment here is only to workaround rust-lang/rustfmt issue #5691. Remove once it's fixed on nightly. /* Offset of the element we want to check if it is duplicate */ read: usize, @@ -1741,10 +1889,15 @@ impl Vec { write: usize, /* The Vec that would need correction if `same_bucket` panicked */ - vec: &'a mut Vec, + vec: &'a mut Vec, } - impl<'a, T, A: core::alloc::Allocator> Drop for FillGapOnDrop<'a, T, A> { + #[allow(unused_braces)] + impl<'a, T, A: core::alloc::Allocator, const CO_ALLOC_PREF: CoAllocPref> Drop + for FillGapOnDrop<'a, T, A, CO_ALLOC_PREF> + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + { fn drop(&mut self) { /* This code gets executed when `same_bucket` panics */ @@ -1986,7 +2139,7 @@ impl Vec { /// assert_eq!(v, &[]); /// ``` #[stable(feature = "drain", since = "1.6.0")] - pub fn drain(&mut self, range: R) -> Drain<'_, T, A> + pub fn drain(&mut self, range: R) -> Drain<'_, T, A, CO_ALLOC_PREF> where R: RangeBounds, { @@ -2337,7 +2490,11 @@ impl Vec { } } -impl Vec { +#[allow(unused_braces)] +impl Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ /// Resizes the `Vec` in-place so that `len` is equal to `new_len`. /// /// If `new_len` is greater than `len`, the `Vec` is extended by the @@ -2436,7 +2593,12 @@ impl Vec { } } -impl Vec<[T; N], A> { +#[allow(unused_braces)] +impl + Vec<[T; N], A, CO_ALLOC_PREF> +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ /// Takes a `Vec<[T; N]>` and flattens it into a `Vec`. 
/// /// # Panics @@ -2459,7 +2621,7 @@ impl Vec<[T; N], A> { /// assert_eq!(flattened.pop(), Some(6)); /// ``` #[unstable(feature = "slice_flatten", issue = "95629")] - pub fn into_flattened(self) -> Vec { + pub fn into_flattened(self) -> Vec { let (ptr, len, cap, alloc) = self.into_raw_parts_with_alloc(); let (new_len, new_cap) = if T::IS_ZST { (len.checked_mul(N).expect("vec len overflow"), usize::MAX) @@ -2477,7 +2639,9 @@ impl Vec<[T; N], A> { // - `new_cap` refers to the same sized allocation as `cap` because // `new_cap * size_of::()` == `cap * size_of::<[T; N]>()` // - `len` <= `cap`, so `len * N` <= `cap * N`. - unsafe { Vec::::from_raw_parts_in(ptr.cast(), new_len, new_cap, alloc) } + unsafe { + Vec::::from_raw_parts_in(ptr.cast(), new_len, new_cap, alloc) + } } } @@ -2497,7 +2661,11 @@ impl ExtendWith for ExtendElement { } } -impl Vec { +#[allow(unused_braces)] +impl Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ #[cfg(not(no_global_oom_handling))] /// Extend the vector by `n` values, using the given generator. fn extend_with>(&mut self, n: usize, mut value: E) { @@ -2529,7 +2697,11 @@ impl Vec { } } -impl Vec { +#[allow(unused_braces)] +impl Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ /// Removes consecutive repeated elements in the vector according to the /// [`PartialEq`] trait implementation. 
/// @@ -2565,7 +2737,15 @@ pub fn from_elem(elem: T, n: usize) -> Vec { #[doc(hidden)] #[cfg(not(no_global_oom_handling))] #[unstable(feature = "allocator_api", issue = "32838")] -pub fn from_elem_in(elem: T, n: usize, alloc: A) -> Vec { +#[allow(unused_braces)] +pub fn from_elem_in( + elem: T, + n: usize, + alloc: A, +) -> Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ ::from_elem(elem, n, alloc) } @@ -2577,7 +2757,12 @@ trait ExtendFromWithinSpec { unsafe fn spec_extend_from_within(&mut self, src: Range); } -impl ExtendFromWithinSpec for Vec { +#[allow(unused_braces)] +impl ExtendFromWithinSpec + for Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ default unsafe fn spec_extend_from_within(&mut self, src: Range) { // SAFETY: // - len is increased only after initializing elements @@ -2596,7 +2781,12 @@ impl ExtendFromWithinSpec for Vec { } } -impl ExtendFromWithinSpec for Vec { +#[allow(unused_braces)] +impl ExtendFromWithinSpec + for Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ unsafe fn spec_extend_from_within(&mut self, src: Range) { let count = src.len(); { @@ -2629,7 +2819,11 @@ impl ExtendFromWithinSpec for Vec { //////////////////////////////////////////////////////////////////////////////// #[stable(feature = "rust1", since = "1.0.0")] -impl ops::Deref for Vec { +#[allow(unused_braces)] +impl ops::Deref for Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ type Target = [T]; #[inline] @@ -2639,7 +2833,11 @@ impl ops::Deref for Vec { } #[stable(feature = "rust1", since = "1.0.0")] -impl ops::DerefMut for Vec { +#[allow(unused_braces)] +impl ops::DerefMut for Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ #[inline] fn deref_mut(&mut self) -> &mut [T] { unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), self.len) } @@ -2648,11 +2846,16 @@ impl ops::DerefMut for Vec { #[cfg(not(no_global_oom_handling))] #[stable(feature = "rust1", since = "1.0.0")] 
-impl Clone for Vec { +#[allow(unused_braces)] +impl Clone + for Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ #[cfg(not(test))] fn clone(&self) -> Self { let alloc = self.allocator().clone(); - <[T]>::to_vec_in(&**self, alloc) + <[T]>::to_vec_in_co(&**self, alloc) } // HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is @@ -2662,11 +2865,11 @@ impl Clone for Vec { #[cfg(test)] fn clone(&self) -> Self { let alloc = self.allocator().clone(); - crate::slice::to_vec(&**self, alloc) + crate::slice::to_vec_co(&**self, alloc) } fn clone_from(&mut self, other: &Self) { - crate::slice::SpecCloneIntoVec::clone_into(other.as_slice(), self); + crate::slice::SpecCloneIntoVecCo::clone_into_co(other.as_slice(), self); } } @@ -2683,7 +2886,11 @@ impl Clone for Vec { /// assert_eq!(b.hash_one(v), b.hash_one(s)); /// ``` #[stable(feature = "rust1", since = "1.0.0")] -impl Hash for Vec { +#[allow(unused_braces)] +impl Hash for Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ #[inline] fn hash(&self, state: &mut H) { Hash::hash(&**self, state) @@ -2695,7 +2902,12 @@ impl Hash for Vec { message = "vector indices are of type `usize` or ranges of `usize`", label = "vector indices are of type `usize` or ranges of `usize`" )] -impl, A: Allocator> Index for Vec { +#[allow(unused_braces)] +impl, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Index + for Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ type Output = I::Output; #[inline] @@ -2709,7 +2921,12 @@ impl, A: Allocator> Index for Vec { message = "vector indices are of type `usize` or ranges of `usize`", label = "vector indices are of type `usize` or ranges of `usize`" )] -impl, A: Allocator> IndexMut for Vec { +#[allow(unused_braces)] +impl, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> IndexMut + for Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ #[inline] fn index_mut(&mut self, index: I) -> &mut Self::Output { 
IndexMut::index_mut(&mut **self, index) @@ -2718,6 +2935,7 @@ impl, A: Allocator> IndexMut for Vec { #[cfg(not(no_global_oom_handling))] #[stable(feature = "rust1", since = "1.0.0")] +#[allow(unused_braces)] impl FromIterator for Vec { #[inline] fn from_iter>(iter: I) -> Vec { @@ -2725,10 +2943,28 @@ impl FromIterator for Vec { } } +#[cfg(not(no_global_oom_handling))] +#[unstable(feature = "global_co_alloc", issue = "none")] +#[allow(unused_braces)] +impl Vec +where + [(); { crate::meta_num_slots_global!(CO_ALLOC_PREF) }]:, +{ + /// Coallocation-aware alternative to `from_iter`. + #[inline] + pub fn from_iter_co>(iter: I) -> Vec { + >::from_iter(iter.into_iter()) + } +} + #[stable(feature = "rust1", since = "1.0.0")] -impl IntoIterator for Vec { +#[allow(unused_braces)] +impl IntoIterator for Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ type Item = T; - type IntoIter = IntoIter; + type IntoIter = IntoIter; /// Creates a consuming iterator, that is, one that moves each value out of /// the vector (from start to end). 
The vector cannot be used after calling @@ -2771,7 +3007,12 @@ impl IntoIterator for Vec { } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T, A: Allocator> IntoIterator for &'a Vec { +#[allow(unused_braces)] +impl<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> IntoIterator + for &'a Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ type Item = &'a T; type IntoIter = slice::Iter<'a, T>; @@ -2781,7 +3022,12 @@ impl<'a, T, A: Allocator> IntoIterator for &'a Vec { } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T, A: Allocator> IntoIterator for &'a mut Vec { +#[allow(unused_braces)] +impl<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> IntoIterator + for &'a mut Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ type Item = &'a mut T; type IntoIter = slice::IterMut<'a, T>; @@ -2792,7 +3038,11 @@ impl<'a, T, A: Allocator> IntoIterator for &'a mut Vec { #[cfg(not(no_global_oom_handling))] #[stable(feature = "rust1", since = "1.0.0")] -impl Extend for Vec { +#[allow(unused_braces)] +impl Extend for Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ #[inline] fn extend>(&mut self, iter: I) { >::spec_extend(self, iter.into_iter()) @@ -2809,7 +3059,11 @@ impl Extend for Vec { } } -impl Vec { +#[allow(unused_braces)] +impl Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ // leaf method to which various SpecFrom/SpecExtend implementations delegate when // they have no further optimizations to apply #[cfg(not(no_global_oom_handling))] @@ -2907,10 +3161,15 @@ impl Vec { #[cfg(not(no_global_oom_handling))] #[inline] #[stable(feature = "vec_splice", since = "1.21.0")] - pub fn splice(&mut self, range: R, replace_with: I) -> Splice<'_, I::IntoIter, A> + pub fn splice( + &mut self, + range: R, + replace_with: I, + ) -> Splice<'_, I::IntoIter, A, CO_ALLOC_PREF> where R: RangeBounds, I: IntoIterator, + [(); { crate::meta_num_slots!(A, crate::CO_ALLOC_PREF_META_YES!()) }]:, { 
Splice { drain: self.drain(range), replace_with: replace_with.into_iter() } } @@ -2960,9 +3219,10 @@ impl Vec { /// assert_eq!(odds, vec![1, 3, 5, 9, 11, 13, 15]); /// ``` #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] - pub fn drain_filter(&mut self, filter: F) -> DrainFilter<'_, T, F, A> + pub fn drain_filter(&mut self, filter: F) -> DrainFilter<'_, T, F, A, CO_ALLOC_PREF> where F: FnMut(&mut T) -> bool, + [(); { crate::meta_num_slots!(A, crate::CO_ALLOC_PREF_META_YES!()) }]:, { let old_len = self.len(); @@ -2983,7 +3243,12 @@ impl Vec { /// [`copy_from_slice`]: slice::copy_from_slice #[cfg(not(no_global_oom_handling))] #[stable(feature = "extend_ref", since = "1.2.0")] -impl<'a, T: Copy + 'a, A: Allocator + 'a> Extend<&'a T> for Vec { +#[allow(unused_braces)] +impl<'a, T: Copy + 'a, A: Allocator + 'a, const CO_ALLOC_PREF: CoAllocPref> Extend<&'a T> + for Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn extend>(&mut self, iter: I) { self.spec_extend(iter.into_iter()) } @@ -3001,7 +3266,12 @@ impl<'a, T: Copy + 'a, A: Allocator + 'a> Extend<&'a T> for Vec { /// Implements comparison of vectors, [lexicographically](core::cmp::Ord#lexicographical-comparison). #[stable(feature = "rust1", since = "1.0.0")] -impl PartialOrd for Vec { +#[allow(unused_braces)] +impl PartialOrd + for Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ #[inline] fn partial_cmp(&self, other: &Self) -> Option { PartialOrd::partial_cmp(&**self, &**other) @@ -3009,11 +3279,19 @@ impl PartialOrd for Vec { } #[stable(feature = "rust1", since = "1.0.0")] -impl Eq for Vec {} +#[allow(unused_braces)] +impl Eq for Vec where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]: +{ +} /// Implements ordering of vectors, [lexicographically](core::cmp::Ord#lexicographical-comparison). 
#[stable(feature = "rust1", since = "1.0.0")] -impl Ord for Vec { +#[allow(unused_braces)] +impl Ord for Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ #[inline] fn cmp(&self, other: &Self) -> Ordering { Ord::cmp(&**self, &**other) @@ -3021,7 +3299,12 @@ impl Ord for Vec { } #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl<#[may_dangle] T, A: Allocator> Drop for Vec { +#[allow(unused_braces)] +unsafe impl<#[may_dangle] T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Drop + for Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn drop(&mut self) { unsafe { // use drop for [T] @@ -3033,6 +3316,21 @@ unsafe impl<#[may_dangle] T, A: Allocator> Drop for Vec { } } +#[stable(feature = "rust1", since = "1.0.0")] +#[rustc_const_unstable(feature = "const_default_impls", issue = "87864")] +#[allow(unused_braces)] +impl const Default for Vec +where + [(); { meta_num_slots_global!(CO_ALLOC_PREF) }]:, +{ + /// Creates an empty `Vec`. + /// + /// The vector will not allocate until elements are pushed onto it. + default fn default() -> Vec { + Vec::new_co() + } +} + #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_unstable(feature = "const_default_impls", issue = "87864")] impl const Default for Vec { @@ -3040,40 +3338,63 @@ impl const Default for Vec { /// /// The vector will not allocate until elements are pushed onto it. 
fn default() -> Vec { - Vec::new() + Vec::new_co() } } #[stable(feature = "rust1", since = "1.0.0")] -impl fmt::Debug for Vec { +#[allow(unused_braces)] +impl fmt::Debug + for Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Debug::fmt(&**self, f) } } #[stable(feature = "rust1", since = "1.0.0")] -impl AsRef> for Vec { - fn as_ref(&self) -> &Vec { +#[allow(unused_braces)] +impl AsRef> + for Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ + fn as_ref(&self) -> &Vec { self } } #[stable(feature = "vec_as_mut", since = "1.5.0")] -impl AsMut> for Vec { - fn as_mut(&mut self) -> &mut Vec { +#[allow(unused_braces)] +impl AsMut> + for Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ + fn as_mut(&mut self) -> &mut Vec { self } } #[stable(feature = "rust1", since = "1.0.0")] -impl AsRef<[T]> for Vec { +#[allow(unused_braces)] +impl AsRef<[T]> for Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn as_ref(&self) -> &[T] { self } } #[stable(feature = "vec_as_mut", since = "1.5.0")] -impl AsMut<[T]> for Vec { +#[allow(unused_braces)] +impl AsMut<[T]> for Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn as_mut(&mut self) -> &mut [T] { self } @@ -3081,7 +3402,8 @@ impl AsMut<[T]> for Vec { #[cfg(not(no_global_oom_handling))] #[stable(feature = "rust1", since = "1.0.0")] -impl From<&[T]> for Vec { +#[allow(unused_braces)] +impl From<&[T]> for Vec { /// Allocate a `Vec` and fill it by cloning `s`'s items. /// /// # Examples @@ -3101,7 +3423,8 @@ impl From<&[T]> for Vec { #[cfg(not(no_global_oom_handling))] #[stable(feature = "vec_from_mut", since = "1.19.0")] -impl From<&mut [T]> for Vec { +#[allow(unused_braces)] +impl From<&mut [T]> for Vec { /// Allocate a `Vec` and fill it by cloning `s`'s items. 
/// /// # Examples @@ -3121,7 +3444,8 @@ impl From<&mut [T]> for Vec { #[cfg(not(no_global_oom_handling))] #[stable(feature = "vec_from_array", since = "1.44.0")] -impl From<[T; N]> for Vec { +#[allow(unused_braces)] +impl From<[T; N]> for Vec { /// Allocate a `Vec` and move `s`'s items into it. /// /// # Examples @@ -3144,7 +3468,8 @@ impl From<[T; N]> for Vec { } #[stable(feature = "vec_from_cow_slice", since = "1.14.0")] -impl<'a, T> From> for Vec +#[allow(unused_braces)] +impl<'a, T> From> for Vec where [T]: ToOwned>, { @@ -3167,10 +3492,28 @@ where } } -// note: test pulls in std, which causes errors here +// @FIXME unsure about test +#[cfg(not(test))] +#[allow(ineffective_unstable_trait_impl)] //@FIXME What/why is #[unstable(...)] ignored here? +#[unstable(feature = "global_co_alloc", issue = "none")] +#[allow(unused_braces)] +impl From> + for Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ + default fn from(s: Box<[T], A>) -> Self { + s.into_vec_co() + } +} + #[cfg(not(test))] #[stable(feature = "vec_from_box", since = "1.18.0")] -impl From> for Vec { +#[allow(unused_braces)] +impl From> for Vec +where + [(); { crate::meta_num_slots_default!(A) }]:, +{ /// Convert a boxed slice into a vector by transferring ownership of /// the existing heap allocation. /// @@ -3185,11 +3528,30 @@ impl From> for Vec { } } +#[cfg(not(no_global_oom_handling))] +// @FIXME Can this apply to test? +#[cfg(not(test))] +#[allow(ineffective_unstable_trait_impl)] //@FIXME What/why is #[unstable(...)] ignored here? 
+#[unstable(feature = "global_co_alloc", issue = "none")] +#[allow(unused_braces)] +impl From> + for Box<[T], A> +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ + default fn from(v: Vec) -> Self { + v.into_boxed_slice() + } +} // note: test pulls in std, which causes errors here #[cfg(not(no_global_oom_handling))] #[cfg(not(test))] #[stable(feature = "box_from_vec", since = "1.20.0")] -impl From> for Box<[T], A> { +#[allow(unused_braces)] +impl From> for Box<[T], A> +where + [(); { crate::meta_num_slots_default!(A) }]:, +{ /// Convert a vector into a boxed slice. /// /// If `v` has excess capacity, its items will be moved into a @@ -3215,7 +3577,8 @@ impl From> for Box<[T], A> { #[cfg(not(no_global_oom_handling))] #[stable(feature = "rust1", since = "1.0.0")] -impl From<&str> for Vec { +#[allow(unused_braces)] +impl From<&str> for Vec { /// Allocate a `Vec` and fill it with a UTF-8 string. /// /// # Examples @@ -3229,8 +3592,13 @@ impl From<&str> for Vec { } #[stable(feature = "array_try_from_vec", since = "1.48.0")] -impl TryFrom> for [T; N] { - type Error = Vec; +#[allow(unused_braces)] +impl + TryFrom> for [T; N] +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ + type Error = Vec; /// Gets the entire contents of the `Vec` as an array, /// if its size exactly matches that of the requested array. 
@@ -3258,7 +3626,7 @@ impl TryFrom> for [T; N] { /// assert_eq!(a, b' '); /// assert_eq!(b, b'd'); /// ``` - fn try_from(mut vec: Vec) -> Result<[T; N], Vec> { + fn try_from(mut vec: Vec) -> Result<[T; N], Vec> { if vec.len() != N { return Err(vec); } diff --git a/library/alloc/src/vec/partial_eq.rs b/library/alloc/src/vec/partial_eq.rs index b0cf72577a1b..a4fb19794b4d 100644 --- a/library/alloc/src/vec/partial_eq.rs +++ b/library/alloc/src/vec/partial_eq.rs @@ -1,16 +1,19 @@ +//use core::alloc; use crate::alloc::Allocator; #[cfg(not(no_global_oom_handling))] use crate::borrow::Cow; +use crate::co_alloc::CoAllocPref; use super::Vec; macro_rules! __impl_slice_eq1 { - ([$($vars:tt)*] $lhs:ty, $rhs:ty $(where $ty:ty: $bound:ident)?, #[$stability:meta]) => { + ([$($vars:tt)*] $lhs:ty, $rhs:ty, #[$stability:meta], $($constraints:tt)*) => { #[$stability] + #[allow(unused_braces)] impl PartialEq<$rhs> for $lhs where T: PartialEq, - $($ty: $bound)? + $($constraints)* { #[inline] fn eq(&self, other: &$rhs) -> bool { self[..] == other[..] } @@ -20,21 +23,21 @@ macro_rules! __impl_slice_eq1 { } } -__impl_slice_eq1! { [A1: Allocator, A2: Allocator] Vec, Vec, #[stable(feature = "rust1", since = "1.0.0")] } -__impl_slice_eq1! { [A: Allocator] Vec, &[U], #[stable(feature = "rust1", since = "1.0.0")] } -__impl_slice_eq1! { [A: Allocator] Vec, &mut [U], #[stable(feature = "rust1", since = "1.0.0")] } -__impl_slice_eq1! { [A: Allocator] &[T], Vec, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] } -__impl_slice_eq1! { [A: Allocator] &mut [T], Vec, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] } -__impl_slice_eq1! { [A: Allocator] Vec, [U], #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")] } -__impl_slice_eq1! { [A: Allocator] [T], Vec, #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")] } +__impl_slice_eq1! 
{ [A1: Allocator, A2: Allocator, const CO_ALLOC_PREF1: crate::co_alloc::CoAllocPref, const CO_ALLOC_PREF2: crate::co_alloc::CoAllocPref] Vec, Vec, #[stable(feature = "rust1", since = "1.0.0")], [(); {crate::meta_num_slots!(A1, CO_ALLOC_PREF1)}]:, [(); {crate::meta_num_slots!(A2, CO_ALLOC_PREF2)}]: } +__impl_slice_eq1! { [A: Allocator, const CO_ALLOC_PREF: CoAllocPref] Vec, &[U], #[stable(feature = "rust1", since = "1.0.0")], [(); {crate::meta_num_slots!(A, CO_ALLOC_PREF)}]: } +__impl_slice_eq1! { [A: Allocator, const CO_ALLOC_PREF: CoAllocPref] Vec, &mut [U], #[stable(feature = "rust1", since = "1.0.0")], [(); {crate::meta_num_slots!(A, CO_ALLOC_PREF)}]: } +__impl_slice_eq1! { [A: Allocator, const CO_ALLOC_PREF: CoAllocPref] &[T], Vec, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")], [(); {crate::meta_num_slots!(A, CO_ALLOC_PREF)}]: } +__impl_slice_eq1! { [A: Allocator, const CO_ALLOC_PREF: CoAllocPref] &mut [T], Vec, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")], [(); {crate::meta_num_slots!(A, CO_ALLOC_PREF)}]: } +__impl_slice_eq1! { [A: Allocator, const CO_ALLOC_PREF: CoAllocPref] Vec, [U], #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")], [(); {crate::meta_num_slots!(A, CO_ALLOC_PREF)}]: } +__impl_slice_eq1! { [A: Allocator, const CO_ALLOC_PREF: CoAllocPref] [T], Vec, #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")], [(); {crate::meta_num_slots!(A, CO_ALLOC_PREF)}]: } #[cfg(not(no_global_oom_handling))] -__impl_slice_eq1! { [A: Allocator] Cow<'_, [T]>, Vec where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] } +__impl_slice_eq1! { [A: Allocator, const CO_ALLOC_PREF: CoAllocPref] Cow<'_, [T]>, Vec, #[stable(feature = "rust1", since = "1.0.0")], T: Clone, [(); {crate::meta_num_slots!(A, CO_ALLOC_PREF)}]: } #[cfg(not(no_global_oom_handling))] -__impl_slice_eq1! { [] Cow<'_, [T]>, &[U] where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] } +__impl_slice_eq1! 
{ [] Cow<'_, [T]>, &[U], #[stable(feature = "rust1", since = "1.0.0")], T: Clone } #[cfg(not(no_global_oom_handling))] -__impl_slice_eq1! { [] Cow<'_, [T]>, &mut [U] where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] } -__impl_slice_eq1! { [A: Allocator, const N: usize] Vec, [U; N], #[stable(feature = "rust1", since = "1.0.0")] } -__impl_slice_eq1! { [A: Allocator, const N: usize] Vec, &[U; N], #[stable(feature = "rust1", since = "1.0.0")] } +__impl_slice_eq1! { [] Cow<'_, [T]>, &mut [U], #[stable(feature = "rust1", since = "1.0.0")], T: Clone } +__impl_slice_eq1! { [A: Allocator, const CO_ALLOC_PREF: CoAllocPref, const N: usize] Vec, [U; N], #[stable(feature = "rust1", since = "1.0.0")], [(); {crate::meta_num_slots!(A, CO_ALLOC_PREF)}]: } +__impl_slice_eq1! { [A: Allocator, const CO_ALLOC_PREF: CoAllocPref, const N: usize] Vec, &[U; N], #[stable(feature = "rust1", since = "1.0.0")], [(); {crate::meta_num_slots!(A, CO_ALLOC_PREF)}]: } // NOTE: some less important impls are omitted to reduce code bloat // FIXME(Centril): Reconsider this? 
diff --git a/library/alloc/src/vec/spec_extend.rs b/library/alloc/src/vec/spec_extend.rs index 56065ce565bf..546e2c0e0ae7 100644 --- a/library/alloc/src/vec/spec_extend.rs +++ b/library/alloc/src/vec/spec_extend.rs @@ -1,4 +1,5 @@ use crate::alloc::Allocator; +use crate::co_alloc::CoAllocPref; use core::iter::TrustedLen; use core::slice::{self}; @@ -9,25 +10,36 @@ pub(super) trait SpecExtend { fn spec_extend(&mut self, iter: I); } -impl SpecExtend for Vec +#[allow(unused_braces)] +impl SpecExtend + for Vec where I: Iterator, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { default fn spec_extend(&mut self, iter: I) { self.extend_desugared(iter) } } -impl SpecExtend for Vec +#[allow(unused_braces)] +impl SpecExtend + for Vec where I: TrustedLen, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { default fn spec_extend(&mut self, iterator: I) { self.extend_trusted(iterator) } } -impl SpecExtend> for Vec { +#[allow(unused_braces)] +impl SpecExtend> + for Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn spec_extend(&mut self, mut iterator: IntoIter) { unsafe { self.append_elements(iterator.as_slice() as _); @@ -36,19 +48,25 @@ impl SpecExtend> for Vec { } } -impl<'a, T: 'a, I, A: Allocator + 'a> SpecExtend<&'a T, I> for Vec +#[allow(unused_braces)] +impl<'a, T: 'a, I, A: Allocator + 'a, const CO_ALLOC_PREF: CoAllocPref> SpecExtend<&'a T, I> + for Vec where I: Iterator, T: Clone, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { default fn spec_extend(&mut self, iterator: I) { self.spec_extend(iterator.cloned()) } } -impl<'a, T: 'a, A: Allocator + 'a> SpecExtend<&'a T, slice::Iter<'a, T>> for Vec +#[allow(unused_braces)] +impl<'a, T: 'a, A: Allocator + 'a, const CO_ALLOC_PREF: CoAllocPref> + SpecExtend<&'a T, slice::Iter<'a, T>> for Vec where T: Copy, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) { let slice = iterator.as_slice(); diff --git 
a/library/alloc/src/vec/spec_from_elem.rs b/library/alloc/src/vec/spec_from_elem.rs index ff364c033ee9..f6ddf0a6ef22 100644 --- a/library/alloc/src/vec/spec_from_elem.rs +++ b/library/alloc/src/vec/spec_from_elem.rs @@ -1,26 +1,50 @@ use core::ptr; use crate::alloc::Allocator; +use crate::co_alloc::CoAllocPref; use crate::raw_vec::RawVec; use super::{ExtendElement, IsZero, Vec}; // Specialization trait used for Vec::from_elem pub(super) trait SpecFromElem: Sized { - fn from_elem(elem: Self, n: usize, alloc: A) -> Vec; + #[allow(unused_braces)] + fn from_elem( + elem: Self, + n: usize, + alloc: A, + ) -> Vec + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:; } +#[allow(unused_braces)] impl SpecFromElem for T { - default fn from_elem(elem: Self, n: usize, alloc: A) -> Vec { + default fn from_elem( + elem: Self, + n: usize, + alloc: A, + ) -> Vec + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + { let mut v = Vec::with_capacity_in(n, alloc); v.extend_with(n, ExtendElement(elem)); v } } +#[allow(unused_braces)] impl SpecFromElem for T { #[inline] - default fn from_elem(elem: T, n: usize, alloc: A) -> Vec { + default fn from_elem( + elem: T, + n: usize, + alloc: A, + ) -> Vec + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + { if elem.is_zero() { return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n }; } @@ -32,7 +56,15 @@ impl SpecFromElem for T { impl SpecFromElem for i8 { #[inline] - fn from_elem(elem: i8, n: usize, alloc: A) -> Vec { + #[allow(unused_braces)] + fn from_elem( + elem: i8, + n: usize, + alloc: A, + ) -> Vec + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + { if elem == 0 { return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n }; } @@ -47,7 +79,15 @@ impl SpecFromElem for i8 { impl SpecFromElem for u8 { #[inline] - fn from_elem(elem: u8, n: usize, alloc: A) -> Vec { + #[allow(unused_braces)] + fn from_elem( + elem: u8, + n: usize, + alloc: A, + ) -> Vec + where + [(); { 
crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + { if elem == 0 { return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n }; } diff --git a/library/alloc/src/vec/spec_from_iter.rs b/library/alloc/src/vec/spec_from_iter.rs index efa6868473e4..cde02b319bef 100644 --- a/library/alloc/src/vec/spec_from_iter.rs +++ b/library/alloc/src/vec/spec_from_iter.rs @@ -1,3 +1,5 @@ +use crate::alloc::Global; +use crate::co_alloc::CoAllocPref; use core::mem::ManuallyDrop; use core::ptr::{self}; @@ -25,16 +27,23 @@ pub(super) trait SpecFromIter { fn from_iter(iter: I) -> Self; } -impl SpecFromIter for Vec +#[allow(unused_braces)] +impl SpecFromIter for Vec where I: Iterator, + [(); { crate::meta_num_slots_global!(CO_ALLOC_PREF) }]:, { default fn from_iter(iterator: I) -> Self { SpecFromIterNested::from_iter(iterator) } } -impl SpecFromIter> for Vec { +#[allow(unused_braces)] +impl SpecFromIter> + for Vec +where + [(); { crate::meta_num_slots_global!(CO_ALLOC_PREF) }]:, +{ fn from_iter(iterator: IntoIter) -> Self { // A common case is passing a vector into a function which immediately // re-collects into a vector. 
We can short circuit this if the IntoIter @@ -51,11 +60,11 @@ impl SpecFromIter> for Vec { if has_advanced { ptr::copy(it.ptr, it.buf.as_ptr(), it.len()); } - return Vec::from_raw_parts(it.buf.as_ptr(), it.len(), it.cap); + return Vec::from_raw_parts_co(it.buf.as_ptr(), it.len(), it.cap); } } - let mut vec = Vec::new(); + let mut vec = Vec::::new_co(); // must delegate to spec_extend() since extend() itself delegates // to spec_from for empty Vecs vec.spec_extend(iterator); diff --git a/library/alloc/src/vec/spec_from_iter_nested.rs b/library/alloc/src/vec/spec_from_iter_nested.rs index f915ebb86e5a..e567d3697fd2 100644 --- a/library/alloc/src/vec/spec_from_iter_nested.rs +++ b/library/alloc/src/vec/spec_from_iter_nested.rs @@ -2,7 +2,10 @@ use core::cmp; use core::iter::TrustedLen; use core::ptr; +use crate::alloc::Global; +use crate::co_alloc::CoAllocPref; use crate::raw_vec::RawVec; +use crate::CO_ALLOC_PREF_DEFAULT; use super::{SpecExtend, Vec}; @@ -13,9 +16,12 @@ pub(super) trait SpecFromIterNested { fn from_iter(iter: I) -> Self; } -impl SpecFromIterNested for Vec +#[allow(unused_braces)] +impl SpecFromIterNested + for Vec where I: Iterator, + [(); { crate::meta_num_slots_global!(CO_ALLOC_PREF) }]:, { default fn from_iter(mut iterator: I) -> Self { // Unroll the first iteration, as the vector is going to be @@ -24,12 +30,12 @@ where // vector being full in the few subsequent loop iterations. // So we get better branch prediction. 
let mut vector = match iterator.next() { - None => return Vec::new(), + None => return Vec::new_co(), Some(element) => { let (lower, _) = iterator.size_hint(); let initial_capacity = cmp::max(RawVec::::MIN_NON_ZERO_CAP, lower.saturating_add(1)); - let mut vector = Vec::with_capacity(initial_capacity); + let mut vector = Vec::with_capacity_co(initial_capacity); unsafe { // SAFETY: We requested capacity at least 1 ptr::write(vector.as_mut_ptr(), element); @@ -40,12 +46,13 @@ where }; // must delegate to spec_extend() since extend() itself delegates // to spec_from for empty Vecs - as SpecExtend>::spec_extend(&mut vector, iterator); + as SpecExtend>::spec_extend(&mut vector, iterator); vector } } -impl SpecFromIterNested for Vec +#[allow(unused_braces)] +impl SpecFromIterNested for Vec where I: TrustedLen, { diff --git a/library/alloc/src/vec/splice.rs b/library/alloc/src/vec/splice.rs index 1861147fe72f..b48e021e522f 100644 --- a/library/alloc/src/vec/splice.rs +++ b/library/alloc/src/vec/splice.rs @@ -1,4 +1,5 @@ use crate::alloc::{Allocator, Global}; +use crate::co_alloc::CoAllocPref; use core::ptr::{self}; use core::slice::{self}; @@ -18,17 +19,26 @@ use super::{Drain, Vec}; /// ``` #[derive(Debug)] #[stable(feature = "vec_splice", since = "1.21.0")] +#[allow(unused_braces)] pub struct Splice< 'a, I: Iterator + 'a, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + 'a = Global, -> { - pub(super) drain: Drain<'a, I::Item, A>, + const CO_ALLOC_PREF: CoAllocPref = { CO_ALLOC_PREF_DEFAULT!() }, +> where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ + pub(super) drain: Drain<'a, I::Item, A, CO_ALLOC_PREF>, pub(super) replace_with: I, } #[stable(feature = "vec_splice", since = "1.21.0")] -impl Iterator for Splice<'_, I, A> { +#[allow(unused_braces)] +impl Iterator + for Splice<'_, I, A, CO_ALLOC_PREF> +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ type Item = I::Item; fn next(&mut self) -> Option { @@ -41,17 +51,33 @@ 
impl Iterator for Splice<'_, I, A> { } #[stable(feature = "vec_splice", since = "1.21.0")] -impl DoubleEndedIterator for Splice<'_, I, A> { +#[allow(unused_braces)] +impl DoubleEndedIterator + for Splice<'_, I, A, CO_ALLOC_PREF> +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn next_back(&mut self) -> Option { self.drain.next_back() } } #[stable(feature = "vec_splice", since = "1.21.0")] -impl ExactSizeIterator for Splice<'_, I, A> {} +#[allow(unused_braces)] +impl ExactSizeIterator + for Splice<'_, I, A, CO_ALLOC_PREF> +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ +} #[stable(feature = "vec_splice", since = "1.21.0")] -impl Drop for Splice<'_, I, A> { +#[allow(unused_braces)] +impl Drop + for Splice<'_, I, A, CO_ALLOC_PREF> +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ fn drop(&mut self) { self.drain.by_ref().for_each(drop); // At this point draining is done and the only remaining tasks are splicing @@ -98,7 +124,11 @@ impl Drop for Splice<'_, I, A> { } /// Private helper methods for `Splice::drop` -impl Drain<'_, T, A> { +#[allow(unused_braces)] +impl Drain<'_, T, A, CO_ALLOC_PREF> +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ /// The range from `self.vec.len` to `self.tail_start` contains elements /// that have been moved out. /// Fill that range as much as possible with new elements from the `replace_with` iterator. 
diff --git a/library/alloc/tests/autotraits.rs b/library/alloc/tests/autotraits.rs index 879e32b3fa30..0a14b165a74f 100644 --- a/library/alloc/tests/autotraits.rs +++ b/library/alloc/tests/autotraits.rs @@ -1,3 +1,5 @@ +use alloc::{CO_ALLOC_PREF_META_YES, CO_ALLOC_PREF_META_NO}; + fn require_sync(_: T) {} fn require_send_sync(_: T) {} @@ -192,7 +194,12 @@ fn test_binary_heap() { }); require_send_sync(async { - let _v = None::>; + let _v = None::>; + async {}.await; + }); + + require_send_sync(async { + let _v = None::>; async {}.await; }); diff --git a/library/alloc/tests/boxed.rs b/library/alloc/tests/boxed.rs index af49826ff30a..656e079f9c61 100644 --- a/library/alloc/tests/boxed.rs +++ b/library/alloc/tests/boxed.rs @@ -61,6 +61,7 @@ fn box_deref_lval() { pub struct ConstAllocator; +//@FIXME unsafe impl const Allocator for ConstAllocator { fn allocate(&self, layout: Layout) -> Result, AllocError> { match layout.size() { diff --git a/library/alloc/tests/lib.rs b/library/alloc/tests/lib.rs index 2a93a242d517..1a435018e0b1 100644 --- a/library/alloc/tests/lib.rs +++ b/library/alloc/tests/lib.rs @@ -2,6 +2,7 @@ #![feature(alloc_layout_extra)] #![feature(assert_matches)] #![feature(btree_drain_filter)] +#![feature(global_co_alloc_meta)] #![feature(cow_is_borrowed)] #![feature(const_box)] #![feature(const_convert)] diff --git a/library/core/src/alloc/global.rs b/library/core/src/alloc/global.rs index 18da70451f29..081858e2fb51 100644 --- a/library/core/src/alloc/global.rs +++ b/library/core/src/alloc/global.rs @@ -1,7 +1,16 @@ use crate::alloc::Layout; +use crate::alloc::{CoAllocMetaBase, CoAllocMetaPlain}; use crate::cmp; use crate::ptr; +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +#[derive(Debug)] +/// Used for parameters and results (to/from `GlobalCoAllocator`'s functions, where applicable). 
+pub struct RawAndMeta { + pub ptr: *mut u8, + pub meta: M, +} + /// A memory allocator that can be registered as the standard library’s default /// through the `#[global_allocator]` attribute. /// @@ -121,6 +130,13 @@ use crate::ptr; /// having side effects. #[stable(feature = "global_alloc", since = "1.28.0")] pub unsafe trait GlobalAlloc { + /// NOT for public use. The default value MAY be REMOVED or CHANGED. + /// + /// @FIXME Validate (preferrable at compile time, otherwise as a test) that this type's + /// alignment <= `usize` alignment. + #[unstable(feature = "global_co_alloc_meta", issue = "none")] + type CoAllocMeta: CoAllocMetaBase = CoAllocMetaPlain; + /// Allocate memory as described by the given `layout`. /// /// Returns a pointer to newly-allocated memory, @@ -156,6 +172,11 @@ pub unsafe trait GlobalAlloc { #[stable(feature = "global_alloc", since = "1.28.0")] unsafe fn alloc(&self, layout: Layout) -> *mut u8; + #[unstable(feature = "global_co_alloc", issue = "none")] + unsafe fn co_alloc(&self, _layout: Layout, mut _result: &mut RawAndMeta) { + panic!("@FIXME") + } + /// Deallocate the block of memory at the given `ptr` pointer with the given `layout`. /// /// # Safety @@ -171,6 +192,11 @@ pub unsafe trait GlobalAlloc { #[stable(feature = "global_alloc", since = "1.28.0")] unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout); + #[unstable(feature = "global_co_alloc", issue = "none")] + unsafe fn co_dealloc(&self, _ptr_and_meta: RawAndMeta, _layout: Layout) { + panic!("@FIXME") + } + /// Behaves like `alloc`, but also ensures that the contents /// are set to zero before being returned. /// @@ -198,11 +224,27 @@ pub unsafe trait GlobalAlloc { if !ptr.is_null() { // SAFETY: as allocation succeeded, the region from `ptr` // of size `size` is guaranteed to be valid for writes. 
- unsafe { ptr::write_bytes(ptr, 0, size) }; + unsafe { ptr::write_bytes(ptr, 0u8, size) }; } ptr } + #[unstable(feature = "global_co_alloc", issue = "none")] + unsafe fn co_alloc_zeroed( + &self, + layout: Layout, + mut result: &mut RawAndMeta, + ) { + let size = layout.size(); + // SAFETY: the safety contract for `alloc` must be upheld by the caller. + unsafe { self.co_alloc(layout, &mut result) }; + if !result.ptr.is_null() { + // SAFETY: as allocation succeeded, the region from `ptr_and_meta.ptr` of size `size` is + // guaranteed to be valid for writes. + unsafe { ptr::write_bytes(result.ptr, 0u8, size) }; + } + } + /// Shrink or grow a block of memory to the given `new_size` in bytes. /// The block is described by the given `ptr` pointer and `layout`. /// @@ -275,4 +317,31 @@ pub unsafe trait GlobalAlloc { } new_ptr } + + #[unstable(feature = "global_co_alloc", issue = "none")] + unsafe fn co_realloc( + &self, + ptr_and_meta: RawAndMeta, + layout: Layout, + new_size: usize, + mut result: &mut RawAndMeta, + ) { + // SAFETY: the caller must ensure that the `new_size` does not overflow. + // `layout.align()` comes from a `Layout` and is thus guaranteed to be valid. + let new_layout = unsafe { Layout::from_size_align_unchecked(new_size, layout.align()) }; + // SAFETY: the caller must ensure that `new_layout` is greater than zero. + unsafe { self.co_alloc(new_layout, &mut result) }; + if !result.ptr.is_null() { + // SAFETY: the previously allocated block cannot overlap the newly allocated block. + // The safety contract for `dealloc` must be upheld by the caller. 
+ unsafe { + ptr::copy_nonoverlapping( + ptr_and_meta.ptr, + result.ptr, + cmp::min(layout.size(), new_size), + ); + self.co_dealloc(ptr_and_meta, layout); + } + } + } } diff --git a/library/core/src/alloc/mod.rs b/library/core/src/alloc/mod.rs index a6082455fac8..609cbedda341 100644 --- a/library/core/src/alloc/mod.rs +++ b/library/core/src/alloc/mod.rs @@ -48,6 +48,77 @@ impl fmt::Display for AllocError { } } +/// (Non-Null) Pointer and coallocation metadata. +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +#[derive(Clone, Copy, Debug)] +pub struct PtrAndMeta { + pub ptr: NonNull, + pub meta: M, +} + +/// (NonNull) Slice and coallocation metadata. +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +#[derive(Clone, Copy, Debug)] +/// Used for results (from `CoAllocator`'s functions, where applicable). +pub struct SliceAndMeta { + pub slice: NonNull<[u8]>, + pub meta: M, +} + +/// `Result` of `SliceAndMeta` or `AllocError`. +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +pub type SliceAndMetaResult = Result, AllocError>; + +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +#[const_trait] +pub trait CoAllocMetaBase: Clone + Copy { + /// NOT for public use. This MAY BE REMOVED or CHANGED. + /// + /// For EXPERIMENTATION only. + const ZERO_METAS: [Self; 0]; + const ONE_METAS: [Self; 1]; + + /// NOT for public use. This MAY BE REMOVED or CHANGED. + /// + /// For EXPERIMENTATION only. 
+ fn new_plain() -> Self; +} + +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +#[derive(Clone, Copy, Debug)] +pub struct CoAllocMetaPlain {} + +const CO_ALLOC_META_PLAIN: CoAllocMetaPlain = CoAllocMetaPlain {}; + +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +impl const CoAllocMetaBase for CoAllocMetaPlain { + const ZERO_METAS: [Self; 0] = []; + const ONE_METAS: [Self; 1] = [CO_ALLOC_META_PLAIN]; + + fn new_plain() -> Self { + CO_ALLOC_META_PLAIN + } +} + +/// Whether an `Allocator` implementation supports coallocation. +/// +/// This type WILL CHANGE (once ``#![feature(generic_const_exprs)]` and +/// `#![feature(adt_const_params)]` are stable) to a dedicated struct/enum. Hence: +/// - DO NOT mix this/cast this with/to `u8`, `u16`, (nor any other integer); and +/// - DO NOT hard code any values, but use `CO_ALLOCATOR_SUPPORTS_META_YES` and `CO_ALLOCATOR_SUPPORTS_META_NO`. +// @FIXME Once ICE is fixed: Change to `u32` (or any other unused unsinged integer type, and other +// than `usize`, so we can't mix it up with `usize`). +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +pub type CoAllocatorMetaNumSlots = usize; + +/// Indicating that an Allocator supports coallocation (if a type of the allocated instances supports it, too). +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +pub const CO_ALLOCATOR_SUPPORTS_META_YES: CoAllocatorMetaNumSlots = 1; + +/// Indicating that an Allocator does not support coallocation. +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +pub const CO_ALLOCATOR_SUPPORTS_META_NO: CoAllocatorMetaNumSlots = 0; + /// An implementation of `Allocator` can allocate, grow, shrink, and deallocate arbitrary blocks of /// data described via [`Layout`][]. /// @@ -107,6 +178,20 @@ impl fmt::Display for AllocError { #[unstable(feature = "allocator_api", issue = "32838")] #[const_trait] pub unsafe trait Allocator { + /// NOT for public use. MAY CHANGE. 
+ const CO_ALLOC_META_NUM_SLOTS: CoAllocatorMetaNumSlots = CO_ALLOCATOR_SUPPORTS_META_NO; + + /// Type to store coallocation metadata (if both the allocator and the heap-based type support + /// coallocation, and if coallocation is used). + /// + /// If this is any type with non-zero size, then the actual `Allocator` implementation supports + /// cooperative functions (`co_*`) as first class citizens. NOT for public use. The default + /// value MAY be REMOVED or CHANGED. + /// + /// @FIXME Validate (preferrable at compile time, otherwise as a test) that this type's + /// alignment <= `usize` alignment. + type CoAllocMeta: ~const CoAllocMetaBase = CoAllocMetaPlain; + /// Attempts to allocate a block of memory. /// /// On success, returns a [`NonNull<[u8]>`][NonNull] meeting the size and alignment guarantees of `layout`. @@ -129,6 +214,10 @@ pub unsafe trait Allocator { /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html fn allocate(&self, layout: Layout) -> Result, AllocError>; + fn co_allocate(&self, _layout: Layout, _result: &mut SliceAndMetaResult) { + panic!("FIXME") + } + /// Behaves like `allocate`, but also ensures that the returned memory is zero-initialized. /// /// # Errors @@ -151,6 +240,18 @@ pub unsafe trait Allocator { Ok(ptr) } + fn co_allocate_zeroed( + &self, + layout: Layout, + mut result: &mut SliceAndMetaResult, + ) { + self.co_allocate(layout, &mut result); + if let Ok(SliceAndMeta { slice, .. }) = result { + // SAFETY: `alloc` returns a valid memory block + unsafe { slice.as_non_null_ptr().as_ptr().write_bytes(0, slice.len()) } + } + } + /// Deallocates the memory referenced by `ptr`. /// /// # Safety @@ -162,6 +263,10 @@ pub unsafe trait Allocator { /// [*fit*]: #memory-fitting unsafe fn deallocate(&self, ptr: NonNull, layout: Layout); + unsafe fn co_deallocate(&self, _ptr_and_meta: PtrAndMeta, _layout: Layout) { + panic!("FIXME") + } + /// Attempts to extend the memory block. 
/// /// Returns a new [`NonNull<[u8]>`][NonNull] containing a pointer and the actual size of the allocated @@ -226,6 +331,37 @@ pub unsafe trait Allocator { Ok(new_ptr) } + unsafe fn co_grow( + &self, + ptr_and_meta: PtrAndMeta, + old_layout: Layout, + new_layout: Layout, + mut result: &mut SliceAndMetaResult, + ) { + debug_assert!( + new_layout.size() >= old_layout.size(), + "`new_layout.size()` must be greater than or equal to `old_layout.size()`" + ); + + self.co_allocate(new_layout, &mut result); + + if let Ok(SliceAndMeta { slice, .. }) = result { + // SAFETY: because `new_layout.size()` must be greater than or equal to + // `old_layout.size()`, both the old and new memory allocation are valid for reads and + // writes for `old_layout.size()` bytes. Also, because the old allocation wasn't yet + // deallocated, it cannot overlap `new_slice_and_meta.slice`. Thus, the call to `copy_nonoverlapping` is + // safe. The safety contract for `dealloc` must be upheld by the caller. + unsafe { + ptr::copy_nonoverlapping( + ptr_and_meta.ptr.as_ptr(), + slice.as_mut_ptr(), + old_layout.size(), + ); + self.co_deallocate(ptr_and_meta, old_layout); + } + } + } + /// Behaves like `grow`, but also ensures that the new contents are set to zero before being /// returned. /// @@ -289,6 +425,37 @@ pub unsafe trait Allocator { Ok(new_ptr) } + unsafe fn co_grow_zeroed( + &self, + ptr_and_meta: PtrAndMeta, + old_layout: Layout, + new_layout: Layout, + mut result: &mut SliceAndMetaResult, + ) { + debug_assert!( + new_layout.size() >= old_layout.size(), + "`new_layout.size()` must be greater than or equal to `old_layout.size()`" + ); + + self.co_allocate_zeroed(new_layout, &mut result); + + if let Ok(SliceAndMeta { slice, .. }) = result { + // SAFETY: because `new_layout.size()` must be greater than or equal to + // `old_layout.size()`, both the old and new memory allocation are valid for reads and + // writes for `old_layout.size()` bytes. 
Also, because the old allocation wasn't yet + // deallocated, it cannot overlap `new_slice_and_meta.slice`. Thus, the call to `copy_nonoverlapping` is + // safe. The safety contract for `dealloc` must be upheld by the caller. + unsafe { + ptr::copy_nonoverlapping( + ptr_and_meta.ptr.as_ptr(), + slice.as_mut_ptr(), + old_layout.size(), + ); + self.co_deallocate(ptr_and_meta, old_layout); + } + } + } + /// Attempts to shrink the memory block. /// /// Returns a new [`NonNull<[u8]>`][NonNull] containing a pointer and the actual size of the allocated @@ -353,6 +520,37 @@ pub unsafe trait Allocator { Ok(new_ptr) } + unsafe fn co_shrink( + &self, + ptr_and_meta: PtrAndMeta, + old_layout: Layout, + new_layout: Layout, + mut result: &mut SliceAndMetaResult, + ) { + debug_assert!( + new_layout.size() <= old_layout.size(), + "`new_layout.size()` must be smaller than or equal to `old_layout.size()`" + ); + + self.co_allocate(new_layout, &mut result); + + if let Ok(SliceAndMeta { slice, .. }) = result { + // SAFETY: because `new_layout.size()` must be lower than or equal to + // `old_layout.size()`, both the old and new memory allocation are valid for reads and + // writes for `new_layout.size()` bytes. Also, because the old allocation wasn't yet + // deallocated, it cannot overlap `new_slice_and_meta.slice`. Thus, the call to `copy_nonoverlapping` is + // safe. The safety contract for `dealloc` must be upheld by the caller. + unsafe { + ptr::copy_nonoverlapping( + ptr_and_meta.ptr.as_ptr(), + slice.as_mut_ptr(), + new_layout.size(), + ); + self.co_deallocate(ptr_and_meta, old_layout); + } + } + } + /// Creates a "by reference" adapter for this instance of `Allocator`. /// /// The returned adapter also implements `Allocator` and will simply borrow this. 
@@ -365,6 +563,7 @@ pub unsafe trait Allocator { } } +// @FIXME #[unstable(feature = "allocator_api", issue = "32838")] unsafe impl Allocator for &A where diff --git a/library/core/src/lib.rs b/library/core/src/lib.rs index dc0702c467a4..ba4a2ca02d84 100644 --- a/library/core/src/lib.rs +++ b/library/core/src/lib.rs @@ -98,6 +98,7 @@ #![cfg_attr(not(bootstrap), warn(multiple_supertrait_upcastable))] // // Library features: +#![feature(associated_type_defaults)] #![feature(const_align_offset)] #![feature(const_align_of_val)] #![feature(const_align_of_val_raw)] diff --git a/library/proc_macro/src/bridge/mod.rs b/library/proc_macro/src/bridge/mod.rs index 4c1e196b5ad1..e7980d44fd49 100644 --- a/library/proc_macro/src/bridge/mod.rs +++ b/library/proc_macro/src/bridge/mod.rs @@ -9,6 +9,7 @@ #![deny(unsafe_code)] use crate::{Delimiter, Level, LineColumn, Spacing}; +use std::alloc::Global; use std::fmt; use std::hash::Hash; use std::marker; @@ -252,14 +253,14 @@ impl<'a, T, M> Unmark for &'a mut Marked { } } -impl Mark for Vec { +impl Mark for Vec { type Unmarked = Vec; fn mark(unmarked: Self::Unmarked) -> Self { // Should be a no-op due to std's in-place collect optimizations. unmarked.into_iter().map(T::mark).collect() } } -impl Unmark for Vec { +impl Unmark for Vec { type Unmarked = Vec; fn unmark(self) -> Self::Unmarked { // Should be a no-op due to std's in-place collect optimizations. diff --git a/library/proc_macro/src/bridge/rpc.rs b/library/proc_macro/src/bridge/rpc.rs index 5b1bfb30983b..e116a54b7e73 100644 --- a/library/proc_macro/src/bridge/rpc.rs +++ b/library/proc_macro/src/bridge/rpc.rs @@ -1,9 +1,12 @@ //! Serialization for client-server communication. 
+use std::alloc::Global; use std::any::Any; use std::io::Write; use std::num::NonZeroU32; use std::str; +//use alloc::alloc::Global; +//use std::CO_ALLOC_PREF_DEFAULT; pub(super) type Writer = super::buffer::Buffer; @@ -224,7 +227,7 @@ impl DecodeMut<'_, '_, S> for String { } } -impl> Encode for Vec { +impl> Encode for Vec { fn encode(self, w: &mut Writer, s: &mut S) { self.len().encode(w, s); for x in self { @@ -233,7 +236,9 @@ impl> Encode for Vec { } } -impl<'a, S, T: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, '_, S> for Vec { +impl<'a, S, T: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, '_, S> + for Vec +{ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self { let len = usize::decode(r, s); let mut vec = Vec::with_capacity(len); diff --git a/library/proc_macro/src/diagnostic.rs b/library/proc_macro/src/diagnostic.rs index 5a209f7c7aa1..9a81e6006114 100644 --- a/library/proc_macro/src/diagnostic.rs +++ b/library/proc_macro/src/diagnostic.rs @@ -1,4 +1,5 @@ use crate::Span; +use std::alloc::Global; /// An enum representing a diagnostic level. #[unstable(feature = "proc_macro_diagnostic", issue = "54140")] @@ -30,7 +31,7 @@ impl MultiSpan for Span { } #[unstable(feature = "proc_macro_diagnostic", issue = "54140")] -impl MultiSpan for Vec { +impl MultiSpan for Vec { fn into_spans(self) -> Vec { self } diff --git a/library/proc_macro/src/lib.rs b/library/proc_macro/src/lib.rs index 938935771d64..5e6be69bf810 100644 --- a/library/proc_macro/src/lib.rs +++ b/library/proc_macro/src/lib.rs @@ -22,6 +22,7 @@ // to make it compile with rust-analyzer on stable. 
#![feature(rustc_allow_const_fn_unstable)] #![feature(staged_api)] +#![feature(allocator_api)] #![feature(allow_internal_unstable)] #![feature(decl_macro)] #![feature(local_key_cell_methods)] @@ -33,6 +34,9 @@ #![feature(min_specialization)] #![feature(strict_provenance)] #![recursion_limit = "256"] +#![feature(global_co_alloc)] +#![feature(global_co_alloc_default)] +#![feature(global_co_alloc_meta)] #[unstable(feature = "proc_macro_internals", issue = "27812")] #[doc(hidden)] diff --git a/library/std/src/alloc.rs b/library/std/src/alloc.rs index c5a5991cc81c..a2dcb9d5b78a 100644 --- a/library/std/src/alloc.rs +++ b/library/std/src/alloc.rs @@ -197,6 +197,7 @@ impl System { } } +// @FIXME // The Allocator impl checks the layout size to be non-zero and forwards to the GlobalAlloc impl, // which is in `std::sys::*::alloc`. #[unstable(feature = "allocator_api", issue = "32838")] diff --git a/library/std/src/io/cursor.rs b/library/std/src/io/cursor.rs index d98ab021cadb..2a95ad4ceb2d 100644 --- a/library/std/src/io/cursor.rs +++ b/library/std/src/io/cursor.rs @@ -6,6 +6,7 @@ use crate::io::prelude::*; use crate::alloc::Allocator; use crate::cmp; use crate::io::{self, BorrowedCursor, ErrorKind, IoSlice, IoSliceMut, SeekFrom}; +use ::alloc::{co_alloc::CoAllocPref, meta_num_slots}; /// A `Cursor` wraps an in-memory buffer and provides it with a /// [`Seek`] implementation. @@ -397,11 +398,15 @@ fn slice_write_vectored( } /// Reserves the required space, and pads the vec with 0s if necessary. 
-fn reserve_and_pad( +#[allow(unused_braces)] +fn reserve_and_pad( pos_mut: &mut u64, - vec: &mut Vec, + vec: &mut Vec, buf_len: usize, -) -> io::Result { +) -> io::Result +where + [(); { meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ let pos: usize = (*pos_mut).try_into().map_err(|_| { io::const_io_error!( ErrorKind::InvalidInput, @@ -440,9 +445,15 @@ fn reserve_and_pad( /// Writes the slice to the vec without allocating /// # Safety: vec must have buf.len() spare capacity -unsafe fn vec_write_unchecked(pos: usize, vec: &mut Vec, buf: &[u8]) -> usize +#[allow(unused_braces)] +unsafe fn vec_write_unchecked( + pos: usize, + vec: &mut Vec, + buf: &[u8], +) -> usize where A: Allocator, + [(); { meta_num_slots!(A, CO_ALLOC_PREF) }]:, { debug_assert!(vec.capacity() >= pos + buf.len()); vec.as_mut_ptr().add(pos).copy_from(buf.as_ptr(), buf.len()); @@ -458,9 +469,15 @@ where /// This also allows for the vec body to be empty, but with a position of N. /// This means that [`Write`] will pad the vec with 0 initially, /// before writing anything from that point -fn vec_write(pos_mut: &mut u64, vec: &mut Vec, buf: &[u8]) -> io::Result +#[allow(unused_braces)] +fn vec_write( + pos_mut: &mut u64, + vec: &mut Vec, + buf: &[u8], +) -> io::Result where A: Allocator, + [(); { meta_num_slots!(A, CO_ALLOC_PREF) }]:, { let buf_len = buf.len(); let mut pos = reserve_and_pad(pos_mut, vec, buf_len)?; @@ -489,13 +506,15 @@ where /// This also allows for the vec body to be empty, but with a position of N. /// This means that [`Write`] will pad the vec with 0 initially, /// before writing anything from that point -fn vec_write_vectored( +#[allow(unused_braces)] +fn vec_write_vectored( pos_mut: &mut u64, - vec: &mut Vec, + vec: &mut Vec, bufs: &[IoSlice<'_>], ) -> io::Result where A: Allocator, + [(); { meta_num_slots!(A, CO_ALLOC_PREF) }]:, { // For safety reasons, we don't want this sum to overflow ever. // If this saturates, the reserve should panic to avoid any unsound writing. 
@@ -543,9 +562,11 @@ impl Write for Cursor<&mut [u8]> { } #[stable(feature = "cursor_mut_vec", since = "1.25.0")] -impl Write for Cursor<&mut Vec> +#[allow(unused_braces)] +impl Write for Cursor<&mut Vec> where A: Allocator, + [(); { meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn write(&mut self, buf: &[u8]) -> io::Result { vec_write(&mut self.pos, self.inner, buf) @@ -567,9 +588,11 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl Write for Cursor> +#[allow(unused_braces)] +impl Write for Cursor> where A: Allocator, + [(); { meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn write(&mut self, buf: &[u8]) -> io::Result { vec_write(&mut self.pos, &mut self.inner, buf) diff --git a/library/std/src/io/impls.rs b/library/std/src/io/impls.rs index e5048dcc8acd..541f882d84cc 100644 --- a/library/std/src/io/impls.rs +++ b/library/std/src/io/impls.rs @@ -9,6 +9,7 @@ use crate::io::{ self, BorrowedCursor, BufRead, ErrorKind, IoSlice, IoSliceMut, Read, Seek, SeekFrom, Write, }; use crate::mem; +use ::alloc::{co_alloc::CoAllocPref, meta_num_slots}; // ============================================================================= // Forwarding implementations @@ -378,7 +379,11 @@ impl Write for &mut [u8] { /// Write is implemented for `Vec` by appending to the vector. /// The vector will grow as needed. #[stable(feature = "rust1", since = "1.0.0")] -impl Write for Vec { +#[allow(unused_braces)] +impl Write for Vec +where + [(); { meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ #[inline] fn write(&mut self, buf: &[u8]) -> io::Result { self.extend_from_slice(buf); @@ -414,7 +419,11 @@ impl Write for Vec { /// Read is implemented for `VecDeque` by consuming bytes from the front of the `VecDeque`. 
#[stable(feature = "vecdeque_read_write", since = "1.63.0")] -impl Read for VecDeque { +#[allow(unused_braces)] +impl Read for VecDeque +where + [(); { meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ /// Fill `buf` with the contents of the "front" slice as returned by /// [`as_slices`][`VecDeque::as_slices`]. If the contained byte slices of the `VecDeque` are /// discontiguous, multiple calls to `read` will be needed to read the entire content. @@ -438,7 +447,11 @@ impl Read for VecDeque { /// Write is implemented for `VecDeque` by appending to the `VecDeque`, growing it as needed. #[stable(feature = "vecdeque_read_write", since = "1.63.0")] -impl Write for VecDeque { +#[allow(unused_braces)] +impl Write for VecDeque +where + [(); { meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ #[inline] fn write(&mut self, buf: &[u8]) -> io::Result { self.extend(buf); diff --git a/library/std/src/lib.rs b/library/std/src/lib.rs index 363a26671746..7ffb41a32d73 100644 --- a/library/std/src/lib.rs +++ b/library/std/src/lib.rs @@ -214,6 +214,12 @@ #![needs_panic_runtime] // // Lints: +#![allow(incomplete_features)] +#![feature(generic_const_exprs)] +#![feature(global_co_alloc)] +#![feature(global_co_alloc_default)] +#![feature(global_co_alloc_plvec)] +#![feature(global_co_alloc_meta)] #![warn(deprecated_in_future)] #![warn(missing_docs)] #![warn(missing_debug_implementations)] @@ -320,6 +326,7 @@ #![feature(try_reserve_kind)] #![feature(vec_into_raw_parts)] #![feature(slice_concat_trait)] +#![feature(vec_new_co)] // // Library features (unwind): #![feature(panic_unwind)] @@ -410,6 +417,9 @@ pub mod prelude; pub use alloc_crate::borrow; #[stable(feature = "rust1", since = "1.0.0")] pub use alloc_crate::boxed; +#[unstable(feature = "global_co_alloc", issue = "none")] +pub use alloc_crate::co_alloc; +// @FIXME ugly - someone move this to a better place, please #[stable(feature = "rust1", since = "1.0.0")] pub use alloc_crate::fmt; #[stable(feature = "rust1", since = "1.0.0")] @@ -424,6 +434,8 @@ 
pub use alloc_crate::str; pub use alloc_crate::string; #[stable(feature = "rust1", since = "1.0.0")] pub use alloc_crate::vec; +#[unstable(feature = "global_co_alloc", issue = "none")] +pub use alloc_crate::{CO_ALLOC_PREF_DEFAULT, SHORT_TERM_VEC_CO_ALLOC_PREF}; #[stable(feature = "rust1", since = "1.0.0")] pub use core::any; #[stable(feature = "core_array", since = "1.36.0")] diff --git a/library/std/src/sys/hermit/thread_local_dtor.rs b/library/std/src/sys/hermit/thread_local_dtor.rs index 613266b9530a..09b2df46a0f2 100644 --- a/library/std/src/sys/hermit/thread_local_dtor.rs +++ b/library/std/src/sys/hermit/thread_local_dtor.rs @@ -1,14 +1,16 @@ #![cfg(target_thread_local)] #![unstable(feature = "thread_local_internals", issue = "none")] +#![feature(global_co_alloc_plvec)] // Simplify dtor registration by using a list of destructors. // The this solution works like the implementation of macOS and // doesn't additional OS support use crate::mem; +use core::alloc::PlVec; #[thread_local] -static mut DTORS: Vec<(*mut u8, unsafe extern "C" fn(*mut u8))> = Vec::new(); +static mut DTORS: PlVec<(*mut u8, unsafe extern "C" fn(*mut u8))> = PlVec::new(); pub unsafe fn register_dtor(t: *mut u8, dtor: unsafe extern "C" fn(*mut u8)) { let list = &mut DTORS; diff --git a/library/std/src/sys/solid/thread_local_dtor.rs b/library/std/src/sys/solid/thread_local_dtor.rs index bad14bb37f72..59263cd41c76 100644 --- a/library/std/src/sys/solid/thread_local_dtor.rs +++ b/library/std/src/sys/solid/thread_local_dtor.rs @@ -1,17 +1,20 @@ #![cfg(target_thread_local)] #![unstable(feature = "thread_local_internals", issue = "none")] +#![feature(global_co_alloc_plvec)] +#![feature(global_co_alloc_plvec)] // Simplify dtor registration by using a list of destructors. 
use super::{abi, itron::task}; use crate::cell::Cell; use crate::mem; +use core::alloc::PlVec; #[thread_local] static REGISTERED: Cell = Cell::new(false); #[thread_local] -static mut DTORS: Vec<(*mut u8, unsafe extern "C" fn(*mut u8))> = Vec::new(); +static mut DTORS: PlVec<(*mut u8, unsafe extern "C" fn(*mut u8))> = PlVec::new(); pub unsafe fn register_dtor(t: *mut u8, dtor: unsafe extern "C" fn(*mut u8)) { if !REGISTERED.get() { diff --git a/library/std/src/sys/unix/thread_local_dtor.rs b/library/std/src/sys/unix/thread_local_dtor.rs index 236d2f2ee292..16f237fa6a61 100644 --- a/library/std/src/sys/unix/thread_local_dtor.rs +++ b/library/std/src/sys/unix/thread_local_dtor.rs @@ -58,13 +58,22 @@ pub unsafe fn register_dtor(t: *mut u8, dtor: unsafe extern "C" fn(*mut u8)) { static REGISTERED: Cell = Cell::new(false); #[thread_local] - static mut DTORS: Vec<(*mut u8, unsafe extern "C" fn(*mut u8))> = Vec::new(); + static mut DTORS: PlVec<(*mut u8, unsafe extern "C" fn(*mut u8))> = PlVec::new(); if !REGISTERED.get() { _tlv_atexit(run_dtors, ptr::null_mut()); REGISTERED.set(true); } + type List = alloc::vec::PlVec<(*mut u8, unsafe extern "C" fn(*mut u8))>; + + #[thread_local] + static DTORS: Cell<*mut List> = Cell::new(ptr::null_mut()); + if DTORS.get().is_null() { + let v: Box = box Vec::new(); + DTORS.set(Box::into_raw(v)); + } + extern "C" { fn _tlv_atexit(dtor: unsafe extern "C" fn(*mut u8), arg: *mut u8); } diff --git a/library/std/src/sys/windows/mod.rs b/library/std/src/sys/windows/mod.rs index 77359abe4299..0296c2dab555 100644 --- a/library/std/src/sys/windows/mod.rs +++ b/library/std/src/sys/windows/mod.rs @@ -204,6 +204,7 @@ where // incorrect size hints for some short paths: // https://github.com/dylni/normpath/issues/5 let mut stack_buf: [MaybeUninit; 512] = MaybeUninit::uninit_array(); + // @FIXME Use CoVec? 
let mut heap_buf: Vec> = Vec::new(); unsafe { let mut n = stack_buf.len(); diff --git a/library/std/src/sys/windows/thread_local_dtor.rs b/library/std/src/sys/windows/thread_local_dtor.rs index 9707a95dff21..cbadd2dd23ae 100644 --- a/library/std/src/sys/windows/thread_local_dtor.rs +++ b/library/std/src/sys/windows/thread_local_dtor.rs @@ -3,10 +3,13 @@ #![unstable(feature = "thread_local_internals", issue = "none")] #![cfg(target_thread_local)] +#![feature(global_co_alloc_plvec)] + +use core::alloc::PlVec; // Using a per-thread list avoids the problems in synchronizing global state. #[thread_local] -static mut DESTRUCTORS: Vec<(*mut u8, unsafe extern "C" fn(*mut u8))> = Vec::new(); +static mut DESTRUCTORS: PlVec<(*mut u8, unsafe extern "C" fn(*mut u8))> = Vec::new(); // Ensure this can never be inlined because otherwise this may break in dylibs. // See #44391. diff --git a/library/std/src/sys_common/thread_local_dtor.rs b/library/std/src/sys_common/thread_local_dtor.rs index 844946eda031..dcab8a89e7f5 100644 --- a/library/std/src/sys_common/thread_local_dtor.rs +++ b/library/std/src/sys_common/thread_local_dtor.rs @@ -15,6 +15,7 @@ use crate::ptr; use crate::sys_common::thread_local_key::StaticKey; +use alloc::vec::PlVec; pub unsafe fn register_dtor_fallback(t: *mut u8, dtor: unsafe extern "C" fn(*mut u8)) { // The fallback implementation uses a vanilla OS-based TLS key to track @@ -28,9 +29,9 @@ pub unsafe fn register_dtor_fallback(t: *mut u8, dtor: unsafe extern "C" fn(*mut // flagged for destruction. 
static DTORS: StaticKey = StaticKey::new(Some(run_dtors)); - type List = Vec<(*mut u8, unsafe extern "C" fn(*mut u8))>; + type List = PlVec<(*mut u8, unsafe extern "C" fn(*mut u8))>; if DTORS.get().is_null() { - let v: Box = Box::new(Vec::new()); + let v: Box = Box::new(Vec::new_co()); DTORS.set(Box::into_raw(v) as *mut u8); } let list: &mut List = &mut *(DTORS.get() as *mut List); diff --git a/library/test/src/lib.rs b/library/test/src/lib.rs index 69fb529d7f56..bf28cde1bf03 100644 --- a/library/test/src/lib.rs +++ b/library/test/src/lib.rs @@ -16,6 +16,9 @@ #![unstable(feature = "test", issue = "50297")] #![doc(test(attr(deny(warnings))))] +#![feature(allocator_api)] +#![feature(global_co_alloc)] +#![feature(global_co_alloc_meta)] #![feature(internal_output_capture)] #![feature(is_terminal)] #![feature(staged_api)] @@ -52,6 +55,7 @@ pub mod test { } use std::{ + alloc::Global, collections::VecDeque, env, io, io::prelude::Write, @@ -346,7 +350,8 @@ where }; let mut running_tests: TestMap = HashMap::default(); - let mut timeout_queue: VecDeque = VecDeque::new(); + // @FIXME See if we can remove `Global` generic param: + let mut timeout_queue: VecDeque = VecDeque::new(); fn get_timed_out_tests( running_tests: &TestMap, diff --git a/library/test/src/stats.rs b/library/test/src/stats.rs index b33b08012613..bc892745d75a 100644 --- a/library/test/src/stats.rs +++ b/library/test/src/stats.rs @@ -1,6 +1,7 @@ #![allow(missing_docs)] use std::mem; +use std::SHORT_TERM_VEC_CO_ALLOC_PREF; #[cfg(test)] mod tests; @@ -232,13 +233,13 @@ impl Stats for [f64] { } fn percentile(&self, pct: f64) -> f64 { - let mut tmp = self.to_vec(); + let mut tmp = self.to_vec_co::<{ SHORT_TERM_VEC_CO_ALLOC_PREF!() }>(); local_sort(&mut tmp); percentile_of_sorted(&tmp, pct) } fn quartiles(&self) -> (f64, f64, f64) { - let mut tmp = self.to_vec(); + let mut tmp = self.to_vec_co::<{ SHORT_TERM_VEC_CO_ALLOC_PREF!() }>(); local_sort(&mut tmp); let first = 25_f64; let a = percentile_of_sorted(&tmp, 
first); diff --git a/src/librustdoc/clean/types.rs b/src/librustdoc/clean/types.rs index fc1396e86f6b..092fc9b16701 100644 --- a/src/librustdoc/clean/types.rs +++ b/src/librustdoc/clean/types.rs @@ -1,6 +1,8 @@ +use std::alloc::GlobalCoAllocMeta; use std::cell::RefCell; use std::default::Default; use std::hash::Hash; +use std::mem; use std::path::PathBuf; use std::rc::Rc; use std::sync::Arc; @@ -2400,13 +2402,13 @@ mod size_asserts { // tidy-alphabetical-start static_assert_size!(Crate, 64); // frequently moved by-value static_assert_size!(DocFragment, 32); - static_assert_size!(GenericArg, 32); + static_assert_size!(GenericArg, 32 + mem::size_of::()); static_assert_size!(GenericArgs, 32); - static_assert_size!(GenericParamDef, 56); + static_assert_size!(GenericParamDef, 56 + mem::size_of::()); static_assert_size!(Generics, 16); static_assert_size!(Item, 56); - static_assert_size!(ItemKind, 64); + static_assert_size!(ItemKind, 64 + mem::size_of::()); static_assert_size!(PathSegment, 40); - static_assert_size!(Type, 32); + static_assert_size!(Type, 32 + mem::size_of::()); // tidy-alphabetical-end } diff --git a/src/librustdoc/html/render/context.rs b/src/librustdoc/html/render/context.rs index 5e4a595627b4..d201fa6dd9a4 100644 --- a/src/librustdoc/html/render/context.rs +++ b/src/librustdoc/html/render/context.rs @@ -1,6 +1,8 @@ +use std::alloc::GlobalCoAllocMeta; use std::cell::RefCell; use std::collections::BTreeMap; use std::io; +use std::mem; use std::path::{Path, PathBuf}; use std::rc::Rc; use std::sync::mpsc::{channel, Receiver}; @@ -75,7 +77,10 @@ pub(crate) struct Context<'tcx> { // `Context` is cloned a lot, so we don't want the size to grow unexpectedly. #[cfg(all(not(windows), target_arch = "x86_64", target_pointer_width = "64"))] -rustc_data_structures::static_assert_size!(Context<'_>, 160); +rustc_data_structures::static_assert_size!( + Context<'_>, + 160 + 2 * mem::size_of::() +); /// Shared mutable state used in [`Context`] and elsewhere. 
pub(crate) struct SharedContext<'tcx> { diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs index 910a7190b584..52c494de7537 100644 --- a/src/librustdoc/lib.rs +++ b/src/librustdoc/lib.rs @@ -7,6 +7,7 @@ #![feature(assert_matches)] #![feature(box_patterns)] #![feature(drain_filter)] +#![feature(global_co_alloc_meta)] #![feature(is_terminal)] #![feature(let_chains)] #![feature(test)]