From 33a20e6e290bd7f32bdb77cc242c3a665ae3cf34 Mon Sep 17 00:00:00 2001 From: Peter Kehl Date: Sun, 19 Feb 2023 15:23:39 -0800 Subject: [PATCH 1/9] CoAlloc: Allocator + Global API --- library/core/src/alloc/global.rs | 56 +++++++++++- library/core/src/alloc/mod.rs | 152 +++++++++++++++++++++++++++++++ library/core/src/lib.rs | 1 + library/std/src/alloc.rs | 3 + 4 files changed, 211 insertions(+), 1 deletion(-) diff --git a/library/core/src/alloc/global.rs b/library/core/src/alloc/global.rs index 18da70451f299..26308e63a7bad 100644 --- a/library/core/src/alloc/global.rs +++ b/library/core/src/alloc/global.rs @@ -1,6 +1,15 @@ use crate::alloc::Layout; use crate::cmp; use crate::ptr; +use crate::alloc::GlobalCoAllocMeta; + +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +#[allow(missing_debug_implementations)] +/// Used for parameters and results (to/from `GlobalCoAllocator`'s functions, where applicable). +pub struct RawAndMeta { + pub ptr: *mut u8, + pub meta: GlobalCoAllocMeta, +} /// A memory allocator that can be registered as the standard library’s default /// through the `#[global_allocator]` attribute. @@ -156,6 +165,9 @@ pub unsafe trait GlobalAlloc { #[stable(feature = "global_alloc", since = "1.28.0")] unsafe fn alloc(&self, layout: Layout) -> *mut u8; + #[unstable(feature = "global_co_alloc", issue = "none")] + unsafe fn co_alloc(&self, _layout: Layout, mut _result: &mut RawAndMeta) {panic!("TODO")} + /// Deallocate the block of memory at the given `ptr` pointer with the given `layout`. /// /// # Safety @@ -171,6 +183,9 @@ pub unsafe trait GlobalAlloc { #[stable(feature = "global_alloc", since = "1.28.0")] unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout); + #[unstable(feature = "global_co_alloc", issue = "none")] + unsafe fn co_dealloc(&self, _ptr_and_meta: RawAndMeta, _layout: Layout) {panic!("TODO")} + /// Behaves like `alloc`, but also ensures that the contents /// are set to zero before being returned. /// @@ -198,11 +213,23 @@ pub unsafe trait GlobalAlloc { if !ptr.is_null() { // SAFETY: as allocation succeeded, the region from `ptr` // of size `size` is guaranteed to be valid for writes. - unsafe { ptr::write_bytes(ptr, 0, size) }; + unsafe { ptr::write_bytes(ptr, 0u8, size) }; } ptr } + #[unstable(feature = "global_co_alloc", issue = "none")] + unsafe fn co_alloc_zeroed(&self, layout: Layout, mut result: &mut RawAndMeta) { + let size = layout.size(); + // SAFETY: the safety contract for `alloc` must be upheld by the caller. + unsafe { self.co_alloc(layout, &mut result) }; + if !result.ptr.is_null() { + // SAFETY: as allocation succeeded, the region from `ptr_and_meta.ptr` of size `size` is + // guaranteed to be valid for writes. + unsafe { ptr::write_bytes(result.ptr, 0u8, size) }; + } + } + /// Shrink or grow a block of memory to the given `new_size` in bytes. /// The block is described by the given `ptr` pointer and `layout`. /// @@ -275,4 +302,31 @@ pub unsafe trait GlobalAlloc { } new_ptr } + + #[unstable(feature = "global_co_alloc", issue = "none")] + unsafe fn co_realloc( + &self, + ptr_and_meta: RawAndMeta, + layout: Layout, + new_size: usize, + mut result: &mut RawAndMeta + ) { + // SAFETY: the caller must ensure that the `new_size` does not overflow. + // `layout.align()` comes from a `Layout` and is thus guaranteed to be valid. + let new_layout = unsafe { Layout::from_size_align_unchecked(new_size, layout.align()) }; + // SAFETY: the caller must ensure that `new_layout` is greater than zero. 
+ unsafe { self.co_alloc(new_layout, &mut result) }; + if !result.ptr.is_null() { + // SAFETY: the previously allocated block cannot overlap the newly allocated block. + // The safety contract for `dealloc` must be upheld by the caller. + unsafe { + ptr::copy_nonoverlapping( + ptr_and_meta.ptr, + result.ptr, + cmp::min(layout.size(), new_size), + ); + self.co_dealloc(ptr_and_meta, layout); + } + } + } } diff --git a/library/core/src/alloc/mod.rs b/library/core/src/alloc/mod.rs index a6082455fac8d..23bfdc732557e 100644 --- a/library/core/src/alloc/mod.rs +++ b/library/core/src/alloc/mod.rs @@ -25,6 +25,16 @@ use crate::error::Error; use crate::fmt; use crate::ptr::{self, NonNull}; +// @TODO Make this target-specific +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +#[allow(missing_debug_implementations)] +pub struct GlobalCoAllocMeta { + //pub one: usize, + /*pub two: usize, + pub three: usize, + pub four: usize,*/ +} + /// The `AllocError` error indicates an allocation failure /// that may be due to resource exhaustion or to /// something wrong when combining the given input arguments with this @@ -48,6 +58,30 @@ impl fmt::Display for AllocError { } } +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +#[allow(missing_debug_implementations)] +pub struct PtrAndMeta { + pub ptr: NonNull, + pub meta: GlobalCoAllocMeta, +} + +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +#[allow(missing_debug_implementations)] +/// Used for results (from `CoAllocator`'s functions, where applicable). +pub struct SliceAndMeta { + pub slice: NonNull<[u8]>, + pub meta: GlobalCoAllocMeta, +} + +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +#[allow(missing_debug_implementations)] +pub type SliceAndMetaResult = Result; + +#[unstable(feature = "global_co_alloc", issue = "none")] +pub const fn co_alloc_metadata_num_slots() -> usize { + if A::IS_CO_ALLOCATOR { 1 } else { 0 } +} + /// An implementation of `Allocator` can allocate, grow, shrink, and deallocate arbitrary blocks of /// data described via [`Layout`][]. /// @@ -107,6 +141,13 @@ impl fmt::Display for AllocError { #[unstable(feature = "allocator_api", issue = "32838")] #[const_trait] pub unsafe trait Allocator { + //const fn is_co_allocator() -> bool {false} + // Can't have: const type Xyz; + /// If this is any type with non-zero size, then the actual `Allocator` implementation supports cooperative functions (`co_*`) as first class citizens. + //type IsCoAllocator = (); + // It applies to the global (default) allocator only. And/or System allocator?! TODO + const IS_CO_ALLOCATOR: bool = true; + /// Attempts to allocate a block of memory. /// /// On success, returns a [`NonNull<[u8]>`][NonNull] meeting the size and alignment guarantees of `layout`. @@ -129,6 +170,8 @@ pub unsafe trait Allocator { /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html fn allocate(&self, layout: Layout) -> Result, AllocError>; + fn co_allocate(&self, _layout: Layout, _result: &mut SliceAndMetaResult) {panic!("TODO")} + /// Behaves like `allocate`, but also ensures that the returned memory is zero-initialized. 
/// /// # Errors @@ -151,6 +194,19 @@ pub unsafe trait Allocator { Ok(ptr) } + fn co_allocate_zeroed(&self, layout: Layout, mut result: &mut SliceAndMetaResult) { + self.co_allocate(layout, &mut result); + if let Ok(SliceAndMeta{slice, ..}) = result { + // SAFETY: `alloc` returns a valid memory block + unsafe { + slice + .as_non_null_ptr() + .as_ptr() + .write_bytes(0, slice.len()) + } + } + } + /// Deallocates the memory referenced by `ptr`. /// /// # Safety @@ -162,6 +218,8 @@ pub unsafe trait Allocator { /// [*fit*]: #memory-fitting unsafe fn deallocate(&self, ptr: NonNull, layout: Layout); + unsafe fn co_deallocate(&self, _ptr_and_meta: PtrAndMeta, _layout: Layout) {panic!("TODO")} + /// Attempts to extend the memory block. /// /// Returns a new [`NonNull<[u8]>`][NonNull] containing a pointer and the actual size of the allocated @@ -226,6 +284,37 @@ pub unsafe trait Allocator { Ok(new_ptr) } + unsafe fn co_grow( + &self, + ptr_and_meta: PtrAndMeta, + old_layout: Layout, + new_layout: Layout, + mut result: &mut SliceAndMetaResult + ) { + debug_assert!( + new_layout.size() >= old_layout.size(), + "`new_layout.size()` must be greater than or equal to `old_layout.size()`" + ); + + self.co_allocate(new_layout, &mut result); + + if let Ok(SliceAndMeta {slice, ..}) = result { + // SAFETY: because `new_layout.size()` must be greater than or equal to + // `old_layout.size()`, both the old and new memory allocation are valid for reads and + // writes for `old_layout.size()` bytes. Also, because the old allocation wasn't yet + // deallocated, it cannot overlap `new_slice_and_meta.slice`. Thus, the call to `copy_nonoverlapping` is + // safe. The safety contract for `dealloc` must be upheld by the caller. + unsafe { + ptr::copy_nonoverlapping( + ptr_and_meta.ptr.as_ptr(), + slice.as_mut_ptr(), + old_layout.size(), + ); + self.co_deallocate(ptr_and_meta, old_layout); + } + } + } + /// Behaves like `grow`, but also ensures that the new contents are set to zero before being /// returned. /// @@ -289,6 +378,37 @@ pub unsafe trait Allocator { Ok(new_ptr) } + unsafe fn co_grow_zeroed( + &self, + ptr_and_meta: PtrAndMeta, + old_layout: Layout, + new_layout: Layout, + mut result: &mut SliceAndMetaResult + ) { + debug_assert!( + new_layout.size() >= old_layout.size(), + "`new_layout.size()` must be greater than or equal to `old_layout.size()`" + ); + + self.co_allocate_zeroed(new_layout, &mut result); + + if let Ok(SliceAndMeta{ slice, ..}) = result { + // SAFETY: because `new_layout.size()` must be greater than or equal to + // `old_layout.size()`, both the old and new memory allocation are valid for reads and + // writes for `old_layout.size()` bytes. Also, because the old allocation wasn't yet + // deallocated, it cannot overlap `new_slice_and_meta.slice`. Thus, the call to `copy_nonoverlapping` is + // safe. The safety contract for `dealloc` must be upheld by the caller. + unsafe { + ptr::copy_nonoverlapping( + ptr_and_meta.ptr.as_ptr(), + slice.as_mut_ptr(), + old_layout.size(), + ); + self.co_deallocate(ptr_and_meta, old_layout); + } + } + } + /// Attempts to shrink the memory block. 
/// /// Returns a new [`NonNull<[u8]>`][NonNull] containing a pointer and the actual size of the allocated @@ -353,6 +473,37 @@ pub unsafe trait Allocator { Ok(new_ptr) } + unsafe fn co_shrink( + &self, + ptr_and_meta: PtrAndMeta, + old_layout: Layout, + new_layout: Layout, + mut result: &mut SliceAndMetaResult + ) { + debug_assert!( + new_layout.size() <= old_layout.size(), + "`new_layout.size()` must be smaller than or equal to `old_layout.size()`" + ); + + self.co_allocate(new_layout, &mut result); + + if let Ok(SliceAndMeta{ slice, ..}) = result { + // SAFETY: because `new_layout.size()` must be lower than or equal to + // `old_layout.size()`, both the old and new memory allocation are valid for reads and + // writes for `new_layout.size()` bytes. Also, because the old allocation wasn't yet + // deallocated, it cannot overlap `new_slice_and_meta.slice`. Thus, the call to `copy_nonoverlapping` is + // safe. The safety contract for `dealloc` must be upheld by the caller. + unsafe { + ptr::copy_nonoverlapping( + ptr_and_meta.ptr.as_ptr(), + slice.as_mut_ptr(), + new_layout.size(), + ); + self.co_deallocate(ptr_and_meta, old_layout); + } + } + } + /// Creates a "by reference" adapter for this instance of `Allocator`. /// /// The returned adapter also implements `Allocator` and will simply borrow this. @@ -365,6 +516,7 @@ pub unsafe trait Allocator { } } +// @TODO #[unstable(feature = "allocator_api", issue = "32838")] unsafe impl Allocator for &A where diff --git a/library/core/src/lib.rs b/library/core/src/lib.rs index dc0702c467a4e..ba4a2ca02d844 100644 --- a/library/core/src/lib.rs +++ b/library/core/src/lib.rs @@ -98,6 +98,7 @@ #![cfg_attr(not(bootstrap), warn(multiple_supertrait_upcastable))] // // Library features: +#![feature(associated_type_defaults)] #![feature(const_align_offset)] #![feature(const_align_of_val)] #![feature(const_align_of_val_raw)] diff --git a/library/std/src/alloc.rs b/library/std/src/alloc.rs index c5a5991cc81c4..bee6edbc40731 100644 --- a/library/std/src/alloc.rs +++ b/library/std/src/alloc.rs @@ -197,10 +197,13 @@ impl System { } } +// @TODO // The Allocator impl checks the layout size to be non-zero and forwards to the GlobalAlloc impl, // which is in `std::sys::*::alloc`. 
#[unstable(feature = "allocator_api", issue = "32838")] unsafe impl Allocator for System { + const IS_CO_ALLOCATOR: bool = false; + #[inline] fn allocate(&self, layout: Layout) -> Result, AllocError> { self.alloc_impl(layout, false) From 187a2cc2d251483b96909fad1f72752635756b2d Mon Sep 17 00:00:00 2001 From: Peter Kehl Date: Sat, 21 Jan 2023 15:39:15 -0800 Subject: [PATCH 2/9] CoAlloc: RawVec + compiler 'tests' --- compiler/rustc_ast/src/ast.rs | 24 +++++++++---------- compiler/rustc_ast/src/lib.rs | 1 + compiler/rustc_middle/src/lib.rs | 1 + compiler/rustc_middle/src/mir/mod.rs | 3 ++- compiler/rustc_middle/src/mir/syntax.rs | 4 +++- compiler/rustc_parse/src/lib.rs | 1 + .../rustc_parse/src/parser/attr_wrapper.rs | 4 +++- compiler/rustc_parse/src/parser/mod.rs | 3 ++- compiler/rustc_trait_selection/src/lib.rs | 1 + .../src/traits/fulfill.rs | 5 +++- library/alloc/src/lib.rs | 2 ++ library/alloc/src/raw_vec.rs | 19 ++++++++++++--- src/librustdoc/clean/types.rs | 10 ++++---- src/librustdoc/html/render/context.rs | 4 +++- src/librustdoc/lib.rs | 1 + 15 files changed, 58 insertions(+), 25 deletions(-) diff --git a/compiler/rustc_ast/src/ast.rs b/compiler/rustc_ast/src/ast.rs index f2258fecfeafc..aa3bec7297785 100644 --- a/compiler/rustc_ast/src/ast.rs +++ b/compiler/rustc_ast/src/ast.rs @@ -26,6 +26,7 @@ pub use UnsafeSource::*; use crate::ptr::P; use crate::token::{self, CommentKind, Delimiter}; use crate::tokenstream::{DelimSpan, LazyAttrTokenStream, TokenStream}; +use core::alloc::GlobalCoAllocMeta; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_data_structures::sync::Lrc; @@ -37,7 +38,6 @@ use rustc_span::{Span, DUMMY_SP}; use std::fmt; use std::mem; use thin_vec::{thin_vec, ThinVec}; - /// A "Label" is an identifier of some point in sources, /// e.g. 
in the following code: /// @@ -3112,26 +3112,26 @@ mod size_asserts { static_assert_size!(AssocItem, 104); static_assert_size!(AssocItemKind, 32); static_assert_size!(Attribute, 32); - static_assert_size!(Block, 48); - static_assert_size!(Expr, 72); - static_assert_size!(ExprKind, 40); - static_assert_size!(Fn, 184); + static_assert_size!(Block, 48 + mem::size_of::()); + static_assert_size!(Expr, 72 + mem::size_of::()); + static_assert_size!(ExprKind, 40 + mem::size_of::()); + static_assert_size!(Fn, 184 + 2 * mem::size_of::()); static_assert_size!(ForeignItem, 96); static_assert_size!(ForeignItemKind, 24); static_assert_size!(GenericArg, 24); - static_assert_size!(GenericBound, 72); - static_assert_size!(Generics, 72); - static_assert_size!(Impl, 184); - static_assert_size!(Item, 184); - static_assert_size!(ItemKind, 112); + static_assert_size!(GenericBound, 72 + mem::size_of::()); + static_assert_size!(Generics, 72 + 2 * mem::size_of::()); + static_assert_size!(Impl, 184 + 3 * mem::size_of::()); + static_assert_size!(Item, 184 + 3 * mem::size_of::()); + static_assert_size!(ItemKind, 112 + 3 * mem::size_of::()); static_assert_size!(LitKind, 24); static_assert_size!(Local, 72); static_assert_size!(MetaItemLit, 40); static_assert_size!(Param, 40); - static_assert_size!(Pat, 88); + static_assert_size!(Pat, 88 + mem::size_of::()); static_assert_size!(Path, 24); static_assert_size!(PathSegment, 24); - static_assert_size!(PatKind, 64); + static_assert_size!(PatKind, 64 + mem::size_of::()); static_assert_size!(Stmt, 32); static_assert_size!(StmtKind, 16); static_assert_size!(Ty, 64); diff --git a/compiler/rustc_ast/src/lib.rs b/compiler/rustc_ast/src/lib.rs index 23c32fa96ca44..225ad76802188 100644 --- a/compiler/rustc_ast/src/lib.rs +++ b/compiler/rustc_ast/src/lib.rs @@ -12,6 +12,7 @@ #![feature(box_patterns)] #![feature(const_default_impls)] #![feature(const_trait_impl)] +#![feature(global_co_alloc_meta)] #![feature(if_let_guard)] #![feature(let_chains)] #![feature(min_specialization)] diff --git a/compiler/rustc_middle/src/lib.rs b/compiler/rustc_middle/src/lib.rs index e6cd38c0f1584..14c059d9db9a6 100644 --- a/compiler/rustc_middle/src/lib.rs +++ b/compiler/rustc_middle/src/lib.rs @@ -32,6 +32,7 @@ #![feature(exhaustive_patterns)] #![feature(generators)] #![feature(get_mut_unchecked)] +#![feature(global_co_alloc_meta)] #![feature(if_let_guard)] #![feature(iter_from_generator)] #![feature(local_key_cell_methods)] diff --git a/compiler/rustc_middle/src/mir/mod.rs b/compiler/rustc_middle/src/mir/mod.rs index 46184cddd51f5..cc02a3eb97158 100644 --- a/compiler/rustc_middle/src/mir/mod.rs +++ b/compiler/rustc_middle/src/mir/mod.rs @@ -2,6 +2,7 @@ //! //! [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/mir/index.html +use core::alloc::GlobalCoAllocMeta; use crate::mir::interpret::{ AllocRange, ConstAllocation, ConstValue, ErrorHandled, GlobalAlloc, LitToConstInput, Scalar, }; @@ -3077,7 +3078,7 @@ mod size_asserts { use super::*; use rustc_data_structures::static_assert_size; // tidy-alphabetical-start - static_assert_size!(BasicBlockData<'_>, 144); + static_assert_size!(BasicBlockData<'_>, 144 + mem::size_of::()); static_assert_size!(LocalDecl<'_>, 56); static_assert_size!(Statement<'_>, 32); static_assert_size!(StatementKind<'_>, 16); diff --git a/compiler/rustc_middle/src/mir/syntax.rs b/compiler/rustc_middle/src/mir/syntax.rs index ae09562a85e98..9e87f4bb61892 100644 --- a/compiler/rustc_middle/src/mir/syntax.rs +++ b/compiler/rustc_middle/src/mir/syntax.rs @@ -3,6 +3,8 @@ //! 
This is in a dedicated file so that changes to this file can be reviewed more carefully. //! The intention is that this file only contains datatype declarations, no code. +use core::alloc::GlobalCoAllocMeta; +use core::mem; use super::{BasicBlock, Constant, Field, Local, SwitchTargets, UserTypeProjection}; use crate::mir::coverage::{CodeRegion, CoverageKind}; @@ -1284,6 +1286,6 @@ mod size_asserts { static_assert_size!(Operand<'_>, 24); static_assert_size!(Place<'_>, 16); static_assert_size!(PlaceElem<'_>, 24); - static_assert_size!(Rvalue<'_>, 40); + static_assert_size!(Rvalue<'_>, 40 + mem::size_of::()); // tidy-alphabetical-end } diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs index b49a01d75ed54..1ba1049b0bc80 100644 --- a/compiler/rustc_parse/src/lib.rs +++ b/compiler/rustc_parse/src/lib.rs @@ -2,6 +2,7 @@ #![feature(array_windows)] #![feature(box_patterns)] +#![feature(global_co_alloc_meta)] #![feature(if_let_guard)] #![feature(iter_intersperse)] #![feature(let_chains)] diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs index dbd3b76786f42..15833cdf268d7 100644 --- a/compiler/rustc_parse/src/parser/attr_wrapper.rs +++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs @@ -1,3 +1,5 @@ +use core::alloc::GlobalCoAllocMeta; +use core::mem; use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor, TrailingToken}; use rustc_ast::token::{self, Delimiter, Token, TokenKind}; use rustc_ast::tokenstream::{AttrTokenStream, AttributesData, ToAttrTokenStream}; @@ -469,6 +471,6 @@ mod size_asserts { use rustc_data_structures::static_assert_size; // tidy-alphabetical-start static_assert_size!(AttrWrapper, 16); - static_assert_size!(LazyAttrTokenStreamImpl, 120); + static_assert_size!(LazyAttrTokenStreamImpl, 120 + mem::size_of::()); // tidy-alphabetical-end } diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index a74f408d77412..f058907f188ea 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -11,6 +11,7 @@ mod stmt; mod ty; use crate::lexer::UnmatchedBrace; +use core::alloc::GlobalCoAllocMeta; pub use attr_wrapper::AttrWrapper; pub use diagnostics::AttemptLocalParseRecovery; pub(crate) use item::FnParseMode; @@ -167,7 +168,7 @@ pub struct Parser<'a> { // This type is used a lot, e.g. it's cloned when matching many declarative macro rules with nonterminals. Make sure // it doesn't unintentionally get bigger. #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))] -rustc_data_structures::static_assert_size!(Parser<'_>, 312); +rustc_data_structures::static_assert_size!(Parser<'_>, 312 + 4 * mem::size_of::()); /// Stores span information about a closure. 
#[derive(Clone)] diff --git a/compiler/rustc_trait_selection/src/lib.rs b/compiler/rustc_trait_selection/src/lib.rs index 6fa0941036390..d35cf93347b6e 100644 --- a/compiler/rustc_trait_selection/src/lib.rs +++ b/compiler/rustc_trait_selection/src/lib.rs @@ -15,6 +15,7 @@ #![feature(box_patterns)] #![feature(control_flow_enum)] #![feature(drain_filter)] +#![feature(global_co_alloc_meta)] #![feature(hash_drain_filter)] #![feature(let_chains)] #![feature(if_let_guard)] diff --git a/compiler/rustc_trait_selection/src/traits/fulfill.rs b/compiler/rustc_trait_selection/src/traits/fulfill.rs index deeed930e50e2..3f85de7d7e446 100644 --- a/compiler/rustc_trait_selection/src/traits/fulfill.rs +++ b/compiler/rustc_trait_selection/src/traits/fulfill.rs @@ -1,4 +1,7 @@ use crate::infer::{InferCtxt, TyOrConstInferVar}; +use core::alloc::GlobalCoAllocMeta; +use core::mem; +// use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::obligation_forest::ProcessResult; use rustc_data_structures::obligation_forest::{Error, ForestObligation, Outcome}; use rustc_data_structures::obligation_forest::{ObligationForest, ObligationProcessor}; @@ -77,7 +80,7 @@ pub struct PendingPredicateObligation<'tcx> { // `PendingPredicateObligation` is used a lot. Make sure it doesn't unintentionally get bigger. #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))] -static_assert_size!(PendingPredicateObligation<'_>, 72); +static_assert_size!(PendingPredicateObligation<'_>, 72 + mem::size_of::()); impl<'a, 'tcx> FulfillmentContext<'tcx> { /// Creates a new fulfillment context. diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs index e9cc3875f683b..fbbed4f636315 100644 --- a/library/alloc/src/lib.rs +++ b/library/alloc/src/lib.rs @@ -123,6 +123,7 @@ #![feature(extend_one)] #![feature(fmt_internals)] #![feature(fn_traits)] +#![feature(global_co_alloc_meta)] #![feature(hasher_prefixfree_extras)] #![feature(inline_const)] #![feature(inplace_iteration)] @@ -178,6 +179,7 @@ #![feature(exclusive_range_pattern)] #![feature(fundamental)] #![cfg_attr(not(test), feature(generator_trait))] +#![feature(global_co_alloc)] #![feature(hashmap_internals)] #![feature(lang_items)] #![feature(min_specialization)] diff --git a/library/alloc/src/raw_vec.rs b/library/alloc/src/raw_vec.rs index 3751f2a245456..af2813abd1ddb 100644 --- a/library/alloc/src/raw_vec.rs +++ b/library/alloc/src/raw_vec.rs @@ -1,6 +1,6 @@ #![unstable(feature = "raw_vec_internals", reason = "unstable const warnings", issue = "none")] -use core::alloc::LayoutError; +use core::alloc::{LayoutError, GlobalCoAllocMeta}; use core::cmp; use core::intrinsics; use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties}; @@ -53,6 +53,8 @@ pub(crate) struct RawVec { ptr: Unique, cap: usize, alloc: A, + #[allow(dead_code)] + pub(crate) meta: GlobalCoAllocMeta, } impl RawVec { @@ -120,7 +122,7 @@ impl RawVec { /// the returned `RawVec`. pub const fn new_in(alloc: A) -> Self { // `cap: 0` means "unallocated". zero-sized types are ignored. - Self { ptr: Unique::dangling(), cap: 0, alloc } + Self { ptr: Unique::dangling(), cap: 0, alloc, meta: GlobalCoAllocMeta {/*one: 1*/ /* , two: 2, three: 3, four: 4*/} } } /// Like `with_capacity`, but parameterized over the choice of @@ -197,6 +199,7 @@ impl RawVec { ptr: unsafe { Unique::new_unchecked(ptr.cast().as_ptr()) }, cap: capacity, alloc, + meta: GlobalCoAllocMeta {/*one: 1*/ /*, two: 2, three: 3, four: 4*/} } } } @@ -213,7 +216,7 @@ impl RawVec { /// guaranteed. 
#[inline] pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, alloc: A) -> Self { - Self { ptr: unsafe { Unique::new_unchecked(ptr) }, cap: capacity, alloc } + Self { ptr: unsafe { Unique::new_unchecked(ptr) }, cap: capacity, alloc, meta: GlobalCoAllocMeta {/*one: 1*/ /*, two: 2, three: 3, four: 4*/} } } /// Gets a raw pointer to the start of the allocation. Note that this is @@ -480,8 +483,18 @@ where } unsafe impl<#[may_dangle] T, A: Allocator> Drop for RawVec { + /// Frees the memory owned by the `RawVec` *without* trying to drop its contents. + default fn drop(&mut self) { + if let Some((ptr, layout)) = self.current_memory() { + unsafe { self.alloc.deallocate(ptr, layout) } + } + } +} + +unsafe impl<#[may_dangle] T> Drop for RawVec { /// Frees the memory owned by the `RawVec` *without* trying to drop its contents. fn drop(&mut self) { + // @TODO if let Some((ptr, layout)) = self.current_memory() { unsafe { self.alloc.deallocate(ptr, layout) } } diff --git a/src/librustdoc/clean/types.rs b/src/librustdoc/clean/types.rs index fc1396e86f6b1..092fc9b167012 100644 --- a/src/librustdoc/clean/types.rs +++ b/src/librustdoc/clean/types.rs @@ -1,6 +1,8 @@ +use std::alloc::GlobalCoAllocMeta; use std::cell::RefCell; use std::default::Default; use std::hash::Hash; +use std::mem; use std::path::PathBuf; use std::rc::Rc; use std::sync::Arc; @@ -2400,13 +2402,13 @@ mod size_asserts { // tidy-alphabetical-start static_assert_size!(Crate, 64); // frequently moved by-value static_assert_size!(DocFragment, 32); - static_assert_size!(GenericArg, 32); + static_assert_size!(GenericArg, 32 + mem::size_of::()); static_assert_size!(GenericArgs, 32); - static_assert_size!(GenericParamDef, 56); + static_assert_size!(GenericParamDef, 56 + mem::size_of::()); static_assert_size!(Generics, 16); static_assert_size!(Item, 56); - static_assert_size!(ItemKind, 64); + static_assert_size!(ItemKind, 64 + mem::size_of::()); static_assert_size!(PathSegment, 40); - static_assert_size!(Type, 32); + static_assert_size!(Type, 32 + mem::size_of::()); // tidy-alphabetical-end } diff --git a/src/librustdoc/html/render/context.rs b/src/librustdoc/html/render/context.rs index 5e4a595627b4a..504938fef733b 100644 --- a/src/librustdoc/html/render/context.rs +++ b/src/librustdoc/html/render/context.rs @@ -1,6 +1,8 @@ +use std::alloc::GlobalCoAllocMeta; use std::cell::RefCell; use std::collections::BTreeMap; use std::io; +use std::mem; use std::path::{Path, PathBuf}; use std::rc::Rc; use std::sync::mpsc::{channel, Receiver}; @@ -75,7 +77,7 @@ pub(crate) struct Context<'tcx> { // `Context` is cloned a lot, so we don't want the size to grow unexpectedly. #[cfg(all(not(windows), target_arch = "x86_64", target_pointer_width = "64"))] -rustc_data_structures::static_assert_size!(Context<'_>, 160); +rustc_data_structures::static_assert_size!(Context<'_>, 160 + 2 * mem::size_of::()); /// Shared mutable state used in [`Context`] and elsewhere. 
pub(crate) struct SharedContext<'tcx> { diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs index 910a7190b5842..52c494de75372 100644 --- a/src/librustdoc/lib.rs +++ b/src/librustdoc/lib.rs @@ -7,6 +7,7 @@ #![feature(assert_matches)] #![feature(box_patterns)] #![feature(drain_filter)] +#![feature(global_co_alloc_meta)] #![feature(is_terminal)] #![feature(let_chains)] #![feature(test)] From 044ef5576e7868ded2ea41950b298ce9cb16c347 Mon Sep 17 00:00:00 2001 From: Peter Kehl Date: Sat, 21 Jan 2023 15:58:34 -0800 Subject: [PATCH 3/9] CoAlloc: Vec, VecDeque, RawVec; related str, slice, Box, BinaryHeap, proc_macro... --- compiler/rustc_middle/src/mir/mod.rs | 2 +- compiler/rustc_middle/src/mir/syntax.rs | 2 +- .../rustc_parse/src/parser/attr_wrapper.rs | 2 +- compiler/rustc_parse/src/parser/mod.rs | 2 +- library/alloc/src/boxed.rs | 64 +++- .../alloc/src/collections/binary_heap/mod.rs | 36 +- .../alloc/src/collections/vec_deque/drain.rs | 77 +++- .../src/collections/vec_deque/into_iter.rs | 53 ++- .../alloc/src/collections/vec_deque/macros.rs | 3 +- .../alloc/src/collections/vec_deque/mod.rs | 214 ++++++++--- .../src/collections/vec_deque/spec_extend.rs | 32 +- .../collections/vec_deque/spec_from_iter.rs | 23 +- library/alloc/src/ffi/c_str.rs | 5 +- library/alloc/src/lib.rs | 12 +- library/alloc/src/macros.rs | 2 +- library/alloc/src/raw_vec.rs | 84 +++-- library/alloc/src/rc.rs | 11 +- library/alloc/src/slice.rs | 84 ++++- library/alloc/src/str.rs | 8 +- library/alloc/src/vec/drain.rs | 88 ++++- library/alloc/src/vec/drain_filter.rs | 27 +- library/alloc/src/vec/in_place_collect.rs | 6 +- library/alloc/src/vec/in_place_drop.rs | 4 +- library/alloc/src/vec/into_iter.rs | 140 +++++-- library/alloc/src/vec/mod.rs | 345 ++++++++++++++---- library/alloc/src/vec/partial_eq.rs | 29 +- library/alloc/src/vec/spec_extend.rs | 21 +- library/alloc/src/vec/spec_from_elem.rs | 45 ++- library/alloc/src/vec/spec_from_iter.rs | 14 +- .../alloc/src/vec/spec_from_iter_nested.rs | 14 +- library/alloc/src/vec/splice.rs | 38 +- library/alloc/tests/boxed.rs | 1 + library/core/src/alloc/global.rs | 12 +- library/core/src/alloc/mod.rs | 67 +++- library/proc_macro/src/bridge/mod.rs | 4 +- library/proc_macro/src/bridge/rpc.rs | 6 +- library/proc_macro/src/diagnostic.rs | 2 +- library/std/src/alloc.rs | 2 +- library/std/src/ffi/os_str.rs | 1 + library/std/src/io/cursor.rs | 35 +- library/std/src/io/impls.rs | 17 +- library/std/src/lib.rs | 4 + .../std/src/sys/hermit/thread_local_dtor.rs | 4 +- .../std/src/sys/solid/thread_local_dtor.rs | 5 +- library/std/src/sys/unix/thread_local_dtor.rs | 11 +- library/std/src/sys/windows/mod.rs | 1 + .../std/src/sys/windows/thread_local_dtor.rs | 5 +- .../std/src/sys_common/thread_local_dtor.rs | 3 +- src/librustdoc/html/render/context.rs | 5 +- 49 files changed, 1299 insertions(+), 373 deletions(-) diff --git a/compiler/rustc_middle/src/mir/mod.rs b/compiler/rustc_middle/src/mir/mod.rs index cc02a3eb97158..c09fd09f1ae86 100644 --- a/compiler/rustc_middle/src/mir/mod.rs +++ b/compiler/rustc_middle/src/mir/mod.rs @@ -2,7 +2,6 @@ //! //! 
[rustc dev guide]: https://rustc-dev-guide.rust-lang.org/mir/index.html -use core::alloc::GlobalCoAllocMeta; use crate::mir::interpret::{ AllocRange, ConstAllocation, ConstValue, ErrorHandled, GlobalAlloc, LitToConstInput, Scalar, }; @@ -14,6 +13,7 @@ use crate::ty::visit::{TypeVisitable, TypeVisitor}; use crate::ty::{self, ir, DefIdTree, List, Ty, TyCtxt}; use crate::ty::{AdtDef, InstanceDef, ScalarInt, UserTypeAnnotationIndex}; use crate::ty::{GenericArg, InternalSubsts, SubstsRef}; +use core::alloc::GlobalCoAllocMeta; use rustc_data_structures::captures::Captures; use rustc_errors::ErrorGuaranteed; diff --git a/compiler/rustc_middle/src/mir/syntax.rs b/compiler/rustc_middle/src/mir/syntax.rs index 9e87f4bb61892..267d3dfec41a3 100644 --- a/compiler/rustc_middle/src/mir/syntax.rs +++ b/compiler/rustc_middle/src/mir/syntax.rs @@ -3,9 +3,9 @@ //! This is in a dedicated file so that changes to this file can be reviewed more carefully. //! The intention is that this file only contains datatype declarations, no code. +use super::{BasicBlock, Constant, Field, Local, SwitchTargets, UserTypeProjection}; use core::alloc::GlobalCoAllocMeta; use core::mem; -use super::{BasicBlock, Constant, Field, Local, SwitchTargets, UserTypeProjection}; use crate::mir::coverage::{CodeRegion, CoverageKind}; use crate::traits::Reveal; diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs index 15833cdf268d7..3d541cad22cf5 100644 --- a/compiler/rustc_parse/src/parser/attr_wrapper.rs +++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs @@ -1,6 +1,6 @@ +use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor, TrailingToken}; use core::alloc::GlobalCoAllocMeta; use core::mem; -use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor, TrailingToken}; use rustc_ast::token::{self, Delimiter, Token, TokenKind}; use rustc_ast::tokenstream::{AttrTokenStream, AttributesData, ToAttrTokenStream}; use rustc_ast::tokenstream::{AttrTokenTree, DelimSpan, LazyAttrTokenStream, Spacing}; diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index f058907f188ea..7fb4e60f8a14f 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -11,8 +11,8 @@ mod stmt; mod ty; use crate::lexer::UnmatchedBrace; -use core::alloc::GlobalCoAllocMeta; pub use attr_wrapper::AttrWrapper; +use core::alloc::GlobalCoAllocMeta; pub use diagnostics::AttemptLocalParseRecovery; pub(crate) use item::FnParseMode; pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma}; diff --git a/library/alloc/src/boxed.rs b/library/alloc/src/boxed.rs index a563b2587236c..1daba5600696a 100644 --- a/library/alloc/src/boxed.rs +++ b/library/alloc/src/boxed.rs @@ -641,7 +641,8 @@ impl Box<[T]> { #[unstable(feature = "new_uninit", issue = "63291")] #[must_use] pub fn new_uninit_slice(len: usize) -> Box<[mem::MaybeUninit]> { - unsafe { RawVec::with_capacity(len).into_box(len) } + // false = no need for co-alloc metadata, since it would get lost once converted to Box. 
+ unsafe { RawVec::::with_capacity(len).into_box(len) } } /// Constructs a new boxed slice with uninitialized contents, with the memory @@ -666,7 +667,8 @@ impl Box<[T]> { #[unstable(feature = "new_uninit", issue = "63291")] #[must_use] pub fn new_zeroed_slice(len: usize) -> Box<[mem::MaybeUninit]> { - unsafe { RawVec::with_capacity_zeroed(len).into_box(len) } + // false = no need for co-alloc metadata, since it would get lost once converted to Box. + unsafe { RawVec::::with_capacity_zeroed(len).into_box(len) } } /// Constructs a new boxed slice with uninitialized contents. Returns an error if @@ -698,7 +700,12 @@ impl Box<[T]> { Err(_) => return Err(AllocError), }; let ptr = Global.allocate(layout)?; - Ok(RawVec::from_raw_parts_in(ptr.as_mut_ptr() as *mut _, len, Global).into_box(len)) + Ok(RawVec::::from_raw_parts_in( + ptr.as_mut_ptr() as *mut _, + len, + Global, + ) + .into_box(len)) } } @@ -730,12 +737,20 @@ impl Box<[T]> { Err(_) => return Err(AllocError), }; let ptr = Global.allocate_zeroed(layout)?; - Ok(RawVec::from_raw_parts_in(ptr.as_mut_ptr() as *mut _, len, Global).into_box(len)) + Ok(RawVec::::from_raw_parts_in( + ptr.as_mut_ptr() as *mut _, + len, + Global, + ) + .into_box(len)) } } } -impl Box<[T], A> { +impl Box<[T], A> +where + [(); core::alloc::co_alloc_metadata_num_slots::()]:, +{ /// Constructs a new boxed slice with uninitialized contents in the provided allocator. /// /// # Examples @@ -762,8 +777,13 @@ impl Box<[T], A> { #[unstable(feature = "allocator_api", issue = "32838")] // #[unstable(feature = "new_uninit", issue = "63291")] #[must_use] - pub fn new_uninit_slice_in(len: usize, alloc: A) -> Box<[mem::MaybeUninit], A> { - unsafe { RawVec::with_capacity_in(len, alloc).into_box(len) } + #[allow(unused_braces)] + pub fn new_uninit_slice_in(len: usize, alloc: A) -> Box<[mem::MaybeUninit], A> + where + // false = no need for co-alloc metadata, since it would get lost once converted to Box. + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(false)]:, + { + unsafe { RawVec::::with_capacity_in(len, alloc).into_box(len) } } /// Constructs a new boxed slice with uninitialized contents in the provided allocator, @@ -790,8 +810,13 @@ impl Box<[T], A> { #[unstable(feature = "allocator_api", issue = "32838")] // #[unstable(feature = "new_uninit", issue = "63291")] #[must_use] - pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Box<[mem::MaybeUninit], A> { - unsafe { RawVec::with_capacity_zeroed_in(len, alloc).into_box(len) } + #[allow(unused_braces)] + pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Box<[mem::MaybeUninit], A> + where + // false = no need for co-alloc metadata, since it would get lost once converted to Box. + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(false)]:, + { + unsafe { RawVec::::with_capacity_zeroed_in(len, alloc).into_box(len) } } } @@ -1496,7 +1521,8 @@ impl From<&[T]> for Box<[T]> { /// ``` fn from(slice: &[T]) -> Box<[T]> { let len = slice.len(); - let buf = RawVec::with_capacity(len); + // false = no need for co-alloc metadata, since it would get lost once converted to Box. 
+ let buf = RawVec::::with_capacity(len); unsafe { ptr::copy_nonoverlapping(slice.as_ptr(), buf.ptr(), len); buf.into_box(slice.len()).assume_init() @@ -1661,8 +1687,12 @@ impl TryFrom> for Box<[T; N]> { #[cfg(not(no_global_oom_handling))] #[stable(feature = "boxed_array_try_from_vec", since = "1.66.0")] -impl TryFrom> for Box<[T; N]> { - type Error = Vec; +impl TryFrom> + for Box<[T; N]> +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ + type Error = Vec; /// Attempts to convert a `Vec` into a `Box<[T; N]>`. /// @@ -1682,7 +1712,7 @@ impl TryFrom> for Box<[T; N]> { /// let state: Box<[f32; 100]> = vec![1.0; 100].try_into().unwrap(); /// assert_eq!(state.len(), 100); /// ``` - fn try_from(vec: Vec) -> Result { + fn try_from(vec: Vec) -> Result { if vec.len() == N { let boxed_slice = vec.into_boxed_slice(); Ok(unsafe { boxed_slice_as_array_unchecked(boxed_slice) }) @@ -2019,10 +2049,14 @@ impl FromIterator for Box<[I]> { #[cfg(not(no_global_oom_handling))] #[stable(feature = "box_slice_clone", since = "1.3.0")] -impl Clone for Box<[T], A> { +impl Clone for Box<[T], A> +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(false)]:, +{ fn clone(&self) -> Self { let alloc = Box::allocator(self).clone(); - self.to_vec_in(alloc).into_boxed_slice() + // false = no need for co-alloc metadata, since it would get lost once converted to the boxed slice. + self.to_vec_in::(alloc).into_boxed_slice() } fn clone_from(&mut self, other: &Self) { diff --git a/library/alloc/src/collections/binary_heap/mod.rs b/library/alloc/src/collections/binary_heap/mod.rs index 0b73b1af4eb35..05dfaa92f385b 100644 --- a/library/alloc/src/collections/binary_heap/mod.rs +++ b/library/alloc/src/collections/binary_heap/mod.rs @@ -150,9 +150,12 @@ use core::num::NonZeroUsize; use core::ops::{Deref, DerefMut}; use core::ptr; +use crate::alloc::Global; + use crate::collections::TryReserveError; use crate::slice; use crate::vec::{self, AsVecIntoIter, Vec}; +use crate::DEFAULT_COOP_PREFERRED; use super::SpecExtend; @@ -1241,7 +1244,8 @@ impl BinaryHeap { /// ``` #[inline] #[stable(feature = "drain", since = "1.6.0")] - pub fn drain(&mut self) -> Drain<'_, T> { + #[allow(unused_braces)] + pub fn drain(&mut self) -> Drain<'_, T, { SHORT_TERM_VEC_PREFERS_COOP!() }> { Drain { iter: self.data.drain(..) 
} } @@ -1521,12 +1525,18 @@ unsafe impl TrustedLen for IntoIterSorted {} /// [`drain`]: BinaryHeap::drain #[stable(feature = "drain", since = "1.6.0")] #[derive(Debug)] -pub struct Drain<'a, T: 'a> { - iter: vec::Drain<'a, T>, +pub struct Drain<'a, T: 'a, const COOP_PREFERRED: bool> +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ + iter: vec::Drain<'a, T, Global, COOP_PREFERRED>, } #[stable(feature = "drain", since = "1.6.0")] -impl Iterator for Drain<'_, T> { +impl Iterator for Drain<'_, T, COOP_PREFERRED> +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ type Item = T; #[inline] @@ -1541,7 +1551,10 @@ impl Iterator for Drain<'_, T> { } #[stable(feature = "drain", since = "1.6.0")] -impl DoubleEndedIterator for Drain<'_, T> { +impl DoubleEndedIterator for Drain<'_, T, COOP_PREFERRED> +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ #[inline] fn next_back(&mut self) -> Option { self.iter.next_back() @@ -1549,14 +1562,20 @@ impl DoubleEndedIterator for Drain<'_, T> { } #[stable(feature = "drain", since = "1.6.0")] -impl ExactSizeIterator for Drain<'_, T> { +impl ExactSizeIterator for Drain<'_, T, COOP_PREFERRED> +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn is_empty(&self) -> bool { self.iter.is_empty() } } #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for Drain<'_, T> {} +impl FusedIterator for Drain<'_, T, COOP_PREFERRED> where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: +{ +} /// A draining iterator over the elements of a `BinaryHeap`. /// @@ -1644,7 +1663,8 @@ impl From<[T; N]> for BinaryHeap { } #[stable(feature = "binary_heap_extras_15", since = "1.5.0")] -impl From> for Vec { +#[allow(unused_braces)] +impl From> for Vec { /// Converts a `BinaryHeap` into a `Vec`. 
/// /// This conversion requires no data movement or allocation, and has diff --git a/library/alloc/src/collections/vec_deque/drain.rs b/library/alloc/src/collections/vec_deque/drain.rs index 89feb361ddc11..cd0e6caace3f4 100644 --- a/library/alloc/src/collections/vec_deque/drain.rs +++ b/library/alloc/src/collections/vec_deque/drain.rs @@ -2,7 +2,7 @@ use core::iter::FusedIterator; use core::marker::PhantomData; use core::mem::{self, SizedTypeProperties}; use core::ptr::NonNull; -use core::{fmt, ptr}; +use core::{alloc, fmt, ptr}; use crate::alloc::{Allocator, Global}; @@ -15,14 +15,18 @@ use super::VecDeque; /// /// [`drain`]: VecDeque::drain #[stable(feature = "drain", since = "1.6.0")] +#[allow(unused_braces)] pub struct Drain< 'a, T: 'a, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, -> { + const COOP_PREFERRED: bool = { SHORT_TERM_VEC_PREFERS_COOP!() }, +> where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ // We can't just use a &mut VecDeque, as that would make Drain invariant over T // and we want it to be covariant instead - deque: NonNull>, + deque: NonNull>, // drain_start is stored in deque.len drain_len: usize, // index into the logical array, not the physical one (always lies in [0..deque.len)) @@ -34,9 +38,12 @@ pub struct Drain< _marker: PhantomData<&'a T>, } -impl<'a, T, A: Allocator> Drain<'a, T, A> { +impl<'a, T, A: Allocator, const COOP_PREFERRED: bool> Drain<'a, T, A, COOP_PREFERRED> +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ pub(super) unsafe fn new( - deque: &'a mut VecDeque, + deque: &'a mut VecDeque, drain_start: usize, drain_len: usize, ) -> Self { @@ -88,7 +95,11 @@ impl<'a, T, A: Allocator> Drain<'a, T, A> { } #[stable(feature = "collection_debug", since = "1.17.0")] -impl fmt::Debug for Drain<'_, T, A> { +impl fmt::Debug + for Drain<'_, T, A, COOP_PREFERRED> +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("Drain") .field(&self.drain_len) @@ -100,16 +111,37 @@ impl fmt::Debug for Drain<'_, T, A> { } #[stable(feature = "drain", since = "1.6.0")] -unsafe impl Sync for Drain<'_, T, A> {} +unsafe impl Sync + for Drain<'_, T, A, COOP_PREFERRED> +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ +} #[stable(feature = "drain", since = "1.6.0")] -unsafe impl Send for Drain<'_, T, A> {} +unsafe impl Send + for Drain<'_, T, A, COOP_PREFERRED> +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ +} #[stable(feature = "drain", since = "1.6.0")] -impl Drop for Drain<'_, T, A> { +impl Drop for Drain<'_, T, A, COOP_PREFERRED> +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn drop(&mut self) { - struct DropGuard<'r, 'a, T, A: Allocator>(&'r mut Drain<'a, T, A>); + struct DropGuard<'r, 'a, T, A: Allocator, const COOP_PREFERRED: bool>( + &'r mut Drain<'a, T, A, COOP_PREFERRED>, + ) + where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:; - impl<'r, 'a, T, A: Allocator> Drop for DropGuard<'r, 'a, T, A> { + impl<'r, 'a, T, A: Allocator, const COOP_PREFERRED: bool> Drop + for DropGuard<'r, 'a, T, A, COOP_PREFERRED> + where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + { fn drop(&mut self) { if self.0.remaining != 0 { unsafe { @@ -190,7 +222,10 @@ impl Drop for Drain<'_, T, A> { } 
#[stable(feature = "drain", since = "1.6.0")] -impl Iterator for Drain<'_, T, A> { +impl Iterator for Drain<'_, T, A, COOP_PREFERRED> +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ type Item = T; #[inline] @@ -212,7 +247,11 @@ impl Iterator for Drain<'_, T, A> { } #[stable(feature = "drain", since = "1.6.0")] -impl DoubleEndedIterator for Drain<'_, T, A> { +impl DoubleEndedIterator + for Drain<'_, T, A, COOP_PREFERRED> +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ #[inline] fn next_back(&mut self) -> Option { if self.remaining == 0 { @@ -225,7 +264,15 @@ impl DoubleEndedIterator for Drain<'_, T, A> { } #[stable(feature = "drain", since = "1.6.0")] -impl ExactSizeIterator for Drain<'_, T, A> {} +impl ExactSizeIterator + for Drain<'_, T, A, COOP_PREFERRED> +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ +} #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for Drain<'_, T, A> {} +impl FusedIterator for Drain<'_, T, A, COOP_PREFERRED> where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: +{ +} diff --git a/library/alloc/src/collections/vec_deque/into_iter.rs b/library/alloc/src/collections/vec_deque/into_iter.rs index 34bc0ce9177c4..f3f50e3048bc6 100644 --- a/library/alloc/src/collections/vec_deque/into_iter.rs +++ b/library/alloc/src/collections/vec_deque/into_iter.rs @@ -1,5 +1,5 @@ use core::iter::{FusedIterator, TrustedLen}; -use core::{array, fmt, mem::MaybeUninit, ops::Try, ptr}; +use core::{alloc, array, fmt, mem::MaybeUninit, ops::Try, ptr}; use crate::alloc::{Allocator, Global}; @@ -17,29 +17,42 @@ use super::VecDeque; pub struct IntoIter< T, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, -> { - inner: VecDeque, + const COOP_PREFERRED: bool = true, +> where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ + inner: VecDeque, } -impl IntoIter { - pub(super) fn new(inner: VecDeque) -> Self { +impl IntoIter +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ + pub(super) fn new(inner: VecDeque) -> Self { IntoIter { inner } } - pub(super) fn into_vecdeque(self) -> VecDeque { + pub(super) fn into_vecdeque(self) -> VecDeque { self.inner } } #[stable(feature = "collection_debug", since = "1.17.0")] -impl fmt::Debug for IntoIter { +impl fmt::Debug + for IntoIter +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("IntoIter").field(&self.inner).finish() } } #[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for IntoIter { +impl Iterator for IntoIter +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ type Item = T; #[inline] @@ -175,7 +188,11 @@ impl Iterator for IntoIter { } #[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator for IntoIter { +impl DoubleEndedIterator + for IntoIter +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ #[inline] fn next_back(&mut self) -> Option { self.inner.pop_back() @@ -245,7 +262,11 @@ impl DoubleEndedIterator for IntoIter { } #[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator for IntoIter { +impl ExactSizeIterator + for IntoIter +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ #[inline] fn 
is_empty(&self) -> bool { self.inner.is_empty() @@ -253,7 +274,15 @@ impl ExactSizeIterator for IntoIter { } #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for IntoIter {} +impl FusedIterator for IntoIter where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: +{ +} #[unstable(feature = "trusted_len", issue = "37572")] -unsafe impl TrustedLen for IntoIter {} +unsafe impl TrustedLen + for IntoIter +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ +} diff --git a/library/alloc/src/collections/vec_deque/macros.rs b/library/alloc/src/collections/vec_deque/macros.rs index 5c7913073fe87..68ee43152b5b5 100644 --- a/library/alloc/src/collections/vec_deque/macros.rs +++ b/library/alloc/src/collections/vec_deque/macros.rs @@ -1,9 +1,10 @@ macro_rules! __impl_slice_eq1 { ([$($vars:tt)*] $lhs:ty, $rhs:ty, $($constraints:tt)*) => { #[stable(feature = "vec_deque_partial_eq_slice", since = "1.17.0")] - impl PartialEq<$rhs> for $lhs + impl PartialEq<$rhs> for $lhs where T: PartialEq, + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, $($constraints)* { fn eq(&self, other: &$rhs) -> bool { diff --git a/library/alloc/src/collections/vec_deque/mod.rs b/library/alloc/src/collections/vec_deque/mod.rs index 1573b3d77dc16..e98087ea9ad93 100644 --- a/library/alloc/src/collections/vec_deque/mod.rs +++ b/library/alloc/src/collections/vec_deque/mod.rs @@ -5,8 +5,10 @@ //! are not required to be copyable, and the queue will be sendable if the //! contained type is sendable. +#![feature(global_co_alloc)] #![stable(feature = "rust1", since = "1.0.0")] - +use crate::DEFAULT_COOP_PREFERRED; +use core::alloc; use core::cmp::{self, Ordering}; use core::fmt; use core::hash::{Hash, Hasher}; @@ -91,10 +93,14 @@ mod tests; #[cfg_attr(not(test), rustc_diagnostic_item = "VecDeque")] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_insignificant_dtor] +#[allow(unused_braces)] pub struct VecDeque< T, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, -> { + const COOP_PREFERRED: bool = { DEFAULT_COOP_PREFERRED!() }, +> where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ // `self[0]`, if it exists, is `buf[head]`. // `head < buf.capacity()`, unless `buf.capacity() == 0` when `head == 0`. head: usize, @@ -102,11 +108,15 @@ pub struct VecDeque< // if `len == 0`, the exact value of `head` is unimportant. // if `T` is zero-Sized, then `self.len <= usize::MAX`, otherwise `self.len <= isize::MAX as usize`. len: usize, - buf: RawVec, + buf: RawVec, } #[stable(feature = "rust1", since = "1.0.0")] -impl Clone for VecDeque { +impl Clone + for VecDeque +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn clone(&self) -> Self { let mut deq = Self::with_capacity_in(self.len(), self.allocator().clone()); deq.extend(self.iter().cloned()); @@ -120,7 +130,11 @@ impl Clone for VecDeque { } #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl<#[may_dangle] T, A: Allocator> Drop for VecDeque { +unsafe impl<#[may_dangle] T, A: Allocator, const COOP_PREFERRED: bool> Drop + for VecDeque +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn drop(&mut self) { /// Runs the destructor for all items in the slice when it gets dropped (normally or /// during unwinding). 
@@ -145,15 +159,21 @@ unsafe impl<#[may_dangle] T, A: Allocator> Drop for VecDeque { } #[stable(feature = "rust1", since = "1.0.0")] -impl Default for VecDeque { +impl Default for VecDeque +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ /// Creates an empty deque. #[inline] - fn default() -> VecDeque { - VecDeque::new() + fn default() -> VecDeque { + VecDeque::::new() } } -impl VecDeque { +impl VecDeque +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ /// Marginally more convenient #[inline] fn ptr(&self) -> *mut T { @@ -442,12 +462,18 @@ impl VecDeque { mut iter: impl Iterator, len: usize, ) -> usize { - struct Guard<'a, T, A: Allocator> { - deque: &'a mut VecDeque, + struct Guard<'a, T, A: Allocator, const COOP_PREFERRED: bool> + where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + { + deque: &'a mut VecDeque, written: usize, } - impl<'a, T, A: Allocator> Drop for Guard<'a, T, A> { + impl<'a, T, A: Allocator, const COOP_PREFERRED: bool> Drop for Guard<'a, T, A, COOP_PREFERRED> + where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + { fn drop(&mut self) { self.deque.len += self.written; } @@ -525,7 +551,11 @@ impl VecDeque { } } -impl VecDeque { +impl VecDeque +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ /// Creates an empty deque. /// /// # Examples @@ -539,7 +569,10 @@ impl VecDeque { #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_vec_deque_new", since = "1.68.0")] #[must_use] - pub const fn new() -> VecDeque { + pub const fn new() -> VecDeque + where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + { // FIXME: This should just be `VecDeque::new_in(Global)` once that hits stable. VecDeque { head: 0, len: 0, buf: RawVec::NEW } } @@ -556,12 +589,15 @@ impl VecDeque { #[inline] #[stable(feature = "rust1", since = "1.0.0")] #[must_use] - pub fn with_capacity(capacity: usize) -> VecDeque { - Self::with_capacity_in(capacity, Global) + pub fn with_capacity(capacity: usize) -> VecDeque { + VecDeque::::with_capacity_in(capacity, Global) } } -impl VecDeque { +impl VecDeque +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ /// Creates an empty deque. 
/// /// # Examples @@ -573,7 +609,7 @@ impl VecDeque { /// ``` #[inline] #[unstable(feature = "allocator_api", issue = "32838")] - pub const fn new_in(alloc: A) -> VecDeque { + pub const fn new_in(alloc: A) -> VecDeque { VecDeque { head: 0, len: 0, buf: RawVec::new_in(alloc) } } @@ -587,7 +623,7 @@ impl VecDeque { /// let deque: VecDeque = VecDeque::with_capacity(10); /// ``` #[unstable(feature = "allocator_api", issue = "32838")] - pub fn with_capacity_in(capacity: usize, alloc: A) -> VecDeque { + pub fn with_capacity_in(capacity: usize, alloc: A) -> VecDeque { VecDeque { head: 0, len: 0, buf: RawVec::with_capacity_in(capacity, alloc) } } @@ -1368,7 +1404,7 @@ impl VecDeque { /// ``` #[inline] #[stable(feature = "drain", since = "1.6.0")] - pub fn drain(&mut self, range: R) -> Drain<'_, T, A> + pub fn drain(&mut self, range: R) -> Drain<'_, T, A, COOP_PREFERRED> where R: RangeBounds, { @@ -2596,7 +2632,10 @@ impl VecDeque { } } -impl VecDeque { +impl VecDeque +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ /// Modifies the deque in-place so that `len()` is equal to new_len, /// either by removing excess elements from the back or by appending clones of `value` /// to the back. @@ -2641,7 +2680,11 @@ fn wrap_index(logical_index: usize, capacity: usize) -> usize { } #[stable(feature = "rust1", since = "1.0.0")] -impl PartialEq for VecDeque { +impl PartialEq + for VecDeque +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn eq(&self, other: &Self) -> bool { if self.len != other.len() { return false; @@ -2680,24 +2723,34 @@ impl PartialEq for VecDeque { } #[stable(feature = "rust1", since = "1.0.0")] -impl Eq for VecDeque {} +impl Eq for VecDeque where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: +{ +} -__impl_slice_eq1! { [] VecDeque, Vec, } -__impl_slice_eq1! { [] VecDeque, &[U], } -__impl_slice_eq1! { [] VecDeque, &mut [U], } -__impl_slice_eq1! { [const N: usize] VecDeque, [U; N], } -__impl_slice_eq1! { [const N: usize] VecDeque, &[U; N], } -__impl_slice_eq1! { [const N: usize] VecDeque, &mut [U; N], } +__impl_slice_eq1! { [] VecDeque, Vec, } +__impl_slice_eq1! { [] VecDeque, &[U], } +__impl_slice_eq1! { [] VecDeque, &mut [U], } +__impl_slice_eq1! { [const N: usize] VecDeque, [U; N], } +__impl_slice_eq1! { [const N: usize] VecDeque, &[U; N], } +__impl_slice_eq1! 
{ [const N: usize] VecDeque, &mut [U; N], } #[stable(feature = "rust1", since = "1.0.0")] -impl PartialOrd for VecDeque { +impl PartialOrd + for VecDeque +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn partial_cmp(&self, other: &Self) -> Option { self.iter().partial_cmp(other.iter()) } } #[stable(feature = "rust1", since = "1.0.0")] -impl Ord for VecDeque { +impl Ord for VecDeque +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ #[inline] fn cmp(&self, other: &Self) -> Ordering { self.iter().cmp(other.iter()) @@ -2705,7 +2758,10 @@ impl Ord for VecDeque { } #[stable(feature = "rust1", since = "1.0.0")] -impl Hash for VecDeque { +impl Hash for VecDeque +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn hash(&self, state: &mut H) { state.write_length_prefix(self.len); // It's not possible to use Hash::hash_slice on slices @@ -2719,7 +2775,10 @@ impl Hash for VecDeque { } #[stable(feature = "rust1", since = "1.0.0")] -impl Index for VecDeque { +impl Index for VecDeque +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ type Output = T; #[inline] @@ -2729,7 +2788,10 @@ impl Index for VecDeque { } #[stable(feature = "rust1", since = "1.0.0")] -impl IndexMut for VecDeque { +impl IndexMut for VecDeque +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ #[inline] fn index_mut(&mut self, index: usize) -> &mut T { self.get_mut(index).expect("Out of bounds access") @@ -2737,26 +2799,36 @@ impl IndexMut for VecDeque { } #[stable(feature = "rust1", since = "1.0.0")] -impl FromIterator for VecDeque { - fn from_iter>(iter: I) -> VecDeque { +impl FromIterator for VecDeque +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ + fn from_iter>(iter: I) -> VecDeque { SpecFromIter::spec_from_iter(iter.into_iter()) } } #[stable(feature = "rust1", since = "1.0.0")] -impl IntoIterator for VecDeque { +impl IntoIterator for VecDeque +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ type Item = T; - type IntoIter = IntoIter; + type IntoIter = IntoIter; /// Consumes the deque into a front-to-back iterator yielding elements by /// value. 
- fn into_iter(self) -> IntoIter { + fn into_iter(self) -> IntoIter { IntoIter::new(self) } } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T, A: Allocator> IntoIterator for &'a VecDeque { +impl<'a, T, A: Allocator, const COOP_PREFERRED: bool> IntoIterator + for &'a VecDeque +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ type Item = &'a T; type IntoIter = Iter<'a, T>; @@ -2766,7 +2838,11 @@ impl<'a, T, A: Allocator> IntoIterator for &'a VecDeque { } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T, A: Allocator> IntoIterator for &'a mut VecDeque { +impl<'a, T, A: Allocator, const COOP_PREFERRED: bool> IntoIterator + for &'a mut VecDeque +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ type Item = &'a mut T; type IntoIter = IterMut<'a, T>; @@ -2776,7 +2852,10 @@ impl<'a, T, A: Allocator> IntoIterator for &'a mut VecDeque { } #[stable(feature = "rust1", since = "1.0.0")] -impl Extend for VecDeque { +impl Extend for VecDeque +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn extend>(&mut self, iter: I) { >::spec_extend(self, iter.into_iter()); } @@ -2793,7 +2872,11 @@ impl Extend for VecDeque { } #[stable(feature = "extend_ref", since = "1.2.0")] -impl<'a, T: 'a + Copy, A: Allocator> Extend<&'a T> for VecDeque { +impl<'a, T: 'a + Copy, A: Allocator, const COOP_PREFERRED: bool> Extend<&'a T> + for VecDeque +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn extend>(&mut self, iter: I) { self.spec_extend(iter.into_iter()); } @@ -2810,14 +2893,23 @@ impl<'a, T: 'a + Copy, A: Allocator> Extend<&'a T> for VecDeque { } #[stable(feature = "rust1", since = "1.0.0")] -impl fmt::Debug for VecDeque { +impl fmt::Debug + for VecDeque +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.iter()).finish() } } #[stable(feature = "vecdeque_vec_conversions", since = "1.10.0")] -impl From> for VecDeque { +impl + From> for VecDeque +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); alloc::co_alloc_metadata_num_slots_with_preference::(OTHER_COOP_PREFERRED)]:, +{ /// Turn a [`Vec`] into a [`VecDeque`]. /// /// [`Vec`]: crate::vec::Vec @@ -2827,14 +2919,23 @@ impl From> for VecDeque { /// and to not re-allocate the `Vec`'s buffer or allocate /// any additional memory. #[inline] - fn from(other: Vec) -> Self { + fn from(other: Vec) -> Self { let (ptr, len, cap, alloc) = other.into_raw_parts_with_alloc(); - Self { head: 0, len, buf: unsafe { RawVec::from_raw_parts_in(ptr, cap, alloc) } } + Self { + head: 0, + len, + buf: unsafe { RawVec::::from_raw_parts_in(ptr, cap, alloc) }, + } } } #[stable(feature = "vecdeque_vec_conversions", since = "1.10.0")] -impl From> for Vec { +impl + From> for Vec +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); alloc::co_alloc_metadata_num_slots_with_preference::(VECDEQUE_COOP_PREFERRED)]:, +{ /// Turn a [`VecDeque`] into a [`Vec`]. 
/// /// [`Vec`]: crate::vec::Vec @@ -2864,7 +2965,10 @@ impl From> for Vec { /// assert_eq!(vec, [8, 9, 1, 2, 3, 4]); /// assert_eq!(vec.as_ptr(), ptr); /// ``` - fn from(mut other: VecDeque) -> Self { + fn from(mut other: VecDeque) -> Self + where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(VECDEQUE_COOP_PREFERRED)]:, + { other.make_contiguous(); unsafe { @@ -2877,13 +2981,18 @@ impl From> for Vec { if other.head != 0 { ptr::copy(buf.add(other.head), buf, len); } - Vec::from_raw_parts_in(buf, len, cap, alloc) + // @FIXME: COOP + Vec::::from_raw_parts_in(buf, len, cap, alloc) } } } #[stable(feature = "std_collections_from_array", since = "1.56.0")] -impl From<[T; N]> for VecDeque { +impl From<[T; N]> + for VecDeque +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ /// Converts a `[T; N]` into a `VecDeque`. /// /// ``` @@ -2894,11 +3003,12 @@ impl From<[T; N]> for VecDeque { /// assert_eq!(deq1, deq2); /// ``` fn from(arr: [T; N]) -> Self { - let mut deq = VecDeque::with_capacity(N); + let mut deq = VecDeque::::with_capacity(N); let arr = ManuallyDrop::new(arr); if !::IS_ZST { // SAFETY: VecDeque::with_capacity ensures that there is enough capacity. unsafe { + // @FIXME for COOP_PREFERRED: ptr::copy_nonoverlapping(arr.as_ptr(), deq.ptr(), N); } } diff --git a/library/alloc/src/collections/vec_deque/spec_extend.rs b/library/alloc/src/collections/vec_deque/spec_extend.rs index dccf40ccb38aa..b1a9d23fd4a99 100644 --- a/library/alloc/src/collections/vec_deque/spec_extend.rs +++ b/library/alloc/src/collections/vec_deque/spec_extend.rs @@ -1,5 +1,8 @@ +#![feature(min_specialization)] + use crate::alloc::Allocator; use crate::vec; +use core::alloc; use core::iter::TrustedLen; use core::slice; @@ -10,9 +13,11 @@ pub(super) trait SpecExtend { fn spec_extend(&mut self, iter: I); } -impl SpecExtend for VecDeque +impl SpecExtend + for VecDeque where I: Iterator, + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, { default fn spec_extend(&mut self, mut iter: I) { // This function should be the moral equivalent of: @@ -22,7 +27,12 @@ where // } // May only be called if `deque.len() < deque.capacity()` - unsafe fn push_unchecked(deque: &mut VecDeque, element: T) { + unsafe fn push_unchecked( + deque: &mut VecDeque, + element: T, + ) where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + { // SAFETY: Because of the precondition, it's guaranteed that there is space // in the logical array after the last element. unsafe { deque.buffer_write(deque.to_physical_idx(deque.len), element) }; @@ -49,9 +59,11 @@ where } } -impl SpecExtend for VecDeque +impl SpecExtend + for VecDeque where I: TrustedLen, + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, { default fn spec_extend(&mut self, iter: I) { // This is the case for a TrustedLen iterator. 
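The `[(); alloc::co_alloc_metadata_num_slots_with_preference::<A>(COOP_PREFERRED)]:` bound repeated on these impls is the usual `generic_const_exprs` idiom: requiring that a zero-sized array type with that length is well-formed forces the compiler to prove the const expression can be evaluated for the concrete `A` and `COOP_PREFERRED`, which in turn is what lets a field's length depend on both parameters. A minimal, self-contained sketch of the idiom follows; the names (`meta_slots`, `Buffer`) are illustrative only and are not part of this patch:

#![feature(generic_const_exprs)]
#![allow(incomplete_features)]

// Hypothetical helper: one metadata slot when cooperation is requested, zero otherwise.
const fn meta_slots(coop: bool) -> usize {
    if coop { 1 } else { 0 }
}

// The array field's length is a const expression over the type's own parameter,
// so every impl that mentions `Buffer<COOP>` must restate the same bound.
struct Buffer<const COOP: bool>
where
    [(); meta_slots(COOP)]:,
{
    metas: [u8; meta_slots(COOP)],
}

impl<const COOP: bool> Buffer<COOP>
where
    [(); meta_slots(COOP)]:,
{
    fn new() -> Self {
        Buffer { metas: [0u8; meta_slots(COOP)] }
    }
}

fn main() {
    let plain = Buffer::<false>::new();
    let coop = Buffer::<true>::new();
    assert_eq!(plain.metas.len(), 0); // non-cooperative: zero-length metadata array
    assert_eq!(coop.metas.len(), 1);  // cooperative: exactly one slot
}

Having to restate the bound on every impl is why the same where-clause appears on nearly every `Vec`, `VecDeque`, `RawVec`, and iterator impl touched by this patch.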
@@ -84,7 +96,11 @@ where } } -impl SpecExtend> for VecDeque { +impl SpecExtend> + for VecDeque +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn spec_extend(&mut self, mut iterator: vec::IntoIter) { let slice = iterator.as_slice(); self.reserve(slice.len()); @@ -97,19 +113,23 @@ impl SpecExtend> for VecDeque { } } -impl<'a, T: 'a, I, A: Allocator> SpecExtend<&'a T, I> for VecDeque +impl<'a, T: 'a, I, A: Allocator, const COOP_PREFERRED: bool> SpecExtend<&'a T, I> + for VecDeque where I: Iterator, T: Copy, + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, { default fn spec_extend(&mut self, iterator: I) { self.spec_extend(iterator.copied()) } } -impl<'a, T: 'a, A: Allocator> SpecExtend<&'a T, slice::Iter<'a, T>> for VecDeque +impl<'a, T: 'a, A: Allocator, const COOP_PREFERRED: bool> SpecExtend<&'a T, slice::Iter<'a, T>> + for VecDeque where T: Copy, + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, { fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) { let slice = iterator.as_slice(); diff --git a/library/alloc/src/collections/vec_deque/spec_from_iter.rs b/library/alloc/src/collections/vec_deque/spec_from_iter.rs index 7650492ebdad1..b0380260fcc44 100644 --- a/library/alloc/src/collections/vec_deque/spec_from_iter.rs +++ b/library/alloc/src/collections/vec_deque/spec_from_iter.rs @@ -1,33 +1,44 @@ use super::{IntoIter, VecDeque}; +use crate::alloc::Global; +use core::alloc; /// Specialization trait used for `VecDeque::from_iter` pub(super) trait SpecFromIter { fn spec_from_iter(iter: I) -> Self; } -impl SpecFromIter for VecDeque +impl SpecFromIter for VecDeque where I: Iterator, + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, { default fn spec_from_iter(iterator: I) -> Self { // Since converting is O(1) now, just re-use the `Vec` logic for // anything where we can't do something extra-special for `VecDeque`, // especially as that could save us some monomorphiziation work // if one uses the same iterators (like slice ones) with both. - crate::vec::Vec::from_iter(iterator).into() + crate::vec::Vec::::from_iter(iterator).into() } } -impl SpecFromIter> for VecDeque { +impl SpecFromIter> + for VecDeque +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ #[inline] - fn spec_from_iter(iterator: crate::vec::IntoIter) -> Self { + fn spec_from_iter(iterator: crate::vec::IntoIter) -> Self { iterator.into_vecdeque() } } -impl SpecFromIter> for VecDeque { +impl SpecFromIter> + for VecDeque +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ #[inline] - fn spec_from_iter(iterator: IntoIter) -> Self { + fn spec_from_iter(iterator: IntoIter) -> Self { iterator.into_vecdeque() } } diff --git a/library/alloc/src/ffi/c_str.rs b/library/alloc/src/ffi/c_str.rs index f99395c72aa03..20c5f6b634e57 100644 --- a/library/alloc/src/ffi/c_str.rs +++ b/library/alloc/src/ffi/c_str.rs @@ -17,6 +17,8 @@ use core::ptr; use core::slice; use core::slice::memchr; use core::str::{self, Utf8Error}; +use crate::alloc::Global; +use crate::DEFAULT_COOP_PREFERRED; #[cfg(target_has_atomic = "ptr")] use crate::sync::Arc; @@ -723,7 +725,8 @@ impl fmt::Debug for CString { } #[stable(feature = "cstring_into", since = "1.7.0")] -impl From for Vec { +#[allow(unused_braces)] +impl From for Vec { /// Converts a [`CString`] into a [Vec]<[u8]>. /// /// The conversion consumes the [`CString`], and removes the terminating NUL byte. 
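Not shown in this excerpt is the definition of `co_alloc_metadata_num_slots_with_preference` itself. Judging from `RawVec`'s `Drop` impl further down, which only reads `metas[0]` when `A::IS_CO_ALLOCATOR && COOP_PREFERRED` holds, it presumably yields one slot in exactly that case and zero otherwise. The sketch below is an assumed shape, not the patch's actual code, and uses a stand-in trait instead of the real `Allocator`:

// Stand-in trait: only the associated const matters for this sketch.
pub trait AllocatorLike {
    const IS_CO_ALLOCATOR: bool;
}

// Assumed behaviour of the helper evaluated by the where-clauses throughout this patch.
pub const fn co_alloc_metadata_num_slots_with_preference<A: AllocatorLike>(
    coop_preferred: bool,
) -> usize {
    if A::IS_CO_ALLOCATOR && coop_preferred { 1 } else { 0 }
}

// Hypothetical cooperative global allocator, used only to exercise the helper.
struct CoGlobal;
impl AllocatorLike for CoGlobal {
    const IS_CO_ALLOCATOR: bool = true;
}

fn main() {
    assert_eq!(co_alloc_metadata_num_slots_with_preference::<CoGlobal>(true), 1);
    assert_eq!(co_alloc_metadata_num_slots_with_preference::<CoGlobal>(false), 0);
}

Under that reading, a non-cooperative vector (`PlVec`) carries a zero-length `metas` array and pays no size cost, while a cooperative one reserves exactly one `GlobalCoAllocMeta` slot for the allocator.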
diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs index fbbed4f636315..6edfaf9bf4c03 100644 --- a/library/alloc/src/lib.rs +++ b/library/alloc/src/lib.rs @@ -86,6 +86,7 @@ #![warn(deprecated_in_future)] #![warn(missing_debug_implementations)] #![warn(missing_docs)] +#![allow(incomplete_features)] #![allow(explicit_outlives_requirements)] #![cfg_attr(not(bootstrap), warn(multiple_supertrait_upcastable))] // @@ -123,6 +124,7 @@ #![feature(extend_one)] #![feature(fmt_internals)] #![feature(fn_traits)] +#![feature(generic_const_exprs)] #![feature(global_co_alloc_meta)] #![feature(hasher_prefixfree_extras)] #![feature(inline_const)] @@ -180,9 +182,17 @@ #![feature(fundamental)] #![cfg_attr(not(test), feature(generator_trait))] #![feature(global_co_alloc)] +#![feature(global_co_alloc_short_term_pref)] #![feature(hashmap_internals)] #![feature(lang_items)] -#![feature(min_specialization)] +#![feature(global_co_alloc_def)] +// When we used min_specialization instead of specialization, library/alloc/src/vec/mod.rs was failing with: +// - cannot specialize on predicate `the constant `core::alloc::co_alloc_metadata_num_slots::()` can be evaluated` +// - cannot specialize on predicate `[(); _] well-formed` +// - cannot specialize on predicate `the constant `core::alloc::co_alloc_metadata_num_slots::()` can be evaluated` +//#![feature(min_specialization)] +#![feature(associated_type_defaults)] +#![feature(specialization)] #![feature(negative_impls)] #![feature(never_type)] #![feature(rustc_allow_const_fn_unstable)] diff --git a/library/alloc/src/macros.rs b/library/alloc/src/macros.rs index 5198bf297d925..c2f1e9df41c11 100644 --- a/library/alloc/src/macros.rs +++ b/library/alloc/src/macros.rs @@ -41,7 +41,7 @@ #[allow_internal_unstable(rustc_attrs, liballoc_internals)] macro_rules! vec { () => ( - $crate::__rust_force_expr!($crate::vec::Vec::new()) + $crate::__rust_force_expr!($crate::vec::Vec::new_co()) ); ($elem:expr; $n:expr) => ( $crate::__rust_force_expr!($crate::vec::from_elem($elem, $n)) diff --git a/library/alloc/src/raw_vec.rs b/library/alloc/src/raw_vec.rs index af2813abd1ddb..c7a9606b65c86 100644 --- a/library/alloc/src/raw_vec.rs +++ b/library/alloc/src/raw_vec.rs @@ -1,6 +1,6 @@ #![unstable(feature = "raw_vec_internals", reason = "unstable const warnings", issue = "none")] -use core::alloc::{LayoutError, GlobalCoAllocMeta}; +use core::alloc::{self, GlobalCoAllocMeta, LayoutError, PtrAndMeta}; use core::cmp; use core::intrinsics; use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties}; @@ -14,6 +14,7 @@ use crate::alloc::{Allocator, Global, Layout}; use crate::boxed::Box; use crate::collections::TryReserveError; use crate::collections::TryReserveErrorKind::*; +use crate::DEFAULT_COOP_PREFERRED; #[cfg(test)] mod tests; @@ -49,15 +50,28 @@ enum AllocInit { /// `usize::MAX`. This means that you need to be careful when round-tripping this type with a /// `Box<[T]>`, since `capacity()` won't yield the length. 
#[allow(missing_debug_implementations)] -pub(crate) struct RawVec { +#[allow(unused_braces)] +pub(crate) struct RawVec< + T, + A: Allocator = Global, + const COOP_PREFERRED: bool = { DEFAULT_COOP_PREFERRED!() }, +> where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ ptr: Unique, cap: usize, alloc: A, - #[allow(dead_code)] - pub(crate) meta: GlobalCoAllocMeta, + // As of v1.67.0, `cmp` for `TypeId` is not `const`, unfortunately: + //pub(crate) meta: [GlobalCoAllocMeta; {if core::any::TypeId::of::()==core::any::TypeId::of::() {1} else {0}}], + //pub(crate) meta: [GlobalCoAllocMeta; mem::size_of::()], + pub(crate) metas: [GlobalCoAllocMeta; + alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)], } -impl RawVec { +impl RawVec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ /// HACK(Centril): This exists because stable `const fn` can only call stable `const fn`, so /// they cannot call `Self::new()`. /// @@ -104,7 +118,10 @@ impl RawVec { } } -impl RawVec { +impl RawVec +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ // Tiny Vecs are dumb. Skip to: // - 8 if the element size is 1, because any heap allocators is likely // to round up a request of less than 8 bytes to at least 8 bytes. @@ -122,7 +139,13 @@ impl RawVec { /// the returned `RawVec`. pub const fn new_in(alloc: A) -> Self { // `cap: 0` means "unallocated". zero-sized types are ignored. - Self { ptr: Unique::dangling(), cap: 0, alloc, meta: GlobalCoAllocMeta {/*one: 1*/ /* , two: 2, three: 3, four: 4*/} } + Self { + ptr: Unique::dangling(), + cap: 0, + alloc, + metas: [GlobalCoAllocMeta {/*one: 1*/ /* , two: 2, three: 3, four: 4*/}; + alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)], + } } /// Like `with_capacity`, but parameterized over the choice of @@ -199,7 +222,8 @@ impl RawVec { ptr: unsafe { Unique::new_unchecked(ptr.cast().as_ptr()) }, cap: capacity, alloc, - meta: GlobalCoAllocMeta {/*one: 1*/ /*, two: 2, three: 3, four: 4*/} + metas: [GlobalCoAllocMeta {/*one: 1*/ /*, two: 2, three: 3, four: 4*/}; + alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)], } } } @@ -216,7 +240,13 @@ impl RawVec { /// guaranteed. #[inline] pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, alloc: A) -> Self { - Self { ptr: unsafe { Unique::new_unchecked(ptr) }, cap: capacity, alloc, meta: GlobalCoAllocMeta {/*one: 1*/ /*, two: 2, three: 3, four: 4*/} } + Self { + ptr: unsafe { Unique::new_unchecked(ptr) }, + cap: capacity, + alloc, + metas: [GlobalCoAllocMeta {/*one: 1*/ /*, two: 2, three: 3, four: 4*/}; + alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)], + } } /// Gets a raw pointer to the start of the allocation. Note that this is @@ -285,11 +315,13 @@ impl RawVec { // handle_reserve behind a call, while making sure that this function is likely to be // inlined as just a comparison and a call if the comparison fails. #[cold] - fn do_reserve_and_handle( - slf: &mut RawVec, + fn do_reserve_and_handle( + slf: &mut RawVec, len: usize, additional: usize, - ) { + ) where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + { handle_reserve(slf.grow_amortized(len, additional)); } @@ -362,7 +394,10 @@ impl RawVec { } } -impl RawVec { +impl RawVec +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ /// Returns if the buffer needs to grow to fulfill the needed extra capacity. 
/// Mainly used to make inlining reserve-calls possible without inlining `grow`. fn needs_to_grow(&self, len: usize, additional: usize) -> bool { @@ -482,21 +517,20 @@ where memory.map_err(|_| AllocError { layout: new_layout, non_exhaustive: () }.into()) } -unsafe impl<#[may_dangle] T, A: Allocator> Drop for RawVec { +unsafe impl<#[may_dangle] T, A: Allocator, const COOP_PREFERRED: bool> Drop + for RawVec +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ /// Frees the memory owned by the `RawVec` *without* trying to drop its contents. default fn drop(&mut self) { if let Some((ptr, layout)) = self.current_memory() { - unsafe { self.alloc.deallocate(ptr, layout) } - } - } -} - -unsafe impl<#[may_dangle] T> Drop for RawVec { - /// Frees the memory owned by the `RawVec` *without* trying to drop its contents. - fn drop(&mut self) { - // @TODO - if let Some((ptr, layout)) = self.current_memory() { - unsafe { self.alloc.deallocate(ptr, layout) } + if A::IS_CO_ALLOCATOR && COOP_PREFERRED { + let meta = self.metas[0]; + unsafe { self.alloc.co_deallocate(PtrAndMeta { ptr, meta }, layout) } + } else { + unsafe { self.alloc.deallocate(ptr, layout) } + } } } } diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index fd1e3e0f75b09..a14e15549d5be 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -1987,7 +1987,10 @@ impl From> for Rc { #[cfg(not(no_global_oom_handling))] #[stable(feature = "shared_from_slice", since = "1.21.0")] -impl From> for Rc<[T]> { +impl From> for Rc<[T]> +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ /// Allocate a reference-counted slice and move `v`'s items into it. /// /// # Example @@ -1999,7 +2002,10 @@ impl From> for Rc<[T]> { /// assert_eq!(vec![1, 2, 3], *shared); /// ``` #[inline] - fn from(mut v: Vec) -> Rc<[T]> { + fn from(mut v: Vec) -> Rc<[T]> + where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + { unsafe { let rc = Rc::copy_from_slice(&v); // Allow the Vec to free its memory, but not destroy its contents @@ -2120,6 +2126,7 @@ trait ToRcSlice: Iterator + Sized { fn to_rc_slice(self) -> Rc<[T]>; } +// COOP_NOT_POSSIBLE #[cfg(not(no_global_oom_handling))] impl> ToRcSlice for I { default fn to_rc_slice(self) -> Rc<[T]> { diff --git a/library/alloc/src/slice.rs b/library/alloc/src/slice.rs index 093dcbbe8bf77..f2b4596c53a7d 100644 --- a/library/alloc/src/slice.rs +++ b/library/alloc/src/slice.rs @@ -97,7 +97,12 @@ pub(crate) mod hack { // We shouldn't add inline attribute to this since this is used in // `vec!` macro mostly and causes perf regression. See #71204 for // discussion and perf results. 
- pub fn into_vec(b: Box<[T], A>) -> Vec { + pub fn into_vec( + b: Box<[T], A>, + ) -> Vec + where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + { unsafe { let len = b.len(); let (b, alloc) = Box::into_raw_with_allocator(b); @@ -107,26 +112,48 @@ pub(crate) mod hack { #[cfg(not(no_global_oom_handling))] #[inline] - pub fn to_vec(s: &[T], alloc: A) -> Vec { + pub fn to_vec( + s: &[T], + alloc: A, + ) -> Vec + where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + { T::to_vec(s, alloc) } #[cfg(not(no_global_oom_handling))] pub trait ConvertVec { - fn to_vec(s: &[Self], alloc: A) -> Vec + fn to_vec( + s: &[Self], + alloc: A, + ) -> Vec where - Self: Sized; + Self: Sized, + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:; } #[cfg(not(no_global_oom_handling))] impl ConvertVec for T { #[inline] - default fn to_vec(s: &[Self], alloc: A) -> Vec { - struct DropGuard<'a, T, A: Allocator> { - vec: &'a mut Vec, + default fn to_vec( + s: &[Self], + alloc: A, + ) -> Vec + where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + { + struct DropGuard<'a, T, A: Allocator, const COOP_PREFERRED: bool> + where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + { + vec: &'a mut Vec, num_init: usize, } - impl<'a, T, A: Allocator> Drop for DropGuard<'a, T, A> { + impl<'a, T, A: Allocator, const COOP_PREFERRED: bool> Drop for DropGuard<'a, T, A, COOP_PREFERRED> + where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + { #[inline] fn drop(&mut self) { // SAFETY: @@ -158,7 +185,13 @@ pub(crate) mod hack { #[cfg(not(no_global_oom_handling))] impl ConvertVec for T { #[inline] - fn to_vec(s: &[Self], alloc: A) -> Vec { + fn to_vec( + s: &[Self], + alloc: A, + ) -> Vec + where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + { let mut v = Vec::with_capacity_in(s.len(), alloc); // SAFETY: // allocated above with the capacity of `s`, and initialize to `s.len()` in @@ -409,11 +442,12 @@ impl [T] { #[rustc_conversion_suggestion] #[stable(feature = "rust1", since = "1.0.0")] #[inline] - pub fn to_vec(&self) -> Vec + pub fn to_vec(&self) -> Vec where T: Clone, + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, { - self.to_vec_in(Global) + self.to_vec_in::(Global) } /// Copies `self` into a new `Vec` with an allocator. @@ -433,9 +467,13 @@ impl [T] { #[rustc_allow_incoherent_impl] #[inline] #[unstable(feature = "allocator_api", issue = "32838")] - pub fn to_vec_in(&self, alloc: A) -> Vec + pub fn to_vec_in( + &self, + alloc: A, + ) -> Vec where T: Clone, + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, { // N.B., see the `hack` module in this file for more details. hack::to_vec(self, alloc) @@ -458,7 +496,12 @@ impl [T] { #[rustc_allow_incoherent_impl] #[stable(feature = "rust1", since = "1.0.0")] #[inline] - pub fn into_vec(self: Box) -> Vec { + pub fn into_vec( + self: Box, + ) -> Vec + where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + { // N.B., see the `hack` module in this file for more details. 
hack::into_vec(self) } @@ -702,6 +745,7 @@ pub trait Join { fn join(slice: &Self, sep: Separator) -> Self::Output; } +// COOP_NOT_POSSIBLE #[cfg(not(no_global_oom_handling))] #[unstable(feature = "slice_concat_ext", issue = "27747")] impl> Concat for [V] { @@ -717,6 +761,7 @@ impl> Concat for [V] { } } +// COOP_NOT_POSSIBLE #[cfg(not(no_global_oom_handling))] #[unstable(feature = "slice_concat_ext", issue = "27747")] impl> Join<&T> for [V] { @@ -740,10 +785,11 @@ impl> Join<&T> for [V] { } } +// COOP_NOT_POSSIBLE #[cfg(not(no_global_oom_handling))] #[unstable(feature = "slice_concat_ext", issue = "27747")] impl> Join<&[T]> for [V] { - type Output = Vec; + type Output = Vec; fn join(slice: &Self, sep: &[T]) -> Vec { let mut iter = slice.iter(); @@ -769,14 +815,20 @@ impl> Join<&[T]> for [V] { //////////////////////////////////////////////////////////////////////////////// #[stable(feature = "rust1", since = "1.0.0")] -impl Borrow<[T]> for Vec { +impl Borrow<[T]> for Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn borrow(&self) -> &[T] { &self[..] } } #[stable(feature = "rust1", since = "1.0.0")] -impl BorrowMut<[T]> for Vec { +impl BorrowMut<[T]> for Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn borrow_mut(&mut self) -> &mut [T] { &mut self[..] } diff --git a/library/alloc/src/str.rs b/library/alloc/src/str.rs index afbe5cfaf8ef9..eedf856f5d253 100644 --- a/library/alloc/src/str.rs +++ b/library/alloc/src/str.rs @@ -14,6 +14,8 @@ use core::ptr; use core::str::pattern::{DoubleEndedSearcher, Pattern, ReverseSearcher, Searcher}; use core::unicode::conversions; +use crate::alloc; +use crate::alloc::Global; use crate::borrow::ToOwned; use crate::boxed::Box; use crate::slice::{Concat, Join, SliceIndex}; @@ -126,11 +128,15 @@ macro_rules! 
copy_slice_and_advance { // [T] and str both impl AsRef<[T]> for some T // => s.borrow().as_ref() and we always have slices #[cfg(not(no_global_oom_handling))] -fn join_generic_copy(slice: &[S], sep: &[T]) -> Vec +fn join_generic_copy( + slice: &[S], + sep: &[T], +) -> Vec where T: Copy, B: AsRef<[T]> + ?Sized, S: Borrow, + [(); alloc::co_alloc_metadata_num_slots_with_preference_global(COOP_PREFERRED)]:, { let sep_len = sep.len(); let mut iter = slice.iter(); diff --git a/library/alloc/src/vec/drain.rs b/library/alloc/src/vec/drain.rs index 2b1a787cc5499..a219ca302d4c4 100644 --- a/library/alloc/src/vec/drain.rs +++ b/library/alloc/src/vec/drain.rs @@ -3,7 +3,7 @@ use core::fmt; use core::iter::{FusedIterator, TrustedLen}; use core::mem::{self, ManuallyDrop, SizedTypeProperties}; use core::ptr::{self, NonNull}; -use core::slice::{self}; +use core::{alloc, slice}; use super::Vec; @@ -19,28 +19,39 @@ use super::Vec; /// let iter: std::vec::Drain<_> = v.drain(..); /// ``` #[stable(feature = "drain", since = "1.6.0")] +#[allow(unused_braces)] pub struct Drain< 'a, T: 'a, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + 'a = Global, -> { + const COOP_PREFERRED: bool = { SHORT_TERM_VEC_PREFERS_COOP!() }, +> where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ /// Index of tail to preserve pub(super) tail_start: usize, /// Length of tail pub(super) tail_len: usize, /// Current remaining range to remove pub(super) iter: slice::Iter<'a, T>, - pub(super) vec: NonNull>, + pub(super) vec: NonNull>, } #[stable(feature = "collection_debug", since = "1.17.0")] -impl fmt::Debug for Drain<'_, T, A> { +impl fmt::Debug + for Drain<'_, T, A, COOP_PREFERRED> +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("Drain").field(&self.iter.as_slice()).finish() } } -impl<'a, T, A: Allocator> Drain<'a, T, A> { +impl<'a, T, A: Allocator, const COOP_PREFERRED: bool> Drain<'a, T, A, COOP_PREFERRED> +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ /// Returns the remaining items of this iterator as a slice. 
/// /// # Examples @@ -139,19 +150,35 @@ impl<'a, T, A: Allocator> Drain<'a, T, A> { } #[stable(feature = "vec_drain_as_slice", since = "1.46.0")] -impl<'a, T, A: Allocator> AsRef<[T]> for Drain<'a, T, A> { +impl<'a, T, A: Allocator, const COOP_PREFERRED: bool> AsRef<[T]> for Drain<'a, T, A, COOP_PREFERRED> +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn as_ref(&self) -> &[T] { self.as_slice() } } #[stable(feature = "drain", since = "1.6.0")] -unsafe impl Sync for Drain<'_, T, A> {} +unsafe impl Sync + for Drain<'_, T, A, COOP_PREFERRED> +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ +} #[stable(feature = "drain", since = "1.6.0")] -unsafe impl Send for Drain<'_, T, A> {} +unsafe impl Send + for Drain<'_, T, A, COOP_PREFERRED> +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ +} #[stable(feature = "drain", since = "1.6.0")] -impl Iterator for Drain<'_, T, A> { +impl Iterator for Drain<'_, T, A, COOP_PREFERRED> +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ type Item = T; #[inline] @@ -165,7 +192,11 @@ impl Iterator for Drain<'_, T, A> { } #[stable(feature = "drain", since = "1.6.0")] -impl DoubleEndedIterator for Drain<'_, T, A> { +impl DoubleEndedIterator + for Drain<'_, T, A, COOP_PREFERRED> +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ #[inline] fn next_back(&mut self) -> Option { self.iter.next_back().map(|elt| unsafe { ptr::read(elt as *const _) }) @@ -173,12 +204,23 @@ impl DoubleEndedIterator for Drain<'_, T, A> { } #[stable(feature = "drain", since = "1.6.0")] -impl Drop for Drain<'_, T, A> { +impl Drop for Drain<'_, T, A, COOP_PREFERRED> +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn drop(&mut self) { /// Moves back the un-`Drain`ed elements to restore the original `Vec`. 
- struct DropGuard<'r, 'a, T, A: Allocator>(&'r mut Drain<'a, T, A>); - - impl<'r, 'a, T, A: Allocator> Drop for DropGuard<'r, 'a, T, A> { + struct DropGuard<'r, 'a, T, A: Allocator, const COOP_PREFERRED: bool>( + &'r mut Drain<'a, T, A, COOP_PREFERRED>, + ) + where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:; + + impl<'r, 'a, T, A: Allocator, const COOP_PREFERRED: bool> Drop + for DropGuard<'r, 'a, T, A, COOP_PREFERRED> + where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + { fn drop(&mut self) { if self.0.tail_len > 0 { unsafe { @@ -242,14 +284,26 @@ impl Drop for Drain<'_, T, A> { } #[stable(feature = "drain", since = "1.6.0")] -impl ExactSizeIterator for Drain<'_, T, A> { +impl ExactSizeIterator + for Drain<'_, T, A, COOP_PREFERRED> +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn is_empty(&self) -> bool { self.iter.is_empty() } } #[unstable(feature = "trusted_len", issue = "37572")] -unsafe impl TrustedLen for Drain<'_, T, A> {} +unsafe impl TrustedLen + for Drain<'_, T, A, COOP_PREFERRED> +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ +} #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for Drain<'_, T, A> {} +impl FusedIterator for Drain<'_, T, A, COOP_PREFERRED> where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: +{ +} diff --git a/library/alloc/src/vec/drain_filter.rs b/library/alloc/src/vec/drain_filter.rs index 8c03f1692d940..89baafca46729 100644 --- a/library/alloc/src/vec/drain_filter.rs +++ b/library/alloc/src/vec/drain_filter.rs @@ -1,7 +1,6 @@ use crate::alloc::{Allocator, Global}; use core::mem::{self, ManuallyDrop}; -use core::ptr; -use core::slice; +use core::{alloc, ptr, slice}; use super::Vec; @@ -25,10 +24,12 @@ pub struct DrainFilter< T, F, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, + const COOP_PREFERRED: bool = true, > where F: FnMut(&mut T) -> bool, + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, { - pub(super) vec: &'a mut Vec, + pub(super) vec: &'a mut Vec, /// The index of the item that will be inspected by the next call to `next`. pub(super) idx: usize, /// The number of items that have been drained (removed) thus far. @@ -45,9 +46,10 @@ pub struct DrainFilter< pub(super) panic_flag: bool, } -impl DrainFilter<'_, T, F, A> +impl DrainFilter<'_, T, F, A, COOP_PREFERRED> where F: FnMut(&mut T) -> bool, + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, { /// Returns a reference to the underlying allocator. 
#[unstable(feature = "allocator_api", issue = "32838")] @@ -113,9 +115,11 @@ where } #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] -impl Iterator for DrainFilter<'_, T, F, A> +impl Iterator + for DrainFilter<'_, T, F, A, COOP_PREFERRED> where F: FnMut(&mut T) -> bool, + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, { type Item = T; @@ -151,21 +155,26 @@ where } #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] -impl Drop for DrainFilter<'_, T, F, A> +impl Drop + for DrainFilter<'_, T, F, A, COOP_PREFERRED> where F: FnMut(&mut T) -> bool, + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, { fn drop(&mut self) { - struct BackshiftOnDrop<'a, 'b, T, F, A: Allocator> + struct BackshiftOnDrop<'a, 'b, T, F, A: Allocator, const COOP_PREFERRED: bool> where F: FnMut(&mut T) -> bool, + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, { - drain: &'b mut DrainFilter<'a, T, F, A>, + drain: &'b mut DrainFilter<'a, T, F, A, COOP_PREFERRED>, } - impl<'a, 'b, T, F, A: Allocator> Drop for BackshiftOnDrop<'a, 'b, T, F, A> + impl<'a, 'b, T, F, A: Allocator, const COOP_PREFERRED: bool> Drop + for BackshiftOnDrop<'a, 'b, T, F, A, COOP_PREFERRED> where F: FnMut(&mut T) -> bool, + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, { fn drop(&mut self) { unsafe { diff --git a/library/alloc/src/vec/in_place_collect.rs b/library/alloc/src/vec/in_place_collect.rs index 87d61deb1eb2f..3afbc754061c0 100644 --- a/library/alloc/src/vec/in_place_collect.rs +++ b/library/alloc/src/vec/in_place_collect.rs @@ -137,6 +137,8 @@ //! } //! vec.truncate(write_idx); //! ``` +use crate::alloc::Global; +use core::alloc; use core::iter::{InPlaceIterable, SourceIter, TrustedRandomAccessNoCoerce}; use core::mem::{self, ManuallyDrop, SizedTypeProperties}; use core::ptr::{self}; @@ -150,9 +152,11 @@ pub(super) trait InPlaceIterableMarker {} impl InPlaceIterableMarker for T where T: InPlaceIterable {} -impl SpecFromIter for Vec +#[allow(unused_braces)] +impl SpecFromIter for Vec where I: Iterator + SourceIter + InPlaceIterableMarker, + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, { default fn from_iter(mut iterator: I) -> Self { // See "Layout constraints" section in the module documentation. We rely on const diff --git a/library/alloc/src/vec/in_place_drop.rs b/library/alloc/src/vec/in_place_drop.rs index 25ca33c6a7bf0..bf2d004e9db39 100644 --- a/library/alloc/src/vec/in_place_drop.rs +++ b/library/alloc/src/vec/in_place_drop.rs @@ -1,3 +1,4 @@ +use crate::alloc::Global; use core::ptr::{self}; use core::slice::{self}; @@ -34,6 +35,7 @@ pub(super) struct InPlaceDstBufDrop { impl Drop for InPlaceDstBufDrop { #[inline] fn drop(&mut self) { - unsafe { super::Vec::from_raw_parts(self.ptr, self.len, self.cap) }; + // false = no need for co-alloc metadata, since it would get lost once converted to Box. 
+ unsafe { super::Vec::::from_raw_parts(self.ptr, self.len, self.cap) }; } } diff --git a/library/alloc/src/vec/into_iter.rs b/library/alloc/src/vec/into_iter.rs index 37966007eb7e4..3555c64af9f8e 100644 --- a/library/alloc/src/vec/into_iter.rs +++ b/library/alloc/src/vec/into_iter.rs @@ -4,8 +4,6 @@ use crate::alloc::{Allocator, Global}; #[cfg(not(no_global_oom_handling))] use crate::collections::VecDeque; use crate::raw_vec::RawVec; -use core::array; -use core::fmt; use core::iter::{ FusedIterator, InPlaceIterable, SourceIter, TrustedLen, TrustedRandomAccessNoCoerce, }; @@ -15,6 +13,7 @@ use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties}; use core::ops::Deref; use core::ptr::{self, NonNull}; use core::slice::{self}; +use core::{alloc, array, fmt}; /// An iterator that moves out of a vector. /// @@ -29,10 +28,14 @@ use core::slice::{self}; /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[rustc_insignificant_dtor] +#[allow(unused_braces)] pub struct IntoIter< T, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, -> { + const COOP_PREFERRED: bool = { SHORT_TERM_VEC_PREFERS_COOP!() }, +> where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ pub(super) buf: NonNull, pub(super) phantom: PhantomData, pub(super) cap: usize, @@ -46,13 +49,20 @@ pub struct IntoIter< } #[stable(feature = "vec_intoiter_debug", since = "1.13.0")] -impl fmt::Debug for IntoIter { +impl fmt::Debug + for IntoIter +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("IntoIter").field(&self.as_slice()).finish() } } -impl IntoIter { +impl IntoIter +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ /// Returns the remaining items of this iterator as a slice. /// /// # Examples @@ -121,7 +131,17 @@ impl IntoIter { // struct and then overwriting &mut self. // this creates less assembly self.cap = 0; - self.buf = unsafe { NonNull::new_unchecked(RawVec::NEW.ptr()) }; + self.buf = unsafe { + // @FIXME The below if COOP_PREFERRED {..} else {..} + // branching exists, because the following fails. Otherwise we'd have a snowball effect of wide spread of where...Global... 
+ // + // NonNull::new_unchecked(RawVec::::NEW.ptr()) + if COOP_PREFERRED { + NonNull::new_unchecked(RawVec::::NEW.ptr()) + } else { + NonNull::new_unchecked(RawVec::::NEW.ptr()) + } + }; self.ptr = self.buf.as_ptr(); self.end = self.buf.as_ptr(); @@ -141,7 +161,7 @@ impl IntoIter { #[cfg(not(no_global_oom_handling))] #[inline] - pub(crate) fn into_vecdeque(self) -> VecDeque { + pub(crate) fn into_vecdeque(self) -> VecDeque { // Keep our `Drop` impl from dropping the elements and the allocator let mut this = ManuallyDrop::new(self); @@ -168,19 +188,35 @@ impl IntoIter { } #[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")] -impl AsRef<[T]> for IntoIter { +impl AsRef<[T]> for IntoIter +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn as_ref(&self) -> &[T] { self.as_slice() } } #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl Send for IntoIter {} +unsafe impl Send + for IntoIter +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ +} #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl Sync for IntoIter {} +unsafe impl Sync + for IntoIter +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ +} #[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for IntoIter { +impl Iterator for IntoIter +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ type Item = T; #[inline] @@ -294,7 +330,11 @@ impl Iterator for IntoIter { } #[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator for IntoIter { +impl DoubleEndedIterator + for IntoIter +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ #[inline] fn next_back(&mut self) -> Option { if self.end == self.ptr { @@ -335,17 +375,29 @@ impl DoubleEndedIterator for IntoIter { } #[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator for IntoIter { +impl ExactSizeIterator + for IntoIter +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn is_empty(&self) -> bool { self.ptr == self.end } } #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for IntoIter {} +impl FusedIterator for IntoIter where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: +{ +} #[unstable(feature = "trusted_len", issue = "37572")] -unsafe impl TrustedLen for IntoIter {} +unsafe impl TrustedLen + for IntoIter +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ +} #[doc(hidden)] #[unstable(issue = "none", feature = "std_internals")] @@ -361,19 +413,32 @@ impl NonDrop for T {} #[unstable(issue = "none", feature = "std_internals")] // TrustedRandomAccess (without NoCoerce) must not be implemented because // subtypes/supertypes of `T` might not be `NonDrop` -unsafe impl TrustedRandomAccessNoCoerce for IntoIter +unsafe impl TrustedRandomAccessNoCoerce + for IntoIter where T: NonDrop, + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, { const MAY_HAVE_SIDE_EFFECT: bool = false; } #[cfg(not(no_global_oom_handling))] #[stable(feature = "vec_into_iter_clone", since = "1.8.0")] -impl Clone for IntoIter { +impl Clone + for IntoIter +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ #[cfg(not(test))] fn clone(&self) -> Self { - self.as_slice().to_vec_in(self.alloc.deref().clone()).into_iter() + // @FIXME Remove the following extras - used for type checks only + let slice = 
self.as_slice(); + let vec: crate::vec::Vec = + slice.to_vec_in::(self.alloc.deref().clone()); + let _iter: IntoIter = vec.into_iter(); + + //self.as_slice().to_vec_in::(self.alloc.deref().clone()).into_iter() + loop {} } #[cfg(test)] fn clone(&self) -> Self { @@ -382,17 +447,33 @@ impl Clone for IntoIter { } #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter { +unsafe impl<#[may_dangle] T, A: Allocator, const COOP_PREFERRED: bool> Drop + for IntoIter +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn drop(&mut self) { - struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter); - - impl Drop for DropGuard<'_, T, A> { + struct DropGuard<'a, T, A: Allocator, const COOP_PREFERRED: bool>( + &'a mut IntoIter, + ) + where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:; + + impl Drop for DropGuard<'_, T, A, COOP_PREFERRED> + where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + { fn drop(&mut self) { unsafe { // `IntoIter::alloc` is not used anymore after this and will be dropped by RawVec let alloc = ManuallyDrop::take(&mut self.0.alloc); // RawVec handles deallocation - let _ = RawVec::from_raw_parts_in(self.0.buf.as_ptr(), self.0.cap, alloc); + // @FIXME pass true instead of COOP_PREFERRED - use e.g.: if COOP_PREFERRED {let _ = RawVec::::from_raw_parts_in(..) } else { let _ = from_raw_parts_in_coop(...)} } + let _ = RawVec::::from_raw_parts_in( + self.0.buf.as_ptr(), + self.0.cap, + alloc, + ); } } } @@ -410,11 +491,20 @@ unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter { // also refer to the vec::in_place_collect module documentation to get an overview #[unstable(issue = "none", feature = "inplace_iteration")] #[doc(hidden)] -unsafe impl InPlaceIterable for IntoIter {} +unsafe impl InPlaceIterable + for IntoIter +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ +} #[unstable(issue = "none", feature = "inplace_iteration")] #[doc(hidden)] -unsafe impl SourceIter for IntoIter { +unsafe impl SourceIter + for IntoIter +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ type Source = Self; #[inline] diff --git a/library/alloc/src/vec/mod.rs b/library/alloc/src/vec/mod.rs index a07f3da78d33e..966a113769dec 100644 --- a/library/alloc/src/vec/mod.rs +++ b/library/alloc/src/vec/mod.rs @@ -54,6 +54,7 @@ #![stable(feature = "rust1", since = "1.0.0")] #[cfg(not(no_global_oom_handling))] +use core::alloc; use core::cmp; use core::cmp::Ordering; use core::convert::TryFrom; @@ -148,6 +149,16 @@ use self::spec_extend::SpecExtend; #[cfg(not(no_global_oom_handling))] mod spec_extend; +/// Default `Vec`, `DefVec`, `DecVeque`, `DefDecVeq` "cooperation" (`COOP_PREFERRED`) generic parameter. +#[unstable(feature = "global_co_alloc_def", issue = "none")] +// pub const DEFAULT_COOP_PREFERRED: bool = true; +#[macro_export] +macro_rules! DEFAULT_COOP_PREFERRED { + () => { + true + }; +} + /// A contiguous growable array type, written as `Vec`, short for 'vector'. 
/// /// # Examples @@ -397,14 +408,40 @@ mod spec_extend; #[stable(feature = "rust1", since = "1.0.0")] #[cfg_attr(not(test), rustc_diagnostic_item = "Vec")] #[rustc_insignificant_dtor] -pub struct Vec { - buf: RawVec, +#[allow(unused_braces)] +pub struct Vec< + T, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, + //@FIXME: #[unstable(feature ="global_co_alloc_vec", issue="none")] + const COOP_PREFERRED: bool = { DEFAULT_COOP_PREFERRED!() }, +> where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ + buf: RawVec, len: usize, } -//////////////////////////////////////////////////////////////////////////////// -// Inherent methods -//////////////////////////////////////////////////////////////////////////////// +/// "Cooperative" Vector. Preferring co-alloc API (if Global alloc supports it). +#[unstable(feature = "global_co_alloc_covec", issue = "none")] +pub type CoVec = Vec; + +/// "Plain" Vec. Not "cooperative" - not carrying extra data to assist the allocator. +/// FIXME after cleanup, see if we still use this in core:: and/or alloc:: +#[unstable(feature = "global_co_alloc_plvec", issue = "none")] +pub type PlVec = Vec; + +/// "Default" Vec. Either "cooperative" or not - as specified by `DEFAULT_COOP_PREFERRED`. The +/// difference to `Vec` (used without specifying `COOP_PREFERRED`): `DefVec` indicates that the +/// author considered using `CoVec` or `PlVec`, but left it to default instead. +#[unstable(feature = "global_co_alloc_defvec", issue = "none")] +#[allow(unused_braces)] +pub type DefVec = Vec; + +/// "Weighted cooperative" Vec. Weight means how much it wants to cooperate (with the allocator). 0 +/// = always pack; u8::MAX = always cooperate (if `Global` supports it). +/// @FIXME A `pub const` threshold. +#[unstable(feature = "global_co_alloc_vec", issue = "none")] +pub type WeVec = Vec 127 }>; impl Vec { /// Constructs a new, empty `Vec`. @@ -422,6 +459,25 @@ impl Vec { #[stable(feature = "rust1", since = "1.0.0")] #[must_use] pub const fn new() -> Self { + #[allow(unused_braces)] + Vec::::new_co() + } +} + +//////////////////////////////////////////////////////////////////////////////// +// Inherent methods +//////////////////////////////////////////////////////////////////////////////// + +impl Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ + /// Like new(), but it respects COOP_PREFERRED. + #[inline] + #[rustc_const_stable(feature = "const_vec_new_co", since = "1.60.0")] //@FIXME This is `rustc_const_stable`, so that String::new() can be const and can call this. + #[unstable(feature = "vec_new_co", reason = "confirm_or_fix_the_function_name", issue = "none")] + #[must_use] + pub const fn new_co() -> Self { Vec { buf: RawVec::NEW, len: 0 } } @@ -593,7 +649,10 @@ impl Vec { } } -impl Vec { +impl Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ /// Constructs a new, empty `Vec`. /// /// The vector will not allocate until elements are pushed onto it. @@ -1606,14 +1665,20 @@ impl Vec { // This drop guard will be invoked when predicate or `drop` of element panicked. // It shifts unchecked elements to cover holes and `set_len` to the correct length. // In cases when predicate and `drop` never panick, it will be optimized out. 
- struct BackshiftOnDrop<'a, T, A: Allocator> { - v: &'a mut Vec, + struct BackshiftOnDrop<'a, T, A: Allocator, const VEC_IS_COOP: bool = true> + where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(VEC_IS_COOP)]:, + { + v: &'a mut Vec, processed_len: usize, deleted_cnt: usize, original_len: usize, } - impl Drop for BackshiftOnDrop<'_, T, A> { + impl Drop for BackshiftOnDrop<'_, T, A, VEC_IS_COOP> + where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(VEC_IS_COOP)]:, + { fn drop(&mut self) { if self.deleted_cnt > 0 { // SAFETY: Trailing unchecked items must be valid since we never touch them. @@ -1632,14 +1697,20 @@ impl Vec { } } - let mut g = BackshiftOnDrop { v: self, processed_len: 0, deleted_cnt: 0, original_len }; + let mut g = BackshiftOnDrop:: { + v: self, + processed_len: 0, + deleted_cnt: 0, + original_len, + }; - fn process_loop( + fn process_loop( original_len: usize, f: &mut F, - g: &mut BackshiftOnDrop<'_, T, A>, + g: &mut BackshiftOnDrop<'_, T, A, VEC_IS_COOP>, ) where F: FnMut(&mut T) -> bool, + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(VEC_IS_COOP)]:, { while g.processed_len != original_len { // SAFETY: Unchecked element must be valid. @@ -1670,10 +1741,10 @@ impl Vec { } // Stage 1: Nothing was deleted. - process_loop::(original_len, &mut f, &mut g); + process_loop::(original_len, &mut f, &mut g); // Stage 2: Some elements were deleted. - process_loop::(original_len, &mut f, &mut g); + process_loop::(original_len, &mut f, &mut g); // All item are processed. This can be optimized to `set_len` by LLVM. drop(g); @@ -1732,7 +1803,10 @@ impl Vec { } /* INVARIANT: vec.len() > read >= write > write-1 >= 0 */ - struct FillGapOnDrop<'a, T, A: core::alloc::Allocator> { + struct FillGapOnDrop<'a, T, A: core::alloc::Allocator, const COOP_PREFERRED: bool> + where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + { /* Offset of the element we want to check if it is duplicate */ read: usize, @@ -1741,10 +1815,14 @@ impl Vec { write: usize, /* The Vec that would need correction if `same_bucket` panicked */ - vec: &'a mut Vec, + vec: &'a mut Vec, } - impl<'a, T, A: core::alloc::Allocator> Drop for FillGapOnDrop<'a, T, A> { + impl<'a, T, A: core::alloc::Allocator, const COOP_PREFERRED: bool> Drop + for FillGapOnDrop<'a, T, A, COOP_PREFERRED> + where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + { fn drop(&mut self) { /* This code gets executed when `same_bucket` panics */ @@ -1986,7 +2064,7 @@ impl Vec { /// assert_eq!(v, &[]); /// ``` #[stable(feature = "drain", since = "1.6.0")] - pub fn drain(&mut self, range: R) -> Drain<'_, T, A> + pub fn drain(&mut self, range: R) -> Drain<'_, T, A, COOP_PREFERRED> where R: RangeBounds, { @@ -2337,7 +2415,10 @@ impl Vec { } } -impl Vec { +impl Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ /// Resizes the `Vec` in-place so that `len` is equal to `new_len`. /// /// If `new_len` is greater than `len`, the `Vec` is extended by the @@ -2436,7 +2517,10 @@ impl Vec { } } -impl Vec<[T; N], A> { +impl Vec<[T; N], A, COOP_PREFERRED> +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ /// Takes a `Vec<[T; N]>` and flattens it into a `Vec`. 
/// /// # Panics @@ -2459,7 +2543,7 @@ impl Vec<[T; N], A> { /// assert_eq!(flattened.pop(), Some(6)); /// ``` #[unstable(feature = "slice_flatten", issue = "95629")] - pub fn into_flattened(self) -> Vec { + pub fn into_flattened(self) -> Vec { let (ptr, len, cap, alloc) = self.into_raw_parts_with_alloc(); let (new_len, new_cap) = if T::IS_ZST { (len.checked_mul(N).expect("vec len overflow"), usize::MAX) @@ -2477,7 +2561,9 @@ impl Vec<[T; N], A> { // - `new_cap` refers to the same sized allocation as `cap` because // `new_cap * size_of::()` == `cap * size_of::<[T; N]>()` // - `len` <= `cap`, so `len * N` <= `cap * N`. - unsafe { Vec::::from_raw_parts_in(ptr.cast(), new_len, new_cap, alloc) } + unsafe { + Vec::::from_raw_parts_in(ptr.cast(), new_len, new_cap, alloc) + } } } @@ -2497,7 +2583,10 @@ impl ExtendWith for ExtendElement { } } -impl Vec { +impl Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ #[cfg(not(no_global_oom_handling))] /// Extend the vector by `n` values, using the given generator. fn extend_with>(&mut self, n: usize, mut value: E) { @@ -2529,7 +2618,10 @@ impl Vec { } } -impl Vec { +impl Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ /// Removes consecutive repeated elements in the vector according to the /// [`PartialEq`] trait implementation. /// @@ -2565,7 +2657,14 @@ pub fn from_elem(elem: T, n: usize) -> Vec { #[doc(hidden)] #[cfg(not(no_global_oom_handling))] #[unstable(feature = "allocator_api", issue = "32838")] -pub fn from_elem_in(elem: T, n: usize, alloc: A) -> Vec { +pub fn from_elem_in( + elem: T, + n: usize, + alloc: A, +) -> Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ ::from_elem(elem, n, alloc) } @@ -2577,7 +2676,11 @@ trait ExtendFromWithinSpec { unsafe fn spec_extend_from_within(&mut self, src: Range); } -impl ExtendFromWithinSpec for Vec { +impl ExtendFromWithinSpec + for Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ default unsafe fn spec_extend_from_within(&mut self, src: Range) { // SAFETY: // - len is increased only after initializing elements @@ -2596,7 +2699,11 @@ impl ExtendFromWithinSpec for Vec { } } -impl ExtendFromWithinSpec for Vec { +impl ExtendFromWithinSpec + for Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ unsafe fn spec_extend_from_within(&mut self, src: Range) { let count = src.len(); { @@ -2629,7 +2736,10 @@ impl ExtendFromWithinSpec for Vec { //////////////////////////////////////////////////////////////////////////////// #[stable(feature = "rust1", since = "1.0.0")] -impl ops::Deref for Vec { +impl ops::Deref for Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ type Target = [T]; #[inline] @@ -2639,7 +2749,10 @@ impl ops::Deref for Vec { } #[stable(feature = "rust1", since = "1.0.0")] -impl ops::DerefMut for Vec { +impl ops::DerefMut for Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ #[inline] fn deref_mut(&mut self) -> &mut [T] { unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), self.len) } @@ -2648,7 +2761,10 @@ impl ops::DerefMut for Vec { #[cfg(not(no_global_oom_handling))] #[stable(feature = "rust1", since = "1.0.0")] -impl Clone for Vec { +impl Clone for Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ 
#[cfg(not(test))] fn clone(&self) -> Self { let alloc = self.allocator().clone(); @@ -2683,7 +2799,10 @@ impl Clone for Vec { /// assert_eq!(b.hash_one(v), b.hash_one(s)); /// ``` #[stable(feature = "rust1", since = "1.0.0")] -impl Hash for Vec { +impl Hash for Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ #[inline] fn hash(&self, state: &mut H) { Hash::hash(&**self, state) @@ -2695,7 +2814,11 @@ impl Hash for Vec { message = "vector indices are of type `usize` or ranges of `usize`", label = "vector indices are of type `usize` or ranges of `usize`" )] -impl, A: Allocator> Index for Vec { +impl, A: Allocator, const COOP_PREFERRED: bool> Index + for Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ type Output = I::Output; #[inline] @@ -2709,7 +2832,11 @@ impl, A: Allocator> Index for Vec { message = "vector indices are of type `usize` or ranges of `usize`", label = "vector indices are of type `usize` or ranges of `usize`" )] -impl, A: Allocator> IndexMut for Vec { +impl, A: Allocator, const COOP_PREFERRED: bool> IndexMut + for Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ #[inline] fn index_mut(&mut self, index: I) -> &mut Self::Output { IndexMut::index_mut(&mut **self, index) @@ -2718,17 +2845,24 @@ impl, A: Allocator> IndexMut for Vec { #[cfg(not(no_global_oom_handling))] #[stable(feature = "rust1", since = "1.0.0")] -impl FromIterator for Vec { +#[allow(unused_braces)] +impl FromIterator for Vec +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ #[inline] - fn from_iter>(iter: I) -> Vec { + fn from_iter>(iter: I) -> Vec { >::from_iter(iter.into_iter()) } } #[stable(feature = "rust1", since = "1.0.0")] -impl IntoIterator for Vec { +impl IntoIterator for Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ type Item = T; - type IntoIter = IntoIter; + type IntoIter = IntoIter; /// Creates a consuming iterator, that is, one that moves each value out of /// the vector (from start to end). 
The vector cannot be used after calling @@ -2771,7 +2905,10 @@ impl IntoIterator for Vec { } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T, A: Allocator> IntoIterator for &'a Vec { +impl<'a, T, A: Allocator, const COOP_PREFERRED: bool> IntoIterator for &'a Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ type Item = &'a T; type IntoIter = slice::Iter<'a, T>; @@ -2781,7 +2918,11 @@ impl<'a, T, A: Allocator> IntoIterator for &'a Vec { } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T, A: Allocator> IntoIterator for &'a mut Vec { +impl<'a, T, A: Allocator, const COOP_PREFERRED: bool> IntoIterator + for &'a mut Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ type Item = &'a mut T; type IntoIter = slice::IterMut<'a, T>; @@ -2792,7 +2933,10 @@ impl<'a, T, A: Allocator> IntoIterator for &'a mut Vec { #[cfg(not(no_global_oom_handling))] #[stable(feature = "rust1", since = "1.0.0")] -impl Extend for Vec { +impl Extend for Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ #[inline] fn extend>(&mut self, iter: I) { >::spec_extend(self, iter.into_iter()) @@ -2809,7 +2953,10 @@ impl Extend for Vec { } } -impl Vec { +impl Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ // leaf method to which various SpecFrom/SpecExtend implementations delegate when // they have no further optimizations to apply #[cfg(not(no_global_oom_handling))] @@ -2907,10 +3054,15 @@ impl Vec { #[cfg(not(no_global_oom_handling))] #[inline] #[stable(feature = "vec_splice", since = "1.21.0")] - pub fn splice(&mut self, range: R, replace_with: I) -> Splice<'_, I::IntoIter, A> + pub fn splice( + &mut self, + range: R, + replace_with: I, + ) -> Splice<'_, I::IntoIter, A, COOP_PREFERRED> where R: RangeBounds, I: IntoIterator, + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(true)]:, { Splice { drain: self.drain(range), replace_with: replace_with.into_iter() } } @@ -2960,9 +3112,10 @@ impl Vec { /// assert_eq!(odds, vec![1, 3, 5, 9, 11, 13, 15]); /// ``` #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] - pub fn drain_filter(&mut self, filter: F) -> DrainFilter<'_, T, F, A> + pub fn drain_filter(&mut self, filter: F) -> DrainFilter<'_, T, F, A, COOP_PREFERRED> where F: FnMut(&mut T) -> bool, + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(true)]:, { let old_len = self.len(); @@ -2983,7 +3136,11 @@ impl Vec { /// [`copy_from_slice`]: slice::copy_from_slice #[cfg(not(no_global_oom_handling))] #[stable(feature = "extend_ref", since = "1.2.0")] -impl<'a, T: Copy + 'a, A: Allocator + 'a> Extend<&'a T> for Vec { +impl<'a, T: Copy + 'a, A: Allocator + 'a, const COOP_PREFERRED: bool> Extend<&'a T> + for Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn extend>(&mut self, iter: I) { self.spec_extend(iter.into_iter()) } @@ -3001,7 +3158,11 @@ impl<'a, T: Copy + 'a, A: Allocator + 'a> Extend<&'a T> for Vec { /// Implements comparison of vectors, [lexicographically](core::cmp::Ord#lexicographical-comparison). 
#[stable(feature = "rust1", since = "1.0.0")] -impl PartialOrd for Vec { +impl PartialOrd + for Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ #[inline] fn partial_cmp(&self, other: &Self) -> Option { PartialOrd::partial_cmp(&**self, &**other) @@ -3009,11 +3170,17 @@ impl PartialOrd for Vec { } #[stable(feature = "rust1", since = "1.0.0")] -impl Eq for Vec {} +impl Eq for Vec where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: +{ +} /// Implements ordering of vectors, [lexicographically](core::cmp::Ord#lexicographical-comparison). #[stable(feature = "rust1", since = "1.0.0")] -impl Ord for Vec { +impl Ord for Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ #[inline] fn cmp(&self, other: &Self) -> Ordering { Ord::cmp(&**self, &**other) @@ -3021,7 +3188,11 @@ impl Ord for Vec { } #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl<#[may_dangle] T, A: Allocator> Drop for Vec { +unsafe impl<#[may_dangle] T, A: Allocator, const COOP_PREFERRED: bool> Drop + for Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn drop(&mut self) { unsafe { // use drop for [T] @@ -3035,45 +3206,66 @@ unsafe impl<#[may_dangle] T, A: Allocator> Drop for Vec { #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_unstable(feature = "const_default_impls", issue = "87864")] -impl const Default for Vec { +impl const Default for Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ /// Creates an empty `Vec`. /// /// The vector will not allocate until elements are pushed onto it. - fn default() -> Vec { - Vec::new() + fn default() -> Vec { + Vec::new_co() } } #[stable(feature = "rust1", since = "1.0.0")] -impl fmt::Debug for Vec { +impl fmt::Debug + for Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Debug::fmt(&**self, f) } } #[stable(feature = "rust1", since = "1.0.0")] -impl AsRef> for Vec { - fn as_ref(&self) -> &Vec { +impl AsRef> + for Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ + fn as_ref(&self) -> &Vec { self } } #[stable(feature = "vec_as_mut", since = "1.5.0")] -impl AsMut> for Vec { - fn as_mut(&mut self) -> &mut Vec { +impl AsMut> + for Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ + fn as_mut(&mut self) -> &mut Vec { self } } #[stable(feature = "rust1", since = "1.0.0")] -impl AsRef<[T]> for Vec { +impl AsRef<[T]> for Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn as_ref(&self) -> &[T] { self } } #[stable(feature = "vec_as_mut", since = "1.5.0")] -impl AsMut<[T]> for Vec { +impl AsMut<[T]> for Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn as_mut(&mut self) -> &mut [T] { self } @@ -3081,7 +3273,8 @@ impl AsMut<[T]> for Vec { #[cfg(not(no_global_oom_handling))] #[stable(feature = "rust1", since = "1.0.0")] -impl From<&[T]> for Vec { +#[allow(unused_braces)] +impl From<&[T]> for Vec { /// Allocate a `Vec` and fill it by cloning `s`'s items. 
/// /// # Examples @@ -3101,7 +3294,8 @@ impl From<&[T]> for Vec { #[cfg(not(no_global_oom_handling))] #[stable(feature = "vec_from_mut", since = "1.19.0")] -impl From<&mut [T]> for Vec { +#[allow(unused_braces)] +impl From<&mut [T]> for Vec { /// Allocate a `Vec` and fill it by cloning `s`'s items. /// /// # Examples @@ -3121,7 +3315,8 @@ impl From<&mut [T]> for Vec { #[cfg(not(no_global_oom_handling))] #[stable(feature = "vec_from_array", since = "1.44.0")] -impl From<[T; N]> for Vec { +#[allow(unused_braces)] +impl From<[T; N]> for Vec { /// Allocate a `Vec` and move `s`'s items into it. /// /// # Examples @@ -3144,7 +3339,8 @@ impl From<[T; N]> for Vec { } #[stable(feature = "vec_from_cow_slice", since = "1.14.0")] -impl<'a, T> From> for Vec +#[allow(unused_braces)] +impl<'a, T> From> for Vec where [T]: ToOwned>, { @@ -3170,7 +3366,10 @@ where // note: test pulls in std, which causes errors here #[cfg(not(test))] #[stable(feature = "vec_from_box", since = "1.18.0")] -impl From> for Vec { +impl From> for Vec +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ /// Convert a boxed slice into a vector by transferring ownership of /// the existing heap allocation. /// @@ -3189,7 +3388,10 @@ impl From> for Vec { #[cfg(not(no_global_oom_handling))] #[cfg(not(test))] #[stable(feature = "box_from_vec", since = "1.20.0")] -impl From> for Box<[T], A> { +impl From> for Box<[T], A> +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ /// Convert a vector into a boxed slice. /// /// If `v` has excess capacity, its items will be moved into a @@ -3208,14 +3410,15 @@ impl From> for Box<[T], A> { /// /// assert_eq!(Box::from(vec), vec![1, 2, 3].into_boxed_slice()); /// ``` - fn from(v: Vec) -> Self { + fn from(v: Vec) -> Self { v.into_boxed_slice() } } #[cfg(not(no_global_oom_handling))] #[stable(feature = "rust1", since = "1.0.0")] -impl From<&str> for Vec { +#[allow(unused_braces)] +impl From<&str> for Vec { /// Allocate a `Vec` and fill it with a UTF-8 string. /// /// # Examples @@ -3229,8 +3432,12 @@ impl From<&str> for Vec { } #[stable(feature = "array_try_from_vec", since = "1.48.0")] -impl TryFrom> for [T; N] { - type Error = Vec; +impl TryFrom> + for [T; N] +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ + type Error = Vec; /// Gets the entire contents of the `Vec` as an array, /// if its size exactly matches that of the requested array. @@ -3258,7 +3465,7 @@ impl TryFrom> for [T; N] { /// assert_eq!(a, b' '); /// assert_eq!(b, b'd'); /// ``` - fn try_from(mut vec: Vec) -> Result<[T; N], Vec> { + fn try_from(mut vec: Vec) -> Result<[T; N], Vec> { if vec.len() != N { return Err(vec); } diff --git a/library/alloc/src/vec/partial_eq.rs b/library/alloc/src/vec/partial_eq.rs index b0cf72577a1be..bcf52b7333218 100644 --- a/library/alloc/src/vec/partial_eq.rs +++ b/library/alloc/src/vec/partial_eq.rs @@ -1,3 +1,4 @@ +//use core::alloc; use crate::alloc::Allocator; #[cfg(not(no_global_oom_handling))] use crate::borrow::Cow; @@ -5,12 +6,12 @@ use crate::borrow::Cow; use super::Vec; macro_rules! __impl_slice_eq1 { - ([$($vars:tt)*] $lhs:ty, $rhs:ty $(where $ty:ty: $bound:ident)?, #[$stability:meta]) => { + ([$($vars:tt)*] $lhs:ty, $rhs:ty, #[$stability:meta], $($constraints:tt)*) => { #[$stability] impl PartialEq<$rhs> for $lhs where T: PartialEq, - $($ty: $bound)? + $($constraints)* { #[inline] fn eq(&self, other: &$rhs) -> bool { self[..] == other[..] 
} @@ -20,21 +21,21 @@ macro_rules! __impl_slice_eq1 { } } -__impl_slice_eq1! { [A1: Allocator, A2: Allocator] Vec, Vec, #[stable(feature = "rust1", since = "1.0.0")] } -__impl_slice_eq1! { [A: Allocator] Vec, &[U], #[stable(feature = "rust1", since = "1.0.0")] } -__impl_slice_eq1! { [A: Allocator] Vec, &mut [U], #[stable(feature = "rust1", since = "1.0.0")] } -__impl_slice_eq1! { [A: Allocator] &[T], Vec, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] } -__impl_slice_eq1! { [A: Allocator] &mut [T], Vec, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] } -__impl_slice_eq1! { [A: Allocator] Vec, [U], #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")] } -__impl_slice_eq1! { [A: Allocator] [T], Vec, #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")] } +__impl_slice_eq1! { [A1: Allocator, A2: Allocator, const COOP_PREFERRED1: bool, const COOP_PREFERRED2: bool] Vec, Vec, #[stable(feature = "rust1", since = "1.0.0")], [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED1)]:, [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED2)]: } +__impl_slice_eq1! { [A: Allocator, const COOP_PREFERRED: bool] Vec, &[U], #[stable(feature = "rust1", since = "1.0.0")], [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: } +__impl_slice_eq1! { [A: Allocator, const COOP_PREFERRED: bool] Vec, &mut [U], #[stable(feature = "rust1", since = "1.0.0")], [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: } +__impl_slice_eq1! { [A: Allocator, const COOP_PREFERRED: bool] &[T], Vec, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")], [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: } +__impl_slice_eq1! { [A: Allocator, const COOP_PREFERRED: bool] &mut [T], Vec, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")], [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: } +__impl_slice_eq1! { [A: Allocator, const COOP_PREFERRED: bool] Vec, [U], #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")], [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: } +__impl_slice_eq1! { [A: Allocator, const COOP_PREFERRED: bool] [T], Vec, #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")], [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: } #[cfg(not(no_global_oom_handling))] -__impl_slice_eq1! { [A: Allocator] Cow<'_, [T]>, Vec where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] } +__impl_slice_eq1! { [A: Allocator, const COOP_PREFERRED: bool] Cow<'_, [T]>, Vec, #[stable(feature = "rust1", since = "1.0.0")], T: Clone, [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: } #[cfg(not(no_global_oom_handling))] -__impl_slice_eq1! { [] Cow<'_, [T]>, &[U] where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] } +__impl_slice_eq1! { [] Cow<'_, [T]>, &[U], #[stable(feature = "rust1", since = "1.0.0")], T: Clone } #[cfg(not(no_global_oom_handling))] -__impl_slice_eq1! { [] Cow<'_, [T]>, &mut [U] where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] } -__impl_slice_eq1! { [A: Allocator, const N: usize] Vec, [U; N], #[stable(feature = "rust1", since = "1.0.0")] } -__impl_slice_eq1! { [A: Allocator, const N: usize] Vec, &[U; N], #[stable(feature = "rust1", since = "1.0.0")] } +__impl_slice_eq1! 
{ [] Cow<'_, [T]>, &mut [U], #[stable(feature = "rust1", since = "1.0.0")], T: Clone } +__impl_slice_eq1! { [A: Allocator, const COOP_PREFERRED: bool, const N: usize] Vec, [U; N], #[stable(feature = "rust1", since = "1.0.0")], [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: } +__impl_slice_eq1! { [A: Allocator, const COOP_PREFERRED: bool, const N: usize] Vec, &[U; N], #[stable(feature = "rust1", since = "1.0.0")], [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: } // NOTE: some less important impls are omitted to reduce code bloat // FIXME(Centril): Reconsider this? diff --git a/library/alloc/src/vec/spec_extend.rs b/library/alloc/src/vec/spec_extend.rs index 56065ce565bfc..73cf325889290 100644 --- a/library/alloc/src/vec/spec_extend.rs +++ b/library/alloc/src/vec/spec_extend.rs @@ -1,4 +1,5 @@ use crate::alloc::Allocator; +use core::alloc; use core::iter::TrustedLen; use core::slice::{self}; @@ -9,25 +10,31 @@ pub(super) trait SpecExtend { fn spec_extend(&mut self, iter: I); } -impl SpecExtend for Vec +impl SpecExtend for Vec where I: Iterator, + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, { default fn spec_extend(&mut self, iter: I) { self.extend_desugared(iter) } } -impl SpecExtend for Vec +impl SpecExtend for Vec where I: TrustedLen, + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, { default fn spec_extend(&mut self, iterator: I) { self.extend_trusted(iterator) } } -impl SpecExtend> for Vec { +impl SpecExtend> + for Vec +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn spec_extend(&mut self, mut iterator: IntoIter) { unsafe { self.append_elements(iterator.as_slice() as _); @@ -36,19 +43,23 @@ impl SpecExtend> for Vec { } } -impl<'a, T: 'a, I, A: Allocator + 'a> SpecExtend<&'a T, I> for Vec +impl<'a, T: 'a, I, A: Allocator + 'a, const COOP_PREFERRED: bool> SpecExtend<&'a T, I> + for Vec where I: Iterator, T: Clone, + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, { default fn spec_extend(&mut self, iterator: I) { self.spec_extend(iterator.cloned()) } } -impl<'a, T: 'a, A: Allocator + 'a> SpecExtend<&'a T, slice::Iter<'a, T>> for Vec +impl<'a, T: 'a, A: Allocator + 'a, const COOP_PREFERRED: bool> SpecExtend<&'a T, slice::Iter<'a, T>> + for Vec where T: Copy, + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, { fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) { let slice = iterator.as_slice(); diff --git a/library/alloc/src/vec/spec_from_elem.rs b/library/alloc/src/vec/spec_from_elem.rs index ff364c033ee98..bc4169a24f0b2 100644 --- a/library/alloc/src/vec/spec_from_elem.rs +++ b/library/alloc/src/vec/spec_from_elem.rs @@ -2,16 +2,30 @@ use core::ptr; use crate::alloc::Allocator; use crate::raw_vec::RawVec; +use core::alloc; use super::{ExtendElement, IsZero, Vec}; // Specialization trait used for Vec::from_elem pub(super) trait SpecFromElem: Sized { - fn from_elem(elem: Self, n: usize, alloc: A) -> Vec; + fn from_elem( + elem: Self, + n: usize, + alloc: A, + ) -> Vec + where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:; } impl SpecFromElem for T { - default fn from_elem(elem: Self, n: usize, alloc: A) -> Vec { + default fn from_elem( + elem: Self, + n: usize, + alloc: A, + ) -> Vec + where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + { let mut v = Vec::with_capacity_in(n, alloc); 
v.extend_with(n, ExtendElement(elem)); v @@ -20,7 +34,14 @@ impl SpecFromElem for T { impl SpecFromElem for T { #[inline] - default fn from_elem(elem: T, n: usize, alloc: A) -> Vec { + default fn from_elem( + elem: T, + n: usize, + alloc: A, + ) -> Vec + where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + { if elem.is_zero() { return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n }; } @@ -32,7 +53,14 @@ impl SpecFromElem for T { impl SpecFromElem for i8 { #[inline] - fn from_elem(elem: i8, n: usize, alloc: A) -> Vec { + fn from_elem( + elem: i8, + n: usize, + alloc: A, + ) -> Vec + where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + { if elem == 0 { return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n }; } @@ -47,7 +75,14 @@ impl SpecFromElem for i8 { impl SpecFromElem for u8 { #[inline] - fn from_elem(elem: u8, n: usize, alloc: A) -> Vec { + fn from_elem( + elem: u8, + n: usize, + alloc: A, + ) -> Vec + where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + { if elem == 0 { return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n }; } diff --git a/library/alloc/src/vec/spec_from_iter.rs b/library/alloc/src/vec/spec_from_iter.rs index efa6868473e49..e5d790878aabb 100644 --- a/library/alloc/src/vec/spec_from_iter.rs +++ b/library/alloc/src/vec/spec_from_iter.rs @@ -1,3 +1,5 @@ +use crate::alloc::Global; +use core::alloc; use core::mem::ManuallyDrop; use core::ptr::{self}; @@ -25,16 +27,22 @@ pub(super) trait SpecFromIter { fn from_iter(iter: I) -> Self; } -impl SpecFromIter for Vec +#[allow(unused_braces)] +impl SpecFromIter for Vec where I: Iterator, + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, { default fn from_iter(iterator: I) -> Self { SpecFromIterNested::from_iter(iterator) } } -impl SpecFromIter> for Vec { +#[allow(unused_braces)] +impl SpecFromIter> for Vec +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn from_iter(iterator: IntoIter) -> Self { // A common case is passing a vector into a function which immediately // re-collects into a vector. We can short circuit this if the IntoIter @@ -55,7 +63,7 @@ impl SpecFromIter> for Vec { } } - let mut vec = Vec::new(); + let mut vec = Vec::::new_co(); // must delegate to spec_extend() since extend() itself delegates // to spec_from for empty Vecs vec.spec_extend(iterator); diff --git a/library/alloc/src/vec/spec_from_iter_nested.rs b/library/alloc/src/vec/spec_from_iter_nested.rs index f915ebb86e5a5..b71e86e83ba5a 100644 --- a/library/alloc/src/vec/spec_from_iter_nested.rs +++ b/library/alloc/src/vec/spec_from_iter_nested.rs @@ -1,8 +1,11 @@ +use core::alloc; use core::cmp; use core::iter::TrustedLen; use core::ptr; +use crate::alloc::Global; use crate::raw_vec::RawVec; +use crate::DEFAULT_COOP_PREFERRED; use super::{SpecExtend, Vec}; @@ -13,9 +16,11 @@ pub(super) trait SpecFromIterNested { fn from_iter(iter: I) -> Self; } -impl SpecFromIterNested for Vec +#[allow(unused_braces)] +impl SpecFromIterNested for Vec where I: Iterator, + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, { default fn from_iter(mut iterator: I) -> Self { // Unroll the first iteration, as the vector is going to be @@ -24,7 +29,7 @@ where // vector being full in the few subsequent loop iterations. // So we get better branch prediction. 
let mut vector = match iterator.next() { - None => return Vec::new(), + None => return Vec::new_co(), Some(element) => { let (lower, _) = iterator.size_hint(); let initial_capacity = @@ -40,12 +45,13 @@ where }; // must delegate to spec_extend() since extend() itself delegates // to spec_from for empty Vecs - as SpecExtend>::spec_extend(&mut vector, iterator); + as SpecExtend>::spec_extend(&mut vector, iterator); vector } } -impl SpecFromIterNested for Vec +#[allow(unused_braces)] +impl SpecFromIterNested for Vec where I: TrustedLen, { diff --git a/library/alloc/src/vec/splice.rs b/library/alloc/src/vec/splice.rs index 1861147fe72fb..acf5553c9cfb4 100644 --- a/library/alloc/src/vec/splice.rs +++ b/library/alloc/src/vec/splice.rs @@ -1,4 +1,5 @@ use crate::alloc::{Allocator, Global}; +use core::alloc; use core::ptr::{self}; use core::slice::{self}; @@ -22,13 +23,20 @@ pub struct Splice< 'a, I: Iterator + 'a, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + 'a = Global, -> { - pub(super) drain: Drain<'a, I::Item, A>, + const COOP_PREFERRED: bool = false, +> where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ + pub(super) drain: Drain<'a, I::Item, A, COOP_PREFERRED>, pub(super) replace_with: I, } #[stable(feature = "vec_splice", since = "1.21.0")] -impl Iterator for Splice<'_, I, A> { +impl Iterator + for Splice<'_, I, A, COOP_PREFERRED> +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ type Item = I::Item; fn next(&mut self) -> Option { @@ -41,17 +49,30 @@ impl Iterator for Splice<'_, I, A> { } #[stable(feature = "vec_splice", since = "1.21.0")] -impl DoubleEndedIterator for Splice<'_, I, A> { +impl DoubleEndedIterator + for Splice<'_, I, A, COOP_PREFERRED> +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn next_back(&mut self) -> Option { self.drain.next_back() } } #[stable(feature = "vec_splice", since = "1.21.0")] -impl ExactSizeIterator for Splice<'_, I, A> {} +impl ExactSizeIterator + for Splice<'_, I, A, COOP_PREFERRED> +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ +} #[stable(feature = "vec_splice", since = "1.21.0")] -impl Drop for Splice<'_, I, A> { +impl Drop + for Splice<'_, I, A, COOP_PREFERRED> +where + [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ fn drop(&mut self) { self.drain.by_ref().for_each(drop); // At this point draining is done and the only remaining tasks are splicing @@ -98,7 +119,10 @@ impl Drop for Splice<'_, I, A> { } /// Private helper methods for `Splice::drop` -impl Drain<'_, T, A> { +impl Drain<'_, T, A, COOP_PREFERRED> +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ /// The range from `self.vec.len` to `self.tail_start` contains elements /// that have been moved out. /// Fill that range as much as possible with new elements from the `replace_with` iterator. 
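The hunks above all apply one pattern: `Vec` and its iterators gain a `const COOP_PREFERRED: bool` parameter, and every impl repeats a `[(); ...]:` bound so the const expression that counts metadata slots is evaluable for the chosen allocator and preference. A minimal stand-alone sketch of that pattern, assuming the incomplete `generic_const_exprs` feature; `metadata_slots` and `Buffer` are illustrative stand-ins, not the patch's real items:

```rust
#![feature(generic_const_exprs)]
#![allow(incomplete_features)]

// Stand-in for co_alloc_metadata_num_slots_with_preference: one metadata slot
// only when cooperation is preferred, otherwise none.
pub const fn metadata_slots(coop_preferred: bool) -> usize {
    if coop_preferred { 1 } else { 0 }
}

pub struct Buffer<T, const COOP_PREFERRED: bool>
where
    [(); metadata_slots(COOP_PREFERRED)]:,
{
    data: Vec<T>,
    // Zero-length array when COOP_PREFERRED is false, one slot otherwise.
    meta: [usize; metadata_slots(COOP_PREFERRED)],
}

// Every impl has to restate the same bound, exactly as the patch does for
// Hash, Index, IntoIterator, Extend, Drop, Default, AsRef, ... on `Vec`.
impl<T: Clone, const COOP_PREFERRED: bool> Clone for Buffer<T, COOP_PREFERRED>
where
    [(); metadata_slots(COOP_PREFERRED)]:,
{
    fn clone(&self) -> Self {
        Buffer { data: self.data.clone(), meta: self.meta }
    }
}
```

The bound must be restated on each impl because `generic_const_exprs` does not yet propagate such requirements implicitly, which is why so many otherwise unrelated impls are touched.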
diff --git a/library/alloc/tests/boxed.rs b/library/alloc/tests/boxed.rs index af49826ff30a3..656e079f9c61c 100644 --- a/library/alloc/tests/boxed.rs +++ b/library/alloc/tests/boxed.rs @@ -61,6 +61,7 @@ fn box_deref_lval() { pub struct ConstAllocator; +//@FIXME unsafe impl const Allocator for ConstAllocator { fn allocate(&self, layout: Layout) -> Result, AllocError> { match layout.size() { diff --git a/library/core/src/alloc/global.rs b/library/core/src/alloc/global.rs index 26308e63a7bad..e048c01d34bce 100644 --- a/library/core/src/alloc/global.rs +++ b/library/core/src/alloc/global.rs @@ -1,7 +1,7 @@ +use crate::alloc::GlobalCoAllocMeta; use crate::alloc::Layout; use crate::cmp; use crate::ptr; -use crate::alloc::GlobalCoAllocMeta; #[unstable(feature = "global_co_alloc_meta", issue = "none")] #[allow(missing_debug_implementations)] @@ -166,7 +166,9 @@ pub unsafe trait GlobalAlloc { unsafe fn alloc(&self, layout: Layout) -> *mut u8; #[unstable(feature = "global_co_alloc", issue = "none")] - unsafe fn co_alloc(&self, _layout: Layout, mut _result: &mut RawAndMeta) {panic!("TODO")} + unsafe fn co_alloc(&self, _layout: Layout, mut _result: &mut RawAndMeta) { + panic!("@FIXME") + } /// Deallocate the block of memory at the given `ptr` pointer with the given `layout`. /// @@ -184,7 +186,9 @@ pub unsafe trait GlobalAlloc { unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout); #[unstable(feature = "global_co_alloc", issue = "none")] - unsafe fn co_dealloc(&self, _ptr_and_meta: RawAndMeta, _layout: Layout) {panic!("TODO")} + unsafe fn co_dealloc(&self, _ptr_and_meta: RawAndMeta, _layout: Layout) { + panic!("@FIXME") + } /// Behaves like `alloc`, but also ensures that the contents /// are set to zero before being returned. @@ -309,7 +313,7 @@ pub unsafe trait GlobalAlloc { ptr_and_meta: RawAndMeta, layout: Layout, new_size: usize, - mut result: &mut RawAndMeta + mut result: &mut RawAndMeta, ) { // SAFETY: the caller must ensure that the `new_size` does not overflow. // `layout.align()` comes from a `Layout` and is thus guaranteed to be valid. diff --git a/library/core/src/alloc/mod.rs b/library/core/src/alloc/mod.rs index 23bfdc732557e..c0d2af6098378 100644 --- a/library/core/src/alloc/mod.rs +++ b/library/core/src/alloc/mod.rs @@ -25,9 +25,16 @@ use crate::error::Error; use crate::fmt; use crate::ptr::{self, NonNull}; -// @TODO Make this target-specific +// @FIXME Make this target-specific +/// Metadata for `Vec/VecDeque/RawVec` to assist the allocator. Make sure its +/// alignment is not bigger than alignment of `usize`. Otherwise, even if (a +/// particular) `Vec/VecDeque/RawVec` generic instance doesn't use cooperation, +/// it would increase size of that `Vec/VecDeque/RawVec` because of alignment +/// rules! @FIXME compile time test that `GlobalCoAllocMeta` alignment <= +/// `usize` alignment. #[unstable(feature = "global_co_alloc_meta", issue = "none")] #[allow(missing_debug_implementations)] +#[derive(Clone, Copy)] pub struct GlobalCoAllocMeta { //pub one: usize, /*pub two: usize, @@ -73,15 +80,39 @@ pub struct SliceAndMeta { pub meta: GlobalCoAllocMeta, } +#[unstable(feature = "global_co_alloc_short_term_pref", issue = "none")] +//pub const SHORT_TERM_VEC_PREFERS_COOP: bool = true; +#[macro_export] +macro_rules! 
SHORT_TERM_VEC_PREFERS_COOP { + () => { + true + }; +} + #[unstable(feature = "global_co_alloc_meta", issue = "none")] #[allow(missing_debug_implementations)] pub type SliceAndMetaResult = Result; #[unstable(feature = "global_co_alloc", issue = "none")] pub const fn co_alloc_metadata_num_slots() -> usize { + // @FIXME later + if false { + panic!( + "FIXME - consider replacing co_alloc_metadata_num_slots() with co_alloc_metadata_num_slots_with_preference(bool), and adding const flags as appropriate." + ); + } if A::IS_CO_ALLOCATOR { 1 } else { 0 } } +#[unstable(feature = "global_co_alloc", issue = "none")] +/// Param `coop_preferred` - if false, then this returns `0`, regardless of +/// whether allocator `A` is cooperative. +pub const fn co_alloc_metadata_num_slots_with_preference( + coop_preferred: bool, +) -> usize { + if A::IS_CO_ALLOCATOR && coop_preferred { 1 } else { 0 } +} + /// An implementation of `Allocator` can allocate, grow, shrink, and deallocate arbitrary blocks of /// data described via [`Layout`][]. /// @@ -145,7 +176,8 @@ pub unsafe trait Allocator { // Can't have: const type Xyz; /// If this is any type with non-zero size, then the actual `Allocator` implementation supports cooperative functions (`co_*`) as first class citizens. //type IsCoAllocator = (); - // It applies to the global (default) allocator only. And/or System allocator?! TODO + // It applies to the global (default) allocator only. And/or System allocator?! @FIXME + // @FIXME make false by default const IS_CO_ALLOCATOR: bool = true; /// Attempts to allocate a block of memory. @@ -170,7 +202,9 @@ pub unsafe trait Allocator { /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html fn allocate(&self, layout: Layout) -> Result, AllocError>; - fn co_allocate(&self, _layout: Layout, _result: &mut SliceAndMetaResult) {panic!("TODO")} + fn co_allocate(&self, _layout: Layout, _result: &mut SliceAndMetaResult) { + panic!("FIXME") + } /// Behaves like `allocate`, but also ensures that the returned memory is zero-initialized. /// @@ -196,14 +230,9 @@ pub unsafe trait Allocator { fn co_allocate_zeroed(&self, layout: Layout, mut result: &mut SliceAndMetaResult) { self.co_allocate(layout, &mut result); - if let Ok(SliceAndMeta{slice, ..}) = result { + if let Ok(SliceAndMeta { slice, .. }) = result { // SAFETY: `alloc` returns a valid memory block - unsafe { - slice - .as_non_null_ptr() - .as_ptr() - .write_bytes(0, slice.len()) - } + unsafe { slice.as_non_null_ptr().as_ptr().write_bytes(0, slice.len()) } } } @@ -218,7 +247,9 @@ pub unsafe trait Allocator { /// [*fit*]: #memory-fitting unsafe fn deallocate(&self, ptr: NonNull, layout: Layout); - unsafe fn co_deallocate(&self, _ptr_and_meta: PtrAndMeta, _layout: Layout) {panic!("TODO")} + unsafe fn co_deallocate(&self, _ptr_and_meta: PtrAndMeta, _layout: Layout) { + panic!("FIXME") + } /// Attempts to extend the memory block. /// @@ -289,7 +320,7 @@ pub unsafe trait Allocator { ptr_and_meta: PtrAndMeta, old_layout: Layout, new_layout: Layout, - mut result: &mut SliceAndMetaResult + mut result: &mut SliceAndMetaResult, ) { debug_assert!( new_layout.size() >= old_layout.size(), @@ -298,7 +329,7 @@ pub unsafe trait Allocator { self.co_allocate(new_layout, &mut result); - if let Ok(SliceAndMeta {slice, ..}) = result { + if let Ok(SliceAndMeta { slice, .. 
}) = result { // SAFETY: because `new_layout.size()` must be greater than or equal to // `old_layout.size()`, both the old and new memory allocation are valid for reads and // writes for `old_layout.size()` bytes. Also, because the old allocation wasn't yet @@ -383,7 +414,7 @@ pub unsafe trait Allocator { ptr_and_meta: PtrAndMeta, old_layout: Layout, new_layout: Layout, - mut result: &mut SliceAndMetaResult + mut result: &mut SliceAndMetaResult, ) { debug_assert!( new_layout.size() >= old_layout.size(), @@ -392,7 +423,7 @@ pub unsafe trait Allocator { self.co_allocate_zeroed(new_layout, &mut result); - if let Ok(SliceAndMeta{ slice, ..}) = result { + if let Ok(SliceAndMeta { slice, .. }) = result { // SAFETY: because `new_layout.size()` must be greater than or equal to // `old_layout.size()`, both the old and new memory allocation are valid for reads and // writes for `old_layout.size()` bytes. Also, because the old allocation wasn't yet @@ -478,7 +509,7 @@ pub unsafe trait Allocator { ptr_and_meta: PtrAndMeta, old_layout: Layout, new_layout: Layout, - mut result: &mut SliceAndMetaResult + mut result: &mut SliceAndMetaResult, ) { debug_assert!( new_layout.size() <= old_layout.size(), @@ -487,7 +518,7 @@ pub unsafe trait Allocator { self.co_allocate(new_layout, &mut result); - if let Ok(SliceAndMeta{ slice, ..}) = result { + if let Ok(SliceAndMeta { slice, .. }) = result { // SAFETY: because `new_layout.size()` must be lower than or equal to // `old_layout.size()`, both the old and new memory allocation are valid for reads and // writes for `new_layout.size()` bytes. Also, because the old allocation wasn't yet @@ -516,7 +547,7 @@ pub unsafe trait Allocator { } } -// @TODO +// @FIXME #[unstable(feature = "allocator_api", issue = "32838")] unsafe impl Allocator for &A where diff --git a/library/proc_macro/src/bridge/mod.rs b/library/proc_macro/src/bridge/mod.rs index 4c1e196b5ad16..aae141dc2a118 100644 --- a/library/proc_macro/src/bridge/mod.rs +++ b/library/proc_macro/src/bridge/mod.rs @@ -252,14 +252,14 @@ impl<'a, T, M> Unmark for &'a mut Marked { } } -impl Mark for Vec { +impl Mark for Vec { type Unmarked = Vec; fn mark(unmarked: Self::Unmarked) -> Self { // Should be a no-op due to std's in-place collect optimizations. unmarked.into_iter().map(T::mark).collect() } } -impl Unmark for Vec { +impl Unmark for Vec { type Unmarked = Vec; fn unmark(self) -> Self::Unmarked { // Should be a no-op due to std's in-place collect optimizations. 
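The cooperative `co_*` methods above (`co_allocate`, `co_allocate_zeroed`, `co_deallocate`, and the grow/shrink variants) hand the allocation back through an `&mut SliceAndMetaResult` out-parameter instead of a return value. A self-contained sketch of that calling convention; the types and `fake_co_allocate` below are stand-ins, not the real unstable API:

```rust
use std::alloc::Layout;

#[allow(dead_code)]
struct Meta; // stand-in for the patch's GlobalCoAllocMeta

#[allow(dead_code)]
struct SliceAndMeta {
    ptr: *mut u8,
    len: usize,
    meta: Meta,
}

type SliceAndMetaResult = Result<SliceAndMeta, ()>;

// Writes its outcome into `result` rather than returning it, mirroring the
// `co_allocate(&self, layout, &mut SliceAndMetaResult)` shape used above.
fn fake_co_allocate(layout: Layout, result: &mut SliceAndMetaResult) {
    let ptr = unsafe { std::alloc::alloc(layout) };
    *result = if ptr.is_null() {
        Err(())
    } else {
        Ok(SliceAndMeta { ptr, len: layout.size(), meta: Meta })
    };
}

fn main() {
    let layout = Layout::array::<u8>(64).unwrap();
    // Start from a failure value; the callee overwrites it on success.
    let mut result: SliceAndMetaResult = Err(());
    fake_co_allocate(layout, &mut result);
    if let Ok(SliceAndMeta { ptr, len, .. }) = result {
        // Zero the block, as co_allocate_zeroed does, then release it.
        unsafe {
            ptr.write_bytes(0, len);
            std::alloc::dealloc(ptr, layout);
        }
    }
}
```

Initialising the result to an error before the call keeps the convention total: if the callee bails out early, the caller still observes a failure.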
diff --git a/library/proc_macro/src/bridge/rpc.rs b/library/proc_macro/src/bridge/rpc.rs index 5b1bfb30983b2..b48a98903bdb4 100644 --- a/library/proc_macro/src/bridge/rpc.rs +++ b/library/proc_macro/src/bridge/rpc.rs @@ -224,7 +224,7 @@ impl DecodeMut<'_, '_, S> for String { } } -impl> Encode for Vec { +impl> Encode for Vec { fn encode(self, w: &mut Writer, s: &mut S) { self.len().encode(w, s); for x in self { @@ -233,7 +233,9 @@ impl> Encode for Vec { } } -impl<'a, S, T: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, '_, S> for Vec { +impl<'a, S, T: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, '_, S> + for Vec +{ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self { let len = usize::decode(r, s); let mut vec = Vec::with_capacity(len); diff --git a/library/proc_macro/src/diagnostic.rs b/library/proc_macro/src/diagnostic.rs index 5a209f7c7aa18..41a11b1003b84 100644 --- a/library/proc_macro/src/diagnostic.rs +++ b/library/proc_macro/src/diagnostic.rs @@ -30,7 +30,7 @@ impl MultiSpan for Span { } #[unstable(feature = "proc_macro_diagnostic", issue = "54140")] -impl MultiSpan for Vec { +impl MultiSpan for Vec { fn into_spans(self) -> Vec { self } diff --git a/library/std/src/alloc.rs b/library/std/src/alloc.rs index bee6edbc40731..6969c7d81dd5e 100644 --- a/library/std/src/alloc.rs +++ b/library/std/src/alloc.rs @@ -197,7 +197,7 @@ impl System { } } -// @TODO +// @FIXME // The Allocator impl checks the layout size to be non-zero and forwards to the GlobalAlloc impl, // which is in `std::sys::*::alloc`. #[unstable(feature = "allocator_api", issue = "32838")] diff --git a/library/std/src/ffi/os_str.rs b/library/std/src/ffi/os_str.rs index 80ed34157e6dc..6ab6323e19c83 100644 --- a/library/std/src/ffi/os_str.rs +++ b/library/std/src/ffi/os_str.rs @@ -1,6 +1,7 @@ #[cfg(test)] mod tests; +use crate::alloc::Global; use crate::borrow::{Borrow, Cow}; use crate::cmp; use crate::collections::TryReserveError; diff --git a/library/std/src/io/cursor.rs b/library/std/src/io/cursor.rs index d98ab021cadb1..63045ebcb8157 100644 --- a/library/std/src/io/cursor.rs +++ b/library/std/src/io/cursor.rs @@ -6,6 +6,7 @@ use crate::io::prelude::*; use crate::alloc::Allocator; use crate::cmp; use crate::io::{self, BorrowedCursor, ErrorKind, IoSlice, IoSliceMut, SeekFrom}; +use core::alloc; /// A `Cursor` wraps an in-memory buffer and provides it with a /// [`Seek`] implementation. @@ -397,11 +398,14 @@ fn slice_write_vectored( } /// Reserves the required space, and pads the vec with 0s if necessary. -fn reserve_and_pad( +fn reserve_and_pad( pos_mut: &mut u64, - vec: &mut Vec, + vec: &mut Vec, buf_len: usize, -) -> io::Result { +) -> io::Result +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ let pos: usize = (*pos_mut).try_into().map_err(|_| { io::const_io_error!( ErrorKind::InvalidInput, @@ -440,9 +444,14 @@ fn reserve_and_pad( /// Writes the slice to the vec without allocating /// # Safety: vec must have buf.len() spare capacity -unsafe fn vec_write_unchecked(pos: usize, vec: &mut Vec, buf: &[u8]) -> usize +unsafe fn vec_write_unchecked( + pos: usize, + vec: &mut Vec, + buf: &[u8], +) -> usize where A: Allocator, + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, { debug_assert!(vec.capacity() >= pos + buf.len()); vec.as_mut_ptr().add(pos).copy_from(buf.as_ptr(), buf.len()); @@ -458,9 +467,14 @@ where /// This also allows for the vec body to be empty, but with a position of N. 
/// This means that [`Write`] will pad the vec with 0 initially, /// before writing anything from that point -fn vec_write(pos_mut: &mut u64, vec: &mut Vec, buf: &[u8]) -> io::Result +fn vec_write( + pos_mut: &mut u64, + vec: &mut Vec, + buf: &[u8], +) -> io::Result where A: Allocator, + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, { let buf_len = buf.len(); let mut pos = reserve_and_pad(pos_mut, vec, buf_len)?; @@ -489,13 +503,14 @@ where /// This also allows for the vec body to be empty, but with a position of N. /// This means that [`Write`] will pad the vec with 0 initially, /// before writing anything from that point -fn vec_write_vectored( +fn vec_write_vectored( pos_mut: &mut u64, - vec: &mut Vec, + vec: &mut Vec, bufs: &[IoSlice<'_>], ) -> io::Result where A: Allocator, + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, { // For safety reasons, we don't want this sum to overflow ever. // If this saturates, the reserve should panic to avoid any unsound writing. @@ -543,9 +558,10 @@ impl Write for Cursor<&mut [u8]> { } #[stable(feature = "cursor_mut_vec", since = "1.25.0")] -impl Write for Cursor<&mut Vec> +impl Write for Cursor<&mut Vec> where A: Allocator, + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, { fn write(&mut self, buf: &[u8]) -> io::Result { vec_write(&mut self.pos, self.inner, buf) @@ -567,9 +583,10 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl Write for Cursor> +impl Write for Cursor> where A: Allocator, + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, { fn write(&mut self, buf: &[u8]) -> io::Result { vec_write(&mut self.pos, &mut self.inner, buf) diff --git a/library/std/src/io/impls.rs b/library/std/src/io/impls.rs index e5048dcc8acd9..02cd7aa0d2cc0 100644 --- a/library/std/src/io/impls.rs +++ b/library/std/src/io/impls.rs @@ -1,7 +1,7 @@ #[cfg(test)] mod tests; -use crate::alloc::Allocator; +use crate::alloc::{self, Allocator}; use crate::cmp; use crate::collections::VecDeque; use crate::fmt; @@ -378,7 +378,10 @@ impl Write for &mut [u8] { /// Write is implemented for `Vec` by appending to the vector. /// The vector will grow as needed. #[stable(feature = "rust1", since = "1.0.0")] -impl Write for Vec { +impl Write for Vec +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, +{ #[inline] fn write(&mut self, buf: &[u8]) -> io::Result { self.extend_from_slice(buf); @@ -414,7 +417,10 @@ impl Write for Vec { /// Read is implemented for `VecDeque` by consuming bytes from the front of the `VecDeque`. #[stable(feature = "vecdeque_read_write", since = "1.63.0")] -impl Read for VecDeque { +impl Read for VecDeque +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(_COOP_PREFERRED)]:, +{ /// Fill `buf` with the contents of the "front" slice as returned by /// [`as_slices`][`VecDeque::as_slices`]. If the contained byte slices of the `VecDeque` are /// discontiguous, multiple calls to `read` will be needed to read the entire content. @@ -438,7 +444,10 @@ impl Read for VecDeque { /// Write is implemented for `VecDeque` by appending to the `VecDeque`, growing it as needed. 
#[stable(feature = "vecdeque_read_write", since = "1.63.0")] -impl Write for VecDeque { +impl Write for VecDeque +where + [(); alloc::co_alloc_metadata_num_slots_with_preference::(_COOP_PREFERRED)]:, +{ #[inline] fn write(&mut self, buf: &[u8]) -> io::Result { self.extend(buf); diff --git a/library/std/src/lib.rs b/library/std/src/lib.rs index 363a266717467..311795fbd9bf3 100644 --- a/library/std/src/lib.rs +++ b/library/std/src/lib.rs @@ -214,6 +214,10 @@ #![needs_panic_runtime] // // Lints: +#![allow(incomplete_features)] +#![feature(generic_const_exprs)] +#![feature(global_co_alloc)] +#![feature(global_co_alloc_plvec)] #![warn(deprecated_in_future)] #![warn(missing_docs)] #![warn(missing_debug_implementations)] diff --git a/library/std/src/sys/hermit/thread_local_dtor.rs b/library/std/src/sys/hermit/thread_local_dtor.rs index 613266b9530a8..09b2df46a0f29 100644 --- a/library/std/src/sys/hermit/thread_local_dtor.rs +++ b/library/std/src/sys/hermit/thread_local_dtor.rs @@ -1,14 +1,16 @@ #![cfg(target_thread_local)] #![unstable(feature = "thread_local_internals", issue = "none")] +#![feature(global_co_alloc_plvec)] // Simplify dtor registration by using a list of destructors. // The this solution works like the implementation of macOS and // doesn't additional OS support use crate::mem; +use core::alloc::PlVec; #[thread_local] -static mut DTORS: Vec<(*mut u8, unsafe extern "C" fn(*mut u8))> = Vec::new(); +static mut DTORS: PlVec<(*mut u8, unsafe extern "C" fn(*mut u8))> = PlVec::new(); pub unsafe fn register_dtor(t: *mut u8, dtor: unsafe extern "C" fn(*mut u8)) { let list = &mut DTORS; diff --git a/library/std/src/sys/solid/thread_local_dtor.rs b/library/std/src/sys/solid/thread_local_dtor.rs index bad14bb37f720..59263cd41c761 100644 --- a/library/std/src/sys/solid/thread_local_dtor.rs +++ b/library/std/src/sys/solid/thread_local_dtor.rs @@ -1,17 +1,20 @@ #![cfg(target_thread_local)] #![unstable(feature = "thread_local_internals", issue = "none")] +#![feature(global_co_alloc_plvec)] +#![feature(global_co_alloc_plvec)] // Simplify dtor registration by using a list of destructors. 
use super::{abi, itron::task}; use crate::cell::Cell; use crate::mem; +use core::alloc::PlVec; #[thread_local] static REGISTERED: Cell = Cell::new(false); #[thread_local] -static mut DTORS: Vec<(*mut u8, unsafe extern "C" fn(*mut u8))> = Vec::new(); +static mut DTORS: PlVec<(*mut u8, unsafe extern "C" fn(*mut u8))> = PlVec::new(); pub unsafe fn register_dtor(t: *mut u8, dtor: unsafe extern "C" fn(*mut u8)) { if !REGISTERED.get() { diff --git a/library/std/src/sys/unix/thread_local_dtor.rs b/library/std/src/sys/unix/thread_local_dtor.rs index 236d2f2ee2928..16f237fa6a61b 100644 --- a/library/std/src/sys/unix/thread_local_dtor.rs +++ b/library/std/src/sys/unix/thread_local_dtor.rs @@ -58,13 +58,22 @@ pub unsafe fn register_dtor(t: *mut u8, dtor: unsafe extern "C" fn(*mut u8)) { static REGISTERED: Cell = Cell::new(false); #[thread_local] - static mut DTORS: Vec<(*mut u8, unsafe extern "C" fn(*mut u8))> = Vec::new(); + static mut DTORS: PlVec<(*mut u8, unsafe extern "C" fn(*mut u8))> = PlVec::new(); if !REGISTERED.get() { _tlv_atexit(run_dtors, ptr::null_mut()); REGISTERED.set(true); } + type List = alloc::vec::PlVec<(*mut u8, unsafe extern "C" fn(*mut u8))>; + + #[thread_local] + static DTORS: Cell<*mut List> = Cell::new(ptr::null_mut()); + if DTORS.get().is_null() { + let v: Box = box Vec::new(); + DTORS.set(Box::into_raw(v)); + } + extern "C" { fn _tlv_atexit(dtor: unsafe extern "C" fn(*mut u8), arg: *mut u8); } diff --git a/library/std/src/sys/windows/mod.rs b/library/std/src/sys/windows/mod.rs index 77359abe42995..0296c2dab5552 100644 --- a/library/std/src/sys/windows/mod.rs +++ b/library/std/src/sys/windows/mod.rs @@ -204,6 +204,7 @@ where // incorrect size hints for some short paths: // https://github.com/dylni/normpath/issues/5 let mut stack_buf: [MaybeUninit; 512] = MaybeUninit::uninit_array(); + // @FIXME Use CoVec? let mut heap_buf: Vec> = Vec::new(); unsafe { let mut n = stack_buf.len(); diff --git a/library/std/src/sys/windows/thread_local_dtor.rs b/library/std/src/sys/windows/thread_local_dtor.rs index 9707a95dff21b..cbadd2dd23aea 100644 --- a/library/std/src/sys/windows/thread_local_dtor.rs +++ b/library/std/src/sys/windows/thread_local_dtor.rs @@ -3,10 +3,13 @@ #![unstable(feature = "thread_local_internals", issue = "none")] #![cfg(target_thread_local)] +#![feature(global_co_alloc_plvec)] + +use core::alloc::PlVec; // Using a per-thread list avoids the problems in synchronizing global state. #[thread_local] -static mut DESTRUCTORS: Vec<(*mut u8, unsafe extern "C" fn(*mut u8))> = Vec::new(); +static mut DESTRUCTORS: PlVec<(*mut u8, unsafe extern "C" fn(*mut u8))> = Vec::new(); // Ensure this can never be inlined because otherwise this may break in dylibs. // See #44391. diff --git a/library/std/src/sys_common/thread_local_dtor.rs b/library/std/src/sys_common/thread_local_dtor.rs index 844946eda031f..6ec2f3cd11601 100644 --- a/library/std/src/sys_common/thread_local_dtor.rs +++ b/library/std/src/sys_common/thread_local_dtor.rs @@ -15,6 +15,7 @@ use crate::ptr; use crate::sys_common::thread_local_key::StaticKey; +use alloc::vec::PlVec; pub unsafe fn register_dtor_fallback(t: *mut u8, dtor: unsafe extern "C" fn(*mut u8)) { // The fallback implementation uses a vanilla OS-based TLS key to track @@ -28,7 +29,7 @@ pub unsafe fn register_dtor_fallback(t: *mut u8, dtor: unsafe extern "C" fn(*mut // flagged for destruction. 
static DTORS: StaticKey = StaticKey::new(Some(run_dtors)); - type List = Vec<(*mut u8, unsafe extern "C" fn(*mut u8))>; + type List = PlVec<(*mut u8, unsafe extern "C" fn(*mut u8))>; if DTORS.get().is_null() { let v: Box = Box::new(Vec::new()); DTORS.set(Box::into_raw(v) as *mut u8); diff --git a/src/librustdoc/html/render/context.rs b/src/librustdoc/html/render/context.rs index 504938fef733b..d201fa6dd9a40 100644 --- a/src/librustdoc/html/render/context.rs +++ b/src/librustdoc/html/render/context.rs @@ -77,7 +77,10 @@ pub(crate) struct Context<'tcx> { // `Context` is cloned a lot, so we don't want the size to grow unexpectedly. #[cfg(all(not(windows), target_arch = "x86_64", target_pointer_width = "64"))] -rustc_data_structures::static_assert_size!(Context<'_>, 160 + 2 * mem::size_of::()); +rustc_data_structures::static_assert_size!( + Context<'_>, + 160 + 2 * mem::size_of::() +); /// Shared mutable state used in [`Context`] and elsewhere. pub(crate) struct SharedContext<'tcx> { From 91aa52b57756ba4c33cfc0d61daf4b3d10565eb1 Mon Sep 17 00:00:00 2001 From: Peter Kehl Date: Thu, 12 Jan 2023 23:16:40 -0800 Subject: [PATCH 4/9] CoAlloc: Uncommenting assert of BorrowType::TRAVERSAL_PERMIT. TODO Undo once compilable, FIXME --- library/alloc/src/collections/btree/node.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/library/alloc/src/collections/btree/node.rs b/library/alloc/src/collections/btree/node.rs index 3233a575ecf25..2e43ce937f06d 100644 --- a/library/alloc/src/collections/btree/node.rs +++ b/library/alloc/src/collections/btree/node.rs @@ -319,7 +319,8 @@ impl NodeRef self, ) -> Result, marker::Edge>, Self> { const { - assert!(BorrowType::TRAVERSAL_PERMIT); + //@FIXME uncomment once compilable + //assert!(BorrowType::TRAVERSAL_PERMIT); } // We need to use raw pointers to nodes because, if BorrowType is marker::ValMut, @@ -1063,7 +1064,8 @@ impl /// both, upon success, do nothing. pub fn descend(self) -> NodeRef { const { - assert!(BorrowType::TRAVERSAL_PERMIT); + // @FIXME uncomment once compilable + //assert!(BorrowType::TRAVERSAL_PERMIT); } // We need to use raw pointers to nodes because, if BorrowType is From 3139f35d8a9a3a285ec590a8bd521f53e378f527 Mon Sep 17 00:00:00 2001 From: Peter Kehl Date: Mon, 23 Jan 2023 16:35:42 -0800 Subject: [PATCH 5/9] CoAlloc: Vec + related (Allocator, GlobalAlloc, proc_macro; vec/macros). 
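The `static_assert_size!` adjustments in this series add `mem::size_of::<GlobalCoAllocMeta>()` (or the allocator's `CoAllocMeta`) once per contained `Vec`, so the compile-time size checks keep passing whether or not the metadata is zero-sized. A rough stand-alone sketch of the same idea; `Meta` and `Wrapper` are illustrative only:

```rust
use std::mem;

#[allow(dead_code)]
struct Meta; // stand-in for GlobalCoAllocMeta: zero-sized today, may grow per target

#[allow(dead_code)]
struct Wrapper {
    data: Vec<u8>,
    // One metadata slot per contained Vec, as in the patch.
    meta: [Meta; 1],
}

// Rough equivalent of rustc_data_structures::static_assert_size!: compilation
// fails if the size drifts away from the expected value.
#[cfg(target_pointer_width = "64")]
const _: () = assert!(mem::size_of::<Wrapper>() == 24 + mem::size_of::<Meta>());
```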
--- compiler/rustc_arena/src/lib.rs | 4 +- compiler/rustc_ast/src/ast.rs | 25 +- compiler/rustc_ast/src/lib.rs | 1 + compiler/rustc_parse/src/parser/mod.rs | 5 +- library/alloc/src/boxed.rs | 54 +- library/alloc/src/co_alloc.rs | 47 ++ .../alloc/src/collections/binary_heap/mod.rs | 34 +- .../src/collections/binary_heap/tests.rs | 6 +- .../alloc/src/collections/vec_deque/drain.rs | 82 +-- .../src/collections/vec_deque/into_iter.rs | 57 ++- .../alloc/src/collections/vec_deque/macros.rs | 4 +- .../alloc/src/collections/vec_deque/mod.rs | 308 ++++++++---- .../src/collections/vec_deque/spec_extend.rs | 43 +- .../collections/vec_deque/spec_from_iter.rs | 62 ++- library/alloc/src/ffi/c_str.rs | 6 +- library/alloc/src/lib.rs | 6 +- library/alloc/src/macros.rs | 300 +++++++++-- library/alloc/src/raw_vec.rs | 83 ++-- library/alloc/src/rc.rs | 11 +- library/alloc/src/slice.rs | 293 +++++++++-- library/alloc/src/str.rs | 12 +- library/alloc/src/vec/drain.rs | 92 ++-- library/alloc/src/vec/drain_filter.rs | 43 +- library/alloc/src/vec/in_place_collect.rs | 8 +- library/alloc/src/vec/in_place_drop.rs | 6 +- library/alloc/src/vec/into_iter.rs | 152 +++--- library/alloc/src/vec/mod.rs | 466 ++++++++++++------ library/alloc/src/vec/partial_eq.rs | 22 +- library/alloc/src/vec/spec_extend.rs | 35 +- library/alloc/src/vec/spec_from_elem.rs | 37 +- library/alloc/src/vec/spec_from_iter.rs | 15 +- .../alloc/src/vec/spec_from_iter_nested.rs | 15 +- library/alloc/src/vec/splice.rs | 42 +- library/alloc/tests/autotraits.rs | 9 +- library/alloc/tests/lib.rs | 1 + library/core/src/alloc/global.rs | 29 +- library/core/src/alloc/mod.rs | 146 +++--- library/proc_macro/src/bridge/mod.rs | 5 +- library/proc_macro/src/bridge/rpc.rs | 7 +- library/proc_macro/src/diagnostic.rs | 3 +- library/proc_macro/src/lib.rs | 4 + library/std/src/alloc.rs | 2 - library/std/src/ffi/os_str.rs | 1 - library/std/src/io/cursor.rs | 40 +- library/std/src/io/impls.rs | 18 +- library/std/src/lib.rs | 8 + .../std/src/sys_common/thread_local_dtor.rs | 2 +- library/test/src/lib.rs | 7 +- library/test/src/stats.rs | 5 +- 49 files changed, 1838 insertions(+), 825 deletions(-) create mode 100644 library/alloc/src/co_alloc.rs diff --git a/compiler/rustc_arena/src/lib.rs b/compiler/rustc_arena/src/lib.rs index 4fae5ef845f7d..ecd254cb4da22 100644 --- a/compiler/rustc_arena/src/lib.rs +++ b/compiler/rustc_arena/src/lib.rs @@ -11,10 +11,12 @@ html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/", test(no_crate_inject, attr(deny(warnings))) )] +#![allow(incomplete_features)] #![feature(dropck_eyepatch)] #![feature(new_uninit)] #![feature(maybe_uninit_slice)] -#![feature(min_specialization)] +//#![feature(min_specialization)] +#![feature(specialization)] #![feature(decl_macro)] #![feature(pointer_byte_offsets)] #![feature(rustc_attrs)] diff --git a/compiler/rustc_ast/src/ast.rs b/compiler/rustc_ast/src/ast.rs index aa3bec7297785..aecb0bf24b862 100644 --- a/compiler/rustc_ast/src/ast.rs +++ b/compiler/rustc_ast/src/ast.rs @@ -26,7 +26,6 @@ pub use UnsafeSource::*; use crate::ptr::P; use crate::token::{self, CommentKind, Delimiter}; use crate::tokenstream::{DelimSpan, LazyAttrTokenStream, TokenStream}; -use core::alloc::GlobalCoAllocMeta; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_data_structures::sync::Lrc; @@ -35,6 +34,8 @@ use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; use rustc_span::source_map::{respan, Spanned}; use 
rustc_span::symbol::{kw, sym, Ident, Symbol}; use rustc_span::{Span, DUMMY_SP}; +use std::alloc::Allocator; +use std::alloc::Global; use std::fmt; use std::mem; use thin_vec::{thin_vec, ThinVec}; @@ -3112,26 +3113,26 @@ mod size_asserts { static_assert_size!(AssocItem, 104); static_assert_size!(AssocItemKind, 32); static_assert_size!(Attribute, 32); - static_assert_size!(Block, 48 + mem::size_of::()); - static_assert_size!(Expr, 72 + mem::size_of::()); - static_assert_size!(ExprKind, 40 + mem::size_of::()); - static_assert_size!(Fn, 184 + 2 * mem::size_of::()); + static_assert_size!(Block, 48 + mem::size_of::<::CoAllocMeta>()); + static_assert_size!(Expr, 72 + mem::size_of::<::CoAllocMeta>()); + static_assert_size!(ExprKind, 40 + mem::size_of::<::CoAllocMeta>()); + static_assert_size!(Fn, 184 + 2 * mem::size_of::<::CoAllocMeta>()); static_assert_size!(ForeignItem, 96); static_assert_size!(ForeignItemKind, 24); static_assert_size!(GenericArg, 24); - static_assert_size!(GenericBound, 72 + mem::size_of::()); - static_assert_size!(Generics, 72 + 2 * mem::size_of::()); - static_assert_size!(Impl, 184 + 3 * mem::size_of::()); - static_assert_size!(Item, 184 + 3 * mem::size_of::()); - static_assert_size!(ItemKind, 112 + 3 * mem::size_of::()); + static_assert_size!(GenericBound, 72 + mem::size_of::<::CoAllocMeta>()); + static_assert_size!(Generics, 72 + 2 * mem::size_of::<::CoAllocMeta>()); + static_assert_size!(Impl, 184 + 3 * mem::size_of::<::CoAllocMeta>()); + static_assert_size!(Item, 184 + 3 * mem::size_of::<::CoAllocMeta>()); + static_assert_size!(ItemKind, 112 + 3 * mem::size_of::<::CoAllocMeta>()); static_assert_size!(LitKind, 24); static_assert_size!(Local, 72); static_assert_size!(MetaItemLit, 40); static_assert_size!(Param, 40); - static_assert_size!(Pat, 88 + mem::size_of::()); + static_assert_size!(Pat, 88 + mem::size_of::<::CoAllocMeta>()); static_assert_size!(Path, 24); static_assert_size!(PathSegment, 24); - static_assert_size!(PatKind, 64 + mem::size_of::()); + static_assert_size!(PatKind, 64 + mem::size_of::<::CoAllocMeta>()); static_assert_size!(Stmt, 32); static_assert_size!(StmtKind, 16); static_assert_size!(Ty, 64); diff --git a/compiler/rustc_ast/src/lib.rs b/compiler/rustc_ast/src/lib.rs index 225ad76802188..3baaf2c9b63b4 100644 --- a/compiler/rustc_ast/src/lib.rs +++ b/compiler/rustc_ast/src/lib.rs @@ -8,6 +8,7 @@ html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/", test(attr(deny(warnings))) )] +#![feature(allocator_api)] #![feature(associated_type_bounds)] #![feature(box_patterns)] #![feature(const_default_impls)] diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index 7fb4e60f8a14f..373f392899bf5 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -168,7 +168,10 @@ pub struct Parser<'a> { // This type is used a lot, e.g. it's cloned when matching many declarative macro rules with nonterminals. Make sure // it doesn't unintentionally get bigger. #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))] -rustc_data_structures::static_assert_size!(Parser<'_>, 312 + 4 * mem::size_of::()); +rustc_data_structures::static_assert_size!( + Parser<'_>, + 312 + 4 * mem::size_of::() +); /// Stores span information about a closure. 
#[derive(Clone)] diff --git a/library/alloc/src/boxed.rs b/library/alloc/src/boxed.rs index 1daba5600696a..439e2a8001678 100644 --- a/library/alloc/src/boxed.rs +++ b/library/alloc/src/boxed.rs @@ -146,6 +146,7 @@ #![stable(feature = "rust1", since = "1.0.0")] +use crate::co_alloc::CoAllocPref; use core::any::Any; use core::async_iter::AsyncIterator; use core::borrow; @@ -642,7 +643,9 @@ impl Box<[T]> { #[must_use] pub fn new_uninit_slice(len: usize) -> Box<[mem::MaybeUninit]> { // false = no need for co-alloc metadata, since it would get lost once converted to Box. - unsafe { RawVec::::with_capacity(len).into_box(len) } + unsafe { + RawVec::::with_capacity(len).into_box(len) + } } /// Constructs a new boxed slice with uninitialized contents, with the memory @@ -668,7 +671,10 @@ impl Box<[T]> { #[must_use] pub fn new_zeroed_slice(len: usize) -> Box<[mem::MaybeUninit]> { // false = no need for co-alloc metadata, since it would get lost once converted to Box. - unsafe { RawVec::::with_capacity_zeroed(len).into_box(len) } + unsafe { + RawVec::::with_capacity_zeroed(len) + .into_box(len) + } } /// Constructs a new boxed slice with uninitialized contents. Returns an error if @@ -700,7 +706,7 @@ impl Box<[T]> { Err(_) => return Err(AllocError), }; let ptr = Global.allocate(layout)?; - Ok(RawVec::::from_raw_parts_in( + Ok(RawVec::::from_raw_parts_in( ptr.as_mut_ptr() as *mut _, len, Global, @@ -737,7 +743,7 @@ impl Box<[T]> { Err(_) => return Err(AllocError), }; let ptr = Global.allocate_zeroed(layout)?; - Ok(RawVec::::from_raw_parts_in( + Ok(RawVec::::from_raw_parts_in( ptr.as_mut_ptr() as *mut _, len, Global, @@ -747,9 +753,10 @@ impl Box<[T]> { } } +#[allow(unused_braces)] impl Box<[T], A> where - [(); core::alloc::co_alloc_metadata_num_slots::()]:, + [(); { crate::meta_num_slots!(A, crate::CO_ALLOC_PREF_META_NO!()) }]:, { /// Constructs a new boxed slice with uninitialized contents in the provided allocator. /// @@ -778,12 +785,10 @@ where // #[unstable(feature = "new_uninit", issue = "63291")] #[must_use] #[allow(unused_braces)] - pub fn new_uninit_slice_in(len: usize, alloc: A) -> Box<[mem::MaybeUninit], A> - where - // false = no need for co-alloc metadata, since it would get lost once converted to Box. - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(false)]:, - { - unsafe { RawVec::::with_capacity_in(len, alloc).into_box(len) } + pub fn new_uninit_slice_in(len: usize, alloc: A) -> Box<[mem::MaybeUninit], A> { + unsafe { + RawVec::::with_capacity_in(len, alloc).into_box(len) + } } /// Constructs a new boxed slice with uninitialized contents in the provided allocator, @@ -811,12 +816,11 @@ where // #[unstable(feature = "new_uninit", issue = "63291")] #[must_use] #[allow(unused_braces)] - pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Box<[mem::MaybeUninit], A> - where - // false = no need for co-alloc metadata, since it would get lost once converted to Box. - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(false)]:, - { - unsafe { RawVec::::with_capacity_zeroed_in(len, alloc).into_box(len) } + pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Box<[mem::MaybeUninit], A> { + unsafe { + RawVec::::with_capacity_zeroed_in(len, alloc) + .into_box(len) + } } } @@ -1522,7 +1526,7 @@ impl From<&[T]> for Box<[T]> { fn from(slice: &[T]) -> Box<[T]> { let len = slice.len(); // false = no need for co-alloc metadata, since it would get lost once converted to Box. 
- let buf = RawVec::::with_capacity(len); + let buf = RawVec::::with_capacity(len); unsafe { ptr::copy_nonoverlapping(slice.as_ptr(), buf.ptr(), len); buf.into_box(slice.len()).assume_init() @@ -1687,12 +1691,13 @@ impl TryFrom> for Box<[T; N]> { #[cfg(not(no_global_oom_handling))] #[stable(feature = "boxed_array_try_from_vec", since = "1.66.0")] -impl TryFrom> +#[allow(unused_braces)] +impl TryFrom> for Box<[T; N]> where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { meta_num_slots_global!(CO_ALLOC_PREF) }]:, { - type Error = Vec; + type Error = Vec; /// Attempts to convert a `Vec` into a `Box<[T; N]>`. /// @@ -1712,7 +1717,7 @@ where /// let state: Box<[f32; 100]> = vec![1.0; 100].try_into().unwrap(); /// assert_eq!(state.len(), 100); /// ``` - fn try_from(vec: Vec) -> Result { + fn try_from(vec: Vec) -> Result { if vec.len() == N { let boxed_slice = vec.into_boxed_slice(); Ok(unsafe { boxed_slice_as_array_unchecked(boxed_slice) }) @@ -2049,14 +2054,15 @@ impl FromIterator for Box<[I]> { #[cfg(not(no_global_oom_handling))] #[stable(feature = "box_slice_clone", since = "1.3.0")] +#[allow(unused_braces)] impl Clone for Box<[T], A> where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(false)]:, + [(); { crate::meta_num_slots!(A, crate::CO_ALLOC_PREF_META_NO!()) }]:, { fn clone(&self) -> Self { let alloc = Box::allocator(self).clone(); // false = no need for co-alloc metadata, since it would get lost once converted to the boxed slice. - self.to_vec_in::(alloc).into_boxed_slice() + self.to_vec_in_co::(alloc).into_boxed_slice() } fn clone_from(&mut self, other: &Self) { diff --git a/library/alloc/src/co_alloc.rs b/library/alloc/src/co_alloc.rs new file mode 100644 index 0000000000000..23b0598cf4ee1 --- /dev/null +++ b/library/alloc/src/co_alloc.rs @@ -0,0 +1,47 @@ +//! CoAllocation-specific types that only apply in heap-based applications (hence not a part of +//! [::core]). +//! +//! Types here have names with the `CoAlloc` prefix. Yes, when using a qualified path (like +//! ::alloc::co_alloc::CoAllocPref), that involves "stuttering", which is not recommended. +//! +//! However, as per the Rust Book the common practice is to import type names fully and access them just +//! with their name (except for cases of conflict). And we don't want the type names any shorter +//! (such as `Pref`), because those would be vague/confusing. + +/// `CoAllocPref` values indicate a type's preference for coallocation (in either user space, or +/// `std` space). Used as a `const` generic parameter type (usually called `CO_ALLOC_PREF`). +/// +/// The actual value may be overridden by the allocator. See also `CoAllocMetaNumSlotsPref` and +/// the `co_alloc_pref` macro. +/// +/// This type WILL CHANGE (once `#![feature(generic_const_exprs)]` and +/// `#![feature(adt_const_params)]` are stable) to a dedicated struct/enum. Hence: +/// - DO NOT construct instances, but use the `co_alloc_pref` macro together with constants +/// `CO_ALLOC_PREF_META_YES` and `CO_ALLOC_PREF_META_NO`; +/// - DO NOT hard code any values; and +/// - DO NOT mix this/cast this with/to `u8`, `u16`, `usize` (nor any other integer). +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +pub type CoAllocPref = usize; //u8; + +/// `CoAllocMetaNumSlotsPref` values indicate that a type (but not necessarily an allocator) prefers +/// to coallocate by carrying metadata, or not. (In either user space, or `std` or `alloc` space).
+/// Used as an argument to the `co_alloc_pref` macro, which generates a `CoAllocPref` value. +/// +/// Currently this indicates only the (preferred) number of `CoAllocMetaBase` slots being used +/// (either 1 = coallocation, or 0 = no coallocation). However, in the future this type may have +/// other properties (serving as extra hints to the allocator). +/// +/// The actual value may be overridden by the allocator. For example, if the allocator doesn't +/// support coallocation, then whether this value prefers to coallocate or not makes no difference. +/// +/// This type WILL CHANGE (once `#![feature(generic_const_exprs)]` and +/// `#![feature(adt_const_params)]` are stable) to a dedicated struct/enum. Hence: +/// - DO NOT mix this/cast this with/to `u8`, `u16` (nor any other integer); and +/// - DO NOT hard code any values, but use `CO_ALLOC_PREF_META_YES` and `CO_ALLOC_PREF_META_NO`. +/// +/// This type is intentionally not `u16`, `u32`, nor `usize`. Why? This helps to prevent mistakes +/// when one would use `CO_ALLOC_PREF_META_YES` or `CO_ALLOC_PREF_META_NO` in place of `CoAllocPref` +/// values, or in place of a result of the `meta_num_slots` macro. That also prevents mixing it up with +/// [core::alloc::CoAllocatorMetaNumSlots]. +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +pub type CoAllocMetaNumSlotsPref = u16; diff --git a/library/alloc/src/collections/binary_heap/mod.rs b/library/alloc/src/collections/binary_heap/mod.rs index 05dfaa92f385b..3f08c31fb6733 100644 --- a/library/alloc/src/collections/binary_heap/mod.rs +++ b/library/alloc/src/collections/binary_heap/mod.rs @@ -143,6 +143,7 @@ #![allow(missing_docs)] #![stable(feature = "rust1", since = "1.0.0")] +use crate::co_alloc::CoAllocPref; use core::fmt; use core::iter::{FromIterator, FusedIterator, InPlaceIterable, SourceIter, TrustedLen}; use core::mem::{self, swap, ManuallyDrop}; @@ -155,7 +156,7 @@ use crate::alloc::Global; use crate::collections::TryReserveError; use crate::slice; use crate::vec::{self, AsVecIntoIter, Vec}; -use crate::DEFAULT_COOP_PREFERRED; +use crate::CO_ALLOC_PREF_DEFAULT; use super::SpecExtend; @@ -1245,7 +1246,7 @@ impl BinaryHeap { #[inline] #[stable(feature = "drain", since = "1.6.0")] #[allow(unused_braces)] - pub fn drain(&mut self) -> Drain<'_, T, { SHORT_TERM_VEC_PREFERS_COOP!() }> { + pub fn drain(&mut self) -> Drain<'_, T, { SHORT_TERM_VEC_CO_ALLOC_PREF!() }> { Drain { iter: self.data.drain(..)
} } @@ -1525,17 +1526,19 @@ unsafe impl TrustedLen for IntoIterSorted {} /// [`drain`]: BinaryHeap::drain #[stable(feature = "drain", since = "1.6.0")] #[derive(Debug)] -pub struct Drain<'a, T: 'a, const COOP_PREFERRED: bool> +#[allow(unused_braces)] +pub struct Drain<'a, T: 'a, const CO_ALLOC_PREF: CoAllocPref> where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { meta_num_slots_global!(CO_ALLOC_PREF) }]:, { - iter: vec::Drain<'a, T, Global, COOP_PREFERRED>, + iter: vec::Drain<'a, T, Global, CO_ALLOC_PREF>, } #[stable(feature = "drain", since = "1.6.0")] -impl Iterator for Drain<'_, T, COOP_PREFERRED> +#[allow(unused_braces)] +impl Iterator for Drain<'_, T, CO_ALLOC_PREF> where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { meta_num_slots_global!(CO_ALLOC_PREF) }]:, { type Item = T; @@ -1551,9 +1554,10 @@ where } #[stable(feature = "drain", since = "1.6.0")] -impl DoubleEndedIterator for Drain<'_, T, COOP_PREFERRED> +#[allow(unused_braces)] +impl DoubleEndedIterator for Drain<'_, T, CO_ALLOC_PREF> where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { meta_num_slots_global!(CO_ALLOC_PREF) }]:, { #[inline] fn next_back(&mut self) -> Option { @@ -1562,9 +1566,10 @@ where } #[stable(feature = "drain", since = "1.6.0")] -impl ExactSizeIterator for Drain<'_, T, COOP_PREFERRED> +#[allow(unused_braces)] +impl ExactSizeIterator for Drain<'_, T, CO_ALLOC_PREF> where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { meta_num_slots_global!(CO_ALLOC_PREF) }]:, { fn is_empty(&self) -> bool { self.iter.is_empty() @@ -1572,8 +1577,9 @@ where } #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for Drain<'_, T, COOP_PREFERRED> where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: +#[allow(unused_braces)] +impl FusedIterator for Drain<'_, T, CO_ALLOC_PREF> where + [(); { meta_num_slots_global!(CO_ALLOC_PREF) }]: { } @@ -1664,7 +1670,7 @@ impl From<[T; N]> for BinaryHeap { #[stable(feature = "binary_heap_extras_15", since = "1.5.0")] #[allow(unused_braces)] -impl From> for Vec { +impl From> for Vec { /// Converts a `BinaryHeap` into a `Vec`. 
/// /// This conversion requires no data movement or allocation, and has diff --git a/library/alloc/src/collections/binary_heap/tests.rs b/library/alloc/src/collections/binary_heap/tests.rs index ffbb6c80ac018..35a844c738ca3 100644 --- a/library/alloc/src/collections/binary_heap/tests.rs +++ b/library/alloc/src/collections/binary_heap/tests.rs @@ -1,5 +1,6 @@ use super::*; use crate::boxed::Box; +use crate::{CO_ALLOC_PREF_META_YES, CO_ALLOC_PREF_META_NO}; use crate::testing::crash_test::{CrashTestDummy, Panic}; use core::mem; use std::iter::TrustedLen; @@ -448,7 +449,10 @@ fn test_extend_specialization() { #[allow(dead_code)] fn assert_covariance() { - fn drain<'new>(d: Drain<'static, &'static str>) -> Drain<'new, &'new str> { + fn drain<'new>(d: Drain<'static, &'static str, {CO_ALLOC_PREF_META_NO!()}>) -> Drain<'new, &'new str, {CO_ALLOC_PREF_META_NO!()}> { + d + } + fn drain_co<'new>(d: Drain<'static, &'static str, {CO_ALLOC_PREF_META_YES!()}>) -> Drain<'new, &'new str, {CO_ALLOC_PREF_META_YES!()}> { d } } diff --git a/library/alloc/src/collections/vec_deque/drain.rs b/library/alloc/src/collections/vec_deque/drain.rs index cd0e6caace3f4..4dd85edac6eaa 100644 --- a/library/alloc/src/collections/vec_deque/drain.rs +++ b/library/alloc/src/collections/vec_deque/drain.rs @@ -1,8 +1,9 @@ +use crate::co_alloc::CoAllocPref; use core::iter::FusedIterator; use core::marker::PhantomData; use core::mem::{self, SizedTypeProperties}; use core::ptr::NonNull; -use core::{alloc, fmt, ptr}; +use core::{fmt, ptr}; use crate::alloc::{Allocator, Global}; @@ -20,13 +21,11 @@ pub struct Drain< 'a, T: 'a, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, - const COOP_PREFERRED: bool = { SHORT_TERM_VEC_PREFERS_COOP!() }, + const CO_ALLOC_PREF: CoAllocPref = { SHORT_TERM_VEC_CO_ALLOC_PREF!() }, > where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { - // We can't just use a &mut VecDeque, as that would make Drain invariant over T - // and we want it to be covariant instead - deque: NonNull>, + deque: NonNull>, // drain_start is stored in deque.len drain_len: usize, // index into the logical array, not the physical one (always lies in [0..deque.len)) @@ -38,12 +37,13 @@ pub struct Drain< _marker: PhantomData<&'a T>, } -impl<'a, T, A: Allocator, const COOP_PREFERRED: bool> Drain<'a, T, A, COOP_PREFERRED> +#[allow(unused_braces)] +impl<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Drain<'a, T, A, CO_ALLOC_PREF> where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { pub(super) unsafe fn new( - deque: &'a mut VecDeque, + deque: &'a mut VecDeque, drain_start: usize, drain_len: usize, ) -> Self { @@ -95,10 +95,11 @@ where } #[stable(feature = "collection_debug", since = "1.17.0")] -impl fmt::Debug - for Drain<'_, T, A, COOP_PREFERRED> +#[allow(unused_braces)] +impl fmt::Debug + for Drain<'_, T, A, CO_ALLOC_PREF> where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("Drain") @@ -111,36 +112,39 @@ where } #[stable(feature = "drain", since = "1.6.0")] -unsafe impl Sync - for Drain<'_, T, A, COOP_PREFERRED> +#[allow(unused_braces)] +unsafe impl Sync + for Drain<'_, T, A, CO_ALLOC_PREF> where - [(); 
alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { } #[stable(feature = "drain", since = "1.6.0")] -unsafe impl Send - for Drain<'_, T, A, COOP_PREFERRED> +#[allow(unused_braces)] +unsafe impl Send + for Drain<'_, T, A, CO_ALLOC_PREF> where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { } #[stable(feature = "drain", since = "1.6.0")] -impl Drop for Drain<'_, T, A, COOP_PREFERRED> +#[allow(unused_braces)] +impl Drop for Drain<'_, T, A, CO_ALLOC_PREF> where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn drop(&mut self) { - struct DropGuard<'r, 'a, T, A: Allocator, const COOP_PREFERRED: bool>( - &'r mut Drain<'a, T, A, COOP_PREFERRED>, + struct DropGuard<'r, 'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref>( + &'r mut Drain<'a, T, A, CO_ALLOC_PREF>, ) where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:; + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:; - impl<'r, 'a, T, A: Allocator, const COOP_PREFERRED: bool> Drop - for DropGuard<'r, 'a, T, A, COOP_PREFERRED> + impl<'r, 'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Drop + for DropGuard<'r, 'a, T, A, CO_ALLOC_PREF> where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn drop(&mut self) { if self.0.remaining != 0 { @@ -222,9 +226,10 @@ where } #[stable(feature = "drain", since = "1.6.0")] -impl Iterator for Drain<'_, T, A, COOP_PREFERRED> +#[allow(unused_braces)] +impl Iterator for Drain<'_, T, A, CO_ALLOC_PREF> where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { type Item = T; @@ -247,10 +252,11 @@ where } #[stable(feature = "drain", since = "1.6.0")] -impl DoubleEndedIterator - for Drain<'_, T, A, COOP_PREFERRED> +#[allow(unused_braces)] +impl DoubleEndedIterator + for Drain<'_, T, A, CO_ALLOC_PREF> where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { #[inline] fn next_back(&mut self) -> Option { @@ -264,15 +270,19 @@ where } #[stable(feature = "drain", since = "1.6.0")] -impl ExactSizeIterator - for Drain<'_, T, A, COOP_PREFERRED> +#[allow(unused_braces)] +impl ExactSizeIterator + for Drain<'_, T, A, CO_ALLOC_PREF> where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { } #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for Drain<'_, T, A, COOP_PREFERRED> where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: +#[allow(unused_braces)] +impl FusedIterator + for Drain<'_, T, A, CO_ALLOC_PREF> +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { } diff --git a/library/alloc/src/collections/vec_deque/into_iter.rs b/library/alloc/src/collections/vec_deque/into_iter.rs index f3f50e3048bc6..f043ba9c82a4c 100644 --- a/library/alloc/src/collections/vec_deque/into_iter.rs +++ b/library/alloc/src/collections/vec_deque/into_iter.rs @@ -1,5 +1,6 @@ use core::iter::{FusedIterator, TrustedLen}; use core::{alloc, array, fmt, mem::MaybeUninit, ops::Try, ptr}; +use crate::co_alloc::CoAllocPref; use crate::alloc::{Allocator, Global}; @@ -14,34 +15,37 
@@ use super::VecDeque; /// [`IntoIterator`]: core::iter::IntoIterator #[derive(Clone)] #[stable(feature = "rust1", since = "1.0.0")] +#[allow(unused_braces)] pub struct IntoIter< T, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, - const COOP_PREFERRED: bool = true, + const CO_ALLOC_PREF: CoAllocPref = { CO_ALLOC_PREF_DEFAULT!() }, > where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { - inner: VecDeque, + inner: VecDeque, } -impl IntoIter +#[allow(unused_braces)] +impl IntoIter where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { - pub(super) fn new(inner: VecDeque) -> Self { + pub(super) fn new(inner: VecDeque) -> Self { IntoIter { inner } } - pub(super) fn into_vecdeque(self) -> VecDeque { + pub(super) fn into_vecdeque(self) -> VecDeque { self.inner } } #[stable(feature = "collection_debug", since = "1.17.0")] -impl fmt::Debug - for IntoIter +#[allow(unused_braces)] +impl fmt::Debug + for IntoIter where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("IntoIter").field(&self.inner).finish() @@ -49,9 +53,10 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for IntoIter +#[allow(unused_braces)] +impl Iterator for IntoIter where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { type Item = T; @@ -188,10 +193,11 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator - for IntoIter +#[allow(unused_braces)] +impl DoubleEndedIterator + for IntoIter where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { #[inline] fn next_back(&mut self) -> Option { @@ -262,10 +268,11 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator - for IntoIter +#[allow(unused_braces)] +impl ExactSizeIterator + for IntoIter where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { #[inline] fn is_empty(&self) -> bool { @@ -274,15 +281,19 @@ where } #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for IntoIter where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: +#[allow(unused_braces)] +impl FusedIterator + for IntoIter +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { } #[unstable(feature = "trusted_len", issue = "37572")] -unsafe impl TrustedLen - for IntoIter +#[allow(unused_braces)] +unsafe impl TrustedLen + for IntoIter where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { } diff --git a/library/alloc/src/collections/vec_deque/macros.rs b/library/alloc/src/collections/vec_deque/macros.rs index 68ee43152b5b5..e92fc6a8fa3b4 100644 --- a/library/alloc/src/collections/vec_deque/macros.rs +++ b/library/alloc/src/collections/vec_deque/macros.rs @@ -1,10 +1,10 @@ macro_rules! 
__impl_slice_eq1 { ([$($vars:tt)*] $lhs:ty, $rhs:ty, $($constraints:tt)*) => { #[stable(feature = "vec_deque_partial_eq_slice", since = "1.17.0")] - impl PartialEq<$rhs> for $lhs + impl PartialEq<$rhs> for $lhs where T: PartialEq, - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); {crate::meta_num_slots!(A, CO_ALLOC_PREF)}]:, $($constraints)* { fn eq(&self, other: &$rhs) -> bool { diff --git a/library/alloc/src/collections/vec_deque/mod.rs b/library/alloc/src/collections/vec_deque/mod.rs index e98087ea9ad93..d6991b31368b1 100644 --- a/library/alloc/src/collections/vec_deque/mod.rs +++ b/library/alloc/src/collections/vec_deque/mod.rs @@ -7,8 +7,8 @@ #![feature(global_co_alloc)] #![stable(feature = "rust1", since = "1.0.0")] -use crate::DEFAULT_COOP_PREFERRED; -use core::alloc; +use crate::co_alloc::CoAllocPref; +use crate::CO_ALLOC_PREF_DEFAULT; use core::cmp::{self, Ordering}; use core::fmt; use core::hash::{Hash, Hasher}; @@ -57,7 +57,7 @@ use self::spec_extend::SpecExtend; mod spec_extend; -use self::spec_from_iter::SpecFromIter; +use self::spec_from_iter::SpecFromIterCo; mod spec_from_iter; @@ -97,25 +97,24 @@ mod tests; pub struct VecDeque< T, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, - const COOP_PREFERRED: bool = { DEFAULT_COOP_PREFERRED!() }, + const CO_ALLOC_PREF: CoAllocPref = { CO_ALLOC_PREF_DEFAULT!() }, > where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { - // `self[0]`, if it exists, is `buf[head]`. - // `head < buf.capacity()`, unless `buf.capacity() == 0` when `head == 0`. head: usize, // the number of initialized elements, starting from the one at `head` and potentially wrapping around. // if `len == 0`, the exact value of `head` is unimportant. // if `T` is zero-Sized, then `self.len <= usize::MAX`, otherwise `self.len <= isize::MAX as usize`. len: usize, - buf: RawVec, + buf: RawVec, } #[stable(feature = "rust1", since = "1.0.0")] -impl Clone - for VecDeque +#[allow(unused_braces)] +impl Clone + for VecDeque where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn clone(&self) -> Self { let mut deq = Self::with_capacity_in(self.len(), self.allocator().clone()); @@ -130,10 +129,11 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl<#[may_dangle] T, A: Allocator, const COOP_PREFERRED: bool> Drop - for VecDeque +#[allow(unused_braces)] +unsafe impl<#[may_dangle] T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Drop + for VecDeque where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn drop(&mut self) { /// Runs the destructor for all items in the slice when it gets dropped (normally or @@ -159,20 +159,32 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl Default for VecDeque +#[allow(unused_braces)] +impl Default for VecDeque where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { meta_num_slots_global!(CO_ALLOC_PREF) }]:, { /// Creates an empty deque. #[inline] - fn default() -> VecDeque { - VecDeque::::new() + default fn default() -> VecDeque { + VecDeque::::new_co() + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +#[allow(unused_braces)] +impl Default for VecDeque { + /// Creates an empty deque. 
+ #[inline] + fn default() -> VecDeque { + VecDeque::::new() } } -impl VecDeque +#[allow(unused_braces)] +impl VecDeque where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { /// Marginally more convenient #[inline] @@ -462,17 +474,18 @@ where mut iter: impl Iterator, len: usize, ) -> usize { - struct Guard<'a, T, A: Allocator, const COOP_PREFERRED: bool> + struct Guard<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { - deque: &'a mut VecDeque, + deque: &'a mut VecDeque, written: usize, } - impl<'a, T, A: Allocator, const COOP_PREFERRED: bool> Drop for Guard<'a, T, A, COOP_PREFERRED> + #[allow(unused_braces)] + impl<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Drop for Guard<'a, T, A, CO_ALLOC_PREF> where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn drop(&mut self) { self.deque.len += self.written; @@ -551,11 +564,7 @@ where } } -impl VecDeque -where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, -{ +impl VecDeque { /// Creates an empty deque. /// /// # Examples @@ -569,10 +578,8 @@ where #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_vec_deque_new", since = "1.68.0")] #[must_use] - pub const fn new() -> VecDeque - where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, - { + #[allow(unused_braces)] + pub const fn new() -> VecDeque { // FIXME: This should just be `VecDeque::new_in(Global)` once that hits stable. VecDeque { head: 0, len: 0, buf: RawVec::NEW } } @@ -589,14 +596,41 @@ where #[inline] #[stable(feature = "rust1", since = "1.0.0")] #[must_use] - pub fn with_capacity(capacity: usize) -> VecDeque { - VecDeque::::with_capacity_in(capacity, Global) + #[allow(unused_braces)] + pub fn with_capacity(capacity: usize) -> VecDeque { + VecDeque::::with_capacity_in(capacity, Global) } } -impl VecDeque +#[allow(unused_braces)] +impl VecDeque where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots_global!(CO_ALLOC_PREF) }]:, +{ + /// Coallocation-aware version of `new`. + #[inline] + #[unstable(feature = "co_alloc_global", issue = "none")] + #[must_use] + #[allow(unused_braces)] + pub const fn new_co() -> VecDeque { + // FIXME: This should just be `VecDeque::new_in(Global)` once that hits stable. + VecDeque { head: 0, len: 0, buf: RawVec::NEW } + } + + /// Coallocation-aware version of `with_capacity`. + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + #[must_use] + #[allow(unused_braces)] + pub fn with_capacity_co(capacity: usize) -> VecDeque { + VecDeque::::with_capacity_in(capacity, Global) + } +} + +#[allow(unused_braces)] +impl VecDeque +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { /// Creates an empty deque. 
/// @@ -609,7 +643,7 @@ where /// ``` #[inline] #[unstable(feature = "allocator_api", issue = "32838")] - pub const fn new_in(alloc: A) -> VecDeque { + pub const fn new_in(alloc: A) -> VecDeque { VecDeque { head: 0, len: 0, buf: RawVec::new_in(alloc) } } @@ -623,7 +657,7 @@ where /// let deque: VecDeque = VecDeque::with_capacity(10); /// ``` #[unstable(feature = "allocator_api", issue = "32838")] - pub fn with_capacity_in(capacity: usize, alloc: A) -> VecDeque { + pub fn with_capacity_in(capacity: usize, alloc: A) -> VecDeque { VecDeque { head: 0, len: 0, buf: RawVec::with_capacity_in(capacity, alloc) } } @@ -1404,7 +1438,7 @@ where /// ``` #[inline] #[stable(feature = "drain", since = "1.6.0")] - pub fn drain(&mut self, range: R) -> Drain<'_, T, A, COOP_PREFERRED> + pub fn drain(&mut self, range: R) -> Drain<'_, T, A, CO_ALLOC_PREF> where R: RangeBounds, { @@ -2632,9 +2666,10 @@ where } } -impl VecDeque +#[allow(unused_braces)] +impl VecDeque where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { /// Modifies the deque in-place so that `len()` is equal to new_len, /// either by removing excess elements from the back or by appending clones of `value` @@ -2679,11 +2714,12 @@ fn wrap_index(logical_index: usize, capacity: usize) -> usize { if logical_index >= capacity { logical_index - capacity } else { logical_index } } +#[allow(unused_braces)] #[stable(feature = "rust1", since = "1.0.0")] -impl PartialEq - for VecDeque +impl PartialEq + for VecDeque where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn eq(&self, other: &Self) -> bool { if self.len != other.len() { @@ -2722,24 +2758,26 @@ where } } +#[allow(unused_braces)] #[stable(feature = "rust1", since = "1.0.0")] -impl Eq for VecDeque where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: +impl Eq for VecDeque where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]: { } -__impl_slice_eq1! { [] VecDeque, Vec, } -__impl_slice_eq1! { [] VecDeque, &[U], } -__impl_slice_eq1! { [] VecDeque, &mut [U], } -__impl_slice_eq1! { [const N: usize] VecDeque, [U; N], } -__impl_slice_eq1! { [const N: usize] VecDeque, &[U; N], } -__impl_slice_eq1! { [const N: usize] VecDeque, &mut [U; N], } +__impl_slice_eq1! { [] VecDeque, Vec, } +__impl_slice_eq1! { [] VecDeque, &[U], } +__impl_slice_eq1! { [] VecDeque, &mut [U], } +__impl_slice_eq1! { [const N: usize] VecDeque, [U; N], } +__impl_slice_eq1! { [const N: usize] VecDeque, &[U; N], } +__impl_slice_eq1! 
{ [const N: usize] VecDeque, &mut [U; N], } #[stable(feature = "rust1", since = "1.0.0")] -impl PartialOrd - for VecDeque +#[allow(unused_braces)] +impl PartialOrd + for VecDeque where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn partial_cmp(&self, other: &Self) -> Option { self.iter().partial_cmp(other.iter()) @@ -2747,9 +2785,10 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl Ord for VecDeque +#[allow(unused_braces)] +impl Ord for VecDeque where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { #[inline] fn cmp(&self, other: &Self) -> Ordering { @@ -2758,9 +2797,10 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl Hash for VecDeque +#[allow(unused_braces)] +impl Hash for VecDeque where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn hash(&self, state: &mut H) { state.write_length_prefix(self.len); @@ -2775,9 +2815,11 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl Index for VecDeque +#[allow(unused_braces)] +impl Index + for VecDeque where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { type Output = T; @@ -2788,9 +2830,11 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl IndexMut for VecDeque +#[allow(unused_braces)] +impl IndexMut + for VecDeque where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { #[inline] fn index_mut(&mut self, index: usize) -> &mut T { @@ -2799,35 +2843,49 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl FromIterator for VecDeque +#[allow(unused_braces)] +impl FromIterator for VecDeque +{ + fn from_iter>(iter: I) -> VecDeque { + SpecFromIterCo::spec_from_iter_co(iter.into_iter()) + } +} + +#[unstable(feature = "global_co_alloc", issue = "none")] +#[allow(unused_braces)] +impl VecDeque where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots_global!(CO_ALLOC_PREF) }]:, { - fn from_iter>(iter: I) -> VecDeque { - SpecFromIter::spec_from_iter(iter.into_iter()) + /// Like [from_iter], but coallocation-aware. + pub fn from_iter_co>(iter: I) -> VecDeque { + SpecFromIterCo::spec_from_iter_co(iter.into_iter()) } } #[stable(feature = "rust1", since = "1.0.0")] -impl IntoIterator for VecDeque +#[allow(unused_braces)] +impl IntoIterator + for VecDeque where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { type Item = T; - type IntoIter = IntoIter; + type IntoIter = IntoIter; /// Consumes the deque into a front-to-back iterator yielding elements by /// value. 
- fn into_iter(self) -> IntoIter { + fn into_iter(self) -> IntoIter { IntoIter::new(self) } } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T, A: Allocator, const COOP_PREFERRED: bool> IntoIterator - for &'a VecDeque +#[allow(unused_braces)] +impl<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> IntoIterator + for &'a VecDeque where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { type Item = &'a T; type IntoIter = Iter<'a, T>; @@ -2838,10 +2896,11 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T, A: Allocator, const COOP_PREFERRED: bool> IntoIterator - for &'a mut VecDeque +#[allow(unused_braces)] +impl<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> IntoIterator + for &'a mut VecDeque where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { type Item = &'a mut T; type IntoIter = IterMut<'a, T>; @@ -2852,9 +2911,10 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl Extend for VecDeque +#[allow(unused_braces)] +impl Extend for VecDeque where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn extend>(&mut self, iter: I) { >::spec_extend(self, iter.into_iter()); @@ -2872,10 +2932,11 @@ where } #[stable(feature = "extend_ref", since = "1.2.0")] -impl<'a, T: 'a + Copy, A: Allocator, const COOP_PREFERRED: bool> Extend<&'a T> - for VecDeque +#[allow(unused_braces)] +impl<'a, T: 'a + Copy, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Extend<&'a T> + for VecDeque where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn extend>(&mut self, iter: I) { self.spec_extend(iter.into_iter()); @@ -2893,10 +2954,11 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl fmt::Debug - for VecDeque +#[allow(unused_braces)] +impl fmt::Debug + for VecDeque where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.iter()).finish() @@ -2904,11 +2966,16 @@ where } #[stable(feature = "vecdeque_vec_conversions", since = "1.10.0")] -impl - From> for VecDeque +#[allow(unused_braces)] +impl< + T, + A: Allocator, + /*const CO_ALLOC_PREF: CoAllocPref,*/ const OTHER_CO_ALLOC_PREF: CoAllocPref, +> From> for VecDeque +//, CO_ALLOC_PREF> where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, - [(); alloc::co_alloc_metadata_num_slots_with_preference::(OTHER_COOP_PREFERRED)]:, + //[(); {crate::meta_num_slots!(A, CO_ALLOC_PREF)}]:, + [(); { crate::meta_num_slots!(A, OTHER_CO_ALLOC_PREF) }]:, { /// Turn a [`Vec`] into a [`VecDeque`]. /// @@ -2919,22 +2986,52 @@ where /// and to not re-allocate the `Vec`'s buffer or allocate /// any additional memory. 
#[inline] - fn from(other: Vec) -> Self { + default fn from(other: Vec) -> Self { let (ptr, len, cap, alloc) = other.into_raw_parts_with_alloc(); Self { head: 0, len, - buf: unsafe { RawVec::::from_raw_parts_in(ptr, cap, alloc) }, + buf: unsafe { + RawVec::::from_raw_parts_in(ptr, cap, alloc) + }, } } } #[stable(feature = "vecdeque_vec_conversions", since = "1.10.0")] -impl - From> for Vec +#[allow(unused_braces)] +impl + From> for VecDeque where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, - [(); alloc::co_alloc_metadata_num_slots_with_preference::(VECDEQUE_COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + [(); { crate::meta_num_slots!(A, OTHER_CO_ALLOC_PREF) }]:, +{ + /// Turn a [`Vec`] into a [`VecDeque`]. + /// + /// [`Vec`]: crate::vec::Vec + /// [`VecDeque`]: crate::collections::VecDeque + /// + /// This conversion is guaranteed to run in *O*(1) time + /// and to not re-allocate the `Vec`'s buffer or allocate + /// any additional memory. + #[inline] + default fn from(other: Vec) -> Self { + let (ptr, len, cap, alloc) = other.into_raw_parts_with_alloc(); + Self { + head: 0, + len, + buf: unsafe { RawVec::::from_raw_parts_in(ptr, cap, alloc) }, + } + } +} + +#[stable(feature = "vecdeque_vec_conversions", since = "1.10.0")] +#[allow(unused_braces)] +impl + From> for Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + [(); { crate::meta_num_slots!(A, VECDEQUE_CO_ALLOC_PREF) }]:, { /// Turn a [`VecDeque`] into a [`Vec`]. /// @@ -2965,9 +3062,9 @@ where /// assert_eq!(vec, [8, 9, 1, 2, 3, 4]); /// assert_eq!(vec.as_ptr(), ptr); /// ``` - fn from(mut other: VecDeque) -> Self + fn from(mut other: VecDeque) -> Self where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(VECDEQUE_COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, VECDEQUE_CO_ALLOC_PREF) }]:, { other.make_contiguous(); @@ -2982,17 +3079,14 @@ where ptr::copy(buf.add(other.head), buf, len); } // @FIXME: COOP - Vec::::from_raw_parts_in(buf, len, cap, alloc) + Vec::::from_raw_parts_in(buf, len, cap, alloc) } } } #[stable(feature = "std_collections_from_array", since = "1.56.0")] -impl From<[T; N]> - for VecDeque -where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, -{ +#[allow(unused_braces)] +impl From<[T; N]> for VecDeque { /// Converts a `[T; N]` into a `VecDeque`. /// /// ``` @@ -3003,12 +3097,12 @@ where /// assert_eq!(deq1, deq2); /// ``` fn from(arr: [T; N]) -> Self { - let mut deq = VecDeque::::with_capacity(N); + let mut deq = VecDeque::::with_capacity(N); let arr = ManuallyDrop::new(arr); if !::IS_ZST { // SAFETY: VecDeque::with_capacity ensures that there is enough capacity. 
unsafe { - // @FIXME for COOP_PREFERRED: + // @FIXME for CO_ALLOC_PREF: ptr::copy_nonoverlapping(arr.as_ptr(), deq.ptr(), N); } } diff --git a/library/alloc/src/collections/vec_deque/spec_extend.rs b/library/alloc/src/collections/vec_deque/spec_extend.rs index b1a9d23fd4a99..0c5d9cf7a5378 100644 --- a/library/alloc/src/collections/vec_deque/spec_extend.rs +++ b/library/alloc/src/collections/vec_deque/spec_extend.rs @@ -1,8 +1,8 @@ #![feature(min_specialization)] use crate::alloc::Allocator; +use crate::co_alloc::CoAllocPref; use crate::vec; -use core::alloc; use core::iter::TrustedLen; use core::slice; @@ -13,11 +13,12 @@ pub(super) trait SpecExtend { fn spec_extend(&mut self, iter: I); } -impl SpecExtend - for VecDeque +#[allow(unused_braces)] +impl SpecExtend + for VecDeque where I: Iterator, - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { default fn spec_extend(&mut self, mut iter: I) { // This function should be the moral equivalent of: @@ -27,11 +28,11 @@ where // } // May only be called if `deque.len() < deque.capacity()` - unsafe fn push_unchecked( - deque: &mut VecDeque, + unsafe fn push_unchecked( + deque: &mut VecDeque, element: T, ) where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { // SAFETY: Because of the precondition, it's guaranteed that there is space // in the logical array after the last element. @@ -59,11 +60,12 @@ where } } -impl SpecExtend - for VecDeque +#[allow(unused_braces)] +impl SpecExtend + for VecDeque where I: TrustedLen, - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { default fn spec_extend(&mut self, iter: I) { // This is the case for a TrustedLen iterator. 
@@ -96,10 +98,11 @@ where } } -impl SpecExtend> - for VecDeque +#[allow(unused_braces)] +impl SpecExtend> + for VecDeque where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn spec_extend(&mut self, mut iterator: vec::IntoIter) { let slice = iterator.as_slice(); @@ -113,23 +116,25 @@ where } } -impl<'a, T: 'a, I, A: Allocator, const COOP_PREFERRED: bool> SpecExtend<&'a T, I> - for VecDeque +#[allow(unused_braces)] +impl<'a, T: 'a, I, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> SpecExtend<&'a T, I> + for VecDeque where I: Iterator, T: Copy, - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { default fn spec_extend(&mut self, iterator: I) { self.spec_extend(iterator.copied()) } } -impl<'a, T: 'a, A: Allocator, const COOP_PREFERRED: bool> SpecExtend<&'a T, slice::Iter<'a, T>> - for VecDeque +#[allow(unused_braces)] +impl<'a, T: 'a, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> + SpecExtend<&'a T, slice::Iter<'a, T>> for VecDeque where T: Copy, - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) { let slice = iterator.as_slice(); diff --git a/library/alloc/src/collections/vec_deque/spec_from_iter.rs b/library/alloc/src/collections/vec_deque/spec_from_iter.rs index b0380260fcc44..1ef5150dda3b4 100644 --- a/library/alloc/src/collections/vec_deque/spec_from_iter.rs +++ b/library/alloc/src/collections/vec_deque/spec_from_iter.rs @@ -1,44 +1,82 @@ use super::{IntoIter, VecDeque}; use crate::alloc::Global; -use core::alloc; +use crate::co_alloc::CoAllocPref; /// Specialization trait used for `VecDeque::from_iter` pub(super) trait SpecFromIter { fn spec_from_iter(iter: I) -> Self; } -impl SpecFromIter for VecDeque +/// Specialization trait used for `VecDeque::from_iter_co` +pub(super) trait SpecFromIterCo { + fn spec_from_iter_co(iter: I) -> Self; +} + +impl SpecFromIter for VecDeque where I: Iterator, - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, { default fn spec_from_iter(iterator: I) -> Self { // Since converting is O(1) now, just re-use the `Vec` logic for // anything where we can't do something extra-special for `VecDeque`, // especially as that could save us some monomorphiziation work // if one uses the same iterators (like slice ones) with both. - crate::vec::Vec::::from_iter(iterator).into() + crate::vec::Vec::from_iter(iterator).into() + } +} + +impl SpecFromIter> for VecDeque { + #[inline] + fn spec_from_iter(iterator: crate::vec::IntoIter) -> Self { + iterator.into_vecdeque() + } +} + +impl SpecFromIter> for VecDeque { + #[inline] + fn spec_from_iter(iterator: IntoIter) -> Self { + iterator.into_vecdeque() + } +} +// ---- + +#[allow(unused_braces)] +impl SpecFromIterCo + for VecDeque +where + I: Iterator, + [(); { crate::meta_num_slots_global!(CO_ALLOC_PREF) }]:, +{ + default fn spec_from_iter_co(iterator: I) -> Self { + // Since converting is O(1) now, just re-use the `Vec` logic for + // anything where we can't do something extra-special for `VecDeque`, + // especially as that could save us some monomorphiziation work + // if one uses the same iterators (like slice ones) with both. 
+ crate::vec::Vec::::from_iter_co(iterator).into() } } -impl SpecFromIter> - for VecDeque +#[allow(unused_braces)] +impl + SpecFromIterCo> + for VecDeque where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots_global!(CO_ALLOC_PREF) }]:, { #[inline] - fn spec_from_iter(iterator: crate::vec::IntoIter) -> Self { + fn spec_from_iter_co(iterator: crate::vec::IntoIter) -> Self { iterator.into_vecdeque() } } -impl SpecFromIter> - for VecDeque +#[allow(unused_braces)] +impl SpecFromIterCo> + for VecDeque where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots_global!(CO_ALLOC_PREF) }]:, { #[inline] - fn spec_from_iter(iterator: IntoIter) -> Self { + fn spec_from_iter_co(iterator: IntoIter) -> Self { iterator.into_vecdeque() } } diff --git a/library/alloc/src/ffi/c_str.rs b/library/alloc/src/ffi/c_str.rs index 20c5f6b634e57..9d7321270d834 100644 --- a/library/alloc/src/ffi/c_str.rs +++ b/library/alloc/src/ffi/c_str.rs @@ -1,12 +1,14 @@ #[cfg(test)] mod tests; +use crate::alloc::Global; use crate::borrow::{Cow, ToOwned}; use crate::boxed::Box; use crate::rc::Rc; use crate::slice::hack::into_vec; use crate::string::String; use crate::vec::Vec; +use crate::CO_ALLOC_PREF_DEFAULT; use core::borrow::Borrow; use core::ffi::{c_char, CStr}; use core::fmt; @@ -17,8 +19,6 @@ use core::ptr; use core::slice; use core::slice::memchr; use core::str::{self, Utf8Error}; -use crate::alloc::Global; -use crate::DEFAULT_COOP_PREFERRED; #[cfg(target_has_atomic = "ptr")] use crate::sync::Arc; @@ -726,7 +726,7 @@ impl fmt::Debug for CString { #[stable(feature = "cstring_into", since = "1.7.0")] #[allow(unused_braces)] -impl From for Vec { +impl From for Vec { /// Converts a [`CString`] into a [Vec]<[u8]>. /// /// The conversion consumes the [`CString`], and removes the terminating NUL byte. diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs index 6edfaf9bf4c03..ad0e529f53eab 100644 --- a/library/alloc/src/lib.rs +++ b/library/alloc/src/lib.rs @@ -182,10 +182,9 @@ #![feature(fundamental)] #![cfg_attr(not(test), feature(generator_trait))] #![feature(global_co_alloc)] -#![feature(global_co_alloc_short_term_pref)] #![feature(hashmap_internals)] #![feature(lang_items)] -#![feature(global_co_alloc_def)] +#![feature(global_co_alloc_default)] // When we used min_specialization instead of specialization, library/alloc/src/vec/mod.rs was failing with: // - cannot specialize on predicate `the constant `core::alloc::co_alloc_metadata_num_slots::()` can be evaluated` // - cannot specialize on predicate `[(); _] well-formed` @@ -249,6 +248,9 @@ mod boxed { pub use std::boxed::Box; } pub mod borrow; +#[macro_use] +#[unstable(feature = "global_co_alloc", issue = "none")] +pub mod co_alloc; pub mod collections; #[cfg(all(not(no_rc), not(no_sync), not(no_global_oom_handling)))] pub mod ffi; diff --git a/library/alloc/src/macros.rs b/library/alloc/src/macros.rs index c2f1e9df41c11..29ec2799d47a7 100644 --- a/library/alloc/src/macros.rs +++ b/library/alloc/src/macros.rs @@ -1,7 +1,7 @@ /// Creates a [`Vec`] containing the arguments. /// -/// `vec!` allows `Vec`s to be defined with the same syntax as array expressions. -/// There are two forms of this macro: +/// `vec!` allows `Vec`s to be defined with the same syntax as array expressions. 
There are two +/// forms of this macro: /// /// - Create a [`Vec`] containing a given list of elements: /// @@ -19,19 +19,17 @@ /// assert_eq!(v, [1, 1, 1]); /// ``` /// -/// Note that unlike array expressions this syntax supports all elements -/// which implement [`Clone`] and the number of elements doesn't have to be -/// a constant. +/// Note that unlike array expressions this syntax supports all elements which implement [`Clone`] +/// and the number of elements doesn't have to be a constant. /// -/// This will use `clone` to duplicate an expression, so one should be careful -/// using this with types having a nonstandard `Clone` implementation. For -/// example, `vec![Rc::new(1); 5]` will create a vector of five references -/// to the same boxed integer value, not five references pointing to independently -/// boxed integers. +/// This will use `clone` to duplicate an expression, so one should be careful using this with types +/// having a nonstandard `Clone` implementation. For example, `vec![Rc::new(1); 5]` will create a +/// vector of five references to the same boxed integer value, not five references pointing to +/// independently boxed integers. /// -/// Also, note that `vec![expr; 0]` is allowed, and produces an empty vector. -/// This will still evaluate `expr`, however, and immediately drop the resulting value, so -/// be mindful of side effects. +/// Also, note that `vec![expr; 0]` is allowed, and produces an empty vector. This will still +/// evaluate `expr`, however, and immediately drop the resulting value, so be mindful of side +/// effects. /// /// [`Vec`]: crate::vec::Vec #[cfg(all(not(no_global_oom_handling), not(test)))] @@ -41,7 +39,7 @@ #[allow_internal_unstable(rustc_attrs, liballoc_internals)] macro_rules! vec { () => ( - $crate::__rust_force_expr!($crate::vec::Vec::new_co()) + $crate::__rust_force_expr!($crate::vec::Vec::new()) ); ($elem:expr; $n:expr) => ( $crate::__rust_force_expr!($crate::vec::from_elem($elem, $n)) @@ -54,10 +52,9 @@ macro_rules! vec { ); } -// HACK(japaric): with cfg(test) the inherent `[T]::into_vec` method, which is -// required for this macro definition, is not available. Instead use the -// `slice::into_vec` function which is only available with cfg(test) -// NB see the slice::hack module in slice.rs for more information +// HACK(japaric): with cfg(test) the inherent `[T]::into_vec` method, which is required for this +// macro definition, is not available. Instead use the `slice::into_vec` function which is only +// available with cfg(test) NB see the slice::hack module in slice.rs for more information #[cfg(all(not(no_global_oom_handling), test))] #[allow(unused_macro_rules)] macro_rules! vec { @@ -75,19 +72,18 @@ macro_rules! vec { /// Creates a `String` using interpolation of runtime expressions. /// -/// The first argument `format!` receives is a format string. This must be a string -/// literal. The power of the formatting string is in the `{}`s contained. +/// The first argument `format!` receives is a format string. This must be a string literal. The +/// power of the formatting string is in the `{}`s contained. /// -/// Additional parameters passed to `format!` replace the `{}`s within the -/// formatting string in the order given unless named or positional parameters -/// are used; see [`std::fmt`] for more information. +/// Additional parameters passed to `format!` replace the `{}`s within the formatting string in the +/// order given unless named or positional parameters are used; see [`std::fmt`] for more +/// information. 
/// -/// A common use for `format!` is concatenation and interpolation of strings. -/// The same convention is used with [`print!`] and [`write!`] macros, -/// depending on the intended destination of the string. +/// A common use for `format!` is concatenation and interpolation of strings. The same convention is +/// used with [`print!`] and [`write!`] macros, depending on the intended destination of the string. /// -/// To convert a single value to a string, use the [`to_string`] method. This -/// will use the [`Display`] formatting trait. +/// To convert a single value to a string, use the [`to_string`] method. This will use the +/// [`Display`] formatting trait. /// /// [`std::fmt`]: ../std/fmt/index.html /// [`print!`]: ../std/macro.print.html @@ -97,9 +93,8 @@ macro_rules! vec { /// /// # Panics /// -/// `format!` panics if a formatting trait implementation returns an error. -/// This indicates an incorrect implementation -/// since `fmt::Write for String` never returns an error itself. +/// `format!` panics if a formatting trait implementation returns an error. This indicates an +/// incorrect implementation since `fmt::Write for String` never returns an error itself. /// /// # Examples /// @@ -129,3 +124,246 @@ macro_rules! __rust_force_expr { $e }; } + +// ----- CoAlloc ICE workaround macro: +/// This "validates" type of a given `const` expression, and it casts it. That helps to prevent mix ups with macros/integer constant values. +#[doc(hidden)] +#[macro_export] +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +macro_rules! check_type_and_cast { + // Use the following for compile-time/build check only. And use it + // with a hard-coded `0` version of `meta_num_slots` - otherwise you get an ICE. + // + /*($e:expr, $t_check:ty, $t_cast:ty) => { + ($e + 0 as $t_check) as $t_cast + }*/ + // Use the following to build for testing/using, while rustc causes an ICE with the above and + // with a full version of `meta_num_slots`. + ($e:expr, $t_check:ty, $t_cast:ty) => { + $e + }; +} + +// ----- CoAlloc constant-like macros: +/// Coallocation option/parameter about using metadata that does prefer to use meta data. This is of type [::alloc::co_alloc::CoAllocMetaNumSlotsPref] (but not a whole []::alloc::co_alloc::CoAllocPref]). +#[doc(hidden)] +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +#[macro_export] +macro_rules! CO_ALLOC_PREF_NUM_META_SLOTS_ONE { + () => { + $crate::check_type_and_cast!(1, i32, $crate::co_alloc::CoAllocMetaNumSlotsPref) + }; +} + +/// Coallocation option/parameter about using metadata that prefers NOT to use meta data. This is of type [::alloc::co_alloc::CoAllocMetaNumSlotsPref] (but not a whole []::alloc::co_alloc::CoAllocPref]). +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +#[macro_export] +macro_rules! CO_ALLOC_PREF_NUM_META_SLOTS_ZERO { + () => { + $crate::check_type_and_cast!(0, i32, $crate::co_alloc::CoAllocMetaNumSlotsPref) + }; +} + +/// Default coallocation option/parameter about using metadata (whether to use meta data, or not). This is of type [::alloc::co_alloc::CoAllocMetaNumSlotsPref] (but not a whole []::alloc::co_alloc::CoAllocPref]). +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +#[macro_export] +macro_rules! CO_ALLOC_PREF_NUM_META_SLOTS_DEFAULT { + () => { + $crate::check_type_and_cast!(0, i32, $crate::co_alloc::CoAllocMetaNumSlotsPref) + }; +} + +/// "Yes" as a type's preference for coallocation using metadata (in either user space, or `alloc` +/// or `std` space). 
+/// +/// It may be overridden by the allocator. For example, if the allocator doesn't support +/// coallocation, then this value makes no difference. +/// +/// This constant and its type WILL CHANGE (once `#![feature(generic_const_exprs)]` and +/// `#![feature(adt_const_params)]` are stable) to a dedicated struct/enum. Hence DO NOT hard +/// code/replace/mix this with any other values/parameters. +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +#[macro_export] +macro_rules! CO_ALLOC_PREF_META_YES { + () => { + //1usize + $crate::co_alloc_pref!($crate::CO_ALLOC_PREF_NUM_META_SLOTS_ONE!()) + }; +} + +/// "No" as a type's preference for coallocation using metadata (in either user space, or `alloc` or +/// `std` space). +/// +/// Any allocator is required to respect this. Even if the allocator does support coallocation, it +/// will not coallocate types that use this value. +/// +/// This constant and its type WILL CHANGE (once `#![feature(generic_const_exprs)]` and +/// `#![feature(adt_const_params)]` are stable) to a dedicated struct/enum. Hence DO NOT hard +/// code/replace/mix this with any other values/parameters. +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +#[macro_export] +macro_rules! CO_ALLOC_PREF_META_NO { + () => { + //0usize + $crate::co_alloc_pref!($crate::CO_ALLOC_PREF_NUM_META_SLOTS_ZERO!()) + }; +} + +/// "Default" as a type's preference for coallocation using metadata (in either user space, or +/// `alloc` or `std` space). +/// +/// This value and its type WILL CHANGE (once `#![feature(generic_const_exprs)]` and +/// `#![feature(adt_const_params)]` are stable) to a dedicated struct/enum. Hence DO NOT hard +/// code/replace/mix this with any other values/parameters. +/// +/// (@FIXME) This WILL BECOME OBSOLETE and it WILL BE REPLACED with a `const` (and/or some kind +/// of compile time preference) once a related ICE is fixed (@FIXME add the ICE link here). Then +/// consider moving such a `const` to a submodule, for example `::alloc::co_alloc`. +#[unstable(feature = "global_co_alloc_default", issue = "none")] +#[macro_export] +macro_rules! CO_ALLOC_PREF_META_DEFAULT { + () => { + //0usize + $crate::co_alloc_pref!($crate::CO_ALLOC_PREF_NUM_META_SLOTS_DEFAULT!()) + }; +} + +/// Default [::alloc::co_alloc::CoAllocPref] value/config, based on `CO_ALLOC_PREF_META_DEFAULT`. +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +#[macro_export] +macro_rules! CO_ALLOC_PREF_DEFAULT { + () => { + //0usize + $crate::CO_ALLOC_PREF_META_DEFAULT!() + }; +} + +/// Coallocation preference for (internal) short-term vectors. +#[unstable(feature = "global_co_alloc", issue = "none")] +//pub const SHORT_TERM_VEC_CO_ALLOC_PREF: bool = true; +#[macro_export] +macro_rules! SHORT_TERM_VEC_CO_ALLOC_PREF { + () => { + //0usize + $crate::CO_ALLOC_PREF_META_NO!() + }; +} + +// ------ CoAlloc preference/config conversion macros: + +/// Create a `CoAllocPref` value based on the given parameter(s). For now, only one parameter is +/// supported, and it's required: `meta_pref`. +/// +/// @param `meta_pref` is one of: `CO_ALLOC_PREF_META_YES`, `CO_ALLOC_PREF_META_NO`, or +/// `CO_ALLOC_PREF_META_DEFAULT`. +/// +/// @return `CoAllocPref` value +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +#[macro_export] +macro_rules! co_alloc_pref { + // ($meta_pref + (0 as CoAllocMetaNumSlotsPref)) ensures that $meta_pref is of type + // `CoAllocMetaNumSlotsPref`.
Otherwise the casting of the result to `CoAllocPref` would not + // report the incorrect type of $meta_pref (if $meta_pref were some other integer, casting would + // compile, and we would not be notified). + ($meta_pref:expr) => { + $crate::check_type_and_cast!( + $meta_pref, + $crate::co_alloc::CoAllocMetaNumSlotsPref, + $crate::co_alloc::CoAllocPref + ) + }; +} + +/// Return 0 or 1, indicating whether to use coallocation metadata (or not) with the given allocator +/// type `alloc` and cooperation preference `co_alloc_pref`. +/// +/// NOT for public use. Param `co_alloc_pref` - can override the allocator's default preference for +/// cooperation, or can make the type not cooperative, regardless of whether allocator `A` is +/// cooperative. +/// +/// @param `alloc` Allocator (implementation) type. @param `co_alloc_pref` The heap-based type's +/// preference for coallocation, as an [::alloc::CoAllocPref] value. +/// +/// The type of second parameter `co_alloc_pref` WILL CHANGE. DO NOT hardcode/cast/mix that type. +/// Instead, use [::alloc::CoAllocPref]. +/// +// FIXME replace the macro with an (updated version of the below) `const` function). Only once +// generic_const_exprs is stable (that is, when consumer crates don't need to declare +// generic_const_exprs feature anymore). Then consider moving the function to a submodule, for +// example ::alloc::co_alloc. +#[unstable(feature = "global_co_alloc", issue = "none")] +#[macro_export] +macro_rules! meta_num_slots { + // @FIXME Use this only + // - once the ICE gets fixed, or + // - (until the ICE is fixed) with a related change in `check_type_and_cast` that makes it pass + // the given expression (parameter) unchecked & uncast. + /*($alloc:ty, $co_alloc_pref:expr) => { + $crate::check_type_and_cast!(<$alloc as ::core::alloc::Allocator>::CO_ALLOC_META_NUM_SLOTS,::core::alloc::CoAllocatorMetaNumSlots, + usize) * + $crate::check_type_and_cast!($co_alloc_pref, $crate::co_alloc::CoAllocPref, usize) + };*/ + // Use for testing & production, until ICE gets fixed. (Regardless of $co_alloc_pref.) + // + // Why still ICE?! + ($alloc:ty, $co_alloc_pref:expr) => { + // The following fails here - even if not used from meta_num_slots_default nor from meta_num_slots_global! + //<$alloc as ::core::alloc::Allocator>::CO_ALLOC_META_NUM_SLOTS + //<$crate::alloc::Global as ::core::alloc::Allocator>::CO_ALLOC_META_NUM_SLOTS + //1usize + $co_alloc_pref + } + // Use for testing & production as enforcing no meta. + /*($alloc:ty, $co_alloc_pref:expr) => { + 0usize // compiles + }*/ +} +// -\---> replace with something like: +/* +#[unstable(feature = "global_co_alloc", issue = "none")] +pub const fn meta_num_slots( + CO_ALLOC_PREF: bool, +) -> usize { + if A::CO_ALLOC_META_NUM_SLOTS && CO_ALLOC_PREF { 1 } else { 0 } +} +*/ + +/// Like `meta_num_slots`, but for the default coallocation preference (`DEFAULT_CO_ALLOC_PREF`). +/// +/// Return 0 or 1, indicating whether to use coallocation metadata (or not) with the given allocator +/// type `alloc` and the default coallocation preference (`DEFAULT_CO_ALLOC_PREF()!`). +/// +// FIXME replace the macro with a `const` function. Only once generic_const_exprs is stable (that +// is, when consumer crates don't need to declare generic_const_exprs feature anymore). Then +// consider moving the function to a submodule, for example ::alloc::co_alloc. +#[unstable(feature = "global_co_alloc", issue = "none")] +#[macro_export] +macro_rules! meta_num_slots_default { + // Can't generate if ... 
{1} else {0} + // because it's "overly complex generic constant". + ($alloc:ty) => { + // EITHER of the following is OK here + $crate::meta_num_slots!($alloc, $crate::CO_ALLOC_PREF_DEFAULT!()) + //<$alloc as ::core::alloc::Allocator>::CO_ALLOC_META_NUM_SLOTS + //<$crate::alloc::Global as ::core::alloc::Allocator>::CO_ALLOC_META_NUM_SLOTS + }; +} + +/// Like `meta_num_slots`, but fixed to the `Global` allocator. +/// +/// Return 0 or 1, indicating whether to use coallocation metadata (or not) with the `Global` +/// allocator and the given coallocation preference `co_alloc_pref`. +/// +// FIXME replace the macro with a `const` function. Only once generic_const_exprs is stable (that +// is, when consumer crates don't need to declare `generic_const_exprs` feature anymore). Then +// consider moving the function to a submodule, for example ::alloc::co_alloc. See above. +#[unstable(feature = "global_co_alloc", issue = "none")] +#[macro_export] +macro_rules! meta_num_slots_global { + ($co_alloc_pref:expr) => { + // EITHER of the following is OK here + $crate::meta_num_slots!($crate::alloc::Global, $co_alloc_pref) + // The following is OK here: + //<$crate::alloc::Global as ::core::alloc::Allocator>::CO_ALLOC_META_NUM_SLOTS + }; +} diff --git a/library/alloc/src/raw_vec.rs b/library/alloc/src/raw_vec.rs index c7a9606b65c86..4b5345f0912f3 100644 --- a/library/alloc/src/raw_vec.rs +++ b/library/alloc/src/raw_vec.rs @@ -1,6 +1,9 @@ #![unstable(feature = "raw_vec_internals", reason = "unstable const warnings", issue = "none")] -use core::alloc::{self, GlobalCoAllocMeta, LayoutError, PtrAndMeta}; +use crate::co_alloc::CoAllocPref; +use crate::meta_num_slots_default; +use core::alloc::CoAllocMetaBase; +use core::alloc::{LayoutError, PtrAndMeta}; use core::cmp; use core::intrinsics; use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties}; @@ -14,7 +17,7 @@ use crate::alloc::{Allocator, Global, Layout}; use crate::boxed::Box; use crate::collections::TryReserveError; use crate::collections::TryReserveErrorKind::*; -use crate::DEFAULT_COOP_PREFERRED; +use crate::CO_ALLOC_PREF_DEFAULT; #[cfg(test)] mod tests; @@ -50,13 +53,13 @@ enum AllocInit { /// `usize::MAX`. This means that you need to be careful when round-tripping this type with a /// `Box<[T]>`, since `capacity()` won't yield the length.
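// Illustrative aside (not part of the patch hunks): with the ICE workaround kept in the
// `meta_num_slots!` macro above, the recurring bound `[(); { meta_num_slots!(A, CO_ALLOC_PREF) }]:`
// and the length of the `metas` array in `RawVec` below currently evaluate to the preference value
// itself, regardless of the allocator `A`:
//
//     meta_num_slots!(Global, CO_ALLOC_PREF_META_NO!())   // == 0, no metadata slot
//     meta_num_slots!(Global, CO_ALLOC_PREF_META_YES!())  // == 1, one metadata slot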
#[allow(missing_debug_implementations)] -#[allow(unused_braces)] +#[allow(unused_braces)] //@FIXME remove #[allow(unused_braces)] once that false positive warning fix is included on stable pub(crate) struct RawVec< T, A: Allocator = Global, - const COOP_PREFERRED: bool = { DEFAULT_COOP_PREFERRED!() }, + const CO_ALLOC_PREF: CoAllocPref = { CO_ALLOC_PREF_DEFAULT!() }, > where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { meta_num_slots!(A, CO_ALLOC_PREF) }]:, { ptr: Unique, cap: usize, @@ -64,13 +67,13 @@ pub(crate) struct RawVec< // As of v1.67.0, `cmp` for `TypeId` is not `const`, unfortunately: //pub(crate) meta: [GlobalCoAllocMeta; {if core::any::TypeId::of::()==core::any::TypeId::of::() {1} else {0}}], //pub(crate) meta: [GlobalCoAllocMeta; mem::size_of::()], - pub(crate) metas: [GlobalCoAllocMeta; - alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)], + pub(crate) metas: [A::CoAllocMeta; { crate::meta_num_slots!(A, CO_ALLOC_PREF) }], } -impl RawVec +#[allow(unused_braces)] +impl RawVec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { meta_num_slots_global!(CO_ALLOC_PREF) }]:, { /// HACK(Centril): This exists because stable `const fn` can only call stable `const fn`, so /// they cannot call `Self::new()`. @@ -118,10 +121,16 @@ where } } -impl RawVec +#[allow(unused_braces)] +impl RawVec where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { + #[allow(dead_code)] + const fn new_plain_metas() -> [A::CoAllocMeta; { meta_num_slots_default!(A) }] { + loop {} + } + // Tiny Vecs are dumb. Skip to: // - 8 if the element size is 1, because any heap allocators is likely // to round up a request of less than 8 bytes to at least 8 bytes. @@ -143,8 +152,8 @@ where ptr: Unique::dangling(), cap: 0, alloc, - metas: [GlobalCoAllocMeta {/*one: 1*/ /* , two: 2, three: 3, four: 4*/}; - alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)], + metas: [A::CoAllocMeta::new_plain(); // @FIXME CoAlloc + {crate::meta_num_slots!(A, CO_ALLOC_PREF)}], } } @@ -218,12 +227,13 @@ where // Allocators currently return a `NonNull<[u8]>` whose length // matches the size requested. If that ever changes, the capacity // here should change to `ptr.len() / mem::size_of::()`. + #[allow(unreachable_code)] // @FIXME CoAlloc Self { ptr: unsafe { Unique::new_unchecked(ptr.cast().as_ptr()) }, cap: capacity, alloc, - metas: [GlobalCoAllocMeta {/*one: 1*/ /*, two: 2, three: 3, four: 4*/}; - alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)], + metas: [A::CoAllocMeta::new_plain(); // @FIXME CoAlloc + {crate::meta_num_slots!(A, CO_ALLOC_PREF)}], } } } @@ -240,12 +250,13 @@ where /// guaranteed. 
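Editor's sketch, not part of this patch: the new `metas` field is free in the non-cooperative case because an array of zero metadata slots is zero-sized. `Meta` and `BufModel` below are stand-ins used only to illustrate that layout argument.

#[allow(dead_code)]
#[derive(Clone, Copy)]
struct Meta {
    slot: usize, // stand-in for the allocator's coallocation metadata
}

#[allow(dead_code)]
struct BufModel<const META_SLOTS: usize> {
    ptr: *mut u8,
    cap: usize,
    metas: [Meta; META_SLOTS],
}

fn main() {
    // An array of zero metadata slots is a ZST, so it cannot enlarge the buffer header;
    // one slot costs exactly one `Meta`.
    assert_eq!(core::mem::size_of::<[Meta; 0]>(), 0);
    assert_eq!(core::mem::size_of::<[Meta; 1]>(), core::mem::size_of::<Meta>());
    println!(
        "header with 0 slots: {} bytes, with 1 slot: {} bytes",
        core::mem::size_of::<BufModel<0>>(),
        core::mem::size_of::<BufModel<1>>()
    );
}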
#[inline] pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, alloc: A) -> Self { + #[allow(unreachable_code)] //@FIXME CoAlloc Self { ptr: unsafe { Unique::new_unchecked(ptr) }, cap: capacity, alloc, - metas: [GlobalCoAllocMeta {/*one: 1*/ /*, two: 2, three: 3, four: 4*/}; - alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)], + metas: [A::CoAllocMeta::new_plain(); //@FIXME CoAlloc + {crate::meta_num_slots!(A, CO_ALLOC_PREF)}], } } @@ -270,6 +281,11 @@ where &self.alloc } + #[inline] + const fn assert_alignment() { + assert!(mem::size_of::() % mem::align_of::() == 0); + } + fn current_memory(&self) -> Option<(NonNull, Layout)> { if T::IS_ZST || self.cap == 0 { None @@ -278,7 +294,8 @@ where // and could hypothetically handle differences between stride and size, but this memory // has already been allocated so we know it can't overflow and currently rust does not // support such types. So we can do better by skipping some checks and avoid an unwrap. - let _: () = const { assert!(mem::size_of::() % mem::align_of::() == 0) }; + let _: () = Self::assert_alignment(); + //let _: () = const { assert!(mem::size_of::() % mem::align_of::() == 0) }; unsafe { let align = mem::align_of::(); let size = mem::size_of::().unchecked_mul(self.cap); @@ -309,18 +326,19 @@ where /// Aborts on OOM. #[cfg(not(no_global_oom_handling))] #[inline] + #[allow(unused_braces)] pub fn reserve(&mut self, len: usize, additional: usize) { // Callers expect this function to be very cheap when there is already sufficient capacity. // Therefore, we move all the resizing and error-handling logic from grow_amortized and // handle_reserve behind a call, while making sure that this function is likely to be // inlined as just a comparison and a call if the comparison fails. #[cold] - fn do_reserve_and_handle( - slf: &mut RawVec, + fn do_reserve_and_handle( + slf: &mut RawVec, len: usize, additional: usize, ) where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { handle_reserve(slf.grow_amortized(len, additional)); } @@ -394,9 +412,10 @@ where } } -impl RawVec +#[allow(unused_braces)] +impl RawVec where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { /// Returns if the buffer needs to grow to fulfill the needed extra capacity. /// Mainly used to make inlining reserve-calls possible without inlining `grow`. @@ -470,7 +489,8 @@ where let (ptr, layout) = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) }; // See current_memory() why this assert is here - let _: () = const { assert!(mem::size_of::() % mem::align_of::() == 0) }; + let _: () = Self::assert_alignment(); + //let _: () = const { assert!(mem::size_of::() % mem::align_of::() == 0) }; let ptr = unsafe { // `Layout::array` cannot overflow here because it would have // overflowed earlier when capacity was larger. @@ -517,15 +537,22 @@ where memory.map_err(|_| AllocError { layout: new_layout, non_exhaustive: () }.into()) } -unsafe impl<#[may_dangle] T, A: Allocator, const COOP_PREFERRED: bool> Drop - for RawVec +#[allow(unused_braces)] +unsafe impl<#[may_dangle] T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Drop + for RawVec where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { /// Frees the memory owned by the `RawVec` *without* trying to drop its contents. 
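Editor's sketch, not part of this patch: `assert_alignment` above only factors out the pre-existing compile-time check that a type's size is a multiple of its alignment, which is what lets `current_memory` rebuild the array layout without overflow checks. The same invariant, written as a free-standing const assertion:

const fn size_is_multiple_of_align<T>() -> bool {
    core::mem::size_of::<T>() % core::mem::align_of::<T>() == 0
}

// This holds for every Rust type today (size is always a multiple of alignment).
const _: () = assert!(size_is_multiple_of_align::<u64>());
const _: () = assert!(size_is_multiple_of_align::<(u8, u32)>());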
default fn drop(&mut self) { if let Some((ptr, layout)) = self.current_memory() { - if A::IS_CO_ALLOCATOR && COOP_PREFERRED { + let meta_num_slots = crate::meta_num_slots!(A, CO_ALLOC_PREF); + if meta_num_slots != 0 { + debug_assert!( + meta_num_slots == 1, + "Number of coallocation meta slots can be only 0 or 1, but it is {}!", + meta_num_slots + ); let meta = self.metas[0]; unsafe { self.alloc.co_deallocate(PtrAndMeta { ptr, meta }, layout) } } else { diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index a14e15549d5be..52af085bc470a 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -275,6 +275,7 @@ use crate::alloc::handle_alloc_error; use crate::alloc::{box_free, WriteCloneIntoRaw}; use crate::alloc::{AllocError, Allocator, Global, Layout}; use crate::borrow::{Cow, ToOwned}; +use crate::co_alloc::CoAllocPref; #[cfg(not(no_global_oom_handling))] use crate::string::String; #[cfg(not(no_global_oom_handling))] @@ -1987,9 +1988,10 @@ impl From> for Rc { #[cfg(not(no_global_oom_handling))] #[stable(feature = "shared_from_slice", since = "1.21.0")] -impl From> for Rc<[T]> +#[allow(unused_braces)] +impl From> for Rc<[T]> where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { meta_num_slots_global!(CO_ALLOC_PREF) }]:, { /// Allocate a reference-counted slice and move `v`'s items into it. /// @@ -2002,9 +2004,10 @@ where /// assert_eq!(vec![1, 2, 3], *shared); /// ``` #[inline] - fn from(mut v: Vec) -> Rc<[T]> + #[allow(unused_braces)] + fn from(mut v: Vec) -> Rc<[T]> where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { meta_num_slots_global!(CO_ALLOC_PREF) }]:, { unsafe { let rc = Rc::copy_from_slice(&v); diff --git a/library/alloc/src/slice.rs b/library/alloc/src/slice.rs index f2b4596c53a7d..e6a2d869468b4 100644 --- a/library/alloc/src/slice.rs +++ b/library/alloc/src/slice.rs @@ -28,6 +28,7 @@ use crate::alloc::{self, Global}; #[cfg(not(no_global_oom_handling))] use crate::borrow::ToOwned; use crate::boxed::Box; +use crate::co_alloc::CoAllocPref; use crate::vec::Vec; #[cfg(test)] @@ -84,6 +85,9 @@ pub use hack::into_vec; #[cfg(test)] pub use hack::to_vec; +#[cfg(test)] +pub use hack::to_vec_co; + // HACK(japaric): With cfg(test) `impl [T]` is not available, these three // functions are actually methods that are in `impl [T]` but not in // `core::slice::SliceExt` - we need to supply these functions for the @@ -92,16 +96,28 @@ pub(crate) mod hack { use core::alloc::Allocator; use crate::boxed::Box; + use crate::co_alloc::CoAllocPref; use crate::vec::Vec; // We shouldn't add inline attribute to this since this is used in // `vec!` macro mostly and causes perf regression. See #71204 for // discussion and perf results. 
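Editor's sketch, not part of this patch: a reduced model of the drop-path selection in the `Drop` impl above. `release_with_meta` and `release_plain` are placeholders for the cooperative `co_deallocate` path and the ordinary `deallocate` path.

fn release<const META_SLOTS: usize>(ptr: *mut u8, metas: [usize; META_SLOTS]) {
    // One metadata slot means the cooperative path gets the stored metadata;
    // zero slots means the plain path is used.
    if let Some(&meta) = metas.first() {
        debug_assert!(META_SLOTS == 1, "at most one metadata slot is supported, got {}", META_SLOTS);
        release_with_meta(ptr, meta);
    } else {
        release_plain(ptr);
    }
}

fn release_with_meta(_ptr: *mut u8, _meta: usize) { /* stand-in for co_deallocate */ }
fn release_plain(_ptr: *mut u8) { /* stand-in for deallocate */ }

fn main() {
    release::<0>(core::ptr::null_mut(), []);
    release::<1>(core::ptr::null_mut(), [42]);
}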
- pub fn into_vec( + #[allow(unused_braces)] + pub fn into_vec( b: Box<[T], A>, - ) -> Vec + ) -> Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots_default!(A) }]:, + { + into_vec_co::(b) + } + + #[allow(unused_braces)] + pub fn into_vec_co( + b: Box<[T], A>, + ) -> Vec + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { unsafe { let len = b.len(); @@ -112,47 +128,140 @@ pub(crate) mod hack { #[cfg(not(no_global_oom_handling))] #[inline] - pub fn to_vec( + #[allow(unused_braces)] + pub fn to_vec(s: &[T], alloc: A) -> Vec + where + [(); { crate::meta_num_slots_default!(A) }]:, + { + T::to_vec(s, alloc) + } + + #[cfg(not(no_global_oom_handling))] + #[inline] + #[allow(unused_braces)] + pub fn to_vec_co( s: &[T], alloc: A, - ) -> Vec + ) -> Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { - T::to_vec(s, alloc) + T::to_vec_co(s, alloc) } #[cfg(not(no_global_oom_handling))] + #[allow(unused_braces)] pub trait ConvertVec { - fn to_vec( + fn to_vec(s: &[Self], alloc: A) -> Vec + where + Self: Sized, + [(); { crate::meta_num_slots_default!(A) }]:; + } + + #[allow(unused_braces)] + pub trait ConvertVecCo { + fn to_vec_co( s: &[Self], alloc: A, - ) -> Vec + ) -> Vec where Self: Sized, - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:; + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:; } #[cfg(not(no_global_oom_handling))] + #[allow(unused_braces)] impl ConvertVec for T { #[inline] - default fn to_vec( + #[allow(unused_braces)] + default fn to_vec(s: &[Self], alloc: A) -> Vec + where + [(); { crate::meta_num_slots_default!(A) }]:, + { + #[allow(unused_braces)] + struct DropGuard<'a, T, A: Allocator> + where + [(); { crate::meta_num_slots_default!(A) }]:, + { + vec: &'a mut Vec, + num_init: usize, + } + impl<'a, T, A: Allocator> Drop for DropGuard<'a, T, A> + where + [(); { crate::meta_num_slots_default!(A) }]:, + { + #[inline] + fn drop(&mut self) { + // SAFETY: + // items were marked initialized in the loop below + unsafe { + self.vec.set_len(self.num_init); + } + } + } + let mut vec = Vec::with_capacity_in(s.len(), alloc); + let mut guard = DropGuard { vec: &mut vec, num_init: 0 }; + let slots = guard.vec.spare_capacity_mut(); + // .take(slots.len()) is necessary for LLVM to remove bounds checks + // and has better codegen than zip. + for (i, b) in s.iter().enumerate().take(slots.len()) { + guard.num_init = i; + slots[i].write(b.clone()); + } + core::mem::forget(guard); + // SAFETY: + // the vec was allocated and initialized above to at least this length. + unsafe { + vec.set_len(s.len()); + } + vec + } + } + + #[cfg(not(no_global_oom_handling))] + impl ConvertVec for T { + #[inline] + #[allow(unused_braces)] + fn to_vec(s: &[Self], alloc: A) -> Vec + where + [(); { crate::meta_num_slots_default!(A) }]:, + { + let mut v = Vec::with_capacity_in(s.len(), alloc); + // SAFETY: + // allocated above with the capacity of `s`, and initialize to `s.len()` in + // ptr::copy_to_non_overlapping below. 
+ unsafe { + s.as_ptr().copy_to_nonoverlapping(v.as_mut_ptr(), s.len()); + v.set_len(s.len()); + } + v + } + } + + #[cfg(not(no_global_oom_handling))] + #[allow(unused_braces)] + impl ConvertVecCo for T { + #[inline] + #[allow(unused_braces)] + default fn to_vec_co( s: &[Self], alloc: A, - ) -> Vec + ) -> Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { - struct DropGuard<'a, T, A: Allocator, const COOP_PREFERRED: bool> + #[allow(unused_braces)] + struct DropGuard<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { - vec: &'a mut Vec, + vec: &'a mut Vec, num_init: usize, } - impl<'a, T, A: Allocator, const COOP_PREFERRED: bool> Drop for DropGuard<'a, T, A, COOP_PREFERRED> + impl<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Drop + for DropGuard<'a, T, A, CO_ALLOC_PREF> where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { #[inline] fn drop(&mut self) { @@ -183,14 +292,15 @@ pub(crate) mod hack { } #[cfg(not(no_global_oom_handling))] - impl ConvertVec for T { + impl ConvertVecCo for T { #[inline] - fn to_vec( + #[allow(unused_braces)] + fn to_vec_co( s: &[Self], alloc: A, - ) -> Vec + ) -> Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { let mut v = Vec::with_capacity_in(s.len(), alloc); // SAFETY: @@ -439,15 +549,30 @@ impl [T] { /// ``` #[cfg(not(no_global_oom_handling))] #[rustc_allow_incoherent_impl] + #[allow(unused_braces)] #[rustc_conversion_suggestion] #[stable(feature = "rust1", since = "1.0.0")] #[inline] - pub fn to_vec(&self) -> Vec + pub fn to_vec(&self) -> Vec + where + T: Clone, + { + self.to_vec_in::(Global) + } + + /// Coallocation-aware alternative to `to_vec`. + #[cfg(not(no_global_oom_handling))] + #[rustc_allow_incoherent_impl] + #[allow(unused_braces)] + #[rustc_conversion_suggestion] + #[unstable(feature = "global_co_alloc", issue = "none")] + #[inline] + pub fn to_vec_co(&self) -> Vec where T: Clone, - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { meta_num_slots_global!(CO_ALLOC_PREF) }]:, { - self.to_vec_in::(Global) + self.to_vec_in_co::(Global) } /// Copies `self` into a new `Vec` with an allocator. @@ -467,16 +592,32 @@ impl [T] { #[rustc_allow_incoherent_impl] #[inline] #[unstable(feature = "allocator_api", issue = "32838")] - pub fn to_vec_in( + #[allow(unused_braces)] + pub fn to_vec_in(&self, alloc: A) -> Vec + where + T: Clone, + [(); { crate::meta_num_slots_default!(A) }]:, + { + // N.B., see the `hack` module in this file for more details. + hack::to_vec(self, alloc) + } + + /// Coallocation-aware version of `to_vec_in`. + #[cfg(not(no_global_oom_handling))] + #[rustc_allow_incoherent_impl] + #[inline] + #[unstable(feature = "global_co_alloc", issue = "none")] + #[allow(unused_braces)] + pub fn to_vec_in_co( &self, alloc: A, - ) -> Vec + ) -> Vec where T: Clone, - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { // N.B., see the `hack` module in this file for more details. 
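Editor's sketch, not part of this patch: the panic-safety pattern that the `ConvertVec`/`ConvertVecCo` default impls above rely on, extracted into a standalone function. `clone_into_vec` and `Guard` are placeholder names; the point is that a panicking `clone` still leaves the vector with a valid length covering only the elements actually written.

fn clone_into_vec<T: Clone>(src: &[T]) -> Vec<T> {
    struct Guard<'a, T> {
        vec: &'a mut Vec<T>,
        initialized: usize,
    }
    impl<T> Drop for Guard<'_, T> {
        fn drop(&mut self) {
            // Runs only if a `clone` below panics: keep just the elements written so far.
            unsafe { self.vec.set_len(self.initialized) };
        }
    }

    let mut vec = Vec::with_capacity(src.len());
    let mut guard = Guard { vec: &mut vec, initialized: 0 };
    let spare = guard.vec.spare_capacity_mut();
    for (i, item) in src.iter().enumerate().take(spare.len()) {
        spare[i].write(item.clone());
        guard.initialized = i + 1;
    }
    core::mem::forget(guard);
    // SAFETY: every one of the `src.len()` leading slots was initialized in the loop.
    unsafe { vec.set_len(src.len()) };
    vec
}

fn main() {
    assert_eq!(clone_into_vec(&[1, 2, 3]), vec![1, 2, 3]);
}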
- hack::to_vec(self, alloc) + hack::to_vec_co(self, alloc) } /// Converts `self` into a vector without clones or allocation. @@ -496,16 +637,30 @@ impl [T] { #[rustc_allow_incoherent_impl] #[stable(feature = "rust1", since = "1.0.0")] #[inline] - pub fn into_vec( - self: Box, - ) -> Vec + #[allow(unused_braces)] + pub fn into_vec(self: Box) -> Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots_default!(A) }]:, { // N.B., see the `hack` module in this file for more details. hack::into_vec(self) } + /// Coallocation-aware version of [into_vec]. + #[rustc_allow_incoherent_impl] + #[unstable(feature = "global_co_alloc", issue = "none")] + #[inline] + #[allow(unused_braces)] + pub fn into_vec_co( + self: Box, + ) -> Vec + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + { + // N.B., see the `hack` module in this file for more details. + hack::into_vec_co(self) + } + /// Creates a vector by copying a slice `n` times. /// /// # Panics @@ -815,9 +970,10 @@ impl> Join<&[T]> for [V] { //////////////////////////////////////////////////////////////////////////////// #[stable(feature = "rust1", since = "1.0.0")] -impl Borrow<[T]> for Vec +#[allow(unused_braces)] +impl Borrow<[T]> for Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn borrow(&self) -> &[T] { &self[..] @@ -825,9 +981,10 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl BorrowMut<[T]> for Vec +#[allow(unused_braces)] +impl BorrowMut<[T]> for Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn borrow_mut(&mut self) -> &mut [T] { &mut self[..] @@ -838,12 +995,20 @@ where // public in the crate and has the Allocator parameter so that // vec::clone_from use it too. #[cfg(not(no_global_oom_handling))] -pub(crate) trait SpecCloneIntoVec { +#[allow(unused_braces)] +pub(crate) trait SpecCloneIntoVec +where + [(); { crate::meta_num_slots_default!(A) }]:, +{ fn clone_into(&self, target: &mut Vec); } #[cfg(not(no_global_oom_handling))] -impl SpecCloneIntoVec for [T] { +#[allow(unused_braces)] +impl SpecCloneIntoVec for [T] +where + [(); { crate::meta_num_slots_default!(A) }]:, +{ default fn clone_into(&self, target: &mut Vec) { // drop anything in target that will not be overwritten target.truncate(self.len()); @@ -859,13 +1024,61 @@ impl SpecCloneIntoVec for [T] { } #[cfg(not(no_global_oom_handling))] -impl SpecCloneIntoVec for [T] { +#[allow(unused_braces)] +impl SpecCloneIntoVec for [T] +where + [(); { crate::meta_num_slots_default!(A) }]:, +{ fn clone_into(&self, target: &mut Vec) { target.clear(); target.extend_from_slice(self); } } +/// Coallocation-aware version of `SpecCloneIntoVec`. +#[cfg(not(no_global_oom_handling))] +#[allow(unused_braces)] +pub(crate) trait SpecCloneIntoVecCo +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ + fn clone_into_co(&self, target: &mut Vec); +} + +#[cfg(not(no_global_oom_handling))] +#[allow(unused_braces)] +impl + SpecCloneIntoVecCo for [T] +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ + default fn clone_into_co(&self, target: &mut Vec) { + // drop anything in target that will not be overwritten + target.truncate(self.len()); + + // target.len <= self.len due to the truncate above, so the + // slices here are always in-bounds. 
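Editor's sketch, not part of this patch: the reuse strategy behind `SpecCloneIntoVec`/`SpecCloneIntoVecCo::clone_into` shown in this hunk, restated as a standalone function over a plain `Vec` (the `clone_into_reusing` name is a placeholder).

fn clone_into_reusing<T: Clone>(src: &[T], target: &mut Vec<T>) {
    // Drop anything in `target` that will not be overwritten.
    target.truncate(src.len());
    // `target.len() <= src.len()` after the truncate, so this split is in bounds.
    let (head, tail) = src.split_at(target.len());
    // Reuse the already-initialized, already-allocated prefix...
    target.clone_from_slice(head);
    // ...and clone the remainder into fresh capacity.
    target.extend_from_slice(tail);
}

fn main() {
    let mut v = vec![10, 20, 30, 40];
    clone_into_reusing(&[1, 2], &mut v);
    assert_eq!(v, [1, 2]);
    clone_into_reusing(&[7, 8, 9], &mut v);
    assert_eq!(v, [7, 8, 9]);
}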
+ let (init, tail) = self.split_at(target.len()); + + // reuse the contained values' allocations/resources. + target.clone_from_slice(init); + target.extend_from_slice(tail); + } +} + +#[cfg(not(no_global_oom_handling))] +#[allow(unused_braces)] +impl + SpecCloneIntoVecCo for [T] +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ + fn clone_into_co(&self, target: &mut Vec) { + target.clear(); + target.extend_from_slice(self); + } +} + #[cfg(not(no_global_oom_handling))] #[stable(feature = "rust1", since = "1.0.0")] impl ToOwned for [T] { diff --git a/library/alloc/src/str.rs b/library/alloc/src/str.rs index eedf856f5d253..3c900c1164536 100644 --- a/library/alloc/src/str.rs +++ b/library/alloc/src/str.rs @@ -18,6 +18,7 @@ use crate::alloc; use crate::alloc::Global; use crate::borrow::ToOwned; use crate::boxed::Box; +use crate::co_alloc::CoAllocPref; use crate::slice::{Concat, Join, SliceIndex}; use crate::string::String; use crate::vec::Vec; @@ -128,15 +129,16 @@ macro_rules! copy_slice_and_advance { // [T] and str both impl AsRef<[T]> for some T // => s.borrow().as_ref() and we always have slices #[cfg(not(no_global_oom_handling))] -fn join_generic_copy( +#[allow(unused_braces)] +fn join_generic_copy( slice: &[S], sep: &[T], -) -> Vec +) -> Vec where T: Copy, B: AsRef<[T]> + ?Sized, S: Borrow, - [(); alloc::co_alloc_metadata_num_slots_with_preference_global(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots_global!(CO_ALLOC_PREF) }]:, { let sep_len = sep.len(); let mut iter = slice.iter(); @@ -144,7 +146,7 @@ where // the first slice is the only one without a separator preceding it let first = match iter.next() { Some(first) => first, - None => return vec![], + None => return Vec::new_co(), }; // compute the exact total length of the joined Vec @@ -159,7 +161,7 @@ where .expect("attempt to join into collection with len > usize::MAX"); // prepare an uninitialized buffer - let mut result = Vec::with_capacity(reserved_len); + let mut result = Vec::with_capacity_co(reserved_len); debug_assert!(result.capacity() >= reserved_len); result.extend_from_slice(first.borrow().as_ref()); diff --git a/library/alloc/src/vec/drain.rs b/library/alloc/src/vec/drain.rs index a219ca302d4c4..eaea943baecd0 100644 --- a/library/alloc/src/vec/drain.rs +++ b/library/alloc/src/vec/drain.rs @@ -1,9 +1,10 @@ use crate::alloc::{Allocator, Global}; +use crate::co_alloc::CoAllocPref; use core::fmt; use core::iter::{FusedIterator, TrustedLen}; use core::mem::{self, ManuallyDrop, SizedTypeProperties}; use core::ptr::{self, NonNull}; -use core::{alloc, slice}; +use core::slice; use super::Vec; @@ -24,9 +25,9 @@ pub struct Drain< 'a, T: 'a, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + 'a = Global, - const COOP_PREFERRED: bool = { SHORT_TERM_VEC_PREFERS_COOP!() }, + const CO_ALLOC_PREF: CoAllocPref = { SHORT_TERM_VEC_CO_ALLOC_PREF!() }, > where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { /// Index of tail to preserve pub(super) tail_start: usize, @@ -34,23 +35,25 @@ pub struct Drain< pub(super) tail_len: usize, /// Current remaining range to remove pub(super) iter: slice::Iter<'a, T>, - pub(super) vec: NonNull>, + pub(super) vec: NonNull>, } #[stable(feature = "collection_debug", since = "1.17.0")] -impl fmt::Debug - for Drain<'_, T, A, COOP_PREFERRED> +#[allow(unused_braces)] +impl fmt::Debug + for Drain<'_, T, A, CO_ALLOC_PREF> where - [(); 
alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("Drain").field(&self.iter.as_slice()).finish() } } -impl<'a, T, A: Allocator, const COOP_PREFERRED: bool> Drain<'a, T, A, COOP_PREFERRED> +#[allow(unused_braces)] +impl<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Drain<'a, T, A, CO_ALLOC_PREF> where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { /// Returns the remaining items of this iterator as a slice. /// @@ -150,9 +153,11 @@ where } #[stable(feature = "vec_drain_as_slice", since = "1.46.0")] -impl<'a, T, A: Allocator, const COOP_PREFERRED: bool> AsRef<[T]> for Drain<'a, T, A, COOP_PREFERRED> +#[allow(unused_braces)] +impl<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> AsRef<[T]> + for Drain<'a, T, A, CO_ALLOC_PREF> where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn as_ref(&self) -> &[T] { self.as_slice() @@ -160,24 +165,27 @@ where } #[stable(feature = "drain", since = "1.6.0")] -unsafe impl Sync - for Drain<'_, T, A, COOP_PREFERRED> +#[allow(unused_braces)] +unsafe impl Sync + for Drain<'_, T, A, CO_ALLOC_PREF> where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { } #[stable(feature = "drain", since = "1.6.0")] -unsafe impl Send - for Drain<'_, T, A, COOP_PREFERRED> +#[allow(unused_braces)] +unsafe impl Send + for Drain<'_, T, A, CO_ALLOC_PREF> where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { } #[stable(feature = "drain", since = "1.6.0")] -impl Iterator for Drain<'_, T, A, COOP_PREFERRED> +#[allow(unused_braces)] +impl Iterator for Drain<'_, T, A, CO_ALLOC_PREF> where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { type Item = T; @@ -192,10 +200,11 @@ where } #[stable(feature = "drain", since = "1.6.0")] -impl DoubleEndedIterator - for Drain<'_, T, A, COOP_PREFERRED> +#[allow(unused_braces)] +impl DoubleEndedIterator + for Drain<'_, T, A, CO_ALLOC_PREF> where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { #[inline] fn next_back(&mut self) -> Option { @@ -204,22 +213,24 @@ where } #[stable(feature = "drain", since = "1.6.0")] -impl Drop for Drain<'_, T, A, COOP_PREFERRED> +#[allow(unused_braces)] +impl Drop for Drain<'_, T, A, CO_ALLOC_PREF> where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn drop(&mut self) { /// Moves back the un-`Drain`ed elements to restore the original `Vec`. 
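Editor's usage example, not part of this patch: the observable behaviour of `Drain` is unchanged by the new `CO_ALLOC_PREF` parameter. Dropping a partially consumed `Drain` still removes the whole requested range and shifts the preserved tail back, which is what the guard documented above is for.

fn main() {
    let mut v = vec![1, 2, 3, 4, 5];
    {
        let mut drain = v.drain(1..4);
        assert_eq!(drain.next(), Some(2));
        // The rest of the drained range (3 and 4) is removed when `drain` is dropped.
    }
    assert_eq!(v, [1, 5]);
}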
- struct DropGuard<'r, 'a, T, A: Allocator, const COOP_PREFERRED: bool>( - &'r mut Drain<'a, T, A, COOP_PREFERRED>, + #[allow(unused_braces)] + struct DropGuard<'r, 'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref>( + &'r mut Drain<'a, T, A, CO_ALLOC_PREF>, ) where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:; + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:; - impl<'r, 'a, T, A: Allocator, const COOP_PREFERRED: bool> Drop - for DropGuard<'r, 'a, T, A, COOP_PREFERRED> + impl<'r, 'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Drop + for DropGuard<'r, 'a, T, A, CO_ALLOC_PREF> where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn drop(&mut self) { if self.0.tail_len > 0 { @@ -284,10 +295,11 @@ where } #[stable(feature = "drain", since = "1.6.0")] -impl ExactSizeIterator - for Drain<'_, T, A, COOP_PREFERRED> +#[allow(unused_braces)] +impl ExactSizeIterator + for Drain<'_, T, A, CO_ALLOC_PREF> where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn is_empty(&self) -> bool { self.iter.is_empty() @@ -295,15 +307,19 @@ where } #[unstable(feature = "trusted_len", issue = "37572")] -unsafe impl TrustedLen - for Drain<'_, T, A, COOP_PREFERRED> +#[allow(unused_braces)] +unsafe impl TrustedLen + for Drain<'_, T, A, CO_ALLOC_PREF> where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { } #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for Drain<'_, T, A, COOP_PREFERRED> where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: +#[allow(unused_braces)] +impl FusedIterator + for Drain<'_, T, A, CO_ALLOC_PREF> +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { } diff --git a/library/alloc/src/vec/drain_filter.rs b/library/alloc/src/vec/drain_filter.rs index 89baafca46729..c47d21733cd94 100644 --- a/library/alloc/src/vec/drain_filter.rs +++ b/library/alloc/src/vec/drain_filter.rs @@ -1,6 +1,7 @@ use crate::alloc::{Allocator, Global}; +use crate::co_alloc::CoAllocPref; use core::mem::{self, ManuallyDrop}; -use core::{alloc, ptr, slice}; +use core::{ptr, slice}; use super::Vec; @@ -19,17 +20,18 @@ use super::Vec; /// ``` #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] #[derive(Debug)] +#[allow(unused_braces)] pub struct DrainFilter< 'a, T, F, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, - const COOP_PREFERRED: bool = true, + const CO_ALLOC_PREF: CoAllocPref = { CO_ALLOC_PREF_DEFAULT!() }, > where F: FnMut(&mut T) -> bool, - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { - pub(super) vec: &'a mut Vec, + pub(super) vec: &'a mut Vec, /// The index of the item that will be inspected by the next call to `next`. pub(super) idx: usize, /// The number of items that have been drained (removed) thus far. 
@@ -46,10 +48,11 @@ pub struct DrainFilter< pub(super) panic_flag: bool, } -impl DrainFilter<'_, T, F, A, COOP_PREFERRED> +#[allow(unused_braces)] +impl DrainFilter<'_, T, F, A, CO_ALLOC_PREF> where F: FnMut(&mut T) -> bool, - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { /// Returns a reference to the underlying allocator. #[unstable(feature = "allocator_api", issue = "32838")] @@ -115,11 +118,12 @@ where } #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] -impl Iterator - for DrainFilter<'_, T, F, A, COOP_PREFERRED> +#[allow(unused_braces)] +impl Iterator + for DrainFilter<'_, T, F, A, CO_ALLOC_PREF> where F: FnMut(&mut T) -> bool, - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { type Item = T; @@ -155,26 +159,29 @@ where } #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] -impl Drop - for DrainFilter<'_, T, F, A, COOP_PREFERRED> +#[allow(unused_braces)] +impl Drop + for DrainFilter<'_, T, F, A, CO_ALLOC_PREF> where F: FnMut(&mut T) -> bool, - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { + #[allow(unused_braces)] fn drop(&mut self) { - struct BackshiftOnDrop<'a, 'b, T, F, A: Allocator, const COOP_PREFERRED: bool> + struct BackshiftOnDrop<'a, 'b, T, F, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> where F: FnMut(&mut T) -> bool, - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { - drain: &'b mut DrainFilter<'a, T, F, A, COOP_PREFERRED>, + drain: &'b mut DrainFilter<'a, T, F, A, CO_ALLOC_PREF>, } - impl<'a, 'b, T, F, A: Allocator, const COOP_PREFERRED: bool> Drop - for BackshiftOnDrop<'a, 'b, T, F, A, COOP_PREFERRED> + #[allow(unused_braces)] + impl<'a, 'b, T, F, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Drop + for BackshiftOnDrop<'a, 'b, T, F, A, CO_ALLOC_PREF> where F: FnMut(&mut T) -> bool, - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn drop(&mut self) { unsafe { diff --git a/library/alloc/src/vec/in_place_collect.rs b/library/alloc/src/vec/in_place_collect.rs index 3afbc754061c0..f2d9579c20276 100644 --- a/library/alloc/src/vec/in_place_collect.rs +++ b/library/alloc/src/vec/in_place_collect.rs @@ -138,7 +138,7 @@ //! vec.truncate(write_idx); //! ``` use crate::alloc::Global; -use core::alloc; +use crate::co_alloc::CoAllocPref; use core::iter::{InPlaceIterable, SourceIter, TrustedRandomAccessNoCoerce}; use core::mem::{self, ManuallyDrop, SizedTypeProperties}; use core::ptr::{self}; @@ -153,10 +153,10 @@ pub(super) trait InPlaceIterableMarker {} impl InPlaceIterableMarker for T where T: InPlaceIterable {} #[allow(unused_braces)] -impl SpecFromIter for Vec +impl SpecFromIter for Vec where I: Iterator + SourceIter + InPlaceIterableMarker, - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots_global!(CO_ALLOC_PREF) }]:, { default fn from_iter(mut iterator: I) -> Self { // See "Layout constraints" section in the module documentation. 
We rely on const @@ -210,7 +210,7 @@ where src.forget_allocation_drop_remaining(); mem::forget(dst_guard); - let vec = unsafe { Vec::from_raw_parts(dst_buf, len, cap) }; + let vec = unsafe { Vec::from_raw_parts_co(dst_buf, len, cap) }; vec } diff --git a/library/alloc/src/vec/in_place_drop.rs b/library/alloc/src/vec/in_place_drop.rs index bf2d004e9db39..42ebb6ca395d7 100644 --- a/library/alloc/src/vec/in_place_drop.rs +++ b/library/alloc/src/vec/in_place_drop.rs @@ -36,6 +36,10 @@ impl Drop for InPlaceDstBufDrop { #[inline] fn drop(&mut self) { // false = no need for co-alloc metadata, since it would get lost once converted to Box. - unsafe { super::Vec::::from_raw_parts(self.ptr, self.len, self.cap) }; + unsafe { + super::Vec::::from_raw_parts( + self.ptr, self.len, self.cap, + ) + }; } } diff --git a/library/alloc/src/vec/into_iter.rs b/library/alloc/src/vec/into_iter.rs index 3555c64af9f8e..9bba9b992e601 100644 --- a/library/alloc/src/vec/into_iter.rs +++ b/library/alloc/src/vec/into_iter.rs @@ -1,6 +1,7 @@ #[cfg(not(no_global_oom_handling))] use super::AsVecIntoIter; use crate::alloc::{Allocator, Global}; +use crate::co_alloc::CoAllocPref; #[cfg(not(no_global_oom_handling))] use crate::collections::VecDeque; use crate::raw_vec::RawVec; @@ -13,7 +14,7 @@ use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties}; use core::ops::Deref; use core::ptr::{self, NonNull}; use core::slice::{self}; -use core::{alloc, array, fmt}; +use core::{array, fmt}; /// An iterator that moves out of a vector. /// @@ -32,9 +33,9 @@ use core::{alloc, array, fmt}; pub struct IntoIter< T, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, - const COOP_PREFERRED: bool = { SHORT_TERM_VEC_PREFERS_COOP!() }, + const CO_ALLOC_PREF: CoAllocPref = { SHORT_TERM_VEC_CO_ALLOC_PREF!() }, > where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { pub(super) buf: NonNull, pub(super) phantom: PhantomData, @@ -49,19 +50,21 @@ pub struct IntoIter< } #[stable(feature = "vec_intoiter_debug", since = "1.13.0")] -impl fmt::Debug - for IntoIter +#[allow(unused_braces)] +impl fmt::Debug + for IntoIter where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("IntoIter").field(&self.as_slice()).finish() } } -impl IntoIter +#[allow(unused_braces)] +impl IntoIter where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { /// Returns the remaining items of this iterator as a slice. /// @@ -132,14 +135,22 @@ where // this creates less assembly self.cap = 0; self.buf = unsafe { - // @FIXME The below if COOP_PREFERRED {..} else {..} - // branching exists, because the following fails. Otherwise we'd have a snowball effect of wide spread of where...Global... + // @FIXME The below if .. {..} else {..} + // branching exists, because the following fails. Otherwise we'd have a snowball effect of wide spread of where...Global... bounds. 
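Editor's usage example, not part of this patch: `IntoIter::as_slice`, whose impl block gains the `CO_ALLOC_PREF` parameter above, keeps its documented behaviour of exposing only the not-yet-yielded items.

fn main() {
    let mut into_iter = vec!['a', 'b', 'c'].into_iter();
    assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    let _ = into_iter.next();
    assert_eq!(into_iter.as_slice(), &['b', 'c']);
}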
// - // NonNull::new_unchecked(RawVec::::NEW.ptr()) - if COOP_PREFERRED { - NonNull::new_unchecked(RawVec::::NEW.ptr()) + //NonNull::new_unchecked(RawVec::::NEW.ptr()); + let meta_num_slots = crate::meta_num_slots!(A, CO_ALLOC_PREF); + if meta_num_slots > 0 { + debug_assert!( + meta_num_slots == 1, + "Number of coallocation meta slots can be only 0 or 1, but it is {}!", + meta_num_slots + ); + NonNull::new_unchecked( + RawVec::::NEW.ptr(), + ) } else { - NonNull::new_unchecked(RawVec::::NEW.ptr()) + NonNull::new_unchecked(RawVec::::NEW.ptr()) } }; self.ptr = self.buf.as_ptr(); @@ -161,7 +172,7 @@ where #[cfg(not(no_global_oom_handling))] #[inline] - pub(crate) fn into_vecdeque(self) -> VecDeque { + pub(crate) fn into_vecdeque(self) -> VecDeque { // Keep our `Drop` impl from dropping the elements and the allocator let mut this = ManuallyDrop::new(self); @@ -188,9 +199,10 @@ where } #[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")] -impl AsRef<[T]> for IntoIter +#[allow(unused_braces)] +impl AsRef<[T]> for IntoIter where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn as_ref(&self) -> &[T] { self.as_slice() @@ -198,24 +210,27 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl Send - for IntoIter +#[allow(unused_braces)] +unsafe impl Send + for IntoIter where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { } #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl Sync - for IntoIter +#[allow(unused_braces)] +unsafe impl Sync + for IntoIter where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { } #[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for IntoIter +#[allow(unused_braces)] +impl Iterator for IntoIter where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { type Item = T; @@ -330,10 +345,11 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator - for IntoIter +#[allow(unused_braces)] +impl DoubleEndedIterator + for IntoIter where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { #[inline] fn next_back(&mut self) -> Option { @@ -375,10 +391,11 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator - for IntoIter +#[allow(unused_braces)] +impl ExactSizeIterator + for IntoIter where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn is_empty(&self) -> bool { self.ptr == self.end @@ -386,16 +403,20 @@ where } #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for IntoIter where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: +#[allow(unused_braces)] +impl FusedIterator + for IntoIter +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { } #[unstable(feature = "trusted_len", issue = "37572")] -unsafe impl TrustedLen - for IntoIter +#[allow(unused_braces)] +unsafe impl TrustedLen + for IntoIter where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { } @@ -411,65 +432,68 @@ impl NonDrop for T {} #[doc(hidden)] #[unstable(issue = "none", 
feature = "std_internals")] +#[allow(unused_braces)] // TrustedRandomAccess (without NoCoerce) must not be implemented because // subtypes/supertypes of `T` might not be `NonDrop` -unsafe impl TrustedRandomAccessNoCoerce - for IntoIter +unsafe impl TrustedRandomAccessNoCoerce + for IntoIter where T: NonDrop, - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { const MAY_HAVE_SIDE_EFFECT: bool = false; } #[cfg(not(no_global_oom_handling))] #[stable(feature = "vec_into_iter_clone", since = "1.8.0")] -impl Clone - for IntoIter +#[allow(unused_braces)] +impl Clone + for IntoIter where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { #[cfg(not(test))] fn clone(&self) -> Self { // @FIXME Remove the following extras - used for type checks only let slice = self.as_slice(); - let vec: crate::vec::Vec = - slice.to_vec_in::(self.alloc.deref().clone()); - let _iter: IntoIter = vec.into_iter(); + let vec: crate::vec::Vec = + slice.to_vec_in_co::(self.alloc.deref().clone()); + let _iter: IntoIter = vec.into_iter(); - //self.as_slice().to_vec_in::(self.alloc.deref().clone()).into_iter() + //self.as_slice().to_vec_in::(self.alloc.deref().clone()).into_iter() loop {} } #[cfg(test)] fn clone(&self) -> Self { - crate::slice::to_vec(self.as_slice(), self.alloc.deref().clone()).into_iter() + crate::slice::to_vec_co(self.as_slice(), self.alloc.deref().clone()).into_iter() } } #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl<#[may_dangle] T, A: Allocator, const COOP_PREFERRED: bool> Drop - for IntoIter +#[allow(unused_braces)] +unsafe impl<#[may_dangle] T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Drop + for IntoIter where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn drop(&mut self) { - struct DropGuard<'a, T, A: Allocator, const COOP_PREFERRED: bool>( - &'a mut IntoIter, + struct DropGuard<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref>( + &'a mut IntoIter, ) where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:; + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:; - impl Drop for DropGuard<'_, T, A, COOP_PREFERRED> + impl Drop for DropGuard<'_, T, A, CO_ALLOC_PREF> where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn drop(&mut self) { unsafe { // `IntoIter::alloc` is not used anymore after this and will be dropped by RawVec let alloc = ManuallyDrop::take(&mut self.0.alloc); // RawVec handles deallocation - // @FIXME pass true instead of COOP_PREFERRED - use e.g.: if COOP_PREFERRED {let _ = RawVec::::from_raw_parts_in(..) } else { let _ = from_raw_parts_in_coop(...)} } - let _ = RawVec::::from_raw_parts_in( + // @FIXME pass true instead of CO_ALLOC_PREF - use e.g.: if CO_ALLOC_PREF {let _ = RawVec::::from_raw_parts_in(..) 
} else { let _ = from_raw_parts_in_coop(...)} } + let _ = RawVec::::from_raw_parts_in( self.0.buf.as_ptr(), self.0.cap, alloc, @@ -491,19 +515,21 @@ where // also refer to the vec::in_place_collect module documentation to get an overview #[unstable(issue = "none", feature = "inplace_iteration")] #[doc(hidden)] -unsafe impl InPlaceIterable - for IntoIter +#[allow(unused_braces)] +unsafe impl InPlaceIterable + for IntoIter where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { } #[unstable(issue = "none", feature = "inplace_iteration")] #[doc(hidden)] -unsafe impl SourceIter - for IntoIter +#[allow(unused_braces)] +unsafe impl SourceIter + for IntoIter where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { type Source = Self; diff --git a/library/alloc/src/vec/mod.rs b/library/alloc/src/vec/mod.rs index 966a113769dec..a5a31bfd70913 100644 --- a/library/alloc/src/vec/mod.rs +++ b/library/alloc/src/vec/mod.rs @@ -54,7 +54,6 @@ #![stable(feature = "rust1", since = "1.0.0")] #[cfg(not(no_global_oom_handling))] -use core::alloc; use core::cmp; use core::cmp::Ordering; use core::convert::TryFrom; @@ -73,6 +72,7 @@ use core::slice::{self, SliceIndex}; use crate::alloc::{Allocator, Global}; use crate::borrow::{Cow, ToOwned}; use crate::boxed::Box; +use crate::co_alloc::CoAllocPref; use crate::collections::TryReserveError; use crate::raw_vec::RawVec; @@ -149,16 +149,6 @@ use self::spec_extend::SpecExtend; #[cfg(not(no_global_oom_handling))] mod spec_extend; -/// Default `Vec`, `DefVec`, `DecVeque`, `DefDecVeq` "cooperation" (`COOP_PREFERRED`) generic parameter. -#[unstable(feature = "global_co_alloc_def", issue = "none")] -// pub const DEFAULT_COOP_PREFERRED: bool = true; -#[macro_export] -macro_rules! DEFAULT_COOP_PREFERRED { - () => { - true - }; -} - /// A contiguous growable array type, written as `Vec`, short for 'vector'. /// /// # Examples @@ -413,37 +403,35 @@ pub struct Vec< T, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, //@FIXME: #[unstable(feature ="global_co_alloc_vec", issue="none")] - const COOP_PREFERRED: bool = { DEFAULT_COOP_PREFERRED!() }, + const CO_ALLOC_PREF: CoAllocPref = { CO_ALLOC_PREF_DEFAULT!() }, > where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { - buf: RawVec, + buf: RawVec, len: usize, } /// "Cooperative" Vector. Preferring co-alloc API (if Global alloc supports it). #[unstable(feature = "global_co_alloc_covec", issue = "none")] -pub type CoVec = Vec; +pub type CoVec = Vec; /// "Plain" Vec. Not "cooperative" - not carrying extra data to assist the allocator. /// FIXME after cleanup, see if we still use this in core:: and/or alloc:: #[unstable(feature = "global_co_alloc_plvec", issue = "none")] -pub type PlVec = Vec; +pub type PlVec = Vec; -/// "Default" Vec. Either "cooperative" or not - as specified by `DEFAULT_COOP_PREFERRED`. The -/// difference to `Vec` (used without specifying `COOP_PREFERRED`): `DefVec` indicates that the +/// "Default" Vec. Either "cooperative" or not - as specified by `DEFAULT_CO_ALLOC_PREF`. The +/// difference to `Vec` (used without specifying `CO_ALLOC_PREF`): `DefVec` indicates that the /// author considered using `CoVec` or `PlVec`, but left it to default instead. 
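Editor's sketch, not part of this patch: the `CoVec`/`PlVec`/`DefVec` aliases above simply pin or default the `CO_ALLOC_PREF` parameter. A simplified model using a bool const generic (the real parameter is a `CoAllocPref`, and all names below are placeholders):

#[allow(dead_code)]
struct VecModel<T, const CO_ALLOC_PREF: bool = false> {
    items: Vec<T>,
}

type CoVecModel<T> = VecModel<T, true>;   // always prefers cooperation
type PlVecModel<T> = VecModel<T, false>;  // never cooperates ("plain")
type DefVecModel<T> = VecModel<T>;        // leaves the crate-wide default in place

fn main() {
    let _co: CoVecModel<u8> = VecModel { items: vec![1] };
    let _pl: PlVecModel<u8> = VecModel { items: vec![2] };
    let _def: DefVecModel<u8> = VecModel { items: vec![3] };
}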
#[unstable(feature = "global_co_alloc_defvec", issue = "none")] #[allow(unused_braces)] -pub type DefVec = Vec; - -/// "Weighted cooperative" Vec. Weight means how much it wants to cooperate (with the allocator). 0 -/// = always pack; u8::MAX = always cooperate (if `Global` supports it). -/// @FIXME A `pub const` threshold. -#[unstable(feature = "global_co_alloc_vec", issue = "none")] -pub type WeVec = Vec 127 }>; +pub type DefVec = Vec; impl Vec { + /*impl Vec + where + [(); {meta_num_slots_global!(CO_ALLOC_PREF)}]:, + {*/ /// Constructs a new, empty `Vec`. /// /// The vector will not allocate until elements are pushed onto it. @@ -460,7 +448,65 @@ impl Vec { #[must_use] pub const fn new() -> Self { #[allow(unused_braces)] - Vec::::new_co() + Vec::::new_co() + //Self::new_co() + } + + /// Constructs a new, empty `Vec` with at least the specified capacity. + /// + /// The vector will be able to hold at least `capacity` elements without + /// reallocating. This method is allowed to allocate for more elements than + /// `capacity`. If `capacity` is 0, the vector will not allocate. + /// + /// It is important to note that although the returned vector has the + /// minimum *capacity* specified, the vector will have a zero *length*. For + /// an explanation of the difference between length and capacity, see + /// *[Capacity and reallocation]*. + /// + /// If it is important to know the exact allocated capacity of a `Vec`, + /// always use the [`capacity`] method after construction. + /// + /// For `Vec` where `T` is a zero-sized type, there will be no allocation + /// and the capacity will always be `usize::MAX`. + /// + /// [Capacity and reallocation]: #capacity-and-reallocation + /// [`capacity`]: Vec::capacity + /// + /// # Panics + /// + /// Panics if the new capacity exceeds `isize::MAX` bytes. + /// + /// # Examples + /// + /// ``` + /// let mut vec = Vec::with_capacity(10); + /// + /// // The vector contains no items, even though it has capacity for more + /// assert_eq!(vec.len(), 0); + /// assert!(vec.capacity() >= 10); + /// + /// // These are all done without reallocating... + /// for i in 0..10 { + /// vec.push(i); + /// } + /// assert_eq!(vec.len(), 10); + /// assert!(vec.capacity() >= 10); + /// + /// // ...but this may make the vector reallocate + /// vec.push(11); + /// assert_eq!(vec.len(), 11); + /// assert!(vec.capacity() >= 11); + /// + /// // A vector of a zero-sized type will always over-allocate, since no + /// // allocation is necessary + /// let vec_units = Vec::<()>::with_capacity(10); + /// assert_eq!(vec_units.capacity(), usize::MAX); + /// ``` #[cfg(not(no_global_oom_handling))] + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + #[must_use] + pub fn with_capacity(capacity: usize) -> Self { + Self::with_capacity_in(capacity, Global) } } @@ -468,11 +514,13 @@ impl Vec { // Inherent methods //////////////////////////////////////////////////////////////////////////////// -impl Vec +/**/ +#[allow(unused_braces)] +impl Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { meta_num_slots_global!(CO_ALLOC_PREF) }]:, { - /// Like new(), but it respects COOP_PREFERRED. + /// Like new(), but it respects CO_ALLOC_PREF. #[inline] #[rustc_const_stable(feature = "const_vec_new_co", since = "1.60.0")] //@FIXME This is `rustc_const_stable`, so that String::new() can be const and can call this. 
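Editor's sketch, not part of this patch: `from_raw_parts_co` below forwards to `from_raw_parts_in` with the `Global` allocator, so the round trip it supports has the same shape as the stable `Vec::from_raw_parts` one. A minimal round trip with the stable API:

use core::mem::ManuallyDrop;

fn main() {
    let v = vec![1u32, 2, 3];
    let mut v = ManuallyDrop::new(v);
    let (ptr, len, cap) = (v.as_mut_ptr(), v.len(), v.capacity());
    // SAFETY: the parts come from a live Vec that is never touched or dropped again.
    let rebuilt = unsafe { Vec::from_raw_parts(ptr, len, cap) };
    assert_eq!(rebuilt, [1, 2, 3]);
}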
#[unstable(feature = "vec_new_co", reason = "confirm_or_fix_the_function_name", issue = "none")] @@ -481,6 +529,7 @@ where Vec { buf: RawVec::NEW, len: 0 } } + // @FIXME document co-allocation /// Constructs a new, empty `Vec` with at least the specified capacity. /// /// The vector will be able to hold at least `capacity` elements without @@ -533,12 +582,21 @@ where /// ``` #[cfg(not(no_global_oom_handling))] #[inline] - #[stable(feature = "rust1", since = "1.0.0")] + #[unstable(feature = "vec_new_co", reason = "confirm_or_fix_the_function_name", issue = "none")] #[must_use] - pub fn with_capacity(capacity: usize) -> Self { + pub fn with_capacity_co(capacity: usize) -> Self { Self::with_capacity_in(capacity, Global) } + /// Coallocation-aware alternative to `from_row_parts`. + #[inline] + #[unstable(feature = "global_co_alloc", issue = "none")] + pub unsafe fn from_raw_parts_co(ptr: *mut T, length: usize, capacity: usize) -> Self { + unsafe { Self::from_raw_parts_in(ptr, length, capacity, Global) } + } +} + +impl Vec { /// Creates a `Vec` directly from a pointer, a capacity, and a length. /// /// # Safety @@ -649,9 +707,10 @@ where } } -impl Vec +#[allow(unused_braces)] +impl Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { /// Constructs a new, empty `Vec`. /// @@ -1665,19 +1724,26 @@ where // This drop guard will be invoked when predicate or `drop` of element panicked. // It shifts unchecked elements to cover holes and `set_len` to the correct length. // In cases when predicate and `drop` never panick, it will be optimized out. - struct BackshiftOnDrop<'a, T, A: Allocator, const VEC_IS_COOP: bool = true> + struct BackshiftOnDrop< + 'a, + T, + A: Allocator, + const VEC_CO_ALLOC_PREF: CoAllocPref = { CO_ALLOC_PREF_META_DEFAULT!() }, + > where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(VEC_IS_COOP)]:, + [(); { crate::meta_num_slots!(A, VEC_CO_ALLOC_PREF) }]:, { - v: &'a mut Vec, + v: &'a mut Vec, processed_len: usize, deleted_cnt: usize, original_len: usize, } - impl Drop for BackshiftOnDrop<'_, T, A, VEC_IS_COOP> + #[allow(unused_braces)] + impl Drop + for BackshiftOnDrop<'_, T, A, VEC_CO_ALLOC_PREF> where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(VEC_IS_COOP)]:, + [(); { crate::meta_num_slots!(A, VEC_CO_ALLOC_PREF) }]:, { fn drop(&mut self) { if self.deleted_cnt > 0 { @@ -1697,20 +1763,26 @@ where } } - let mut g = BackshiftOnDrop:: { + let mut g = BackshiftOnDrop:: { v: self, processed_len: 0, deleted_cnt: 0, original_len, }; - fn process_loop( + fn process_loop< + F, + T, + A: Allocator, + const DELETED: bool, + const VEC_CO_ALLOC_PREF: CoAllocPref, + >( original_len: usize, f: &mut F, - g: &mut BackshiftOnDrop<'_, T, A, VEC_IS_COOP>, + g: &mut BackshiftOnDrop<'_, T, A, VEC_CO_ALLOC_PREF>, ) where F: FnMut(&mut T) -> bool, - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(VEC_IS_COOP)]:, + [(); { crate::meta_num_slots!(A, VEC_CO_ALLOC_PREF) }]:, { while g.processed_len != original_len { // SAFETY: Unchecked element must be valid. @@ -1741,10 +1813,10 @@ where } // Stage 1: Nothing was deleted. - process_loop::(original_len, &mut f, &mut g); + process_loop::(original_len, &mut f, &mut g); // Stage 2: Some elements were deleted. - process_loop::(original_len, &mut f, &mut g); + process_loop::(original_len, &mut f, &mut g); // All item are processed. This can be optimized to `set_len` by LLVM. 
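Editor's sketch, not part of this patch: the "backshift" step that `BackshiftOnDrop` (in `retain` above) and the drain-filter guard perform, reduced to a standalone function. It is restricted to `Copy` elements so the already-removed slots can simply be overwritten; the real guards also work for non-`Copy` types because the removed elements were already moved out.

fn backshift_model<T: Copy>(v: &mut Vec<T>, processed: usize, deleted: usize) {
    // `processed` elements were visited; `deleted` of them were removed, leaving the kept
    // prefix in 0..processed - deleted and an untouched tail in processed..len.
    assert!(deleted <= processed && processed <= v.len());
    if deleted == 0 {
        return;
    }
    let original_len = v.len();
    unsafe {
        let ptr = v.as_mut_ptr();
        // Shift the unvisited tail left, over the gap left by the removed elements.
        core::ptr::copy(ptr.add(processed), ptr.add(processed - deleted), original_len - processed);
        // The removed slots stop being part of the vector once the length shrinks.
        v.set_len(original_len - deleted);
    }
}

fn main() {
    // Pretend a filter visited the first three elements and removed two of them.
    let mut v = vec![1, 2, 3, 4, 5];
    backshift_model(&mut v, 3, 2);
    assert_eq!(v, [1, 4, 5]);
}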
drop(g); @@ -1803,9 +1875,10 @@ where } /* INVARIANT: vec.len() > read >= write > write-1 >= 0 */ - struct FillGapOnDrop<'a, T, A: core::alloc::Allocator, const COOP_PREFERRED: bool> + #[allow(unused_braces)] + struct FillGapOnDrop<'a, T, A: core::alloc::Allocator, const CO_ALLOC_PREF: CoAllocPref> where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { /* Offset of the element we want to check if it is duplicate */ read: usize, @@ -1815,13 +1888,14 @@ where write: usize, /* The Vec that would need correction if `same_bucket` panicked */ - vec: &'a mut Vec, + vec: &'a mut Vec, } - impl<'a, T, A: core::alloc::Allocator, const COOP_PREFERRED: bool> Drop - for FillGapOnDrop<'a, T, A, COOP_PREFERRED> + #[allow(unused_braces)] + impl<'a, T, A: core::alloc::Allocator, const CO_ALLOC_PREF: CoAllocPref> Drop + for FillGapOnDrop<'a, T, A, CO_ALLOC_PREF> where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn drop(&mut self) { /* This code gets executed when `same_bucket` panics */ @@ -2064,7 +2138,7 @@ where /// assert_eq!(v, &[]); /// ``` #[stable(feature = "drain", since = "1.6.0")] - pub fn drain(&mut self, range: R) -> Drain<'_, T, A, COOP_PREFERRED> + pub fn drain(&mut self, range: R) -> Drain<'_, T, A, CO_ALLOC_PREF> where R: RangeBounds, { @@ -2415,9 +2489,10 @@ where } } -impl Vec +#[allow(unused_braces)] +impl Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { /// Resizes the `Vec` in-place so that `len` is equal to `new_len`. /// @@ -2517,9 +2592,11 @@ where } } -impl Vec<[T; N], A, COOP_PREFERRED> +#[allow(unused_braces)] +impl + Vec<[T; N], A, CO_ALLOC_PREF> where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { /// Takes a `Vec<[T; N]>` and flattens it into a `Vec`. /// @@ -2543,7 +2620,7 @@ where /// assert_eq!(flattened.pop(), Some(6)); /// ``` #[unstable(feature = "slice_flatten", issue = "95629")] - pub fn into_flattened(self) -> Vec { + pub fn into_flattened(self) -> Vec { let (ptr, len, cap, alloc) = self.into_raw_parts_with_alloc(); let (new_len, new_cap) = if T::IS_ZST { (len.checked_mul(N).expect("vec len overflow"), usize::MAX) @@ -2562,7 +2639,7 @@ where // `new_cap * size_of::()` == `cap * size_of::<[T; N]>()` // - `len` <= `cap`, so `len * N` <= `cap * N`. unsafe { - Vec::::from_raw_parts_in(ptr.cast(), new_len, new_cap, alloc) + Vec::::from_raw_parts_in(ptr.cast(), new_len, new_cap, alloc) } } } @@ -2583,9 +2660,10 @@ impl ExtendWith for ExtendElement { } } -impl Vec +#[allow(unused_braces)] +impl Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { #[cfg(not(no_global_oom_handling))] /// Extend the vector by `n` values, using the given generator. @@ -2618,9 +2696,10 @@ where } } -impl Vec +#[allow(unused_braces)] +impl Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { /// Removes consecutive repeated elements in the vector according to the /// [`PartialEq`] trait implementation. 
@@ -2657,13 +2736,14 @@ pub fn from_elem(elem: T, n: usize) -> Vec { #[doc(hidden)] #[cfg(not(no_global_oom_handling))] #[unstable(feature = "allocator_api", issue = "32838")] -pub fn from_elem_in( +#[allow(unused_braces)] +pub fn from_elem_in( elem: T, n: usize, alloc: A, -) -> Vec +) -> Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { ::from_elem(elem, n, alloc) } @@ -2676,10 +2756,11 @@ trait ExtendFromWithinSpec { unsafe fn spec_extend_from_within(&mut self, src: Range); } -impl ExtendFromWithinSpec - for Vec +#[allow(unused_braces)] +impl ExtendFromWithinSpec + for Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { default unsafe fn spec_extend_from_within(&mut self, src: Range) { // SAFETY: @@ -2699,10 +2780,11 @@ where } } -impl ExtendFromWithinSpec - for Vec +#[allow(unused_braces)] +impl ExtendFromWithinSpec + for Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { unsafe fn spec_extend_from_within(&mut self, src: Range) { let count = src.len(); @@ -2736,9 +2818,10 @@ where //////////////////////////////////////////////////////////////////////////////// #[stable(feature = "rust1", since = "1.0.0")] -impl ops::Deref for Vec +#[allow(unused_braces)] +impl ops::Deref for Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { type Target = [T]; @@ -2749,9 +2832,10 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl ops::DerefMut for Vec +#[allow(unused_braces)] +impl ops::DerefMut for Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { #[inline] fn deref_mut(&mut self) -> &mut [T] { @@ -2761,14 +2845,16 @@ where #[cfg(not(no_global_oom_handling))] #[stable(feature = "rust1", since = "1.0.0")] -impl Clone for Vec +#[allow(unused_braces)] +impl Clone + for Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { #[cfg(not(test))] fn clone(&self) -> Self { let alloc = self.allocator().clone(); - <[T]>::to_vec_in(&**self, alloc) + <[T]>::to_vec_in_co(&**self, alloc) } // HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is @@ -2778,11 +2864,11 @@ where #[cfg(test)] fn clone(&self) -> Self { let alloc = self.allocator().clone(); - crate::slice::to_vec(&**self, alloc) + crate::slice::to_vec_co(&**self, alloc) } fn clone_from(&mut self, other: &Self) { - crate::slice::SpecCloneIntoVec::clone_into(other.as_slice(), self); + crate::slice::SpecCloneIntoVecCo::clone_into_co(other.as_slice(), self); } } @@ -2799,9 +2885,10 @@ where /// assert_eq!(b.hash_one(v), b.hash_one(s)); /// ``` #[stable(feature = "rust1", since = "1.0.0")] -impl Hash for Vec +#[allow(unused_braces)] +impl Hash for Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { #[inline] fn hash(&self, state: &mut H) { @@ -2814,10 +2901,11 @@ where message = "vector indices are of type `usize` or ranges of `usize`", label = "vector indices are of type `usize` or ranges of `usize`" )] -impl, A: Allocator, const 
COOP_PREFERRED: bool> Index - for Vec +#[allow(unused_braces)] +impl, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Index + for Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { type Output = I::Output; @@ -2832,10 +2920,11 @@ where message = "vector indices are of type `usize` or ranges of `usize`", label = "vector indices are of type `usize` or ranges of `usize`" )] -impl, A: Allocator, const COOP_PREFERRED: bool> IndexMut - for Vec +#[allow(unused_braces)] +impl, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> IndexMut + for Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { #[inline] fn index_mut(&mut self, index: I) -> &mut Self::Output { @@ -2846,23 +2935,35 @@ where #[cfg(not(no_global_oom_handling))] #[stable(feature = "rust1", since = "1.0.0")] #[allow(unused_braces)] -impl FromIterator for Vec +impl FromIterator for Vec { + #[inline] + fn from_iter>(iter: I) -> Vec { + >::from_iter(iter.into_iter()) + } +} + +#[cfg(not(no_global_oom_handling))] +#[unstable(feature = "global_co_alloc", issue = "none")] +#[allow(unused_braces)] +impl Vec where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots_global!(CO_ALLOC_PREF) }]:, { + /// Coallocation-aware alternative to `from_iter`. #[inline] - fn from_iter>(iter: I) -> Vec { + pub fn from_iter_co>(iter: I) -> Vec { >::from_iter(iter.into_iter()) } } #[stable(feature = "rust1", since = "1.0.0")] -impl IntoIterator for Vec +#[allow(unused_braces)] +impl IntoIterator for Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { type Item = T; - type IntoIter = IntoIter; + type IntoIter = IntoIter; /// Creates a consuming iterator, that is, one that moves each value out of /// the vector (from start to end). 
The vector cannot be used after calling @@ -2905,9 +3006,11 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T, A: Allocator, const COOP_PREFERRED: bool> IntoIterator for &'a Vec +#[allow(unused_braces)] +impl<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> IntoIterator + for &'a Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { type Item = &'a T; type IntoIter = slice::Iter<'a, T>; @@ -2918,10 +3021,11 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T, A: Allocator, const COOP_PREFERRED: bool> IntoIterator - for &'a mut Vec +#[allow(unused_braces)] +impl<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> IntoIterator + for &'a mut Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { type Item = &'a mut T; type IntoIter = slice::IterMut<'a, T>; @@ -2933,9 +3037,10 @@ where #[cfg(not(no_global_oom_handling))] #[stable(feature = "rust1", since = "1.0.0")] -impl Extend for Vec +#[allow(unused_braces)] +impl Extend for Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { #[inline] fn extend>(&mut self, iter: I) { @@ -2953,9 +3058,10 @@ where } } -impl Vec +#[allow(unused_braces)] +impl Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { // leaf method to which various SpecFrom/SpecExtend implementations delegate when // they have no further optimizations to apply @@ -3058,11 +3164,11 @@ where &mut self, range: R, replace_with: I, - ) -> Splice<'_, I::IntoIter, A, COOP_PREFERRED> + ) -> Splice<'_, I::IntoIter, A, CO_ALLOC_PREF> where R: RangeBounds, I: IntoIterator, - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(true)]:, + [(); { crate::meta_num_slots!(A, crate::CO_ALLOC_PREF_META_YES!()) }]:, { Splice { drain: self.drain(range), replace_with: replace_with.into_iter() } } @@ -3112,10 +3218,10 @@ where /// assert_eq!(odds, vec![1, 3, 5, 9, 11, 13, 15]); /// ``` #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] - pub fn drain_filter(&mut self, filter: F) -> DrainFilter<'_, T, F, A, COOP_PREFERRED> + pub fn drain_filter(&mut self, filter: F) -> DrainFilter<'_, T, F, A, CO_ALLOC_PREF> where F: FnMut(&mut T) -> bool, - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(true)]:, + [(); { crate::meta_num_slots!(A, crate::CO_ALLOC_PREF_META_YES!()) }]:, { let old_len = self.len(); @@ -3136,10 +3242,11 @@ where /// [`copy_from_slice`]: slice::copy_from_slice #[cfg(not(no_global_oom_handling))] #[stable(feature = "extend_ref", since = "1.2.0")] -impl<'a, T: Copy + 'a, A: Allocator + 'a, const COOP_PREFERRED: bool> Extend<&'a T> - for Vec +#[allow(unused_braces)] +impl<'a, T: Copy + 'a, A: Allocator + 'a, const CO_ALLOC_PREF: CoAllocPref> Extend<&'a T> + for Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn extend>(&mut self, iter: I) { self.spec_extend(iter.into_iter()) @@ -3158,10 +3265,11 @@ where /// Implements comparison of vectors, [lexicographically](core::cmp::Ord#lexicographical-comparison). 
#[stable(feature = "rust1", since = "1.0.0")] -impl PartialOrd - for Vec +#[allow(unused_braces)] +impl PartialOrd + for Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { #[inline] fn partial_cmp(&self, other: &Self) -> Option { @@ -3170,16 +3278,18 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl Eq for Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: +#[allow(unused_braces)] +impl Eq for Vec where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]: { } /// Implements ordering of vectors, [lexicographically](core::cmp::Ord#lexicographical-comparison). #[stable(feature = "rust1", since = "1.0.0")] -impl Ord for Vec +#[allow(unused_braces)] +impl Ord for Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { #[inline] fn cmp(&self, other: &Self) -> Ordering { @@ -3188,10 +3298,11 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl<#[may_dangle] T, A: Allocator, const COOP_PREFERRED: bool> Drop - for Vec +#[allow(unused_braces)] +unsafe impl<#[may_dangle] T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Drop + for Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn drop(&mut self) { unsafe { @@ -3206,23 +3317,36 @@ where #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_unstable(feature = "const_default_impls", issue = "87864")] -impl const Default for Vec +#[allow(unused_braces)] +impl const Default for Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { meta_num_slots_global!(CO_ALLOC_PREF) }]:, { /// Creates an empty `Vec`. /// /// The vector will not allocate until elements are pushed onto it. - fn default() -> Vec { + default fn default() -> Vec { Vec::new_co() } } #[stable(feature = "rust1", since = "1.0.0")] -impl fmt::Debug - for Vec +#[rustc_const_unstable(feature = "const_default_impls", issue = "87864")] +impl const Default for Vec { + /// Creates an empty `Vec`. + /// + /// The vector will not allocate until elements are pushed onto it. 
+ fn default() -> Vec { + Vec::new_co() + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +#[allow(unused_braces)] +impl fmt::Debug + for Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Debug::fmt(&**self, f) @@ -3230,31 +3354,34 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl AsRef> - for Vec +#[allow(unused_braces)] +impl AsRef> + for Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { - fn as_ref(&self) -> &Vec { + fn as_ref(&self) -> &Vec { self } } #[stable(feature = "vec_as_mut", since = "1.5.0")] -impl AsMut> - for Vec +#[allow(unused_braces)] +impl AsMut> + for Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { - fn as_mut(&mut self) -> &mut Vec { + fn as_mut(&mut self) -> &mut Vec { self } } #[stable(feature = "rust1", since = "1.0.0")] -impl AsRef<[T]> for Vec +#[allow(unused_braces)] +impl AsRef<[T]> for Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn as_ref(&self) -> &[T] { self @@ -3262,9 +3389,10 @@ where } #[stable(feature = "vec_as_mut", since = "1.5.0")] -impl AsMut<[T]> for Vec +#[allow(unused_braces)] +impl AsMut<[T]> for Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn as_mut(&mut self) -> &mut [T] { self @@ -3274,7 +3402,7 @@ where #[cfg(not(no_global_oom_handling))] #[stable(feature = "rust1", since = "1.0.0")] #[allow(unused_braces)] -impl From<&[T]> for Vec { +impl From<&[T]> for Vec { /// Allocate a `Vec` and fill it by cloning `s`'s items. /// /// # Examples @@ -3295,7 +3423,7 @@ impl From<&[T]> for Vec { #[cfg(not(no_global_oom_handling))] #[stable(feature = "vec_from_mut", since = "1.19.0")] #[allow(unused_braces)] -impl From<&mut [T]> for Vec { +impl From<&mut [T]> for Vec { /// Allocate a `Vec` and fill it by cloning `s`'s items. /// /// # Examples @@ -3316,7 +3444,7 @@ impl From<&mut [T]> for Vec #[cfg(not(no_global_oom_handling))] #[stable(feature = "vec_from_array", since = "1.44.0")] #[allow(unused_braces)] -impl From<[T; N]> for Vec { +impl From<[T; N]> for Vec { /// Allocate a `Vec` and move `s`'s items into it. /// /// # Examples @@ -3340,7 +3468,7 @@ impl From<[T; N]> for Vec From> for Vec +impl<'a, T> From> for Vec where [T]: ToOwned>, { @@ -3363,12 +3491,27 @@ where } } -// note: test pulls in std, which causes errors here +// @FIXME unsure about test +#[cfg(not(test))] +#[allow(ineffective_unstable_trait_impl)] //@FIXME What/why is #[unstable(...)] ignored here? 
+#[unstable(feature = "global_co_alloc", issue = "none")] +#[allow(unused_braces)] +impl From> + for Vec +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ + default fn from(s: Box<[T], A>) -> Self { + s.into_vec_co() + } +} + #[cfg(not(test))] #[stable(feature = "vec_from_box", since = "1.18.0")] -impl From> for Vec +#[allow(unused_braces)] +impl From> for Vec where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots_default!(A) }]:, { /// Convert a boxed slice into a vector by transferring ownership of /// the existing heap allocation. @@ -3384,13 +3527,29 @@ where } } +#[cfg(not(no_global_oom_handling))] +// @FIXME Can this apply to test? +#[cfg(not(test))] +#[allow(ineffective_unstable_trait_impl)] //@FIXME What/why is #[unstable(...)] ignored here? +#[unstable(feature = "global_co_alloc", issue = "none")] +#[allow(unused_braces)] +impl From> + for Box<[T], A> +where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, +{ + default fn from(v: Vec) -> Self { + v.into_boxed_slice() + } +} // note: test pulls in std, which causes errors here #[cfg(not(no_global_oom_handling))] #[cfg(not(test))] #[stable(feature = "box_from_vec", since = "1.20.0")] -impl From> for Box<[T], A> +#[allow(unused_braces)] +impl From> for Box<[T], A> where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots_default!(A) }]:, { /// Convert a vector into a boxed slice. /// @@ -3410,7 +3569,7 @@ where /// /// assert_eq!(Box::from(vec), vec![1, 2, 3].into_boxed_slice()); /// ``` - fn from(v: Vec) -> Self { + fn from(v: Vec) -> Self { v.into_boxed_slice() } } @@ -3418,7 +3577,7 @@ where #[cfg(not(no_global_oom_handling))] #[stable(feature = "rust1", since = "1.0.0")] #[allow(unused_braces)] -impl From<&str> for Vec { +impl From<&str> for Vec { /// Allocate a `Vec` and fill it with a UTF-8 string. /// /// # Examples @@ -3432,12 +3591,13 @@ impl From<&str> for Vec { } #[stable(feature = "array_try_from_vec", since = "1.48.0")] -impl TryFrom> - for [T; N] +#[allow(unused_braces)] +impl + TryFrom> for [T; N] where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { - type Error = Vec; + type Error = Vec; /// Gets the entire contents of the `Vec` as an array, /// if its size exactly matches that of the requested array. @@ -3465,7 +3625,7 @@ where /// assert_eq!(a, b' '); /// assert_eq!(b, b'd'); /// ``` - fn try_from(mut vec: Vec) -> Result<[T; N], Vec> { + fn try_from(mut vec: Vec) -> Result<[T; N], Vec> { if vec.len() != N { return Err(vec); } diff --git a/library/alloc/src/vec/partial_eq.rs b/library/alloc/src/vec/partial_eq.rs index bcf52b7333218..a4fb19794b4db 100644 --- a/library/alloc/src/vec/partial_eq.rs +++ b/library/alloc/src/vec/partial_eq.rs @@ -2,12 +2,14 @@ use crate::alloc::Allocator; #[cfg(not(no_global_oom_handling))] use crate::borrow::Cow; +use crate::co_alloc::CoAllocPref; use super::Vec; macro_rules! __impl_slice_eq1 { ([$($vars:tt)*] $lhs:ty, $rhs:ty, #[$stability:meta], $($constraints:tt)*) => { #[$stability] + #[allow(unused_braces)] impl PartialEq<$rhs> for $lhs where T: PartialEq, @@ -21,21 +23,21 @@ macro_rules! __impl_slice_eq1 { } } -__impl_slice_eq1! 
{ [A1: Allocator, A2: Allocator, const COOP_PREFERRED1: bool, const COOP_PREFERRED2: bool] Vec, Vec, #[stable(feature = "rust1", since = "1.0.0")], [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED1)]:, [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED2)]: } -__impl_slice_eq1! { [A: Allocator, const COOP_PREFERRED: bool] Vec, &[U], #[stable(feature = "rust1", since = "1.0.0")], [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: } -__impl_slice_eq1! { [A: Allocator, const COOP_PREFERRED: bool] Vec, &mut [U], #[stable(feature = "rust1", since = "1.0.0")], [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: } -__impl_slice_eq1! { [A: Allocator, const COOP_PREFERRED: bool] &[T], Vec, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")], [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: } -__impl_slice_eq1! { [A: Allocator, const COOP_PREFERRED: bool] &mut [T], Vec, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")], [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: } -__impl_slice_eq1! { [A: Allocator, const COOP_PREFERRED: bool] Vec, [U], #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")], [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: } -__impl_slice_eq1! { [A: Allocator, const COOP_PREFERRED: bool] [T], Vec, #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")], [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: } +__impl_slice_eq1! { [A1: Allocator, A2: Allocator, const CO_ALLOC_PREF1: crate::co_alloc::CoAllocPref, const CO_ALLOC_PREF2: crate::co_alloc::CoAllocPref] Vec, Vec, #[stable(feature = "rust1", since = "1.0.0")], [(); {crate::meta_num_slots!(A1, CO_ALLOC_PREF1)}]:, [(); {crate::meta_num_slots!(A2, CO_ALLOC_PREF2)}]: } +__impl_slice_eq1! { [A: Allocator, const CO_ALLOC_PREF: CoAllocPref] Vec, &[U], #[stable(feature = "rust1", since = "1.0.0")], [(); {crate::meta_num_slots!(A, CO_ALLOC_PREF)}]: } +__impl_slice_eq1! { [A: Allocator, const CO_ALLOC_PREF: CoAllocPref] Vec, &mut [U], #[stable(feature = "rust1", since = "1.0.0")], [(); {crate::meta_num_slots!(A, CO_ALLOC_PREF)}]: } +__impl_slice_eq1! { [A: Allocator, const CO_ALLOC_PREF: CoAllocPref] &[T], Vec, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")], [(); {crate::meta_num_slots!(A, CO_ALLOC_PREF)}]: } +__impl_slice_eq1! { [A: Allocator, const CO_ALLOC_PREF: CoAllocPref] &mut [T], Vec, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")], [(); {crate::meta_num_slots!(A, CO_ALLOC_PREF)}]: } +__impl_slice_eq1! { [A: Allocator, const CO_ALLOC_PREF: CoAllocPref] Vec, [U], #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")], [(); {crate::meta_num_slots!(A, CO_ALLOC_PREF)}]: } +__impl_slice_eq1! { [A: Allocator, const CO_ALLOC_PREF: CoAllocPref] [T], Vec, #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")], [(); {crate::meta_num_slots!(A, CO_ALLOC_PREF)}]: } #[cfg(not(no_global_oom_handling))] -__impl_slice_eq1! { [A: Allocator, const COOP_PREFERRED: bool] Cow<'_, [T]>, Vec, #[stable(feature = "rust1", since = "1.0.0")], T: Clone, [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: } +__impl_slice_eq1! 
{ [A: Allocator, const CO_ALLOC_PREF: CoAllocPref] Cow<'_, [T]>, Vec, #[stable(feature = "rust1", since = "1.0.0")], T: Clone, [(); {crate::meta_num_slots!(A, CO_ALLOC_PREF)}]: } #[cfg(not(no_global_oom_handling))] __impl_slice_eq1! { [] Cow<'_, [T]>, &[U], #[stable(feature = "rust1", since = "1.0.0")], T: Clone } #[cfg(not(no_global_oom_handling))] __impl_slice_eq1! { [] Cow<'_, [T]>, &mut [U], #[stable(feature = "rust1", since = "1.0.0")], T: Clone } -__impl_slice_eq1! { [A: Allocator, const COOP_PREFERRED: bool, const N: usize] Vec, [U; N], #[stable(feature = "rust1", since = "1.0.0")], [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: } -__impl_slice_eq1! { [A: Allocator, const COOP_PREFERRED: bool, const N: usize] Vec, &[U; N], #[stable(feature = "rust1", since = "1.0.0")], [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]: } +__impl_slice_eq1! { [A: Allocator, const CO_ALLOC_PREF: CoAllocPref, const N: usize] Vec, [U; N], #[stable(feature = "rust1", since = "1.0.0")], [(); {crate::meta_num_slots!(A, CO_ALLOC_PREF)}]: } +__impl_slice_eq1! { [A: Allocator, const CO_ALLOC_PREF: CoAllocPref, const N: usize] Vec, &[U; N], #[stable(feature = "rust1", since = "1.0.0")], [(); {crate::meta_num_slots!(A, CO_ALLOC_PREF)}]: } // NOTE: some less important impls are omitted to reduce code bloat // FIXME(Centril): Reconsider this? diff --git a/library/alloc/src/vec/spec_extend.rs b/library/alloc/src/vec/spec_extend.rs index 73cf325889290..546e2c0e0ae77 100644 --- a/library/alloc/src/vec/spec_extend.rs +++ b/library/alloc/src/vec/spec_extend.rs @@ -1,5 +1,5 @@ use crate::alloc::Allocator; -use core::alloc; +use crate::co_alloc::CoAllocPref; use core::iter::TrustedLen; use core::slice::{self}; @@ -10,30 +10,35 @@ pub(super) trait SpecExtend { fn spec_extend(&mut self, iter: I); } -impl SpecExtend for Vec +#[allow(unused_braces)] +impl SpecExtend + for Vec where I: Iterator, - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { default fn spec_extend(&mut self, iter: I) { self.extend_desugared(iter) } } -impl SpecExtend for Vec +#[allow(unused_braces)] +impl SpecExtend + for Vec where I: TrustedLen, - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { default fn spec_extend(&mut self, iterator: I) { self.extend_trusted(iterator) } } -impl SpecExtend> - for Vec +#[allow(unused_braces)] +impl SpecExtend> + for Vec where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn spec_extend(&mut self, mut iterator: IntoIter) { unsafe { @@ -43,23 +48,25 @@ where } } -impl<'a, T: 'a, I, A: Allocator + 'a, const COOP_PREFERRED: bool> SpecExtend<&'a T, I> - for Vec +#[allow(unused_braces)] +impl<'a, T: 'a, I, A: Allocator + 'a, const CO_ALLOC_PREF: CoAllocPref> SpecExtend<&'a T, I> + for Vec where I: Iterator, T: Clone, - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { default fn spec_extend(&mut self, iterator: I) { self.spec_extend(iterator.cloned()) } } -impl<'a, T: 'a, A: Allocator + 'a, const COOP_PREFERRED: bool> SpecExtend<&'a T, slice::Iter<'a, T>> - for Vec +#[allow(unused_braces)] +impl<'a, T: 'a, A: Allocator + 'a, const CO_ALLOC_PREF: CoAllocPref> + SpecExtend<&'a T, slice::Iter<'a, T>> for Vec where T: Copy, 
- [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) { let slice = iterator.as_slice(); diff --git a/library/alloc/src/vec/spec_from_elem.rs b/library/alloc/src/vec/spec_from_elem.rs index bc4169a24f0b2..f6ddf0a6ef22e 100644 --- a/library/alloc/src/vec/spec_from_elem.rs +++ b/library/alloc/src/vec/spec_from_elem.rs @@ -1,30 +1,32 @@ use core::ptr; use crate::alloc::Allocator; +use crate::co_alloc::CoAllocPref; use crate::raw_vec::RawVec; -use core::alloc; use super::{ExtendElement, IsZero, Vec}; // Specialization trait used for Vec::from_elem pub(super) trait SpecFromElem: Sized { - fn from_elem( + #[allow(unused_braces)] + fn from_elem( elem: Self, n: usize, alloc: A, - ) -> Vec + ) -> Vec where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:; + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:; } +#[allow(unused_braces)] impl SpecFromElem for T { - default fn from_elem( + default fn from_elem( elem: Self, n: usize, alloc: A, - ) -> Vec + ) -> Vec where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { let mut v = Vec::with_capacity_in(n, alloc); v.extend_with(n, ExtendElement(elem)); @@ -32,15 +34,16 @@ impl SpecFromElem for T { } } +#[allow(unused_braces)] impl SpecFromElem for T { #[inline] - default fn from_elem( + default fn from_elem( elem: T, n: usize, alloc: A, - ) -> Vec + ) -> Vec where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { if elem.is_zero() { return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n }; @@ -53,13 +56,14 @@ impl SpecFromElem for T { impl SpecFromElem for i8 { #[inline] - fn from_elem( + #[allow(unused_braces)] + fn from_elem( elem: i8, n: usize, alloc: A, - ) -> Vec + ) -> Vec where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { if elem == 0 { return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n }; @@ -75,13 +79,14 @@ impl SpecFromElem for i8 { impl SpecFromElem for u8 { #[inline] - fn from_elem( + #[allow(unused_braces)] + fn from_elem( elem: u8, n: usize, alloc: A, - ) -> Vec + ) -> Vec where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { if elem == 0 { return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n }; diff --git a/library/alloc/src/vec/spec_from_iter.rs b/library/alloc/src/vec/spec_from_iter.rs index e5d790878aabb..cde02b319bef6 100644 --- a/library/alloc/src/vec/spec_from_iter.rs +++ b/library/alloc/src/vec/spec_from_iter.rs @@ -1,5 +1,5 @@ use crate::alloc::Global; -use core::alloc; +use crate::co_alloc::CoAllocPref; use core::mem::ManuallyDrop; use core::ptr::{self}; @@ -28,10 +28,10 @@ pub(super) trait SpecFromIter { } #[allow(unused_braces)] -impl SpecFromIter for Vec +impl SpecFromIter for Vec where I: Iterator, - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots_global!(CO_ALLOC_PREF) }]:, { default fn from_iter(iterator: I) -> Self { SpecFromIterNested::from_iter(iterator) @@ -39,9 +39,10 @@ where } #[allow(unused_braces)] -impl SpecFromIter> for Vec +impl SpecFromIter> + for Vec where - [(); 
alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots_global!(CO_ALLOC_PREF) }]:, { fn from_iter(iterator: IntoIter) -> Self { // A common case is passing a vector into a function which immediately @@ -59,11 +60,11 @@ where if has_advanced { ptr::copy(it.ptr, it.buf.as_ptr(), it.len()); } - return Vec::from_raw_parts(it.buf.as_ptr(), it.len(), it.cap); + return Vec::from_raw_parts_co(it.buf.as_ptr(), it.len(), it.cap); } } - let mut vec = Vec::::new_co(); + let mut vec = Vec::::new_co(); // must delegate to spec_extend() since extend() itself delegates // to spec_from for empty Vecs vec.spec_extend(iterator); diff --git a/library/alloc/src/vec/spec_from_iter_nested.rs b/library/alloc/src/vec/spec_from_iter_nested.rs index b71e86e83ba5a..e567d3697fd28 100644 --- a/library/alloc/src/vec/spec_from_iter_nested.rs +++ b/library/alloc/src/vec/spec_from_iter_nested.rs @@ -1,11 +1,11 @@ -use core::alloc; use core::cmp; use core::iter::TrustedLen; use core::ptr; use crate::alloc::Global; +use crate::co_alloc::CoAllocPref; use crate::raw_vec::RawVec; -use crate::DEFAULT_COOP_PREFERRED; +use crate::CO_ALLOC_PREF_DEFAULT; use super::{SpecExtend, Vec}; @@ -17,10 +17,11 @@ pub(super) trait SpecFromIterNested { } #[allow(unused_braces)] -impl SpecFromIterNested for Vec +impl SpecFromIterNested + for Vec where I: Iterator, - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots_global!(CO_ALLOC_PREF) }]:, { default fn from_iter(mut iterator: I) -> Self { // Unroll the first iteration, as the vector is going to be @@ -34,7 +35,7 @@ where let (lower, _) = iterator.size_hint(); let initial_capacity = cmp::max(RawVec::::MIN_NON_ZERO_CAP, lower.saturating_add(1)); - let mut vector = Vec::with_capacity(initial_capacity); + let mut vector = Vec::with_capacity_co(initial_capacity); unsafe { // SAFETY: We requested capacity at least 1 ptr::write(vector.as_mut_ptr(), element); @@ -45,13 +46,13 @@ where }; // must delegate to spec_extend() since extend() itself delegates // to spec_from for empty Vecs - as SpecExtend>::spec_extend(&mut vector, iterator); + as SpecExtend>::spec_extend(&mut vector, iterator); vector } } #[allow(unused_braces)] -impl SpecFromIterNested for Vec +impl SpecFromIterNested for Vec where I: TrustedLen, { diff --git a/library/alloc/src/vec/splice.rs b/library/alloc/src/vec/splice.rs index acf5553c9cfb4..b48e021e522f0 100644 --- a/library/alloc/src/vec/splice.rs +++ b/library/alloc/src/vec/splice.rs @@ -1,5 +1,5 @@ use crate::alloc::{Allocator, Global}; -use core::alloc; +use crate::co_alloc::CoAllocPref; use core::ptr::{self}; use core::slice::{self}; @@ -19,23 +19,25 @@ use super::{Drain, Vec}; /// ``` #[derive(Debug)] #[stable(feature = "vec_splice", since = "1.21.0")] +#[allow(unused_braces)] pub struct Splice< 'a, I: Iterator + 'a, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + 'a = Global, - const COOP_PREFERRED: bool = false, + const CO_ALLOC_PREF: CoAllocPref = { CO_ALLOC_PREF_DEFAULT!() }, > where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { - pub(super) drain: Drain<'a, I::Item, A, COOP_PREFERRED>, + pub(super) drain: Drain<'a, I::Item, A, CO_ALLOC_PREF>, pub(super) replace_with: I, } #[stable(feature = "vec_splice", since = "1.21.0")] -impl Iterator - for Splice<'_, I, A, COOP_PREFERRED> +#[allow(unused_braces)] +impl Iterator + for Splice<'_, I, A, 
CO_ALLOC_PREF> where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { type Item = I::Item; @@ -49,10 +51,11 @@ where } #[stable(feature = "vec_splice", since = "1.21.0")] -impl DoubleEndedIterator - for Splice<'_, I, A, COOP_PREFERRED> +#[allow(unused_braces)] +impl DoubleEndedIterator + for Splice<'_, I, A, CO_ALLOC_PREF> where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn next_back(&mut self) -> Option { self.drain.next_back() @@ -60,18 +63,20 @@ where } #[stable(feature = "vec_splice", since = "1.21.0")] -impl ExactSizeIterator - for Splice<'_, I, A, COOP_PREFERRED> +#[allow(unused_braces)] +impl ExactSizeIterator + for Splice<'_, I, A, CO_ALLOC_PREF> where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { } #[stable(feature = "vec_splice", since = "1.21.0")] -impl Drop - for Splice<'_, I, A, COOP_PREFERRED> +#[allow(unused_braces)] +impl Drop + for Splice<'_, I, A, CO_ALLOC_PREF> where - [(); core::alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn drop(&mut self) { self.drain.by_ref().for_each(drop); @@ -119,9 +124,10 @@ where } /// Private helper methods for `Splice::drop` -impl Drain<'_, T, A, COOP_PREFERRED> +#[allow(unused_braces)] +impl Drain<'_, T, A, CO_ALLOC_PREF> where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, { /// The range from `self.vec.len` to `self.tail_start` contains elements /// that have been moved out. diff --git a/library/alloc/tests/autotraits.rs b/library/alloc/tests/autotraits.rs index 879e32b3fa309..0a14b165a74f3 100644 --- a/library/alloc/tests/autotraits.rs +++ b/library/alloc/tests/autotraits.rs @@ -1,3 +1,5 @@ +use alloc::{CO_ALLOC_PREF_META_YES, CO_ALLOC_PREF_META_NO}; + fn require_sync(_: T) {} fn require_send_sync(_: T) {} @@ -192,7 +194,12 @@ fn test_binary_heap() { }); require_send_sync(async { - let _v = None::>; + let _v = None::>; + async {}.await; + }); + + require_send_sync(async { + let _v = None::>; async {}.await; }); diff --git a/library/alloc/tests/lib.rs b/library/alloc/tests/lib.rs index 2a93a242d5174..1a435018e0b17 100644 --- a/library/alloc/tests/lib.rs +++ b/library/alloc/tests/lib.rs @@ -2,6 +2,7 @@ #![feature(alloc_layout_extra)] #![feature(assert_matches)] #![feature(btree_drain_filter)] +#![feature(global_co_alloc_meta)] #![feature(cow_is_borrowed)] #![feature(const_box)] #![feature(const_convert)] diff --git a/library/core/src/alloc/global.rs b/library/core/src/alloc/global.rs index e048c01d34bce..081858e2fb512 100644 --- a/library/core/src/alloc/global.rs +++ b/library/core/src/alloc/global.rs @@ -1,14 +1,14 @@ -use crate::alloc::GlobalCoAllocMeta; use crate::alloc::Layout; +use crate::alloc::{CoAllocMetaBase, CoAllocMetaPlain}; use crate::cmp; use crate::ptr; #[unstable(feature = "global_co_alloc_meta", issue = "none")] -#[allow(missing_debug_implementations)] +#[derive(Debug)] /// Used for parameters and results (to/from `GlobalCoAllocator`'s functions, where applicable). 
-pub struct RawAndMeta { +pub struct RawAndMeta { pub ptr: *mut u8, - pub meta: GlobalCoAllocMeta, + pub meta: M, } /// A memory allocator that can be registered as the standard library’s default @@ -130,6 +130,13 @@ pub struct RawAndMeta { /// having side effects. #[stable(feature = "global_alloc", since = "1.28.0")] pub unsafe trait GlobalAlloc { + /// NOT for public use. The default value MAY be REMOVED or CHANGED. + /// + /// @FIXME Validate (preferrable at compile time, otherwise as a test) that this type's + /// alignment <= `usize` alignment. + #[unstable(feature = "global_co_alloc_meta", issue = "none")] + type CoAllocMeta: CoAllocMetaBase = CoAllocMetaPlain; + /// Allocate memory as described by the given `layout`. /// /// Returns a pointer to newly-allocated memory, @@ -166,7 +173,7 @@ pub unsafe trait GlobalAlloc { unsafe fn alloc(&self, layout: Layout) -> *mut u8; #[unstable(feature = "global_co_alloc", issue = "none")] - unsafe fn co_alloc(&self, _layout: Layout, mut _result: &mut RawAndMeta) { + unsafe fn co_alloc(&self, _layout: Layout, mut _result: &mut RawAndMeta) { panic!("@FIXME") } @@ -186,7 +193,7 @@ pub unsafe trait GlobalAlloc { unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout); #[unstable(feature = "global_co_alloc", issue = "none")] - unsafe fn co_dealloc(&self, _ptr_and_meta: RawAndMeta, _layout: Layout) { + unsafe fn co_dealloc(&self, _ptr_and_meta: RawAndMeta, _layout: Layout) { panic!("@FIXME") } @@ -223,7 +230,11 @@ pub unsafe trait GlobalAlloc { } #[unstable(feature = "global_co_alloc", issue = "none")] - unsafe fn co_alloc_zeroed(&self, layout: Layout, mut result: &mut RawAndMeta) { + unsafe fn co_alloc_zeroed( + &self, + layout: Layout, + mut result: &mut RawAndMeta, + ) { let size = layout.size(); // SAFETY: the safety contract for `alloc` must be upheld by the caller. unsafe { self.co_alloc(layout, &mut result) }; @@ -310,10 +321,10 @@ pub unsafe trait GlobalAlloc { #[unstable(feature = "global_co_alloc", issue = "none")] unsafe fn co_realloc( &self, - ptr_and_meta: RawAndMeta, + ptr_and_meta: RawAndMeta, layout: Layout, new_size: usize, - mut result: &mut RawAndMeta, + mut result: &mut RawAndMeta, ) { // SAFETY: the caller must ensure that the `new_size` does not overflow. // `layout.align()` comes from a `Layout` and is thus guaranteed to be valid. diff --git a/library/core/src/alloc/mod.rs b/library/core/src/alloc/mod.rs index c0d2af6098378..609cbedda3410 100644 --- a/library/core/src/alloc/mod.rs +++ b/library/core/src/alloc/mod.rs @@ -25,23 +25,6 @@ use crate::error::Error; use crate::fmt; use crate::ptr::{self, NonNull}; -// @FIXME Make this target-specific -/// Metadata for `Vec/VecDeque/RawVec` to assist the allocator. Make sure its -/// alignment is not bigger than alignment of `usize`. Otherwise, even if (a -/// particular) `Vec/VecDeque/RawVec` generic instance doesn't use cooperation, -/// it would increase size of that `Vec/VecDeque/RawVec` because of alignment -/// rules! @FIXME compile time test that `GlobalCoAllocMeta` alignment <= -/// `usize` alignment. 
-#[unstable(feature = "global_co_alloc_meta", issue = "none")] -#[allow(missing_debug_implementations)] -#[derive(Clone, Copy)] -pub struct GlobalCoAllocMeta { - //pub one: usize, - /*pub two: usize, - pub three: usize, - pub four: usize,*/ -} - /// The `AllocError` error indicates an allocation failure /// that may be due to resource exhaustion or to /// something wrong when combining the given input arguments with this @@ -65,53 +48,76 @@ impl fmt::Display for AllocError { } } +/// (Non-Null) Pointer and coallocation metadata. #[unstable(feature = "global_co_alloc_meta", issue = "none")] -#[allow(missing_debug_implementations)] -pub struct PtrAndMeta { +#[derive(Clone, Copy, Debug)] +pub struct PtrAndMeta { pub ptr: NonNull, - pub meta: GlobalCoAllocMeta, + pub meta: M, } +/// (NonNull) Slice and coallocation metadata. #[unstable(feature = "global_co_alloc_meta", issue = "none")] -#[allow(missing_debug_implementations)] +#[derive(Clone, Copy, Debug)] /// Used for results (from `CoAllocator`'s functions, where applicable). -pub struct SliceAndMeta { +pub struct SliceAndMeta { pub slice: NonNull<[u8]>, - pub meta: GlobalCoAllocMeta, + pub meta: M, } -#[unstable(feature = "global_co_alloc_short_term_pref", issue = "none")] -//pub const SHORT_TERM_VEC_PREFERS_COOP: bool = true; -#[macro_export] -macro_rules! SHORT_TERM_VEC_PREFERS_COOP { - () => { - true - }; +/// `Result` of `SliceAndMeta` or `AllocError`. +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +pub type SliceAndMetaResult = Result, AllocError>; + +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +#[const_trait] +pub trait CoAllocMetaBase: Clone + Copy { + /// NOT for public use. This MAY BE REMOVED or CHANGED. + /// + /// For EXPERIMENTATION only. + const ZERO_METAS: [Self; 0]; + const ONE_METAS: [Self; 1]; + + /// NOT for public use. This MAY BE REMOVED or CHANGED. + /// + /// For EXPERIMENTATION only. + fn new_plain() -> Self; } #[unstable(feature = "global_co_alloc_meta", issue = "none")] -#[allow(missing_debug_implementations)] -pub type SliceAndMetaResult = Result; - -#[unstable(feature = "global_co_alloc", issue = "none")] -pub const fn co_alloc_metadata_num_slots() -> usize { - // @FIXME later - if false { - panic!( - "FIXME - consider replacing co_alloc_metadata_num_slots() with co_alloc_metadata_num_slots_with_preference(bool), and adding const flags as appropriate." - ); +#[derive(Clone, Copy, Debug)] +pub struct CoAllocMetaPlain {} + +const CO_ALLOC_META_PLAIN: CoAllocMetaPlain = CoAllocMetaPlain {}; + +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +impl const CoAllocMetaBase for CoAllocMetaPlain { + const ZERO_METAS: [Self; 0] = []; + const ONE_METAS: [Self; 1] = [CO_ALLOC_META_PLAIN]; + + fn new_plain() -> Self { + CO_ALLOC_META_PLAIN } - if A::IS_CO_ALLOCATOR { 1 } else { 0 } } -#[unstable(feature = "global_co_alloc", issue = "none")] -/// Param `coop_preferred` - if false, then this returns `0`, regardless of -/// whether allocator `A` is cooperative. -pub const fn co_alloc_metadata_num_slots_with_preference( - coop_preferred: bool, -) -> usize { - if A::IS_CO_ALLOCATOR && coop_preferred { 1 } else { 0 } -} +/// Whether an `Allocator` implementation supports coallocation. +/// +/// This type WILL CHANGE (once ``#![feature(generic_const_exprs)]` and +/// `#![feature(adt_const_params)]` are stable) to a dedicated struct/enum. 
Hence: +/// - DO NOT mix this/cast this with/to `u8`, `u16`, (nor any other integer); and +/// - DO NOT hard code any values, but use `CO_ALLOCATOR_SUPPORTS_META_YES` and `CO_ALLOCATOR_SUPPORTS_META_NO`. +// @FIXME Once ICE is fixed: Change to `u32` (or any other unused unsinged integer type, and other +// than `usize`, so we can't mix it up with `usize`). +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +pub type CoAllocatorMetaNumSlots = usize; + +/// Indicating that an Allocator supports coallocation (if a type of the allocated instances supports it, too). +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +pub const CO_ALLOCATOR_SUPPORTS_META_YES: CoAllocatorMetaNumSlots = 1; + +/// Indicating that an Allocator does not support coallocation. +#[unstable(feature = "global_co_alloc_meta", issue = "none")] +pub const CO_ALLOCATOR_SUPPORTS_META_NO: CoAllocatorMetaNumSlots = 0; /// An implementation of `Allocator` can allocate, grow, shrink, and deallocate arbitrary blocks of /// data described via [`Layout`][]. @@ -172,13 +178,19 @@ pub const fn co_alloc_metadata_num_slots_with_preference( #[unstable(feature = "allocator_api", issue = "32838")] #[const_trait] pub unsafe trait Allocator { - //const fn is_co_allocator() -> bool {false} - // Can't have: const type Xyz; - /// If this is any type with non-zero size, then the actual `Allocator` implementation supports cooperative functions (`co_*`) as first class citizens. - //type IsCoAllocator = (); - // It applies to the global (default) allocator only. And/or System allocator?! @FIXME - // @FIXME make false by default - const IS_CO_ALLOCATOR: bool = true; + /// NOT for public use. MAY CHANGE. + const CO_ALLOC_META_NUM_SLOTS: CoAllocatorMetaNumSlots = CO_ALLOCATOR_SUPPORTS_META_NO; + + /// Type to store coallocation metadata (if both the allocator and the heap-based type support + /// coallocation, and if coallocation is used). + /// + /// If this is any type with non-zero size, then the actual `Allocator` implementation supports + /// cooperative functions (`co_*`) as first class citizens. NOT for public use. The default + /// value MAY be REMOVED or CHANGED. + /// + /// @FIXME Validate (preferrable at compile time, otherwise as a test) that this type's + /// alignment <= `usize` alignment. + type CoAllocMeta: ~const CoAllocMetaBase = CoAllocMetaPlain; /// Attempts to allocate a block of memory. /// @@ -202,7 +214,7 @@ pub unsafe trait Allocator { /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html fn allocate(&self, layout: Layout) -> Result, AllocError>; - fn co_allocate(&self, _layout: Layout, _result: &mut SliceAndMetaResult) { + fn co_allocate(&self, _layout: Layout, _result: &mut SliceAndMetaResult) { panic!("FIXME") } @@ -228,7 +240,11 @@ pub unsafe trait Allocator { Ok(ptr) } - fn co_allocate_zeroed(&self, layout: Layout, mut result: &mut SliceAndMetaResult) { + fn co_allocate_zeroed( + &self, + layout: Layout, + mut result: &mut SliceAndMetaResult, + ) { self.co_allocate(layout, &mut result); if let Ok(SliceAndMeta { slice, .. 
}) = result { // SAFETY: `alloc` returns a valid memory block @@ -247,7 +263,7 @@ pub unsafe trait Allocator { /// [*fit*]: #memory-fitting unsafe fn deallocate(&self, ptr: NonNull, layout: Layout); - unsafe fn co_deallocate(&self, _ptr_and_meta: PtrAndMeta, _layout: Layout) { + unsafe fn co_deallocate(&self, _ptr_and_meta: PtrAndMeta, _layout: Layout) { panic!("FIXME") } @@ -317,10 +333,10 @@ pub unsafe trait Allocator { unsafe fn co_grow( &self, - ptr_and_meta: PtrAndMeta, + ptr_and_meta: PtrAndMeta, old_layout: Layout, new_layout: Layout, - mut result: &mut SliceAndMetaResult, + mut result: &mut SliceAndMetaResult, ) { debug_assert!( new_layout.size() >= old_layout.size(), @@ -411,10 +427,10 @@ pub unsafe trait Allocator { unsafe fn co_grow_zeroed( &self, - ptr_and_meta: PtrAndMeta, + ptr_and_meta: PtrAndMeta, old_layout: Layout, new_layout: Layout, - mut result: &mut SliceAndMetaResult, + mut result: &mut SliceAndMetaResult, ) { debug_assert!( new_layout.size() >= old_layout.size(), @@ -506,10 +522,10 @@ pub unsafe trait Allocator { unsafe fn co_shrink( &self, - ptr_and_meta: PtrAndMeta, + ptr_and_meta: PtrAndMeta, old_layout: Layout, new_layout: Layout, - mut result: &mut SliceAndMetaResult, + mut result: &mut SliceAndMetaResult, ) { debug_assert!( new_layout.size() <= old_layout.size(), diff --git a/library/proc_macro/src/bridge/mod.rs b/library/proc_macro/src/bridge/mod.rs index aae141dc2a118..e7980d44fd498 100644 --- a/library/proc_macro/src/bridge/mod.rs +++ b/library/proc_macro/src/bridge/mod.rs @@ -9,6 +9,7 @@ #![deny(unsafe_code)] use crate::{Delimiter, Level, LineColumn, Spacing}; +use std::alloc::Global; use std::fmt; use std::hash::Hash; use std::marker; @@ -252,14 +253,14 @@ impl<'a, T, M> Unmark for &'a mut Marked { } } -impl Mark for Vec { +impl Mark for Vec { type Unmarked = Vec; fn mark(unmarked: Self::Unmarked) -> Self { // Should be a no-op due to std's in-place collect optimizations. unmarked.into_iter().map(T::mark).collect() } } -impl Unmark for Vec { +impl Unmark for Vec { type Unmarked = Vec; fn unmark(self) -> Self::Unmarked { // Should be a no-op due to std's in-place collect optimizations. diff --git a/library/proc_macro/src/bridge/rpc.rs b/library/proc_macro/src/bridge/rpc.rs index b48a98903bdb4..e116a54b7e73f 100644 --- a/library/proc_macro/src/bridge/rpc.rs +++ b/library/proc_macro/src/bridge/rpc.rs @@ -1,9 +1,12 @@ //! Serialization for client-server communication. +use std::alloc::Global; use std::any::Any; use std::io::Write; use std::num::NonZeroU32; use std::str; +//use alloc::alloc::Global; +//use std::CO_ALLOC_PREF_DEFAULT; pub(super) type Writer = super::buffer::Buffer; @@ -224,7 +227,7 @@ impl DecodeMut<'_, '_, S> for String { } } -impl> Encode for Vec { +impl> Encode for Vec { fn encode(self, w: &mut Writer, s: &mut S) { self.len().encode(w, s); for x in self { @@ -234,7 +237,7 @@ impl> Encode for Vec { } impl<'a, S, T: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, '_, S> - for Vec + for Vec { fn decode(r: &mut Reader<'a>, s: &mut S) -> Self { let len = usize::decode(r, s); diff --git a/library/proc_macro/src/diagnostic.rs b/library/proc_macro/src/diagnostic.rs index 41a11b1003b84..9a81e60061141 100644 --- a/library/proc_macro/src/diagnostic.rs +++ b/library/proc_macro/src/diagnostic.rs @@ -1,4 +1,5 @@ use crate::Span; +use std::alloc::Global; /// An enum representing a diagnostic level. 
#[unstable(feature = "proc_macro_diagnostic", issue = "54140")] @@ -30,7 +31,7 @@ impl MultiSpan for Span { } #[unstable(feature = "proc_macro_diagnostic", issue = "54140")] -impl MultiSpan for Vec { +impl MultiSpan for Vec { fn into_spans(self) -> Vec { self } diff --git a/library/proc_macro/src/lib.rs b/library/proc_macro/src/lib.rs index 938935771d64e..5e6be69bf8102 100644 --- a/library/proc_macro/src/lib.rs +++ b/library/proc_macro/src/lib.rs @@ -22,6 +22,7 @@ // to make it compile with rust-analyzer on stable. #![feature(rustc_allow_const_fn_unstable)] #![feature(staged_api)] +#![feature(allocator_api)] #![feature(allow_internal_unstable)] #![feature(decl_macro)] #![feature(local_key_cell_methods)] @@ -33,6 +34,9 @@ #![feature(min_specialization)] #![feature(strict_provenance)] #![recursion_limit = "256"] +#![feature(global_co_alloc)] +#![feature(global_co_alloc_default)] +#![feature(global_co_alloc_meta)] #[unstable(feature = "proc_macro_internals", issue = "27812")] #[doc(hidden)] diff --git a/library/std/src/alloc.rs b/library/std/src/alloc.rs index 6969c7d81dd5e..a2dcb9d5b78a1 100644 --- a/library/std/src/alloc.rs +++ b/library/std/src/alloc.rs @@ -202,8 +202,6 @@ impl System { // which is in `std::sys::*::alloc`. #[unstable(feature = "allocator_api", issue = "32838")] unsafe impl Allocator for System { - const IS_CO_ALLOCATOR: bool = false; - #[inline] fn allocate(&self, layout: Layout) -> Result, AllocError> { self.alloc_impl(layout, false) diff --git a/library/std/src/ffi/os_str.rs b/library/std/src/ffi/os_str.rs index 6ab6323e19c83..80ed34157e6dc 100644 --- a/library/std/src/ffi/os_str.rs +++ b/library/std/src/ffi/os_str.rs @@ -1,7 +1,6 @@ #[cfg(test)] mod tests; -use crate::alloc::Global; use crate::borrow::{Borrow, Cow}; use crate::cmp; use crate::collections::TryReserveError; diff --git a/library/std/src/io/cursor.rs b/library/std/src/io/cursor.rs index 63045ebcb8157..2a95ad4ceb2d2 100644 --- a/library/std/src/io/cursor.rs +++ b/library/std/src/io/cursor.rs @@ -6,7 +6,7 @@ use crate::io::prelude::*; use crate::alloc::Allocator; use crate::cmp; use crate::io::{self, BorrowedCursor, ErrorKind, IoSlice, IoSliceMut, SeekFrom}; -use core::alloc; +use ::alloc::{co_alloc::CoAllocPref, meta_num_slots}; /// A `Cursor` wraps an in-memory buffer and provides it with a /// [`Seek`] implementation. @@ -398,13 +398,14 @@ fn slice_write_vectored( } /// Reserves the required space, and pads the vec with 0s if necessary. -fn reserve_and_pad( +#[allow(unused_braces)] +fn reserve_and_pad( pos_mut: &mut u64, - vec: &mut Vec, + vec: &mut Vec, buf_len: usize, ) -> io::Result where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { meta_num_slots!(A, CO_ALLOC_PREF) }]:, { let pos: usize = (*pos_mut).try_into().map_err(|_| { io::const_io_error!( @@ -444,14 +445,15 @@ where /// Writes the slice to the vec without allocating /// # Safety: vec must have buf.len() spare capacity -unsafe fn vec_write_unchecked( +#[allow(unused_braces)] +unsafe fn vec_write_unchecked( pos: usize, - vec: &mut Vec, + vec: &mut Vec, buf: &[u8], ) -> usize where A: Allocator, - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { meta_num_slots!(A, CO_ALLOC_PREF) }]:, { debug_assert!(vec.capacity() >= pos + buf.len()); vec.as_mut_ptr().add(pos).copy_from(buf.as_ptr(), buf.len()); @@ -467,14 +469,15 @@ where /// This also allows for the vec body to be empty, but with a position of N. 
/// This means that [`Write`] will pad the vec with 0 initially, /// before writing anything from that point -fn vec_write( +#[allow(unused_braces)] +fn vec_write( pos_mut: &mut u64, - vec: &mut Vec, + vec: &mut Vec, buf: &[u8], ) -> io::Result where A: Allocator, - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { meta_num_slots!(A, CO_ALLOC_PREF) }]:, { let buf_len = buf.len(); let mut pos = reserve_and_pad(pos_mut, vec, buf_len)?; @@ -503,14 +506,15 @@ where /// This also allows for the vec body to be empty, but with a position of N. /// This means that [`Write`] will pad the vec with 0 initially, /// before writing anything from that point -fn vec_write_vectored( +#[allow(unused_braces)] +fn vec_write_vectored( pos_mut: &mut u64, - vec: &mut Vec, + vec: &mut Vec, bufs: &[IoSlice<'_>], ) -> io::Result where A: Allocator, - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { meta_num_slots!(A, CO_ALLOC_PREF) }]:, { // For safety reasons, we don't want this sum to overflow ever. // If this saturates, the reserve should panic to avoid any unsound writing. @@ -558,10 +562,11 @@ impl Write for Cursor<&mut [u8]> { } #[stable(feature = "cursor_mut_vec", since = "1.25.0")] -impl Write for Cursor<&mut Vec> +#[allow(unused_braces)] +impl Write for Cursor<&mut Vec> where A: Allocator, - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn write(&mut self, buf: &[u8]) -> io::Result { vec_write(&mut self.pos, self.inner, buf) @@ -583,10 +588,11 @@ where } #[stable(feature = "rust1", since = "1.0.0")] -impl Write for Cursor> +#[allow(unused_braces)] +impl Write for Cursor> where A: Allocator, - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { meta_num_slots!(A, CO_ALLOC_PREF) }]:, { fn write(&mut self, buf: &[u8]) -> io::Result { vec_write(&mut self.pos, &mut self.inner, buf) diff --git a/library/std/src/io/impls.rs b/library/std/src/io/impls.rs index 02cd7aa0d2cc0..541f882d84ccf 100644 --- a/library/std/src/io/impls.rs +++ b/library/std/src/io/impls.rs @@ -1,7 +1,7 @@ #[cfg(test)] mod tests; -use crate::alloc::{self, Allocator}; +use crate::alloc::Allocator; use crate::cmp; use crate::collections::VecDeque; use crate::fmt; @@ -9,6 +9,7 @@ use crate::io::{ self, BorrowedCursor, BufRead, ErrorKind, IoSlice, IoSliceMut, Read, Seek, SeekFrom, Write, }; use crate::mem; +use ::alloc::{co_alloc::CoAllocPref, meta_num_slots}; // ============================================================================= // Forwarding implementations @@ -378,9 +379,10 @@ impl Write for &mut [u8] { /// Write is implemented for `Vec` by appending to the vector. /// The vector will grow as needed. #[stable(feature = "rust1", since = "1.0.0")] -impl Write for Vec +#[allow(unused_braces)] +impl Write for Vec where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(COOP_PREFERRED)]:, + [(); { meta_num_slots!(A, CO_ALLOC_PREF) }]:, { #[inline] fn write(&mut self, buf: &[u8]) -> io::Result { @@ -417,9 +419,10 @@ where /// Read is implemented for `VecDeque` by consuming bytes from the front of the `VecDeque`. 
#[stable(feature = "vecdeque_read_write", since = "1.63.0")] -impl Read for VecDeque +#[allow(unused_braces)] +impl Read for VecDeque where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(_COOP_PREFERRED)]:, + [(); { meta_num_slots!(A, CO_ALLOC_PREF) }]:, { /// Fill `buf` with the contents of the "front" slice as returned by /// [`as_slices`][`VecDeque::as_slices`]. If the contained byte slices of the `VecDeque` are @@ -444,9 +447,10 @@ where /// Write is implemented for `VecDeque` by appending to the `VecDeque`, growing it as needed. #[stable(feature = "vecdeque_read_write", since = "1.63.0")] -impl Write for VecDeque +#[allow(unused_braces)] +impl Write for VecDeque where - [(); alloc::co_alloc_metadata_num_slots_with_preference::(_COOP_PREFERRED)]:, + [(); { meta_num_slots!(A, CO_ALLOC_PREF) }]:, { #[inline] fn write(&mut self, buf: &[u8]) -> io::Result { diff --git a/library/std/src/lib.rs b/library/std/src/lib.rs index 311795fbd9bf3..7ffb41a32d731 100644 --- a/library/std/src/lib.rs +++ b/library/std/src/lib.rs @@ -217,7 +217,9 @@ #![allow(incomplete_features)] #![feature(generic_const_exprs)] #![feature(global_co_alloc)] +#![feature(global_co_alloc_default)] #![feature(global_co_alloc_plvec)] +#![feature(global_co_alloc_meta)] #![warn(deprecated_in_future)] #![warn(missing_docs)] #![warn(missing_debug_implementations)] @@ -324,6 +326,7 @@ #![feature(try_reserve_kind)] #![feature(vec_into_raw_parts)] #![feature(slice_concat_trait)] +#![feature(vec_new_co)] // // Library features (unwind): #![feature(panic_unwind)] @@ -414,6 +417,9 @@ pub mod prelude; pub use alloc_crate::borrow; #[stable(feature = "rust1", since = "1.0.0")] pub use alloc_crate::boxed; +#[unstable(feature = "global_co_alloc", issue = "none")] +pub use alloc_crate::co_alloc; +// @FIXME ugly - someone move this to a better place, please #[stable(feature = "rust1", since = "1.0.0")] pub use alloc_crate::fmt; #[stable(feature = "rust1", since = "1.0.0")] @@ -428,6 +434,8 @@ pub use alloc_crate::str; pub use alloc_crate::string; #[stable(feature = "rust1", since = "1.0.0")] pub use alloc_crate::vec; +#[unstable(feature = "global_co_alloc", issue = "none")] +pub use alloc_crate::{CO_ALLOC_PREF_DEFAULT, SHORT_TERM_VEC_CO_ALLOC_PREF}; #[stable(feature = "rust1", since = "1.0.0")] pub use core::any; #[stable(feature = "core_array", since = "1.36.0")] diff --git a/library/std/src/sys_common/thread_local_dtor.rs b/library/std/src/sys_common/thread_local_dtor.rs index 6ec2f3cd11601..dcab8a89e7f53 100644 --- a/library/std/src/sys_common/thread_local_dtor.rs +++ b/library/std/src/sys_common/thread_local_dtor.rs @@ -31,7 +31,7 @@ pub unsafe fn register_dtor_fallback(t: *mut u8, dtor: unsafe extern "C" fn(*mut static DTORS: StaticKey = StaticKey::new(Some(run_dtors)); type List = PlVec<(*mut u8, unsafe extern "C" fn(*mut u8))>; if DTORS.get().is_null() { - let v: Box = Box::new(Vec::new()); + let v: Box = Box::new(Vec::new_co()); DTORS.set(Box::into_raw(v) as *mut u8); } let list: &mut List = &mut *(DTORS.get() as *mut List); diff --git a/library/test/src/lib.rs b/library/test/src/lib.rs index 69fb529d7f563..bf28cde1bf033 100644 --- a/library/test/src/lib.rs +++ b/library/test/src/lib.rs @@ -16,6 +16,9 @@ #![unstable(feature = "test", issue = "50297")] #![doc(test(attr(deny(warnings))))] +#![feature(allocator_api)] +#![feature(global_co_alloc)] +#![feature(global_co_alloc_meta)] #![feature(internal_output_capture)] #![feature(is_terminal)] #![feature(staged_api)] @@ -52,6 +55,7 @@ pub mod test { } use std::{ + 
alloc::Global, collections::VecDeque, env, io, io::prelude::Write, @@ -346,7 +350,8 @@ where }; let mut running_tests: TestMap = HashMap::default(); - let mut timeout_queue: VecDeque = VecDeque::new(); + // @FIXME See if we can remove `Global` generic param: + let mut timeout_queue: VecDeque = VecDeque::new(); fn get_timed_out_tests( running_tests: &TestMap, diff --git a/library/test/src/stats.rs b/library/test/src/stats.rs index b33b080126131..bc892745d75a3 100644 --- a/library/test/src/stats.rs +++ b/library/test/src/stats.rs @@ -1,6 +1,7 @@ #![allow(missing_docs)] use std::mem; +use std::SHORT_TERM_VEC_CO_ALLOC_PREF; #[cfg(test)] mod tests; @@ -232,13 +233,13 @@ impl Stats for [f64] { } fn percentile(&self, pct: f64) -> f64 { - let mut tmp = self.to_vec(); + let mut tmp = self.to_vec_co::<{ SHORT_TERM_VEC_CO_ALLOC_PREF!() }>(); local_sort(&mut tmp); percentile_of_sorted(&tmp, pct) } fn quartiles(&self) -> (f64, f64, f64) { - let mut tmp = self.to_vec(); + let mut tmp = self.to_vec_co::<{ SHORT_TERM_VEC_CO_ALLOC_PREF!() }>(); local_sort(&mut tmp); let first = 25_f64; let a = percentile_of_sorted(&tmp, first); From d57e141155ccd10935a08727f15e9f9ea4844a1f Mon Sep 17 00:00:00 2001 From: Peter Kehl Date: Fri, 17 Feb 2023 12:54:30 -0800 Subject: [PATCH 6/9] CoAlloc: FIXME: raw_vec::assert_alignment() --- library/alloc/src/raw_vec.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/library/alloc/src/raw_vec.rs b/library/alloc/src/raw_vec.rs index 4b5345f0912f3..39827052b75c1 100644 --- a/library/alloc/src/raw_vec.rs +++ b/library/alloc/src/raw_vec.rs @@ -281,6 +281,7 @@ where &self.alloc } + // @FIXME #[inline] const fn assert_alignment() { assert!(mem::size_of::() % mem::align_of::() == 0); From 622db98818be6f270f43027dcda15d5caf8866e7 Mon Sep 17 00:00:00 2001 From: Peter Kehl Date: Sat, 18 Feb 2023 15:06:52 -0800 Subject: [PATCH 7/9] CoAlloc: Added CO_ALLOC_PREF to two Guard structs inside vec_deque::into_iter. Tidy. 
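The `@FIXME` added to `raw_vec::assert_alignment()` just above, like the similar notes on `CoAllocMeta`, asks for the alignment requirement (metadata alignment must not exceed `usize` alignment) to be enforced at compile time rather than through a runtime assert. A minimal sketch of one way to do that, illustrative only and not part of this series (`CoAllocMetaPlain` is used here merely as a stand-in for whichever metadata type is being validated):

// Const-evaluating the call forces the check at compile time; a violating type
// makes the crate fail to build instead of panicking at runtime.
const fn assert_meta_alignment<M>() {
    assert!(core::mem::align_of::<M>() <= core::mem::align_of::<usize>());
}

const _: () = assert_meta_alignment::<core::alloc::CoAllocMetaPlain>();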
--- .../src/collections/binary_heap/tests.rs | 10 +++++-- .../src/collections/vec_deque/into_iter.rs | 28 +++++++++++++------ .../alloc/src/collections/vec_deque/mod.rs | 3 +- library/alloc/src/slice.rs | 6 ++-- 4 files changed, 30 insertions(+), 17 deletions(-) diff --git a/library/alloc/src/collections/binary_heap/tests.rs b/library/alloc/src/collections/binary_heap/tests.rs index 35a844c738ca3..4f46bc5385aa4 100644 --- a/library/alloc/src/collections/binary_heap/tests.rs +++ b/library/alloc/src/collections/binary_heap/tests.rs @@ -1,7 +1,7 @@ use super::*; use crate::boxed::Box; -use crate::{CO_ALLOC_PREF_META_YES, CO_ALLOC_PREF_META_NO}; use crate::testing::crash_test::{CrashTestDummy, Panic}; +use crate::{CO_ALLOC_PREF_META_NO, CO_ALLOC_PREF_META_YES}; use core::mem; use std::iter::TrustedLen; use std::panic::{catch_unwind, AssertUnwindSafe}; @@ -449,10 +449,14 @@ fn test_extend_specialization() { #[allow(dead_code)] fn assert_covariance() { - fn drain<'new>(d: Drain<'static, &'static str, {CO_ALLOC_PREF_META_NO!()}>) -> Drain<'new, &'new str, {CO_ALLOC_PREF_META_NO!()}> { + fn drain<'new>( + d: Drain<'static, &'static str, { CO_ALLOC_PREF_META_NO!() }>, + ) -> Drain<'new, &'new str, { CO_ALLOC_PREF_META_NO!() }> { d } - fn drain_co<'new>(d: Drain<'static, &'static str, {CO_ALLOC_PREF_META_YES!()}>) -> Drain<'new, &'new str, {CO_ALLOC_PREF_META_YES!()}> { + fn drain_co<'new>( + d: Drain<'static, &'static str, { CO_ALLOC_PREF_META_YES!() }>, + ) -> Drain<'new, &'new str, { CO_ALLOC_PREF_META_YES!() }> { d } } diff --git a/library/alloc/src/collections/vec_deque/into_iter.rs b/library/alloc/src/collections/vec_deque/into_iter.rs index f043ba9c82a4c..0615290f7a56d 100644 --- a/library/alloc/src/collections/vec_deque/into_iter.rs +++ b/library/alloc/src/collections/vec_deque/into_iter.rs @@ -1,6 +1,6 @@ -use core::iter::{FusedIterator, TrustedLen}; -use core::{alloc, array, fmt, mem::MaybeUninit, ops::Try, ptr}; use crate::co_alloc::CoAllocPref; +use core::iter::{FusedIterator, TrustedLen}; +use core::{array, fmt, mem::MaybeUninit, ops::Try, ptr}; use crate::alloc::{Allocator, Global}; @@ -93,13 +93,19 @@ where F: FnMut(B, Self::Item) -> R, R: Try, { - struct Guard<'a, T, A: Allocator> { - deque: &'a mut VecDeque, + struct Guard<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + { + deque: &'a mut VecDeque, // `consumed <= deque.len` always holds. consumed: usize, } - impl<'a, T, A: Allocator> Drop for Guard<'a, T, A> { + impl<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Drop for Guard<'a, T, A, CO_ALLOC_PREF> + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + { fn drop(&mut self) { self.deque.len -= self.consumed; self.deque.head = self.deque.to_physical_idx(self.consumed); @@ -221,13 +227,19 @@ where F: FnMut(B, Self::Item) -> R, R: Try, { - struct Guard<'a, T, A: Allocator> { - deque: &'a mut VecDeque, + struct Guard<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> + where + [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:, + { + deque: &'a mut VecDeque, // `consumed <= deque.len` always holds. 
             consumed: usize,
         }
 
-        impl<'a, T, A: Allocator> Drop for Guard<'a, T, A> {
+        impl<'a, T, A: Allocator, const CO_ALLOC_PREF: CoAllocPref> Drop for Guard<'a, T, A, CO_ALLOC_PREF>
+        where
+            [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:,
+        {
             fn drop(&mut self) {
                 self.deque.len -= self.consumed;
             }
diff --git a/library/alloc/src/collections/vec_deque/mod.rs b/library/alloc/src/collections/vec_deque/mod.rs
index d6991b31368b1..eb5229d484aa2 100644
--- a/library/alloc/src/collections/vec_deque/mod.rs
+++ b/library/alloc/src/collections/vec_deque/mod.rs
@@ -2844,8 +2844,7 @@ where
 
 #[stable(feature = "rust1", since = "1.0.0")]
 #[allow(unused_braces)]
-impl FromIterator for VecDeque
-{
+impl FromIterator for VecDeque {
     fn from_iter>(iter: I) -> VecDeque {
         SpecFromIterCo::spec_from_iter_co(iter.into_iter())
     }
diff --git a/library/alloc/src/slice.rs b/library/alloc/src/slice.rs
index e6a2d869468b4..0fecde66d9f11 100644
--- a/library/alloc/src/slice.rs
+++ b/library/alloc/src/slice.rs
@@ -103,13 +103,11 @@ pub(crate) mod hack {
     // `vec!` macro mostly and causes perf regression. See #71204 for
     // discussion and perf results.
     #[allow(unused_braces)]
-    pub fn into_vec<T, A: Allocator>(
-        b: Box<[T], A>,
-    ) -> Vec<T, A>
+    pub fn into_vec<T, A: Allocator>(b: Box<[T], A>) -> Vec<T, A>
     where
         [(); { crate::meta_num_slots_default!(A) }]:,
     {
-        into_vec_co::(b)
+        into_vec_co::(b)
     }
 
     #[allow(unused_braces)]

From b959940b15ee696b5ff1531fc7a8a4941176f5a4 Mon Sep 17 00:00:00 2001
From: Peter Kehl
Date: Mon, 20 Feb 2023 04:13:34 -0800
Subject: [PATCH 8/9] CoAlloc: Workaround rust-lang/rustfmt issue #5691. FIXME undo once rustfmt is fixed on nightly.

---
 library/alloc/src/vec/mod.rs | 1 +
 1 file changed, 1 insertion(+)

diff --git a/library/alloc/src/vec/mod.rs b/library/alloc/src/vec/mod.rs
index a5a31bfd70913..23c90d83d03cd 100644
--- a/library/alloc/src/vec/mod.rs
+++ b/library/alloc/src/vec/mod.rs
@@ -1880,6 +1880,7 @@ where
     where
         [(); { crate::meta_num_slots!(A, CO_ALLOC_PREF) }]:,
     {
+        /// @FIXME This doc-comment here is only to workaround rust-lang/rustfmt issue #5691. Remove once it's fixed on nightly.
         /* Offset of the element we want to check if it is duplicate */
         read: usize,

From 6a54c48ab2b26d667073be8a37420385988511d7 Mon Sep 17 00:00:00 2001
From: Peter Kehl
Date: Mon, 20 Feb 2023 05:27:29 -0800
Subject: [PATCH 9/9] CoAlloc: compiler/ uses <Global as Allocator>::CoAllocMeta

---
 compiler/rustc_middle/src/mir/mod.rs                 | 7 +++++--
 compiler/rustc_middle/src/mir/syntax.rs              | 5 +++--
 compiler/rustc_parse/src/lib.rs                      | 1 +
 compiler/rustc_parse/src/parser/attr_wrapper.rs      | 7 +++++--
 compiler/rustc_parse/src/parser/mod.rs               | 4 ++--
 compiler/rustc_trait_selection/src/lib.rs            | 1 +
 compiler/rustc_trait_selection/src/traits/fulfill.rs | 7 +++++--
 7 files changed, 22 insertions(+), 10 deletions(-)

diff --git a/compiler/rustc_middle/src/mir/mod.rs b/compiler/rustc_middle/src/mir/mod.rs
index c09fd09f1ae86..b9fd6b135ba61 100644
--- a/compiler/rustc_middle/src/mir/mod.rs
+++ b/compiler/rustc_middle/src/mir/mod.rs
@@ -13,7 +13,6 @@
 use crate::ty::visit::{TypeVisitable, TypeVisitor};
 use crate::ty::{self, ir, DefIdTree, List, Ty, TyCtxt};
 use crate::ty::{AdtDef, InstanceDef, ScalarInt, UserTypeAnnotationIndex};
 use crate::ty::{GenericArg, InternalSubsts, SubstsRef};
-use core::alloc::GlobalCoAllocMeta;
 use rustc_data_structures::captures::Captures;
 use rustc_errors::ErrorGuaranteed;
@@ -35,6 +34,7 @@
 use rustc_span::{Span, DUMMY_SP};
 
 use either::Either;
 
+use std::alloc::{Allocator, Global};
 use std::borrow::Cow;
 use std::fmt::{self, Debug, Display, Formatter, Write};
 use std::ops::{ControlFlow, Index, IndexMut};
@@ -3078,7 +3078,10 @@ mod size_asserts {
     use super::*;
     use rustc_data_structures::static_assert_size;
     // tidy-alphabetical-start
-    static_assert_size!(BasicBlockData<'_>, 144 + mem::size_of::<GlobalCoAllocMeta>());
+    static_assert_size!(
+        BasicBlockData<'_>,
+        144 + mem::size_of::<<Global as Allocator>::CoAllocMeta>()
+    );
     static_assert_size!(LocalDecl<'_>, 56);
     static_assert_size!(Statement<'_>, 32);
     static_assert_size!(StatementKind<'_>, 16);
diff --git a/compiler/rustc_middle/src/mir/syntax.rs b/compiler/rustc_middle/src/mir/syntax.rs
index 267d3dfec41a3..9557b0d9cd084 100644
--- a/compiler/rustc_middle/src/mir/syntax.rs
+++ b/compiler/rustc_middle/src/mir/syntax.rs
@@ -4,7 +4,6 @@
 //! The intention is that this file only contains datatype declarations, no code.
 
 use super::{BasicBlock, Constant, Field, Local, SwitchTargets, UserTypeProjection};
-use core::alloc::GlobalCoAllocMeta;
 use core::mem;
 
 use crate::mir::coverage::{CodeRegion, CoverageKind};
@@ -26,6 +25,8 @@
 use rustc_span::symbol::Symbol;
 use rustc_span::Span;
 use rustc_target::asm::InlineAsmRegOrRegClass;
+use std::alloc::{Allocator, Global};
+
 /// Represents the "flavors" of MIR.
 ///
 /// All flavors of MIR use the same data structure, but there are some important differences. These
@@ -1286,6 +1287,6 @@ mod size_asserts {
     static_assert_size!(Operand<'_>, 24);
     static_assert_size!(Place<'_>, 16);
     static_assert_size!(PlaceElem<'_>, 24);
-    static_assert_size!(Rvalue<'_>, 40 + mem::size_of::<GlobalCoAllocMeta>());
+    static_assert_size!(Rvalue<'_>, 40 + mem::size_of::<<Global as Allocator>::CoAllocMeta>());
     // tidy-alphabetical-end
 }
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index 1ba1049b0bc80..b2968280e12aa 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -1,5 +1,6 @@
 //! The main parser interface.
+#![feature(allocator_api)]
 #![feature(array_windows)]
 #![feature(box_patterns)]
 #![feature(global_co_alloc_meta)]
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index 3d541cad22cf5..0f5cbed923092 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -1,5 +1,4 @@
 use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor, TrailingToken};
-use core::alloc::GlobalCoAllocMeta;
 use core::mem;
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
 use rustc_ast::tokenstream::{AttrTokenStream, AttributesData, ToAttrTokenStream};
@@ -10,6 +9,7 @@
 use rustc_errors::PResult;
 use rustc_session::parse::ParseSess;
 use rustc_span::{sym, Span, DUMMY_SP};
+use std::alloc::{Allocator, Global};
 use std::ops::Range;
 
 /// A wrapper type to ensure that the parser handles outer attributes correctly.
@@ -471,6 +471,9 @@ mod size_asserts {
     use rustc_data_structures::static_assert_size;
     // tidy-alphabetical-start
     static_assert_size!(AttrWrapper, 16);
-    static_assert_size!(LazyAttrTokenStreamImpl, 120 + mem::size_of::<GlobalCoAllocMeta>());
+    static_assert_size!(
+        LazyAttrTokenStreamImpl,
+        120 + mem::size_of::<<Global as Allocator>::CoAllocMeta>()
+    );
     // tidy-alphabetical-end
 }
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 373f392899bf5..0ea735e51610f 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -12,7 +12,6 @@ mod ty;
 
 use crate::lexer::UnmatchedBrace;
 pub use attr_wrapper::AttrWrapper;
-use core::alloc::GlobalCoAllocMeta;
 pub use diagnostics::AttemptLocalParseRecovery;
 pub(crate) use item::FnParseMode;
 pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
@@ -38,6 +37,7 @@
 use rustc_session::parse::ParseSess;
 use rustc_span::source_map::{Span, DUMMY_SP};
 use rustc_span::symbol::{kw, sym, Ident, Symbol};
+use std::alloc::{Allocator, Global};
 use std::ops::Range;
 use std::{cmp, mem, slice};
 
@@ -170,7 +170,7 @@ pub struct Parser<'a> {
 #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
 rustc_data_structures::static_assert_size!(
     Parser<'_>,
-    312 + 4 * mem::size_of::<GlobalCoAllocMeta>()
+    312 + 4 * mem::size_of::<<Global as Allocator>::CoAllocMeta>()
 );
 
 /// Stores span information about a closure.
diff --git a/compiler/rustc_trait_selection/src/lib.rs b/compiler/rustc_trait_selection/src/lib.rs
index d35cf93347b6e..8d855b62fa806 100644
--- a/compiler/rustc_trait_selection/src/lib.rs
+++ b/compiler/rustc_trait_selection/src/lib.rs
@@ -11,6 +11,7 @@
 //! This API is completely unstable and subject to change.
 
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
+#![feature(allocator_api)]
 #![feature(associated_type_bounds)]
 #![feature(box_patterns)]
 #![feature(control_flow_enum)]
diff --git a/compiler/rustc_trait_selection/src/traits/fulfill.rs b/compiler/rustc_trait_selection/src/traits/fulfill.rs
index 3f85de7d7e446..30b923ad68334 100644
--- a/compiler/rustc_trait_selection/src/traits/fulfill.rs
+++ b/compiler/rustc_trait_selection/src/traits/fulfill.rs
@@ -1,5 +1,4 @@
 use crate::infer::{InferCtxt, TyOrConstInferVar};
-use core::alloc::GlobalCoAllocMeta;
 use core::mem;
 // use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::obligation_forest::ProcessResult;
@@ -12,6 +11,7 @@
 use rustc_middle::ty::abstract_const::NotConstEvaluatable;
 use rustc_middle::ty::error::{ExpectedFound, TypeError};
 use rustc_middle::ty::subst::SubstsRef;
 use rustc_middle::ty::{self, Binder, Const, TypeVisitable};
+use std::alloc::{Allocator, Global};
 use std::marker::PhantomData;
 
 use super::const_evaluatable;
@@ -80,7 +80,10 @@ pub struct PendingPredicateObligation<'tcx> {
 
 // `PendingPredicateObligation` is used a lot. Make sure it doesn't unintentionally get bigger.
 #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
-static_assert_size!(PendingPredicateObligation<'_>, 72 + mem::size_of::<GlobalCoAllocMeta>());
+static_assert_size!(
+    PendingPredicateObligation<'_>,
+    72 + mem::size_of::<<Global as Allocator>::CoAllocMeta>()
+);
 
 impl<'a, 'tcx> FulfillmentContext<'tcx> {
     /// Creates a new fulfillment context.
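Note (not part of the patch series): the `static_assert_size!` changes above all follow one pattern: the asserted size of a compiler data structure becomes its upstream size plus the size of the allocator-provided metadata type. The standalone sketch below illustrates only that arithmetic; the `MetaAllocator` trait, `GlobalLike` type, and `WithMeta` struct are hypothetical stand-ins invented here, not the CoAlloc API, and the real series hangs `CoAllocMeta` off `Allocator` itself. It should compile on recent stable Rust.

// Standalone sketch with hypothetical names; not the CoAlloc API itself.
use core::mem;

/// Stand-in for the `CoAllocMeta` associated type this series adds to allocators.
trait MetaAllocator {
    type CoAllocMeta;
}

/// Stand-in for `Global`: it carries no cooperative metadata, so the type is zero-sized.
struct GlobalLike;

impl MetaAllocator for GlobalLike {
    type CoAllocMeta = ();
}

/// A structure whose layout depends on the allocator's metadata,
/// mirroring how `BasicBlockData`, `Parser`, `PendingPredicateObligation`, etc. grow.
struct WithMeta<A: MetaAllocator> {
    payload: u64,
    meta: A::CoAllocMeta,
}

// Compile-time check in the spirit of `static_assert_size!`:
// total size == payload size + metadata size (here 8 + 0).
const _: () = assert!(
    mem::size_of::<WithMeta<GlobalLike>>()
        == 8 + mem::size_of::<<GlobalLike as MetaAllocator>::CoAllocMeta>()
);

fn main() {
    println!("WithMeta is {} bytes", mem::size_of::<WithMeta<GlobalLike>>());
}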