diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs
index 3ecb9576b7..a1b0b52145 100644
--- a/crates/hir-def/src/body/lower.rs
+++ b/crates/hir-def/src/body/lower.rs
@@ -720,12 +720,8 @@ impl ExprCollector<'_> {
     fn collect_expr_path(&mut self, e: ast::PathExpr) -> Option<(Path, HygieneId)> {
         e.path().and_then(|path| {
             let path = self.parse_path(path)?;
-            let Path::Normal { type_anchor, mod_path, generic_args } = &path else {
-                panic!("path parsing produced a non-normal path");
-            };
             // Need to enable `mod_path.len() < 1` for `self`.
-            let may_be_variable =
-                type_anchor.is_none() && mod_path.len() <= 1 && generic_args.is_none();
+            let may_be_variable = matches!(&path, Path::BarePath(mod_path) if mod_path.len() <= 1);
             let hygiene = if may_be_variable {
                 self.hygiene_id_for(e.syntax().text_range().start())
             } else {
@@ -797,7 +793,7 @@ impl ExprCollector<'_> {
             ast::Expr::PathExpr(e) => {
                 let (path, hygiene) = self
                     .collect_expr_path(e.clone())
-                    .map(|(path, hygiene)| (Pat::Path(Box::new(path)), hygiene))
+                    .map(|(path, hygiene)| (Pat::Path(path), hygiene))
                     .unwrap_or((Pat::Missing, HygieneId::ROOT));
                 let pat_id = self.alloc_pat_from_expr(path, syntax_ptr);
                 if !hygiene.is_root() {
@@ -1059,7 +1055,7 @@ impl ExprCollector<'_> {
            syntax_ptr,
        );
        let none_arm = MatchArm {
-            pat: self.alloc_pat_desugared(Pat::Path(Box::new(option_none))),
+            pat: self.alloc_pat_desugared(Pat::Path(option_none)),
            guard: None,
            expr: self.alloc_expr(Expr::Break { expr: None, label: None }, syntax_ptr),
        };
@@ -1561,7 +1557,7 @@ impl ExprCollector<'_> {
                Pat::Ref { pat, mutability }
            }
            ast::Pat::PathPat(p) => {
-                let path = p.path().and_then(|path| self.parse_path(path)).map(Box::new);
+                let path = p.path().and_then(|path| self.parse_path(path));
                path.map(Pat::Path).unwrap_or(Pat::Missing)
            }
            ast::Pat::OrPat(p) => 'b: {
diff --git a/crates/hir-def/src/generics.rs b/crates/hir-def/src/generics.rs
index 20a6e5cc2d..b27a97ab47 100644
--- a/crates/hir-def/src/generics.rs
+++ b/crates/hir-def/src/generics.rs
@@ -21,7 +21,7 @@ use crate::{
     item_tree::{AttrOwner, FileItemTreeId, GenericModItem, GenericsItemTreeNode, ItemTree},
     lower::LowerCtx,
     nameres::{DefMap, MacroSubNs},
-    path::{AssociatedTypeBinding, GenericArg, GenericArgs, Path},
+    path::{AssociatedTypeBinding, GenericArg, GenericArgs, NormalPath, Path},
     type_ref::{ConstRef, LifetimeRef, TypeBound, TypeRef, TypeRefId, TypesMap, TypesSourceMap},
     AdtId, ConstParamId, GenericDefId, HasModule, ItemTreeLoc, LifetimeParamId,
     LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId,
@@ -788,19 +788,16 @@ fn copy_path(
     to_source_map: &mut TypesSourceMap,
 ) -> Path {
     match path {
-        Path::Normal { type_anchor, mod_path, generic_args } => {
-            let type_anchor = type_anchor
+        Path::BarePath(mod_path) => Path::BarePath(mod_path.clone()),
+        Path::Normal(path) => {
+            let type_anchor = path
+                .type_anchor()
                 .map(|type_ref| copy_type_ref(type_ref, from, from_source_map, to, to_source_map));
-            let mod_path = mod_path.clone();
-            let generic_args = generic_args.as_ref().map(|generic_args| {
-                generic_args
-                    .iter()
-                    .map(|generic_args| {
-                        copy_generic_args(generic_args, from, from_source_map, to, to_source_map)
-                    })
-                    .collect()
+            let mod_path = path.mod_path().clone();
+            let generic_args = path.generic_args().iter().map(|generic_args| {
+                copy_generic_args(generic_args, from, from_source_map, to, to_source_map)
             });
-            Path::Normal { type_anchor, mod_path, generic_args }
+            Path::Normal(NormalPath::new(type_anchor, mod_path, generic_args))
         }
         Path::LangItem(lang_item, name) => Path::LangItem(*lang_item, name.clone()),
     }
diff --git a/crates/hir-def/src/hir.rs b/crates/hir-def/src/hir.rs
index df1e103230..8596346943 100644
--- a/crates/hir-def/src/hir.rs
+++ b/crates/hir-def/src/hir.rs
@@ -583,7 +583,7 @@ pub enum Pat {
         suffix: Box<[PatId]>,
     },
     /// This might refer to a variable if a single segment path (specifically, on destructuring assignment).
-    Path(Box<Path>),
+    Path(Path),
     Lit(ExprId),
     Bind {
         id: BindingId,
diff --git a/crates/hir-def/src/hir/type_ref.rs b/crates/hir-def/src/hir/type_ref.rs
index b9f47ffdd0..38d95084e7 100644
--- a/crates/hir-def/src/hir/type_ref.rs
+++ b/crates/hir-def/src/hir/type_ref.rs
@@ -201,7 +201,7 @@ pub enum TypeBound {
 }
 
 #[cfg(target_pointer_width = "64")]
-const _: [(); 48] = [(); ::std::mem::size_of::<TypeRef>()];
+const _: [(); 32] = [(); ::std::mem::size_of::<TypeRef>()];
 
 #[derive(Clone, PartialEq, Eq, Hash, Debug)]
 pub enum UseArgRef {
diff --git a/crates/hir-def/src/path.rs b/crates/hir-def/src/path.rs
index 4f02a59f8d..dc6947c5b5 100644
--- a/crates/hir-def/src/path.rs
+++ b/crates/hir-def/src/path.rs
@@ -14,6 +14,7 @@ use crate::{
 use hir_expand::name::Name;
 use intern::Interned;
 use span::Edition;
+use stdx::thin_vec::thin_vec_with_header_struct;
 use syntax::ast;
 
 pub use hir_expand::mod_path::{path, ModPath, PathKind};
@@ -47,20 +48,33 @@ impl Display for ImportAliasDisplay<'_> {
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub enum Path {
-    /// A normal path
-    Normal {
-        /// Type based path like `<T>::foo`.
-        /// Note that paths like `<Type as Trait>::foo` are desugared to `Trait::<Self=Type>::foo`.
-        type_anchor: Option<TypeRefId>,
-        mod_path: Interned<ModPath>,
-        /// Invariant: the same len as `self.mod_path.segments` or `None` if all segments are `None`.
-        generic_args: Option<Box<[Option<GenericArgs>]>>,
-    },
+    /// `BarePath` is used when the path has neither generics nor type anchor, since the vast majority of paths
+    /// are in this category, and splitting `Path` this way allows it to be more thin. When the path has either generics
+    /// or type anchor, it is `Path::Normal` with the generics filled with `None` even if there are none (practically
+    /// this is not a problem since many more paths have generics than a type anchor).
+    BarePath(Interned<ModPath>),
+    /// `Path::Normal` may have empty generics and type anchor (but generic args will be filled with `None`).
+    Normal(NormalPath),
     /// A link to a lang item. It is used in desugaring of things like `it?`. We can show these
     /// links via a normal path since they might be private and not accessible in the usage place.
     LangItem(LangItemTarget, Option<Name>),
 }
 
+// This type is being used a lot, make sure it doesn't grow unintentionally.
+#[cfg(target_arch = "x86_64")]
+const _: () = {
+    assert!(size_of::<Path>() == 16);
+    assert!(size_of::<Option<Path>>() == 16);
+};
+
+thin_vec_with_header_struct! {
+    pub new(pub(crate)) struct NormalPath, NormalPathHeader {
+        pub generic_args: [Option<GenericArgs>],
+        pub type_anchor: Option<TypeRefId>,
+        pub mod_path: Interned<ModPath>; ref,
+    }
+}
+
 /// Generic arguments to a path segment (e.g. the `i32` in `Option<i32>`). This
 /// also includes bindings of associated types, like in `Iterator<Item = Foo>`.
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -112,50 +126,49 @@ impl Path {
     }
 
     /// Converts a known mod path to `Path`.
-    pub fn from_known_path(
-        path: ModPath,
-        generic_args: impl Into<Box<[Option<GenericArgs>]>>,
-    ) -> Path {
-        let generic_args = generic_args.into();
-        assert_eq!(path.len(), generic_args.len());
-        Path::Normal {
-            type_anchor: None,
-            mod_path: Interned::new(path),
-            generic_args: Some(generic_args),
-        }
+    pub fn from_known_path(path: ModPath, generic_args: Vec<Option<GenericArgs>>) -> Path {
+        Path::Normal(NormalPath::new(None, Interned::new(path), generic_args))
     }
 
     /// Converts a known mod path to `Path`.
     pub fn from_known_path_with_no_generic(path: ModPath) -> Path {
-        Path::Normal { type_anchor: None, mod_path: Interned::new(path), generic_args: None }
+        Path::BarePath(Interned::new(path))
     }
 
+    #[inline]
     pub fn kind(&self) -> &PathKind {
         match self {
-            Path::Normal { mod_path, .. } => &mod_path.kind,
+            Path::BarePath(mod_path) => &mod_path.kind,
+            Path::Normal(path) => &path.mod_path().kind,
             Path::LangItem(..) => &PathKind::Abs,
         }
     }
 
+    #[inline]
     pub fn type_anchor(&self) -> Option<TypeRefId> {
         match self {
-            Path::Normal { type_anchor, .. } => *type_anchor,
-            Path::LangItem(..) => None,
+            Path::Normal(path) => path.type_anchor(),
+            Path::LangItem(..) | Path::BarePath(_) => None,
+        }
+    }
+
+    #[inline]
+    pub fn generic_args(&self) -> Option<&[Option<GenericArgs>]> {
+        match self {
+            Path::Normal(path) => Some(path.generic_args()),
+            Path::LangItem(..) | Path::BarePath(_) => None,
         }
     }
 
     pub fn segments(&self) -> PathSegments<'_> {
         match self {
-            Path::Normal { mod_path, generic_args, .. } => {
-                let s = PathSegments {
-                    segments: mod_path.segments(),
-                    generic_args: generic_args.as_deref(),
-                };
-                if let Some(generic_args) = s.generic_args {
-                    assert_eq!(s.segments.len(), generic_args.len());
-                }
-                s
+            Path::BarePath(mod_path) => {
+                PathSegments { segments: mod_path.segments(), generic_args: None }
             }
+            Path::Normal(path) => PathSegments {
+                segments: path.mod_path().segments(),
+                generic_args: Some(path.generic_args()),
+            },
             Path::LangItem(_, seg) => PathSegments {
                 segments: seg.as_ref().map_or(&[], |seg| std::slice::from_ref(seg)),
                 generic_args: None,
@@ -165,34 +178,55 @@ impl Path {
 
     pub fn mod_path(&self) -> Option<&ModPath> {
         match self {
-            Path::Normal { mod_path, .. } => Some(mod_path),
+            Path::BarePath(mod_path) => Some(mod_path),
+            Path::Normal(path) => Some(path.mod_path()),
             Path::LangItem(..) => None,
         }
     }
 
     pub fn qualifier(&self) -> Option<Path> {
-        let Path::Normal { mod_path, generic_args, type_anchor } = self else {
-            return None;
-        };
-        if mod_path.is_ident() {
-            return None;
+        match self {
+            Path::BarePath(mod_path) => {
+                if mod_path.is_ident() {
+                    return None;
+                }
+                Some(Path::BarePath(Interned::new(ModPath::from_segments(
+                    mod_path.kind,
+                    mod_path.segments()[..mod_path.segments().len() - 1].iter().cloned(),
+                ))))
+            }
+            Path::Normal(path) => {
+                let mod_path = path.mod_path();
+                if mod_path.is_ident() {
+                    return None;
+                }
+                let type_anchor = path.type_anchor();
+                let generic_args = path.generic_args();
+                let qualifier_mod_path = Interned::new(ModPath::from_segments(
+                    mod_path.kind,
+                    mod_path.segments()[..mod_path.segments().len() - 1].iter().cloned(),
+                ));
+                let qualifier_generic_args = &generic_args[..generic_args.len() - 1];
+                Some(Path::Normal(NormalPath::new(
+                    type_anchor,
+                    qualifier_mod_path,
+                    qualifier_generic_args.iter().cloned(),
+                )))
+            }
+            Path::LangItem(..) => None,
         }
-        let res = Path::Normal {
-            type_anchor: *type_anchor,
-            mod_path: Interned::new(ModPath::from_segments(
-                mod_path.kind,
-                mod_path.segments()[..mod_path.segments().len() - 1].iter().cloned(),
-            )),
-            generic_args: generic_args.as_ref().map(|it| it[..it.len() - 1].to_vec().into()),
-        };
-        Some(res)
     }
 
     pub fn is_self_type(&self) -> bool {
-        let Path::Normal { mod_path, generic_args, type_anchor } = self else {
-            return false;
-        };
-        type_anchor.is_none() && generic_args.as_deref().is_none() && mod_path.is_Self()
+        match self {
+            Path::BarePath(mod_path) => mod_path.is_Self(),
+            Path::Normal(path) => {
+                path.type_anchor().is_none()
+                    && path.mod_path().is_Self()
+                    && path.generic_args().iter().all(|args| args.is_none())
+            }
+            Path::LangItem(..) => false,
+        }
     }
 }
@@ -268,16 +302,6 @@ impl GenericArgs {
 
 impl From<Name> for Path {
     fn from(name: Name) -> Path {
-        Path::Normal {
-            type_anchor: None,
-            mod_path: Interned::new(ModPath::from_segments(PathKind::Plain, iter::once(name))),
-            generic_args: None,
-        }
-    }
-}
-
-impl From<Name> for Box<Path> {
-    fn from(name: Name) -> Box<Path> {
-        Box::new(Path::from(name))
+        Path::BarePath(Interned::new(ModPath::from_segments(PathKind::Plain, iter::once(name))))
     }
 }
diff --git a/crates/hir-def/src/path/lower.rs b/crates/hir-def/src/path/lower.rs
index 5472da59b5..df036ef3b6 100644
--- a/crates/hir-def/src/path/lower.rs
+++ b/crates/hir-def/src/path/lower.rs
@@ -2,7 +2,7 @@
 
 use std::iter;
 
-use crate::{lower::LowerCtx, type_ref::ConstRef};
+use crate::{lower::LowerCtx, path::NormalPath, type_ref::ConstRef};
 
 use hir_expand::{
     mod_path::resolve_crate_root,
@@ -74,11 +74,9 @@ pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option<Path> {
                     // <T as Trait<A>>::Foo desugars to Trait<A>::Foo<Self=T>
                     Some(trait_ref) => {
-                        let Path::Normal { mod_path, generic_args: path_generic_args, .. } =
-                            Path::from_src(ctx, trait_ref.path()?)?
-                        else {
-                            return None;
-                        };
+                        let path = Path::from_src(ctx, trait_ref.path()?)?;
+                        let mod_path = path.mod_path()?;
+                        let path_generic_args = path.generic_args();
                         let num_segments = mod_path.segments().len();
                         kind = mod_path.kind;
@@ -136,7 +134,7 @@ pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option, mut path: ast::Path) -> Option Option { if let Some(q) = path.qualifier() {
diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs
index 0b49ee8051..42c7ea7c09 100644
--- a/crates/hir-def/src/resolver.rs
+++ b/crates/hir-def/src/resolver.rs
@@ -167,7 +167,8 @@ impl Resolver {
         path: &Path,
     ) -> Option<(TypeNs, Option<usize>, Option<ImportOrExternCrate>)> {
         let path = match path {
-            Path::Normal { mod_path, .. } => mod_path,
+            Path::BarePath(mod_path) => mod_path,
+            Path::Normal(it) => it.mod_path(),
             Path::LangItem(l, seg) => {
                 let type_ns = match *l {
                     LangItemTarget::Union(it) => TypeNs::AdtId(it.into()),
@@ -265,7 +266,8 @@ impl Resolver {
         mut hygiene_id: HygieneId,
     ) -> Option<ResolveValueResult> {
         let path = match path {
-            Path::Normal { mod_path, .. } => mod_path,
+            Path::BarePath(mod_path) => mod_path,
+            Path::Normal(it) => it.mod_path(),
             Path::LangItem(l, None) => {
                 return Some(ResolveValueResult::ValueNs(
                     match *l {
diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs
index ee55dbe1c3..32b4ea2f28 100644
--- a/crates/hir-ty/src/infer/expr.rs
+++ b/crates/hir-ty/src/infer/expr.rs
@@ -198,7 +198,7 @@ impl InferenceContext<'_> {
         match &self.body[expr] {
             // Lang item paths cannot currently be local variables or statics.
             Expr::Path(Path::LangItem(_, _)) => false,
-            Expr::Path(Path::Normal { type_anchor: Some(_), .. }) => false,
+            Expr::Path(Path::Normal(path)) => path.type_anchor().is_none(),
             Expr::Path(path) => self
                 .resolver
                 .resolve_path_in_value_ns_fully(
@@ -1214,7 +1214,7 @@ impl InferenceContext<'_> {
         let ty = match self.infer_path(path, id) {
             Some(ty) => ty,
             None => {
-                if matches!(path, Path::Normal { mod_path, .. } if mod_path.is_ident() || mod_path.is_self())
+                if path.mod_path().is_some_and(|mod_path| mod_path.is_ident() || mod_path.is_self())
                 {
                     self.push_diagnostic(InferenceDiagnostic::UnresolvedIdent { id });
                 }
diff --git a/crates/hir-ty/src/infer/path.rs b/crates/hir-ty/src/infer/path.rs
index 0df44f5df2..442daa9f9e 100644
--- a/crates/hir-ty/src/infer/path.rs
+++ b/crates/hir-ty/src/infer/path.rs
@@ -222,7 +222,7 @@ impl InferenceContext<'_> {
         let _d;
         let (resolved_segment, remaining_segments) = match path {
-            Path::Normal { .. } => {
+            Path::Normal { .. } | Path::BarePath(_) => {
                 assert!(remaining_index < path.segments().len());
                 (
                     path.segments().get(remaining_index - 1).unwrap(),
diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs
index 3339422760..c4e0640051 100644
--- a/crates/hir-ty/src/mir/lower.rs
+++ b/crates/hir-ty/src/mir/lower.rs
@@ -1373,12 +1373,11 @@ impl<'ctx> MirLowerCtx<'ctx> {
                 ),
             };
             let edition = self.edition();
-            let unresolved_name = || {
-                MirLowerError::unresolved_path(self.db, c.as_ref(), edition, &self.body.types)
-            };
+            let unresolved_name =
+                || MirLowerError::unresolved_path(self.db, c, edition, &self.body.types);
             let pr = self
                 .resolver
-                .resolve_path_in_value_ns(self.db.upcast(), c.as_ref(), HygieneId::ROOT)
+                .resolve_path_in_value_ns(self.db.upcast(), c, HygieneId::ROOT)
                 .ok_or_else(unresolved_name)?;
             match pr {
                 ResolveValueResult::ValueNs(v, _) => {
diff --git a/crates/stdx/src/lib.rs b/crates/stdx/src/lib.rs
index 76dbd42ff6..3e36394182 100644
--- a/crates/stdx/src/lib.rs
+++ b/crates/stdx/src/lib.rs
@@ -10,6 +10,7 @@ pub mod non_empty_vec;
 pub mod panic_context;
 pub mod process;
 pub mod rand;
+pub mod thin_vec;
 pub mod thread;
 
 pub use always_assert::{always, never};
diff --git a/crates/stdx/src/thin_vec.rs b/crates/stdx/src/thin_vec.rs
new file mode 100644
index 0000000000..700220e1d3
--- /dev/null
+++ b/crates/stdx/src/thin_vec.rs
@@ -0,0 +1,472 @@
+use std::alloc::{dealloc, handle_alloc_error, Layout};
+use std::fmt;
+use std::hash::{Hash, Hasher};
+use std::marker::PhantomData;
+use std::ops::{Deref, DerefMut};
+use std::ptr::{addr_of_mut, slice_from_raw_parts_mut, NonNull};
+
+/// A type that is functionally equivalent to `(Header, Box<[Item]>)`,
+/// but all data is stored in one heap allocation and the pointer is thin,
+/// so the whole thing's size is like a pointer.
+pub struct ThinVecWithHeader<Header, Item> {
+    /// INVARIANT: Points to a valid heap allocation that contains `ThinVecInner<Header>`,
+    /// followed by (suitably aligned) `len` `Item`s.
+    ptr: NonNull<ThinVecInner<Header>>,
+    _marker: PhantomData<(Header, Box<[Item]>)>,
+}
+
+// SAFETY: We essentially own both the header and the items.
+unsafe impl<Header: Send, Item: Send> Send for ThinVecWithHeader<Header, Item> {}
+unsafe impl<Header: Sync, Item: Sync> Sync for ThinVecWithHeader<Header, Item> {}
+
+#[derive(Clone)]
+struct ThinVecInner<Header> {
+    header: Header,
+    len: usize,
+}
+
+impl<Header, Item> ThinVecWithHeader<Header, Item> {
+    /// # Safety
+    ///
+    /// The iterator must produce `len` elements.
+    #[inline]
+    unsafe fn from_trusted_len_iter(
+        header: Header,
+        len: usize,
+        items: impl Iterator<Item = Item>,
+    ) -> Self {
+        let (ptr, layout, items_offset) = Self::allocate(len);
+
+        struct DeallocGuard(*mut u8, Layout);
+        impl Drop for DeallocGuard {
+            fn drop(&mut self) {
+                // SAFETY: We allocated this above.
+                unsafe {
+                    dealloc(self.0, self.1);
+                }
+            }
+        }
+        let _dealloc_guard = DeallocGuard(ptr.as_ptr().cast::<u8>(), layout);
+
+        // INVARIANT: Between `0..1` there are only initialized items.
+        struct ItemsGuard<Item>(*mut Item, *mut Item);
+        impl<Item> Drop for ItemsGuard<Item> {
+            fn drop(&mut self) {
+                // SAFETY: Our invariant.
+                unsafe {
+                    slice_from_raw_parts_mut(self.0, self.1.offset_from(self.0) as usize)
+                        .drop_in_place();
+                }
+            }
+        }
+
+        // SAFETY: We allocated enough space.
+        let mut items_ptr = unsafe { ptr.as_ptr().byte_add(items_offset).cast::<Item>() };
+        // INVARIANT: There are zero elements in this range.
+        let mut items_guard = ItemsGuard(items_ptr, items_ptr);
+        items.for_each(|item| {
+            // SAFETY: Our precondition guarantees we won't get more than `len` items, and we allocated
+            // enough space for `len` items.
+            unsafe {
+                items_ptr.write(item);
+                items_ptr = items_ptr.add(1);
+            }
+            // INVARIANT: We just initialized this item.
+            items_guard.1 = items_ptr;
+        });
+
+        // SAFETY: We allocated enough space.
+        unsafe {
+            ptr.write(ThinVecInner { header, len });
+        }
+
+        std::mem::forget(items_guard);
+
+        std::mem::forget(_dealloc_guard);
+
+        // INVARIANT: We allocated and initialized all fields correctly.
+        Self { ptr, _marker: PhantomData }
+    }
+
+    #[inline]
+    fn allocate(len: usize) -> (NonNull<ThinVecInner<Header>>, Layout, usize) {
+        let (layout, items_offset) = Self::layout(len);
+        // SAFETY: We always have `len`, so our allocation cannot be zero-sized.
+        let ptr = unsafe { std::alloc::alloc(layout).cast::<ThinVecInner<Header>>() };
+        let Some(ptr) = NonNull::<ThinVecInner<Header>>::new(ptr) else {
+            handle_alloc_error(layout);
+        };
+        (ptr, layout, items_offset)
+    }
+
+    #[inline]
+    #[allow(clippy::should_implement_trait)]
+    pub fn from_iter<I>(header: Header, items: I) -> Self
+    where
+        I: IntoIterator<Item = Item>,
+        I::IntoIter: TrustedLen,
+    {
+        let items = items.into_iter();
+        // SAFETY: `TrustedLen` guarantees the iterator length is exact.
+        unsafe { Self::from_trusted_len_iter(header, items.len(), items) }
+    }
+
+    #[inline]
+    fn items_offset(&self) -> usize {
+        // SAFETY: We `pad_to_align()` in `layout()`, so at most we're accessing past the end of the allocation,
+        // which is allowed.
+        unsafe {
+            Layout::new::<ThinVecInner<Header>>().extend(Layout::new::<Item>()).unwrap_unchecked().1
+        }
+    }
+
+    #[inline]
+    fn header_and_len(&self) -> &ThinVecInner<Header> {
+        // SAFETY: By `ptr`'s invariant, it is correctly allocated and initialized.
+        unsafe { &*self.ptr.as_ptr() }
+    }
+
+    #[inline]
+    fn items_ptr(&self) -> *mut [Item] {
+        let len = self.header_and_len().len;
+        // SAFETY: `items_offset()` returns the correct offset of the items, where they are allocated.
+        let ptr = unsafe { self.ptr.as_ptr().byte_add(self.items_offset()).cast::<Item>() };
+        slice_from_raw_parts_mut(ptr, len)
+    }
+
+    #[inline]
+    pub fn header(&self) -> &Header {
+        &self.header_and_len().header
+    }
+
+    #[inline]
+    pub fn header_mut(&mut self) -> &mut Header {
+        // SAFETY: By `ptr`'s invariant, it is correctly allocated and initialized.
+        unsafe { &mut *addr_of_mut!((*self.ptr.as_ptr()).header) }
+    }
+
+    #[inline]
+    pub fn items(&self) -> &[Item] {
+        // SAFETY: `items_ptr()` gives a valid pointer.
+        unsafe { &*self.items_ptr() }
+    }
+
+    #[inline]
+    pub fn items_mut(&mut self) -> &mut [Item] {
+        // SAFETY: `items_ptr()` gives a valid pointer.
+        unsafe { &mut *self.items_ptr() }
+    }
+
+    #[inline]
+    pub fn len(&self) -> usize {
+        self.header_and_len().len
+    }
+
+    #[inline]
+    fn layout(len: usize) -> (Layout, usize) {
+        let (layout, items_offset) = Layout::new::<ThinVecInner<Header>>()
+            .extend(Layout::array::<Item>(len).expect("too big `ThinVec` requested"))
+            .expect("too big `ThinVec` requested");
+        let layout = layout.pad_to_align();
+        (layout, items_offset)
+    }
+}
+
+/// # Safety
+///
+/// The length reported must be exactly the number of items yielded.
+pub unsafe trait TrustedLen: ExactSizeIterator {}
+
+unsafe impl<T> TrustedLen for std::vec::IntoIter<T> {}
+unsafe impl<T> TrustedLen for std::slice::Iter<'_, T> {}
+unsafe impl<'a, T: Clone + 'a, I: TrustedLen<Item = &'a T>> TrustedLen for std::iter::Cloned<I> {}
+unsafe impl<T, I: TrustedLen, F: FnMut(I::Item) -> T> TrustedLen for std::iter::Map<I, F> {}
+unsafe impl<T> TrustedLen for std::vec::Drain<'_, T> {}
+unsafe impl<T, const N: usize> TrustedLen for std::array::IntoIter<T, N> {}
+
+impl<Header: Clone, Item: Clone> Clone for ThinVecWithHeader<Header, Item> {
+    #[inline]
+    fn clone(&self) -> Self {
+        Self::from_iter(self.header().clone(), self.items().iter().cloned())
+    }
+}
+
+impl<Header, Item> Drop for ThinVecWithHeader<Header, Item> {
+    #[inline]
+    fn drop(&mut self) {
+        // This must come before we drop `header`, because after that we cannot make a reference to it in `len()`.
+        let len = self.len();
+
+        // SAFETY: The contents are allocated and initialized.
+        unsafe {
+            addr_of_mut!((*self.ptr.as_ptr()).header).drop_in_place();
+            self.items_ptr().drop_in_place();
+        }
+
+        let (layout, _) = Self::layout(len);
+        // SAFETY: This was allocated in `new()` with the same layout calculation.
+        unsafe {
+            dealloc(self.ptr.as_ptr().cast::<u8>(), layout);
+        }
+    }
+}
+
+impl<Header: fmt::Debug, Item: fmt::Debug> fmt::Debug for ThinVecWithHeader<Header, Item> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ThinVecWithHeader")
+            .field("header", self.header())
+            .field("items", &self.items())
+            .finish()
+    }
+}
+
+impl<Header: PartialEq, Item: PartialEq> PartialEq for ThinVecWithHeader<Header, Item> {
+    #[inline]
+    fn eq(&self, other: &Self) -> bool {
+        self.header() == other.header() && self.items() == other.items()
+    }
+}
+
+impl<Header: Eq, Item: Eq> Eq for ThinVecWithHeader<Header, Item> {}
+
+impl<Header: Hash, Item: Hash> Hash for ThinVecWithHeader<Header, Item> {
+    #[inline]
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        self.header().hash(state);
+        self.items().hash(state);
+    }
+}
+
+#[derive(Clone, PartialEq, Eq, Hash)]
+pub struct ThinVec<T>(ThinVecWithHeader<(), T>);
+
+impl<T> ThinVec<T> {
+    #[inline]
+    #[allow(clippy::should_implement_trait)]
+    pub fn from_iter<I>(values: I) -> Self
+    where
+        I: IntoIterator<Item = T>,
+        I::IntoIter: TrustedLen,
+    {
+        Self(ThinVecWithHeader::from_iter((), values))
+    }
+
+    #[inline]
+    pub fn len(&self) -> usize {
+        self.0.len()
+    }
+
+    #[inline]
+    pub fn iter(&self) -> std::slice::Iter<'_, T> {
+        (**self).iter()
+    }
+
+    #[inline]
+    pub fn iter_mut(&mut self) -> std::slice::IterMut<'_, T> {
+        (**self).iter_mut()
+    }
+}
+
+impl<T> Deref for ThinVec<T> {
+    type Target = [T];
+
+    #[inline]
+    fn deref(&self) -> &Self::Target {
+        self.0.items()
+    }
+}
+
+impl<T> DerefMut for ThinVec<T> {
+    #[inline]
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        self.0.items_mut()
+    }
+}
+
+impl<'a, T> IntoIterator for &'a ThinVec<T> {
+    type IntoIter = std::slice::Iter<'a, T>;
+    type Item = &'a T;
+
+    #[inline]
+    fn into_iter(self) -> Self::IntoIter {
+        self.iter()
+    }
+}
+
+impl<'a, T> IntoIterator for &'a mut ThinVec<T> {
+    type IntoIter = std::slice::IterMut<'a, T>;
+    type Item = &'a mut T;
+
+    #[inline]
+    fn into_iter(self) -> Self::IntoIter {
+        self.iter_mut()
+    }
+}
+
+impl<T: fmt::Debug> fmt::Debug for ThinVec<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(&**self).finish()
+    }
+}
+
+/// A [`ThinVec`] that requires no allocation for the empty case.
+#[derive(Clone, PartialEq, Eq, Hash)]
+pub struct EmptyOptimizedThinVec<T>(Option<ThinVec<T>>);
+
+impl<T> EmptyOptimizedThinVec<T> {
+    #[inline]
+    #[allow(clippy::should_implement_trait)]
+    pub fn from_iter<I>(values: I) -> Self
+    where
+        I: IntoIterator<Item = T>,
+        I::IntoIter: TrustedLen,
+    {
+        let values = values.into_iter();
+        if values.len() == 0 {
+            Self::empty()
+        } else {
+            Self(Some(ThinVec::from_iter(values)))
+        }
+    }
+
+    #[inline]
+    pub fn empty() -> Self {
+        Self(None)
+    }
+
+    #[inline]
+    pub fn len(&self) -> usize {
+        self.0.as_ref().map_or(0, ThinVec::len)
+    }
+
+    #[inline]
+    pub fn iter(&self) -> std::slice::Iter<'_, T> {
+        (**self).iter()
+    }
+
+    #[inline]
+    pub fn iter_mut(&mut self) -> std::slice::IterMut<'_, T> {
+        (**self).iter_mut()
+    }
+}
+
+impl<T> Default for EmptyOptimizedThinVec<T> {
+    #[inline]
+    fn default() -> Self {
+        Self::empty()
+    }
+}
+
+impl<T> Deref for EmptyOptimizedThinVec<T> {
+    type Target = [T];
+
+    #[inline]
+    fn deref(&self) -> &Self::Target {
+        self.0.as_deref().unwrap_or_default()
+    }
+}
+
+impl<T> DerefMut for EmptyOptimizedThinVec<T> {
+    #[inline]
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        self.0.as_deref_mut().unwrap_or_default()
+    }
+}
+
+impl<'a, T> IntoIterator for &'a EmptyOptimizedThinVec<T> {
+    type IntoIter = std::slice::Iter<'a, T>;
+    type Item = &'a T;
+
+    #[inline]
+    fn into_iter(self) -> Self::IntoIter {
+        self.iter()
+    }
+}
+
+impl<'a, T> IntoIterator for &'a mut EmptyOptimizedThinVec<T> {
+    type IntoIter = std::slice::IterMut<'a, T>;
+    type Item = &'a mut T;
+
+    #[inline]
+    fn into_iter(self) -> Self::IntoIter {
+        self.iter_mut()
+    }
+}
+
+impl<T: fmt::Debug> fmt::Debug for EmptyOptimizedThinVec<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(&**self).finish()
+    }
+}
+
+/// Syntax:
+///
+/// ```ignore
+/// thin_vec_with_header_struct! {
+///     pub new(pub(crate)) struct MyCoolStruct, MyCoolStructHeader {
+///         pub(crate) variable_length: [Ty],
+///         pub field1: CopyTy,
+///         pub field2: NonCopyTy; ref,
+///     }
+/// }
+/// ```
+#[doc(hidden)]
+#[macro_export]
+macro_rules! thin_vec_with_header_struct_ {
+    (@maybe_ref (ref) $($t:tt)*) => { &$($t)* };
+    (@maybe_ref () $($t:tt)*) => { $($t)* };
+    (
+        $vis:vis new($new_vis:vis) struct $struct:ident, $header:ident {
+            $items_vis:vis $items:ident : [$items_ty:ty],
+            $( $header_var_vis:vis $header_var:ident : $header_var_ty:ty $(; $ref:ident)?, )+
+        }
+    ) => {
+        #[derive(Debug, Clone, Eq, PartialEq, Hash)]
+        struct $header {
+            $( $header_var : $header_var_ty, )+
+        }
+
+        #[derive(Clone, Eq, PartialEq, Hash)]
+        $vis struct $struct($crate::thin_vec::ThinVecWithHeader<$header, $items_ty>);
+
+        impl $struct {
+            #[inline]
+            #[allow(unused)]
+            $new_vis fn new<I>(
+                $( $header_var: $header_var_ty, )+
+                $items: I,
+            ) -> Self
+            where
+                I: ::std::iter::IntoIterator<Item = $items_ty>,
+                I::IntoIter: $crate::thin_vec::TrustedLen,
+            {
+                Self($crate::thin_vec::ThinVecWithHeader::from_iter(
+                    $header { $( $header_var, )+ },
+                    $items,
+                ))
+            }
+
+            #[inline]
+            $items_vis fn $items(&self) -> &[$items_ty] {
+                self.0.items()
+            }
+
+            $(
+                #[inline]
+                $header_var_vis fn $header_var(&self) -> $crate::thin_vec_with_header_struct_!(@maybe_ref ($($ref)?) $header_var_ty) {
+                    $crate::thin_vec_with_header_struct_!(@maybe_ref ($($ref)?) self.0.header().$header_var)
+                }
+            )+
+        }
+
+        impl ::std::fmt::Debug for $struct {
+            fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
+                f.debug_struct(stringify!($struct))
+                    $( .field(stringify!($header_var), &self.$header_var()) )*
+                    .field(stringify!($items), &self.$items())
+                    .finish()
+            }
+        }
+    };
+}
+pub use crate::thin_vec_with_header_struct_ as thin_vec_with_header_struct;
diff --git a/xtask/src/tidy.rs b/xtask/src/tidy.rs
index 0268e2473c..c3d531344a 100644
--- a/xtask/src/tidy.rs
+++ b/xtask/src/tidy.rs
@@ -223,7 +223,7 @@ struct TidyDocs {
 impl TidyDocs {
     fn visit(&mut self, path: &Path, text: &str) {
         // Tests and diagnostic fixes don't need module level comments.
-        if is_exclude_dir(path, &["tests", "test_data", "fixes", "grammar", "ra-salsa"]) {
+        if is_exclude_dir(path, &["tests", "test_data", "fixes", "grammar", "ra-salsa", "stdx"]) {
             return;
         }
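
The size win asserted in path.rs comes from splitting `Path` so that the common case is a single interned pointer and only the rare case (generics or a type anchor) pays for extra storage. The following is a minimal standalone sketch of that layout trade-off; the type names are illustrative stand-ins, not the real hir-def definitions (the PR stores the rare-case payload in a `NormalPath` header+slice allocation rather than the boxed tuple used here):

    use std::mem::size_of;
    use std::sync::Arc;

    // Stand-ins for `Interned<ModPath>`, `GenericArgs`, `TypeRefId` (illustrative only).
    type Interned<T> = Arc<T>;
    #[allow(dead_code)]
    struct ModPath { segments: Vec<String> }
    #[allow(dead_code)]
    struct GenericArgs { has_self_type: bool }
    type TypeRefId = u32;

    // Old shape: every path carries the optional anchor and generic-args slots inline.
    #[allow(dead_code)]
    enum FatPath {
        Normal {
            type_anchor: Option<TypeRefId>,
            mod_path: Interned<ModPath>,
            generic_args: Option<Box<[Option<GenericArgs>]>>,
        },
    }

    // New shape: the common case is one pointer; the rare case takes one extra indirection
    // (the real `NormalPath` packs header + slice into a single allocation instead of a Box).
    #[allow(dead_code)]
    enum ThinPath {
        Bare(Interned<ModPath>),
        Normal(Box<(Option<TypeRefId>, Interned<ModPath>, Box<[Option<GenericArgs>]>)>),
    }

    fn main() {
        // On a typical 64-bit target this prints 32 for the fat layout and 16 for the thin one,
        // which is what the `size_of::<Path>() == 16` assertion in the diff checks for.
        println!("fat  = {}", size_of::<FatPath>());
        println!("thin = {}", size_of::<ThinPath>());
    }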
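
The single-allocation trick in `stdx::thin_vec` rests on `Layout::extend`: the header (plus the stored length) is extended with the item array, and the returned offset is where the items live relative to the base pointer. Below is a small runnable sketch of just that arithmetic, using placeholder header/item types rather than the real `ThinVecInner<Header>`; it mirrors the shape of `ThinVecWithHeader::layout` and `items_offset` but is not the crate's code:

    use std::alloc::Layout;

    // Placeholder header, roughly what `NormalPathHeader` carries (an anchor plus a pointer).
    #[allow(dead_code)]
    struct Header {
        type_anchor: Option<u32>,
        mod_path: usize,
    }

    // Prefix = (header, len), then `len` items, padded so one block satisfies both alignments.
    fn layout_for(len: usize) -> (Layout, usize) {
        let prefix = Layout::new::<(Header, usize)>();
        let (layout, items_offset) = prefix
            .extend(Layout::array::<u64>(len).expect("capacity overflow"))
            .expect("capacity overflow");
        (layout.pad_to_align(), items_offset)
    }

    fn main() {
        for len in [0, 1, 4] {
            let (layout, offset) = layout_for(len);
            // `offset` is what an `items_ptr()`-style accessor would add to the base pointer
            // before casting to `*mut Item`.
            println!("len={len}: size={} align={} items_at=+{offset}", layout.size(), layout.align());
        }
    }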