Shrink Path to 16 bytes

Thanks to the observation (supported by counting) that the vast majority of paths have neither generics nor a type anchor, and thanks to a new data structure `ThinVecWithHeader` that is essentially `(T, Box<[U]>)` but with the size of a single pointer, we are able to achieve this.

This (together with `ThinVecWithHeader`) also opens up the possibility of shrinking `TypeRef`, because most types are paths.
Chayim Refael Friedman 2024-10-20 00:50:18 +03:00
parent bccf0062b7
commit 061e5d7f71
13 changed files with 596 additions and 107 deletions
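
To make the size claim in the commit message concrete, here is a minimal standalone sketch (not part of the commit; `Naive` and `Thin` are stand-in types) contrasting the `(Header, Box<[Item]>)` layout with a single-thin-pointer layout like `ThinVecWithHeader`:

```rust
use std::mem::size_of;
use std::ptr::NonNull;

struct Header(u32);
struct Item(u32);

// Naive layout: a header next to a boxed slice (a fat pointer: data ptr + len).
struct Naive {
    header: Header,
    items: Box<[Item]>,
}

// Thin layout: one pointer to a heap block that stores the header, the length
// and the items inline. This is the idea behind `ThinVecWithHeader` below.
struct Thin {
    ptr: NonNull<u8>,
}

fn main() {
    assert_eq!(size_of::<Box<[Item]>>(), 2 * size_of::<usize>());
    assert!(size_of::<Naive>() >= 3 * size_of::<usize>());
    assert_eq!(size_of::<Thin>(), size_of::<usize>());
}
```

With the common-case `Path::BarePath` variant holding just an interned pointer and everything else hidden behind `NormalPath` (a `ThinVecWithHeader`), the whole `Path` enum fits in 16 bytes, as the new compile-time assertions in the diff below check.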

View file

@ -720,12 +720,8 @@ impl ExprCollector<'_> {
fn collect_expr_path(&mut self, e: ast::PathExpr) -> Option<(Path, HygieneId)> {
e.path().and_then(|path| {
let path = self.parse_path(path)?;
let Path::Normal { type_anchor, mod_path, generic_args } = &path else {
panic!("path parsing produced a non-normal path");
};
// Need to enable `mod_path.len() < 1` for `self`.
let may_be_variable =
type_anchor.is_none() && mod_path.len() <= 1 && generic_args.is_none();
let may_be_variable = matches!(&path, Path::BarePath(mod_path) if mod_path.len() <= 1);
let hygiene = if may_be_variable {
self.hygiene_id_for(e.syntax().text_range().start())
} else {
@ -797,7 +793,7 @@ impl ExprCollector<'_> {
ast::Expr::PathExpr(e) => {
let (path, hygiene) = self
.collect_expr_path(e.clone())
.map(|(path, hygiene)| (Pat::Path(Box::new(path)), hygiene))
.map(|(path, hygiene)| (Pat::Path(path), hygiene))
.unwrap_or((Pat::Missing, HygieneId::ROOT));
let pat_id = self.alloc_pat_from_expr(path, syntax_ptr);
if !hygiene.is_root() {
@ -1059,7 +1055,7 @@ impl ExprCollector<'_> {
syntax_ptr,
);
let none_arm = MatchArm {
pat: self.alloc_pat_desugared(Pat::Path(Box::new(option_none))),
pat: self.alloc_pat_desugared(Pat::Path(option_none)),
guard: None,
expr: self.alloc_expr(Expr::Break { expr: None, label: None }, syntax_ptr),
};
@ -1561,7 +1557,7 @@ impl ExprCollector<'_> {
Pat::Ref { pat, mutability }
}
ast::Pat::PathPat(p) => {
let path = p.path().and_then(|path| self.parse_path(path)).map(Box::new);
let path = p.path().and_then(|path| self.parse_path(path));
path.map(Pat::Path).unwrap_or(Pat::Missing)
}
ast::Pat::OrPat(p) => 'b: {
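
With the new representation, "no generics and no type anchor" is carried by the `Path::BarePath` variant itself, so the three-part `type_anchor`/`mod_path`/`generic_args` check in `collect_expr_path` collapses into a single `matches!`. A standalone sketch with simplified stand-in types (the real `ModPath` is interned and `Normal` carries a `NormalPath`):

```rust
// Stand-ins for the real hir-def types, just to show the shape of the check.
enum Path {
    BarePath(Vec<String>),            // no generics, no type anchor by construction
    Normal { has_type_anchor: bool }, // placeholder for the NormalPath payload
}

fn may_be_variable(path: &Path) -> bool {
    // `self` is represented with zero segments (hence `<= 1` rather than `== 1`,
    // per the removed "Need to enable `mod_path.len() < 1`" comment).
    matches!(path, Path::BarePath(segments) if segments.len() <= 1)
}

fn main() {
    assert!(may_be_variable(&Path::BarePath(vec!["x".into()])));
    assert!(!may_be_variable(&Path::Normal { has_type_anchor: false }));
}
```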

View file

@ -21,7 +21,7 @@ use crate::{
item_tree::{AttrOwner, FileItemTreeId, GenericModItem, GenericsItemTreeNode, ItemTree},
lower::LowerCtx,
nameres::{DefMap, MacroSubNs},
path::{AssociatedTypeBinding, GenericArg, GenericArgs, Path},
path::{AssociatedTypeBinding, GenericArg, GenericArgs, NormalPath, Path},
type_ref::{ConstRef, LifetimeRef, TypeBound, TypeRef, TypeRefId, TypesMap, TypesSourceMap},
AdtId, ConstParamId, GenericDefId, HasModule, ItemTreeLoc, LifetimeParamId,
LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId,
@ -788,19 +788,16 @@ fn copy_path(
to_source_map: &mut TypesSourceMap,
) -> Path {
match path {
Path::Normal { type_anchor, mod_path, generic_args } => {
let type_anchor = type_anchor
Path::BarePath(mod_path) => Path::BarePath(mod_path.clone()),
Path::Normal(path) => {
let type_anchor = path
.type_anchor()
.map(|type_ref| copy_type_ref(type_ref, from, from_source_map, to, to_source_map));
let mod_path = mod_path.clone();
let generic_args = generic_args.as_ref().map(|generic_args| {
generic_args
.iter()
.map(|generic_args| {
copy_generic_args(generic_args, from, from_source_map, to, to_source_map)
})
.collect()
let mod_path = path.mod_path().clone();
let generic_args = path.generic_args().iter().map(|generic_args| {
copy_generic_args(generic_args, from, from_source_map, to, to_source_map)
});
Path::Normal { type_anchor, mod_path, generic_args }
Path::Normal(NormalPath::new(type_anchor, mod_path, generic_args))
}
Path::LangItem(lang_item, name) => Path::LangItem(*lang_item, name.clone()),
}
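
The rewritten `copy_path` hands `NormalPath::new` a bare `.map(..)` iterator instead of first collecting into a `Box<[_]>`. This works because the constructor (via `ThinVecWithHeader::from_iter`, added later in this commit) accepts any `TrustedLen` iterator, and a `Map` over a slice iterator is trusted, so the destination block is sized with a single allocation. A standalone sketch of the same pattern using the public `ThinVec` wrapper (assumes the `stdx` crate is available as a dependency):

```rust
use stdx::thin_vec::ThinVec;

fn main() {
    let source: &[Option<u32>] = &[Some(1), None, Some(3)];
    // `source.iter().map(..)` is `Map<slice::Iter<_>, _>`, which implements
    // `TrustedLen`, so this performs exactly one allocation of the right size.
    let copied = ThinVec::from_iter(source.iter().map(|arg| arg.map(|n| n + 1)));
    assert_eq!(&*copied, &[Some(2), None, Some(4)]);
}
```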

View file

@ -583,7 +583,7 @@ pub enum Pat {
suffix: Box<[PatId]>,
},
/// This might refer to a variable if a single segment path (specifically, on destructuring assignment).
Path(Box<Path>),
Path(Path),
Lit(ExprId),
Bind {
id: BindingId,

View file

@ -201,7 +201,7 @@ pub enum TypeBound {
}
#[cfg(target_pointer_width = "64")]
const _: [(); 48] = [(); ::std::mem::size_of::<TypeBound>()];
const _: [(); 32] = [(); ::std::mem::size_of::<TypeBound>()];
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub enum UseArgRef {

View file

@ -14,6 +14,7 @@ use crate::{
use hir_expand::name::Name;
use intern::Interned;
use span::Edition;
use stdx::thin_vec::thin_vec_with_header_struct;
use syntax::ast;
pub use hir_expand::mod_path::{path, ModPath, PathKind};
@ -47,20 +48,33 @@ impl Display for ImportAliasDisplay<'_> {
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Path {
/// A normal path
Normal {
/// Type based path like `<T>::foo`.
/// Note that paths like `<Type as Trait>::foo` are desugared to `Trait::<Self=Type>::foo`.
type_anchor: Option<TypeRefId>,
mod_path: Interned<ModPath>,
/// Invariant: the same len as `self.mod_path.segments` or `None` if all segments are `None`.
generic_args: Option<Box<[Option<GenericArgs>]>>,
},
/// `BarePath` is used when the path has neither generics nor type anchor, since the vast majority of paths
/// are in this category, and splitting `Path` this way allows it to be more thin. When the path has either generics
/// or type anchor, it is `Path::Normal` with the generics filled with `None` even if there are none (practically
/// this is not a problem since many more paths have generics than a type anchor).
BarePath(Interned<ModPath>),
/// `Path::Normal` may have empty generics and type anchor (but generic args will be filled with `None`).
Normal(NormalPath),
/// A link to a lang item. It is used in desugaring of things like `it?`. We can show these
/// links via a normal path since they might be private and not accessible in the usage place.
LangItem(LangItemTarget, Option<Name>),
}
// This type is being used a lot, make sure it doesn't grow unintentionally.
#[cfg(target_arch = "x86_64")]
const _: () = {
assert!(size_of::<Path>() == 16);
assert!(size_of::<Option<Path>>() == 16);
};
thin_vec_with_header_struct! {
pub new(pub(crate)) struct NormalPath, NormalPathHeader {
pub generic_args: [Option<GenericArgs>],
pub type_anchor: Option<TypeRefId>,
pub mod_path: Interned<ModPath>; ref,
}
}
/// Generic arguments to a path segment (e.g. the `i32` in `Option<i32>`). This
/// also includes bindings of associated types, like in `Iterator<Item = Foo>`.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@ -112,50 +126,49 @@ impl Path {
}
/// Converts a known mod path to `Path`.
pub fn from_known_path(
path: ModPath,
generic_args: impl Into<Box<[Option<GenericArgs>]>>,
) -> Path {
let generic_args = generic_args.into();
assert_eq!(path.len(), generic_args.len());
Path::Normal {
type_anchor: None,
mod_path: Interned::new(path),
generic_args: Some(generic_args),
}
pub fn from_known_path(path: ModPath, generic_args: Vec<Option<GenericArgs>>) -> Path {
Path::Normal(NormalPath::new(None, Interned::new(path), generic_args))
}
/// Converts a known mod path to `Path`.
pub fn from_known_path_with_no_generic(path: ModPath) -> Path {
Path::Normal { type_anchor: None, mod_path: Interned::new(path), generic_args: None }
Path::BarePath(Interned::new(path))
}
#[inline]
pub fn kind(&self) -> &PathKind {
match self {
Path::Normal { mod_path, .. } => &mod_path.kind,
Path::BarePath(mod_path) => &mod_path.kind,
Path::Normal(path) => &path.mod_path().kind,
Path::LangItem(..) => &PathKind::Abs,
}
}
#[inline]
pub fn type_anchor(&self) -> Option<TypeRefId> {
match self {
Path::Normal { type_anchor, .. } => *type_anchor,
Path::LangItem(..) => None,
Path::Normal(path) => path.type_anchor(),
Path::LangItem(..) | Path::BarePath(_) => None,
}
}
#[inline]
pub fn generic_args(&self) -> Option<&[Option<GenericArgs>]> {
match self {
Path::Normal(path) => Some(path.generic_args()),
Path::LangItem(..) | Path::BarePath(_) => None,
}
}
pub fn segments(&self) -> PathSegments<'_> {
match self {
Path::Normal { mod_path, generic_args, .. } => {
let s = PathSegments {
segments: mod_path.segments(),
generic_args: generic_args.as_deref(),
};
if let Some(generic_args) = s.generic_args {
assert_eq!(s.segments.len(), generic_args.len());
}
s
Path::BarePath(mod_path) => {
PathSegments { segments: mod_path.segments(), generic_args: None }
}
Path::Normal(path) => PathSegments {
segments: path.mod_path().segments(),
generic_args: Some(path.generic_args()),
},
Path::LangItem(_, seg) => PathSegments {
segments: seg.as_ref().map_or(&[], |seg| std::slice::from_ref(seg)),
generic_args: None,
@ -165,34 +178,55 @@ impl Path {
pub fn mod_path(&self) -> Option<&ModPath> {
match self {
Path::Normal { mod_path, .. } => Some(mod_path),
Path::BarePath(mod_path) => Some(mod_path),
Path::Normal(path) => Some(path.mod_path()),
Path::LangItem(..) => None,
}
}
pub fn qualifier(&self) -> Option<Path> {
let Path::Normal { mod_path, generic_args, type_anchor } = self else {
return None;
};
if mod_path.is_ident() {
return None;
match self {
Path::BarePath(mod_path) => {
if mod_path.is_ident() {
return None;
}
Some(Path::BarePath(Interned::new(ModPath::from_segments(
mod_path.kind,
mod_path.segments()[..mod_path.segments().len() - 1].iter().cloned(),
))))
}
Path::Normal(path) => {
let mod_path = path.mod_path();
if mod_path.is_ident() {
return None;
}
let type_anchor = path.type_anchor();
let generic_args = path.generic_args();
let qualifier_mod_path = Interned::new(ModPath::from_segments(
mod_path.kind,
mod_path.segments()[..mod_path.segments().len() - 1].iter().cloned(),
));
let qualifier_generic_args = &generic_args[..generic_args.len() - 1];
Some(Path::Normal(NormalPath::new(
type_anchor,
qualifier_mod_path,
qualifier_generic_args.iter().cloned(),
)))
}
Path::LangItem(..) => None,
}
let res = Path::Normal {
type_anchor: *type_anchor,
mod_path: Interned::new(ModPath::from_segments(
mod_path.kind,
mod_path.segments()[..mod_path.segments().len() - 1].iter().cloned(),
)),
generic_args: generic_args.as_ref().map(|it| it[..it.len() - 1].to_vec().into()),
};
Some(res)
}
pub fn is_self_type(&self) -> bool {
let Path::Normal { mod_path, generic_args, type_anchor } = self else {
return false;
};
type_anchor.is_none() && generic_args.as_deref().is_none() && mod_path.is_Self()
match self {
Path::BarePath(mod_path) => mod_path.is_Self(),
Path::Normal(path) => {
path.type_anchor().is_none()
&& path.mod_path().is_Self()
&& path.generic_args().iter().all(|args| args.is_none())
}
Path::LangItem(..) => false,
}
}
}
@ -268,16 +302,6 @@ impl GenericArgs {
impl From<Name> for Path {
fn from(name: Name) -> Path {
Path::Normal {
type_anchor: None,
mod_path: Interned::new(ModPath::from_segments(PathKind::Plain, iter::once(name))),
generic_args: None,
}
}
}
impl From<Name> for Box<Path> {
fn from(name: Name) -> Box<Path> {
Box::new(Path::from(name))
Path::BarePath(Interned::new(ModPath::from_segments(PathKind::Plain, iter::once(name))))
}
}
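
The `const _` assertions in this file pin the new representation at 16 bytes. As a standalone model of why that works out (toy types only, not the layout of the real `Interned<ModPath>`, `NormalPath`, or `LangItemTarget`): each variant's payload now fits in a single word, either a pointer or a pair of small ids, so even with an explicit tag the enum stays at two words on 64-bit targets, and the same guard idiom keeps it from growing unnoticed:

```rust
use std::mem::size_of;
use std::num::NonZeroU32;
use std::ptr::NonNull;

struct ModPathData;    // stand-in for the interned ModPath payload
struct NormalPathData; // stand-in for the ThinVecWithHeader allocation

enum ToyPath {
    BarePath(NonNull<ModPathData>),           // one pointer
    Normal(NonNull<NormalPathData>),          // one pointer to header + args
    LangItem(NonZeroU32, Option<NonZeroU32>), // small ids
}

// Same guard idiom as the commit: compilation fails if the type ever grows.
#[cfg(target_pointer_width = "64")]
const _: () = {
    assert!(size_of::<ToyPath>() == 16);
    assert!(size_of::<Option<ToyPath>>() == 16);
};

fn main() {}
```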

View file

@ -2,7 +2,7 @@
use std::iter;
use crate::{lower::LowerCtx, type_ref::ConstRef};
use crate::{lower::LowerCtx, path::NormalPath, type_ref::ConstRef};
use hir_expand::{
mod_path::resolve_crate_root,
@ -74,11 +74,9 @@ pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option<Path
}
// <T as Trait<A>>::Foo desugars to Trait<Self=T, A>::Foo
Some(trait_ref) => {
let Path::Normal { mod_path, generic_args: path_generic_args, .. } =
Path::from_src(ctx, trait_ref.path()?)?
else {
return None;
};
let path = Path::from_src(ctx, trait_ref.path()?)?;
let mod_path = path.mod_path()?;
let path_generic_args = path.generic_args();
let num_segments = mod_path.segments().len();
kind = mod_path.kind;
@ -136,7 +134,7 @@ pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option<Path
};
}
segments.reverse();
if !generic_args.is_empty() {
if !generic_args.is_empty() || type_anchor.is_some() {
generic_args.resize(segments.len(), None);
generic_args.reverse();
}
@ -165,11 +163,11 @@ pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option<Path
}
let mod_path = Interned::new(ModPath::from_segments(kind, segments));
return Some(Path::Normal {
type_anchor,
mod_path,
generic_args: if generic_args.is_empty() { None } else { Some(generic_args.into()) },
});
if type_anchor.is_none() && generic_args.is_empty() {
return Some(Path::BarePath(mod_path));
} else {
return Some(Path::Normal(NormalPath::new(type_anchor, mod_path, generic_args)));
}
fn qualifier(path: &ast::Path) -> Option<ast::Path> {
if let Some(q) = path.qualifier() {
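
One behavioral detail in the hunk above: the `generic_args` padding now also runs when only a type anchor is present (`|| type_anchor.is_some()`), because `Path::Normal` keeps exactly one, possibly `None`, slot per segment (the invariant documented on `Path::Normal`). Segments and their argument lists are collected from the last segment inward, so the padding plus the two reversals line everything back up with source order. A standalone sketch of just that bookkeeping, with strings standing in for real segments and `GenericArgs`:

```rust
fn main() {
    // Input path: `core::option::Option<i32>`, collected back to front,
    // so both lists start out reversed.
    let mut segments = vec!["Option", "option", "core"];
    let mut generic_args = vec![Some("<i32>")]; // only as many entries as were seen

    // Pad so every segment has a slot, then flip both into source order.
    generic_args.resize(segments.len(), None);
    segments.reverse();
    generic_args.reverse();

    assert_eq!(segments, ["core", "option", "Option"]);
    assert_eq!(generic_args, [None, None, Some("<i32>")]);
}
```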

View file

@ -167,7 +167,8 @@ impl Resolver {
path: &Path,
) -> Option<(TypeNs, Option<usize>, Option<ImportOrExternCrate>)> {
let path = match path {
Path::Normal { mod_path, .. } => mod_path,
Path::BarePath(mod_path) => mod_path,
Path::Normal(it) => it.mod_path(),
Path::LangItem(l, seg) => {
let type_ns = match *l {
LangItemTarget::Union(it) => TypeNs::AdtId(it.into()),
@ -265,7 +266,8 @@ impl Resolver {
mut hygiene_id: HygieneId,
) -> Option<ResolveValueResult> {
let path = match path {
Path::Normal { mod_path, .. } => mod_path,
Path::BarePath(mod_path) => mod_path,
Path::Normal(it) => it.mod_path(),
Path::LangItem(l, None) => {
return Some(ResolveValueResult::ValueNs(
match *l {

View file

@ -198,7 +198,7 @@ impl InferenceContext<'_> {
match &self.body[expr] {
// Lang item paths cannot currently be local variables or statics.
Expr::Path(Path::LangItem(_, _)) => false,
Expr::Path(Path::Normal { type_anchor: Some(_), .. }) => false,
Expr::Path(Path::Normal(path)) => path.type_anchor().is_none(),
Expr::Path(path) => self
.resolver
.resolve_path_in_value_ns_fully(
@ -1214,7 +1214,7 @@ impl InferenceContext<'_> {
let ty = match self.infer_path(path, id) {
Some(ty) => ty,
None => {
if matches!(path, Path::Normal { mod_path, .. } if mod_path.is_ident() || mod_path.is_self())
if path.mod_path().is_some_and(|mod_path| mod_path.is_ident() || mod_path.is_self())
{
self.push_diagnostic(InferenceDiagnostic::UnresolvedIdent { id });
}

View file

@ -222,7 +222,7 @@ impl InferenceContext<'_> {
let _d;
let (resolved_segment, remaining_segments) = match path {
Path::Normal { .. } => {
Path::Normal { .. } | Path::BarePath(_) => {
assert!(remaining_index < path.segments().len());
(
path.segments().get(remaining_index - 1).unwrap(),

View file

@ -1373,12 +1373,11 @@ impl<'ctx> MirLowerCtx<'ctx> {
),
};
let edition = self.edition();
let unresolved_name = || {
MirLowerError::unresolved_path(self.db, c.as_ref(), edition, &self.body.types)
};
let unresolved_name =
|| MirLowerError::unresolved_path(self.db, c, edition, &self.body.types);
let pr = self
.resolver
.resolve_path_in_value_ns(self.db.upcast(), c.as_ref(), HygieneId::ROOT)
.resolve_path_in_value_ns(self.db.upcast(), c, HygieneId::ROOT)
.ok_or_else(unresolved_name)?;
match pr {
ResolveValueResult::ValueNs(v, _) => {

View file

@ -10,6 +10,7 @@ pub mod non_empty_vec;
pub mod panic_context;
pub mod process;
pub mod rand;
pub mod thin_vec;
pub mod thread;
pub use always_assert::{always, never};

crates/stdx/src/thin_vec.rs (new file, 472 lines)
View file

@ -0,0 +1,472 @@
use std::alloc::{dealloc, handle_alloc_error, Layout};
use std::fmt;
use std::hash::{Hash, Hasher};
use std::marker::PhantomData;
use std::ops::{Deref, DerefMut};
use std::ptr::{addr_of_mut, slice_from_raw_parts_mut, NonNull};
/// A type that is functionally equivalent to `(Header, Box<[Item]>)`,
/// but all data is stored in one heap allocation and the pointer is thin,
/// so the whole thing's size is like a pointer.
pub struct ThinVecWithHeader<Header, Item> {
/// INVARIANT: Points to a valid heap allocation that contains `ThinVecInner<Header>`,
/// followed by (suitably aligned) `len` `Item`s.
ptr: NonNull<ThinVecInner<Header>>,
_marker: PhantomData<(Header, Box<[Item]>)>,
}
// SAFETY: We essentially own both the header and the items.
unsafe impl<Header: Send, Item: Send> Send for ThinVecWithHeader<Header, Item> {}
unsafe impl<Header: Sync, Item: Sync> Sync for ThinVecWithHeader<Header, Item> {}
#[derive(Clone)]
struct ThinVecInner<Header> {
header: Header,
len: usize,
}
impl<Header, Item> ThinVecWithHeader<Header, Item> {
/// # Safety
///
/// The iterator must produce `len` elements.
#[inline]
unsafe fn from_trusted_len_iter(
header: Header,
len: usize,
items: impl Iterator<Item = Item>,
) -> Self {
let (ptr, layout, items_offset) = Self::allocate(len);
struct DeallocGuard(*mut u8, Layout);
impl Drop for DeallocGuard {
fn drop(&mut self) {
// SAFETY: We allocated this above.
unsafe {
dealloc(self.0, self.1);
}
}
}
let _dealloc_guard = DeallocGuard(ptr.as_ptr().cast::<u8>(), layout);
// INVARIANT: Between `0..1` there are only initialized items.
struct ItemsGuard<Item>(*mut Item, *mut Item);
impl<Item> Drop for ItemsGuard<Item> {
fn drop(&mut self) {
// SAFETY: Our invariant.
unsafe {
slice_from_raw_parts_mut(self.0, self.1.offset_from(self.0) as usize)
.drop_in_place();
}
}
}
// SAFETY: We allocated enough space.
let mut items_ptr = unsafe { ptr.as_ptr().byte_add(items_offset).cast::<Item>() };
// INVARIANT: There are zero elements in this range.
let mut items_guard = ItemsGuard(items_ptr, items_ptr);
items.for_each(|item| {
// SAFETY: Our precondition guarantees we won't get more than `len` items, and we allocated
// enough space for `len` items.
unsafe {
items_ptr.write(item);
items_ptr = items_ptr.add(1);
}
// INVARIANT: We just initialized this item.
items_guard.1 = items_ptr;
});
// SAFETY: We allocated enough space.
unsafe {
ptr.write(ThinVecInner { header, len });
}
std::mem::forget(items_guard);
std::mem::forget(_dealloc_guard);
// INVARIANT: We allocated and initialized all fields correctly.
Self { ptr, _marker: PhantomData }
}
#[inline]
fn allocate(len: usize) -> (NonNull<ThinVecInner<Header>>, Layout, usize) {
let (layout, items_offset) = Self::layout(len);
// SAFETY: We always have `len`, so our allocation cannot be zero-sized.
let ptr = unsafe { std::alloc::alloc(layout).cast::<ThinVecInner<Header>>() };
let Some(ptr) = NonNull::<ThinVecInner<Header>>::new(ptr) else {
handle_alloc_error(layout);
};
(ptr, layout, items_offset)
}
#[inline]
#[allow(clippy::should_implement_trait)]
pub fn from_iter<I>(header: Header, items: I) -> Self
where
I: IntoIterator,
I::IntoIter: TrustedLen<Item = Item>,
{
let items = items.into_iter();
// SAFETY: `TrustedLen` guarantees the iterator length is exact.
unsafe { Self::from_trusted_len_iter(header, items.len(), items) }
}
#[inline]
fn items_offset(&self) -> usize {
// SAFETY: We `pad_to_align()` in `layout()`, so at most we're accessing past the end of the allocation,
// which is allowed.
unsafe {
Layout::new::<ThinVecInner<Header>>().extend(Layout::new::<Item>()).unwrap_unchecked().1
}
}
#[inline]
fn header_and_len(&self) -> &ThinVecInner<Header> {
// SAFETY: By `ptr`'s invariant, it is correctly allocated and initialized.
unsafe { &*self.ptr.as_ptr() }
}
#[inline]
fn items_ptr(&self) -> *mut [Item] {
let len = self.header_and_len().len;
// SAFETY: `items_offset()` returns the correct offset of the items, where they are allocated.
let ptr = unsafe { self.ptr.as_ptr().byte_add(self.items_offset()).cast::<Item>() };
slice_from_raw_parts_mut(ptr, len)
}
#[inline]
pub fn header(&self) -> &Header {
&self.header_and_len().header
}
#[inline]
pub fn header_mut(&mut self) -> &mut Header {
// SAFETY: By `ptr`'s invariant, it is correctly allocated and initialized.
unsafe { &mut *addr_of_mut!((*self.ptr.as_ptr()).header) }
}
#[inline]
pub fn items(&self) -> &[Item] {
// SAFETY: `items_ptr()` gives a valid pointer.
unsafe { &*self.items_ptr() }
}
#[inline]
pub fn items_mut(&mut self) -> &mut [Item] {
// SAFETY: `items_ptr()` gives a valid pointer.
unsafe { &mut *self.items_ptr() }
}
#[inline]
pub fn len(&self) -> usize {
self.header_and_len().len
}
#[inline]
fn layout(len: usize) -> (Layout, usize) {
let (layout, items_offset) = Layout::new::<ThinVecInner<Header>>()
.extend(Layout::array::<Item>(len).expect("too big `ThinVec` requested"))
.expect("too big `ThinVec` requested");
let layout = layout.pad_to_align();
(layout, items_offset)
}
}
/// # Safety
///
/// The length reported must be exactly the number of items yielded.
pub unsafe trait TrustedLen: ExactSizeIterator {}
unsafe impl<T> TrustedLen for std::vec::IntoIter<T> {}
unsafe impl<T> TrustedLen for std::slice::Iter<'_, T> {}
unsafe impl<'a, T: Clone + 'a, I: TrustedLen<Item = &'a T>> TrustedLen for std::iter::Cloned<I> {}
unsafe impl<T, I: TrustedLen, F: FnMut(I::Item) -> T> TrustedLen for std::iter::Map<I, F> {}
unsafe impl<T> TrustedLen for std::vec::Drain<'_, T> {}
unsafe impl<T, const N: usize> TrustedLen for std::array::IntoIter<T, N> {}
impl<Header: Clone, Item: Clone> Clone for ThinVecWithHeader<Header, Item> {
#[inline]
fn clone(&self) -> Self {
Self::from_iter(self.header().clone(), self.items().iter().cloned())
}
}
impl<Header, Item> Drop for ThinVecWithHeader<Header, Item> {
#[inline]
fn drop(&mut self) {
// This must come before we drop `header`, because after that we cannot make a reference to it in `len()`.
let len = self.len();
// SAFETY: The contents are allocated and initialized.
unsafe {
addr_of_mut!((*self.ptr.as_ptr()).header).drop_in_place();
self.items_ptr().drop_in_place();
}
let (layout, _) = Self::layout(len);
// SAFETY: This was allocated in `new()` with the same layout calculation.
unsafe {
dealloc(self.ptr.as_ptr().cast::<u8>(), layout);
}
}
}
impl<Header: fmt::Debug, Item: fmt::Debug> fmt::Debug for ThinVecWithHeader<Header, Item> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("ThinVecWithHeader")
.field("header", self.header())
.field("items", &self.items())
.finish()
}
}
impl<Header: PartialEq, Item: PartialEq> PartialEq for ThinVecWithHeader<Header, Item> {
#[inline]
fn eq(&self, other: &Self) -> bool {
self.header() == other.header() && self.items() == other.items()
}
}
impl<Header: Eq, Item: Eq> Eq for ThinVecWithHeader<Header, Item> {}
impl<Header: Hash, Item: Hash> Hash for ThinVecWithHeader<Header, Item> {
#[inline]
fn hash<H: Hasher>(&self, state: &mut H) {
self.header().hash(state);
self.items().hash(state);
}
}
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct ThinVec<T>(ThinVecWithHeader<(), T>);
impl<T> ThinVec<T> {
#[inline]
#[allow(clippy::should_implement_trait)]
pub fn from_iter<I>(values: I) -> Self
where
I: IntoIterator,
I::IntoIter: TrustedLen<Item = T>,
{
Self(ThinVecWithHeader::from_iter((), values))
}
#[inline]
pub fn len(&self) -> usize {
self.0.len()
}
#[inline]
pub fn iter(&self) -> std::slice::Iter<'_, T> {
(**self).iter()
}
#[inline]
pub fn iter_mut(&mut self) -> std::slice::IterMut<'_, T> {
(**self).iter_mut()
}
}
impl<T> Deref for ThinVec<T> {
type Target = [T];
#[inline]
fn deref(&self) -> &Self::Target {
self.0.items()
}
}
impl<T> DerefMut for ThinVec<T> {
#[inline]
fn deref_mut(&mut self) -> &mut Self::Target {
self.0.items_mut()
}
}
impl<'a, T> IntoIterator for &'a ThinVec<T> {
type IntoIter = std::slice::Iter<'a, T>;
type Item = &'a T;
#[inline]
fn into_iter(self) -> Self::IntoIter {
self.iter()
}
}
impl<'a, T> IntoIterator for &'a mut ThinVec<T> {
type IntoIter = std::slice::IterMut<'a, T>;
type Item = &'a mut T;
#[inline]
fn into_iter(self) -> Self::IntoIter {
self.iter_mut()
}
}
impl<T: fmt::Debug> fmt::Debug for ThinVec<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_list().entries(&**self).finish()
}
}
/// A [`ThinVec`] that requires no allocation for the empty case.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct EmptyOptimizedThinVec<T>(Option<ThinVec<T>>);
impl<T> EmptyOptimizedThinVec<T> {
#[inline]
#[allow(clippy::should_implement_trait)]
pub fn from_iter<I>(values: I) -> Self
where
I: IntoIterator,
I::IntoIter: TrustedLen<Item = T>,
{
let values = values.into_iter();
if values.len() == 0 {
Self::empty()
} else {
Self(Some(ThinVec::from_iter(values)))
}
}
#[inline]
pub fn empty() -> Self {
Self(None)
}
#[inline]
pub fn len(&self) -> usize {
self.0.as_ref().map_or(0, ThinVec::len)
}
#[inline]
pub fn iter(&self) -> std::slice::Iter<'_, T> {
(**self).iter()
}
#[inline]
pub fn iter_mut(&mut self) -> std::slice::IterMut<'_, T> {
(**self).iter_mut()
}
}
impl<T> Default for EmptyOptimizedThinVec<T> {
#[inline]
fn default() -> Self {
Self::empty()
}
}
impl<T> Deref for EmptyOptimizedThinVec<T> {
type Target = [T];
#[inline]
fn deref(&self) -> &Self::Target {
self.0.as_deref().unwrap_or_default()
}
}
impl<T> DerefMut for EmptyOptimizedThinVec<T> {
#[inline]
fn deref_mut(&mut self) -> &mut Self::Target {
self.0.as_deref_mut().unwrap_or_default()
}
}
impl<'a, T> IntoIterator for &'a EmptyOptimizedThinVec<T> {
type IntoIter = std::slice::Iter<'a, T>;
type Item = &'a T;
#[inline]
fn into_iter(self) -> Self::IntoIter {
self.iter()
}
}
impl<'a, T> IntoIterator for &'a mut EmptyOptimizedThinVec<T> {
type IntoIter = std::slice::IterMut<'a, T>;
type Item = &'a mut T;
#[inline]
fn into_iter(self) -> Self::IntoIter {
self.iter_mut()
}
}
impl<T: fmt::Debug> fmt::Debug for EmptyOptimizedThinVec<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_list().entries(&**self).finish()
}
}
/// Syntax:
///
/// ```ignore
/// thin_vec_with_header_struct! {
/// pub new(pub(crate)) struct MyCoolStruct, MyCoolStructHeader {
/// pub(crate) variable_length: [Ty],
/// pub field1: CopyTy,
/// pub field2: NonCopyTy; ref,
/// }
/// }
/// ```
#[doc(hidden)]
#[macro_export]
macro_rules! thin_vec_with_header_struct_ {
(@maybe_ref (ref) $($t:tt)*) => { &$($t)* };
(@maybe_ref () $($t:tt)*) => { $($t)* };
(
$vis:vis new($new_vis:vis) struct $struct:ident, $header:ident {
$items_vis:vis $items:ident : [$items_ty:ty],
$( $header_var_vis:vis $header_var:ident : $header_var_ty:ty $(; $ref:ident)?, )+
}
) => {
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
struct $header {
$( $header_var : $header_var_ty, )+
}
#[derive(Clone, Eq, PartialEq, Hash)]
$vis struct $struct($crate::thin_vec::ThinVecWithHeader<$header, $items_ty>);
impl $struct {
#[inline]
#[allow(unused)]
$new_vis fn new<I>(
$( $header_var: $header_var_ty, )+
$items: I,
) -> Self
where
I: ::std::iter::IntoIterator,
I::IntoIter: $crate::thin_vec::TrustedLen<Item = $items_ty>,
{
Self($crate::thin_vec::ThinVecWithHeader::from_iter(
$header { $( $header_var, )+ },
$items,
))
}
#[inline]
$items_vis fn $items(&self) -> &[$items_ty] {
self.0.items()
}
$(
#[inline]
$header_var_vis fn $header_var(&self) -> $crate::thin_vec_with_header_struct_!(@maybe_ref ($($ref)?) $header_var_ty) {
$crate::thin_vec_with_header_struct_!(@maybe_ref ($($ref)?) self.0.header().$header_var)
}
)+
}
impl ::std::fmt::Debug for $struct {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
f.debug_struct(stringify!($struct))
$( .field(stringify!($header_var), &self.$header_var()) )*
.field(stringify!($items), &self.$items())
.finish()
}
}
};
}
pub use crate::thin_vec_with_header_struct_ as thin_vec_with_header_struct;
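
A hypothetical usage sketch of the new `stdx::thin_vec` API (the `Labelled` struct and its field names are invented for the example; `ThinVecWithHeader`, `EmptyOptimizedThinVec` and the macro are the items defined above), assuming `stdx` is available as a dependency:

```rust
use std::mem::{size_of, size_of_val};
use stdx::thin_vec::{thin_vec_with_header_struct, EmptyOptimizedThinVec, ThinVecWithHeader};

// Invented example type: a named list of u32s, declared the same way NormalPath is.
thin_vec_with_header_struct! {
    pub new(pub) struct Labelled, LabelledHeader {
        pub items: [u32],
        pub name: String; ref,
    }
}

fn main() {
    // One heap block stores the header, the length and the items;
    // the handle itself is a single thin pointer.
    let v = ThinVecWithHeader::from_iter("lens".to_owned(), vec![1u32, 2, 3]);
    assert_eq!(v.header(), "lens");
    assert_eq!(v.items(), &[1, 2, 3]);
    assert_eq!(size_of_val(&v), size_of::<*const u8>());

    // The macro wraps the same layout behind named accessors.
    let l = Labelled::new("sides".to_owned(), vec![4u32, 5]);
    assert_eq!(l.name(), "sides");
    assert_eq!(l.items(), &[4, 5]);

    // The empty-optimized wrapper skips the allocation entirely when empty.
    let e = EmptyOptimizedThinVec::from_iter(Vec::<u8>::new());
    assert_eq!(e.len(), 0);
}
```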

View file

@ -223,7 +223,7 @@ struct TidyDocs {
impl TidyDocs {
fn visit(&mut self, path: &Path, text: &str) {
// Tests and diagnostic fixes don't need module level comments.
if is_exclude_dir(path, &["tests", "test_data", "fixes", "grammar", "ra-salsa"]) {
if is_exclude_dir(path, &["tests", "test_data", "fixes", "grammar", "ra-salsa", "stdx"]) {
return;
}