Implicit format args support

This commit is contained in:
Lukas Wirth 2023-12-05 15:42:39 +01:00
parent 5b8e386bae
commit d2cd30007c
37 changed files with 615 additions and 174 deletions

View file

@ -95,6 +95,8 @@ pub struct BodySourceMap {
field_map_back: FxHashMap<ExprId, FieldSource>, field_map_back: FxHashMap<ExprId, FieldSource>,
pat_field_map_back: FxHashMap<PatId, PatFieldSource>, pat_field_map_back: FxHashMap<PatId, PatFieldSource>,
format_args_template_map: FxHashMap<ExprId, Vec<(syntax::TextRange, Name)>>,
expansions: FxHashMap<InFile<AstPtr<ast::MacroCall>>, HirFileId>, expansions: FxHashMap<InFile<AstPtr<ast::MacroCall>>, HirFileId>,
/// Diagnostics accumulated during body lowering. These contain `AstPtr`s and so are stored in /// Diagnostics accumulated during body lowering. These contain `AstPtr`s and so are stored in
@ -387,6 +389,14 @@ impl BodySourceMap {
self.expr_map.get(&src).copied() self.expr_map.get(&src).copied()
} }
pub fn implicit_format_args(
&self,
node: InFile<&ast::FormatArgsExpr>,
) -> Option<&[(syntax::TextRange, Name)]> {
let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::Expr>);
self.format_args_template_map.get(self.expr_map.get(&src)?).map(std::ops::Deref::deref)
}
/// Get a reference to the body source map's diagnostics. /// Get a reference to the body source map's diagnostics.
pub fn diagnostics(&self) -> &[BodyDiagnostic] { pub fn diagnostics(&self) -> &[BodyDiagnostic] {
&self.diagnostics &self.diagnostics
@ -403,8 +413,10 @@ impl BodySourceMap {
field_map_back, field_map_back,
pat_field_map_back, pat_field_map_back,
expansions, expansions,
format_args_template_map,
diagnostics, diagnostics,
} = self; } = self;
format_args_template_map.shrink_to_fit();
expr_map.shrink_to_fit(); expr_map.shrink_to_fit();
expr_map_back.shrink_to_fit(); expr_map_back.shrink_to_fit();
pat_map.shrink_to_fit(); pat_map.shrink_to_fit();

View file

@ -1597,12 +1597,20 @@ impl ExprCollector<'_> {
}); });
let template = f.template(); let template = f.template();
let fmt_snippet = template.as_ref().map(ToString::to_string); let fmt_snippet = template.as_ref().map(ToString::to_string);
let mut mappings = vec![];
let fmt = match template.and_then(|it| self.expand_macros_to_string(it)) { let fmt = match template.and_then(|it| self.expand_macros_to_string(it)) {
Some((s, is_direct_literal)) => { Some((s, is_direct_literal)) => format_args::parse(
format_args::parse(&s, fmt_snippet, args, is_direct_literal, |name| { &s,
self.alloc_expr_desugared(Expr::Path(Path::from(name))) fmt_snippet,
}) args,
} is_direct_literal,
|name| self.alloc_expr_desugared(Expr::Path(Path::from(name))),
|name, span| {
if let Some(span) = span {
mappings.push((span, name.clone()))
}
},
),
None => FormatArgs { template: Default::default(), arguments: args.finish() }, None => FormatArgs { template: Default::default(), arguments: args.finish() },
}; };
@ -1746,14 +1754,16 @@ impl ExprCollector<'_> {
tail: Some(unsafe_arg_new), tail: Some(unsafe_arg_new),
}); });
self.alloc_expr( let idx = self.alloc_expr(
Expr::Call { Expr::Call {
callee: new_v1_formatted, callee: new_v1_formatted,
args: Box::new([lit_pieces, args, format_options, unsafe_arg_new]), args: Box::new([lit_pieces, args, format_options, unsafe_arg_new]),
is_assignee_expr: false, is_assignee_expr: false,
}, },
syntax_ptr, syntax_ptr,
) );
self.source_map.format_args_template_map.insert(idx, mappings);
idx
} }
/// Generate a hir expression for a format_args placeholder specification. /// Generate a hir expression for a format_args placeholder specification.

View file

@ -160,7 +160,7 @@ fn main() {
let count = 10; let count = 10;
builtin#lang(Arguments::new_v1_formatted)( builtin#lang(Arguments::new_v1_formatted)(
&[ &[
"\"hello ", " ", " friends, we ", " ", "", "\"", "hello ", " ", " friends, we ", " ", "",
], ],
&[ &[
builtin#lang(Argument::new_display)( builtin#lang(Argument::new_display)(
@ -261,7 +261,7 @@ impl SsrError {
_ = $crate::error::SsrError::new( _ = $crate::error::SsrError::new(
builtin#lang(Arguments::new_v1_formatted)( builtin#lang(Arguments::new_v1_formatted)(
&[ &[
"\"Failed to resolve path `", "`\"", "Failed to resolve path `", "`",
], ],
&[ &[
builtin#lang(Argument::new_display)( builtin#lang(Argument::new_display)(
@ -320,7 +320,7 @@ fn f() {
$crate::panicking::panic_fmt( $crate::panicking::panic_fmt(
builtin#lang(Arguments::new_v1_formatted)( builtin#lang(Arguments::new_v1_formatted)(
&[ &[
"\"cc\"", "cc",
], ],
&[], &[],
&[], &[],

View file

@ -5,7 +5,7 @@ use hir_expand::name::Name;
use rustc_dependencies::parse_format as parse; use rustc_dependencies::parse_format as parse;
use syntax::{ use syntax::{
ast::{self, IsString}, ast::{self, IsString},
AstToken, SmolStr, TextRange, SmolStr, TextRange, TextSize,
}; };
use crate::hir::ExprId; use crate::hir::ExprId;
@ -170,15 +170,18 @@ pub(crate) fn parse(
mut args: FormatArgumentsCollector, mut args: FormatArgumentsCollector,
is_direct_literal: bool, is_direct_literal: bool,
mut synth: impl FnMut(Name) -> ExprId, mut synth: impl FnMut(Name) -> ExprId,
mut record_usage: impl FnMut(Name, Option<TextRange>),
) -> FormatArgs { ) -> FormatArgs {
let text = s.text(); let text = s.text_without_quotes();
let str_style = match s.quote_offsets() { let str_style = match s.quote_offsets() {
Some(offsets) => { Some(offsets) => {
let raw = u32::from(offsets.quotes.0.len()) - 1; let raw = u32::from(offsets.quotes.0.len()) - 1;
(raw != 0).then_some(raw as usize) // subtract 1 for the `r` prefix
(raw != 0).then(|| raw as usize - 1)
} }
None => None, None => None,
}; };
let mut parser = let mut parser =
parse::Parser::new(text, str_style, fmt_snippet, false, parse::ParseMode::Format); parse::Parser::new(text, str_style, fmt_snippet, false, parse::ParseMode::Format);
@ -199,6 +202,7 @@ pub(crate) fn parse(
let to_span = |inner_span: parse::InnerSpan| { let to_span = |inner_span: parse::InnerSpan| {
is_source_literal.then(|| { is_source_literal.then(|| {
TextRange::new(inner_span.start.try_into().unwrap(), inner_span.end.try_into().unwrap()) TextRange::new(inner_span.start.try_into().unwrap(), inner_span.end.try_into().unwrap())
- TextSize::from(str_style.map(|it| it + 1).unwrap_or(0) as u32 + 1)
}) })
}; };
@ -230,9 +234,10 @@ pub(crate) fn parse(
Err(index) Err(index)
} }
} }
ArgRef::Name(name, _span) => { ArgRef::Name(name, span) => {
let name = Name::new_text_dont_use(SmolStr::new(name)); let name = Name::new_text_dont_use(SmolStr::new(name));
if let Some((index, _)) = args.by_name(&name) { if let Some((index, _)) = args.by_name(&name) {
record_usage(name, span);
// Name found in `args`, so we resolve it to its index. // Name found in `args`, so we resolve it to its index.
if index < args.explicit_args().len() { if index < args.explicit_args().len() {
// Mark it as used, if it was an explicit argument. // Mark it as used, if it was an explicit argument.
@ -246,6 +251,7 @@ pub(crate) fn parse(
// disabled (see RFC #2795) // disabled (see RFC #2795)
// FIXME: Diagnose // FIXME: Diagnose
} }
record_usage(name.clone(), span);
Ok(args.add(FormatArgument { Ok(args.add(FormatArgument {
kind: FormatArgumentKind::Captured(name.clone()), kind: FormatArgumentKind::Captured(name.clone()),
// FIXME: This is problematic, we might want to synthesize a dummy // FIXME: This is problematic, we might want to synthesize a dummy

View file

@ -29,8 +29,9 @@ use smallvec::{smallvec, SmallVec};
use stdx::TupleExt; use stdx::TupleExt;
use syntax::{ use syntax::{
algo::skip_trivia_token, algo::skip_trivia_token,
ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody}, ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody, IsString as _},
match_ast, AstNode, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize, match_ast, AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken,
TextRange, TextSize,
}; };
use crate::{ use crate::{
@ -49,7 +50,7 @@ pub enum DescendPreference {
None, None,
} }
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum PathResolution { pub enum PathResolution {
/// An item /// An item
Def(ModuleDef), Def(ModuleDef),
@ -402,6 +403,41 @@ impl<'db> SemanticsImpl<'db> {
) )
} }
pub fn resolve_offset_in_format_args(
&self,
string: ast::String,
offset: TextSize,
) -> Option<(TextRange, Option<PathResolution>)> {
debug_assert!(offset <= string.syntax().text_range().len());
let literal = string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?;
let source_analyzer = &self.analyze_no_infer(format_args.syntax())?;
let format_args = self.wrap_node_infile(format_args);
source_analyzer.resolve_offset_in_format_args(self.db, format_args.as_ref(), offset)
}
pub fn check_for_format_args_template(
&self,
original_token: SyntaxToken,
offset: TextSize,
) -> Option<(TextRange, Option<PathResolution>)> {
if let Some(original_string) = ast::String::cast(original_token.clone()) {
if let Some(quote) = original_string.open_quote_text_range() {
return self
.descend_into_macros(DescendPreference::SameText, original_token.clone())
.into_iter()
.find_map(|token| {
self.resolve_offset_in_format_args(
ast::String::cast(token)?,
offset - quote.end(),
)
})
.map(|(range, res)| (range + quote.end(), res));
}
}
None
}
/// Maps a node down by mapping its first and last token down. /// Maps a node down by mapping its first and last token down.
pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> { pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
// This might not be the correct way to do this, but it works for now // This might not be the correct way to do this, but it works for now
@ -419,8 +455,12 @@ impl<'db> SemanticsImpl<'db> {
if first == last { if first == last {
// node is just the token, so descend the token // node is just the token, so descend the token
self.descend_into_macros_impl(first, 0.into(), &mut |InFile { value, .. }| { self.descend_into_macros_impl(first, &mut |InFile { value, .. }| {
if let Some(node) = value.parent_ancestors().find_map(N::cast) { if let Some(node) = value
.parent_ancestors()
.take_while(|it| it.text_range() == value.text_range())
.find_map(N::cast)
{
res.push(node) res.push(node)
} }
ControlFlow::Continue(()) ControlFlow::Continue(())
@ -428,7 +468,7 @@ impl<'db> SemanticsImpl<'db> {
} else { } else {
// Descend first and last token, then zip them to look for the node they belong to // Descend first and last token, then zip them to look for the node they belong to
let mut scratch: SmallVec<[_; 1]> = smallvec![]; let mut scratch: SmallVec<[_; 1]> = smallvec![];
self.descend_into_macros_impl(first, 0.into(), &mut |token| { self.descend_into_macros_impl(first, &mut |token| {
scratch.push(token); scratch.push(token);
ControlFlow::Continue(()) ControlFlow::Continue(())
}); });
@ -436,7 +476,6 @@ impl<'db> SemanticsImpl<'db> {
let mut scratch = scratch.into_iter(); let mut scratch = scratch.into_iter();
self.descend_into_macros_impl( self.descend_into_macros_impl(
last, last,
0.into(),
&mut |InFile { value: last, file_id: last_fid }| { &mut |InFile { value: last, file_id: last_fid }| {
if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() { if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
if first_fid == last_fid { if first_fid == last_fid {
@ -467,7 +506,6 @@ impl<'db> SemanticsImpl<'db> {
&self, &self,
mode: DescendPreference, mode: DescendPreference,
token: SyntaxToken, token: SyntaxToken,
offset: TextSize,
) -> SmallVec<[SyntaxToken; 1]> { ) -> SmallVec<[SyntaxToken; 1]> {
enum Dp<'t> { enum Dp<'t> {
SameText(&'t str), SameText(&'t str),
@ -487,7 +525,7 @@ impl<'db> SemanticsImpl<'db> {
DescendPreference::None => Dp::None, DescendPreference::None => Dp::None,
}; };
let mut res = smallvec![]; let mut res = smallvec![];
self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| { self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
let is_a_match = match mode { let is_a_match = match mode {
Dp::SameText(text) => value.text() == text, Dp::SameText(text) => value.text() == text,
Dp::SameKind(preferred_kind) => { Dp::SameKind(preferred_kind) => {
@ -513,7 +551,6 @@ impl<'db> SemanticsImpl<'db> {
&self, &self,
mode: DescendPreference, mode: DescendPreference,
token: SyntaxToken, token: SyntaxToken,
offset: TextSize,
) -> SyntaxToken { ) -> SyntaxToken {
enum Dp<'t> { enum Dp<'t> {
SameText(&'t str), SameText(&'t str),
@ -533,7 +570,7 @@ impl<'db> SemanticsImpl<'db> {
DescendPreference::None => Dp::None, DescendPreference::None => Dp::None,
}; };
let mut res = token.clone(); let mut res = token.clone();
self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| { self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
let is_a_match = match mode { let is_a_match = match mode {
Dp::SameText(text) => value.text() == text, Dp::SameText(text) => value.text() == text,
Dp::SameKind(preferred_kind) => { Dp::SameKind(preferred_kind) => {
@ -558,9 +595,6 @@ impl<'db> SemanticsImpl<'db> {
fn descend_into_macros_impl( fn descend_into_macros_impl(
&self, &self,
token: SyntaxToken, token: SyntaxToken,
// FIXME: We might want this to be Option<TextSize> to be able to opt out of subrange
// mapping, specifically for node downmapping
_offset: TextSize,
f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>, f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>,
) { ) {
// FIXME: Clean this up // FIXME: Clean this up
@ -729,7 +763,7 @@ impl<'db> SemanticsImpl<'db> {
offset: TextSize, offset: TextSize,
) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ { ) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
node.token_at_offset(offset) node.token_at_offset(offset)
.map(move |token| self.descend_into_macros(DescendPreference::None, token, offset)) .map(move |token| self.descend_into_macros(DescendPreference::None, token))
.map(|descendants| { .map(|descendants| {
descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it)) descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it))
}) })

View file

@ -820,6 +820,29 @@ impl SourceAnalyzer {
false false
} }
pub(crate) fn resolve_offset_in_format_args(
&self,
db: &dyn HirDatabase,
format_args: InFile<&ast::FormatArgsExpr>,
offset: TextSize,
) -> Option<(TextRange, Option<PathResolution>)> {
let implicits = self.body_source_map()?.implicit_format_args(format_args)?;
implicits.iter().find(|(range, _)| range.contains_inclusive(offset)).map(|(range, name)| {
(
*range,
resolve_hir_value_path(
db,
&self.resolver,
self.resolver.body_owner(),
&Path::from_known_path_with_no_generic(ModPath::from_segments(
PathKind::Plain,
Some(name.clone()),
)),
),
)
})
}
fn resolve_impl_method_or_trait_def( fn resolve_impl_method_or_trait_def(
&self, &self,
db: &dyn HirDatabase, db: &dyn HirDatabase,
@ -1038,24 +1061,7 @@ fn resolve_hir_path_(
}; };
let body_owner = resolver.body_owner(); let body_owner = resolver.body_owner();
let values = || { let values = || resolve_hir_value_path(db, resolver, body_owner, path);
resolver.resolve_path_in_value_ns_fully(db.upcast(), path).and_then(|val| {
let res = match val {
ValueNs::LocalBinding(binding_id) => {
let var = Local { parent: body_owner?, binding_id };
PathResolution::Local(var)
}
ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()),
ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()),
ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()),
ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()),
ValueNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()),
ValueNs::ImplSelf(impl_id) => PathResolution::SelfType(impl_id.into()),
ValueNs::GenericParam(id) => PathResolution::ConstParam(id.into()),
};
Some(res)
})
};
let items = || { let items = || {
resolver resolver
@ -1075,6 +1081,30 @@ fn resolve_hir_path_(
.or_else(macros) .or_else(macros)
} }
fn resolve_hir_value_path(
db: &dyn HirDatabase,
resolver: &Resolver,
body_owner: Option<DefWithBodyId>,
path: &Path,
) -> Option<PathResolution> {
resolver.resolve_path_in_value_ns_fully(db.upcast(), path).and_then(|val| {
let res = match val {
ValueNs::LocalBinding(binding_id) => {
let var = Local { parent: body_owner?, binding_id };
PathResolution::Local(var)
}
ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()),
ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()),
ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()),
ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()),
ValueNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()),
ValueNs::ImplSelf(impl_id) => PathResolution::SelfType(impl_id.into()),
ValueNs::GenericParam(id) => PathResolution::ConstParam(id.into()),
};
Some(res)
})
}
/// Resolves a path where we know it is a qualifier of another path. /// Resolves a path where we know it is a qualifier of another path.
/// ///
/// For example, if we have: /// For example, if we have:

View file

@ -328,6 +328,7 @@ fn augment_references_with_imports(
references references
.iter() .iter()
.filter_map(|FileReference { range, name, .. }| { .filter_map(|FileReference { range, name, .. }| {
let name = name.clone().into_name_like()?;
ctx.sema.scope(name.syntax()).map(|scope| (*range, name, scope.module())) ctx.sema.scope(name.syntax()).map(|scope| (*range, name, scope.module()))
}) })
.map(|(range, name, ref_module)| { .map(|(range, name, ref_module)| {
@ -455,6 +456,7 @@ fn add_enum_def(
.iter() .iter()
.flat_map(|(_, refs)| refs) .flat_map(|(_, refs)| refs)
.filter_map(|FileReference { name, .. }| { .filter_map(|FileReference { name, .. }| {
let name = name.clone().into_name_like()?;
ctx.sema.scope(name.syntax()).map(|scope| scope.module()) ctx.sema.scope(name.syntax()).map(|scope| scope.module())
}) })
.any(|module| module.nearest_non_block_module(ctx.db()) != *target_module); .any(|module| module.nearest_non_block_module(ctx.db()) != *target_module);

View file

@ -186,6 +186,7 @@ fn augment_references_with_imports(
references references
.iter() .iter()
.filter_map(|FileReference { name, .. }| { .filter_map(|FileReference { name, .. }| {
let name = name.clone().into_name_like()?;
ctx.sema.scope(name.syntax()).map(|scope| (name, scope.module())) ctx.sema.scope(name.syntax()).map(|scope| (name, scope.module()))
}) })
.map(|(name, ref_module)| { .map(|(name, ref_module)| {
@ -238,6 +239,7 @@ fn add_tuple_struct_def(
.iter() .iter()
.flat_map(|(_, refs)| refs) .flat_map(|(_, refs)| refs)
.filter_map(|FileReference { name, .. }| { .filter_map(|FileReference { name, .. }| {
let name = name.clone().into_name_like()?;
ctx.sema.scope(name.syntax()).map(|scope| scope.module()) ctx.sema.scope(name.syntax()).map(|scope| scope.module())
}) })
.any(|module| module.nearest_non_block_module(ctx.db()) != *target_module); .any(|module| module.nearest_non_block_module(ctx.db()) != *target_module);

View file

@ -35,11 +35,10 @@ pub(crate) fn extract_expressions_from_format_string(
let fmt_string = ctx.find_token_at_offset::<ast::String>()?; let fmt_string = ctx.find_token_at_offset::<ast::String>()?;
let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?; let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?;
let expanded_t = ast::String::cast(ctx.sema.descend_into_macros_single( let expanded_t = ast::String::cast(
DescendPreference::SameKind, ctx.sema
fmt_string.syntax().clone(), .descend_into_macros_single(DescendPreference::SameKind, fmt_string.syntax().clone()),
0.into(), )?;
))?;
if !is_format_string(&expanded_t) { if !is_format_string(&expanded_t) {
return None; return None;
} }

View file

@ -751,9 +751,7 @@ impl FunctionBody {
.descendants_with_tokens() .descendants_with_tokens()
.filter_map(SyntaxElement::into_token) .filter_map(SyntaxElement::into_token)
.filter(|it| matches!(it.kind(), SyntaxKind::IDENT | T![self])) .filter(|it| matches!(it.kind(), SyntaxKind::IDENT | T![self]))
.flat_map(|t| { .flat_map(|t| sema.descend_into_macros(DescendPreference::None, t))
sema.descend_into_macros(DescendPreference::None, t, 0.into())
})
.for_each(|t| add_name_if_local(t.parent().and_then(ast::NameRef::cast))); .for_each(|t| add_name_if_local(t.parent().and_then(ast::NameRef::cast)));
} }
} }

View file

@ -8,7 +8,7 @@ use ide_db::{
defs::Definition, defs::Definition,
imports::insert_use::remove_path_if_in_use_stmt, imports::insert_use::remove_path_if_in_use_stmt,
path_transform::PathTransform, path_transform::PathTransform,
search::{FileReference, SearchScope}, search::{FileReference, FileReferenceNode, SearchScope},
source_change::SourceChangeBuilder, source_change::SourceChangeBuilder,
syntax_helpers::{insert_whitespace_into_node::insert_ws_into, node_ext::expr_as_name_ref}, syntax_helpers::{insert_whitespace_into_node::insert_ws_into, node_ext::expr_as_name_ref},
RootDatabase, RootDatabase,
@ -148,7 +148,7 @@ pub(super) fn split_refs_and_uses<T: ast::AstNode>(
) -> (Vec<T>, Vec<ast::Path>) { ) -> (Vec<T>, Vec<ast::Path>) {
iter.into_iter() iter.into_iter()
.filter_map(|file_ref| match file_ref.name { .filter_map(|file_ref| match file_ref.name {
ast::NameLike::NameRef(name_ref) => Some(name_ref), FileReferenceNode::NameRef(name_ref) => Some(name_ref),
_ => None, _ => None,
}) })
.filter_map(|name_ref| match name_ref.syntax().ancestors().find_map(ast::UseTree::cast) { .filter_map(|name_ref| match name_ref.syntax().ancestors().find_map(ast::UseTree::cast) {
@ -346,7 +346,7 @@ fn inline(
match param.as_local(sema.db) { match param.as_local(sema.db) {
Some(l) => usages_for_locals(l) Some(l) => usages_for_locals(l)
.map(|FileReference { name, range, .. }| match name { .map(|FileReference { name, range, .. }| match name {
ast::NameLike::NameRef(_) => body FileReferenceNode::NameRef(_) => body
.syntax() .syntax()
.covering_element(range) .covering_element(range)
.ancestors() .ancestors()
@ -372,7 +372,7 @@ fn inline(
if let Some(self_local) = params[0].2.as_local(sema.db) { if let Some(self_local) = params[0].2.as_local(sema.db) {
usages_for_locals(self_local) usages_for_locals(self_local)
.filter_map(|FileReference { name, range, .. }| match name { .filter_map(|FileReference { name, range, .. }| match name {
ast::NameLike::NameRef(_) => Some(body.syntax().covering_element(range)), FileReferenceNode::NameRef(_) => Some(body.syntax().covering_element(range)),
_ => None, _ => None,
}) })
.for_each(|usage| { .for_each(|usage| {

View file

@ -2,7 +2,7 @@ use hir::{PathResolution, Semantics};
use ide_db::{ use ide_db::{
base_db::FileId, base_db::FileId,
defs::Definition, defs::Definition,
search::{FileReference, UsageSearchResult}, search::{FileReference, FileReferenceNode, UsageSearchResult},
RootDatabase, RootDatabase,
}; };
use syntax::{ use syntax::{
@ -63,7 +63,7 @@ pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext<'_>)
let wrap_in_parens = references let wrap_in_parens = references
.into_iter() .into_iter()
.filter_map(|FileReference { range, name, .. }| match name { .filter_map(|FileReference { range, name, .. }| match name {
ast::NameLike::NameRef(name) => Some((range, name)), FileReferenceNode::NameRef(name) => Some((range, name)),
_ => None, _ => None,
}) })
.map(|(range, name_ref)| { .map(|(range, name_ref)| {

View file

@ -59,7 +59,10 @@ pub(crate) fn replace_named_generic_with_impl(
let mut path_types_to_replace = Vec::new(); let mut path_types_to_replace = Vec::new();
for (_a, refs) in usage_refs.iter() { for (_a, refs) in usage_refs.iter() {
for usage_ref in refs { for usage_ref in refs {
let param_node = find_path_type(&ctx.sema, &type_param_name, &usage_ref.name)?; let Some(name_like) = usage_ref.name.clone().into_name_like() else {
continue;
};
let param_node = find_path_type(&ctx.sema, &type_param_name, &name_like)?;
path_types_to_replace.push(param_node); path_types_to_replace.push(param_node);
} }
} }

View file

@ -2,11 +2,11 @@ use ide_db::{
assists::{AssistId, AssistKind}, assists::{AssistId, AssistKind},
base_db::FileId, base_db::FileId,
defs::Definition, defs::Definition,
search::FileReference, search::{FileReference, FileReferenceNode},
syntax_helpers::node_ext::full_path_of_name_ref, syntax_helpers::node_ext::full_path_of_name_ref,
}; };
use syntax::{ use syntax::{
ast::{self, NameLike, NameRef}, ast::{self, NameRef},
AstNode, SyntaxKind, TextRange, AstNode, SyntaxKind, TextRange,
}; };
@ -76,7 +76,7 @@ pub(crate) fn unnecessary_async(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
for await_expr in find_all_references(ctx, &Definition::Function(fn_def)) for await_expr in find_all_references(ctx, &Definition::Function(fn_def))
// Keep only references that correspond NameRefs. // Keep only references that correspond NameRefs.
.filter_map(|(_, reference)| match reference.name { .filter_map(|(_, reference)| match reference.name {
NameLike::NameRef(nameref) => Some(nameref), FileReferenceNode::NameRef(nameref) => Some(nameref),
_ => None, _ => None,
}) })
// Keep only references that correspond to await expressions // Keep only references that correspond to await expressions

View file

@ -117,7 +117,7 @@ pub fn get_definition(
sema: &Semantics<'_, RootDatabase>, sema: &Semantics<'_, RootDatabase>,
token: SyntaxToken, token: SyntaxToken,
) -> Option<Definition> { ) -> Option<Definition> {
for token in sema.descend_into_macros(DescendPreference::None, token, 0.into()) { for token in sema.descend_into_macros(DescendPreference::None, token) {
let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops); let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops);
if let Some(&[x]) = def.as_deref() { if let Some(&[x]) = def.as_deref() {
return Some(x); return Some(x);

View file

@ -34,7 +34,7 @@ use text_edit::{TextEdit, TextEditBuilder};
use crate::{ use crate::{
defs::Definition, defs::Definition,
search::FileReference, search::{FileReference, FileReferenceNode},
source_change::{FileSystemEdit, SourceChange}, source_change::{FileSystemEdit, SourceChange},
syntax_helpers::node_ext::expr_as_name_ref, syntax_helpers::node_ext::expr_as_name_ref,
traits::convert_to_def_in_trait, traits::convert_to_def_in_trait,
@ -361,7 +361,7 @@ pub fn source_edit_from_references(
// macros can cause multiple refs to occur for the same text range, so keep track of what we have edited so far // macros can cause multiple refs to occur for the same text range, so keep track of what we have edited so far
let mut edited_ranges = Vec::new(); let mut edited_ranges = Vec::new();
for &FileReference { range, ref name, .. } in references { for &FileReference { range, ref name, .. } in references {
let name_range = name.syntax().text_range(); let name_range = name.text_range();
if name_range.len() != range.len() { if name_range.len() != range.len() {
// This usage comes from a different token kind that was downmapped to a NameLike in a macro // This usage comes from a different token kind that was downmapped to a NameLike in a macro
// Renaming this will most likely break things syntax-wise // Renaming this will most likely break things syntax-wise
@ -371,17 +371,17 @@ pub fn source_edit_from_references(
// if the ranges differ then the node is inside a macro call, we can't really attempt // if the ranges differ then the node is inside a macro call, we can't really attempt
// to make special rewrites like shorthand syntax and such, so just rename the node in // to make special rewrites like shorthand syntax and such, so just rename the node in
// the macro input // the macro input
ast::NameLike::NameRef(name_ref) if name_range == range => { FileReferenceNode::NameRef(name_ref) if name_range == range => {
source_edit_from_name_ref(&mut edit, name_ref, new_name, def) source_edit_from_name_ref(&mut edit, name_ref, new_name, def)
} }
ast::NameLike::Name(name) if name_range == range => { FileReferenceNode::Name(name) if name_range == range => {
source_edit_from_name(&mut edit, name, new_name) source_edit_from_name(&mut edit, name, new_name)
} }
_ => false, _ => false,
}; };
if !has_emitted_edit && !edited_ranges.contains(&range.start()) { if !has_emitted_edit && !edited_ranges.contains(&range.start()) {
let (range, new_name) = match name { let (range, new_name) = match name {
ast::NameLike::Lifetime(_) => ( FileReferenceNode::Lifetime(_) => (
TextRange::new(range.start() + syntax::TextSize::from(1), range.end()), TextRange::new(range.start() + syntax::TextSize::from(1), range.end()),
new_name.strip_prefix('\'').unwrap_or(new_name).to_owned(), new_name.strip_prefix('\'').unwrap_or(new_name).to_owned(),
), ),

View file

@ -9,13 +9,13 @@ use std::mem;
use base_db::{salsa::Database, FileId, FileRange, SourceDatabase, SourceDatabaseExt}; use base_db::{salsa::Database, FileId, FileRange, SourceDatabase, SourceDatabaseExt};
use hir::{ use hir::{
AsAssocItem, DefWithBody, DescendPreference, HasAttrs, HasSource, HirFileIdExt, InFile, AsAssocItem, DefWithBody, DescendPreference, HasAttrs, HasSource, HirFileIdExt, InFile,
InRealFile, ModuleSource, Semantics, Visibility, InRealFile, ModuleSource, PathResolution, Semantics, Visibility,
}; };
use memchr::memmem::Finder; use memchr::memmem::Finder;
use nohash_hasher::IntMap; use nohash_hasher::IntMap;
use once_cell::unsync::Lazy; use once_cell::unsync::Lazy;
use parser::SyntaxKind; use parser::SyntaxKind;
use syntax::{ast, match_ast, AstNode, TextRange, TextSize}; use syntax::{ast, match_ast, AstNode, AstToken, SyntaxElement, TextRange, TextSize};
use triomphe::Arc; use triomphe::Arc;
use crate::{ use crate::{
@ -63,10 +63,67 @@ pub struct FileReference {
/// The range of the reference in the original file /// The range of the reference in the original file
pub range: TextRange, pub range: TextRange,
/// The node of the reference in the (macro-)file /// The node of the reference in the (macro-)file
pub name: ast::NameLike, pub name: FileReferenceNode,
pub category: Option<ReferenceCategory>, pub category: Option<ReferenceCategory>,
} }
#[derive(Debug, Clone)]
pub enum FileReferenceNode {
Name(ast::Name),
NameRef(ast::NameRef),
Lifetime(ast::Lifetime),
FormatStringEntry(ast::String, TextRange),
}
impl FileReferenceNode {
pub fn text_range(&self) -> TextRange {
match self {
FileReferenceNode::Name(it) => it.syntax().text_range(),
FileReferenceNode::NameRef(it) => it.syntax().text_range(),
FileReferenceNode::Lifetime(it) => it.syntax().text_range(),
FileReferenceNode::FormatStringEntry(_, range) => *range,
}
}
pub fn syntax(&self) -> SyntaxElement {
match self {
FileReferenceNode::Name(it) => it.syntax().clone().into(),
FileReferenceNode::NameRef(it) => it.syntax().clone().into(),
FileReferenceNode::Lifetime(it) => it.syntax().clone().into(),
FileReferenceNode::FormatStringEntry(it, _) => it.syntax().clone().into(),
}
}
pub fn into_name_like(self) -> Option<ast::NameLike> {
match self {
FileReferenceNode::Name(it) => Some(ast::NameLike::Name(it)),
FileReferenceNode::NameRef(it) => Some(ast::NameLike::NameRef(it)),
FileReferenceNode::Lifetime(it) => Some(ast::NameLike::Lifetime(it)),
FileReferenceNode::FormatStringEntry(_, _) => None,
}
}
pub fn as_name_ref(&self) -> Option<&ast::NameRef> {
match self {
FileReferenceNode::NameRef(name_ref) => Some(name_ref),
_ => None,
}
}
pub fn as_lifetime(&self) -> Option<&ast::Lifetime> {
match self {
FileReferenceNode::Lifetime(lifetime) => Some(lifetime),
_ => None,
}
}
pub fn text(&self) -> syntax::TokenText<'_> {
match self {
FileReferenceNode::NameRef(name_ref) => name_ref.text(),
FileReferenceNode::Name(name) => name.text(),
FileReferenceNode::Lifetime(lifetime) => lifetime.text(),
FileReferenceNode::FormatStringEntry(it, range) => {
syntax::TokenText::borrowed(&it.text()[*range - it.syntax().text_range().start()])
}
}
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum ReferenceCategory { pub enum ReferenceCategory {
// FIXME: Add this variant and delete the `retain_adt_literal_usages` function. // FIXME: Add this variant and delete the `retain_adt_literal_usages` function.
@ -467,7 +524,7 @@ impl<'a> FindUsages<'a> {
// every textual hit. That function is notoriously // every textual hit. That function is notoriously
// expensive even for things that do not get down mapped // expensive even for things that do not get down mapped
// into macros. // into macros.
sema.descend_into_macros(DescendPreference::None, token, offset) sema.descend_into_macros(DescendPreference::None, token)
.into_iter() .into_iter()
.filter_map(|it| it.parent()) .filter_map(|it| it.parent())
}) })
@ -479,6 +536,17 @@ impl<'a> FindUsages<'a> {
// Search for occurrences of the items name // Search for occurrences of the items name
for offset in match_indices(&text, finder, search_range) { for offset in match_indices(&text, finder, search_range) {
tree.token_at_offset(offset).into_iter().for_each(|token| {
let Some(str_token) = ast::String::cast(token.clone()) else { return };
if let Some((range, nameres)) =
sema.check_for_format_args_template(token.clone(), offset)
{
if self.found_format_args_ref(file_id, range, str_token, nameres, sink) {
return;
}
}
});
for name in find_nodes(name, &tree, offset).filter_map(ast::NameLike::cast) { for name in find_nodes(name, &tree, offset).filter_map(ast::NameLike::cast) {
if match name { if match name {
ast::NameLike::NameRef(name_ref) => self.found_name_ref(&name_ref, sink), ast::NameLike::NameRef(name_ref) => self.found_name_ref(&name_ref, sink),
@ -593,7 +661,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
let reference = FileReference { let reference = FileReference {
range, range,
name: ast::NameLike::NameRef(name_ref.clone()), name: FileReferenceNode::NameRef(name_ref.clone()),
category: None, category: None,
}; };
sink(file_id, reference) sink(file_id, reference)
@ -612,7 +680,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
let reference = FileReference { let reference = FileReference {
range, range,
name: ast::NameLike::NameRef(name_ref.clone()), name: FileReferenceNode::NameRef(name_ref.clone()),
category: is_name_ref_in_import(name_ref).then_some(ReferenceCategory::Import), category: is_name_ref_in_import(name_ref).then_some(ReferenceCategory::Import),
}; };
sink(file_id, reference) sink(file_id, reference)
@ -621,6 +689,27 @@ impl<'a> FindUsages<'a> {
} }
} }
/// Reports a reference discovered as an implicit capture inside a format
/// string, but only when it resolves to the definition being searched for.
/// Returns the sink's result, or `false` when nothing was emitted.
fn found_format_args_ref(
    &self,
    file_id: FileId,
    range: TextRange,
    token: ast::String,
    res: Option<PathResolution>,
    sink: &mut dyn FnMut(FileId, FileReference) -> bool,
) -> bool {
    let Some(def) = res.map(Definition::from) else { return false };
    if def != self.def {
        return false;
    }
    // An implicit format argument is always a read of the captured binding.
    let reference = FileReference {
        range,
        name: FileReferenceNode::FormatStringEntry(token, range),
        category: Some(ReferenceCategory::Read),
    };
    sink(file_id, reference)
}
fn found_lifetime( fn found_lifetime(
&self, &self,
lifetime: &ast::Lifetime, lifetime: &ast::Lifetime,
@ -631,7 +720,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(lifetime.syntax()); let FileRange { file_id, range } = self.sema.original_range(lifetime.syntax());
let reference = FileReference { let reference = FileReference {
range, range,
name: ast::NameLike::Lifetime(lifetime.clone()), name: FileReferenceNode::Lifetime(lifetime.clone()),
category: None, category: None,
}; };
sink(file_id, reference) sink(file_id, reference)
@ -655,7 +744,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
let reference = FileReference { let reference = FileReference {
range, range,
name: ast::NameLike::NameRef(name_ref.clone()), name: FileReferenceNode::NameRef(name_ref.clone()),
category: ReferenceCategory::new(&def, name_ref), category: ReferenceCategory::new(&def, name_ref),
}; };
sink(file_id, reference) sink(file_id, reference)
@ -671,7 +760,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
let reference = FileReference { let reference = FileReference {
range, range,
name: ast::NameLike::NameRef(name_ref.clone()), name: FileReferenceNode::NameRef(name_ref.clone()),
category: ReferenceCategory::new(&def, name_ref), category: ReferenceCategory::new(&def, name_ref),
}; };
sink(file_id, reference) sink(file_id, reference)
@ -681,7 +770,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
let reference = FileReference { let reference = FileReference {
range, range,
name: ast::NameLike::NameRef(name_ref.clone()), name: FileReferenceNode::NameRef(name_ref.clone()),
category: ReferenceCategory::new(&def, name_ref), category: ReferenceCategory::new(&def, name_ref),
}; };
sink(file_id, reference) sink(file_id, reference)
@ -705,7 +794,7 @@ impl<'a> FindUsages<'a> {
}; };
let reference = FileReference { let reference = FileReference {
range, range,
name: ast::NameLike::NameRef(name_ref.clone()), name: FileReferenceNode::NameRef(name_ref.clone()),
category: access, category: access,
}; };
sink(file_id, reference) sink(file_id, reference)
@ -728,7 +817,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name.syntax()); let FileRange { file_id, range } = self.sema.original_range(name.syntax());
let reference = FileReference { let reference = FileReference {
range, range,
name: ast::NameLike::Name(name.clone()), name: FileReferenceNode::Name(name.clone()),
// FIXME: mutable patterns should have `Write` access // FIXME: mutable patterns should have `Write` access
category: Some(ReferenceCategory::Read), category: Some(ReferenceCategory::Read),
}; };
@ -738,7 +827,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name.syntax()); let FileRange { file_id, range } = self.sema.original_range(name.syntax());
let reference = FileReference { let reference = FileReference {
range, range,
name: ast::NameLike::Name(name.clone()), name: FileReferenceNode::Name(name.clone()),
category: None, category: None,
}; };
sink(file_id, reference) sink(file_id, reference)
@ -763,7 +852,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name.syntax()); let FileRange { file_id, range } = self.sema.original_range(name.syntax());
let reference = FileReference { let reference = FileReference {
range, range,
name: ast::NameLike::Name(name.clone()), name: FileReferenceNode::Name(name.clone()),
category: None, category: None,
}; };
sink(file_id, reference) sink(file_id, reference)

View file

@ -87,7 +87,7 @@ pub(crate) fn outgoing_calls(
})?; })?;
let mut calls = CallLocations::default(); let mut calls = CallLocations::default();
sema.descend_into_macros(DescendPreference::None, token, offset) sema.descend_into_macros(DescendPreference::None, token)
.into_iter() .into_iter()
.filter_map(|it| it.parent_ancestors().nth(1).and_then(ast::Item::cast)) .filter_map(|it| it.parent_ancestors().nth(1).and_then(ast::Item::cast))
.filter_map(|item| match item { .filter_map(|item| match item {

View file

@ -146,7 +146,7 @@ pub(crate) fn external_docs(
kind if kind.is_trivia() => 0, kind if kind.is_trivia() => 0,
_ => 1, _ => 1,
})?; })?;
let token = sema.descend_into_macros_single(DescendPreference::None, token, offset); let token = sema.descend_into_macros_single(DescendPreference::None, token);
let node = token.parent()?; let node = token.parent()?;
let definition = match_ast! { let definition = match_ast! {
@ -288,7 +288,7 @@ impl DocCommentToken {
let original_start = doc_token.text_range().start(); let original_start = doc_token.text_range().start();
let relative_comment_offset = offset - original_start - prefix_len; let relative_comment_offset = offset - original_start - prefix_len;
sema.descend_into_macros(DescendPreference::None,doc_token, offset).into_iter().find_map(|t| { sema.descend_into_macros(DescendPreference::None, doc_token).into_iter().find_map(|t| {
let (node, descended_prefix_len) = match_ast! { let (node, descended_prefix_len) = match_ast! {
match t { match t {
ast::Comment(comment) => (t.parent()?, TextSize::try_from(comment.prefix().len()).ok()?), ast::Comment(comment) => (t.parent()?, TextSize::try_from(comment.prefix().len()).ok()?),

View file

@ -41,7 +41,7 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
// ``` // ```
let derive = sema let derive = sema
.descend_into_macros(DescendPreference::None, tok.clone(), 0.into()) .descend_into_macros(DescendPreference::None, tok.clone())
.into_iter() .into_iter()
.find_map(|descended| { .find_map(|descended| {
let hir_file = sema.hir_file_for(&descended.parent()?); let hir_file = sema.hir_file_for(&descended.parent()?);

View file

@ -140,16 +140,10 @@ fn extend_tokens_from_range(
// compute original mapped token range // compute original mapped token range
let extended = { let extended = {
let fst_expanded = sema.descend_into_macros_single( let fst_expanded =
DescendPreference::None, sema.descend_into_macros_single(DescendPreference::None, first_token.clone());
first_token.clone(), let lst_expanded =
original_range.start(), sema.descend_into_macros_single(DescendPreference::None, last_token.clone());
);
let lst_expanded = sema.descend_into_macros_single(
DescendPreference::None,
last_token.clone(),
original_range.end(),
);
let mut lca = let mut lca =
algo::least_common_ancestor(&fst_expanded.parent()?, &lst_expanded.parent()?)?; algo::least_common_ancestor(&fst_expanded.parent()?, &lst_expanded.parent()?)?;
lca = shallowest_node(&lca); lca = shallowest_node(&lca);
@ -160,11 +154,10 @@ fn extend_tokens_from_range(
}; };
// Compute parent node range // Compute parent node range
let validate = |offset: TextSize| { let validate = || {
let extended = &extended; let extended = &extended;
move |token: &SyntaxToken| -> bool { move |token: &SyntaxToken| -> bool {
let expanded = let expanded = sema.descend_into_macros_single(DescendPreference::None, token.clone());
sema.descend_into_macros_single(DescendPreference::None, token.clone(), offset);
let parent = match expanded.parent() { let parent = match expanded.parent() {
Some(it) => it, Some(it) => it,
None => return false, None => return false,
@ -178,14 +171,14 @@ fn extend_tokens_from_range(
let token = token.prev_token()?; let token = token.prev_token()?;
skip_trivia_token(token, Direction::Prev) skip_trivia_token(token, Direction::Prev)
}) })
.take_while(validate(original_range.start())) .take_while(validate())
.last()?; .last()?;
let last = successors(Some(last_token), |token| { let last = successors(Some(last_token), |token| {
let token = token.next_token()?; let token = token.next_token()?;
skip_trivia_token(token, Direction::Next) skip_trivia_token(token, Direction::Next)
}) })
.take_while(validate(original_range.end())) .take_while(validate())
.last()?; .last()?;
let range = first.text_range().cover(last.text_range()); let range = first.text_range().cover(last.text_range());

View file

@ -29,7 +29,7 @@ pub(crate) fn goto_declaration(
.find(|it| matches!(it.kind(), IDENT | T![self] | T![super] | T![crate] | T![Self]))?; .find(|it| matches!(it.kind(), IDENT | T![self] | T![super] | T![crate] | T![Self]))?;
let range = original_token.text_range(); let range = original_token.text_range();
let info: Vec<NavigationTarget> = sema let info: Vec<NavigationTarget> = sema
.descend_into_macros(DescendPreference::None, original_token, offset) .descend_into_macros(DescendPreference::None, original_token)
.iter() .iter()
.filter_map(|token| { .filter_map(|token| {
let parent = token.parent()?; let parent = token.parent()?;

View file

@ -55,8 +55,21 @@ pub(crate) fn goto_definition(
Some(RangeInfo::new(link_range, vec![nav])) Some(RangeInfo::new(link_range, vec![nav]))
}); });
} }
if let Some((range, resolution)) =
sema.check_for_format_args_template(original_token.clone(), offset)
{
return Some(RangeInfo::new(
range,
match resolution {
Some(res) => def_to_nav(db, Definition::from(res)),
None => vec![],
},
));
}
let navs = sema let navs = sema
.descend_into_macros(DescendPreference::None, original_token.clone(), offset) .descend_into_macros(DescendPreference::None, original_token.clone())
.into_iter() .into_iter()
.filter_map(|token| { .filter_map(|token| {
let parent = token.parent()?; let parent = token.parent()?;
@ -809,18 +822,13 @@ mod confuse_index { fn foo(); }
fn goto_through_format() { fn goto_through_format() {
check( check(
r#" r#"
//- minicore: fmt
#[macro_export] #[macro_export]
macro_rules! format { macro_rules! format {
($($arg:tt)*) => ($crate::fmt::format($crate::__export::format_args!($($arg)*))) ($($arg:tt)*) => ($crate::fmt::format($crate::__export::format_args!($($arg)*)))
} }
#[rustc_builtin_macro]
#[macro_export]
macro_rules! format_args {
($fmt:expr) => ({ /* compiler built-in */ });
($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ })
}
pub mod __export { pub mod __export {
pub use crate::format_args; pub use core::format_args;
fn foo() {} // for index confusion fn foo() {} // for index confusion
} }
fn foo() -> i8 {} fn foo() -> i8 {}
@ -2056,6 +2064,20 @@ fn f2() {
struct S2; struct S2;
S1::e$0(); S1::e$0();
} }
"#,
);
}
#[test]
fn implicit_format_args() {
check(
r#"
//- minicore: fmt
fn test() {
let a = "world";
// ^
format_args!("hello {a$0}");
}
"#, "#,
); );
} }

View file

@ -34,7 +34,7 @@ pub(crate) fn goto_implementation(
})?; })?;
let range = original_token.text_range(); let range = original_token.text_range();
let navs = let navs =
sema.descend_into_macros(DescendPreference::None, original_token, offset) sema.descend_into_macros(DescendPreference::None, original_token)
.into_iter() .into_iter()
.filter_map(|token| token.parent().and_then(ast::NameLike::cast)) .filter_map(|token| token.parent().and_then(ast::NameLike::cast))
.filter_map(|node| match &node { .filter_map(|node| match &node {

View file

@ -1,4 +1,4 @@
use hir::DescendPreference; use hir::{DescendPreference, GenericParam};
use ide_db::{base_db::Upcast, defs::Definition, helpers::pick_best_token, RootDatabase}; use ide_db::{base_db::Upcast, defs::Definition, helpers::pick_best_token, RootDatabase};
use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, T}; use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, T};
@ -37,8 +37,37 @@ pub(crate) fn goto_type_definition(
} }
} }
}; };
let mut process_ty = |ty: hir::Type| {
// collect from each `ty` into the `res` result vec
let ty = ty.strip_references();
ty.walk(db, |t| {
if let Some(adt) = t.as_adt() {
push(adt.into());
} else if let Some(trait_) = t.as_dyn_trait() {
push(trait_.into());
} else if let Some(traits) = t.as_impl_traits(db) {
traits.for_each(|it| push(it.into()));
} else if let Some(trait_) = t.as_associated_type_parent_trait(db) {
push(trait_.into());
}
});
};
if let Some((range, resolution)) = sema.check_for_format_args_template(token.clone(), offset) {
if let Some(ty) = resolution.and_then(|res| match Definition::from(res) {
Definition::Const(it) => Some(it.ty(db)),
Definition::Static(it) => Some(it.ty(db)),
Definition::GenericParam(GenericParam::ConstParam(it)) => Some(it.ty(db)),
Definition::Local(it) => Some(it.ty(db)),
Definition::Adt(hir::Adt::Struct(it)) => Some(it.ty(db)),
_ => None,
}) {
process_ty(ty);
}
return Some(RangeInfo::new(range, res));
}
let range = token.text_range(); let range = token.text_range();
sema.descend_into_macros(DescendPreference::None,token, offset) sema.descend_into_macros(DescendPreference::None, token)
.into_iter() .into_iter()
.filter_map(|token| { .filter_map(|token| {
let ty = sema let ty = sema
@ -76,21 +105,7 @@ pub(crate) fn goto_type_definition(
}); });
ty ty
}) })
.for_each(|ty| { .for_each(process_ty);
// collect from each `ty` into the `res` result vec
let ty = ty.strip_references();
ty.walk(db, |t| {
if let Some(adt) = t.as_adt() {
push(adt.into());
} else if let Some(trait_) = t.as_dyn_trait() {
push(trait_.into());
} else if let Some(traits) = t.as_impl_traits(db) {
traits.for_each(|it| push(it.into()));
} else if let Some(trait_) = t.as_associated_type_parent_trait(db) {
push(trait_.into());
}
});
});
Some(RangeInfo::new(range, res)) Some(RangeInfo::new(range, res))
} }
@ -326,6 +341,42 @@ struct Baz<T>(T);
//^^^ //^^^
fn foo(x$0: Bar<Baz<Foo>, Baz<usize>) {} fn foo(x$0: Bar<Baz<Foo>, Baz<usize>) {}
"#,
);
}
#[test]
fn implicit_format_args() {
check(
r#"
//- minicore: fmt
struct Bar;
// ^^^
fn test() {
let a = Bar;
format_args!("hello {a$0}");
}
"#,
);
check(
r#"
//- minicore: fmt
struct Bar;
// ^^^
fn test() {
format_args!("hello {Bar$0}");
}
"#,
);
check(
r#"
//- minicore: fmt
struct Bar;
// ^^^
const BAR: Bar = Bar;
fn test() {
format_args!("hello {BAR$0}");
}
"#, "#,
); );
} }

View file

@ -1,3 +1,5 @@
use std::iter;
use hir::{DescendPreference, Semantics}; use hir::{DescendPreference, Semantics};
use ide_db::{ use ide_db::{
base_db::{FileId, FilePosition, FileRange}, base_db::{FileId, FilePosition, FileRange},
@ -15,7 +17,6 @@ use syntax::{
SyntaxKind::{self, IDENT, INT_NUMBER}, SyntaxKind::{self, IDENT, INT_NUMBER},
SyntaxNode, SyntaxToken, TextRange, T, SyntaxNode, SyntaxToken, TextRange, T,
}; };
use text_edit::TextSize;
use crate::{navigation_target::ToNav, references, NavigationTarget, TryToNav}; use crate::{navigation_target::ToNav, references, NavigationTarget, TryToNav};
@ -132,7 +133,16 @@ fn highlight_references(
token: SyntaxToken, token: SyntaxToken,
FilePosition { file_id, offset }: FilePosition, FilePosition { file_id, offset }: FilePosition,
) -> Option<Vec<HighlightedRange>> { ) -> Option<Vec<HighlightedRange>> {
let defs = find_defs(sema, token.clone(), offset); let defs = if let Some((range, resolution)) =
sema.check_for_format_args_template(token.clone(), offset)
{
match resolution.map(Definition::from) {
Some(def) => iter::once(def).collect(),
None => return Some(vec![HighlightedRange { range, category: None }]),
}
} else {
find_defs(sema, token.clone())
};
let usages = defs let usages = defs
.iter() .iter()
.filter_map(|&d| { .filter_map(|&d| {
@ -456,12 +466,8 @@ fn cover_range(r0: Option<TextRange>, r1: Option<TextRange>) -> Option<TextRange
} }
} }
fn find_defs( fn find_defs(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> FxHashSet<Definition> {
sema: &Semantics<'_, RootDatabase>, sema.descend_into_macros(DescendPreference::None, token)
token: SyntaxToken,
offset: TextSize,
) -> FxHashSet<Definition> {
sema.descend_into_macros(DescendPreference::None, token, offset)
.into_iter() .into_iter()
.filter_map(|token| IdentClass::classify_token(sema, &token)) .filter_map(|token| IdentClass::classify_token(sema, &token))
.map(IdentClass::definitions_no_ops) .map(IdentClass::definitions_no_ops)
@ -1620,6 +1626,23 @@ fn f2<T: Foo>(t: T) {
T::C; T::C;
T::f(); T::f();
} }
"#,
);
}
#[test]
fn implicit_format_args() {
check(
r#"
//- minicore: fmt
fn test() {
let a = "foo";
// ^
format_args!("hello {a} {a$0} {}", a);
// ^read
// ^read
// ^read
}
"#, "#,
); );
} }

View file

@ -150,6 +150,19 @@ fn hover_simple(
}); });
} }
if let Some((range, resolution)) =
sema.check_for_format_args_template(original_token.clone(), offset)
{
let res = hover_for_definition(
sema,
file_id,
Definition::from(resolution?),
&original_token.parent()?,
config,
)?;
return Some(RangeInfo::new(range, res));
}
let in_attr = original_token let in_attr = original_token
.parent_ancestors() .parent_ancestors()
.filter_map(ast::Item::cast) .filter_map(ast::Item::cast)
@ -164,7 +177,6 @@ fn hover_simple(
let descended = sema.descend_into_macros( let descended = sema.descend_into_macros(
if in_attr { DescendPreference::SameKind } else { DescendPreference::SameText }, if in_attr { DescendPreference::SameKind } else { DescendPreference::SameText },
original_token.clone(), original_token.clone(),
offset,
); );
let descended = || descended.iter(); let descended = || descended.iter();
@ -298,11 +310,11 @@ pub(crate) fn hover_for_definition(
sema: &Semantics<'_, RootDatabase>, sema: &Semantics<'_, RootDatabase>,
file_id: FileId, file_id: FileId,
definition: Definition, definition: Definition,
node: &SyntaxNode, scope_node: &SyntaxNode,
config: &HoverConfig, config: &HoverConfig,
) -> Option<HoverResult> { ) -> Option<HoverResult> {
let famous_defs = match &definition { let famous_defs = match &definition {
Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(node)?.krate())), Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(scope_node)?.krate())),
_ => None, _ => None,
}; };
render::definition(sema.db, definition, famous_defs.as_ref(), config).map(|markup| { render::definition(sema.db, definition, famous_defs.as_ref(), config).map(|markup| {

View file

@ -6613,3 +6613,63 @@ fn test() {
"#]], "#]],
); );
} }
// Hover on an implicit format-argument capture (cursor after the captured
// identifier) should resolve to the captured local binding.
#[test]
fn format_args_implicit() {
check(
r#"
//- minicore: fmt
fn test() {
let aaaaa = "foo";
format_args!("{aaaaa$0}");
}
"#,
expect![[r#"
*aaaaa*
```rust
let aaaaa: &str // size = 16 (0x10), align = 8, niches = 1
```
"#]],
);
}
// Same as `format_args_implicit`, but with the cursor placed *before* the
// captured identifier inside the braces.
#[test]
fn format_args_implicit2() {
check(
r#"
//- minicore: fmt
fn test() {
let aaaaa = "foo";
format_args!("{$0aaaaa}");
}
"#,
expect![[r#"
*aaaaa*
```rust
let aaaaa: &str // size = 16 (0x10), align = 8, niches = 1
```
"#]],
);
}
// Implicit capture hover must also work when the template is a *raw* string
// literal (different quote offsets than a plain string).
#[test]
fn format_args_implicit_raw() {
check(
r#"
//- minicore: fmt
fn test() {
let aaaaa = "foo";
format_args!(r"{$0aaaaa}");
}
"#,
expect![[r#"
*aaaaa*
```rust
let aaaaa: &str // size = 16 (0x10), align = 8, niches = 1
```
"#]],
);
}

View file

@ -99,7 +99,7 @@ pub(crate) fn moniker(
}); });
} }
let navs = sema let navs = sema
.descend_into_macros(DescendPreference::None, original_token.clone(), offset) .descend_into_macros(DescendPreference::None, original_token.clone())
.into_iter() .into_iter()
.filter_map(|token| { .filter_map(|token| {
IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops).map(|it| { IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops).map(|it| {

View file

@ -109,7 +109,7 @@ pub(crate) fn find_all_refs(
} }
None => { None => {
let search = make_searcher(false); let search = make_searcher(false);
Some(find_defs(sema, &syntax, position.offset)?.map(search).collect()) Some(find_defs(sema, &syntax, position.offset)?.into_iter().map(search).collect())
} }
} }
} }
@ -118,15 +118,27 @@ pub(crate) fn find_defs<'a>(
sema: &'a Semantics<'_, RootDatabase>, sema: &'a Semantics<'_, RootDatabase>,
syntax: &SyntaxNode, syntax: &SyntaxNode,
offset: TextSize, offset: TextSize,
) -> Option<impl Iterator<Item = Definition> + 'a> { ) -> Option<impl IntoIterator<Item = Definition> + 'a> {
let token = syntax.token_at_offset(offset).find(|t| { let token = syntax.token_at_offset(offset).find(|t| {
matches!( matches!(
t.kind(), t.kind(),
IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self] IDENT
| INT_NUMBER
| LIFETIME_IDENT
| STRING
| T![self]
| T![super]
| T![crate]
| T![Self]
) )
}); })?;
token.map(|token| {
sema.descend_into_macros(DescendPreference::SameText, token, offset) if let Some((_, resolution)) = sema.check_for_format_args_template(token.clone(), offset) {
return resolution.map(Definition::from).map(|it| vec![it]);
}
Some(
sema.descend_into_macros(DescendPreference::SameText, token)
.into_iter() .into_iter()
.filter_map(|it| ast::NameLike::cast(it.parent()?)) .filter_map(|it| ast::NameLike::cast(it.parent()?))
.filter_map(move |name_like| { .filter_map(move |name_like| {
@ -162,7 +174,8 @@ pub(crate) fn find_defs<'a>(
}; };
Some(def) Some(def)
}) })
}) .collect(),
)
} }
pub(crate) fn decl_mutability(def: &Definition, syntax: &SyntaxNode, range: TextRange) -> bool { pub(crate) fn decl_mutability(def: &Definition, syntax: &SyntaxNode, range: TextRange) -> bool {
@ -2092,4 +2105,27 @@ fn main() { r#fn(); }
"#]], "#]],
); );
} }
// Find-all-references on an implicit format capture: both implicit uses
// inside the template and the explicit trailing argument are reported as
// reads of the local `a`.
#[test]
fn implicit_format_args() {
check(
r#"
//- minicore: fmt
fn test() {
let a = "foo";
format_args!("hello {a} {a$0} {}", a);
// ^
// ^
// ^
}
"#,
expect![[r#"
a Local FileId(0) 20..21 20..21
FileId(0) 56..57 Read
FileId(0) 60..61 Read
FileId(0) 68..69 Read
"#]],
);
}
} }

View file

@ -6,14 +6,16 @@
use hir::{AsAssocItem, HirFileIdExt, InFile, Semantics}; use hir::{AsAssocItem, HirFileIdExt, InFile, Semantics};
use ide_db::{ use ide_db::{
base_db::FileId, base_db::{FileId, FileRange},
defs::{Definition, NameClass, NameRefClass}, defs::{Definition, NameClass, NameRefClass},
rename::{bail, format_err, source_edit_from_references, IdentifierKind}, rename::{bail, format_err, source_edit_from_references, IdentifierKind},
RootDatabase, RootDatabase,
}; };
use itertools::Itertools; use itertools::Itertools;
use stdx::{always, never}; use stdx::{always, never};
use syntax::{ast, utils::is_raw_identifier, AstNode, SmolStr, SyntaxNode, TextRange, TextSize}; use syntax::{
ast, utils::is_raw_identifier, AstNode, SmolStr, SyntaxKind, SyntaxNode, TextRange, TextSize,
};
use text_edit::TextEdit; use text_edit::TextEdit;
@ -34,23 +36,20 @@ pub(crate) fn prepare_rename(
let syntax = source_file.syntax(); let syntax = source_file.syntax();
let res = find_definitions(&sema, syntax, position)? let res = find_definitions(&sema, syntax, position)?
.map(|(name_like, def)| { .map(|(frange, kind, def)| {
// ensure all ranges are valid // ensure all ranges are valid
if def.range_for_rename(&sema).is_none() { if def.range_for_rename(&sema).is_none() {
bail!("No references found at position") bail!("No references found at position")
} }
let Some(frange) = sema.original_range_opt(name_like.syntax()) else {
bail!("No references found at position");
};
always!( always!(
frange.range.contains_inclusive(position.offset) frange.range.contains_inclusive(position.offset)
&& frange.file_id == position.file_id && frange.file_id == position.file_id
); );
Ok(match name_like { Ok(match kind {
ast::NameLike::Lifetime(_) => { SyntaxKind::LIFETIME => {
TextRange::new(frange.range.start() + TextSize::from(1), frange.range.end()) TextRange::new(frange.range.start() + TextSize::from(1), frange.range.end())
} }
_ => frange.range, _ => frange.range,
@ -93,7 +92,7 @@ pub(crate) fn rename(
let defs = find_definitions(&sema, syntax, position)?; let defs = find_definitions(&sema, syntax, position)?;
let ops: RenameResult<Vec<SourceChange>> = defs let ops: RenameResult<Vec<SourceChange>> = defs
.map(|(_namelike, def)| { .map(|(.., def)| {
if let Definition::Local(local) = def { if let Definition::Local(local) = def {
if let Some(self_param) = local.as_self_param(sema.db) { if let Some(self_param) = local.as_self_param(sema.db) {
cov_mark::hit!(rename_self_to_param); cov_mark::hit!(rename_self_to_param);
@ -134,11 +133,27 @@ pub(crate) fn will_rename_file(
fn find_definitions( fn find_definitions(
sema: &Semantics<'_, RootDatabase>, sema: &Semantics<'_, RootDatabase>,
syntax: &SyntaxNode, syntax: &SyntaxNode,
position: FilePosition, FilePosition { file_id, offset }: FilePosition,
) -> RenameResult<impl Iterator<Item = (ast::NameLike, Definition)>> { ) -> RenameResult<impl Iterator<Item = (FileRange, SyntaxKind, Definition)>> {
let symbols = sema let token = syntax.token_at_offset(offset).find(|t| matches!(t.kind(), SyntaxKind::STRING));
.find_nodes_at_offset_with_descend::<ast::NameLike>(syntax, position.offset)
.map(|name_like| { if let Some((range, Some(resolution))) =
token.and_then(|token| sema.check_for_format_args_template(token, offset))
{
return Ok(vec![(
FileRange { file_id, range },
SyntaxKind::STRING,
Definition::from(resolution),
)]
.into_iter());
}
let symbols =
sema.find_nodes_at_offset_with_descend::<ast::NameLike>(syntax, offset).map(|name_like| {
let kind = name_like.syntax().kind();
let range = sema
.original_range_opt(name_like.syntax())
.ok_or_else(|| format_err!("No references found at position"))?;
let res = match &name_like { let res = match &name_like {
// renaming aliases would rename the item being aliased as the HIR doesn't track aliases yet // renaming aliases would rename the item being aliased as the HIR doesn't track aliases yet
ast::NameLike::Name(name) ast::NameLike::Name(name)
@ -163,7 +178,6 @@ fn find_definitions(
Definition::Local(local_def) Definition::Local(local_def)
} }
}) })
.map(|def| (name_like.clone(), def))
.ok_or_else(|| format_err!("No references found at position")), .ok_or_else(|| format_err!("No references found at position")),
ast::NameLike::NameRef(name_ref) => { ast::NameLike::NameRef(name_ref) => {
NameRefClass::classify(sema, name_ref) NameRefClass::classify(sema, name_ref)
@ -187,7 +201,7 @@ fn find_definitions(
{ {
Err(format_err!("Renaming aliases is currently unsupported")) Err(format_err!("Renaming aliases is currently unsupported"))
} else { } else {
Ok((name_like.clone(), def)) Ok(def)
} }
}) })
} }
@ -203,11 +217,10 @@ fn find_definitions(
_ => None, _ => None,
}) })
}) })
.map(|def| (name_like, def))
.ok_or_else(|| format_err!("No references found at position")) .ok_or_else(|| format_err!("No references found at position"))
} }
}; };
res res.map(|def| (range, kind, def))
}); });
let res: RenameResult<Vec<_>> = symbols.collect(); let res: RenameResult<Vec<_>> = symbols.collect();
@ -218,7 +231,7 @@ fn find_definitions(
Err(format_err!("No references found at position")) Err(format_err!("No references found at position"))
} else { } else {
// remove duplicates, comparing `Definition`s // remove duplicates, comparing `Definition`s
Ok(v.into_iter().unique_by(|t| t.1)) Ok(v.into_iter().unique_by(|&(.., def)| def).collect::<Vec<_>>().into_iter())
} }
} }
Err(e) => Err(e), Err(e) => Err(e),
@ -2663,4 +2676,44 @@ struct A;
"error: Cannot rename a non-local definition.", "error: Cannot rename a non-local definition.",
) )
} }
// Renaming a local referenced implicitly from a format string must rewrite
// the occurrences inside the template as well as the explicit argument.
#[test]
fn implicit_format_args() {
check(
"fbar",
r#"
//- minicore: fmt
fn test() {
let foo = "foo";
format_args!("hello {foo} {foo$0} {}", foo);
}
"#,
r#"
fn test() {
let fbar = "foo";
format_args!("hello {fbar} {fbar} {}", fbar);
}
"#,
);
}
// As above, but renaming to a *shorter* identifier — exercises range
// bookkeeping when the replacement text shrinks the format string.
#[test]
fn implicit_format_args2() {
check(
"fo",
r#"
//- minicore: fmt
fn test() {
let foo = "foo";
format_args!("hello {foo} {foo$0} {}", foo);
}
"#,
r#"
fn test() {
let fo = "foo";
format_args!("hello {fo} {fo} {}", fo);
}
"#,
);
}
} }

View file

@ -9,7 +9,7 @@ use ide_db::{
defs::Definition, defs::Definition,
documentation::docs_from_attrs, documentation::docs_from_attrs,
helpers::visit_file_defs, helpers::visit_file_defs,
search::SearchScope, search::{FileReferenceNode, SearchScope},
FxHashMap, FxHashSet, RootDatabase, SymbolKind, FxHashMap, FxHashSet, RootDatabase, SymbolKind,
}; };
use itertools::Itertools; use itertools::Itertools;
@ -240,7 +240,7 @@ fn find_related_tests(
.flatten(); .flatten();
for ref_ in defs { for ref_ in defs {
let name_ref = match ref_.name { let name_ref = match ref_.name {
ast::NameLike::NameRef(name_ref) => name_ref, FileReferenceNode::NameRef(name_ref) => name_ref,
_ => continue, _ => continue,
}; };
if let Some(fn_def) = if let Some(fn_def) =

View file

@ -82,7 +82,7 @@ pub(crate) fn signature_help(
// if the cursor is sandwiched between two space tokens and the call is unclosed // if the cursor is sandwiched between two space tokens and the call is unclosed
// this prevents us from leaving the CallExpression // this prevents us from leaving the CallExpression
.and_then(|tok| algo::skip_trivia_token(tok, Direction::Prev))?; .and_then(|tok| algo::skip_trivia_token(tok, Direction::Prev))?;
let token = sema.descend_into_macros_single(DescendPreference::None, token, offset); let token = sema.descend_into_macros_single(DescendPreference::None, token);
for node in token.parent_ancestors() { for node in token.parent_ancestors() {
match_ast! { match_ast! {

View file

@ -399,7 +399,6 @@ fn traverse(
Some(AttrOrDerive::Derive(_)) | None => DescendPreference::None, Some(AttrOrDerive::Derive(_)) | None => DescendPreference::None,
}, },
token, token,
0.into(),
); );
match token.parent().and_then(ast::NameLike::cast) { match token.parent().and_then(ast::NameLike::cast) {
// Remap the token into the wrapping single token nodes // Remap the token into the wrapping single token nodes

View file

@ -35,6 +35,7 @@ impl<S: Span> SpanMap<S> {
/// ///
/// Note this does a linear search through the entire backing vector. /// Note this does a linear search through the entire backing vector.
pub fn ranges_with_span(&self, span: S) -> impl Iterator<Item = TextRange> + '_ { pub fn ranges_with_span(&self, span: S) -> impl Iterator<Item = TextRange> + '_ {
// FIXME: This should ignore the syntax context!
self.spans.iter().enumerate().filter_map(move |(idx, &(end, s))| { self.spans.iter().enumerate().filter_map(move |(idx, &(end, s))| {
if s != span { if s != span {
return None; return None;

View file

@ -121,6 +121,7 @@ impl ast::Whitespace {
} }
} }
#[derive(Debug)]
pub struct QuoteOffsets { pub struct QuoteOffsets {
pub quotes: (TextRange, TextRange), pub quotes: (TextRange, TextRange),
pub contents: TextRange, pub contents: TextRange,
@ -167,6 +168,11 @@ pub trait IsString: AstToken {
fn text_range_between_quotes(&self) -> Option<TextRange> { fn text_range_between_quotes(&self) -> Option<TextRange> {
self.quote_offsets().map(|it| it.contents) self.quote_offsets().map(|it| it.contents)
} }
/// Returns the literal's text with the delimiting quotes stripped.
///
/// When the quote offsets cannot be determined (e.g. an unterminated
/// literal), the full, unmodified token text is returned instead.
fn text_without_quotes(&self) -> &str {
    let full = self.text();
    match self.text_range_between_quotes() {
        // Re-base the contents range from file coordinates to
        // token-local coordinates before slicing.
        Some(contents) => &full[contents - self.syntax().text_range().start()],
        None => full,
    }
}
fn open_quote_text_range(&self) -> Option<TextRange> { fn open_quote_text_range(&self) -> Option<TextRange> {
self.quote_offsets().map(|it| it.quotes.0) self.quote_offsets().map(|it| it.quotes.0)
} }

View file

@ -13,7 +13,7 @@ pub(crate) enum Repr<'a> {
} }
impl<'a> TokenText<'a> { impl<'a> TokenText<'a> {
pub(crate) fn borrowed(text: &'a str) -> Self { pub fn borrowed(text: &'a str) -> Self {
TokenText(Repr::Borrowed(text)) TokenText(Repr::Borrowed(text))
} }