Implicit format args support

Lukas Wirth 2023-12-05 15:42:39 +01:00
parent 5b8e386bae
commit d2cd30007c
37 changed files with 615 additions and 174 deletions

View file

@ -95,6 +95,8 @@ pub struct BodySourceMap {
field_map_back: FxHashMap<ExprId, FieldSource>,
pat_field_map_back: FxHashMap<PatId, PatFieldSource>,
format_args_template_map: FxHashMap<ExprId, Vec<(syntax::TextRange, Name)>>,
expansions: FxHashMap<InFile<AstPtr<ast::MacroCall>>, HirFileId>,
/// Diagnostics accumulated during body lowering. These contain `AstPtr`s and so are stored in
@ -387,6 +389,14 @@ impl BodySourceMap {
self.expr_map.get(&src).copied()
}
pub fn implicit_format_args(
&self,
node: InFile<&ast::FormatArgsExpr>,
) -> Option<&[(syntax::TextRange, Name)]> {
let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::Expr>);
self.format_args_template_map.get(self.expr_map.get(&src)?).map(std::ops::Deref::deref)
}
/// Get a reference to the body source map's diagnostics.
pub fn diagnostics(&self) -> &[BodyDiagnostic] {
&self.diagnostics
@ -403,8 +413,10 @@ impl BodySourceMap {
field_map_back,
pat_field_map_back,
expansions,
format_args_template_map,
diagnostics,
} = self;
format_args_template_map.shrink_to_fit();
expr_map.shrink_to_fit();
expr_map_back.shrink_to_fit();
pat_map.shrink_to_fit();

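The new `format_args_template_map` records, for every lowered `format_args!` expression, the implicitly captured names together with their ranges inside the template string, and `implicit_format_args` hands that list back out. Below is a standalone sketch of that shape using plain `std` types as stand-ins for `ExprId`, `TextRange` and `Name`; all values are illustrative.

```rust
use std::collections::HashMap;
use std::ops::Range;

// Stand-ins for the real rust-analyzer types.
type ExprId = u32;
type Name = String;

// Per expression: (range inside the template string contents, captured name).
struct SourceMapModel {
    format_args_template_map: HashMap<ExprId, Vec<(Range<u32>, Name)>>,
}

impl SourceMapModel {
    // Mirrors `implicit_format_args`: return the captures recorded for one
    // `format_args!` expression, if any were collected during lowering.
    fn implicit_format_args(&self, expr: ExprId) -> Option<&[(Range<u32>, Name)]> {
        self.format_args_template_map.get(&expr).map(Vec::as_slice)
    }
}

fn main() {
    // `format_args!("hello {a}")`: the capture `a` sits at bytes 7..8 of the contents.
    let mut map = HashMap::new();
    map.insert(0, vec![(7..8, "a".to_owned())]);
    let source_map = SourceMapModel { format_args_template_map: map };
    assert_eq!(source_map.implicit_format_args(0).unwrap().len(), 1);
}
```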
View file

@ -1597,12 +1597,20 @@ impl ExprCollector<'_> {
});
let template = f.template();
let fmt_snippet = template.as_ref().map(ToString::to_string);
let mut mappings = vec![];
let fmt = match template.and_then(|it| self.expand_macros_to_string(it)) {
Some((s, is_direct_literal)) => {
format_args::parse(&s, fmt_snippet, args, is_direct_literal, |name| {
self.alloc_expr_desugared(Expr::Path(Path::from(name)))
})
}
Some((s, is_direct_literal)) => format_args::parse(
&s,
fmt_snippet,
args,
is_direct_literal,
|name| self.alloc_expr_desugared(Expr::Path(Path::from(name))),
|name, span| {
if let Some(span) = span {
mappings.push((span, name.clone()))
}
},
),
None => FormatArgs { template: Default::default(), arguments: args.finish() },
};
@ -1746,14 +1754,16 @@ impl ExprCollector<'_> {
tail: Some(unsafe_arg_new),
});
self.alloc_expr(
let idx = self.alloc_expr(
Expr::Call {
callee: new_v1_formatted,
args: Box::new([lit_pieces, args, format_options, unsafe_arg_new]),
is_assignee_expr: false,
},
syntax_ptr,
)
);
self.source_map.format_args_template_map.insert(idx, mappings);
idx
}
/// Generate a hir expression for a format_args placeholder specification.

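In the lowering above, `format_args::parse` now takes a second callback that reports each implicitly captured name together with its span, and the collector pushes those pairs into `mappings` before storing them in the source map. A toy, standalone version of that callback shape follows; the scanning logic is illustrative only (no `{{` escapes, positional arguments or format specs).

```rust
use std::ops::Range;

// Toy stand-in for the parser: report every `{ident}` in the template through
// the `record_usage` callback, the way the lowering code collects `mappings`.
fn for_each_implicit_capture(template: &str, mut record_usage: impl FnMut(&str, Range<usize>)) {
    let mut rest = template;
    let mut base = 0;
    while let Some(open) = rest.find('{') {
        let Some(close) = rest[open..].find('}') else { break };
        let name = &rest[open + 1..open + close];
        if !name.is_empty() && name.chars().all(|c| c.is_alphanumeric() || c == '_') {
            record_usage(name, base + open + 1..base + open + close);
        }
        base += open + close + 1;
        rest = &template[base..];
    }
}

fn main() {
    let mut mappings = Vec::new();
    for_each_implicit_capture("hello {a} {}", |name, span| mappings.push((span, name.to_owned())));
    assert_eq!(mappings, vec![(7usize..8, "a".to_owned())]);
}
```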
View file

@ -160,7 +160,7 @@ fn main() {
let count = 10;
builtin#lang(Arguments::new_v1_formatted)(
&[
"\"hello ", " ", " friends, we ", " ", "", "\"",
"hello ", " ", " friends, we ", " ", "",
],
&[
builtin#lang(Argument::new_display)(
@ -261,7 +261,7 @@ impl SsrError {
_ = $crate::error::SsrError::new(
builtin#lang(Arguments::new_v1_formatted)(
&[
"\"Failed to resolve path `", "`\"",
"Failed to resolve path `", "`",
],
&[
builtin#lang(Argument::new_display)(
@ -320,7 +320,7 @@ fn f() {
$crate::panicking::panic_fmt(
builtin#lang(Arguments::new_v1_formatted)(
&[
"\"cc\"",
"cc",
],
&[],
&[],

View file

@ -5,7 +5,7 @@ use hir_expand::name::Name;
use rustc_dependencies::parse_format as parse;
use syntax::{
ast::{self, IsString},
AstToken, SmolStr, TextRange,
SmolStr, TextRange, TextSize,
};
use crate::hir::ExprId;
@ -170,15 +170,18 @@ pub(crate) fn parse(
mut args: FormatArgumentsCollector,
is_direct_literal: bool,
mut synth: impl FnMut(Name) -> ExprId,
mut record_usage: impl FnMut(Name, Option<TextRange>),
) -> FormatArgs {
let text = s.text();
let text = s.text_without_quotes();
let str_style = match s.quote_offsets() {
Some(offsets) => {
let raw = u32::from(offsets.quotes.0.len()) - 1;
(raw != 0).then_some(raw as usize)
// subtract 1 for the `r` prefix
(raw != 0).then(|| raw as usize - 1)
}
None => None,
};
let mut parser =
parse::Parser::new(text, str_style, fmt_snippet, false, parse::ParseMode::Format);
@ -199,6 +202,7 @@ pub(crate) fn parse(
let to_span = |inner_span: parse::InnerSpan| {
is_source_literal.then(|| {
TextRange::new(inner_span.start.try_into().unwrap(), inner_span.end.try_into().unwrap())
- TextSize::from(str_style.map(|it| it + 1).unwrap_or(0) as u32 + 1)
})
};
@ -230,9 +234,10 @@ pub(crate) fn parse(
Err(index)
}
}
ArgRef::Name(name, _span) => {
ArgRef::Name(name, span) => {
let name = Name::new_text_dont_use(SmolStr::new(name));
if let Some((index, _)) = args.by_name(&name) {
record_usage(name, span);
// Name found in `args`, so we resolve it to its index.
if index < args.explicit_args().len() {
// Mark it as used, if it was an explicit argument.
@ -246,6 +251,7 @@ pub(crate) fn parse(
// disabled (see RFC #2795)
// FIXME: Diagnose
}
record_usage(name.clone(), span);
Ok(args.add(FormatArgument {
kind: FormatArgumentKind::Captured(name.clone()),
// FIXME: This is problematic, we might want to synthesize a dummy

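A few details above are easy to miss: the parser now runs over `text_without_quotes`, `str_style` becomes the number of `#`s of a raw string, and `to_span` shifts the reported spans back by the length of the opening quote so that the stored ranges end up relative to the string contents. A standalone sketch of that arithmetic, with made-up offsets:

```rust
// Standalone sketch of the `to_span` rebasing: a span given against the quoted
// literal is shifted back by the opening-quote length so it becomes relative to
// the string contents. `hashes` plays the role of `str_style` (Some(n) for a
// raw string with n `#`s, None for a plain string).
fn rebase(start: u32, end: u32, hashes: Option<u32>) -> (u32, u32) {
    // `"` -> 1 byte, `r"` -> 2, `r#"` -> 3, ...
    let open_quote_len = hashes.map(|n| n + 1).unwrap_or(0) + 1;
    (start - open_quote_len, end - open_quote_len)
}

fn main() {
    // `{a}` in `"hello {a}"` starts at byte 7 of the literal, byte 6 of the contents.
    assert_eq!(rebase(7, 10, None), (6, 9));
    // `{a}` in `r#"hello {a}"#` starts at byte 9 of the literal, byte 6 of the contents.
    assert_eq!(rebase(9, 12, Some(1)), (6, 9));
}
```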
View file

@ -29,8 +29,9 @@ use smallvec::{smallvec, SmallVec};
use stdx::TupleExt;
use syntax::{
algo::skip_trivia_token,
ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody},
match_ast, AstNode, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize,
ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody, IsString as _},
match_ast, AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken,
TextRange, TextSize,
};
use crate::{
@ -49,7 +50,7 @@ pub enum DescendPreference {
None,
}
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum PathResolution {
/// An item
Def(ModuleDef),
@ -402,6 +403,41 @@ impl<'db> SemanticsImpl<'db> {
)
}
pub fn resolve_offset_in_format_args(
&self,
string: ast::String,
offset: TextSize,
) -> Option<(TextRange, Option<PathResolution>)> {
debug_assert!(offset <= string.syntax().text_range().len());
let literal = string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?;
let source_analyzer = &self.analyze_no_infer(format_args.syntax())?;
let format_args = self.wrap_node_infile(format_args);
source_analyzer.resolve_offset_in_format_args(self.db, format_args.as_ref(), offset)
}
pub fn check_for_format_args_template(
&self,
original_token: SyntaxToken,
offset: TextSize,
) -> Option<(TextRange, Option<PathResolution>)> {
if let Some(original_string) = ast::String::cast(original_token.clone()) {
if let Some(quote) = original_string.open_quote_text_range() {
return self
.descend_into_macros(DescendPreference::SameText, original_token.clone())
.into_iter()
.find_map(|token| {
self.resolve_offset_in_format_args(
ast::String::cast(token)?,
offset - quote.end(),
)
})
.map(|(range, res)| (range + quote.end(), res));
}
}
None
}
/// Maps a node down by mapping its first and last token down.
pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
// This might not be the correct way to do this, but it works for now
@ -419,8 +455,12 @@ impl<'db> SemanticsImpl<'db> {
if first == last {
// node is just the token, so descend the token
self.descend_into_macros_impl(first, 0.into(), &mut |InFile { value, .. }| {
if let Some(node) = value.parent_ancestors().find_map(N::cast) {
self.descend_into_macros_impl(first, &mut |InFile { value, .. }| {
if let Some(node) = value
.parent_ancestors()
.take_while(|it| it.text_range() == value.text_range())
.find_map(N::cast)
{
res.push(node)
}
ControlFlow::Continue(())
@ -428,7 +468,7 @@ impl<'db> SemanticsImpl<'db> {
} else {
// Descend first and last token, then zip them to look for the node they belong to
let mut scratch: SmallVec<[_; 1]> = smallvec![];
self.descend_into_macros_impl(first, 0.into(), &mut |token| {
self.descend_into_macros_impl(first, &mut |token| {
scratch.push(token);
ControlFlow::Continue(())
});
@ -436,7 +476,6 @@ impl<'db> SemanticsImpl<'db> {
let mut scratch = scratch.into_iter();
self.descend_into_macros_impl(
last,
0.into(),
&mut |InFile { value: last, file_id: last_fid }| {
if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
if first_fid == last_fid {
@ -467,7 +506,6 @@ impl<'db> SemanticsImpl<'db> {
&self,
mode: DescendPreference,
token: SyntaxToken,
offset: TextSize,
) -> SmallVec<[SyntaxToken; 1]> {
enum Dp<'t> {
SameText(&'t str),
@ -487,7 +525,7 @@ impl<'db> SemanticsImpl<'db> {
DescendPreference::None => Dp::None,
};
let mut res = smallvec![];
self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| {
self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
let is_a_match = match mode {
Dp::SameText(text) => value.text() == text,
Dp::SameKind(preferred_kind) => {
@ -513,7 +551,6 @@ impl<'db> SemanticsImpl<'db> {
&self,
mode: DescendPreference,
token: SyntaxToken,
offset: TextSize,
) -> SyntaxToken {
enum Dp<'t> {
SameText(&'t str),
@ -533,7 +570,7 @@ impl<'db> SemanticsImpl<'db> {
DescendPreference::None => Dp::None,
};
let mut res = token.clone();
self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| {
self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
let is_a_match = match mode {
Dp::SameText(text) => value.text() == text,
Dp::SameKind(preferred_kind) => {
@ -558,9 +595,6 @@ impl<'db> SemanticsImpl<'db> {
fn descend_into_macros_impl(
&self,
token: SyntaxToken,
// FIXME: We might want this to be Option<TextSize> to be able to opt out of subrange
// mapping, specifically for node downmapping
_offset: TextSize,
f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>,
) {
// FIXME: Clean this up
@ -729,7 +763,7 @@ impl<'db> SemanticsImpl<'db> {
offset: TextSize,
) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
node.token_at_offset(offset)
.map(move |token| self.descend_into_macros(DescendPreference::None, token, offset))
.map(move |token| self.descend_into_macros(DescendPreference::None, token))
.map(|descendants| {
descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it))
})

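`check_for_format_args_template` above is the bridge between file coordinates and the contents-relative ranges stored in the body source map: the cursor offset is rebased past the opening quote before the lookup, and the hit range is shifted back afterwards. A standalone sketch of that round trip, with plain `std` types and made-up offsets:

```rust
use std::ops::Range;

// Contents-relative capture ranges in, file-coordinate range out.
fn resolve_in_format_string<'a>(
    captures: &'a [(Range<u32>, &'a str)], // as stored by the source map (contents-relative)
    open_quote_end: u32,                   // absolute offset just past the opening `"`
    cursor: u32,                           // absolute cursor offset in the file
) -> Option<(Range<u32>, &'a str)> {
    let rel = cursor.checked_sub(open_quote_end)?;
    captures
        .iter()
        .find(|(range, _)| range.start <= rel && rel <= range.end)
        .map(|(range, name)| (range.start + open_quote_end..range.end + open_quote_end, *name))
}

fn main() {
    // `format_args!("hello {a}")` with the opening `"` ending at file offset 50:
    // the capture `a` is stored at contents range 7..8, i.e. file range 57..58.
    let captures = [(7..8, "a")];
    assert_eq!(resolve_in_format_string(&captures, 50, 57), Some((57..58, "a")));
}
```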
View file

@ -820,6 +820,29 @@ impl SourceAnalyzer {
false
}
pub(crate) fn resolve_offset_in_format_args(
&self,
db: &dyn HirDatabase,
format_args: InFile<&ast::FormatArgsExpr>,
offset: TextSize,
) -> Option<(TextRange, Option<PathResolution>)> {
let implicits = self.body_source_map()?.implicit_format_args(format_args)?;
implicits.iter().find(|(range, _)| range.contains_inclusive(offset)).map(|(range, name)| {
(
*range,
resolve_hir_value_path(
db,
&self.resolver,
self.resolver.body_owner(),
&Path::from_known_path_with_no_generic(ModPath::from_segments(
PathKind::Plain,
Some(name.clone()),
)),
),
)
})
}
fn resolve_impl_method_or_trait_def(
&self,
db: &dyn HirDatabase,
@ -1038,24 +1061,7 @@ fn resolve_hir_path_(
};
let body_owner = resolver.body_owner();
let values = || {
resolver.resolve_path_in_value_ns_fully(db.upcast(), path).and_then(|val| {
let res = match val {
ValueNs::LocalBinding(binding_id) => {
let var = Local { parent: body_owner?, binding_id };
PathResolution::Local(var)
}
ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()),
ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()),
ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()),
ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()),
ValueNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()),
ValueNs::ImplSelf(impl_id) => PathResolution::SelfType(impl_id.into()),
ValueNs::GenericParam(id) => PathResolution::ConstParam(id.into()),
};
Some(res)
})
};
let values = || resolve_hir_value_path(db, resolver, body_owner, path);
let items = || {
resolver
@ -1075,6 +1081,30 @@ fn resolve_hir_path_(
.or_else(macros)
}
fn resolve_hir_value_path(
db: &dyn HirDatabase,
resolver: &Resolver,
body_owner: Option<DefWithBodyId>,
path: &Path,
) -> Option<PathResolution> {
resolver.resolve_path_in_value_ns_fully(db.upcast(), path).and_then(|val| {
let res = match val {
ValueNs::LocalBinding(binding_id) => {
let var = Local { parent: body_owner?, binding_id };
PathResolution::Local(var)
}
ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()),
ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()),
ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()),
ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()),
ValueNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()),
ValueNs::ImplSelf(impl_id) => PathResolution::SelfType(impl_id.into()),
ValueNs::GenericParam(id) => PathResolution::ConstParam(id.into()),
};
Some(res)
})
}
/// Resolves a path where we know it is a qualifier of another path.
///
/// For example, if we have:

View file

@ -328,6 +328,7 @@ fn augment_references_with_imports(
references
.iter()
.filter_map(|FileReference { range, name, .. }| {
let name = name.clone().into_name_like()?;
ctx.sema.scope(name.syntax()).map(|scope| (*range, name, scope.module()))
})
.map(|(range, name, ref_module)| {
@ -455,6 +456,7 @@ fn add_enum_def(
.iter()
.flat_map(|(_, refs)| refs)
.filter_map(|FileReference { name, .. }| {
let name = name.clone().into_name_like()?;
ctx.sema.scope(name.syntax()).map(|scope| scope.module())
})
.any(|module| module.nearest_non_block_module(ctx.db()) != *target_module);

View file

@ -186,6 +186,7 @@ fn augment_references_with_imports(
references
.iter()
.filter_map(|FileReference { name, .. }| {
let name = name.clone().into_name_like()?;
ctx.sema.scope(name.syntax()).map(|scope| (name, scope.module()))
})
.map(|(name, ref_module)| {
@ -238,6 +239,7 @@ fn add_tuple_struct_def(
.iter()
.flat_map(|(_, refs)| refs)
.filter_map(|FileReference { name, .. }| {
let name = name.clone().into_name_like()?;
ctx.sema.scope(name.syntax()).map(|scope| scope.module())
})
.any(|module| module.nearest_non_block_module(ctx.db()) != *target_module);

View file

@ -35,11 +35,10 @@ pub(crate) fn extract_expressions_from_format_string(
let fmt_string = ctx.find_token_at_offset::<ast::String>()?;
let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?;
let expanded_t = ast::String::cast(ctx.sema.descend_into_macros_single(
DescendPreference::SameKind,
fmt_string.syntax().clone(),
0.into(),
))?;
let expanded_t = ast::String::cast(
ctx.sema
.descend_into_macros_single(DescendPreference::SameKind, fmt_string.syntax().clone()),
)?;
if !is_format_string(&expanded_t) {
return None;
}

View file

@ -751,9 +751,7 @@ impl FunctionBody {
.descendants_with_tokens()
.filter_map(SyntaxElement::into_token)
.filter(|it| matches!(it.kind(), SyntaxKind::IDENT | T![self]))
.flat_map(|t| {
sema.descend_into_macros(DescendPreference::None, t, 0.into())
})
.flat_map(|t| sema.descend_into_macros(DescendPreference::None, t))
.for_each(|t| add_name_if_local(t.parent().and_then(ast::NameRef::cast)));
}
}

View file

@ -8,7 +8,7 @@ use ide_db::{
defs::Definition,
imports::insert_use::remove_path_if_in_use_stmt,
path_transform::PathTransform,
search::{FileReference, SearchScope},
search::{FileReference, FileReferenceNode, SearchScope},
source_change::SourceChangeBuilder,
syntax_helpers::{insert_whitespace_into_node::insert_ws_into, node_ext::expr_as_name_ref},
RootDatabase,
@ -148,7 +148,7 @@ pub(super) fn split_refs_and_uses<T: ast::AstNode>(
) -> (Vec<T>, Vec<ast::Path>) {
iter.into_iter()
.filter_map(|file_ref| match file_ref.name {
ast::NameLike::NameRef(name_ref) => Some(name_ref),
FileReferenceNode::NameRef(name_ref) => Some(name_ref),
_ => None,
})
.filter_map(|name_ref| match name_ref.syntax().ancestors().find_map(ast::UseTree::cast) {
@ -346,7 +346,7 @@ fn inline(
match param.as_local(sema.db) {
Some(l) => usages_for_locals(l)
.map(|FileReference { name, range, .. }| match name {
ast::NameLike::NameRef(_) => body
FileReferenceNode::NameRef(_) => body
.syntax()
.covering_element(range)
.ancestors()
@ -372,7 +372,7 @@ fn inline(
if let Some(self_local) = params[0].2.as_local(sema.db) {
usages_for_locals(self_local)
.filter_map(|FileReference { name, range, .. }| match name {
ast::NameLike::NameRef(_) => Some(body.syntax().covering_element(range)),
FileReferenceNode::NameRef(_) => Some(body.syntax().covering_element(range)),
_ => None,
})
.for_each(|usage| {

View file

@ -2,7 +2,7 @@ use hir::{PathResolution, Semantics};
use ide_db::{
base_db::FileId,
defs::Definition,
search::{FileReference, UsageSearchResult},
search::{FileReference, FileReferenceNode, UsageSearchResult},
RootDatabase,
};
use syntax::{
@ -63,7 +63,7 @@ pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext<'_>)
let wrap_in_parens = references
.into_iter()
.filter_map(|FileReference { range, name, .. }| match name {
ast::NameLike::NameRef(name) => Some((range, name)),
FileReferenceNode::NameRef(name) => Some((range, name)),
_ => None,
})
.map(|(range, name_ref)| {

View file

@ -59,7 +59,10 @@ pub(crate) fn replace_named_generic_with_impl(
let mut path_types_to_replace = Vec::new();
for (_a, refs) in usage_refs.iter() {
for usage_ref in refs {
let param_node = find_path_type(&ctx.sema, &type_param_name, &usage_ref.name)?;
let Some(name_like) = usage_ref.name.clone().into_name_like() else {
continue;
};
let param_node = find_path_type(&ctx.sema, &type_param_name, &name_like)?;
path_types_to_replace.push(param_node);
}
}

View file

@ -2,11 +2,11 @@ use ide_db::{
assists::{AssistId, AssistKind},
base_db::FileId,
defs::Definition,
search::FileReference,
search::{FileReference, FileReferenceNode},
syntax_helpers::node_ext::full_path_of_name_ref,
};
use syntax::{
ast::{self, NameLike, NameRef},
ast::{self, NameRef},
AstNode, SyntaxKind, TextRange,
};
@ -76,7 +76,7 @@ pub(crate) fn unnecessary_async(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
for await_expr in find_all_references(ctx, &Definition::Function(fn_def))
// Keep only references that correspond to NameRefs.
.filter_map(|(_, reference)| match reference.name {
NameLike::NameRef(nameref) => Some(nameref),
FileReferenceNode::NameRef(nameref) => Some(nameref),
_ => None,
})
// Keep only references that correspond to await expressions

View file

@ -117,7 +117,7 @@ pub fn get_definition(
sema: &Semantics<'_, RootDatabase>,
token: SyntaxToken,
) -> Option<Definition> {
for token in sema.descend_into_macros(DescendPreference::None, token, 0.into()) {
for token in sema.descend_into_macros(DescendPreference::None, token) {
let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops);
if let Some(&[x]) = def.as_deref() {
return Some(x);

View file

@ -34,7 +34,7 @@ use text_edit::{TextEdit, TextEditBuilder};
use crate::{
defs::Definition,
search::FileReference,
search::{FileReference, FileReferenceNode},
source_change::{FileSystemEdit, SourceChange},
syntax_helpers::node_ext::expr_as_name_ref,
traits::convert_to_def_in_trait,
@ -361,7 +361,7 @@ pub fn source_edit_from_references(
// macros can cause multiple refs to occur for the same text range, so keep track of what we have edited so far
let mut edited_ranges = Vec::new();
for &FileReference { range, ref name, .. } in references {
let name_range = name.syntax().text_range();
let name_range = name.text_range();
if name_range.len() != range.len() {
// This usage comes from a different token kind that was downmapped to a NameLike in a macro
// Renaming this will most likely break things syntax-wise
@ -371,17 +371,17 @@ pub fn source_edit_from_references(
// if the ranges differ then the node is inside a macro call, we can't really attempt
// to make special rewrites like shorthand syntax and such, so just rename the node in
// the macro input
ast::NameLike::NameRef(name_ref) if name_range == range => {
FileReferenceNode::NameRef(name_ref) if name_range == range => {
source_edit_from_name_ref(&mut edit, name_ref, new_name, def)
}
ast::NameLike::Name(name) if name_range == range => {
FileReferenceNode::Name(name) if name_range == range => {
source_edit_from_name(&mut edit, name, new_name)
}
_ => false,
};
if !has_emitted_edit && !edited_ranges.contains(&range.start()) {
let (range, new_name) = match name {
ast::NameLike::Lifetime(_) => (
FileReferenceNode::Lifetime(_) => (
TextRange::new(range.start() + syntax::TextSize::from(1), range.end()),
new_name.strip_prefix('\'').unwrap_or(new_name).to_owned(),
),

View file

@ -9,13 +9,13 @@ use std::mem;
use base_db::{salsa::Database, FileId, FileRange, SourceDatabase, SourceDatabaseExt};
use hir::{
AsAssocItem, DefWithBody, DescendPreference, HasAttrs, HasSource, HirFileIdExt, InFile,
InRealFile, ModuleSource, Semantics, Visibility,
InRealFile, ModuleSource, PathResolution, Semantics, Visibility,
};
use memchr::memmem::Finder;
use nohash_hasher::IntMap;
use once_cell::unsync::Lazy;
use parser::SyntaxKind;
use syntax::{ast, match_ast, AstNode, TextRange, TextSize};
use syntax::{ast, match_ast, AstNode, AstToken, SyntaxElement, TextRange, TextSize};
use triomphe::Arc;
use crate::{
@ -63,10 +63,67 @@ pub struct FileReference {
/// The range of the reference in the original file
pub range: TextRange,
/// The node of the reference in the (macro-)file
pub name: ast::NameLike,
pub name: FileReferenceNode,
pub category: Option<ReferenceCategory>,
}
#[derive(Debug, Clone)]
pub enum FileReferenceNode {
Name(ast::Name),
NameRef(ast::NameRef),
Lifetime(ast::Lifetime),
FormatStringEntry(ast::String, TextRange),
}
impl FileReferenceNode {
pub fn text_range(&self) -> TextRange {
match self {
FileReferenceNode::Name(it) => it.syntax().text_range(),
FileReferenceNode::NameRef(it) => it.syntax().text_range(),
FileReferenceNode::Lifetime(it) => it.syntax().text_range(),
FileReferenceNode::FormatStringEntry(_, range) => *range,
}
}
pub fn syntax(&self) -> SyntaxElement {
match self {
FileReferenceNode::Name(it) => it.syntax().clone().into(),
FileReferenceNode::NameRef(it) => it.syntax().clone().into(),
FileReferenceNode::Lifetime(it) => it.syntax().clone().into(),
FileReferenceNode::FormatStringEntry(it, _) => it.syntax().clone().into(),
}
}
pub fn into_name_like(self) -> Option<ast::NameLike> {
match self {
FileReferenceNode::Name(it) => Some(ast::NameLike::Name(it)),
FileReferenceNode::NameRef(it) => Some(ast::NameLike::NameRef(it)),
FileReferenceNode::Lifetime(it) => Some(ast::NameLike::Lifetime(it)),
FileReferenceNode::FormatStringEntry(_, _) => None,
}
}
pub fn as_name_ref(&self) -> Option<&ast::NameRef> {
match self {
FileReferenceNode::NameRef(name_ref) => Some(name_ref),
_ => None,
}
}
pub fn as_lifetime(&self) -> Option<&ast::Lifetime> {
match self {
FileReferenceNode::Lifetime(lifetime) => Some(lifetime),
_ => None,
}
}
pub fn text(&self) -> syntax::TokenText<'_> {
match self {
FileReferenceNode::NameRef(name_ref) => name_ref.text(),
FileReferenceNode::Name(name) => name.text(),
FileReferenceNode::Lifetime(lifetime) => lifetime.text(),
FileReferenceNode::FormatStringEntry(it, range) => {
syntax::TokenText::borrowed(&it.text()[*range - it.syntax().text_range().start()])
}
}
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum ReferenceCategory {
// FIXME: Add this variant and delete the `retain_adt_literal_usages` function.
@ -467,7 +524,7 @@ impl<'a> FindUsages<'a> {
// every textual hit. That function is notoriously
// expensive even for things that do not get down mapped
// into macros.
sema.descend_into_macros(DescendPreference::None, token, offset)
sema.descend_into_macros(DescendPreference::None, token)
.into_iter()
.filter_map(|it| it.parent())
})
@ -479,6 +536,17 @@ impl<'a> FindUsages<'a> {
// Search for occurrences of the items name
for offset in match_indices(&text, finder, search_range) {
tree.token_at_offset(offset).into_iter().for_each(|token| {
let Some(str_token) = ast::String::cast(token.clone()) else { return };
if let Some((range, nameres)) =
sema.check_for_format_args_template(token.clone(), offset)
{
if self.found_format_args_ref(file_id, range, str_token, nameres, sink) {
return;
}
}
});
for name in find_nodes(name, &tree, offset).filter_map(ast::NameLike::cast) {
if match name {
ast::NameLike::NameRef(name_ref) => self.found_name_ref(&name_ref, sink),
@ -593,7 +661,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
let reference = FileReference {
range,
name: ast::NameLike::NameRef(name_ref.clone()),
name: FileReferenceNode::NameRef(name_ref.clone()),
category: None,
};
sink(file_id, reference)
@ -612,7 +680,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
let reference = FileReference {
range,
name: ast::NameLike::NameRef(name_ref.clone()),
name: FileReferenceNode::NameRef(name_ref.clone()),
category: is_name_ref_in_import(name_ref).then_some(ReferenceCategory::Import),
};
sink(file_id, reference)
@ -621,6 +689,27 @@ impl<'a> FindUsages<'a> {
}
}
fn found_format_args_ref(
&self,
file_id: FileId,
range: TextRange,
token: ast::String,
res: Option<PathResolution>,
sink: &mut dyn FnMut(FileId, FileReference) -> bool,
) -> bool {
match res.map(Definition::from) {
Some(def) if def == self.def => {
let reference = FileReference {
range,
name: FileReferenceNode::FormatStringEntry(token, range),
category: Some(ReferenceCategory::Read),
};
sink(file_id, reference)
}
_ => false,
}
}
fn found_lifetime(
&self,
lifetime: &ast::Lifetime,
@ -631,7 +720,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(lifetime.syntax());
let reference = FileReference {
range,
name: ast::NameLike::Lifetime(lifetime.clone()),
name: FileReferenceNode::Lifetime(lifetime.clone()),
category: None,
};
sink(file_id, reference)
@ -655,7 +744,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
let reference = FileReference {
range,
name: ast::NameLike::NameRef(name_ref.clone()),
name: FileReferenceNode::NameRef(name_ref.clone()),
category: ReferenceCategory::new(&def, name_ref),
};
sink(file_id, reference)
@ -671,7 +760,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
let reference = FileReference {
range,
name: ast::NameLike::NameRef(name_ref.clone()),
name: FileReferenceNode::NameRef(name_ref.clone()),
category: ReferenceCategory::new(&def, name_ref),
};
sink(file_id, reference)
@ -681,7 +770,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
let reference = FileReference {
range,
name: ast::NameLike::NameRef(name_ref.clone()),
name: FileReferenceNode::NameRef(name_ref.clone()),
category: ReferenceCategory::new(&def, name_ref),
};
sink(file_id, reference)
@ -705,7 +794,7 @@ impl<'a> FindUsages<'a> {
};
let reference = FileReference {
range,
name: ast::NameLike::NameRef(name_ref.clone()),
name: FileReferenceNode::NameRef(name_ref.clone()),
category: access,
};
sink(file_id, reference)
@ -728,7 +817,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name.syntax());
let reference = FileReference {
range,
name: ast::NameLike::Name(name.clone()),
name: FileReferenceNode::Name(name.clone()),
// FIXME: mutable patterns should have `Write` access
category: Some(ReferenceCategory::Read),
};
@ -738,7 +827,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name.syntax());
let reference = FileReference {
range,
name: ast::NameLike::Name(name.clone()),
name: FileReferenceNode::Name(name.clone()),
category: None,
};
sink(file_id, reference)
@ -763,7 +852,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name.syntax());
let reference = FileReference {
range,
name: ast::NameLike::Name(name.clone()),
name: FileReferenceNode::Name(name.clone()),
category: None,
};
sink(file_id, reference)

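`FileReferenceNode::FormatStringEntry` keeps the reference range in file coordinates, so its `text()` arm rebases the range onto the string token before slicing, as sketched below with made-up offsets.

```rust
use std::ops::Range;

// Standalone sketch of the `FormatStringEntry` text slice: shift the absolute
// reference range by the token's start offset, then index into the token text.
fn reference_text(token_text: &str, token_start: u32, reference: Range<u32>) -> &str {
    let local = (reference.start - token_start) as usize..(reference.end - token_start) as usize;
    &token_text[local]
}

fn main() {
    // Token `"hello {a}"` starting at file offset 50: the reference to `a`
    // covers file range 58..59, i.e. token-local range 8..9.
    assert_eq!(reference_text("\"hello {a}\"", 50, 58..59), "a");
}
```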
View file

@ -87,7 +87,7 @@ pub(crate) fn outgoing_calls(
})?;
let mut calls = CallLocations::default();
sema.descend_into_macros(DescendPreference::None, token, offset)
sema.descend_into_macros(DescendPreference::None, token)
.into_iter()
.filter_map(|it| it.parent_ancestors().nth(1).and_then(ast::Item::cast))
.filter_map(|item| match item {

View file

@ -146,7 +146,7 @@ pub(crate) fn external_docs(
kind if kind.is_trivia() => 0,
_ => 1,
})?;
let token = sema.descend_into_macros_single(DescendPreference::None, token, offset);
let token = sema.descend_into_macros_single(DescendPreference::None, token);
let node = token.parent()?;
let definition = match_ast! {
@ -288,7 +288,7 @@ impl DocCommentToken {
let original_start = doc_token.text_range().start();
let relative_comment_offset = offset - original_start - prefix_len;
sema.descend_into_macros(DescendPreference::None,doc_token, offset).into_iter().find_map(|t| {
sema.descend_into_macros(DescendPreference::None, doc_token).into_iter().find_map(|t| {
let (node, descended_prefix_len) = match_ast! {
match t {
ast::Comment(comment) => (t.parent()?, TextSize::try_from(comment.prefix().len()).ok()?),

View file

@ -41,7 +41,7 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
// ```
let derive = sema
.descend_into_macros(DescendPreference::None, tok.clone(), 0.into())
.descend_into_macros(DescendPreference::None, tok.clone())
.into_iter()
.find_map(|descended| {
let hir_file = sema.hir_file_for(&descended.parent()?);

View file

@ -140,16 +140,10 @@ fn extend_tokens_from_range(
// compute original mapped token range
let extended = {
let fst_expanded = sema.descend_into_macros_single(
DescendPreference::None,
first_token.clone(),
original_range.start(),
);
let lst_expanded = sema.descend_into_macros_single(
DescendPreference::None,
last_token.clone(),
original_range.end(),
);
let fst_expanded =
sema.descend_into_macros_single(DescendPreference::None, first_token.clone());
let lst_expanded =
sema.descend_into_macros_single(DescendPreference::None, last_token.clone());
let mut lca =
algo::least_common_ancestor(&fst_expanded.parent()?, &lst_expanded.parent()?)?;
lca = shallowest_node(&lca);
@ -160,11 +154,10 @@ fn extend_tokens_from_range(
};
// Compute parent node range
let validate = |offset: TextSize| {
let validate = || {
let extended = &extended;
move |token: &SyntaxToken| -> bool {
let expanded =
sema.descend_into_macros_single(DescendPreference::None, token.clone(), offset);
let expanded = sema.descend_into_macros_single(DescendPreference::None, token.clone());
let parent = match expanded.parent() {
Some(it) => it,
None => return false,
@ -178,14 +171,14 @@ fn extend_tokens_from_range(
let token = token.prev_token()?;
skip_trivia_token(token, Direction::Prev)
})
.take_while(validate(original_range.start()))
.take_while(validate())
.last()?;
let last = successors(Some(last_token), |token| {
let token = token.next_token()?;
skip_trivia_token(token, Direction::Next)
})
.take_while(validate(original_range.end()))
.take_while(validate())
.last()?;
let range = first.text_range().cover(last.text_range());

View file

@ -29,7 +29,7 @@ pub(crate) fn goto_declaration(
.find(|it| matches!(it.kind(), IDENT | T![self] | T![super] | T![crate] | T![Self]))?;
let range = original_token.text_range();
let info: Vec<NavigationTarget> = sema
.descend_into_macros(DescendPreference::None, original_token, offset)
.descend_into_macros(DescendPreference::None, original_token)
.iter()
.filter_map(|token| {
let parent = token.parent()?;

View file

@ -55,8 +55,21 @@ pub(crate) fn goto_definition(
Some(RangeInfo::new(link_range, vec![nav]))
});
}
if let Some((range, resolution)) =
sema.check_for_format_args_template(original_token.clone(), offset)
{
return Some(RangeInfo::new(
range,
match resolution {
Some(res) => def_to_nav(db, Definition::from(res)),
None => vec![],
},
));
}
let navs = sema
.descend_into_macros(DescendPreference::None, original_token.clone(), offset)
.descend_into_macros(DescendPreference::None, original_token.clone())
.into_iter()
.filter_map(|token| {
let parent = token.parent()?;
@ -809,18 +822,13 @@ mod confuse_index { fn foo(); }
fn goto_through_format() {
check(
r#"
//- minicore: fmt
#[macro_export]
macro_rules! format {
($($arg:tt)*) => ($crate::fmt::format($crate::__export::format_args!($($arg)*)))
}
#[rustc_builtin_macro]
#[macro_export]
macro_rules! format_args {
($fmt:expr) => ({ /* compiler built-in */ });
($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ })
}
pub mod __export {
pub use crate::format_args;
pub use core::format_args;
fn foo() {} // for index confusion
}
fn foo() -> i8 {}
@ -2056,6 +2064,20 @@ fn f2() {
struct S2;
S1::e$0();
}
"#,
);
}
#[test]
fn implicit_format_args() {
check(
r#"
//- minicore: fmt
fn test() {
let a = "world";
// ^
format_args!("hello {a$0}");
}
"#,
);
}

View file

@ -34,7 +34,7 @@ pub(crate) fn goto_implementation(
})?;
let range = original_token.text_range();
let navs =
sema.descend_into_macros(DescendPreference::None, original_token, offset)
sema.descend_into_macros(DescendPreference::None, original_token)
.into_iter()
.filter_map(|token| token.parent().and_then(ast::NameLike::cast))
.filter_map(|node| match &node {

View file

@ -1,4 +1,4 @@
use hir::DescendPreference;
use hir::{DescendPreference, GenericParam};
use ide_db::{base_db::Upcast, defs::Definition, helpers::pick_best_token, RootDatabase};
use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, T};
@ -37,8 +37,37 @@ pub(crate) fn goto_type_definition(
}
}
};
let mut process_ty = |ty: hir::Type| {
// collect from each `ty` into the `res` result vec
let ty = ty.strip_references();
ty.walk(db, |t| {
if let Some(adt) = t.as_adt() {
push(adt.into());
} else if let Some(trait_) = t.as_dyn_trait() {
push(trait_.into());
} else if let Some(traits) = t.as_impl_traits(db) {
traits.for_each(|it| push(it.into()));
} else if let Some(trait_) = t.as_associated_type_parent_trait(db) {
push(trait_.into());
}
});
};
if let Some((range, resolution)) = sema.check_for_format_args_template(token.clone(), offset) {
if let Some(ty) = resolution.and_then(|res| match Definition::from(res) {
Definition::Const(it) => Some(it.ty(db)),
Definition::Static(it) => Some(it.ty(db)),
Definition::GenericParam(GenericParam::ConstParam(it)) => Some(it.ty(db)),
Definition::Local(it) => Some(it.ty(db)),
Definition::Adt(hir::Adt::Struct(it)) => Some(it.ty(db)),
_ => None,
}) {
process_ty(ty);
}
return Some(RangeInfo::new(range, res));
}
let range = token.text_range();
sema.descend_into_macros(DescendPreference::None,token, offset)
sema.descend_into_macros(DescendPreference::None, token)
.into_iter()
.filter_map(|token| {
let ty = sema
@ -76,21 +105,7 @@ pub(crate) fn goto_type_definition(
});
ty
})
.for_each(|ty| {
// collect from each `ty` into the `res` result vec
let ty = ty.strip_references();
ty.walk(db, |t| {
if let Some(adt) = t.as_adt() {
push(adt.into());
} else if let Some(trait_) = t.as_dyn_trait() {
push(trait_.into());
} else if let Some(traits) = t.as_impl_traits(db) {
traits.for_each(|it| push(it.into()));
} else if let Some(trait_) = t.as_associated_type_parent_trait(db) {
push(trait_.into());
}
});
});
.for_each(process_ty);
Some(RangeInfo::new(range, res))
}
@ -326,6 +341,42 @@ struct Baz<T>(T);
//^^^
fn foo(x$0: Bar<Baz<Foo>, Baz<usize>) {}
"#,
);
}
#[test]
fn implicit_format_args() {
check(
r#"
//- minicore: fmt
struct Bar;
// ^^^
fn test() {
let a = Bar;
format_args!("hello {a$0}");
}
"#,
);
check(
r#"
//- minicore: fmt
struct Bar;
// ^^^
fn test() {
format_args!("hello {Bar$0}");
}
"#,
);
check(
r#"
//- minicore: fmt
struct Bar;
// ^^^
const BAR: Bar = Bar;
fn test() {
format_args!("hello {BAR$0}");
}
"#,
);
}

View file

@ -1,3 +1,5 @@
use std::iter;
use hir::{DescendPreference, Semantics};
use ide_db::{
base_db::{FileId, FilePosition, FileRange},
@ -15,7 +17,6 @@ use syntax::{
SyntaxKind::{self, IDENT, INT_NUMBER},
SyntaxNode, SyntaxToken, TextRange, T,
};
use text_edit::TextSize;
use crate::{navigation_target::ToNav, references, NavigationTarget, TryToNav};
@ -132,7 +133,16 @@ fn highlight_references(
token: SyntaxToken,
FilePosition { file_id, offset }: FilePosition,
) -> Option<Vec<HighlightedRange>> {
let defs = find_defs(sema, token.clone(), offset);
let defs = if let Some((range, resolution)) =
sema.check_for_format_args_template(token.clone(), offset)
{
match resolution.map(Definition::from) {
Some(def) => iter::once(def).collect(),
None => return Some(vec![HighlightedRange { range, category: None }]),
}
} else {
find_defs(sema, token.clone())
};
let usages = defs
.iter()
.filter_map(|&d| {
@ -456,12 +466,8 @@ fn cover_range(r0: Option<TextRange>, r1: Option<TextRange>) -> Option<TextRange
}
}
fn find_defs(
sema: &Semantics<'_, RootDatabase>,
token: SyntaxToken,
offset: TextSize,
) -> FxHashSet<Definition> {
sema.descend_into_macros(DescendPreference::None, token, offset)
fn find_defs(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> FxHashSet<Definition> {
sema.descend_into_macros(DescendPreference::None, token)
.into_iter()
.filter_map(|token| IdentClass::classify_token(sema, &token))
.map(IdentClass::definitions_no_ops)
@ -1620,6 +1626,23 @@ fn f2<T: Foo>(t: T) {
T::C;
T::f();
}
"#,
);
}
#[test]
fn implicit_format_args() {
check(
r#"
//- minicore: fmt
fn test() {
let a = "foo";
// ^
format_args!("hello {a} {a$0} {}", a);
// ^read
// ^read
// ^read
}
"#,
);
}

View file

@ -150,6 +150,19 @@ fn hover_simple(
});
}
if let Some((range, resolution)) =
sema.check_for_format_args_template(original_token.clone(), offset)
{
let res = hover_for_definition(
sema,
file_id,
Definition::from(resolution?),
&original_token.parent()?,
config,
)?;
return Some(RangeInfo::new(range, res));
}
let in_attr = original_token
.parent_ancestors()
.filter_map(ast::Item::cast)
@ -164,7 +177,6 @@ fn hover_simple(
let descended = sema.descend_into_macros(
if in_attr { DescendPreference::SameKind } else { DescendPreference::SameText },
original_token.clone(),
offset,
);
let descended = || descended.iter();
@ -298,11 +310,11 @@ pub(crate) fn hover_for_definition(
sema: &Semantics<'_, RootDatabase>,
file_id: FileId,
definition: Definition,
node: &SyntaxNode,
scope_node: &SyntaxNode,
config: &HoverConfig,
) -> Option<HoverResult> {
let famous_defs = match &definition {
Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(node)?.krate())),
Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(scope_node)?.krate())),
_ => None,
};
render::definition(sema.db, definition, famous_defs.as_ref(), config).map(|markup| {

View file

@ -6613,3 +6613,63 @@ fn test() {
"#]],
);
}
#[test]
fn format_args_implicit() {
check(
r#"
//- minicore: fmt
fn test() {
let aaaaa = "foo";
format_args!("{aaaaa$0}");
}
"#,
expect![[r#"
*aaaaa*
```rust
let aaaaa: &str // size = 16 (0x10), align = 8, niches = 1
```
"#]],
);
}
#[test]
fn format_args_implicit2() {
check(
r#"
//- minicore: fmt
fn test() {
let aaaaa = "foo";
format_args!("{$0aaaaa}");
}
"#,
expect![[r#"
*aaaaa*
```rust
let aaaaa: &str // size = 16 (0x10), align = 8, niches = 1
```
"#]],
);
}
#[test]
fn format_args_implicit_raw() {
check(
r#"
//- minicore: fmt
fn test() {
let aaaaa = "foo";
format_args!(r"{$0aaaaa}");
}
"#,
expect![[r#"
*aaaaa*
```rust
let aaaaa: &str // size = 16 (0x10), align = 8, niches = 1
```
"#]],
);
}

View file

@ -99,7 +99,7 @@ pub(crate) fn moniker(
});
}
let navs = sema
.descend_into_macros(DescendPreference::None, original_token.clone(), offset)
.descend_into_macros(DescendPreference::None, original_token.clone())
.into_iter()
.filter_map(|token| {
IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops).map(|it| {

View file

@ -109,7 +109,7 @@ pub(crate) fn find_all_refs(
}
None => {
let search = make_searcher(false);
Some(find_defs(sema, &syntax, position.offset)?.map(search).collect())
Some(find_defs(sema, &syntax, position.offset)?.into_iter().map(search).collect())
}
}
}
@ -118,15 +118,27 @@ pub(crate) fn find_defs<'a>(
sema: &'a Semantics<'_, RootDatabase>,
syntax: &SyntaxNode,
offset: TextSize,
) -> Option<impl Iterator<Item = Definition> + 'a> {
) -> Option<impl IntoIterator<Item = Definition> + 'a> {
let token = syntax.token_at_offset(offset).find(|t| {
matches!(
t.kind(),
IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self]
IDENT
| INT_NUMBER
| LIFETIME_IDENT
| STRING
| T![self]
| T![super]
| T![crate]
| T![Self]
)
});
token.map(|token| {
sema.descend_into_macros(DescendPreference::SameText, token, offset)
})?;
if let Some((_, resolution)) = sema.check_for_format_args_template(token.clone(), offset) {
return resolution.map(Definition::from).map(|it| vec![it]);
}
Some(
sema.descend_into_macros(DescendPreference::SameText, token)
.into_iter()
.filter_map(|it| ast::NameLike::cast(it.parent()?))
.filter_map(move |name_like| {
@ -162,7 +174,8 @@ pub(crate) fn find_defs<'a>(
};
Some(def)
})
})
.collect(),
)
}
pub(crate) fn decl_mutability(def: &Definition, syntax: &SyntaxNode, range: TextRange) -> bool {
@ -2092,4 +2105,27 @@ fn main() { r#fn(); }
"#]],
);
}
#[test]
fn implicit_format_args() {
check(
r#"
//- minicore: fmt
fn test() {
let a = "foo";
format_args!("hello {a} {a$0} {}", a);
// ^
// ^
// ^
}
"#,
expect![[r#"
a Local FileId(0) 20..21 20..21
FileId(0) 56..57 Read
FileId(0) 60..61 Read
FileId(0) 68..69 Read
"#]],
);
}
}

View file

@ -6,14 +6,16 @@
use hir::{AsAssocItem, HirFileIdExt, InFile, Semantics};
use ide_db::{
base_db::FileId,
base_db::{FileId, FileRange},
defs::{Definition, NameClass, NameRefClass},
rename::{bail, format_err, source_edit_from_references, IdentifierKind},
RootDatabase,
};
use itertools::Itertools;
use stdx::{always, never};
use syntax::{ast, utils::is_raw_identifier, AstNode, SmolStr, SyntaxNode, TextRange, TextSize};
use syntax::{
ast, utils::is_raw_identifier, AstNode, SmolStr, SyntaxKind, SyntaxNode, TextRange, TextSize,
};
use text_edit::TextEdit;
@ -34,23 +36,20 @@ pub(crate) fn prepare_rename(
let syntax = source_file.syntax();
let res = find_definitions(&sema, syntax, position)?
.map(|(name_like, def)| {
.map(|(frange, kind, def)| {
// ensure all ranges are valid
if def.range_for_rename(&sema).is_none() {
bail!("No references found at position")
}
let Some(frange) = sema.original_range_opt(name_like.syntax()) else {
bail!("No references found at position");
};
always!(
frange.range.contains_inclusive(position.offset)
&& frange.file_id == position.file_id
);
Ok(match name_like {
ast::NameLike::Lifetime(_) => {
Ok(match kind {
SyntaxKind::LIFETIME => {
TextRange::new(frange.range.start() + TextSize::from(1), frange.range.end())
}
_ => frange.range,
@ -93,7 +92,7 @@ pub(crate) fn rename(
let defs = find_definitions(&sema, syntax, position)?;
let ops: RenameResult<Vec<SourceChange>> = defs
.map(|(_namelike, def)| {
.map(|(.., def)| {
if let Definition::Local(local) = def {
if let Some(self_param) = local.as_self_param(sema.db) {
cov_mark::hit!(rename_self_to_param);
@ -134,11 +133,27 @@ pub(crate) fn will_rename_file(
fn find_definitions(
sema: &Semantics<'_, RootDatabase>,
syntax: &SyntaxNode,
position: FilePosition,
) -> RenameResult<impl Iterator<Item = (ast::NameLike, Definition)>> {
let symbols = sema
.find_nodes_at_offset_with_descend::<ast::NameLike>(syntax, position.offset)
.map(|name_like| {
FilePosition { file_id, offset }: FilePosition,
) -> RenameResult<impl Iterator<Item = (FileRange, SyntaxKind, Definition)>> {
let token = syntax.token_at_offset(offset).find(|t| matches!(t.kind(), SyntaxKind::STRING));
if let Some((range, Some(resolution))) =
token.and_then(|token| sema.check_for_format_args_template(token, offset))
{
return Ok(vec![(
FileRange { file_id, range },
SyntaxKind::STRING,
Definition::from(resolution),
)]
.into_iter());
}
let symbols =
sema.find_nodes_at_offset_with_descend::<ast::NameLike>(syntax, offset).map(|name_like| {
let kind = name_like.syntax().kind();
let range = sema
.original_range_opt(name_like.syntax())
.ok_or_else(|| format_err!("No references found at position"))?;
let res = match &name_like {
// renaming aliases would rename the item being aliased as the HIR doesn't track aliases yet
ast::NameLike::Name(name)
@ -163,7 +178,6 @@ fn find_definitions(
Definition::Local(local_def)
}
})
.map(|def| (name_like.clone(), def))
.ok_or_else(|| format_err!("No references found at position")),
ast::NameLike::NameRef(name_ref) => {
NameRefClass::classify(sema, name_ref)
@ -187,7 +201,7 @@ fn find_definitions(
{
Err(format_err!("Renaming aliases is currently unsupported"))
} else {
Ok((name_like.clone(), def))
Ok(def)
}
})
}
@ -203,11 +217,10 @@ fn find_definitions(
_ => None,
})
})
.map(|def| (name_like, def))
.ok_or_else(|| format_err!("No references found at position"))
}
};
res
res.map(|def| (range, kind, def))
});
let res: RenameResult<Vec<_>> = symbols.collect();
@ -218,7 +231,7 @@ fn find_definitions(
Err(format_err!("No references found at position"))
} else {
// remove duplicates, comparing `Definition`s
Ok(v.into_iter().unique_by(|t| t.1))
Ok(v.into_iter().unique_by(|&(.., def)| def).collect::<Vec<_>>().into_iter())
}
}
Err(e) => Err(e),
@ -2663,4 +2676,44 @@ struct A;
"error: Cannot rename a non-local definition.",
)
}
#[test]
fn implicit_format_args() {
check(
"fbar",
r#"
//- minicore: fmt
fn test() {
let foo = "foo";
format_args!("hello {foo} {foo$0} {}", foo);
}
"#,
r#"
fn test() {
let fbar = "foo";
format_args!("hello {fbar} {fbar} {}", fbar);
}
"#,
);
}
#[test]
fn implicit_format_args2() {
check(
"fo",
r#"
//- minicore: fmt
fn test() {
let foo = "foo";
format_args!("hello {foo} {foo$0} {}", foo);
}
"#,
r#"
fn test() {
let fo = "foo";
format_args!("hello {fo} {fo} {}", fo);
}
"#,
);
}
}

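For the new capture references, rename falls out of the existing text-edit path: the recorded range covers exactly the identifier between the braces, so replacing that range inside the string literal is all that is needed, as the tests above show. A minimal standalone illustration (the byte offsets are specific to this one-line example):

```rust
use std::ops::Range;

// Replace one recorded reference range with the new name.
fn apply_rename(source: &str, range: Range<usize>, new_name: &str) -> String {
    let mut out = String::with_capacity(source.len() + new_name.len());
    out.push_str(&source[..range.start]);
    out.push_str(new_name);
    out.push_str(&source[range.end..]);
    out
}

fn main() {
    let line = r#"format_args!("hello {foo} {foo} {}", foo);"#;
    // The first implicit capture `foo` occupies bytes 21..24 of this line.
    assert_eq!(
        apply_rename(line, 21..24, "fbar"),
        r#"format_args!("hello {fbar} {foo} {}", foo);"#
    );
}
```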
View file

@ -9,7 +9,7 @@ use ide_db::{
defs::Definition,
documentation::docs_from_attrs,
helpers::visit_file_defs,
search::SearchScope,
search::{FileReferenceNode, SearchScope},
FxHashMap, FxHashSet, RootDatabase, SymbolKind,
};
use itertools::Itertools;
@ -240,7 +240,7 @@ fn find_related_tests(
.flatten();
for ref_ in defs {
let name_ref = match ref_.name {
ast::NameLike::NameRef(name_ref) => name_ref,
FileReferenceNode::NameRef(name_ref) => name_ref,
_ => continue,
};
if let Some(fn_def) =

View file

@ -82,7 +82,7 @@ pub(crate) fn signature_help(
// if the cursor is sandwiched between two space tokens and the call is unclosed
// this prevents us from leaving the CallExpression
.and_then(|tok| algo::skip_trivia_token(tok, Direction::Prev))?;
let token = sema.descend_into_macros_single(DescendPreference::None, token, offset);
let token = sema.descend_into_macros_single(DescendPreference::None, token);
for node in token.parent_ancestors() {
match_ast! {

View file

@ -399,7 +399,6 @@ fn traverse(
Some(AttrOrDerive::Derive(_)) | None => DescendPreference::None,
},
token,
0.into(),
);
match token.parent().and_then(ast::NameLike::cast) {
// Remap the token into the wrapping single token nodes

View file

@ -35,6 +35,7 @@ impl<S: Span> SpanMap<S> {
///
/// Note this does a linear search through the entire backing vector.
pub fn ranges_with_span(&self, span: S) -> impl Iterator<Item = TextRange> + '_ {
// FIXME: This should ignore the syntax context!
self.spans.iter().enumerate().filter_map(move |(idx, &(end, s))| {
if s != span {
return None;

View file

@ -121,6 +121,7 @@ impl ast::Whitespace {
}
}
#[derive(Debug)]
pub struct QuoteOffsets {
pub quotes: (TextRange, TextRange),
pub contents: TextRange,
@ -167,6 +168,11 @@ pub trait IsString: AstToken {
fn text_range_between_quotes(&self) -> Option<TextRange> {
self.quote_offsets().map(|it| it.contents)
}
fn text_without_quotes(&self) -> &str {
let text = self.text();
let Some(offsets) = self.text_range_between_quotes() else { return text };
&text[offsets - self.syntax().text_range().start()]
}
fn open_quote_text_range(&self) -> Option<TextRange> {
self.quote_offsets().map(|it| it.quotes.0)
}

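`text_without_quotes` is the small syntax helper the parser change builds on: trim the opening and closing quote ranges off the token text. A standalone equivalent over plain strings, with the quote lengths passed in explicitly:

```rust
// Standalone sketch of `text_without_quotes`: slice the literal's token text
// down to its contents. The real helper derives the quote lengths from
// `text_range_between_quotes` and falls back to the full text if they are missing.
fn text_without_quotes(token_text: &str, open_quote_len: usize, close_quote_len: usize) -> &str {
    &token_text[open_quote_len..token_text.len() - close_quote_len]
}

fn main() {
    assert_eq!(text_without_quotes("\"hello {a}\"", 1, 1), "hello {a}");
    assert_eq!(text_without_quotes(r###"r#"cc"#"###, 3, 2), "cc");
}
```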
View file

@ -13,7 +13,7 @@ pub(crate) enum Repr<'a> {
}
impl<'a> TokenText<'a> {
pub(crate) fn borrowed(text: &'a str) -> Self {
pub fn borrowed(text: &'a str) -> Self {
TokenText(Repr::Borrowed(text))
}