mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-27 13:33:31 +00:00
Option begone part 2
parent 96a774261f
commit a2a3fecae3
31 changed files with 113 additions and 152 deletions
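This commit makes two mechanical API changes, as the hunks below show: `parse_or_expand` now returns `SyntaxNode` instead of `Option<SyntaxNode>` (errors travel through the separate `parse_or_expand_with_err` query), so call sites drop their `.unwrap()` and `?`; and the error-carrying constructor formerly named `with_err` becomes `new`. The sketch below uses hypothetical stand-in types, not rust-analyzer's real ones, to illustrate the call-site effect of the first change.

```rust
// Stand-in sketch only: `FileId`, `Node`, and the two traits are hypothetical
// types used to illustrate the signature change, not rust-analyzer's real API.
type FileId = u32;
type Node = String;

trait DbBefore {
    // Old shape: the query could report "no tree" through an Option.
    fn parse_or_expand(&self, file_id: FileId) -> Option<Node>;
}

trait DbAfter {
    // New shape: the query always yields a tree; errors are reported separately
    // (via `parse_or_expand_with_err` in the real code).
    fn parse_or_expand(&self, file_id: FileId) -> Node;
}

fn caller_before(db: &dyn DbBefore, file_id: FileId) -> Node {
    // Every caller had to unwrap or propagate the Option.
    db.parse_or_expand(file_id).unwrap()
}

fn caller_after(db: &dyn DbAfter, file_id: FileId) -> Node {
    // The unwrap disappears, which is the bulk of this diff.
    db.parse_or_expand(file_id)
}

struct Fixture;
impl DbBefore for Fixture {
    fn parse_or_expand(&self, _file_id: FileId) -> Option<Node> {
        Some("fn main() {}".to_string())
    }
}
impl DbAfter for Fixture {
    fn parse_or_expand(&self, _file_id: FileId) -> Node {
        "fn main() {}".to_string()
    }
}

fn main() {
    // Both shapes produce the same tree; only the caller-side ceremony changes.
    assert_eq!(caller_before(&Fixture, 0), caller_after(&Fixture, 0));
}
```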
@@ -498,7 +498,7 @@ impl AttrsWithOwner {
 AttrDefId::FieldId(id) => {
     let map = db.fields_attrs_source_map(id.parent);
     let file_id = id.parent.file_id(db);
-    let root = db.parse_or_expand(file_id).unwrap();
+    let root = db.parse_or_expand(file_id);
     let owner = match &map[id.local_id] {
         Either::Left(it) => ast::AnyHasAttrs::new(it.to_node(&root)),
         Either::Right(it) => ast::AnyHasAttrs::new(it.to_node(&root)),

@@ -514,7 +514,7 @@ impl AttrsWithOwner {
 AttrDefId::EnumVariantId(id) => {
     let map = db.variants_attrs_source_map(id.parent);
     let file_id = id.parent.lookup(db).id.file_id();
-    let root = db.parse_or_expand(file_id).unwrap();
+    let root = db.parse_or_expand(file_id);
     InFile::new(file_id, ast::AnyHasAttrs::new(map[id.local_id].to_node(&root)))
 }
 AttrDefId::StaticId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
@@ -174,30 +174,12 @@ impl Expander {
 fn enter_expand_inner(
     db: &dyn DefDatabase,
     call_id: MacroCallId,
-    mut error: Option<ExpandError>,
+    error: Option<ExpandError>,
 ) -> ExpandResult<Option<InFile<Parse<SyntaxNode>>>> {
     let file_id = call_id.as_file();
     let ExpandResult { value, err } = db.parse_or_expand_with_err(file_id);

-    if error.is_none() {
-        error = err;
-    }
-
-    let parse = match value {
-        Some(it) => it,
-        None => {
-            // Only `None` if the macro expansion produced no usable AST.
-            if error.is_none() {
-                tracing::warn!("no error despite `parse_or_expand` failing");
-            }
-
-            return ExpandResult::only_err(error.unwrap_or_else(|| {
-                ExpandError::Other("failed to parse macro invocation".into())
-            }));
-        }
-    };
-
-    ExpandResult { value: Some(InFile::new(file_id, parse)), err: error }
+    ExpandResult { value: Some(InFile::new(file_id, value)), err: error.or(err) }
 }

 pub fn exit(&mut self, db: &dyn DefDatabase, mut mark: Mark) {
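The rewritten body of `enter_expand_inner` folds the old `if error.is_none() { error = err; }` dance into `error.or(err)`: `Option::or` keeps the first value when it is `Some` and falls back to the second otherwise, which is exactly the precedence the removed branch implemented. A minimal, runnable check of that equivalence:

```rust
fn main() {
    // `or` prefers the caller-supplied error and falls back to the query's error,
    // mirroring the removed `if error.is_none() { error = err; }` branch.
    let caller_err: Option<&str> = Some("caller error");
    let query_err: Option<&str> = Some("query error");

    assert_eq!(caller_err.or(query_err), Some("caller error"));
    assert_eq!(None.or(query_err), Some("query error"));
    assert_eq!(caller_err.or(None), Some("caller error"));

    let none: Option<&str> = None;
    assert_eq!(none.or(None), None);
}
```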
@@ -640,22 +640,20 @@ impl<'a> AssocItemCollector<'a> {
 AssocItem::MacroCall(call) => {
     let file_id = self.expander.current_file_id();
     let root = self.db.parse_or_expand(file_id);
-    if let Some(root) = root {
-        let call = &item_tree[call];
+    let call = &item_tree[call];

     let ast_id_map = self.db.ast_id_map(file_id);
     let macro_call = ast_id_map.get(call.ast_id).to_node(&root);
     let _cx = stdx::panic_context::enter(format!(
         "collect_items MacroCall: {macro_call}"
     ));
     if let Ok(res) =
         self.expander.enter_expand::<ast::MacroItems>(self.db, macro_call)
     {
         self.collect_macro_items(res, &|| hir_expand::MacroCallKind::FnLike {
             ast_id: InFile::new(file_id, call.ast_id),
             expand_to: hir_expand::ExpandTo::Items,
         });
     }
-    }
 }
 }
 }
@@ -108,10 +108,7 @@ pub struct ItemTree {
 impl ItemTree {
     pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
         let _p = profile::span("file_item_tree_query").detail(|| format!("{file_id:?}"));
-        let syntax = match db.parse_or_expand(file_id) {
-            Some(node) => node,
-            None => return Default::default(),
-        };
+        let syntax = db.parse_or_expand(file_id);
         if never!(syntax.kind() == SyntaxKind::ERROR, "{:?} from {:?} {}", file_id, syntax, syntax)
         {
             // FIXME: not 100% sure why these crop up, but return an empty tree to avoid a panic
@@ -20,7 +20,7 @@ impl<N: ItemTreeNode> HasSource for AssocItemLoc<N> {
 fn source(&self, db: &dyn DefDatabase) -> InFile<N::Source> {
     let tree = self.id.item_tree(db);
     let ast_id_map = db.ast_id_map(self.id.file_id());
-    let root = db.parse_or_expand(self.id.file_id()).unwrap();
+    let root = db.parse_or_expand(self.id.file_id());
     let node = &tree[self.id.value];

     InFile::new(self.id.file_id(), ast_id_map.get(node.ast_id()).to_node(&root))

@@ -33,7 +33,7 @@ impl<N: ItemTreeNode> HasSource for ItemLoc<N> {
 fn source(&self, db: &dyn DefDatabase) -> InFile<N::Source> {
     let tree = self.id.item_tree(db);
     let ast_id_map = db.ast_id_map(self.id.file_id());
-    let root = db.parse_or_expand(self.id.file_id()).unwrap();
+    let root = db.parse_or_expand(self.id.file_id());
     let node = &tree[self.id.value];

     InFile::new(self.id.file_id(), ast_id_map.get(node.ast_id()).to_node(&root))

@@ -46,7 +46,7 @@ impl HasSource for Macro2Loc {
 fn source(&self, db: &dyn DefDatabase) -> InFile<Self::Value> {
     let tree = self.id.item_tree(db);
     let ast_id_map = db.ast_id_map(self.id.file_id());
-    let root = db.parse_or_expand(self.id.file_id()).unwrap();
+    let root = db.parse_or_expand(self.id.file_id());
     let node = &tree[self.id.value];

     InFile::new(self.id.file_id(), ast_id_map.get(node.ast_id()).to_node(&root))

@@ -59,7 +59,7 @@ impl HasSource for MacroRulesLoc {
 fn source(&self, db: &dyn DefDatabase) -> InFile<Self::Value> {
     let tree = self.id.item_tree(db);
     let ast_id_map = db.ast_id_map(self.id.file_id());
-    let root = db.parse_or_expand(self.id.file_id()).unwrap();
+    let root = db.parse_or_expand(self.id.file_id());
     let node = &tree[self.id.value];

     InFile::new(self.id.file_id(), ast_id_map.get(node.ast_id()).to_node(&root))

@@ -72,7 +72,7 @@ impl HasSource for ProcMacroLoc {
 fn source(&self, db: &dyn DefDatabase) -> InFile<Self::Value> {
     let tree = self.id.item_tree(db);
     let ast_id_map = db.ast_id_map(self.id.file_id());
-    let root = db.parse_or_expand(self.id.file_id()).unwrap();
+    let root = db.parse_or_expand(self.id.file_id());
     let node = &tree[self.id.value];

     InFile::new(self.id.file_id(), ast_id_map.get(node.ast_id()).to_node(&root))
@@ -198,7 +198,7 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
 fn expand_simple_derive(tt: &tt::Subtree, trait_path: tt::Subtree) -> ExpandResult<tt::Subtree> {
     let info = match parse_adt(tt) {
         Ok(info) => info,
-        Err(e) => return ExpandResult::with_err(tt::Subtree::empty(), e),
+        Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
     };
     let mut where_block = vec![];
     let (params, args): (Vec<_>, Vec<_>) = info

@@ -249,10 +249,7 @@ fn format_args_expand(
 let mut args = parse_exprs_with_sep(tt, ',');

 if args.is_empty() {
-    return ExpandResult::with_err(
-        tt::Subtree::empty(),
-        mbe::ExpandError::NoMatchingRule.into(),
-    );
+    return ExpandResult::new(tt::Subtree::empty(), mbe::ExpandError::NoMatchingRule.into());
 }
 for arg in &mut args {
     // Remove `key =`.

@@ -575,7 +572,7 @@ fn include_expand(
 Ok((subtree, map, file_id)) => {
     ExpandResult::ok(ExpandedEager { subtree, included_file: Some((file_id, map)) })
 }
-Err(e) => ExpandResult::with_err(
+Err(e) => ExpandResult::new(
     ExpandedEager { subtree: tt::Subtree::empty(), included_file: None },
     e,
 ),

@@ -588,7 +585,7 @@ fn include_bytes_expand(
 tt: &tt::Subtree,
 ) -> ExpandResult<ExpandedEager> {
     if let Err(e) = parse_string(tt) {
-        return ExpandResult::with_err(
+        return ExpandResult::new(
             ExpandedEager { subtree: tt::Subtree::empty(), included_file: None },
             e,
         );

@@ -613,7 +610,7 @@ fn include_str_expand(
 let path = match parse_string(tt) {
     Ok(it) => it,
     Err(e) => {
-        return ExpandResult::with_err(
+        return ExpandResult::new(
             ExpandedEager { subtree: tt::Subtree::empty(), included_file: None },
             e,
         )

@@ -650,7 +647,7 @@ fn env_expand(
 let key = match parse_string(tt) {
     Ok(it) => it,
     Err(e) => {
-        return ExpandResult::with_err(
+        return ExpandResult::new(
             ExpandedEager { subtree: tt::Subtree::empty(), included_file: None },
             e,
         )

@@ -686,7 +683,7 @@ fn option_env_expand(
 let key = match parse_string(tt) {
     Ok(it) => it,
     Err(e) => {
-        return ExpandResult::with_err(
+        return ExpandResult::new(
             ExpandedEager { subtree: tt::Subtree::empty(), included_file: None },
             e,
         )
@@ -98,12 +98,9 @@ pub trait ExpandDatabase: SourceDatabase {
 /// Main public API -- parses a hir file, not caring whether it's a real
 /// file or a macro expansion.
 #[salsa::transparent]
-fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode>;
+fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode;
 #[salsa::transparent]
-fn parse_or_expand_with_err(
-    &self,
-    file_id: HirFileId,
-) -> ExpandResult<Option<Parse<SyntaxNode>>>;
+fn parse_or_expand_with_err(&self, file_id: HirFileId) -> ExpandResult<Parse<SyntaxNode>>;
 /// Implementation for the macro case.
 fn parse_macro_expansion(
     &self,

@@ -252,27 +249,26 @@ pub fn expand_speculative(
 }

 fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
-    let map = db.parse_or_expand(file_id).map(|it| AstIdMap::from_source(&it)).unwrap_or_default();
-    Arc::new(map)
+    Arc::new(AstIdMap::from_source(&db.parse_or_expand(file_id)))
 }

-fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> Option<SyntaxNode> {
-    Some(match file_id.repr() {
+fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
+    match file_id.repr() {
         HirFileIdRepr::FileId(file_id) => db.parse(file_id).tree().syntax().clone(),
         HirFileIdRepr::MacroFile(macro_file) => {
             db.parse_macro_expansion(macro_file).value.0.syntax_node()
         }
-    })
+    }
 }

 fn parse_or_expand_with_err(
     db: &dyn ExpandDatabase,
     file_id: HirFileId,
-) -> ExpandResult<Option<Parse<SyntaxNode>>> {
+) -> ExpandResult<Parse<SyntaxNode>> {
     match file_id.repr() {
-        HirFileIdRepr::FileId(file_id) => ExpandResult::ok(Some(db.parse(file_id).to_syntax())),
+        HirFileIdRepr::FileId(file_id) => ExpandResult::ok(db.parse(file_id).to_syntax()),
         HirFileIdRepr::MacroFile(macro_file) => {
-            db.parse_macro_expansion(macro_file).map(|it| Some(it.0))
+            db.parse_macro_expansion(macro_file).map(|(it, _)| it)
         }
     }
 }
@@ -111,7 +111,7 @@ fn lazy_expand(
 def: &MacroDefId,
 macro_call: InFile<ast::MacroCall>,
 krate: CrateId,
-) -> ExpandResult<Option<InFile<Parse<SyntaxNode>>>> {
+) -> ExpandResult<InFile<Parse<SyntaxNode>>> {
     let ast_id = db.ast_id_map(macro_call.file_id).ast_id(&macro_call.value);

     let expand_to = ExpandTo::from_call_site(&macro_call.value);

@@ -121,8 +121,7 @@ fn lazy_expand(
 MacroCallKind::FnLike { ast_id: macro_call.with_value(ast_id), expand_to },
 );

-db.parse_or_expand_with_err(id.as_file())
-    .map(|parse| parse.map(|parse| InFile::new(id.as_file(), parse)))
+db.parse_or_expand_with_err(id.as_file()).map(|parse| InFile::new(id.as_file(), parse))
 }

 fn eager_macro_recur(

@@ -162,8 +161,7 @@ fn eager_macro_recur(
 Err(err) => return Err(err),
 };
 id.map(|call| {
-    call.and_then(|call| db.parse_or_expand(call.as_file()))
-        .map(|it| it.clone_for_update())
+    call.map(|call| db.parse_or_expand(call.as_file()).clone_for_update())
 })
 }
 MacroDefKind::Declarative(_)

@@ -174,23 +172,18 @@ fn eager_macro_recur(
 let ExpandResult { value, err } =
     lazy_expand(db, &def, curr.with_value(child.clone()), krate);

-match value {
-    Some(val) => {
-        // replace macro inside
-        let hygiene = Hygiene::new(db, val.file_id);
-        let ExpandResult { value, err: error } = eager_macro_recur(
-            db,
-            &hygiene,
-            // FIXME: We discard parse errors here
-            val.map(|it| it.syntax_node()),
-            krate,
-            macro_resolver,
-        )?;
-        let err = if err.is_none() { error } else { err };
-        ExpandResult { value, err }
-    }
-    None => ExpandResult { value: None, err },
-}
+// replace macro inside
+let hygiene = Hygiene::new(db, value.file_id);
+let ExpandResult { value, err: error } = eager_macro_recur(
+    db,
+    &hygiene,
+    // FIXME: We discard parse errors here
+    value.map(|it| it.syntax_node()),
+    krate,
+    macro_resolver,
+)?;
+let err = if err.is_none() { error } else { err };
+ExpandResult { value, err }
 }
 };
 if err.is_some() {
@@ -730,7 +730,7 @@ pub type AstId<N> = InFile<FileAstId<N>>;

 impl<N: AstNode> AstId<N> {
     pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N {
-        let root = db.parse_or_expand(self.file_id).unwrap();
+        let root = db.parse_or_expand(self.file_id);
         db.ast_id_map(self.file_id).get(self.value).to_node(&root)
     }
 }

@@ -766,7 +766,7 @@ impl<T> InFile<T> {
 }

 pub fn file_syntax(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
-    db.parse_or_expand(self.file_id).expect("source created from invalid file")
+    db.parse_or_expand(self.file_id)
 }
 }
@@ -38,7 +38,7 @@ impl ProcMacroExpander {
 Some(Ok(proc_macros)) => proc_macros,
 Some(Err(_)) | None => {
     never!("Non-dummy expander even though there are no proc macros");
-    return ExpandResult::with_err(
+    return ExpandResult::new(
         tt::Subtree::empty(),
         ExpandError::Other("Internal error".into()),
     );

@@ -52,7 +52,7 @@ impl ProcMacroExpander {
 proc_macros.len(),
 id.0
 );
-return ExpandResult::with_err(
+return ExpandResult::new(
     tt::Subtree::empty(),
     ExpandError::Other("Internal error".into()),
 );

@@ -75,17 +75,15 @@ impl ProcMacroExpander {
 }
 }
 ProcMacroExpansionError::System(text)
-| ProcMacroExpansionError::Panic(text) => ExpandResult::with_err(
-    tt::Subtree::empty(),
-    ExpandError::Other(text.into()),
-),
+| ProcMacroExpansionError::Panic(text) => {
+    ExpandResult::new(tt::Subtree::empty(), ExpandError::Other(text.into()))
+}
 },
 }
 }
-None => ExpandResult::with_err(
-    tt::Subtree::empty(),
-    ExpandError::UnresolvedProcMacro(def_crate),
-),
+None => {
+    ExpandResult::new(tt::Subtree::empty(), ExpandError::UnresolvedProcMacro(def_crate))
+}
 }
 }
 }
@@ -246,7 +246,7 @@ fn expr_node(
 ) -> Option<InFile<SyntaxNode>> {
     Some(match body_source_map.expr_syntax(expr) {
         Ok(sp) => {
-            let root = db.parse_or_expand(sp.file_id).unwrap();
+            let root = db.parse_or_expand(sp.file_id);
             sp.map(|ptr| ptr.to_node(&root).syntax().clone())
         }
         Err(SyntheticSyntax) => return None,

@@ -260,7 +260,7 @@ fn pat_node(
 ) -> Option<InFile<SyntaxNode>> {
     Some(match body_source_map.pat_syntax(pat) {
         Ok(sp) => {
-            let root = db.parse_or_expand(sp.file_id).unwrap();
+            let root = db.parse_or_expand(sp.file_id);
             sp.map(|ptr| {
                 ptr.either(
                     |it| it.to_node(&root).syntax().clone(),

@@ -290,7 +290,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
 for (pat, ty) in inference_result.type_of_pat.iter() {
     let syntax_ptr = match body_source_map.pat_syntax(pat) {
         Ok(sp) => {
-            let root = db.parse_or_expand(sp.file_id).unwrap();
+            let root = db.parse_or_expand(sp.file_id);
             sp.map(|ptr| {
                 ptr.either(
                     |it| it.to_node(&root).syntax().clone(),

@@ -309,7 +309,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
 for (expr, ty) in inference_result.type_of_expr.iter() {
     let node = match body_source_map.expr_syntax(expr) {
         Ok(sp) => {
-            let root = db.parse_or_expand(sp.file_id).unwrap();
+            let root = db.parse_or_expand(sp.file_id);
             sp.map(|ptr| ptr.to_node(&root).syntax().clone())
         }
         Err(SyntheticSyntax) => continue,
@@ -140,7 +140,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
 self.imp.parse(file_id)
 }

-pub fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode> {
+pub fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode {
     self.imp.parse_or_expand(file_id)
 }

@@ -518,23 +518,23 @@ impl<'db> SemanticsImpl<'db> {
 tree
 }

-fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode> {
-    let node = self.db.parse_or_expand(file_id)?;
+fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode {
+    let node = self.db.parse_or_expand(file_id);
     self.cache(node.clone(), file_id);
-    Some(node)
+    node
 }

 fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
     let sa = self.analyze_no_infer(macro_call.syntax())?;
     let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
-    let node = self.parse_or_expand(file_id)?;
+    let node = self.parse_or_expand(file_id);
     Some(node)
 }

 fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
     let src = self.wrap_node_infile(item.clone());
     let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src))?;
-    self.parse_or_expand(macro_call_id.as_file())
+    Some(self.parse_or_expand(macro_call_id.as_file()))
 }

 fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {

@@ -543,7 +543,7 @@ impl<'db> SemanticsImpl<'db> {
 let call_id = self.with_ctx(|ctx| {
     ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it)
 })?;
-self.parse_or_expand(call_id.as_file())
+Some(self.parse_or_expand(call_id.as_file()))
 }

 fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<Macro>>> {

@@ -566,7 +566,7 @@ impl<'db> SemanticsImpl<'db> {
 .into_iter()
 .flat_map(|call| {
     let file_id = call?.as_file();
-    let node = self.db.parse_or_expand(file_id)?;
+    let node = self.db.parse_or_expand(file_id);
     self.cache(node.clone(), file_id);
     Some(node)
 })

@@ -990,7 +990,7 @@ impl<'db> SemanticsImpl<'db> {
 }

 fn diagnostics_display_range(&self, src: InFile<SyntaxNodePtr>) -> FileRange {
-    let root = self.parse_or_expand(src.file_id).unwrap();
+    let root = self.parse_or_expand(src.file_id);
     let node = src.map(|it| it.to_node(&root));
     node.as_ref().original_file_range(self.db.upcast())
 }
@@ -141,7 +141,7 @@ impl SourceAnalyzer {
 expr: InFile<ast::MacroCall>,
 ) -> Option<InFile<ast::Expr>> {
     let macro_file = self.body_source_map()?.node_macro_file(expr.as_ref())?;
-    let expanded = db.parse_or_expand(macro_file)?;
+    let expanded = db.parse_or_expand(macro_file);
     let res = if let Some(stmts) = ast::MacroStmts::cast(expanded.clone()) {
         match stmts.expr()? {
             ast::Expr::MacroExpr(mac) => {
@@ -32,18 +32,18 @@ pub struct DeclarationLocation {
 }

 impl DeclarationLocation {
-    pub fn syntax<DB: HirDatabase>(&self, sema: &Semantics<'_, DB>) -> Option<SyntaxNode> {
-        let root = sema.parse_or_expand(self.hir_file_id)?;
-        Some(self.ptr.to_node(&root))
+    pub fn syntax<DB: HirDatabase>(&self, sema: &Semantics<'_, DB>) -> SyntaxNode {
+        let root = sema.parse_or_expand(self.hir_file_id);
+        self.ptr.to_node(&root)
     }

-    pub fn original_range(&self, db: &dyn HirDatabase) -> Option<FileRange> {
-        let node = resolve_node(db, self.hir_file_id, &self.ptr)?;
-        Some(node.as_ref().original_file_range(db.upcast()))
+    pub fn original_range(&self, db: &dyn HirDatabase) -> FileRange {
+        let node = resolve_node(db, self.hir_file_id, &self.ptr);
+        node.as_ref().original_file_range(db.upcast())
     }

     pub fn original_name_range(&self, db: &dyn HirDatabase) -> Option<FileRange> {
-        let node = resolve_node(db, self.hir_file_id, &self.name_ptr)?;
+        let node = resolve_node(db, self.hir_file_id, &self.name_ptr);
         node.as_ref().original_file_range_opt(db.upcast())
     }
 }

@@ -52,10 +52,10 @@ fn resolve_node(
 db: &dyn HirDatabase,
 file_id: HirFileId,
 ptr: &SyntaxNodePtr,
-) -> Option<InFile<SyntaxNode>> {
-    let root = db.parse_or_expand(file_id)?;
+) -> InFile<SyntaxNode> {
+    let root = db.parse_or_expand(file_id);
     let node = ptr.to_node(&root);
-    Some(InFile::new(file_id, node))
+    InFile::new(file_id, node)
 }

 #[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)]
@@ -59,7 +59,7 @@ pub(crate) fn replace_derive_with_manual_impl(
 // collect the derive paths from the #[derive] expansion
 let current_derives = ctx
     .sema
-    .parse_or_expand(hir_file)?
+    .parse_or_expand(hir_file)
     .descendants()
     .filter_map(ast::Attr::cast)
     .filter_map(|attr| attr.path())

@@ -136,7 +136,7 @@ fn get_name_definition(
 ) -> Option<Definition> {
     let _p = profile::span("get_name_definition");

-    let candidate_node = import_candidate.loc.syntax(sema)?;
+    let candidate_node = import_candidate.loc.syntax(sema);
     let candidate_name_node = if candidate_node.kind() != NAME {
         candidate_node.children().find(|it| it.kind() == NAME)?
     } else {
@@ -27,7 +27,7 @@ pub(crate) fn incorrect_case(ctx: &DiagnosticsContext<'_>, d: &hir::IncorrectCas
 }

 fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::IncorrectCase) -> Option<Vec<Assist>> {
-    let root = ctx.sema.db.parse_or_expand(d.file)?;
+    let root = ctx.sema.db.parse_or_expand(d.file);
     let name_node = d.ident.to_node(&root);
     let def = NameClass::classify(&ctx.sema, &name_node)?.defined()?;

@@ -56,7 +56,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
 return None;
 }

-let root = ctx.sema.db.parse_or_expand(d.file)?;
+let root = ctx.sema.db.parse_or_expand(d.file);

 let current_module = match &d.field_list_parent {
     Either::Left(ptr) => ctx.sema.scope(ptr.to_node(&root).syntax()).map(|it| it.module()),

@@ -24,7 +24,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingUnsafe) -> Option<Vec<Ass
 return None;
 }

-let root = ctx.sema.db.parse_or_expand(d.expr.file_id)?;
+let root = ctx.sema.db.parse_or_expand(d.expr.file_id);
 let expr = d.expr.value.to_node(&root);

 let node_to_add_unsafe_block = pick_best_node_to_add_unsafe_block(&expr)?;

@@ -21,7 +21,7 @@ pub(crate) fn no_such_field(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField)
 }

 fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField) -> Option<Vec<Assist>> {
-    let root = ctx.sema.db.parse_or_expand(d.field.file_id)?;
+    let root = ctx.sema.db.parse_or_expand(d.field.file_id);
     missing_record_expr_field_fixes(
         &ctx.sema,
         d.field.file_id.original_file(ctx.sema.db),

@@ -28,7 +28,7 @@ fn fixes(
 ctx: &DiagnosticsContext<'_>,
 d: &hir::ReplaceFilterMapNextWithFindMap,
 ) -> Option<Vec<Assist>> {
-    let root = ctx.sema.db.parse_or_expand(d.file)?;
+    let root = ctx.sema.db.parse_or_expand(d.file);
     let next_expr = d.next_expr.to_node(&root);
     let next_call = ast::MethodCallExpr::cast(next_expr.syntax().clone())?;

@@ -93,7 +93,7 @@ fn add_missing_ok_or_some(
 expr_ptr: &InFile<AstPtr<ast::Expr>>,
 acc: &mut Vec<Assist>,
 ) -> Option<()> {
-    let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id)?;
+    let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id);
     let expr = expr_ptr.value.to_node(&root);
     let expr_range = expr.syntax().text_range();
     let scope = ctx.sema.scope(expr.syntax())?;

@@ -133,7 +133,7 @@ fn remove_semicolon(
 expr_ptr: &InFile<AstPtr<ast::Expr>>,
 acc: &mut Vec<Assist>,
 ) -> Option<()> {
-    let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id)?;
+    let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id);
     let expr = expr_ptr.value.to_node(&root);
     if !d.actual.is_unit() {
         return None;

@@ -169,7 +169,7 @@ fn str_ref_to_owned(
 return None;
 }

-let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id)?;
+let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id);
 let expr = expr_ptr.value.to_node(&root);
 let expr_range = expr.syntax().text_range();

@@ -50,7 +50,7 @@ fn method_fix(
 ctx: &DiagnosticsContext<'_>,
 expr_ptr: &InFile<AstPtr<ast::Expr>>,
 ) -> Option<Vec<Assist>> {
-    let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id)?;
+    let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id);
     let expr = expr_ptr.value.to_node(&root);
     let FileRange { range, file_id } = ctx.sema.original_range_opt(expr.syntax())?;
     Some(vec![Assist {

@@ -53,7 +53,7 @@ fn field_fix(
 return None;
 }
 let expr_ptr = &d.expr;
-let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id)?;
+let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id);
 let expr = expr_ptr.value.to_node(&root);
 let (file_id, range) = match expr {
     ast::Expr::MethodCallExpr(mcall) => {

@@ -31,7 +31,7 @@ pub(crate) fn unresolved_module(
 }

 fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedModule) -> Option<Vec<Assist>> {
-    let root = ctx.sema.db.parse_or_expand(d.decl.file_id)?;
+    let root = ctx.sema.db.parse_or_expand(d.decl.file_id);
     let unresolved_module = d.decl.value.to_node(&root);
     Some(
         d.candidates
@@ -201,7 +201,7 @@ impl<'a> DiagnosticsContext<'a> {
 let sema = &self.sema;
 (|| {
     let precise_location = precise_location?;
-    let root = sema.parse_or_expand(node.file_id)?;
+    let root = sema.parse_or_expand(node.file_id);
     match root.covering_element(precise_location) {
         syntax::NodeOrToken::Node(it) => Some(sema.original_range(&it)),
         syntax::NodeOrToken::Token(it) => {
@@ -160,7 +160,7 @@ impl NavigationTarget {

 impl TryToNav for FileSymbol {
     fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
-        let full_range = self.loc.original_range(db)?;
+        let full_range = self.loc.original_range(db);
         let name_range = self.loc.original_name_range(db)?;

         Some(NavigationTarget {

@@ -549,7 +549,7 @@ impl TryToNav for hir::ConstParam {
 /// e.g. `struct Name`, `enum Name`, `fn Name`
 pub(crate) fn description_from_symbol(db: &RootDatabase, symbol: &FileSymbol) -> Option<String> {
     let sema = Semantics::new(db);
-    let node = symbol.loc.syntax(&sema)?;
+    let node = symbol.loc.syntax(&sema);

     match_ast! {
         match node {
@@ -45,7 +45,7 @@ pub(crate) fn expand_rules(
 transcriber::transcribe(&rule.rhs, &match_.bindings);
 ExpandResult { value, err: match_.err.or(transcribe_err) }
 } else {
-    ExpandResult::with_err(
+    ExpandResult::new(
         tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] },
         ExpandError::NoMatchingRule,
     )
@@ -324,12 +324,12 @@ pub struct ValueResult<T, E> {
 }

 impl<T, E> ValueResult<T, E> {
-    pub fn ok(value: T) -> Self {
-        Self { value, err: None }
+    pub fn new(value: T, err: E) -> Self {
+        Self { value, err: Some(err) }
     }

-    pub fn with_err(value: T, err: E) -> Self {
-        Self { value, err: Some(err) }
+    pub fn ok(value: T) -> Self {
+        Self { value, err: None }
     }

     pub fn only_err(err: E) -> Self
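Net effect of the hunk above: the error-carrying constructor is now called `new` and is listed first, `ok` keeps its meaning, and the old `with_err` name disappears, which is why every `ExpandResult::with_err(...)` call site elsewhere in this commit becomes `ExpandResult::new(...)`. Below is a self-contained sketch of the resulting shape; the field layout and the `ExpandResult` alias are assumptions inferred from how they are used in this diff, and the error type is a hypothetical stand-in.

```rust
// Sketch only: mirrors the constructors shown in the hunk above.
// Field layout and the `ExpandResult` alias are assumed from usage in this diff.
pub struct ValueResult<T, E> {
    pub value: T,
    pub err: Option<E>,
}

impl<T, E> ValueResult<T, E> {
    /// Formerly `with_err`: a value plus a recorded error.
    pub fn new(value: T, err: E) -> Self {
        Self { value, err: Some(err) }
    }

    /// A value with no error attached.
    pub fn ok(value: T) -> Self {
        Self { value, err: None }
    }
}

// Hypothetical stand-ins for the real error / token-tree types.
type ExpandError = String;
type ExpandResult<T> = ValueResult<T, ExpandError>;

fn main() {
    // Old call sites wrote `ExpandResult::with_err(tt, e)`; they now write:
    let with_error: ExpandResult<&str> =
        ExpandResult::new("empty subtree", "no matching rule".into());
    let clean: ExpandResult<&str> = ExpandResult::ok("expanded subtree");

    assert_eq!(with_error.err.as_deref(), Some("no matching rule"));
    assert!(clean.err.is_none());
}
```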
@@ -532,7 +532,7 @@ fn location_csv_expr(
 Ok(s) => s,
 Err(SyntheticSyntax) => return "synthetic,,".to_string(),
 };
-let root = db.parse_or_expand(src.file_id).unwrap();
+let root = db.parse_or_expand(src.file_id);
 let node = src.map(|e| e.to_node(&root).syntax().clone());
 let original_range = node.as_ref().original_file_range(db);
 let path = vfs.file_path(original_range.file_id);

@@ -554,7 +554,7 @@ fn location_csv_pat(
 Ok(s) => s,
 Err(SyntheticSyntax) => return "synthetic,,".to_string(),
 };
-let root = db.parse_or_expand(src.file_id).unwrap();
+let root = db.parse_or_expand(src.file_id);
 let node = src.map(|e| {
     e.either(|it| it.to_node(&root).syntax().clone(), |it| it.to_node(&root).syntax().clone())
 });

@@ -576,7 +576,7 @@ fn expr_syntax_range(
 ) -> Option<(VfsPath, LineCol, LineCol)> {
     let src = sm.expr_syntax(expr_id);
     if let Ok(src) = src {
-        let root = db.parse_or_expand(src.file_id).unwrap();
+        let root = db.parse_or_expand(src.file_id);
         let node = src.map(|e| e.to_node(&root).syntax().clone());
         let original_range = node.as_ref().original_file_range(db);
         let path = vfs.file_path(original_range.file_id);

@@ -598,7 +598,7 @@ fn pat_syntax_range(
 ) -> Option<(VfsPath, LineCol, LineCol)> {
     let src = sm.pat_syntax(pat_id);
     if let Ok(src) = src {
-        let root = db.parse_or_expand(src.file_id).unwrap();
+        let root = db.parse_or_expand(src.file_id);
         let node = src.map(|e| {
             e.either(
                 |it| it.to_node(&root).syntax().clone(),