diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs
index 6927a017b1..a00337ae9c 100644
--- a/crates/hir-def/src/attr.rs
+++ b/crates/hir-def/src/attr.rs
@@ -498,7 +498,7 @@ impl AttrsWithOwner {
             AttrDefId::FieldId(id) => {
                 let map = db.fields_attrs_source_map(id.parent);
                 let file_id = id.parent.file_id(db);
-                let root = db.parse_or_expand(file_id).unwrap();
+                let root = db.parse_or_expand(file_id);
                 let owner = match &map[id.local_id] {
                     Either::Left(it) => ast::AnyHasAttrs::new(it.to_node(&root)),
                     Either::Right(it) => ast::AnyHasAttrs::new(it.to_node(&root)),
@@ -514,7 +514,7 @@ impl AttrsWithOwner {
             AttrDefId::EnumVariantId(id) => {
                 let map = db.variants_attrs_source_map(id.parent);
                 let file_id = id.parent.lookup(db).id.file_id();
-                let root = db.parse_or_expand(file_id).unwrap();
+                let root = db.parse_or_expand(file_id);
                 InFile::new(file_id, ast::AnyHasAttrs::new(map[id.local_id].to_node(&root)))
             }
             AttrDefId::StaticId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
diff --git a/crates/hir-def/src/body.rs b/crates/hir-def/src/body.rs
index 1d082d5554..cf2227b379 100644
--- a/crates/hir-def/src/body.rs
+++ b/crates/hir-def/src/body.rs
@@ -174,30 +174,12 @@ impl Expander {
     fn enter_expand_inner(
         db: &dyn DefDatabase,
         call_id: MacroCallId,
-        mut error: Option<ExpandError>,
+        error: Option<ExpandError>,
     ) -> ExpandResult<Option<InFile<Parse<SyntaxNode>>>> {
         let file_id = call_id.as_file();
         let ExpandResult { value, err } = db.parse_or_expand_with_err(file_id);
-        if error.is_none() {
-            error = err;
-        }
-
-        let parse = match value {
-            Some(it) => it,
-            None => {
-                // Only `None` if the macro expansion produced no usable AST.
-                if error.is_none() {
-                    tracing::warn!("no error despite `parse_or_expand` failing");
-                }
-
-                return ExpandResult::only_err(error.unwrap_or_else(|| {
-                    ExpandError::Other("failed to parse macro invocation".into())
-                }));
-            }
-        };
-
-        ExpandResult { value: Some(InFile::new(file_id, parse)), err: error }
+        ExpandResult { value: Some(InFile::new(file_id, value)), err: error.or(err) }
     }
 
     pub fn exit(&mut self, db: &dyn DefDatabase, mut mark: Mark) {
diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs
index 668b436e01..3fdd09b004 100644
--- a/crates/hir-def/src/data.rs
+++ b/crates/hir-def/src/data.rs
@@ -640,22 +640,20 @@ impl<'a> AssocItemCollector<'a> {
                 AssocItem::MacroCall(call) => {
                     let file_id = self.expander.current_file_id();
                     let root = self.db.parse_or_expand(file_id);
-                    if let Some(root) = root {
-                        let call = &item_tree[call];
+                    let call = &item_tree[call];
 
-                        let ast_id_map = self.db.ast_id_map(file_id);
-                        let macro_call = ast_id_map.get(call.ast_id).to_node(&root);
-                        let _cx = stdx::panic_context::enter(format!(
-                            "collect_items MacroCall: {macro_call}"
-                        ));
-                        if let Ok(res) =
-                            self.expander.enter_expand::<ast::MacroItems>(self.db, macro_call)
-                        {
-                            self.collect_macro_items(res, &|| hir_expand::MacroCallKind::FnLike {
-                                ast_id: InFile::new(file_id, call.ast_id),
-                                expand_to: hir_expand::ExpandTo::Items,
-                            });
-                        }
+                    let ast_id_map = self.db.ast_id_map(file_id);
+                    let macro_call = ast_id_map.get(call.ast_id).to_node(&root);
+                    let _cx = stdx::panic_context::enter(format!(
+                        "collect_items MacroCall: {macro_call}"
+                    ));
+                    if let Ok(res) =
+                        self.expander.enter_expand::<ast::MacroItems>(self.db, macro_call)
+                    {
+                        self.collect_macro_items(res, &|| hir_expand::MacroCallKind::FnLike {
+                            ast_id: InFile::new(file_id, call.ast_id),
+                            expand_to: hir_expand::ExpandTo::Items,
+                        });
                     }
                 }
             }
diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs
index 48c1baf308..64e61fe5a4 100644
--- a/crates/hir-def/src/item_tree.rs
+++ b/crates/hir-def/src/item_tree.rs
@@ -108,10 +108,7 @@ pub struct ItemTree {
 impl ItemTree {
     pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
         let _p = profile::span("file_item_tree_query").detail(|| format!("{file_id:?}"));
-        let syntax = match db.parse_or_expand(file_id) {
-            Some(node) => node,
-            None => return Default::default(),
-        };
+        let syntax = db.parse_or_expand(file_id);
         if never!(syntax.kind() == SyntaxKind::ERROR, "{:?} from {:?} {}", file_id, syntax, syntax)
         {
             // FIXME: not 100% sure why these crop up, but return an empty tree to avoid a panic
diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs b/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs
index e5cb9b9567..e5fb15b6be 100644
--- a/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs
@@ -33,7 +33,7 @@ m!(&k");
 "#,
         expect![[r#"
 macro_rules! m { ($i:literal) => {}; }
-/* error: Failed to lower macro args to token tree */"#]],
+/* error: invalid token tree */"#]],
     );
 }
diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs b/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs
index b8d2ca687c..ae56934f63 100644
--- a/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs
@@ -98,7 +98,7 @@ macro_rules! m1 { ($x:ident) => { ($x } }
 macro_rules! m2 { ($x:ident) => {} }

 /* error: invalid macro definition: expected subtree */
-/* error: Failed to lower macro args to token tree */
+/* error: invalid token tree */
 "#]],
     )
 }
diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs
index 73a495d89b..552d69ca2e 100644
--- a/crates/hir-def/src/macro_expansion_tests/mod.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mod.rs
@@ -151,34 +151,33 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
         if let Some(err) = exp.err {
             format_to!(expn_text, "/* error: {} */", err);
         }
-        if let Some((parse, token_map)) = exp.value {
-            if expect_errors {
-                assert!(!parse.errors().is_empty(), "no parse errors in expansion");
-                for e in parse.errors() {
-                    format_to!(expn_text, "/* parse error: {} */\n", e);
-                }
-            } else {
-                assert!(
-                    parse.errors().is_empty(),
-                    "parse errors in expansion: \n{:#?}",
-                    parse.errors()
-                );
+        let (parse, token_map) = exp.value;
+        if expect_errors {
+            assert!(!parse.errors().is_empty(), "no parse errors in expansion");
+            for e in parse.errors() {
+                format_to!(expn_text, "/* parse error: {} */\n", e);
             }
-            let pp = pretty_print_macro_expansion(
-                parse.syntax_node(),
-                show_token_ids.then_some(&*token_map),
+        } else {
+            assert!(
+                parse.errors().is_empty(),
+                "parse errors in expansion: \n{:#?}",
+                parse.errors()
             );
-            let indent = IndentLevel::from_node(call.syntax());
-            let pp = reindent(indent, pp);
-            format_to!(expn_text, "{}", pp);
+        }
+        let pp = pretty_print_macro_expansion(
+            parse.syntax_node(),
+            show_token_ids.then_some(&*token_map),
+        );
+        let indent = IndentLevel::from_node(call.syntax());
+        let pp = reindent(indent, pp);
+        format_to!(expn_text, "{}", pp);
-
-            if tree {
-                let tree = format!("{:#?}", parse.syntax_node())
-                    .split_inclusive('\n')
-                    .map(|line| format!("// {line}"))
-                    .collect::<String>();
-                format_to!(expn_text, "\n{}", tree)
-            }
+        if tree {
+            let tree = format!("{:#?}", parse.syntax_node())
+                .split_inclusive('\n')
+                .map(|line| format!("// {line}"))
+                .collect::<String>();
+            format_to!(expn_text, "\n{}", tree)
+        }

         let range = call.syntax().text_range();
         let range: Range<usize> = range.into();
diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs
index 6592c6b902..461b498fa0 100644
--- a/crates/hir-def/src/nameres/collector.rs
+++ b/crates/hir-def/src/nameres/collector.rs
@@ -1371,7 +1371,7 @@ impl DefCollector<'_> {
             self.def_map.diagnostics.push(diag);
         }

-        if let Some(errors) = value {
+        if let errors @ [_, ..] = &*value {
             let loc: MacroCallLoc = self.db.lookup_intern_macro_call(macro_call_id);
             let diag = DefDiagnostic::macro_expansion_parse_error(module_id, loc.kind, &errors);
             self.def_map.diagnostics.push(diag);
diff --git a/crates/hir-def/src/nameres/tests/incremental.rs b/crates/hir-def/src/nameres/tests/incremental.rs
index b07462cde0..d2e3575d5e 100644
--- a/crates/hir-def/src/nameres/tests/incremental.rs
+++ b/crates/hir-def/src/nameres/tests/incremental.rs
@@ -109,7 +109,7 @@ fn typing_inside_a_macro_should_not_invalidate_def_map() {
 }

 #[test]
-fn typing_inside_a_function_should_not_invalidate_expansions() {
+fn typing_inside_a_function_should_not_invalidate_item_expansions() {
     let (mut db, pos) = TestDB::with_position(
         r#"
 //- /lib.rs
@@ -161,7 +161,7 @@ m!(Z);
         let n_recalculated_item_trees = events.iter().filter(|it| it.contains("item_tree")).count();
         assert_eq!(n_recalculated_item_trees, 1);
         let n_reparsed_macros =
-            events.iter().filter(|it| it.contains("parse_macro_expansion")).count();
+            events.iter().filter(|it| it.contains("parse_macro_expansion(")).count();
         assert_eq!(n_reparsed_macros, 0);
     }
 }
diff --git a/crates/hir-def/src/src.rs b/crates/hir-def/src/src.rs
index f69356cac8..6047f770d4 100644
--- a/crates/hir-def/src/src.rs
+++ b/crates/hir-def/src/src.rs
@@ -20,7 +20,7 @@ impl<N: ItemTreeNode> HasSource for AssocItemLoc<N> {
     fn source(&self, db: &dyn DefDatabase) -> InFile<N::Source> {
         let tree = self.id.item_tree(db);
         let ast_id_map = db.ast_id_map(self.id.file_id());
-        let root = db.parse_or_expand(self.id.file_id()).unwrap();
+        let root = db.parse_or_expand(self.id.file_id());
         let node = &tree[self.id.value];

         InFile::new(self.id.file_id(), ast_id_map.get(node.ast_id()).to_node(&root))
@@ -33,7 +33,7 @@ impl<N: ItemTreeNode> HasSource for ItemLoc<N> {
     fn source(&self, db: &dyn DefDatabase) -> InFile<N::Source> {
         let tree = self.id.item_tree(db);
         let ast_id_map = db.ast_id_map(self.id.file_id());
-        let root = db.parse_or_expand(self.id.file_id()).unwrap();
+        let root = db.parse_or_expand(self.id.file_id());
         let node = &tree[self.id.value];

         InFile::new(self.id.file_id(), ast_id_map.get(node.ast_id()).to_node(&root))
@@ -46,7 +46,7 @@ impl HasSource for Macro2Loc {
     fn source(&self, db: &dyn DefDatabase) -> InFile<Self::Value> {
         let tree = self.id.item_tree(db);
         let ast_id_map = db.ast_id_map(self.id.file_id());
-        let root = db.parse_or_expand(self.id.file_id()).unwrap();
+        let root = db.parse_or_expand(self.id.file_id());
         let node = &tree[self.id.value];

         InFile::new(self.id.file_id(), ast_id_map.get(node.ast_id()).to_node(&root))
@@ -59,7 +59,7 @@ impl HasSource for MacroRulesLoc {
     fn source(&self, db: &dyn DefDatabase) -> InFile<Self::Value> {
         let tree = self.id.item_tree(db);
         let ast_id_map = db.ast_id_map(self.id.file_id());
-        let root = db.parse_or_expand(self.id.file_id()).unwrap();
+        let root = db.parse_or_expand(self.id.file_id());
         let node = &tree[self.id.value];

         InFile::new(self.id.file_id(), ast_id_map.get(node.ast_id()).to_node(&root))
@@ -72,7 +72,7 @@ impl HasSource for ProcMacroLoc {
     fn source(&self, db: &dyn DefDatabase) -> InFile<Self::Value> {
         let tree = self.id.item_tree(db);
         let ast_id_map = db.ast_id_map(self.id.file_id());
-        let root = db.parse_or_expand(self.id.file_id()).unwrap();
+        let root = db.parse_or_expand(self.id.file_id());
         let node = &tree[self.id.value];

         InFile::new(self.id.file_id(), ast_id_map.get(node.ast_id()).to_node(&root))
diff --git a/crates/hir-expand/src/builtin_derive_macro.rs b/crates/hir-expand/src/builtin_derive_macro.rs
index 7e753663c0..4b9f9704c2 100644
--- a/crates/hir-expand/src/builtin_derive_macro.rs
+++ b/crates/hir-expand/src/builtin_derive_macro.rs
@@ -198,7 +198,7 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
 fn expand_simple_derive(tt: &tt::Subtree, trait_path: tt::Subtree) -> ExpandResult<tt::Subtree> {
     let info = match parse_adt(tt) {
         Ok(info) => info,
-        Err(e) => return ExpandResult::with_err(tt::Subtree::empty(), e),
+        Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
     };
     let mut where_block = vec![];
     let (params, args): (Vec<_>, Vec<_>) = info
diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs
index 08c924d0e9..9a25141d8f 100644
--- a/crates/hir-expand/src/builtin_fn_macro.rs
+++ b/crates/hir-expand/src/builtin_fn_macro.rs
@@ -249,10 +249,7 @@ fn format_args_expand(
     let mut args = parse_exprs_with_sep(tt, ',');

     if args.is_empty() {
-        return ExpandResult::with_err(
-            tt::Subtree::empty(),
-            mbe::ExpandError::NoMatchingRule.into(),
-        );
+        return ExpandResult::new(tt::Subtree::empty(), mbe::ExpandError::NoMatchingRule.into());
     }
     for arg in &mut args {
         // Remove `key =`.
@@ -575,7 +572,7 @@ fn include_expand(
         Ok((subtree, map, file_id)) => {
             ExpandResult::ok(ExpandedEager { subtree, included_file: Some((file_id, map)) })
         }
-        Err(e) => ExpandResult::with_err(
+        Err(e) => ExpandResult::new(
             ExpandedEager { subtree: tt::Subtree::empty(), included_file: None },
             e,
         ),
@@ -588,7 +585,7 @@ fn include_bytes_expand(
     tt: &tt::Subtree,
 ) -> ExpandResult<ExpandedEager> {
     if let Err(e) = parse_string(tt) {
-        return ExpandResult::with_err(
+        return ExpandResult::new(
             ExpandedEager { subtree: tt::Subtree::empty(), included_file: None },
             e,
         );
@@ -613,7 +610,7 @@ fn include_str_expand(
     let path = match parse_string(tt) {
         Ok(it) => it,
         Err(e) => {
-            return ExpandResult::with_err(
+            return ExpandResult::new(
                 ExpandedEager { subtree: tt::Subtree::empty(), included_file: None },
                 e,
             )
@@ -650,7 +647,7 @@ fn env_expand(
     let key = match parse_string(tt) {
         Ok(it) => it,
         Err(e) => {
-            return ExpandResult::with_err(
+            return ExpandResult::new(
                 ExpandedEager { subtree: tt::Subtree::empty(), included_file: None },
                 e,
             )
@@ -686,7 +683,7 @@ fn option_env_expand(
     let key = match parse_string(tt) {
         Ok(it) => it,
         Err(e) => {
-            return ExpandResult::with_err(
+            return ExpandResult::new(
                 ExpandedEager { subtree: tt::Subtree::empty(), included_file: None },
                 e,
             )
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index afc2be0741..bed04b3a34 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -98,17 +98,14 @@ pub trait ExpandDatabase: SourceDatabase {
     /// Main public API -- parses a hir file, not caring whether it's a real
     /// file or a macro expansion.
     #[salsa::transparent]
-    fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode>;
+    fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode;
     #[salsa::transparent]
-    fn parse_or_expand_with_err(
-        &self,
-        file_id: HirFileId,
-    ) -> ExpandResult<Option<Parse<SyntaxNode>>>;
+    fn parse_or_expand_with_err(&self, file_id: HirFileId) -> ExpandResult<Parse<SyntaxNode>>;
     /// Implementation for the macro case.
     fn parse_macro_expansion(
         &self,
         macro_file: MacroFile,
-    ) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>>;
+    ) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>;

     /// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the
     /// reason why we use salsa at all.
@@ -133,7 +130,7 @@ pub trait ExpandDatabase: SourceDatabase {
     fn macro_def(&self, id: MacroDefId) -> Result<Arc<TokenExpander>, mbe::ParseError>;

     /// Expand macro call to a token tree.
-    fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>>;
+    fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Arc<tt::Subtree>>;
     /// Special case of the previous query for procedural macros. We can't LRU
     /// proc macros, since they are not deterministic in general, and
     /// non-determinism breaks salsa in a very, very, very bad way. @edwin0cheng
@@ -143,7 +140,7 @@ pub trait ExpandDatabase: SourceDatabase {
     fn parse_macro_expansion_error(
         &self,
         macro_call: MacroCallId,
-    ) -> ExpandResult<Option<Box<[SyntaxError]>>>;
+    ) -> ExpandResult<Box<[SyntaxError]>>;

     fn hygiene_frame(&self, file_id: HirFileId) -> Arc<HygieneFrame>;
 }
@@ -252,15 +249,14 @@ pub fn expand_speculative(
 }

 fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
-    let map = db.parse_or_expand(file_id).map(|it| AstIdMap::from_source(&it)).unwrap_or_default();
-    Arc::new(map)
+    Arc::new(AstIdMap::from_source(&db.parse_or_expand(file_id)))
 }

-fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> Option<SyntaxNode> {
+fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
     match file_id.repr() {
-        HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()),
+        HirFileIdRepr::FileId(file_id) => db.parse(file_id).tree().syntax().clone(),
         HirFileIdRepr::MacroFile(macro_file) => {
-            db.parse_macro_expansion(macro_file).value.map(|(it, _)| it.syntax_node())
+            db.parse_macro_expansion(macro_file).value.0.syntax_node()
         }
     }
 }
@@ -268,11 +264,11 @@ fn parse_or_expand(
 fn parse_or_expand_with_err(
     db: &dyn ExpandDatabase,
     file_id: HirFileId,
-) -> ExpandResult<Option<Parse<SyntaxNode>>> {
+) -> ExpandResult<Parse<SyntaxNode>> {
     match file_id.repr() {
-        HirFileIdRepr::FileId(file_id) => ExpandResult::ok(Some(db.parse(file_id).to_syntax())),
+        HirFileIdRepr::FileId(file_id) => ExpandResult::ok(db.parse(file_id).to_syntax()),
         HirFileIdRepr::MacroFile(macro_file) => {
-            db.parse_macro_expansion(macro_file).map(|it| it.map(|(parse, _)| parse))
+            db.parse_macro_expansion(macro_file).map(|(it, _)| it)
         }
     }
 }
@@ -280,9 +276,9 @@ fn parse_or_expand_with_err(
 fn parse_macro_expansion(
     db: &dyn ExpandDatabase,
     macro_file: MacroFile,
-) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>> {
+) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
     let _p = profile::span("parse_macro_expansion");
-    let mbe::ValueResult { value, err } = db.macro_expand(macro_file.macro_call_id);
+    let mbe::ValueResult { value: tt, err } = db.macro_expand(macro_file.macro_call_id);

     if let Some(err) = &err {
         if tracing::enabled!(tracing::Level::DEBUG) {
@@ -308,10 +304,6 @@ fn parse_macro_expansion(
             );
         }
     }
-    let tt = match value {
-        Some(tt) => tt,
-        None => return ExpandResult { value: None, err },
-    };

     let expand_to = macro_expand_to(db, macro_file.macro_call_id);

@@ -320,7 +312,7 @@ fn parse_macro_expansion(
     let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to);

-    ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err }
+    ExpandResult { value: (parse, Arc::new(rev_token_map)), err }
 }

 fn macro_arg(
@@ -448,29 +440,13 @@ fn macro_def(
     }
 }

-fn macro_expand(
-    db: &dyn ExpandDatabase,
-    id: MacroCallId,
-    // FIXME: Remove the OPtion if possible
-) -> ExpandResult<Option<Arc<tt::Subtree>>> {
+fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
     let _p = profile::span("macro_expand");
profile::span("macro_expand"); let loc: MacroCallLoc = db.lookup_intern_macro_call(id); if let Some(eager) = &loc.eager { - return ExpandResult { - value: Some(eager.arg_or_expansion.clone()), - err: eager.error.clone(), - }; + return ExpandResult { value: eager.arg_or_expansion.clone(), err: eager.error.clone() }; } - let macro_arg = match db.macro_arg(id) { - Some(it) => it, - None => { - return ExpandResult::only_err(ExpandError::Other( - "Failed to lower macro args to token tree".into(), - )) - } - }; - let expander = match db.macro_def(loc.def) { Ok(it) => it, // FIXME: This is weird -- we effectively report macro *definition* @@ -478,48 +454,75 @@ fn macro_expand( // be reported at the definition site when we construct a def map. // (Note we do report them also at the definition site in the late diagnostic pass) Err(err) => { - return ExpandResult::only_err(ExpandError::Other( - format!("invalid macro definition: {err}").into(), - )) + return ExpandResult { + value: Arc::new(tt::Subtree { + delimiter: tt::Delimiter::UNSPECIFIED, + token_trees: vec![], + }), + err: Some(ExpandError::Other(format!("invalid macro definition: {err}").into())), + } } }; + let Some(macro_arg) = db.macro_arg(id) else { + return ExpandResult { + value: Arc::new( + tt::Subtree { + delimiter: tt::Delimiter::UNSPECIFIED, + token_trees: Vec::new(), + }, + ), + err: Some(ExpandError::Other( + "invalid token tree" + .into(), + )), + }; + }; let ExpandResult { value: mut tt, err } = expander.expand(db, id, ¯o_arg.0); // Set a hard limit for the expanded tt let count = tt.count(); if TOKEN_LIMIT.check(count).is_err() { - return ExpandResult::only_err(ExpandError::Other( - format!( - "macro invocation exceeds token limit: produced {} tokens, limit is {}", - count, - TOKEN_LIMIT.inner(), - ) - .into(), - )); + return ExpandResult { + value: Arc::new(tt::Subtree { + delimiter: tt::Delimiter::UNSPECIFIED, + token_trees: vec![], + }), + err: Some(ExpandError::Other( + format!( + "macro invocation exceeds token limit: produced {} tokens, limit is {}", + count, + TOKEN_LIMIT.inner(), + ) + .into(), + )), + }; } fixup::reverse_fixups(&mut tt, ¯o_arg.1, ¯o_arg.2); - ExpandResult { value: Some(Arc::new(tt)), err } + ExpandResult { value: Arc::new(tt), err } } fn parse_macro_expansion_error( db: &dyn ExpandDatabase, macro_call_id: MacroCallId, -) -> ExpandResult>> { +) -> ExpandResult> { db.parse_macro_expansion(MacroFile { macro_call_id }) - .map(|it| it.map(|(it, _)| it.errors().to_vec().into_boxed_slice())) + .map(|it| it.0.errors().to_vec().into_boxed_slice()) } fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult { let loc: MacroCallLoc = db.lookup_intern_macro_call(id); - let macro_arg = match db.macro_arg(id) { - Some(it) => it, - None => { - return ExpandResult::with_err( - tt::Subtree::empty(), - ExpandError::Other("No arguments for proc-macro".into()), - ) - } + let Some(macro_arg) = db.macro_arg(id) else { + return ExpandResult { + value: tt::Subtree { + delimiter: tt::Delimiter::UNSPECIFIED, + token_trees: Vec::new(), + }, + err: Some(ExpandError::Other( + "invalid token tree" + .into(), + )), + }; }; let expander = match loc.def.kind { diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs index 84f391316c..b108e92044 100644 --- a/crates/hir-expand/src/eager.rs +++ b/crates/hir-expand/src/eager.rs @@ -111,7 +111,7 @@ fn lazy_expand( def: &MacroDefId, macro_call: InFile, krate: CrateId, -) -> ExpandResult>>> { +) -> ExpandResult>> { let ast_id = 
     let expand_to = ExpandTo::from_call_site(&macro_call.value);
@@ -121,8 +121,7 @@ fn lazy_expand(
         MacroCallKind::FnLike { ast_id: macro_call.with_value(ast_id), expand_to },
     );

-    db.parse_or_expand_with_err(id.as_file())
-        .map(|parse| parse.map(|parse| InFile::new(id.as_file(), parse)))
+    db.parse_or_expand_with_err(id.as_file()).map(|parse| InFile::new(id.as_file(), parse))
 }

 fn eager_macro_recur(
@@ -162,8 +161,7 @@ fn eager_macro_recur(
                     Err(err) => return Err(err),
                 };
                 id.map(|call| {
-                    call.and_then(|call| db.parse_or_expand(call.as_file()))
-                        .map(|it| it.clone_for_update())
+                    call.map(|call| db.parse_or_expand(call.as_file()).clone_for_update())
                 })
             }
             MacroDefKind::Declarative(_)
@@ -174,23 +172,18 @@ fn eager_macro_recur(
                 let ExpandResult { value, err } =
                     lazy_expand(db, &def, curr.with_value(child.clone()), krate);

-                match value {
-                    Some(val) => {
-                        // replace macro inside
-                        let hygiene = Hygiene::new(db, val.file_id);
-                        let ExpandResult { value, err: error } = eager_macro_recur(
-                            db,
-                            &hygiene,
-                            // FIXME: We discard parse errors here
-                            val.map(|it| it.syntax_node()),
-                            krate,
-                            macro_resolver,
-                        )?;
-                        let err = if err.is_none() { error } else { err };
-                        ExpandResult { value, err }
-                    }
-                    None => ExpandResult { value: None, err },
-                }
+                // replace macro inside
+                let hygiene = Hygiene::new(db, value.file_id);
+                let ExpandResult { value, err: error } = eager_macro_recur(
+                    db,
+                    &hygiene,
+                    // FIXME: We discard parse errors here
+                    value.map(|it| it.syntax_node()),
+                    krate,
+                    macro_resolver,
+                )?;
+                let err = if err.is_none() { error } else { err };
+                ExpandResult { value, err }
             }
         };
         if err.is_some() {
diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs
index b273f21768..00796e7c0d 100644
--- a/crates/hir-expand/src/fixup.rs
+++ b/crates/hir-expand/src/fixup.rs
@@ -14,7 +14,7 @@ use tt::token_id::Subtree;
 /// The result of calculating fixes for a syntax node -- a bunch of changes
 /// (appending to and replacing nodes), the information that is needed to
 /// reverse those changes afterwards, and a token map.
-#[derive(Debug)]
+#[derive(Debug, Default)]
 pub(crate) struct SyntaxFixups {
     pub(crate) append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
     pub(crate) replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
@@ -24,7 +24,7 @@ pub(crate) struct SyntaxFixups {
 }

 /// This is the information needed to reverse the fixups.
-#[derive(Debug, PartialEq, Eq)]
+#[derive(Debug, Default, PartialEq, Eq)]
 pub struct SyntaxFixupUndoInfo {
     original: Vec<Subtree>,
 }
diff --git a/crates/hir-expand/src/hygiene.rs b/crates/hir-expand/src/hygiene.rs
index 2eb56fc9e8..9af38fed04 100644
--- a/crates/hir-expand/src/hygiene.rs
+++ b/crates/hir-expand/src/hygiene.rs
@@ -200,8 +200,14 @@ fn make_hygiene_info(
     });

     let macro_def = db.macro_def(loc.def).ok()?;
-    let (_, exp_map) = db.parse_macro_expansion(macro_file).value?;
-    let macro_arg = db.macro_arg(macro_file.macro_call_id)?;
+    let (_, exp_map) = db.parse_macro_expansion(macro_file).value;
+    let macro_arg = db.macro_arg(macro_file.macro_call_id).unwrap_or_else(|| {
+        Arc::new((
+            tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
+            Default::default(),
+            Default::default(),
+        ))
+    });

     Some(HygieneInfo {
         file: macro_file,
diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs
index 9685320cf5..965b9e850b 100644
--- a/crates/hir-expand/src/lib.rs
+++ b/crates/hir-expand/src/lib.rs
@@ -257,8 +257,14 @@ impl HirFileId {
         let arg_tt = loc.kind.arg(db)?;

         let macro_def = db.macro_def(loc.def).ok()?;
-        let (parse, exp_map) = db.parse_macro_expansion(macro_file).value?;
-        let macro_arg = db.macro_arg(macro_file.macro_call_id)?;
+        let (parse, exp_map) = db.parse_macro_expansion(macro_file).value;
+        let macro_arg = db.macro_arg(macro_file.macro_call_id).unwrap_or_else(|| {
+            Arc::new((
+                tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
+                Default::default(),
+                Default::default(),
+            ))
+        });

         let def = loc.def.ast_id().left().and_then(|id| {
             let def_tt = match id.to_node(db) {
@@ -730,7 +736,7 @@ pub type AstId<N> = InFile<FileAstId<N>>;

 impl<N: AstNode> AstId<N> {
     pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N {
-        let root = db.parse_or_expand(self.file_id).unwrap();
+        let root = db.parse_or_expand(self.file_id);
         db.ast_id_map(self.file_id).get(self.value).to_node(&root)
     }
 }
@@ -766,7 +772,7 @@ impl<T> InFile<T> {
     }

     pub fn file_syntax(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
-        db.parse_or_expand(self.file_id).expect("source created from invalid file")
+        db.parse_or_expand(self.file_id)
     }
 }
diff --git a/crates/hir-expand/src/proc_macro.rs b/crates/hir-expand/src/proc_macro.rs
index ad98935874..dc04a41155 100644
--- a/crates/hir-expand/src/proc_macro.rs
+++ b/crates/hir-expand/src/proc_macro.rs
@@ -38,7 +38,7 @@ impl ProcMacroExpander {
                 Some(Ok(proc_macros)) => proc_macros,
                 Some(Err(_)) | None => {
                     never!("Non-dummy expander even though there are no proc macros");
-                    return ExpandResult::with_err(
+                    return ExpandResult::new(
                         tt::Subtree::empty(),
                         ExpandError::Other("Internal error".into()),
                     );
@@ -52,7 +52,7 @@ impl ProcMacroExpander {
                         proc_macros.len(),
                         id.0
                     );
-                    return ExpandResult::with_err(
+                    return ExpandResult::new(
                         tt::Subtree::empty(),
                         ExpandError::Other("Internal error".into()),
                     );
@@ -75,17 +75,15 @@ impl ProcMacroExpander {
                             }
                         }
                         ProcMacroExpansionError::System(text)
-                        | ProcMacroExpansionError::Panic(text) => ExpandResult::with_err(
-                            tt::Subtree::empty(),
-                            ExpandError::Other(text.into()),
-                        ),
+                        | ProcMacroExpansionError::Panic(text) => {
+                            ExpandResult::new(tt::Subtree::empty(), ExpandError::Other(text.into()))
+                        }
                     },
                 }
             }
-            None => ExpandResult::with_err(
-                tt::Subtree::empty(),
-                ExpandError::UnresolvedProcMacro(def_crate),
-            ),
+            None => {
+                ExpandResult::new(tt::Subtree::empty(), ExpandError::UnresolvedProcMacro(def_crate))
+            }
         }
     }
 }
diff --git a/crates/hir-ty/src/tests.rs b/crates/hir-ty/src/tests.rs
index 245617ab82..fee03ed0bd 100644
--- a/crates/hir-ty/src/tests.rs
+++ b/crates/hir-ty/src/tests.rs
@@ -246,7 +246,7 @@ fn expr_node(
 ) -> Option<InFile<SyntaxNode>> {
     Some(match body_source_map.expr_syntax(expr) {
         Ok(sp) => {
-            let root = db.parse_or_expand(sp.file_id).unwrap();
+            let root = db.parse_or_expand(sp.file_id);
             sp.map(|ptr| ptr.to_node(&root).syntax().clone())
         }
         Err(SyntheticSyntax) => return None,
@@ -260,7 +260,7 @@ fn pat_node(
 ) -> Option<InFile<SyntaxNode>> {
     Some(match body_source_map.pat_syntax(pat) {
         Ok(sp) => {
-            let root = db.parse_or_expand(sp.file_id).unwrap();
+            let root = db.parse_or_expand(sp.file_id);
             sp.map(|ptr| {
                 ptr.either(
                     |it| it.to_node(&root).syntax().clone(),
@@ -290,7 +290,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
     for (pat, ty) in inference_result.type_of_pat.iter() {
         let syntax_ptr = match body_source_map.pat_syntax(pat) {
             Ok(sp) => {
-                let root = db.parse_or_expand(sp.file_id).unwrap();
+                let root = db.parse_or_expand(sp.file_id);
                 sp.map(|ptr| {
                     ptr.either(
                         |it| it.to_node(&root).syntax().clone(),
@@ -309,7 +309,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
     for (expr, ty) in inference_result.type_of_expr.iter() {
         let node = match body_source_map.expr_syntax(expr) {
             Ok(sp) => {
-                let root = db.parse_or_expand(sp.file_id).unwrap();
+                let root = db.parse_or_expand(sp.file_id);
                 sp.map(|ptr| ptr.to_node(&root).syntax().clone())
             }
             Err(SyntheticSyntax) => continue,
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index ffd0b0ad7a..b32efe1cb5 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -140,7 +140,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.imp.parse(file_id)
     }

-    pub fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode> {
+    pub fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode {
         self.imp.parse_or_expand(file_id)
     }

@@ -518,23 +518,23 @@ impl<'db> SemanticsImpl<'db> {
         tree
     }

-    fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode> {
-        let node = self.db.parse_or_expand(file_id)?;
+    fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode {
+        let node = self.db.parse_or_expand(file_id);
         self.cache(node.clone(), file_id);
-        Some(node)
+        node
     }

     fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
         let sa = self.analyze_no_infer(macro_call.syntax())?;
         let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
-        let node = self.parse_or_expand(file_id)?;
+        let node = self.parse_or_expand(file_id);
         Some(node)
     }

     fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
         let src = self.wrap_node_infile(item.clone());
         let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src))?;
-        self.parse_or_expand(macro_call_id.as_file())
+        Some(self.parse_or_expand(macro_call_id.as_file()))
     }

     fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
@@ -543,7 +543,7 @@ impl<'db> SemanticsImpl<'db> {
         let call_id = self.with_ctx(|ctx| {
             ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it)
         })?;
-        self.parse_or_expand(call_id.as_file())
+        Some(self.parse_or_expand(call_id.as_file()))
     }

     fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<Macro>>> {
@@ -566,7 +566,7 @@ impl<'db> SemanticsImpl<'db> {
             .into_iter()
             .flat_map(|call| {
                 let file_id = call?.as_file();
-                let node = self.db.parse_or_expand(file_id)?;
+                let node = self.db.parse_or_expand(file_id);
                 self.cache(node.clone(), file_id);
                 Some(node)
             })
@@ -990,7 +990,7 @@ impl<'db> SemanticsImpl<'db> {
     }

     fn diagnostics_display_range(&self, src: InFile<SyntaxNodePtr>) -> FileRange {
-        let root = self.parse_or_expand(src.file_id).unwrap();
+        let root = self.parse_or_expand(src.file_id);
         let node = src.map(|it| it.to_node(&root));
         node.as_ref().original_file_range(self.db.upcast())
     }
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index 2a7a0ae4c4..d0bf1c23ac 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -141,7 +141,7 @@ impl SourceAnalyzer {
         expr: InFile<ast::MacroCall>,
     ) -> Option<InFile<ast::Expr>> {
         let macro_file = self.body_source_map()?.node_macro_file(expr.as_ref())?;
-        let expanded = db.parse_or_expand(macro_file)?;
+        let expanded = db.parse_or_expand(macro_file);

         let res = if let Some(stmts) = ast::MacroStmts::cast(expanded.clone()) {
             match stmts.expr()? {
                 ast::Expr::MacroExpr(mac) => {
diff --git a/crates/hir/src/symbols.rs b/crates/hir/src/symbols.rs
index a9afa1c6f4..3eafd97b99 100644
--- a/crates/hir/src/symbols.rs
+++ b/crates/hir/src/symbols.rs
@@ -32,18 +32,18 @@ pub struct DeclarationLocation {
 }

 impl DeclarationLocation {
-    pub fn syntax<DB: HirDatabase>(&self, sema: &Semantics<'_, DB>) -> Option<SyntaxNode> {
-        let root = sema.parse_or_expand(self.hir_file_id)?;
-        Some(self.ptr.to_node(&root))
+    pub fn syntax<DB: HirDatabase>(&self, sema: &Semantics<'_, DB>) -> SyntaxNode {
+        let root = sema.parse_or_expand(self.hir_file_id);
+        self.ptr.to_node(&root)
     }

-    pub fn original_range(&self, db: &dyn HirDatabase) -> Option<FileRange> {
-        let node = resolve_node(db, self.hir_file_id, &self.ptr)?;
-        Some(node.as_ref().original_file_range(db.upcast()))
+    pub fn original_range(&self, db: &dyn HirDatabase) -> FileRange {
+        let node = resolve_node(db, self.hir_file_id, &self.ptr);
+        node.as_ref().original_file_range(db.upcast())
     }

     pub fn original_name_range(&self, db: &dyn HirDatabase) -> Option<FileRange> {
-        let node = resolve_node(db, self.hir_file_id, &self.name_ptr)?;
+        let node = resolve_node(db, self.hir_file_id, &self.name_ptr);
         node.as_ref().original_file_range_opt(db.upcast())
     }
 }
@@ -52,10 +52,10 @@ fn resolve_node(
     db: &dyn HirDatabase,
     file_id: HirFileId,
     ptr: &SyntaxNodePtr,
-) -> Option<InFile<SyntaxNode>> {
-    let root = db.parse_or_expand(file_id)?;
+) -> InFile<SyntaxNode> {
+    let root = db.parse_or_expand(file_id);
     let node = ptr.to_node(&root);
-    Some(InFile::new(file_id, node))
+    InFile::new(file_id, node)
 }

 #[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)]
diff --git a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
index 4cfae0c721..36ac8c71d8 100644
--- a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
+++ b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
@@ -59,7 +59,7 @@ pub(crate) fn replace_derive_with_manual_impl(
     // collect the derive paths from the #[derive] expansion
     let current_derives = ctx
         .sema
-        .parse_or_expand(hir_file)?
+        .parse_or_expand(hir_file)
         .descendants()
         .filter_map(ast::Attr::cast)
         .filter_map(|attr| attr.path())
diff --git a/crates/ide-db/src/items_locator.rs b/crates/ide-db/src/items_locator.rs
index 07a57c883b..74f7e0fe0d 100644
--- a/crates/ide-db/src/items_locator.rs
+++ b/crates/ide-db/src/items_locator.rs
@@ -136,7 +136,7 @@ fn get_name_definition(
 ) -> Option<Definition> {
     let _p = profile::span("get_name_definition");

-    let candidate_node = import_candidate.loc.syntax(sema)?;
+    let candidate_node = import_candidate.loc.syntax(sema);
     let candidate_name_node = if candidate_node.kind() != NAME {
         candidate_node.children().find(|it| it.kind() == NAME)?
     } else {
diff --git a/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/crates/ide-diagnostics/src/handlers/incorrect_case.rs
index db88bf7b93..ad2427017d 100644
--- a/crates/ide-diagnostics/src/handlers/incorrect_case.rs
+++ b/crates/ide-diagnostics/src/handlers/incorrect_case.rs
@@ -27,7 +27,7 @@ pub(crate) fn incorrect_case(ctx: &DiagnosticsContext<'_>, d: &hir::IncorrectCas
 }

 fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::IncorrectCase) -> Option<Vec<Assist>> {
-    let root = ctx.sema.db.parse_or_expand(d.file)?;
+    let root = ctx.sema.db.parse_or_expand(d.file);
     let name_node = d.ident.to_node(&root);
     let def = NameClass::classify(&ctx.sema, &name_node)?.defined()?;
diff --git a/crates/ide-diagnostics/src/handlers/missing_fields.rs b/crates/ide-diagnostics/src/handlers/missing_fields.rs
index a33a2cd85e..17a2b44ccd 100644
--- a/crates/ide-diagnostics/src/handlers/missing_fields.rs
+++ b/crates/ide-diagnostics/src/handlers/missing_fields.rs
@@ -56,7 +56,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Assist>>
-    let root = ctx.sema.db.parse_or_expand(d.file)?;
+    let root = ctx.sema.db.parse_or_expand(d.file);
     let current_module = match &d.field_list_parent {
         Either::Left(ptr) => ctx.sema.scope(ptr.to_node(&root).syntax()).map(|it| it.module()),
diff --git a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
index eb32db2506..d73f4e7721 100644
--- a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
+++ b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
@@ -24,7 +24,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingUnsafe) -> Option<Vec<Assist>>
-    let root = ctx.sema.db.parse_or_expand(d.expr.file_id)?;
+    let root = ctx.sema.db.parse_or_expand(d.expr.file_id);
diff --git a/crates/ide-diagnostics/src/handlers/no_such_field.rs b/crates/ide-diagnostics/src/handlers/no_such_field.rs
--- a/crates/ide-diagnostics/src/handlers/no_such_field.rs
+++ b/crates/ide-diagnostics/src/handlers/no_such_field.rs
@@ pub(crate) fn no_such_field(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField)
 }

 fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField) -> Option<Vec<Assist>> {
-    let root = ctx.sema.db.parse_or_expand(d.field.file_id)?;
+    let root = ctx.sema.db.parse_or_expand(d.field.file_id);
     missing_record_expr_field_fixes(
         &ctx.sema,
         d.field.file_id.original_file(ctx.sema.db),
diff --git a/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs b/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
index 738339cfa6..d3eda3c5eb 100644
--- a/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
+++ b/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
@@ -28,7 +28,7 @@ fn fixes(
     ctx: &DiagnosticsContext<'_>,
     d: &hir::ReplaceFilterMapNextWithFindMap,
 ) -> Option<Vec<Assist>> {
-    let root = ctx.sema.db.parse_or_expand(d.file)?;
+    let root = ctx.sema.db.parse_or_expand(d.file);
     let next_expr = d.next_expr.to_node(&root);
     let next_call = ast::MethodCallExpr::cast(next_expr.syntax().clone())?;
diff --git a/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/crates/ide-diagnostics/src/handlers/type_mismatch.rs
index c5fa1cb027..488c75b903 100644
--- a/crates/ide-diagnostics/src/handlers/type_mismatch.rs
+++ b/crates/ide-diagnostics/src/handlers/type_mismatch.rs
@@ -93,7 +93,7 @@ fn add_missing_ok_or_some(
     expr_ptr: &InFile<AstPtr<ast::Expr>>,
     acc: &mut Vec<Assist>,
 ) -> Option<()> {
-    let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id)?;
+    let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id);
     let expr = expr_ptr.value.to_node(&root);
     let expr_range = expr.syntax().text_range();
     let scope = ctx.sema.scope(expr.syntax())?;
@@ -133,7 +133,7 @@ fn remove_semicolon(
     expr_ptr: &InFile<AstPtr<ast::Expr>>,
     acc: &mut Vec<Assist>,
 ) -> Option<()> {
-    let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id)?;
+    let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id);
     let expr = expr_ptr.value.to_node(&root);
     if !d.actual.is_unit() {
         return None;
     }
@@ -169,7 +169,7 @@ fn str_ref_to_owned(
         return None;
     }

-    let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id)?;
+    let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id);
     let expr = expr_ptr.value.to_node(&root);
     let expr_range = expr.syntax().text_range();
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/crates/ide-diagnostics/src/handlers/unresolved_field.rs
index cefa74e523..09bb9f3ebf 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_field.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_field.rs
@@ -50,7 +50,7 @@ fn method_fix(
     ctx: &DiagnosticsContext<'_>,
     expr_ptr: &InFile<AstPtr<ast::Expr>>,
 ) -> Option<Vec<Assist>> {
-    let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id)?;
+    let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id);
     let expr = expr_ptr.value.to_node(&root);
     let FileRange { range, file_id } = ctx.sema.original_range_opt(expr.syntax())?;
     Some(vec![Assist {
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
index f3ec6efa75..8a0b457857 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_method.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
@@ -53,7 +53,7 @@ fn field_fix(
         return None;
     }
     let expr_ptr = &d.expr;
-    let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id)?;
+    let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id);
     let expr = expr_ptr.value.to_node(&root);
     let (file_id, range) = match expr {
         ast::Expr::MethodCallExpr(mcall) => {
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_module.rs b/crates/ide-diagnostics/src/handlers/unresolved_module.rs
index 94614f11c3..6e3fd3b42b 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_module.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_module.rs
@@ -31,7 +31,7 @@ pub(crate) fn unresolved_module(
 }

 fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedModule) -> Option<Vec<Assist>> {
-    let root = ctx.sema.db.parse_or_expand(d.decl.file_id)?;
+    let root = ctx.sema.db.parse_or_expand(d.decl.file_id);
     let unresolved_module = d.decl.value.to_node(&root);
     Some(
         d.candidates
diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs
index 7c8cb7a447..25d3568950 100644
--- a/crates/ide-diagnostics/src/lib.rs
+++ b/crates/ide-diagnostics/src/lib.rs
@@ -201,7 +201,7 @@ impl<'a> DiagnosticsContext<'a> {
         let sema = &self.sema;
         (|| {
             let precise_location = precise_location?;
-            let root = sema.parse_or_expand(node.file_id)?;
+            let root = sema.parse_or_expand(node.file_id);
             match root.covering_element(precise_location) {
                 syntax::NodeOrToken::Node(it) => Some(sema.original_range(&it)),
                 syntax::NodeOrToken::Token(it) => {
diff --git a/crates/ide/src/navigation_target.rs b/crates/ide/src/navigation_target.rs
index 6aae82f981..c1a775136f 100644
--- a/crates/ide/src/navigation_target.rs
+++ b/crates/ide/src/navigation_target.rs
@@ -160,7 +160,7 @@ impl NavigationTarget {

 impl TryToNav for FileSymbol {
     fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
-        let full_range = self.loc.original_range(db)?;
+        let full_range = self.loc.original_range(db);
         let name_range = self.loc.original_name_range(db)?;

         Some(NavigationTarget {
@@ -549,7 +549,7 @@ impl TryToNav for hir::ConstParam {
 /// e.g. `struct Name`, `enum Name`, `fn Name`
 pub(crate) fn description_from_symbol(db: &RootDatabase, symbol: &FileSymbol) -> Option<String> {
     let sema = Semantics::new(db);
-    let node = symbol.loc.syntax(&sema)?;
+    let node = symbol.loc.syntax(&sema);

     match_ast! {
         match node {
diff --git a/crates/ide/src/status.rs b/crates/ide/src/status.rs
index 7ce782f93b..71fc91cf31 100644
--- a/crates/ide/src/status.rs
+++ b/crates/ide/src/status.rs
@@ -120,12 +120,12 @@ impl FromIterator<TableEntry<FileId, Parse<ast::SourceFile>>> for SyntaxTreeStat
     }
 }

-impl<M> FromIterator<TableEntry<MacroFile, ExpandResult<Option<(Parse<SyntaxNode>, M)>>>>
+impl<M> FromIterator<TableEntry<MacroFile, ExpandResult<(Parse<SyntaxNode>, M)>>>
     for SyntaxTreeStats
 {
     fn from_iter<T>(iter: T) -> SyntaxTreeStats
     where
-        T: IntoIterator<Item = TableEntry<MacroFile, ExpandResult<Option<(Parse<SyntaxNode>, M)>>>>,
+        T: IntoIterator<Item = TableEntry<MacroFile, ExpandResult<(Parse<SyntaxNode>, M)>>>,
     {
         let mut res = SyntaxTreeStats::default();
         for entry in iter {
diff --git a/crates/mbe/src/expander.rs b/crates/mbe/src/expander.rs
index 7537dc3226..6b7f4a22d2 100644
--- a/crates/mbe/src/expander.rs
+++ b/crates/mbe/src/expander.rs
@@ -45,7 +45,7 @@ pub(crate) fn expand_rules(
             transcriber::transcribe(&rule.rhs, &match_.bindings);
         ExpandResult { value, err: match_.err.or(transcribe_err) }
     } else {
-        ExpandResult::with_err(
+        ExpandResult::new(
             tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] },
             ExpandError::NoMatchingRule,
         )
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs
index 23ec3235d2..a043e8222d 100644
--- a/crates/mbe/src/lib.rs
+++ b/crates/mbe/src/lib.rs
@@ -324,12 +324,12 @@ pub struct ValueResult<T, E> {
 }

 impl<T, E> ValueResult<T, E> {
-    pub fn ok(value: T) -> Self {
-        Self { value, err: None }
+    pub fn new(value: T, err: E) -> Self {
+        Self { value, err: Some(err) }
     }

-    pub fn with_err(value: T, err: E) -> Self {
-        Self { value, err: Some(err) }
+    pub fn ok(value: T) -> Self {
+        Self { value, err: None }
     }

     pub fn only_err(err: E) -> Self
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index bd47777571..cdf40777ba 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -180,9 +180,8 @@ impl flags::AnalysisStats {
             let mut total_macro_file_size = Bytes::default();
             for e in hir::db::ParseMacroExpansionQuery.in_db(db).entries::<Vec<_>>() {
-                if let Some((val, _)) = db.parse_macro_expansion(e.key).value {
-                    total_macro_file_size += syntax_len(val.syntax_node())
-                }
+                let val = db.parse_macro_expansion(e.key).value.0;
+                total_macro_file_size += syntax_len(val.syntax_node())
             }
             eprintln!("source files: {total_file_size}, macro files: {total_macro_file_size}");
         }
@@ -533,7 +532,7 @@ fn location_csv_expr(
         Ok(s) => s,
         Err(SyntheticSyntax) => return "synthetic,,".to_string(),
     };
-    let root = db.parse_or_expand(src.file_id).unwrap();
+    let root = db.parse_or_expand(src.file_id);
     let node = src.map(|e| e.to_node(&root).syntax().clone());
     let original_range = node.as_ref().original_file_range(db);
     let path = vfs.file_path(original_range.file_id);
@@ -555,7 +554,7 @@ fn location_csv_pat(
         Ok(s) => s,
         Err(SyntheticSyntax) => return "synthetic,,".to_string(),
     };
-    let root = db.parse_or_expand(src.file_id).unwrap();
+    let root = db.parse_or_expand(src.file_id);
     let node = src.map(|e| {
         e.either(|it| it.to_node(&root).syntax().clone(), |it| it.to_node(&root).syntax().clone())
     });
@@ -577,7 +576,7 @@ fn expr_syntax_range(
 ) -> Option<(VfsPath, LineCol, LineCol)> {
     let src = sm.expr_syntax(expr_id);
     if let Ok(src) = src {
-        let root = db.parse_or_expand(src.file_id).unwrap();
+        let root = db.parse_or_expand(src.file_id);
         let node = src.map(|e| e.to_node(&root).syntax().clone());
         let original_range = node.as_ref().original_file_range(db);
         let path = vfs.file_path(original_range.file_id);
@@ -599,7 +598,7 @@ fn pat_syntax_range(
 ) -> Option<(VfsPath, LineCol, LineCol)> {
     let src = sm.pat_syntax(pat_id);
     if let Ok(src) = src {
-        let root = db.parse_or_expand(src.file_id).unwrap();
+        let root = db.parse_or_expand(src.file_id);
         let node = src.map(|e| {
             e.either(
                 |it| it.to_node(&root).syntax().clone(),