Lukas Wirth 2021-09-19 23:34:07 +02:00
parent 3987bf5d6f
commit 9c39363ada
9 changed files with 24 additions and 33 deletions

@@ -218,7 +218,7 @@ impl ChangeFixture {
             );
             roots.push(SourceRoot::new_library(fs));
-            change.change_file(proc_lib_file, Some(Arc::new(String::from(source))));
+            change.change_file(proc_lib_file, Some(Arc::new(source)));
             let all_crates = crate_graph.crates_in_topological_order();
@@ -275,7 +275,7 @@ pub fn input_replace(attr: TokenStream, _item: TokenStream) -> TokenStream {
             expander: Arc::new(AttributeInputReplaceProcMacroExpander),
         },
     ])
-    .filter(|pm| proc_macros.iter().any(|name| name == &pm.name))
+    .filter(|pm| proc_macros.iter().any(|name| name == pm.name))
     .collect();
     (proc_macros, source.into())
 }
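
Both hunks drop work that adds nothing: the `String::from` call rebuilds a value that is presumably already a `String`, and the comparison closure no longer needs the explicit `&`. A minimal sketch of the first pattern, with a hypothetical `store` helper standing in for the fixture code:

    use std::sync::Arc;

    // Illustrative only: when `source` is already a `String`,
    // `String::from(source)` just re-creates the same value.
    fn store(source: String) -> Arc<String> {
        // before: Arc::new(String::from(source))
        Arc::new(source)
    }

    fn main() {
        let shared = store(String::from("fn main() {}"));
        println!("{shared}");
    }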

@@ -307,17 +307,12 @@ impl<'a> TyLoweringContext<'a> {
                 let mut expander = self.expander.borrow_mut();
                 if expander.is_some() {
                     (Some(expander), false)
-                } else {
-                    if let Some(module_id) = self.resolver.module() {
-                        *expander = Some(Expander::new(
-                            self.db.upcast(),
-                            macro_call.file_id,
-                            module_id,
-                        ));
-                        (Some(expander), true)
-                    } else {
-                        (None, false)
-                    }
+                } else if let Some(module_id) = self.resolver.module() {
+                    *expander =
+                        Some(Expander::new(self.db.upcast(), macro_call.file_id, module_id));
+                    (Some(expander), true)
+                } else {
+                    (None, false)
                 }
             };
             let ty = if let Some(mut expander) = expander {
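
The hunk collapses an `else { if let … } else { … }` ladder into `else if let`, removing one nesting level and reflowing the `Expander::new` call; behavior is unchanged. A standalone sketch of the same shape, with illustrative names rather than the original types:

    // Illustrative only: reuse a cached value if present, otherwise try to create it.
    fn get_or_init(cache: &mut Option<String>, key: Option<&str>) -> (Option<String>, bool) {
        if cache.is_some() {
            (cache.clone(), false)
        } else if let Some(key) = key {
            // before: `else { if let Some(key) = key { … } else { … } }`
            *cache = Some(key.to_uppercase());
            (cache.clone(), true)
        } else {
            (None, false)
        }
    }

    fn main() {
        let mut cache = None;
        assert_eq!(get_or_init(&mut cache, Some("expander")), (Some("EXPANDER".to_string()), true));
        assert_eq!(get_or_init(&mut cache, None), (Some("EXPANDER".to_string()), false));
    }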

@@ -172,7 +172,7 @@ impl<'db> MatchFinder<'db> {
         for m in self.matches().matches {
             matches_by_file
                 .entry(m.range.file_id)
-                .or_insert_with(|| SsrMatches::default())
+                .or_insert_with(SsrMatches::default)
                 .matches
                 .push(m);
         }
@@ -331,7 +331,7 @@ impl SsrMatches {
     fn flatten_into(self, out: &mut SsrMatches) {
         for mut m in self.matches {
             for p in m.placeholder_values.values_mut() {
-                std::mem::replace(&mut p.inner_matches, SsrMatches::default()).flatten_into(out);
+                std::mem::take(&mut p.inner_matches).flatten_into(out);
             }
             out.matches.push(m);
         }
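
Two small idioms here: `or_insert_with` can take the constructor `SsrMatches::default` directly instead of a closure that only calls it, and `std::mem::replace(&mut x, Default::default())` is exactly what `std::mem::take` does. A self-contained sketch of both, using plain standard-library types in place of the SSR ones:

    use std::collections::HashMap;

    fn main() {
        // Redundant closure: pass the constructor itself.
        let mut by_file: HashMap<u32, Vec<&str>> = HashMap::new();
        by_file.entry(1).or_insert_with(Vec::default).push("match in file 1");

        // `mem::take` swaps in the type's `Default` and returns the old value,
        // exactly like `mem::replace(&mut v, Default::default())`.
        let mut inner = vec!["a", "b"];
        let drained = std::mem::take(&mut inner);
        assert_eq!(drained, ["a", "b"]);
        assert!(inner.is_empty());
    }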

@@ -462,7 +462,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
                 let mut last_matched_token = child;
                 // Read code tokens util we reach one equal to the next token from our pattern
                 // or we reach the end of the token tree.
-                while let Some(next) = children.next() {
+                for next in &mut children {
                     match &next {
                         SyntaxElement::Token(t) => {
                             if Some(t.to_string()) == next_pattern_token {
@@ -763,7 +763,7 @@ impl Iterator for PatternIterator {
     type Item = SyntaxElement;
     fn next(&mut self) -> Option<SyntaxElement> {
-        while let Some(element) = self.iter.next() {
+        for element in &mut self.iter {
             if !element.kind().is_trivia() {
                 return Some(element);
             }
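
Both hunks replace `while let Some(x) = iter.next()` with a `for` loop over `&mut iter`, the idiomatic form; looping over `&mut iter` only borrows the iterator, so it can still be advanced afterwards. A minimal sketch:

    fn main() {
        let mut tokens = ["fn", " ", "main", "(", ")"].into_iter();

        // before: while let Some(tok) = tokens.next() { … }
        for tok in &mut tokens {
            // Skip whitespace-only tokens while consuming from the same iterator.
            if tok.trim().is_empty() {
                continue;
            }
            print!("{tok}");
        }
        println!();
        // `tokens` is still usable here because the loop only borrowed it.
        assert_eq!(tokens.next(), None);
    }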

@@ -65,7 +65,7 @@ fn try_add_sub_match(m: Match, existing: &mut Match, sema: &hir::Semantics<ide_d
             // will have 0 and a few will have 1. More than that should hopefully be
             // exceptional.
             let mut collector = MatchCollector::default();
-            for m in std::mem::replace(&mut p.inner_matches.matches, Vec::new()) {
+            for m in std::mem::take(&mut p.inner_matches.matches) {
                 collector.matches_by_node.insert(m.matched_node.clone(), m);
             }
             collector.add_match(m, sema);

@@ -243,10 +243,10 @@ impl<'db> ResolutionScope<'db> {
         use syntax::ast::AstNode;
         if let Some(path) = ast::Path::cast(path.clone()) {
             if let Some(qualifier) = path.qualifier() {
-                if let Some(resolved_qualifier) = self.resolve_path(&qualifier) {
-                    if let hir::PathResolution::Def(hir::ModuleDef::Adt(adt)) = resolved_qualifier {
-                        return Some(adt.ty(self.scope.db));
-                    }
+                if let Some(hir::PathResolution::Def(hir::ModuleDef::Adt(adt))) =
+                    self.resolve_path(&qualifier)
+                {
+                    return Some(adt.ty(self.scope.db));
                 }
             }
         }
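
Instead of binding the intermediate `resolved_qualifier` and matching it in a second `if let`, the new code matches the nested enum pattern directly in one `if let`. A sketch with hypothetical enums standing in for the `hir` types:

    // Hypothetical stand-ins for hir::ModuleDef and hir::PathResolution.
    #[allow(dead_code)]
    enum ModuleDef {
        Adt(String),
        Other,
    }
    #[allow(dead_code)]
    enum PathResolution {
        Def(ModuleDef),
        Local,
    }

    fn resolve(path: &str) -> Option<PathResolution> {
        (path == "Foo").then(|| PathResolution::Def(ModuleDef::Adt(path.to_string())))
    }

    fn adt_name(path: &str) -> Option<String> {
        // One nested pattern instead of two chained `if let`s.
        if let Some(PathResolution::Def(ModuleDef::Adt(name))) = resolve(path) {
            return Some(name);
        }
        None
    }

    fn main() {
        assert_eq!(adt_name("Foo"), Some("Foo".to_string()));
        assert_eq!(adt_name("bar"), None);
    }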

@@ -80,7 +80,7 @@ impl<'db> MatchFinder<'db> {
         if let Some(path) =
             self.sema.find_node_at_offset_with_descend::<ast::Path>(file.syntax(), offset)
         {
-            self.sema.ancestors_with_macros(path.syntax().clone()).skip(depth).next()
+            self.sema.ancestors_with_macros(path.syntax().clone()).nth(depth)
         } else if let Some(path) =
             self.sema.find_node_at_offset_with_descend::<ast::MethodCallExpr>(file.syntax(), offset)
         {
@@ -96,8 +96,7 @@ impl<'db> MatchFinder<'db> {
             }
             self.sema
                 .ancestors_with_macros(path.syntax().clone())
-                .skip(depth - PATH_DEPTH_IN_CALL_EXPR)
-                .next()
+                .nth(depth - PATH_DEPTH_IN_CALL_EXPR)
         } else {
             None
         }
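
`.skip(depth).next()` builds a `Skip` adapter just to take a single element; `Iterator::nth(depth)` expresses the same thing in one call, including the `None` result when the iterator is too short. A minimal sketch:

    fn main() {
        let ancestors = ["Path", "PathExpr", "CallExpr", "ExprStmt"];
        let depth = 2;

        // before: ancestors.iter().skip(depth).next()
        let node = ancestors.iter().nth(depth);
        assert_eq!(node, Some(&"CallExpr"));

        // Out-of-range depths still yield None, same as skip(..).next().
        assert_eq!(ancestors.iter().nth(10), None);
    }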

@@ -246,7 +246,7 @@ impl<'a> Writer<'a> {
     fn enqueue(&mut self, subtree: &'a tt::Subtree) -> u32 {
         let idx = self.subtree.len();
-        let delimiter_id = subtree.delimiter.map(|it| it.id).unwrap_or(TokenId::unspecified());
+        let delimiter_id = subtree.delimiter.map(|it| it.id).unwrap_or_else(TokenId::unspecified);
         let delimiter_kind = subtree.delimiter.map(|it| it.kind);
         self.subtree.push(SubtreeRepr { id: delimiter_id, kind: delimiter_kind, tt: [!0, !0] });
         self.work.push_back((idx, subtree));
@@ -320,7 +320,7 @@ impl Reader {
                 })
                 .collect(),
             };
-            res[i] = Some(s.into())
+            res[i] = Some(s)
         }
         res[0].take().unwrap()
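
`unwrap_or(f())` evaluates its argument eagerly even when the `Option` is `Some`; `unwrap_or_else(f)` only calls the fallback when it is actually needed. The second hunk likewise drops an `.into()` that converted a value to the type it presumably already had. A small sketch of the laziness difference, using a stand-in `TokenId` rather than the crate's real type:

    #[derive(Debug, PartialEq, Clone, Copy)]
    struct TokenId(u32);

    impl TokenId {
        fn unspecified() -> TokenId {
            // If this were expensive or had side effects, `unwrap_or_else`
            // guarantees it only runs on the `None` path.
            TokenId(u32::MAX)
        }
    }

    fn main() {
        let explicit: Option<TokenId> = Some(TokenId(7));
        let missing: Option<TokenId> = None;

        // before: .unwrap_or(TokenId::unspecified()) builds the fallback even for `Some`
        assert_eq!(explicit.unwrap_or_else(TokenId::unspecified), TokenId(7));
        assert_eq!(missing.unwrap_or_else(TokenId::unspecified), TokenId(u32::MAX));
    }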

@@ -211,13 +211,10 @@ fn validate_numeric_name(name_ref: Option<ast::NameRef>, errors: &mut Vec<Syntax
 }

 fn validate_visibility(vis: ast::Visibility, errors: &mut Vec<SyntaxError>) {
-    if vis.in_token().is_none() {
-        if vis.path().and_then(|p| p.as_single_name_ref()).and_then(|n| n.ident_token()).is_some() {
-            errors.push(SyntaxError::new(
-                "incorrect visibility restriction",
-                vis.syntax.text_range(),
-            ));
-        }
+    let path_without_in_token = vis.in_token().is_none()
+        && vis.path().and_then(|p| p.as_single_name_ref()).and_then(|n| n.ident_token()).is_some();
+    if path_without_in_token {
+        errors.push(SyntaxError::new("incorrect visibility restriction", vis.syntax.text_range()));
     }
     let parent = match vis.syntax().parent() {
         Some(it) => it,