rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0000.rs

//! An experimental implementation of [Rust RFC#2256 libsyntax2.0].

// NOTE: the `use` imports and the `File` struct definition are missing from this
// fuzzed input; only the surviving items are reconstructed below.
impl File {
    // Reconstructed from the call sites below; the step that attaches `_errors`
    // to the tree was lost in this fuzzed input and is not reconstructed.
    fn new(root: GreenNode, _errors: Vec<SyntaxError>) -> File {
        let root = SyntaxNode::new_owned(root);
        validate_block_structure(root.borrowed());
        File { root }
    }

    pub fn parse(text: &str) -> File {
        let tokens = tokenize(&text);
        let (green, errors) = parser_impl::parse_with::<syntax_node::GreenBuilder>(
            text, &tokens, grammar::root,
        );
        File::new(green, errors)
    }

    pub fn reparse(&self, edit: &AtomTextEdit) -> File {
        self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit))
    }
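
    /// Tries to reparse only the smallest reparsable node covering the edit:
    /// it re-applies the edit to that node's text, re-lexes it, bails out if the
    /// curlies are unbalanced, reparses just that node, and splices the new green
    /// node (plus re-anchored errors) back into the tree.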
    pub fn incremental_reparse(&self, edit: &AtomTextEdit) -> Option<File> {
        let (node, reparser) = find_reparsable_node(self.syntax(), edit.delete)?;
        let text = replace_range(
            node.text().to_string(),
            edit.delete - node.range().start(),
            &edit.insert,
        );
        let tokens = tokenize(&text);
        if !is_balanced(&tokens) {
            return None;
        }
        let (green, new_errors) = parser_impl::parse_with::<syntax_node::GreenBuilder>(
            &text, &tokens, reparser,
        );
        let green_root = node.replace_with(green);
        let errors = merge_errors(self.errors(), new_errors, node, edit);
        Some(File::new(green_root, errors))
    }

    fn full_reparse(&self, edit: &AtomTextEdit) -> File {
        let text = replace_range(self.syntax().text().to_string(), edit.delete, &edit.insert);
        File::parse(&text)
    }

    pub fn ast(&self) -> ast::Root {
        ast::Root::cast(self.syntax()).unwrap()
    }

    pub fn syntax(&self) -> SyntaxNodeRef {
        self.root.borrowed()
    }
}

// Reconstructed from the surviving fragment: every `{` in the tree must be
// properly paired with a sibling `}`. The walking/dumping helpers used here
// (`algo::walk::preorder`, `dump_tree`) are assumptions based on what the
// fragment references.
fn validate_block_structure(root: SyntaxNodeRef) {
    let mut stack = Vec::new();
    for node in algo::walk::preorder(root) {
        match node.kind() {
            L_CURLY => stack.push(node),
            R_CURLY => {
                if let Some(pair) = stack.pop() {
                    assert_eq!(node.parent(), pair.parent(), "\nunpaired curlies:\n{}\n{}\n", root.text(), dump_tree(root));
                    assert!(
                        node.next_sibling().is_none() && pair.prev_sibling().is_none(),
                        "\nfloating curlies at {:?}\nfile:\n{}\nerror:\n{}\n",
                        node,
                        root.text(),
                        node.text(),
                    );
                }
            }
            _ => (),
        }
    }
}
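
/// A single atomic text edit: the text in the `delete` range is removed and
/// `insert` is put in its place.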
#[derive(Debug, Clone)]
pub struct AtomTextEdit {
    pub delete: TextRange,
    pub insert: String,
}

impl AtomTextEdit {
    pub fn replace(range: TextRange, replace_with: String) -> AtomTextEdit {
        AtomTextEdit { delete: range, insert: replace_with }
    }

    pub fn delete(range: TextRange) -> AtomTextEdit {
        AtomTextEdit::replace(range, String::new())
    }

    pub fn insert(offset: TextUnit, text: String) -> AtomTextEdit {
        AtomTextEdit::replace(TextRange::offset_len(offset, 0.into()), text)
    }
}
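
// Walks up from the smallest node covering `range` and returns the first
// ancestor that has a dedicated reparser (blocks and record field lists),
// together with that reparser.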
fn find_reparsable_node(node: SyntaxNodeRef, range: TextRange) -> Option<(SyntaxNodeRef, fn(&mut Parser))> {
    let node = algo::find_covering_node(node, range);
    return algo::ancestors(node)
        .filter_map(|node| reparser(node).map(|r| (node, r)))
        .next();

    fn reparser(node: SyntaxNodeRef) -> Option<fn(&mut Parser)> {
        let res = match node.kind() {
            BLOCK => grammar::block,
            RECORD_FIELD_LIST => grammar::record_field_list,
            _ => return None,
        };
        Some(res)
    }
}

pub /*(meh)*/ fn replace_range(mut text: String, range: TextRange, replace_with: &str) -> String {
    let start = u32::from(range.start()) as usize;
    let end = u32::from(range.end()) as usize;
    text.replace_range(start..end, replace_with);
    text
}
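
// A minimal test-style sketch (not part of the original fuzz input) showing how an
// `AtomTextEdit` drives `replace_range`. It assumes `TextUnit: From<u32>`, which is
// consistent with the `0.into()` call in `AtomTextEdit::insert` above.
#[cfg(test)]
mod replace_range_sketch {
    use super::*;

    #[test]
    fn atom_edit_replaces_a_range() {
        // "foo" occupies offsets 4..7 of "let foo = 1;".
        let edit = AtomTextEdit::replace(TextRange::offset_len(4.into(), 3.into()), "bar".to_string());
        assert_eq!(replace_range("let foo = 1;".to_string(), edit.delete, &edit.insert), "let bar = 1;");
    }
}

/// Quick sanity check before an incremental reparse: the re-lexed text must start
/// with `{`, end with `}`, and keep its curlies balanced in between.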
fn is_balanced(tokens: &[Token]) -> bool {
    if tokens.is_empty()
        || tokens.first().unwrap().kind != L_CURLY
        || tokens.last().unwrap().kind != R_CURLY
    {
        return false;
    }
    let mut balance = 0usize;
    for t in tokens.iter() {
        match t.kind {
            L_CURLY => balance += 1,
            R_CURLY => balance = match balance.checked_sub(1) {
                Some(b) => b,
                None => return false,
            },
            _ => (),
        }
    }
    balance == 0
}
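
// Re-anchors errors after an incremental reparse: old errors before the reparsed
// node keep their offsets, old errors after it are shifted by the edit's length
// change, errors inside the old node are dropped, and the freshly produced errors
// (which are relative to the reparsed node's text) are offset to the node's start.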
fn merge_errors(
    old_errors: Vec<SyntaxError>,
    new_errors: Vec<SyntaxError>,
    old_node: SyntaxNodeRef,
    edit: &AtomTextEdit,
) -> Vec<SyntaxError> {
    let mut res = Vec::new();
    for e in old_errors {
        if e.offset < old_node.range().start() {
            res.push(e)
        } else if e.offset > old_node.range().end() {
            res.push(SyntaxError {
                msg: e.msg,
                offset: e.offset + TextUnit::of_str(&edit.insert) - edit.delete.len(),
            })
        }
    }
    for e in new_errors {
        res.push(SyntaxError {
            msg: e.msg,
            offset: e.offset + old_node.range().start(),
        })
    }
    res
}