mirror of https://github.com/rust-lang/rust-analyzer (synced 2025-01-11 20:58:54 +00:00)

Rename File -> SourceFileNode

commit f8b36bbc3b, parent 2119fe2143
23 changed files with 117 additions and 102 deletions

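The diff below is a mechanical rename: every use of the parse-tree type `File` becomes `SourceFileNode` (and the `SourceFileEdit` struct becomes `SourceFileNodeEdit`); signatures and behaviour are otherwise unchanged. For orientation, a minimal sketch of the renamed entry point, not part of this commit, assuming the ra_syntax crate from this repository as a dependency:

    use ra_syntax::SourceFileNode;

    fn main() {
        // Before this commit this was `File::parse("fn main() {}")`.
        let file = SourceFileNode::parse("fn main() {}");
        // Accessors keep their old names and signatures.
        let _syntax = file.syntax();
        let _ast = file.ast();
    }
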
@@ -2,7 +2,7 @@ use ra_editor::find_node_at_offset;
 use ra_syntax::{
     algo::visit::{visitor, visitor_ctx, Visitor, VisitorCtx},
     ast::{self, AstChildren, LoopBodyOwner, ModuleItemOwner},
-    AstNode, AtomEdit, File,
+    AstNode, AtomEdit, SourceFileNode,
     SyntaxKind::*,
     SyntaxNodeRef, TextUnit,
 };
@@ -63,7 +63,7 @@ pub(crate) fn resolve_based_completion(
 pub(crate) fn find_target_module(
     module_tree: &ModuleTree,
     module_id: ModuleId,
-    file: &File,
+    file: &SourceFileNode,
     offset: TextUnit,
 ) -> Option<ModuleId> {
     let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), offset)?;
@@ -142,7 +142,7 @@ pub(crate) fn scope_completion(
 }
 
 fn complete_module_items(
-    file: &File,
+    file: &SourceFileNode,
     items: AstChildren<ast::ModuleItem>,
     this_item: Option<ast::NameRef>,
     acc: &mut Vec<CompletionItem>,
@@ -164,7 +164,7 @@ fn complete_module_items(
     );
 }
 
-fn complete_name_ref(file: &File, name_ref: ast::NameRef, acc: &mut Vec<CompletionItem>) {
+fn complete_name_ref(file: &SourceFileNode, name_ref: ast::NameRef, acc: &mut Vec<CompletionItem>) {
     if !is_node::<ast::Path>(name_ref.syntax()) {
         return;
     }
@@ -239,7 +239,7 @@ fn is_node<'a, N: AstNode<'a>>(node: SyntaxNodeRef<'a>) -> bool {
 }
 
 fn complete_expr_keywords(
-    file: &File,
+    file: &SourceFileNode,
     fn_def: ast::FnDef,
     name_ref: ast::NameRef,
     acc: &mut Vec<CompletionItem>,

@@ -1,7 +1,7 @@
 use std::sync::Arc;
 
 use ra_editor::LineIndex;
-use ra_syntax::{File, SyntaxNode};
+use ra_syntax::{SourceFileNode, SyntaxNode};
 use salsa::{self, Database};
 
 use crate::{
@@ -85,7 +85,7 @@ salsa::database_storage! {
 
 salsa::query_group! {
     pub(crate) trait SyntaxDatabase: crate::input::FilesDatabase {
-        fn file_syntax(file_id: FileId) -> File {
+        fn file_syntax(file_id: FileId) -> SourceFileNode {
             type FileSyntaxQuery;
         }
         fn file_lines(file_id: FileId) -> Arc<LineIndex> {
@@ -103,9 +103,9 @@ salsa::query_group! {
     }
 }
 
-fn file_syntax(db: &impl SyntaxDatabase, file_id: FileId) -> File {
+fn file_syntax(db: &impl SyntaxDatabase, file_id: FileId) -> SourceFileNode {
     let text = db.file_text(file_id);
-    File::parse(&*text)
+    SourceFileNode::parse(&*text)
 }
 fn file_lines(db: &impl SyntaxDatabase, file_id: FileId) -> Arc<LineIndex> {
     let text = db.file_text(file_id);

@@ -272,7 +272,7 @@ pub fn resolve_local_name<'a>(
 #[cfg(test)]
 mod tests {
     use ra_editor::find_node_at_offset;
-    use ra_syntax::File;
+    use ra_syntax::SourceFileNode;
     use test_utils::extract_offset;
 
     use super::*;
@@ -287,7 +287,7 @@ mod tests {
             buf.push_str(&code[off..]);
             buf
         };
-        let file = File::parse(&code);
+        let file = SourceFileNode::parse(&code);
         let marker: ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap();
         let fn_def: ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap();
         let scopes = FnScopes::new(fn_def);
@@ -376,7 +376,7 @@ mod tests {
 
     fn do_check_local_name(code: &str, expected_offset: u32) {
         let (off, code) = extract_offset(code);
-        let file = File::parse(&code);
+        let file = SourceFileNode::parse(&code);
         let fn_def: ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap();
         let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap();
 

@@ -95,10 +95,10 @@ fn collect_imports(tree: ast::UseTree, acc: &mut Vec<Entry>) {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use ra_syntax::{ast::ModuleItemOwner, File};
+    use ra_syntax::{ast::ModuleItemOwner, SourceFileNode};
 
     fn do_check(code: &str, expected: &[&str]) {
-        let file = File::parse(&code);
+        let file = SourceFileNode::parse(&code);
         let scope = ModuleScope::new(file.ast().items());
         let actual = scope.entries.iter().map(|it| it.name()).collect::<Vec<_>>();
         assert_eq!(expected, actual.as_slice());

@@ -7,7 +7,7 @@ use std::{
 use ra_editor::{self, find_node_at_offset, FileSymbol, LineIndex, LocalEdit};
 use ra_syntax::{
     ast::{self, ArgListOwner, Expr, NameOwner},
-    AstNode, File, SmolStr,
+    AstNode, SourceFileNode, SmolStr,
     SyntaxKind::*,
     SyntaxNodeRef, TextRange, TextUnit,
 };
@@ -27,7 +27,7 @@ use crate::{
     input::{FilesDatabase, SourceRoot, SourceRootId, WORKSPACE},
     symbol_index::SymbolIndex,
     AnalysisChange, Cancelable, CrateGraph, CrateId, Diagnostic, FileId, FileResolver,
-    FileSystemEdit, FilePosition, Query, SourceChange, SourceFileEdit,
+    FileSystemEdit, FilePosition, Query, SourceChange, SourceFileNodeEdit,
 };
 
 #[derive(Clone, Debug)]
@@ -180,7 +180,7 @@ impl fmt::Debug for AnalysisImpl {
 }
 
 impl AnalysisImpl {
-    pub fn file_syntax(&self, file_id: FileId) -> File {
+    pub fn file_syntax(&self, file_id: FileId) -> SourceFileNode {
         self.db.file_syntax(file_id)
     }
     pub fn file_line_index(&self, file_id: FileId) -> Arc<LineIndex> {
@@ -562,7 +562,7 @@ impl AnalysisImpl {
 
 impl SourceChange {
     pub(crate) fn from_local_edit(file_id: FileId, label: &str, edit: LocalEdit) -> SourceChange {
-        let file_edit = SourceFileEdit {
+        let file_edit = SourceFileNodeEdit {
             file_id,
             edits: edit.edit.into_atoms(),
         };

@@ -20,7 +20,7 @@ pub mod mock_analysis;
 
 use std::{fmt, sync::Arc};
 
-use ra_syntax::{AtomEdit, File, TextRange, TextUnit};
+use ra_syntax::{AtomEdit, SourceFileNode, TextRange, TextUnit};
 use rayon::prelude::*;
 use relative_path::RelativePathBuf;
 
@@ -128,13 +128,13 @@ pub struct FilePosition {
 #[derive(Debug)]
 pub struct SourceChange {
     pub label: String,
-    pub source_file_edits: Vec<SourceFileEdit>,
+    pub source_file_edits: Vec<SourceFileNodeEdit>,
     pub file_system_edits: Vec<FileSystemEdit>,
     pub cursor_position: Option<FilePosition>,
 }
 
 #[derive(Debug)]
-pub struct SourceFileEdit {
+pub struct SourceFileNodeEdit {
     pub file_id: FileId,
     pub edits: Vec<AtomEdit>,
 }
@@ -204,16 +204,16 @@ pub struct Analysis {
 }
 
 impl Analysis {
-    pub fn file_syntax(&self, file_id: FileId) -> File {
+    pub fn file_syntax(&self, file_id: FileId) -> SourceFileNode {
         self.imp.file_syntax(file_id).clone()
     }
     pub fn file_line_index(&self, file_id: FileId) -> Arc<LineIndex> {
         self.imp.file_line_index(file_id)
     }
-    pub fn extend_selection(&self, file: &File, range: TextRange) -> TextRange {
+    pub fn extend_selection(&self, file: &SourceFileNode, range: TextRange) -> TextRange {
         ra_editor::extend_selection(file, range).unwrap_or(range)
     }
-    pub fn matching_brace(&self, file: &File, offset: TextUnit) -> Option<TextUnit> {
+    pub fn matching_brace(&self, file: &SourceFileNode, offset: TextUnit) -> Option<TextUnit> {
         ra_editor::matching_brace(file, offset)
     }
     pub fn syntax_tree(&self, file_id: FileId) -> String {
@@ -309,7 +309,7 @@ pub struct LibraryData {
 impl LibraryData {
     pub fn prepare(files: Vec<(FileId, String)>, file_resolver: Arc<FileResolver>) -> LibraryData {
         let symbol_index = SymbolIndex::for_files(files.par_iter().map(|(file_id, text)| {
-            let file = File::parse(text);
+            let file = SourceFileNode::parse(text);
             (*file_id, file)
         }));
         LibraryData {

@@ -6,7 +6,7 @@ use std::{
 use fst::{self, Streamer};
 use ra_editor::{file_symbols, FileSymbol};
 use ra_syntax::{
-    File,
+    SourceFileNode,
     SyntaxKind::{self, *},
 };
 use rayon::prelude::*;
@@ -34,7 +34,9 @@ impl Hash for SymbolIndex {
 }
 
 impl SymbolIndex {
-    pub(crate) fn for_files(files: impl ParallelIterator<Item = (FileId, File)>) -> SymbolIndex {
+    pub(crate) fn for_files(
+        files: impl ParallelIterator<Item = (FileId, SourceFileNode)>,
+    ) -> SymbolIndex {
         let mut symbols = files
             .flat_map(|(file_id, file)| {
                 file_symbols(&file)
@@ -51,7 +53,7 @@ impl SymbolIndex {
         SymbolIndex { symbols, map }
     }
 
-    pub(crate) fn for_file(file_id: FileId, file: File) -> SymbolIndex {
+    pub(crate) fn for_file(file_id: FileId, file: SourceFileNode) -> SymbolIndex {
         SymbolIndex::for_files(rayon::iter::once((file_id, file)))
     }
 }

@@ -1,4 +1,4 @@
-use ra_syntax::{File, SyntaxKind, SyntaxNode, SyntaxNodeRef, TextRange};
+use ra_syntax::{SourceFileNode, SyntaxKind, SyntaxNode, SyntaxNodeRef, TextRange};
 
 use crate::db::SyntaxDatabase;
 use crate::FileId;
@@ -43,7 +43,7 @@ impl LocalSyntaxPtr {
         }
     }
 
-    pub(crate) fn resolve(self, file: &File) -> SyntaxNode {
+    pub(crate) fn resolve(self, file: &SourceFileNode) -> SyntaxNode {
         let mut curr = file.syntax();
         loop {
             if curr.range() == self.range && curr.kind() == self.kind {
@@ -67,7 +67,7 @@ impl LocalSyntaxPtr {
 #[test]
 fn test_local_syntax_ptr() {
     use ra_syntax::{ast, AstNode};
-    let file = File::parse("struct Foo { f: u32, }");
+    let file = SourceFileNode::parse("struct Foo { f: u32, }");
    let field = file
        .syntax()
        .descendants()

@@ -11,7 +11,7 @@ use std::{fs, io::Read, path::Path, time::Instant};
 use clap::{App, Arg, SubCommand};
 use join_to_string::join;
 use ra_editor::{extend_selection, file_structure, syntax_tree};
-use ra_syntax::{File, TextRange};
+use ra_syntax::{SourceFileNode, TextRange};
 use tools::collect_tests;
 
 type Result<T> = ::std::result::Result<T, failure::Error>;
@@ -79,9 +79,9 @@ fn main() -> Result<()> {
     Ok(())
 }
 
-fn file() -> Result<File> {
+fn file() -> Result<SourceFileNode> {
     let text = read_stdin()?;
-    Ok(File::parse(&text))
+    Ok(SourceFileNode::parse(&text))
 }
 
 fn read_stdin() -> Result<String> {
@@ -100,12 +100,12 @@ fn render_test(file: &Path, line: usize) -> Result<(String, String)> {
         None => bail!("No test found at line {} at {}", line, file.display()),
         Some((_start_line, test)) => test,
     };
-    let file = File::parse(&test.text);
+    let file = SourceFileNode::parse(&test.text);
     let tree = syntax_tree(&file);
     Ok((test.text, tree))
 }
 
-fn selections(file: &File, start: u32, end: u32) -> String {
+fn selections(file: &SourceFileNode, start: u32, end: u32) -> String {
     let mut ranges = Vec::new();
     let mut cur = Some(TextRange::from_to((start - 1).into(), (end - 1).into()));
     while let Some(r) = cur {

@@ -3,7 +3,7 @@ use join_to_string::join;
 use ra_syntax::{
     algo::{find_covering_node, find_leaf_at_offset},
     ast::{self, AstNode, AttrsOwner, NameOwner, TypeParamsOwner},
-    Direction, File,
+    Direction, SourceFileNode,
     SyntaxKind::{COMMA, WHITESPACE},
     SyntaxNodeRef, TextRange, TextUnit,
 };
@@ -16,7 +16,10 @@ pub struct LocalEdit {
     pub cursor_position: Option<TextUnit>,
 }
 
-pub fn flip_comma<'a>(file: &'a File, offset: TextUnit) -> Option<impl FnOnce() -> LocalEdit + 'a> {
+pub fn flip_comma<'a>(
+    file: &'a SourceFileNode,
+    offset: TextUnit,
+) -> Option<impl FnOnce() -> LocalEdit + 'a> {
     let syntax = file.syntax();
 
     let comma = find_leaf_at_offset(syntax, offset).find(|leaf| leaf.kind() == COMMA)?;
@@ -33,7 +36,10 @@ pub fn flip_comma<'a>(file: &'a File, offset: TextUnit) -> Option<impl FnOnce()
     })
 }
 
-pub fn add_derive<'a>(file: &'a File, offset: TextUnit) -> Option<impl FnOnce() -> LocalEdit + 'a> {
+pub fn add_derive<'a>(
+    file: &'a SourceFileNode,
+    offset: TextUnit,
+) -> Option<impl FnOnce() -> LocalEdit + 'a> {
     let nominal = find_node_at_offset::<ast::NominalDef>(file.syntax(), offset)?;
     Some(move || {
         let derive_attr = nominal
@@ -58,7 +64,10 @@ pub fn add_derive<'a>(file: &'a File, offset: TextUnit) -> Option<impl FnOnce()
     })
 }
 
-pub fn add_impl<'a>(file: &'a File, offset: TextUnit) -> Option<impl FnOnce() -> LocalEdit + 'a> {
+pub fn add_impl<'a>(
+    file: &'a SourceFileNode,
+    offset: TextUnit,
+) -> Option<impl FnOnce() -> LocalEdit + 'a> {
     let nominal = find_node_at_offset::<ast::NominalDef>(file.syntax(), offset)?;
     let name = nominal.name()?;
 
@@ -98,7 +107,7 @@ pub fn add_impl<'a>(file: &'a File, offset: TextUnit) -> Option<impl FnOnce() ->
 }
 
 pub fn introduce_variable<'a>(
-    file: &'a File,
+    file: &'a SourceFileNode,
     range: TextRange,
 ) -> Option<impl FnOnce() -> LocalEdit + 'a> {
     let node = find_covering_node(file.syntax(), range);

@@ -1,11 +1,11 @@
 use ra_syntax::{
     algo::{find_covering_node, find_leaf_at_offset, LeafAtOffset},
-    Direction, File,
+    Direction, SourceFileNode,
     SyntaxKind::*,
     SyntaxNodeRef, TextRange, TextUnit,
 };
 
-pub fn extend_selection(file: &File, range: TextRange) -> Option<TextRange> {
+pub fn extend_selection(file: &SourceFileNode, range: TextRange) -> Option<TextRange> {
     let syntax = file.syntax();
     extend(syntax.borrowed(), range)
 }
@@ -120,7 +120,7 @@ mod tests {
 
     fn do_check(before: &str, afters: &[&str]) {
         let (cursor, before) = extract_offset(before);
-        let file = File::parse(&before);
+        let file = SourceFileNode::parse(&before);
         let mut range = TextRange::offset_len(cursor, 0.into());
         for &after in afters {
             range = extend_selection(&file, range).unwrap();

@@ -1,7 +1,7 @@
 use rustc_hash::FxHashSet;
 
 use ra_syntax::{
-    ast, AstNode, Direction, File,
+    ast, AstNode, Direction, SourceFileNode,
     SyntaxKind::{self, *},
     SyntaxNodeRef, TextRange,
 };
@@ -18,7 +18,7 @@ pub struct Fold {
     pub kind: FoldKind,
 }
 
-pub fn folding_ranges(file: &File) -> Vec<Fold> {
+pub fn folding_ranges(file: &SourceFileNode) -> Vec<Fold> {
     let mut res = vec![];
     let mut visited_comments = FxHashSet::default();
     let mut visited_imports = FxHashSet::default();
@@ -171,7 +171,7 @@ mod tests {
 
     fn do_check(text: &str, fold_kinds: &[FoldKind]) {
         let (ranges, text) = extract_ranges(text);
-        let file = File::parse(&text);
+        let file = SourceFileNode::parse(&text);
         let folds = folding_ranges(&file);
 
         assert_eq!(

@@ -30,7 +30,7 @@ pub use ra_syntax::AtomEdit;
 use ra_syntax::{
     algo::find_leaf_at_offset,
     ast::{self, AstNode, NameOwner},
-    File,
+    SourceFileNode,
     Location,
     SyntaxKind::{self, *},
     SyntaxNodeRef, TextRange, TextUnit,
@@ -60,7 +60,7 @@ pub enum RunnableKind {
     Bin,
 }
 
-pub fn matching_brace(file: &File, offset: TextUnit) -> Option<TextUnit> {
+pub fn matching_brace(file: &SourceFileNode, offset: TextUnit) -> Option<TextUnit> {
     const BRACES: &[SyntaxKind] = &[
         L_CURLY, R_CURLY, L_BRACK, R_BRACK, L_PAREN, R_PAREN, L_ANGLE, R_ANGLE,
     ];
@@ -78,7 +78,7 @@ pub fn matching_brace(file: &File, offset: TextUnit) -> Option<TextUnit> {
     Some(matching_node.range().start())
 }
 
-pub fn highlight(file: &File) -> Vec<HighlightedRange> {
+pub fn highlight(file: &SourceFileNode) -> Vec<HighlightedRange> {
     let mut res = Vec::new();
     for node in file.syntax().descendants() {
         let tag = match node.kind() {
@@ -100,7 +100,7 @@ pub fn highlight(file: &File) -> Vec<HighlightedRange> {
     res
 }
 
-pub fn diagnostics(file: &File) -> Vec<Diagnostic> {
+pub fn diagnostics(file: &SourceFileNode) -> Vec<Diagnostic> {
     fn location_to_range(location: Location) -> TextRange {
         match location {
             Location::Offset(offset) => TextRange::offset_len(offset, 1.into()),
@@ -117,11 +117,11 @@ pub fn diagnostics(file: &File) -> Vec<Diagnostic> {
         .collect()
 }
 
-pub fn syntax_tree(file: &File) -> String {
+pub fn syntax_tree(file: &SourceFileNode) -> String {
     ::ra_syntax::utils::dump_tree(file.syntax())
 }
 
-pub fn runnables(file: &File) -> Vec<Runnable> {
+pub fn runnables(file: &SourceFileNode) -> Vec<Runnable> {
     file.syntax()
         .descendants()
         .filter_map(ast::FnDef::cast)
@@ -163,7 +163,7 @@ mod tests {
 
     #[test]
     fn test_highlighting() {
-        let file = File::parse(
+        let file = SourceFileNode::parse(
             r#"
 // comment
 fn main() {}
@@ -184,7 +184,7 @@ fn main() {}
 
     #[test]
     fn test_runnables() {
-        let file = File::parse(
+        let file = SourceFileNode::parse(
             r#"
 fn main() {}
 
@@ -209,7 +209,7 @@ fn test_foo() {}
     fn test_matching_brace() {
         fn do_check(before: &str, after: &str) {
             let (pos, before) = extract_offset(before);
-            let file = File::parse(&before);
+            let file = SourceFileNode::parse(&before);
             let new_pos = match matching_brace(&file, pos) {
                 None => pos,
                 Some(pos) => pos,

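The hunks above only swap the `&File` parameter of ra_editor's entry points for `&SourceFileNode`. A short usage sketch, not part of this commit, assuming the ra_editor and ra_syntax crates from this repository as dependencies:

    use ra_editor::{diagnostics, highlight, matching_brace, runnables, syntax_tree};
    use ra_syntax::SourceFileNode;

    fn main() {
        // The renamed type is the only change; the functions behave as before.
        let file = SourceFileNode::parse("fn main() { let _ = (1, 2); }");
        println!("{}", syntax_tree(&file));
        println!("{} highlight ranges", highlight(&file).len());
        println!("{} diagnostics", diagnostics(&file).len());
        println!("{} runnables", runnables(&file).len());
        // Offset 20 is the tuple's `(`; matching_brace should return the offset of the matching `)`.
        let _ = matching_brace(&file, 20.into());
    }
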
@@ -3,7 +3,7 @@ use crate::TextRange;
 use ra_syntax::{
     algo::visit::{visitor, Visitor},
     ast::{self, DocCommentsOwner, NameOwner},
-    AstNode, File, SmolStr, SyntaxKind, SyntaxNodeRef, WalkEvent,
+    AstNode, SourceFileNode, SmolStr, SyntaxKind, SyntaxNodeRef, WalkEvent,
 };
 
 #[derive(Debug, Clone)]
@@ -23,7 +23,7 @@ pub struct FileSymbol {
 }
 
 impl FileSymbol {
-    pub fn docs(&self, file: &File) -> Option<String> {
+    pub fn docs(&self, file: &SourceFileNode) -> Option<String> {
         file.syntax()
             .descendants()
             .filter(|node| node.kind() == self.kind && node.range() == self.node_range)
@@ -52,7 +52,7 @@ impl FileSymbol {
     }
 }
 
-pub fn file_symbols(file: &File) -> Vec<FileSymbol> {
+pub fn file_symbols(file: &SourceFileNode) -> Vec<FileSymbol> {
     file.syntax().descendants().filter_map(to_symbol).collect()
 }
 
@@ -77,7 +77,7 @@ fn to_symbol(node: SyntaxNodeRef) -> Option<FileSymbol> {
         .accept(node)?
 }
 
-pub fn file_structure(file: &File) -> Vec<StructureNode> {
+pub fn file_structure(file: &SourceFileNode) -> Vec<StructureNode> {
     let mut res = Vec::new();
     let mut stack = Vec::new();
 
@@ -153,7 +153,7 @@ mod tests {
 
     #[test]
     fn test_file_structure() {
-        let file = File::parse(
+        let file = SourceFileNode::parse(
             r#"
 struct Foo {
     x: i32

@@ -1,10 +1,14 @@
 use crate::LocalEdit;
 pub use crate::_test_utils::*;
-use ra_syntax::{File, TextRange, TextUnit};
+use ra_syntax::{SourceFileNode, TextRange, TextUnit};
 
-pub fn check_action<F: Fn(&File, TextUnit) -> Option<LocalEdit>>(before: &str, after: &str, f: F) {
+pub fn check_action<F: Fn(&SourceFileNode, TextUnit) -> Option<LocalEdit>>(
+    before: &str,
+    after: &str,
+    f: F,
+) {
     let (before_cursor_pos, before) = extract_offset(before);
-    let file = File::parse(&before);
+    let file = SourceFileNode::parse(&before);
     let result = f(&file, before_cursor_pos).expect("code action is not applicable");
     let actual = result.edit.apply(&before);
     let actual_cursor_pos = match result.cursor_position {
@@ -15,13 +19,13 @@ pub fn check_action<F: Fn(&File, TextUnit) -> Option<LocalEdit>>(before: &str, a
     assert_eq_text!(after, &actual);
 }
 
-pub fn check_action_range<F: Fn(&File, TextRange) -> Option<LocalEdit>>(
+pub fn check_action_range<F: Fn(&SourceFileNode, TextRange) -> Option<LocalEdit>>(
     before: &str,
     after: &str,
     f: F,
 ) {
     let (range, before) = extract_range(before);
-    let file = File::parse(&before);
+    let file = SourceFileNode::parse(&before);
     let result = f(&file, range).expect("code action is not applicable");
     let actual = result.edit.apply(&before);
     let actual_cursor_pos = match result.cursor_position {

@@ -4,14 +4,14 @@ use ra_syntax::{
     algo::{find_covering_node, find_leaf_at_offset, LeafAtOffset},
     ast,
     text_utils::{contains_offset_nonstrict, intersect},
-    AstNode, File, SyntaxKind,
+    AstNode, SourceFileNode, SyntaxKind,
     SyntaxKind::*,
     SyntaxNodeRef, TextRange, TextUnit,
 };
 
 use crate::{find_node_at_offset, EditBuilder, LocalEdit};
 
-pub fn join_lines(file: &File, range: TextRange) -> LocalEdit {
+pub fn join_lines(file: &SourceFileNode, range: TextRange) -> LocalEdit {
     let range = if range.is_empty() {
         let syntax = file.syntax();
         let text = syntax.text().slice(range.start()..);
@@ -55,7 +55,7 @@ pub fn join_lines(file: &File, range: TextRange) -> LocalEdit {
     }
 }
 
-pub fn on_enter(file: &File, offset: TextUnit) -> Option<LocalEdit> {
+pub fn on_enter(file: &SourceFileNode, offset: TextUnit) -> Option<LocalEdit> {
     let comment = find_leaf_at_offset(file.syntax(), offset)
         .left_biased()
         .and_then(ast::Comment::cast)?;
@@ -80,7 +80,7 @@ pub fn on_enter(file: &File, offset: TextUnit) -> Option<LocalEdit> {
     })
 }
 
-fn node_indent<'a>(file: &'a File, node: SyntaxNodeRef) -> Option<&'a str> {
+fn node_indent<'a>(file: &'a SourceFileNode, node: SyntaxNodeRef) -> Option<&'a str> {
     let ws = match find_leaf_at_offset(file.syntax(), node.range().start()) {
         LeafAtOffset::Between(l, r) => {
             assert!(r == node);
@@ -100,7 +100,7 @@ fn node_indent<'a>(file: &'a File, node: SyntaxNodeRef) -> Option<&'a str> {
     Some(&text[pos..])
 }
 
-pub fn on_eq_typed(file: &File, offset: TextUnit) -> Option<LocalEdit> {
+pub fn on_eq_typed(file: &SourceFileNode, offset: TextUnit) -> Option<LocalEdit> {
     let let_stmt: ast::LetStmt = find_node_at_offset(file.syntax(), offset)?;
     if let_stmt.has_semi() {
         return None;
@@ -390,7 +390,7 @@ fn foo() {
 
     fn check_join_lines_sel(before: &str, after: &str) {
         let (sel, before) = extract_range(before);
-        let file = File::parse(&before);
+        let file = SourceFileNode::parse(&before);
         let result = join_lines(&file, sel);
         let actual = result.edit.apply(&before);
         assert_eq_text!(after, &actual);
@@ -469,7 +469,7 @@ pub fn handle_find_matching_brace() {
     fn test_on_eq_typed() {
        fn do_check(before: &str, after: &str) {
            let (offset, before) = extract_offset(before);
-           let file = File::parse(&before);
+           let file = SourceFileNode::parse(&before);
            let result = on_eq_typed(&file, offset).unwrap();
            let actual = result.edit.apply(&before);
            assert_eq_text!(after, &actual);
@@ -513,7 +513,7 @@ fn foo() {
     fn test_on_enter() {
        fn apply_on_enter(before: &str) -> Option<String> {
            let (offset, before) = extract_offset(before);
-           let file = File::parse(&before);
+           let file = SourceFileNode::parse(&before);
            let result = on_enter(&file, offset)?;
            let actual = result.edit.apply(&before);
            let actual = add_cursor(&actual, result.cursor_position.unwrap());

@@ -2,7 +2,7 @@ use languageserver_types::{
     Location, Position, Range, SymbolKind, TextDocumentEdit, TextDocumentIdentifier,
     TextDocumentItem, TextDocumentPositionParams, TextEdit, Url, VersionedTextDocumentIdentifier,
 };
-use ra_analysis::{FileId, FileSystemEdit, SourceChange, SourceFileEdit, FilePosition};
+use ra_analysis::{FileId, FileSystemEdit, SourceChange, SourceFileNodeEdit, FilePosition};
 use ra_editor::{AtomEdit, Edit, LineCol, LineIndex};
 use ra_syntax::{SyntaxKind, TextRange, TextUnit};
 
@@ -257,7 +257,7 @@ fn translate_offset_with_edit(
     }
 }
 
-impl TryConvWith for SourceFileEdit {
+impl TryConvWith for SourceFileNodeEdit {
     type Ctx = ServerWorld;
     type Output = TextDocumentEdit;
     fn try_conv_with(self, world: &ServerWorld) -> Result<TextDocumentEdit> {

@@ -61,12 +61,12 @@ pub use crate::{
 
 use crate::yellow::GreenNode;
 
-// TODO: pick a single name for everything. SourceFile maybe?
+// TODO: pick a single name for everything. SourceFileNode maybe?
 /// File represents a parse tree for a single Rust file.
-pub type File = ast::RootNode;
+pub type SourceFileNode = ast::RootNode;
 
-impl File {
-    fn new(green: GreenNode, errors: Vec<SyntaxError>) -> File {
+impl SourceFileNode {
+    fn new(green: GreenNode, errors: Vec<SyntaxError>) -> SourceFileNode {
         let root = SyntaxNode::new(green, errors);
         if cfg!(debug_assertions) {
             utils::validate_block_structure(root.borrowed());
@@ -74,24 +74,24 @@ impl File {
         assert_eq!(root.kind(), SyntaxKind::ROOT);
         ast::RootNode { syntax: root }
     }
-    pub fn parse(text: &str) -> File {
+    pub fn parse(text: &str) -> SourceFileNode {
         let tokens = tokenize(&text);
         let (green, errors) =
             parser_impl::parse_with(yellow::GreenBuilder::new(), text, &tokens, grammar::root);
-        File::new(green, errors)
+        SourceFileNode::new(green, errors)
     }
-    pub fn reparse(&self, edit: &AtomEdit) -> File {
+    pub fn reparse(&self, edit: &AtomEdit) -> SourceFileNode {
         self.incremental_reparse(edit)
             .unwrap_or_else(|| self.full_reparse(edit))
     }
-    pub fn incremental_reparse(&self, edit: &AtomEdit) -> Option<File> {
+    pub fn incremental_reparse(&self, edit: &AtomEdit) -> Option<SourceFileNode> {
         reparsing::incremental_reparse(self.syntax(), edit, self.errors())
-            .map(|(green_node, errors)| File::new(green_node, errors))
+            .map(|(green_node, errors)| SourceFileNode::new(green_node, errors))
     }
-    fn full_reparse(&self, edit: &AtomEdit) -> File {
+    fn full_reparse(&self, edit: &AtomEdit) -> SourceFileNode {
         let text =
             text_utils::replace_range(self.syntax().text().to_string(), edit.delete, &edit.insert);
-        File::parse(&text)
+        SourceFileNode::parse(&text)
     }
     /// Typed AST representation of the parse tree.
     pub fn ast(&self) -> ast::Root {

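The `impl SourceFileNode` block above keeps the old parse/reparse API under the new name. A sketch of driving it, not part of this commit; it assumes the ra_syntax crate from this repository and that `AtomEdit`'s `delete`/`insert` fields are reachable outside the crate, as in the in-crate reparsing test that follows:

    use ra_syntax::{AtomEdit, SourceFileNode, TextRange};

    fn main() {
        let file = SourceFileNode::parse("fn foo() { 1 + 1 }");
        // Replace `1 + 1` (bytes 11..16). `reparse` tries `incremental_reparse`
        // first and falls back to `full_reparse`, exactly as before the rename.
        let edit = AtomEdit {
            delete: TextRange::from_to(11.into(), 16.into()),
            insert: "2 * 3".to_string(),
        };
        let new_file = file.reparse(&edit);
        // The typed AST accessor keeps its old name.
        let _ast = new_file.ast();
    }
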
@@ -180,7 +180,7 @@ fn merge_errors(
 #[cfg(test)]
 mod tests {
     use super::{
-        super::{test_utils::extract_range, text_utils::replace_range, utils::dump_tree, File},
+        super::{test_utils::extract_range, text_utils::replace_range, utils::dump_tree, SourceFileNode},
         reparse_block, reparse_leaf, AtomEdit, GreenNode, SyntaxError, SyntaxNodeRef,
     };
 
@@ -192,9 +192,9 @@ mod tests {
         let (range, before) = extract_range(before);
         let after = replace_range(before.clone(), range, replace_with);
 
-        let fully_reparsed = File::parse(&after);
+        let fully_reparsed = SourceFileNode::parse(&after);
         let incrementally_reparsed = {
-            let f = File::parse(&before);
+            let f = SourceFileNode::parse(&before);
             let edit = AtomEdit {
                 delete: range,
                 insert: replace_with.to_string(),
@@ -203,7 +203,7 @@ mod tests {
                 reparser(f.syntax(), &edit).expect("cannot incrementally reparse");
             let green_root = node.replace_with(green);
             let errors = super::merge_errors(f.errors(), new_errors, node, &edit);
-            File::new(green_root, errors)
+            SourceFileNode::new(green_root, errors)
         };
 
         assert_eq_text!(

|
||||||
use crate::{File, SyntaxKind, SyntaxNodeRef, WalkEvent};
|
use crate::{SourceFileNode, SyntaxKind, SyntaxNodeRef, WalkEvent};
|
||||||
use std::fmt::Write;
|
use std::fmt::Write;
|
||||||
use std::str;
|
use std::str;
|
||||||
|
|
||||||
|
@ -45,7 +45,7 @@ pub fn dump_tree(syntax: SyntaxNodeRef) -> String {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn check_fuzz_invariants(text: &str) {
|
pub fn check_fuzz_invariants(text: &str) {
|
||||||
let file = File::parse(text);
|
let file = SourceFileNode::parse(text);
|
||||||
let root = file.syntax();
|
let root = file.syntax();
|
||||||
validate_block_structure(root);
|
validate_block_structure(root);
|
||||||
let _ = file.ast();
|
let _ = file.ast();
|
||||||
|
|
|
@@ -5,7 +5,7 @@ use arrayvec::ArrayString;
 use crate::{
     algo::visit::{visitor_ctx, VisitorCtx},
     ast::{self, AstNode},
-    File,
+    SourceFileNode,
     string_lexing::{self, CharComponentKind},
     yellow::{
         SyntaxError,
@@ -13,7 +13,7 @@ use crate::{
     },
 };
 
-pub(crate) fn validate(file: &File) -> Vec<SyntaxError> {
+pub(crate) fn validate(file: &SourceFileNode) -> Vec<SyntaxError> {
     let mut errors = Vec::new();
     for node in file.syntax().descendants() {
         let _ = visitor_ctx(&mut errors)
@@ -155,11 +155,11 @@ fn is_ascii_escape(code: char) -> bool {
 
 #[cfg(test)]
 mod test {
-    use crate::File;
+    use crate::SourceFileNode;
 
-    fn build_file(literal: &str) -> File {
+    fn build_file(literal: &str) -> SourceFileNode {
         let src = format!("const C: char = '{}';", literal);
-        File::parse(&src)
+        SourceFileNode::parse(&src)
     }
 
     fn assert_valid_char(literal: &str) {

@@ -11,7 +11,7 @@ use std::{
 
 use ra_syntax::{
     utils::{check_fuzz_invariants, dump_tree},
-    File,
+    SourceFileNode,
 };
 
 #[test]
@@ -25,7 +25,7 @@ fn lexer_tests() {
 #[test]
 fn parser_tests() {
     dir_tests(&["parser/inline", "parser/ok", "parser/err"], |text| {
-        let file = File::parse(text);
+        let file = SourceFileNode::parse(text);
         dump_tree(file.syntax())
     })
 }

@@ -12,14 +12,14 @@ interface FileSystemEdit {
 
 export interface SourceChange {
     label: string;
-    sourceFileEdits: lc.TextDocumentEdit[];
+    SourceFileNodeEdits: lc.TextDocumentEdit[];
     fileSystemEdits: FileSystemEdit[];
     cursorPosition?: lc.TextDocumentPositionParams;
 }
 
 export async function handle(change: SourceChange) {
     const wsEdit = new vscode.WorkspaceEdit();
-    for (const sourceEdit of change.sourceFileEdits) {
+    for (const sourceEdit of change.SourceFileNodeEdits) {
         const uri = Server.client.protocol2CodeConverter.asUri(
             sourceEdit.textDocument.uri
         );