Cargo Format

Run `cargo fmt` and ignore generated files
Jeremy A. Kolb 2018-10-15 17:44:23 -04:00
parent 39cb6c6d3f
commit 61f3a438d3
76 changed files with 1936 additions and 1530 deletions

View file

@@ -59,7 +59,6 @@
 //! }
 //! ```
 #[macro_use]
 extern crate failure;
 #[macro_use]
@@ -74,16 +73,16 @@ extern crate languageserver_types;
 mod msg;
 mod stdio;
-use crossbeam_channel::{Sender, Receiver};
+use crossbeam_channel::{Receiver, Sender};
 use languageserver_types::{
-    ServerCapabilities, InitializeResult, InitializeParams,
+    notification::{Exit, Initialized},
     request::{Initialize, Shutdown},
-    notification::{Initialized, Exit},
+    InitializeParams, InitializeResult, ServerCapabilities,
 };

 pub type Result<T> = ::std::result::Result<T, failure::Error>;
 pub use {
-    msg::{RawMessage, RawRequest, RawResponse, RawResponseError, RawNotification, ErrorCode},
+    msg::{ErrorCode, RawMessage, RawNotification, RawRequest, RawResponse, RawResponseError},
     stdio::{stdio_transport, Threads},
 };
@@ -97,11 +96,7 @@ pub fn run_server(
     caps: ServerCapabilities,
     receiver: Receiver<RawMessage>,
     sender: Sender<RawMessage>,
-    server: impl FnOnce(
-        InitializeParams,
-        &Receiver<RawMessage>,
-        &Sender<RawMessage>,
-    ) -> Result<()>,
+    server: impl FnOnce(InitializeParams, &Receiver<RawMessage>, &Sender<RawMessage>) -> Result<()>,
 ) -> Result<()> {
     info!("lsp server initializes");
     let params = initialize(&receiver, &sender, caps)?;
@@ -109,12 +104,10 @@ pub fn run_server(
     server(params, &receiver, &sender)?;
     info!("lsp server waiting for exit notification");
     match receiver.recv() {
-        Some(RawMessage::Notification(n)) => {
-            n.cast::<Exit>().map_err(|n| format_err!(
-                "unexpected notification during shutdown: {:?}", n
-            ))?
-        }
-        m => bail!("unexpected message during shutdown: {:?}", m)
+        Some(RawMessage::Notification(n)) => n
+            .cast::<Exit>()
+            .map_err(|n| format_err!("unexpected notification during shutdown: {:?}", n))?,
+        m => bail!("unexpected message during shutdown: {:?}", m),
     }
     info!("lsp server shutdown complete");
     Ok(())
@@ -141,17 +134,15 @@ fn initialize(
         Some(RawMessage::Request(req)) => match req.cast::<Initialize>() {
             Err(req) => bail!("expected initialize request, got {:?}", req),
             Ok(req) => req,
-        }
-        msg =>
-            bail!("expected initialize request, got {:?}", msg),
+        },
+        msg => bail!("expected initialize request, got {:?}", msg),
     };
     let resp = RawResponse::ok::<Initialize>(id, &InitializeResult { capabilities: caps });
     sender.send(RawMessage::Response(resp));
     match receiver.recv() {
         Some(RawMessage::Notification(n)) => {
-            n.cast::<Initialized>().map_err(|_| format_err!(
-                "expected initialized notification"
-            ))?;
+            n.cast::<Initialized>()
+                .map_err(|_| format_err!("expected initialized notification"))?;
         }
         _ => bail!("expected initialized notification"),
     }

View file

@@ -1,11 +1,8 @@
 use std::io::{BufRead, Write};

-use serde_json::{Value, from_str, to_string, from_value, to_value};
-use serde::{Serialize, de::DeserializeOwned};
-use languageserver_types::{
-    request::Request,
-    notification::Notification,
-};
+use languageserver_types::{notification::Notification, request::Request};
+use serde::{de::DeserializeOwned, Serialize};
+use serde_json::{from_str, from_value, to_string, to_value, Value};

 use Result;
@@ -81,7 +78,10 @@ impl RawMessage {
             #[serde(flatten)]
             msg: RawMessage,
         }
-        let text = to_string(&JsonRpc { jsonrpc: "2.0", msg: self })?;
+        let text = to_string(&JsonRpc {
+            jsonrpc: "2.0",
+            msg: self,
+        })?;
         write_msg_text(w, &text)?;
         Ok(())
     }
@@ -115,7 +115,8 @@ impl RawRequest {
 impl RawResponse {
     pub fn ok<R>(id: u64, result: &R::Result) -> RawResponse
-    where R: Request,
+    where
+        R: Request,
         R::Result: Serialize,
     {
         RawResponse {
@@ -125,7 +126,11 @@ impl RawResponse {
         }
     }
     pub fn err(id: u64, code: i32, message: String) -> RawResponse {
-        let error = RawResponseError { code, message, data: None };
+        let error = RawResponseError {
+            code,
+            message,
+            data: None,
+        };
         RawResponse {
             id,
             result: None,
@@ -174,7 +179,9 @@ fn read_msg_text(inp: &mut impl BufRead) -> Result<Option<String>> {
         }
         let mut parts = buf.splitn(2, ": ");
         let header_name = parts.next().unwrap();
-        let header_value = parts.next().ok_or_else(|| format_err!("malformed header: {:?}", buf))?;
+        let header_value = parts
+            .next()
+            .ok_or_else(|| format_err!("malformed header: {:?}", buf))?;
         if header_name == "Content-Length" {
             size = Some(header_value.parse::<usize>()?);
         }
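
The `read_msg_text` reader in the hunk above parses LSP's wire framing: every message is a `Content-Length` header, a blank line, then exactly that many bytes of JSON. A self-contained sketch of that framing, using only std and illustrative function names (not this crate's API):

    use std::io::{self, BufRead, Read, Write};

    // Write one framed message: header, blank line, payload.
    fn write_framed(out: &mut impl Write, json: &str) -> io::Result<()> {
        write!(out, "Content-Length: {}\r\n\r\n{}", json.len(), json)?;
        out.flush()
    }

    // Read one framed message back, returning the JSON payload.
    fn read_framed(inp: &mut impl BufRead) -> io::Result<String> {
        let mut size = None;
        loop {
            let mut line = String::new();
            inp.read_line(&mut line)?;
            if line == "\r\n" {
                break; // blank line: end of headers
            }
            if let Some(v) = line.trim_end().strip_prefix("Content-Length: ") {
                size = Some(v.parse::<usize>().expect("bad Content-Length"));
            }
        }
        let mut buf = vec![0u8; size.expect("missing Content-Length")];
        inp.read_exact(&mut buf)?;
        Ok(String::from_utf8(buf).expect("payload is not utf-8"))
    }

    fn main() -> io::Result<()> {
        let mut wire = Vec::new();
        write_framed(&mut wire, r#"{"jsonrpc":"2.0","method":"exit"}"#)?;
        let echoed = read_framed(&mut io::Cursor::new(wire))?;
        assert!(echoed.contains("exit"));
        Ok(())
    }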

View file

@@ -1,11 +1,9 @@
 use std::{
+    io::{stdin, stdout},
     thread,
-    io::{
-        stdout, stdin,
-    },
 };

-use crossbeam_channel::{Receiver, Sender, bounded};
+use crossbeam_channel::{bounded, Receiver, Sender};
 use {RawMessage, Result};

View file

@@ -1,16 +1,17 @@
+use crate::{
+    module_map::{ModuleDescriptorQuery, ModuleTreeQuery, ModulesDatabase},
+    symbol_index::SymbolIndex,
+    FileId, FileResolverImp,
+};
+use ra_editor::LineIndex;
+use ra_syntax::File;
+use rustc_hash::FxHashSet;
+use salsa;
 use std::{
     fmt,
-    sync::Arc,
     hash::{Hash, Hasher},
+    sync::Arc,
 };
-use salsa;
-use rustc_hash::FxHashSet;
-use ra_syntax::File;
-use ra_editor::{LineIndex};
-use crate::{
-    symbol_index::SymbolIndex,
-    module_map::{ModulesDatabase, ModuleTreeQuery, ModuleDescriptorQuery},
-    FileId, FileResolverImp,
-};

 #[derive(Default)]

View file

@@ -1,35 +1,28 @@
-use std::{
-    collections::BTreeMap,
-};
-use relative_path::RelativePathBuf;
-use ra_syntax::{
-    SmolStr,
-    ast::{self, NameOwner, AstNode},
-    text_utils::is_subrange
-};
-use crate::{
-    FileId,
-    imp::FileResolverImp,
-};
+use crate::{imp::FileResolverImp, FileId};
+use ra_syntax::{
+    ast::{self, AstNode, NameOwner},
+    text_utils::is_subrange,
+    SmolStr,
+};
+use relative_path::RelativePathBuf;
+
+use std::collections::BTreeMap;

 #[derive(Debug, PartialEq, Eq, Hash)]
 pub struct ModuleDescriptor {
-    pub submodules: Vec<Submodule>
+    pub submodules: Vec<Submodule>,
 }

 impl ModuleDescriptor {
     pub fn new(root: ast::Root) -> ModuleDescriptor {
-        let submodules = modules(root)
-            .map(|(name, _)| Submodule { name })
-            .collect();
+        let submodules = modules(root).map(|(name, _)| Submodule { name }).collect();
         ModuleDescriptor { submodules }
     }
 }

 fn modules<'a>(root: ast::Root<'a>) -> impl Iterator<Item = (SmolStr, ast::Module<'a>)> {
-    root
-        .modules()
-        .filter_map(|module| {
+    root.modules().filter_map(|module| {
         let name = module.name()?.text();
         if !module.has_semi() {
             return None;
@@ -56,7 +49,7 @@ struct Node(usize);
 struct NodeData {
     file_id: FileId,
     links: Vec<Link>,
-    parents: Vec<Link>
+    parents: Vec<Link>,
 }

 #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
@@ -69,7 +62,6 @@ struct LinkData {
     problem: Option<Problem>,
 }

 #[derive(Clone, Debug, Hash, PartialEq, Eq)]
 pub enum Problem {
     UnresolvedModule {
@@ -78,7 +70,7 @@ pub enum Problem {
     NotDirOwner {
         move_to: RelativePathBuf,
         candidate: RelativePathBuf,
-    }
+    },
 }

 impl ModuleTreeDescriptor {
@@ -87,7 +79,9 @@ impl ModuleTreeDescriptor {
         file_resolver: &FileResolverImp,
     ) -> ModuleTreeDescriptor {
         let mut file_id2node = BTreeMap::new();
-        let mut nodes: Vec<NodeData> = files.clone().enumerate()
+        let mut nodes: Vec<NodeData> = files
+            .clone()
+            .enumerate()
             .map(|(idx, (file_id, _))| {
                 file_id2node.insert(file_id, Node(idx));
                 NodeData {
@@ -120,20 +114,19 @@ impl ModuleTreeDescriptor {
                     points_to,
                     problem,
                 })
             }
         }
         ModuleTreeDescriptor {
-            nodes, links, file_id2node
+            nodes,
+            links,
+            file_id2node,
         }
     }

     pub(crate) fn parent_modules(&self, file_id: FileId) -> Vec<Link> {
         let node = self.file_id2node[&file_id];
-        self.node(node)
-            .parents
-            .clone()
+        self.node(node).parents.clone()
     }
     pub(crate) fn child_module_by_name(&self, file_id: FileId, name: &str) -> Vec<FileId> {
         let node = self.file_id2node[&file_id];
@@ -141,10 +134,18 @@ impl ModuleTreeDescriptor {
             .links
             .iter()
             .filter(|it| it.name(self) == name)
-            .flat_map(|link| link.points_to(self).iter().map(|&node| self.node(node).file_id))
+            .flat_map(|link| {
+                link.points_to(self)
+                    .iter()
+                    .map(|&node| self.node(node).file_id)
+            })
             .collect()
     }
-    pub(crate) fn problems<'a, 'b>(&'b self, file_id: FileId, root: ast::Root<'a>) -> Vec<(ast::Name<'a>, &'b Problem)> {
+    pub(crate) fn problems<'a, 'b>(
+        &'b self,
+        file_id: FileId,
+        root: ast::Root<'a>,
+    ) -> Vec<(ast::Name<'a>, &'b Problem)> {
         let node = self.file_id2node[&file_id];
         self.node(node)
             .links
@@ -176,7 +177,11 @@ impl Link {
     fn points_to(self, tree: &ModuleTreeDescriptor) -> &[Node] {
         &tree.link(self).points_to
     }
-    pub(crate) fn bind_source<'a>(self, tree: &ModuleTreeDescriptor, root: ast::Root<'a>) -> ast::Module<'a> {
+    pub(crate) fn bind_source<'a>(
+        self,
+        tree: &ModuleTreeDescriptor,
+        root: ast::Root<'a>,
+    ) -> ast::Module<'a> {
         modules(root)
             .filter(|(name, _)| name == &tree.link(self).name)
             .next()
@@ -185,22 +190,21 @@ impl Link {
     }
 }

 fn resolve_submodule(
     file_id: FileId,
     name: &SmolStr,
-    file_resolver: &FileResolverImp
+    file_resolver: &FileResolverImp,
 ) -> (Vec<FileId>, Option<Problem>) {
     let mod_name = file_resolver.file_stem(file_id);
-    let is_dir_owner =
-        mod_name == "mod" || mod_name == "lib" || mod_name == "main";
+    let is_dir_owner = mod_name == "mod" || mod_name == "lib" || mod_name == "main";
     let file_mod = RelativePathBuf::from(format!("../{}.rs", name));
     let dir_mod = RelativePathBuf::from(format!("../{}/mod.rs", name));
     let points_to: Vec<FileId>;
     let problem: Option<Problem>;
     if is_dir_owner {
-        points_to = [&file_mod, &dir_mod].iter()
+        points_to = [&file_mod, &dir_mod]
+            .iter()
             .filter_map(|path| file_resolver.resolve(file_id, path))
             .collect();
         problem = if points_to.is_empty() {
@@ -235,7 +239,9 @@ impl FnDescriptor {
         // Strip the body out for the label.
         let label: String = if let Some(body) = node.body() {
             let body_range = body.syntax().range();
-            let label : String = node.syntax().children()
+            let label: String = node
+                .syntax()
+                .children()
                 .filter(|child| !is_subrange(body_range, child.range()))
                 .map(|node| node.text().to_string())
                 .collect();
@@ -251,7 +257,7 @@ impl FnDescriptor {
             name,
             ret_type,
             params,
-            label
+            label,
         })
     }
@@ -264,9 +270,11 @@ impl FnDescriptor {
         // Maybe use param.pat here? See if we can just extract the name?
         //res.extend(param_list.params().map(|p| p.syntax().text().to_string()));
-        res.extend(param_list.params()
+        res.extend(
+            param_list
+                .params()
                 .filter_map(|p| p.pat())
-                .map(|pat| pat.syntax().text().to_string())
+                .map(|pat| pat.syntax().text().to_string()),
         );
     }
     res

View file

@@ -1,32 +1,31 @@
 use std::{
-    sync::{
-        Arc,
-    },
-    hash::{Hash, Hasher},
-    fmt,
     collections::VecDeque,
+    fmt,
+    hash::{Hash, Hasher},
     iter,
+    sync::Arc,
 };

+use ra_editor::{self, find_node_at_offset, resolve_local_name, FileSymbol, LineIndex, LocalEdit};
+use ra_syntax::{
+    ast::{self, ArgListOwner, Expr, NameOwner},
+    AstNode, File, SmolStr,
+    SyntaxKind::*,
+    SyntaxNodeRef, TextRange, TextUnit,
+};
 use relative_path::RelativePath;
 use rustc_hash::FxHashSet;
-use ra_editor::{self, FileSymbol, LineIndex, find_node_at_offset, LocalEdit, resolve_local_name};
-use ra_syntax::{
-    TextUnit, TextRange, SmolStr, File, AstNode, SyntaxNodeRef,
-    SyntaxKind::*,
-    ast::{self, NameOwner, ArgListOwner, Expr},
-};

 use crate::{
-    FileId, FileResolver, Query, Diagnostic, SourceChange, SourceFileEdit, Position, FileSystemEdit,
-    JobToken, CrateGraph, CrateId,
-    roots::{SourceRoot, ReadonlySourceRoot, WritableSourceRoot},
     descriptors::{FnDescriptor, ModuleTreeDescriptor, Problem},
+    roots::{ReadonlySourceRoot, SourceRoot, WritableSourceRoot},
+    CrateGraph, CrateId, Diagnostic, FileId, FileResolver, FileSystemEdit, JobToken, Position,
+    Query, SourceChange, SourceFileEdit,
 };

 #[derive(Clone, Debug)]
 pub(crate) struct FileResolverImp {
-    inner: Arc<FileResolver>
+    inner: Arc<FileResolver>,
 }

 impl PartialEq for FileResolverImp {
@@ -35,8 +34,7 @@ impl PartialEq for FileResolverImp {
     }
 }

-impl Eq for FileResolverImp {
-}
+impl Eq for FileResolverImp {}

 impl Hash for FileResolverImp {
     fn hash<H: Hasher>(&self, hasher: &mut H) {
@@ -67,17 +65,23 @@ impl Default for FileResolverImp {
             fn file_stem(&self, _file_: FileId) -> String {
                 panic!("file resolver not set")
             }
-            fn resolve(&self, _file_id: FileId, _path: &::relative_path::RelativePath) -> Option<FileId> {
+            fn resolve(
+                &self,
+                _file_id: FileId,
+                _path: &::relative_path::RelativePath,
+            ) -> Option<FileId> {
                 panic!("file resolver not set")
             }
         }
-        FileResolverImp { inner: Arc::new(DummyResolver) }
+        FileResolverImp {
+            inner: Arc::new(DummyResolver),
+        }
     }
 }

 #[derive(Debug)]
 pub(crate) struct AnalysisHostImpl {
-    data: WorldData
+    data: WorldData,
 }

 impl AnalysisHostImpl {
@@ -92,12 +96,12 @@ impl AnalysisHostImpl {
         }
     }
     pub fn change_files(&mut self, changes: &mut dyn Iterator<Item = (FileId, Option<String>)>) {
-        self.data_mut()
-            .root.apply_changes(changes, None);
+        self.data_mut().root.apply_changes(changes, None);
     }
     pub fn set_file_resolver(&mut self, resolver: FileResolverImp) {
         self.data_mut()
-            .root.apply_changes(&mut iter::empty(), Some(resolver));
+            .root
+            .apply_changes(&mut iter::empty(), Some(resolver));
     }
     pub fn set_crate_graph(&mut self, graph: CrateGraph) {
         let mut visited = FxHashSet::default();
@@ -131,7 +135,12 @@ impl AnalysisImpl {
         if self.data.root.contains(file_id) {
             return &self.data.root;
         }
-        &**self.data.libs.iter().find(|it| it.contains(file_id)).unwrap()
+        &**self
+            .data
+            .libs
+            .iter()
+            .find(|it| it.contains(file_id))
+            .unwrap()
     }
     pub fn file_syntax(&self, file_id: FileId) -> File {
         self.root(file_id).syntax(file_id)
@@ -142,18 +151,17 @@ impl AnalysisImpl {
     pub fn world_symbols(&self, query: Query, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
         let mut buf = Vec::new();
         if query.libs {
-            self.data.libs.iter()
-                .for_each(|it| it.symbols(&mut buf));
+            self.data.libs.iter().for_each(|it| it.symbols(&mut buf));
         } else {
             self.data.root.symbols(&mut buf);
         }
         query.search(&buf, token)
     }
     pub fn parent_module(&self, file_id: FileId) -> Vec<(FileId, FileSymbol)> {
         let root = self.root(file_id);
         let module_tree = root.module_tree();
-        module_tree.parent_modules(file_id)
+        module_tree
+            .parent_modules(file_id)
             .iter()
             .map(|link| {
                 let file_id = link.owner(&module_tree);
@@ -203,15 +211,17 @@ impl AnalysisImpl {
         let file = root.syntax(file_id);
         let syntax = file.syntax();
         if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, offset) {
             // First try to resolve the symbol locally
             if let Some((name, range)) = resolve_local_name(&file, offset, name_ref) {
                 let mut vec = vec![];
-                vec.push((file_id, FileSymbol {
-                    name,
-                    node_range: range,
-                    kind : NAME
-                }));
+                vec.push((
+                    file_id,
+                    FileSymbol {
+                        name,
+                        node_range: range,
+                        kind: NAME,
+                    },
+                ));
                 return vec;
             } else {
@@ -224,8 +234,11 @@ impl AnalysisImpl {
             if module.has_semi() {
                 let file_ids = self.resolve_module(&*module_tree, file_id, module);
-                let res = file_ids.into_iter().map(|id| {
-                    let name = module.name()
+                let res = file_ids
+                    .into_iter()
+                    .map(|id| {
+                        let name = module
+                            .name()
                             .map(|n| n.text())
                             .unwrap_or_else(|| SmolStr::new(""));
                         let symbol = FileSymbol {
@@ -234,7 +247,8 @@ impl AnalysisImpl {
                             kind: MODULE,
                         };
                         (id, symbol)
-                }).collect();
+                    })
+                    .collect();
                 return res;
             }
@@ -250,7 +264,11 @@ impl AnalysisImpl {
         let mut res = ra_editor::diagnostics(&syntax)
             .into_iter()
-            .map(|d| Diagnostic { range: d.range, message: d.msg, fix: None })
+            .map(|d| Diagnostic {
+                range: d.range,
+                message: d.msg,
+                fix: None,
+            })
             .collect::<Vec<_>>();
         for (name_node, problem) in module_tree.problems(file_id, syntax.ast()) {
@@ -273,8 +291,14 @@ impl AnalysisImpl {
                     }
                 }
                 Problem::NotDirOwner { move_to, candidate } => {
-                    let move_file = FileSystemEdit::MoveFile { file: file_id, path: move_to.clone() };
-                    let create_file = FileSystemEdit::CreateFile { anchor: file_id, path: move_to.join(candidate) };
+                    let move_file = FileSystemEdit::MoveFile {
+                        file: file_id,
+                        path: move_to.clone(),
+                    };
+                    let create_file = FileSystemEdit::CreateFile {
+                        anchor: file_id,
+                        path: move_to.join(candidate),
+                    };
                     let fix = SourceChange {
                         label: "move file and create module".to_string(),
                         source_file_edits: Vec::new(),
@@ -297,23 +321,34 @@ impl AnalysisImpl {
         let file = self.file_syntax(file_id);
         let offset = range.start();
         let actions = vec![
-            ("flip comma", ra_editor::flip_comma(&file, offset).map(|f| f())),
-            ("add `#[derive]`", ra_editor::add_derive(&file, offset).map(|f| f())),
+            (
+                "flip comma",
+                ra_editor::flip_comma(&file, offset).map(|f| f()),
+            ),
+            (
+                "add `#[derive]`",
+                ra_editor::add_derive(&file, offset).map(|f| f()),
+            ),
             ("add impl", ra_editor::add_impl(&file, offset).map(|f| f())),
-            ("introduce variable", ra_editor::introduce_variable(&file, range).map(|f| f())),
+            (
+                "introduce variable",
+                ra_editor::introduce_variable(&file, range).map(|f| f()),
+            ),
         ];
-        actions.into_iter()
+        actions
+            .into_iter()
             .filter_map(|(name, local_edit)| {
-                Some(SourceChange::from_local_edit(
-                    file_id, name, local_edit?,
-                ))
+                Some(SourceChange::from_local_edit(file_id, name, local_edit?))
             })
             .collect()
     }
-    pub fn resolve_callable(&self, file_id: FileId, offset: TextUnit, token: &JobToken)
-        -> Option<(FnDescriptor, Option<usize>)> {
+    pub fn resolve_callable(
+        &self,
+        file_id: FileId,
+        offset: TextUnit,
+        token: &JobToken,
+    ) -> Option<(FnDescriptor, Option<usize>)> {
         let root = self.root(file_id);
         let file = root.syntax(file_id);
         let syntax = file.syntax();
@@ -332,9 +367,7 @@ impl AnalysisImpl {
         let mut current_parameter = None;

         let num_params = descriptor.params.len();
-        let has_self = fn_def.param_list()
-            .and_then(|l| l.self_param())
-            .is_some();
+        let has_self = fn_def.param_list().and_then(|l| l.self_param()).is_some();

         if num_params == 1 {
             if !has_self {
@@ -350,8 +383,11 @@ impl AnalysisImpl {
             let start = arg_list.syntax().range().start();
             let range_search = TextRange::from_to(start, offset);
-            let mut commas: usize = arg_list.syntax().text()
-                .slice(range_search).to_string()
+            let mut commas: usize = arg_list
+                .syntax()
+                .text()
+                .slice(range_search)
+                .to_string()
                 .matches(",")
                 .count();
@@ -381,7 +417,12 @@ impl AnalysisImpl {
         self.world_symbols(query, token)
     }
-    fn resolve_module(&self, module_tree: &ModuleTreeDescriptor, file_id: FileId, module: ast::Module) -> Vec<FileId> {
+    fn resolve_module(
+        &self,
+        module_tree: &ModuleTreeDescriptor,
+        file_id: FileId,
+        module: ast::Module,
+    ) -> Vec<FileId> {
         let name = match module.name() {
             Some(name) => name.text(),
             None => return Vec::new(),
@@ -407,15 +448,17 @@ impl SourceChange {
             label: label.to_string(),
             source_file_edits: vec![file_edit],
             file_system_edits: vec![],
-            cursor_position: edit.cursor_position
-                .map(|offset| Position { offset, file_id })
+            cursor_position: edit
+                .cursor_position
+                .map(|offset| Position { offset, file_id }),
         }
     }
 }

 impl CrateGraph {
     fn crate_id_for_crate_root(&self, file_id: FileId) -> Option<CrateId> {
-        let (&crate_id, _) = self.crate_roots
+        let (&crate_id, _) = self
+            .crate_roots
             .iter()
             .find(|(_crate_id, &root_id)| root_id == file_id)?;
         Some(crate_id)
@@ -424,7 +467,7 @@ impl CrateGraph {
 enum FnCallNode<'a> {
     CallExpr(ast::CallExpr<'a>),
-    MethodCallExpr(ast::MethodCallExpr<'a>)
+    MethodCallExpr(ast::MethodCallExpr<'a>),
 }

 impl<'a> FnCallNode<'a> {
@@ -440,27 +483,23 @@ impl<'a> FnCallNode<'a> {
     pub fn name_ref(&self) -> Option<ast::NameRef> {
         match *self {
-            FnCallNode::CallExpr(call_expr) => {
-                Some(match call_expr.expr()? {
-                    Expr::PathExpr(path_expr) => {
-                        path_expr.path()?.segment()?.name_ref()?
-                    },
-                    _ => return None
-                })
-            },
-            FnCallNode::MethodCallExpr(call_expr) => {
-                call_expr.syntax().children()
-                    .filter_map(ast::NameRef::cast)
-                    .nth(0)
-            }
+            FnCallNode::CallExpr(call_expr) => Some(match call_expr.expr()? {
+                Expr::PathExpr(path_expr) => path_expr.path()?.segment()?.name_ref()?,
+                _ => return None,
+            }),
+            FnCallNode::MethodCallExpr(call_expr) => call_expr
+                .syntax()
+                .children()
+                .filter_map(ast::NameRef::cast)
+                .nth(0),
         }
     }

     pub fn arg_list(&self) -> Option<ast::ArgList> {
         match *self {
             FnCallNode::CallExpr(expr) => expr.arg_list(),
-            FnCallNode::MethodCallExpr(expr) => expr.arg_list()
+            FnCallNode::MethodCallExpr(expr) => expr.arg_list(),
         }
     }
 }

View file

@@ -14,15 +14,20 @@ impl JobHandle {
     pub fn new() -> (JobHandle, JobToken) {
         let (sender_alive, receiver_alive) = bounded(0);
         let (sender_canceled, receiver_canceled) = bounded(0);
-        let token = JobToken { _job_alive: sender_alive, job_canceled: receiver_canceled };
-        let handle = JobHandle { job_alive: receiver_alive, _job_canceled: sender_canceled };
+        let token = JobToken {
+            _job_alive: sender_alive,
+            job_canceled: receiver_canceled,
+        };
+        let handle = JobHandle {
+            job_alive: receiver_alive,
+            _job_canceled: sender_canceled,
+        };
         (handle, token)
     }
     pub fn has_completed(&self) -> bool {
         is_closed(&self.job_alive)
     }
-    pub fn cancel(self) {
-    }
+    pub fn cancel(self) {}
 }
@@ -31,7 +36,6 @@ impl JobToken {
     }
 }

 // We don't actually send messages through the channels,
 // and instead just check if the channel is closed,
 // so we use uninhabited enum as a message type
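
That comment is the whole trick: a zero-capacity channel whose message type can never be constructed carries exactly one bit of information, namely whether the other endpoint is still alive. A minimal standalone sketch of the idiom, assuming crossbeam-channel's `try_recv`/`TryRecvError` API (the names `Never` and `is_closed` are illustrative, not necessarily this crate's):

    use crossbeam_channel::{bounded, Receiver, TryRecvError};

    enum Never {} // uninhabited: no value of this type can ever be sent

    fn is_closed(receiver: &Receiver<Never>) -> bool {
        match receiver.try_recv() {
            Err(TryRecvError::Disconnected) => true, // all senders dropped
            Err(TryRecvError::Empty) => false,       // a sender is still alive
            Ok(never) => match never {},             // statically unreachable
        }
    }

    fn main() {
        let (sender, receiver) = bounded::<Never>(0);
        assert!(!is_closed(&receiver));
        drop(sender); // dropping the sender is the "cancellation" signal
        assert!(is_closed(&receiver));
    }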

View file

@@ -1,44 +1,40 @@
 extern crate parking_lot;
 #[macro_use]
 extern crate log;
-extern crate once_cell;
-extern crate ra_syntax;
-extern crate ra_editor;
 extern crate fst;
+extern crate once_cell;
+extern crate ra_editor;
+extern crate ra_syntax;
 extern crate rayon;
 extern crate relative_path;
 #[macro_use]
 extern crate crossbeam_channel;
 extern crate im;
-extern crate salsa;
 extern crate rustc_hash;
+extern crate salsa;

-mod symbol_index;
-mod module_map;
-mod imp;
-mod job;
-mod roots;
 mod db;
 mod descriptors;
+mod imp;
+mod job;
+mod module_map;
+mod roots;
+mod symbol_index;

-use std::{
-    sync::Arc,
-    fmt::Debug,
-};
+use std::{fmt::Debug, sync::Arc};

+use crate::imp::{AnalysisHostImpl, AnalysisImpl, FileResolverImp};
+use ra_syntax::{AtomEdit, File, TextRange, TextUnit};
 use relative_path::{RelativePath, RelativePathBuf};
-use ra_syntax::{File, TextRange, TextUnit, AtomEdit};
 use rustc_hash::FxHashMap;
-use crate::imp::{AnalysisImpl, AnalysisHostImpl, FileResolverImp};

-pub use ra_editor::{
-    StructureNode, LineIndex, FileSymbol,
-    Runnable, RunnableKind, HighlightedRange, CompletionItem,
-    Fold, FoldKind
-};
 pub use crate::{
-    job::{JobToken, JobHandle},
     descriptors::FnDescriptor,
+    job::{JobHandle, JobToken},
+};
+pub use ra_editor::{
+    CompletionItem, FileSymbol, Fold, FoldKind, HighlightedRange, LineIndex, Runnable,
+    RunnableKind, StructureNode,
 };

 #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
@@ -59,15 +55,19 @@ pub trait FileResolver: Debug + Send + Sync + 'static {
 #[derive(Debug)]
 pub struct AnalysisHost {
-    imp: AnalysisHostImpl
+    imp: AnalysisHostImpl,
 }

 impl AnalysisHost {
     pub fn new() -> AnalysisHost {
-        AnalysisHost { imp: AnalysisHostImpl::new() }
+        AnalysisHost {
+            imp: AnalysisHostImpl::new(),
+        }
     }
     pub fn analysis(&self) -> Analysis {
-        Analysis { imp: self.imp.analysis() }
+        Analysis {
+            imp: self.imp.analysis(),
+        }
     }
     pub fn change_file(&mut self, file_id: FileId, text: Option<String>) {
         self.change_files(::std::iter::once((file_id, text)));
@@ -115,7 +115,7 @@ pub enum FileSystemEdit {
     MoveFile {
         file: FileId,
         path: RelativePathBuf,
-    }
+    },
 }

 #[derive(Debug)]
@@ -144,7 +144,7 @@ impl Query {
             only_types: false,
             libs: false,
             exact: false,
-            limit: usize::max_value()
+            limit: usize::max_value(),
         }
     }
     pub fn only_types(&mut self) {
@@ -163,7 +163,7 @@ impl Query {
 #[derive(Debug)]
 pub struct Analysis {
-    imp: AnalysisImpl
+    imp: AnalysisImpl,
 }

 impl Analysis {
@@ -195,7 +195,11 @@ impl Analysis {
     }
     pub fn on_eq_typed(&self, file_id: FileId, offset: TextUnit) -> Option<SourceChange> {
         let file = self.imp.file_syntax(file_id);
-        Some(SourceChange::from_local_edit(file_id, "add semicolon", ra_editor::on_eq_typed(&file, offset)?))
+        Some(SourceChange::from_local_edit(
+            file_id,
+            "add semicolon",
+            ra_editor::on_eq_typed(&file, offset)?,
+        ))
     }
     pub fn file_structure(&self, file_id: FileId) -> Vec<StructureNode> {
         let file = self.imp.file_syntax(file_id);
@@ -204,8 +208,14 @@ impl Analysis {
     pub fn symbol_search(&self, query: Query, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
         self.imp.world_symbols(query, token)
     }
-    pub fn approximately_resolve_symbol(&self, file_id: FileId, offset: TextUnit, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
-        self.imp.approximately_resolve_symbol(file_id, offset, token)
+    pub fn approximately_resolve_symbol(
+        &self,
+        file_id: FileId,
+        offset: TextUnit,
+        token: &JobToken,
+    ) -> Vec<(FileId, FileSymbol)> {
+        self.imp
+            .approximately_resolve_symbol(file_id, offset, token)
     }
     pub fn parent_module(&self, file_id: FileId) -> Vec<(FileId, FileSymbol)> {
         self.imp.parent_module(file_id)
@@ -239,15 +249,19 @@ impl Analysis {
         ra_editor::folding_ranges(&file)
     }
-    pub fn resolve_callable(&self, file_id: FileId, offset: TextUnit, token: &JobToken)
-        -> Option<(FnDescriptor, Option<usize>)> {
+    pub fn resolve_callable(
+        &self,
+        file_id: FileId,
+        offset: TextUnit,
+        token: &JobToken,
+    ) -> Option<(FnDescriptor, Option<usize>)> {
         self.imp.resolve_callable(file_id, offset, token)
     }
 }

 #[derive(Debug)]
 pub struct LibraryData {
-    root: roots::ReadonlySourceRoot
+    root: roots::ReadonlySourceRoot,
 }

 impl LibraryData {

View file

@@ -1,10 +1,11 @@
-use std::sync::Arc;
 use crate::{
-    FileId,
-    db::{SyntaxDatabase},
+    db::SyntaxDatabase,
     descriptors::{ModuleDescriptor, ModuleTreeDescriptor},
+    FileId,
 };
+use std::sync::Arc;

 salsa::query_group! {
     pub(crate) trait ModulesDatabase: SyntaxDatabase {
         fn module_tree(key: ()) -> Arc<ModuleTreeDescriptor> {
@@ -16,7 +17,6 @@ salsa::query_group! {
     }
 }

 fn module_descriptor(db: &impl ModulesDatabase, file_id: FileId) -> Arc<ModuleDescriptor> {
     let file = db.file_syntax(file_id);
     Arc::new(ModuleDescriptor::new(file.ast()))
@@ -29,6 +29,9 @@ fn module_tree(db: &impl ModulesDatabase, (): ()) -> Arc<ModuleTreeDescriptor> {
         let module_descr = db.module_descriptor(file_id);
         files.push((file_id, module_descr));
     }
-    let res = ModuleTreeDescriptor::new(files.iter().map(|(file_id, descr)| (*file_id, &**descr)), &file_set.resolver);
+    let res = ModuleTreeDescriptor::new(
+        files.iter().map(|(file_id, descr)| (*file_id, &**descr)),
+        &file_set.resolver,
+    );
     Arc::new(res)
 }

View file

@@ -1,22 +1,19 @@
-use std::{
-    sync::Arc,
-    panic,
-};
+use std::{panic, sync::Arc};

 use once_cell::sync::OnceCell;
-use rayon::prelude::*;
-use salsa::Database;
-use rustc_hash::{FxHashMap, FxHashSet};
 use ra_editor::LineIndex;
 use ra_syntax::File;
+use rayon::prelude::*;
+use rustc_hash::{FxHashMap, FxHashSet};
+use salsa::Database;

 use crate::{
-    FileId,
-    imp::FileResolverImp,
-    symbol_index::SymbolIndex,
-    descriptors::{ModuleDescriptor, ModuleTreeDescriptor},
     db::{self, FilesDatabase, SyntaxDatabase},
+    descriptors::{ModuleDescriptor, ModuleTreeDescriptor},
+    imp::FileResolverImp,
     module_map::ModulesDatabase,
+    symbol_index::SymbolIndex,
+    FileId,
 };

 pub(crate) trait SourceRoot {
@@ -46,22 +43,22 @@ impl WritableSourceRoot {
                     removed.insert(file_id);
                 }
                 Some(text) => {
-                    self.db.query(db::FileTextQuery)
+                    self.db
+                        .query(db::FileTextQuery)
                         .set(file_id, Arc::new(text));
                     changed.insert(file_id);
                 }
             }
         }
         let file_set = self.db.file_set(());
-        let mut files: FxHashSet<FileId> = file_set
-            .files
-            .clone();
+        let mut files: FxHashSet<FileId> = file_set.files.clone();
         for file_id in removed {
             files.remove(&file_id);
         }
         files.extend(changed);
         let resolver = file_resolver.unwrap_or_else(|| file_set.resolver.clone());
-        self.db.query(db::FileSetQuery)
+        self.db
+            .query(db::FileSetQuery)
             .set((), Arc::new(db::FileSet { files, resolver }));
     }
 }
@@ -71,9 +68,7 @@ impl SourceRoot for WritableSourceRoot {
         self.db.module_tree(())
     }
     fn contains(&self, file_id: FileId) -> bool {
-        self.db.file_set(())
-            .files
-            .contains(&file_id)
+        self.db.file_set(()).files.contains(&file_id)
     }
     fn lines(&self, file_id: FileId) -> Arc<LineIndex> {
         self.db.file_lines(file_id)
@@ -108,12 +103,15 @@ impl FileData {
         }
     }
     fn lines(&self) -> &Arc<LineIndex> {
-        self.lines.get_or_init(|| Arc::new(LineIndex::new(&self.text)))
+        self.lines
+            .get_or_init(|| Arc::new(LineIndex::new(&self.text)))
     }
     fn syntax(&self) -> &File {
         let text = &self.text;
         let syntax = &self.syntax;
-        match panic::catch_unwind(panic::AssertUnwindSafe(|| syntax.get_or_init(|| File::parse(text)))) {
+        match panic::catch_unwind(panic::AssertUnwindSafe(|| {
+            syntax.get_or_init(|| File::parse(text))
+        })) {
             Ok(file) => file,
             Err(err) => {
                 error!("Parser paniced on:\n------\n{}\n------\n", text);
@@ -131,22 +129,23 @@ pub(crate) struct ReadonlySourceRoot {
 }

 impl ReadonlySourceRoot {
-    pub(crate) fn new(files: Vec<(FileId, String)>, file_resolver: FileResolverImp) -> ReadonlySourceRoot {
-        let modules = files.par_iter()
+    pub(crate) fn new(
+        files: Vec<(FileId, String)>,
+        file_resolver: FileResolverImp,
+    ) -> ReadonlySourceRoot {
+        let modules = files
+            .par_iter()
             .map(|(file_id, text)| {
                 let syntax = File::parse(text);
                 let mod_descr = ModuleDescriptor::new(syntax.ast());
                 (*file_id, syntax, mod_descr)
             })
             .collect::<Vec<_>>();
-        let module_tree = ModuleTreeDescriptor::new(
-            modules.iter().map(|it| (it.0, &it.2)),
-            &file_resolver,
-        );
+        let module_tree =
+            ModuleTreeDescriptor::new(modules.iter().map(|it| (it.0, &it.2)), &file_resolver);
-        let symbol_index = SymbolIndex::for_files(
-            modules.par_iter().map(|it| (it.0, it.1.clone()))
-        );
+        let symbol_index =
+            SymbolIndex::for_files(modules.par_iter().map(|it| (it.0, it.1.clone())));
         let file_map: FxHashMap<FileId, FileData> = files
             .into_iter()
             .map(|(id, text)| (id, FileData::new(text)))

View file

@@ -1,15 +1,16 @@
-use std::{
-    sync::Arc,
-    hash::{Hash, Hasher},
-};
-use ra_editor::{FileSymbol, file_symbols};
+use crate::{FileId, JobToken, Query};
+use fst::{self, Streamer};
+use ra_editor::{file_symbols, FileSymbol};
 use ra_syntax::{
     File,
     SyntaxKind::{self, *},
 };
-use fst::{self, Streamer};
 use rayon::prelude::*;
-use crate::{Query, FileId, JobToken};
+use std::{
+    hash::{Hash, Hasher},
+    sync::Arc,
+};

 #[derive(Debug)]
 pub(crate) struct SymbolIndex {
@@ -23,8 +24,7 @@ impl PartialEq for SymbolIndex {
     }
 }

-impl Eq for SymbolIndex {
-}
+impl Eq for SymbolIndex {}

 impl Hash for SymbolIndex {
     fn hash<H: Hasher>(&self, hasher: &mut H) {
@@ -38,9 +38,7 @@ impl SymbolIndex {
             .flat_map(|(file_id, file)| {
                 file_symbols(&file)
                     .into_iter()
-                    .map(move |symbol| {
-                        (symbol.name.as_str().to_lowercase(), (file_id, symbol))
-                    })
+                    .map(move |symbol| (symbol.name.as_str().to_lowercase(), (file_id, symbol)))
                    .collect::<Vec<_>>()
             })
             .collect::<Vec<_>>();
@@ -48,9 +46,7 @@ impl SymbolIndex {
         symbols.dedup_by(|s1, s2| s1.0 == s2.0);
         let (names, symbols): (Vec<String>, Vec<(FileId, FileSymbol)>) =
             symbols.into_iter().unzip();
-        let map = fst::Map::from_iter(
-            names.into_iter().zip(0u64..)
-        ).unwrap();
+        let map = fst::Map::from_iter(names.into_iter().zip(0u64..)).unwrap();
         SymbolIndex { symbols, map }
     }
@@ -65,7 +61,6 @@ impl Query {
         indices: &[Arc<SymbolIndex>],
         token: &JobToken,
     ) -> Vec<(FileId, FileSymbol)> {
         let mut op = fst::map::OpBuilder::new();
         for file_symbols in indices.iter() {
             let automaton = fst::automaton::Subsequence::new(&self.lowercased);
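
`Query::search` above unions several prebuilt fst maps through an `OpBuilder` and intersects each with a `Subsequence` automaton, which is what makes fuzzy symbol lookup cheap. A standalone sketch of that lookup against the fst crate's public API (the keys and the "fbr" query are made up for illustration):

    use fst::{automaton::Subsequence, IntoStreamer, Map, Streamer};

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        // Map::from_iter requires keys in sorted order, which the code
        // above guarantees by sorting and deduplicating first.
        let map = Map::from_iter(vec![("foo_bar", 0u64), ("fubar", 1)])?;
        // Subsequence matches keys containing 'f', 'b', 'r' in order.
        let mut stream = map.search(Subsequence::new("fbr")).into_stream();
        while let Some((key, value)) = stream.next() {
            println!("{} -> {}", String::from_utf8_lossy(key), value);
        }
        Ok(())
    }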

View file

@@ -1,17 +1,17 @@
-extern crate relative_path;
 extern crate ra_analysis;
-extern crate rustc_hash;
 extern crate ra_editor;
 extern crate ra_syntax;
+extern crate relative_path;
+extern crate rustc_hash;
 extern crate test_utils;

-use std::{
-    sync::Arc,
-};
+use std::sync::Arc;

-use rustc_hash::FxHashMap;
+use ra_analysis::{
+    Analysis, AnalysisHost, CrateGraph, CrateId, FileId, FileResolver, FnDescriptor, JobHandle,
+};
 use relative_path::{RelativePath, RelativePathBuf};
-use ra_analysis::{Analysis, AnalysisHost, FileId, FileResolver, JobHandle, CrateGraph, CrateId, FnDescriptor};
+use rustc_hash::FxHashMap;
 use test_utils::{assert_eq_dbg, extract_offset};

 #[derive(Debug)]
@@ -19,14 +19,13 @@ struct FileMap(Vec<(FileId, RelativePathBuf)>);
 impl FileMap {
     fn iter<'a>(&'a self) -> impl Iterator<Item = (FileId, &'a RelativePath)> + 'a {
-        self.0.iter().map(|(id, path)| (*id, path.as_relative_path()))
+        self.0
+            .iter()
+            .map(|(id, path)| (*id, path.as_relative_path()))
     }
     fn path(&self, id: FileId) -> &RelativePath {
-        self.iter()
-            .find(|&(it, _)| it == id)
-            .unwrap()
-            .1
+        self.iter().find(|&(it, _)| it == id).unwrap().1
     }
 }
@@ -71,10 +70,7 @@ fn get_signature(text: &str) -> (FnDescriptor, Option<usize>) {
 #[test]
 fn test_resolve_module() {
-    let snap = analysis(&[
-        ("/lib.rs", "mod foo;"),
-        ("/foo.rs", "")
-    ]);
+    let snap = analysis(&[("/lib.rs", "mod foo;"), ("/foo.rs", "")]);
     let (_handle, token) = JobHandle::new();
     let symbols = snap.approximately_resolve_symbol(FileId(1), 4.into(), &token);
     assert_eq_dbg(
@@ -82,10 +78,7 @@ fn test_resolve_module() {
         &symbols,
     );

-    let snap = analysis(&[
-        ("/lib.rs", "mod foo;"),
-        ("/foo/mod.rs", "")
-    ]);
+    let snap = analysis(&[("/lib.rs", "mod foo;"), ("/foo/mod.rs", "")]);
     let symbols = snap.approximately_resolve_symbol(FileId(1), 4.into(), &token);
     assert_eq_dbg(
         r#"[(FileId(2), FileSymbol { name: "foo", node_range: [0; 0), kind: MODULE })]"#,
@@ -114,18 +107,12 @@ fn test_unresolved_module_diagnostic() {
 fn test_unresolved_module_diagnostic_no_diag_for_inline_mode() {
     let snap = analysis(&[("/lib.rs", "mod foo {}")]);
     let diagnostics = snap.diagnostics(FileId(1));
-    assert_eq_dbg(
-        r#"[]"#,
-        &diagnostics,
-    );
+    assert_eq_dbg(r#"[]"#, &diagnostics);
 }

 #[test]
 fn test_resolve_parent_module() {
-    let snap = analysis(&[
-        ("/lib.rs", "mod foo;"),
-        ("/foo.rs", ""),
-    ]);
+    let snap = analysis(&[("/lib.rs", "mod foo;"), ("/foo.rs", "")]);
     let symbols = snap.parent_module(FileId(2));
     assert_eq_dbg(
         r#"[(FileId(1), FileSymbol { name: "foo", node_range: [0; 8), kind: MODULE })]"#,
@@ -135,10 +122,7 @@ fn test_resolve_parent_module() {

 #[test]
 fn test_resolve_crate_root() {
-    let mut host = analysis_host(&[
-        ("/lib.rs", "mod foo;"),
-        ("/foo.rs", ""),
-    ]);
+    let mut host = analysis_host(&[("/lib.rs", "mod foo;"), ("/foo.rs", "")]);
     let snap = host.analysis();
     assert!(snap.crate_for(FileId(2)).is_empty());
@@ -152,17 +136,15 @@ fn test_resolve_crate_root() {
     host.set_crate_graph(crate_graph);
     let snap = host.analysis();

-    assert_eq!(
-        snap.crate_for(FileId(2)),
-        vec![CrateId(1)],
-    );
+    assert_eq!(snap.crate_for(FileId(2)), vec![CrateId(1)],);
 }

 #[test]
 fn test_fn_signature_two_args_first() {
     let (desc, param) = get_signature(
         r#"fn foo(x: u32, y: u32) -> u32 {x + y}
-fn bar() { foo(<|>3, ); }"#);
+fn bar() { foo(<|>3, ); }"#,
+    );

     assert_eq!(desc.name, "foo".to_string());
     assert_eq!(desc.params, vec!("x".to_string(), "y".to_string()));
@@ -174,7 +156,8 @@ fn bar() { foo(<|>3, ); }"#);
 fn test_fn_signature_two_args_second() {
     let (desc, param) = get_signature(
         r#"fn foo(x: u32, y: u32) -> u32 {x + y}
-fn bar() { foo(3, <|>); }"#);
+fn bar() { foo(3, <|>); }"#,
+    );

     assert_eq!(desc.name, "foo".to_string());
     assert_eq!(desc.params, vec!("x".to_string(), "y".to_string()));
@@ -186,7 +169,8 @@ fn bar() { foo(3, <|>); }"#);
 fn test_fn_signature_for_impl() {
     let (desc, param) = get_signature(
         r#"struct F; impl F { pub fn new() { F{}} }
-fn bar() {let _ : F = F::new(<|>);}"#);
+fn bar() {let _ : F = F::new(<|>);}"#,
+    );

     assert_eq!(desc.name, "new".to_string());
     assert_eq!(desc.params, Vec::<String>::new());
@@ -209,7 +193,8 @@ impl F {
 fn bar() {
     let f : F = F::new();
     f.do_it(<|>);
-}"#);
+}"#,
+    );

     assert_eq!(desc.name, "do_it".to_string());
     assert_eq!(desc.params, vec!["&self".to_string()]);
@@ -232,7 +217,8 @@ impl F {
 fn bar() {
     let f : F = F::new();
     f.do_it(<|>);
-}"#);
+}"#,
+    );

     assert_eq!(desc.name, "do_it".to_string());
     assert_eq!(desc.params, vec!["&self".to_string(), "x".to_string()]);

View file

@@ -2,19 +2,17 @@ extern crate clap;
 #[macro_use]
 extern crate failure;
 extern crate join_to_string;
-extern crate ra_syntax;
 extern crate ra_editor;
+extern crate ra_syntax;
 extern crate tools;

-use std::{
-    fs, io::Read, path::Path,
-    time::Instant
-};
+use std::{fs, io::Read, path::Path, time::Instant};

 use clap::{App, Arg, SubCommand};
 use join_to_string::join;
+use ra_editor::{extend_selection, file_structure, syntax_tree};
+use ra_syntax::{File, TextRange};
 use tools::collect_tests;
-use ra_syntax::{TextRange, File};
-use ra_editor::{syntax_tree, file_structure, extend_selection};

 type Result<T> = ::std::result::Result<T, failure::Error>;
@@ -36,14 +34,12 @@ fn main() -> Result<()> {
                 .takes_value(true),
             ),
         )
-        .subcommand(
-            SubCommand::with_name("parse")
-                .arg(Arg::with_name("no-dump").long("--no-dump"))
-        )
+        .subcommand(SubCommand::with_name("parse").arg(Arg::with_name("no-dump").long("--no-dump")))
         .subcommand(SubCommand::with_name("symbols"))
-        .subcommand(SubCommand::with_name("extend-selection")
-            .arg(Arg::with_name("start"))
-            .arg(Arg::with_name("end"))
+        .subcommand(
+            SubCommand::with_name("extend-selection")
+                .arg(Arg::with_name("start"))
+                .arg(Arg::with_name("end")),
         )
         .get_matches();
     match matches.subcommand() {
@@ -116,7 +112,8 @@ fn selections(file: &File, start: u32, end: u32) -> String {
         ranges.push(r);
         cur = extend_selection(&file, r);
     }
-    let ranges = ranges.iter()
+    let ranges = ranges
+        .iter()
         .map(|r| (1 + u32::from(r.start()), 1 + u32::from(r.end())))
         .map(|(s, e)| format!("({} {})", s, e));
     join(ranges)

View file

@ -1,17 +1,14 @@
use join_to_string::join; use join_to_string::join;
use ra_syntax::{ use ra_syntax::{
File, TextUnit, TextRange, Direction, algo::{find_covering_node, find_leaf_at_offset},
ast::{self, AstNode, AttrsOwner, TypeParamsOwner, NameOwner}, ast::{self, AstNode, AttrsOwner, NameOwner, TypeParamsOwner},
Direction, File,
SyntaxKind::{COMMA, WHITESPACE}, SyntaxKind::{COMMA, WHITESPACE},
SyntaxNodeRef, SyntaxNodeRef, TextRange, TextUnit,
algo::{
find_leaf_at_offset,
find_covering_node,
},
}; };
use crate::{EditBuilder, Edit, find_node_at_offset}; use crate::{find_node_at_offset, Edit, EditBuilder};
#[derive(Debug)] #[derive(Debug)]
pub struct LocalEdit { pub struct LocalEdit {
@ -52,9 +49,7 @@ pub fn add_derive<'a>(file: &'a File, offset: TextUnit) -> Option<impl FnOnce()
edit.insert(node_start, "#[derive()]\n".to_string()); edit.insert(node_start, "#[derive()]\n".to_string());
node_start + TextUnit::of_str("#[derive(") node_start + TextUnit::of_str("#[derive(")
} }
Some(tt) => { Some(tt) => tt.syntax().range().end() - TextUnit::of_char(')'),
tt.syntax().range().end() - TextUnit::of_char(')')
}
}; };
LocalEdit { LocalEdit {
edit: edit.finish(), edit: edit.finish(),
@ -74,14 +69,19 @@ pub fn add_impl<'a>(file: &'a File, offset: TextUnit) -> Option<impl FnOnce() ->
let mut buf = String::new(); let mut buf = String::new();
buf.push_str("\n\nimpl"); buf.push_str("\n\nimpl");
if let Some(type_params) = type_params { if let Some(type_params) = type_params {
type_params.syntax().text() type_params.syntax().text().push_to(&mut buf);
.push_to(&mut buf);
} }
buf.push_str(" "); buf.push_str(" ");
buf.push_str(name.text().as_str()); buf.push_str(name.text().as_str());
if let Some(type_params) = type_params { if let Some(type_params) = type_params {
let lifetime_params = type_params.lifetime_params().filter_map(|it| it.lifetime()).map(|it| it.text()); let lifetime_params = type_params
let type_params = type_params.type_params().filter_map(|it| it.name()).map(|it| it.text()); .lifetime_params()
.filter_map(|it| it.lifetime())
.map(|it| it.text());
let type_params = type_params
.type_params()
.filter_map(|it| it.name())
.map(|it| it.text());
join(lifetime_params.chain(type_params)) join(lifetime_params.chain(type_params))
.surround_with("<", ">") .surround_with("<", ">")
.to_buf(&mut buf); .to_buf(&mut buf);
@ -97,10 +97,17 @@ pub fn add_impl<'a>(file: &'a File, offset: TextUnit) -> Option<impl FnOnce() ->
}) })
} }
pub fn introduce_variable<'a>(file: &'a File, range: TextRange) -> Option<impl FnOnce() -> LocalEdit + 'a> { pub fn introduce_variable<'a>(
file: &'a File,
range: TextRange,
) -> Option<impl FnOnce() -> LocalEdit + 'a> {
let node = find_covering_node(file.syntax(), range); let node = find_covering_node(file.syntax(), range);
let expr = node.ancestors().filter_map(ast::Expr::cast).next()?; let expr = node.ancestors().filter_map(ast::Expr::cast).next()?;
let anchor_stmt = expr.syntax().ancestors().filter_map(ast::Stmt::cast).next()?; let anchor_stmt = expr
.syntax()
.ancestors()
.filter_map(ast::Stmt::cast)
.next()?;
let indent = anchor_stmt.syntax().prev_sibling()?; let indent = anchor_stmt.syntax().prev_sibling()?;
if indent.kind() != WHITESPACE { if indent.kind() != WHITESPACE {
return None; return None;
@ -191,7 +198,8 @@ mod tests {
" "
fn foo() { fn foo() {
foo(<|>1 + 1<|>); foo(<|>1 + 1<|>);
}", " }",
"
fn foo() { fn foo() {
let <|>var_name = 1 + 1; let <|>var_name = 1 + 1;
foo(var_name); foo(var_name);
@ -205,7 +213,8 @@ check_action_range(
" "
fn foo() { fn foo() {
<|>1 + 1<|>; <|>1 + 1<|>;
}", " }",
"
fn foo() { fn foo() {
let <|>var_name = 1 + 1; let <|>var_name = 1 + 1;
}", }",

View file

@@ -1,17 +1,18 @@
 use rustc_hash::{FxHashMap, FxHashSet};
 use ra_syntax::{
-    File, TextUnit, AstNode, SyntaxNodeRef, SyntaxKind::*,
+    algo::visit::{visitor, visitor_ctx, Visitor, VisitorCtx},
     ast::{self, LoopBodyOwner, ModuleItemOwner},
-    algo::{
-        visit::{visitor, Visitor, visitor_ctx, VisitorCtx},
-    },
     text_utils::is_subrange,
+    AstNode, File,
+    SyntaxKind::*,
+    SyntaxNodeRef, TextUnit,
 };
 use crate::{
-    AtomEdit, find_node_at_offset,
+    find_node_at_offset,
     scope::{FnScopes, ModuleScope},
+    AtomEdit,
 };

 #[derive(Debug)]
@@ -21,7 +22,7 @@ pub struct CompletionItem {
     /// What string is used for filtering, defaults to label
     pub lookup: Option<String>,
     /// What is inserted, defaults to label
-    pub snippet: Option<String>
+    pub snippet: Option<String>,
 }

 pub fn scope_completion(file: &File, offset: TextUnit) -> Option<Vec<CompletionItem>> {
@@ -40,7 +41,12 @@ pub fn scope_completion(file: &File, offset: TextUnit) -> Option<Vec<CompletionI
             param_completions(name_ref.syntax(), &mut res);
         }
         let name_range = name_ref.syntax().range();
-        let top_node = name_ref.syntax().ancestors().take_while(|it| it.range() == name_range).last().unwrap();
+        let top_node = name_ref
+            .syntax()
+            .ancestors()
+            .take_while(|it| it.range() == name_range)
+            .last()
+            .unwrap();
         match top_node.parent().map(|it| it.kind()) {
             Some(ROOT) | Some(ITEM_LIST) => complete_mod_item_snippets(&mut res),
             _ => (),
@@ -68,21 +74,23 @@ fn complete_name_ref(file: &File, name_ref: ast::NameRef, acc: &mut Vec<Completi
             if let Some(items) = visitor()
                 .visit::<ast::Root, _>(|it| Some(it.items()))
                 .visit::<ast::Module, _>(|it| Some(it.item_list()?.items()))
-                .accept(node) {
+                .accept(node)
+            {
                 if let Some(items) = items {
                     let scope = ModuleScope::new(items);
                     acc.extend(
-                        scope.entries().iter()
+                        scope
+                            .entries()
+                            .iter()
                             .filter(|entry| entry.syntax() != name_ref.syntax())
                             .map(|entry| CompletionItem {
                                 label: entry.name().to_string(),
                                 lookup: None,
                                 snippet: None,
-                            })
+                            }),
                     );
                 }
                 break;
             } else if !visited_fn {
                 if let Some(fn_def) = ast::FnDef::cast(node) {
                     visited_fn = true;
@@ -103,26 +111,34 @@ fn param_completions(ctx: SyntaxNodeRef, acc: &mut Vec<CompletionItem>) {
             .visit::<ast::ItemList, _>(process)
             .accept(node);
     }
-    params.into_iter()
+    params
+        .into_iter()
         .filter_map(|(label, (count, param))| {
             let lookup = param.pat()?.syntax().text().to_string();
-            if count < 2 { None } else { Some((label, lookup)) }
+            if count < 2 {
+                None
+            } else {
+                Some((label, lookup))
+            }
         })
         .for_each(|(label, lookup)| {
             acc.push(CompletionItem {
-                label, lookup: Some(lookup), snippet: None
+                label,
+                lookup: Some(lookup),
+                snippet: None,
             })
         });

-    fn process<'a, N: ast::FnDefOwner<'a>>(node: N, params: &mut FxHashMap<String, (u32, ast::Param<'a>)>) {
+    fn process<'a, N: ast::FnDefOwner<'a>>(
+        node: N,
+        params: &mut FxHashMap<String, (u32, ast::Param<'a>)>,
+    ) {
         node.functions()
             .filter_map(|it| it.param_list())
             .flat_map(|it| it.params())
             .for_each(|param| {
                 let text = param.syntax().text().to_string();
-                params.entry(text)
-                    .or_insert((0, param))
-                    .0 += 1;
+                params.entry(text).or_insert((0, param)).0 += 1;
             })
     }
 }
@@ -134,8 +150,12 @@ fn is_node<'a, N: AstNode<'a>>(node: SyntaxNodeRef<'a>) -> bool {
     }
 }

-fn complete_expr_keywords(file: &File, fn_def: ast::FnDef, name_ref: ast::NameRef, acc: &mut Vec<CompletionItem>) {
+fn complete_expr_keywords(
+    file: &File,
+    fn_def: ast::FnDef,
+    name_ref: ast::NameRef,
+    acc: &mut Vec<CompletionItem>,
+) {
     acc.push(keyword("if", "if $0 {}"));
     acc.push(keyword("match", "match $0 {}"));
     acc.push(keyword("while", "while $0 {}"));
@@ -186,9 +206,14 @@ fn complete_return(fn_def: ast::FnDef, name_ref: ast::NameRef) -> Option<Complet
     //     return None;
     // }
-    let is_stmt = match name_ref.syntax().ancestors().filter_map(ast::ExprStmt::cast).next() {
+    let is_stmt = match name_ref
+        .syntax()
+        .ancestors()
+        .filter_map(ast::ExprStmt::cast)
+        .next()
+    {
         None => false,
-        Some(expr_stmt) => expr_stmt.syntax().range() == name_ref.syntax().range()
+        Some(expr_stmt) => expr_stmt.syntax().range() == name_ref.syntax().range(),
     };
     let snip = match (is_stmt, fn_def.ret_type().is_some()) {
         (true, true) => "return $0;",
@@ -212,14 +237,12 @@ fn complete_expr_snippets(acc: &mut Vec<CompletionItem>) {
         label: "pd".to_string(),
         lookup: None,
         snippet: Some("eprintln!(\"$0 = {:?}\", $0);".to_string()),
-        }
-    );
+    });
     acc.push(CompletionItem {
         label: "ppd".to_string(),
         lookup: None,
         snippet: Some("eprintln!(\"$0 = {:#?}\", $0);".to_string()),
-        }
-    );
+    });
 }

 fn complete_mod_item_snippets(acc: &mut Vec<CompletionItem>) {
@@ -227,21 +250,21 @@ fn complete_mod_item_snippets(acc: &mut Vec<CompletionItem>) {
         label: "tfn".to_string(),
         lookup: None,
         snippet: Some("#[test]\nfn $1() {\n $0\n}".to_string()),
-        }
-    );
+    });
 }

 fn complete_fn(name_ref: ast::NameRef, scopes: &FnScopes, acc: &mut Vec<CompletionItem>) {
     let mut shadowed = FxHashSet::default();
     acc.extend(
-        scopes.scope_chain(name_ref.syntax())
+        scopes
+            .scope_chain(name_ref.syntax())
             .flat_map(|scope| scopes.entries(scope).iter())
             .filter(|entry| shadowed.insert(entry.name()))
             .map(|entry| CompletionItem {
                 label: entry.name().to_string(),
                 lookup: None,
                 snippet: None,
-            })
+            }),
     );
     if scopes.self_param.is_some() {
         acc.push(CompletionItem {
@@ -281,20 +304,24 @@ mod tests {
     #[test]
     fn test_completion_let_scope() {
-        check_scope_completion(r"
+        check_scope_completion(
+            r"
             fn quux(x: i32) {
                 let y = 92;
                 1 + <|>;
                 let z = ();
             }
-            ", r#"[CompletionItem { label: "y", lookup: None, snippet: None },
+            ",
+            r#"[CompletionItem { label: "y", lookup: None, snippet: None },
                    CompletionItem { label: "x", lookup: None, snippet: None },
-                   CompletionItem { label: "quux", lookup: None, snippet: None }]"#);
+                   CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
+        );
     }

     #[test]
     fn test_completion_if_let_scope() {
-        check_scope_completion(r"
+        check_scope_completion(
+            r"
             fn quux() {
                 if let Some(x) = foo() {
                     let y = 92;
@@ -304,67 +331,85 @@ mod tests {
                     1 + <|>
                 }
             }
-            ", r#"[CompletionItem { label: "b", lookup: None, snippet: None },
+            ",
+            r#"[CompletionItem { label: "b", lookup: None, snippet: None },
                    CompletionItem { label: "a", lookup: None, snippet: None },
-                   CompletionItem { label: "quux", lookup: None, snippet: None }]"#);
+                   CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
+        );
     }

     #[test]
     fn test_completion_for_scope() {
-        check_scope_completion(r"
+        check_scope_completion(
+            r"
             fn quux() {
                 for x in &[1, 2, 3] {
                     <|>
                 }
             }
-            ", r#"[CompletionItem { label: "x", lookup: None, snippet: None },
-                   CompletionItem { label: "quux", lookup: None, snippet: None }]"#);
+            ",
+            r#"[CompletionItem { label: "x", lookup: None, snippet: None },
+                   CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
+        );
     }

     #[test]
     fn test_completion_mod_scope() {
-        check_scope_completion(r"
+        check_scope_completion(
+            r"
             struct Foo;
             enum Baz {}
             fn quux() {
                 <|>
             }
-            ", r#"[CompletionItem { label: "Foo", lookup: None, snippet: None },
+            ",
+            r#"[CompletionItem { label: "Foo", lookup: None, snippet: None },
                    CompletionItem { label: "Baz", lookup: None, snippet: None },
-                   CompletionItem { label: "quux", lookup: None, snippet: None }]"#);
+                   CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
+        );
     }

     #[test]
     fn test_completion_mod_scope_no_self_use() {
-        check_scope_completion(r"
+        check_scope_completion(
+            r"
             use foo<|>;
-            ", r#"[]"#);
+            ",
+            r#"[]"#,
+        );
     }

     #[test]
     fn test_completion_mod_scope_nested() {
-        check_scope_completion(r"
+        check_scope_completion(
+            r"
             struct Foo;
             mod m {
                 struct Bar;
                 fn quux() { <|> }
             }
-            ", r#"[CompletionItem { label: "Bar", lookup: None, snippet: None },
-                   CompletionItem { label: "quux", lookup: None, snippet: None }]"#);
+            ",
+            r#"[CompletionItem { label: "Bar", lookup: None, snippet: None },
+                   CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
+        );
     }

     #[test]
     fn test_complete_type() {
-        check_scope_completion(r"
+        check_scope_completion(
+            r"
             struct Foo;
             fn x() -> <|>
-            ", r#"[CompletionItem { label: "Foo", lookup: None, snippet: None },
-                   CompletionItem { label: "x", lookup: None, snippet: None }]"#)
+            ",
+            r#"[CompletionItem { label: "Foo", lookup: None, snippet: None },
+                   CompletionItem { label: "x", lookup: None, snippet: None }]"#,
+        )
     }

     #[test]
     fn test_complete_shadowing() {
-        check_scope_completion(r"
+        check_scope_completion(
+            r"
             fn foo() -> {
                 let bar = 92;
                 {
@@ -372,15 +417,20 @@ mod tests {
                     <|>
                 }
             }
-            ", r#"[CompletionItem { label: "bar", lookup: None, snippet: None },
-                   CompletionItem { label: "foo", lookup: None, snippet: None }]"#)
+            ",
+            r#"[CompletionItem { label: "bar", lookup: None, snippet: None },
+                   CompletionItem { label: "foo", lookup: None, snippet: None }]"#,
+        )
     }

     #[test]
     fn test_complete_self() {
-        check_scope_completion(r"
+        check_scope_completion(
+            r"
             impl S { fn foo(&self) { <|> } }
-            ", r#"[CompletionItem { label: "self", lookup: None, snippet: None }]"#)
+            ",
+            r#"[CompletionItem { label: "self", lookup: None, snippet: None }]"#,
+        )
     }

     #[test]


@@ -1,8 +1,5 @@
 use crate::{TextRange, TextUnit};
-use ra_syntax::{
-    AtomEdit,
-    text_utils::contains_offset_nonstrict,
-};
+use ra_syntax::{text_utils::contains_offset_nonstrict, AtomEdit};

 #[derive(Debug, Clone)]
 pub struct Edit {
@@ -11,7 +8,7 @@ pub struct Edit {
 #[derive(Debug)]
 pub struct EditBuilder {
-    atoms: Vec<AtomEdit>
+    atoms: Vec<AtomEdit>,
 }

 impl EditBuilder {
@@ -36,7 +33,9 @@ impl EditBuilder {
         Edit { atoms }
     }
     pub fn invalidates_offset(&self, offset: TextUnit) -> bool {
-        self.atoms.iter().any(|atom| contains_offset_nonstrict(atom.delete, offset))
+        self.atoms
+            .iter()
+            .any(|atom| contains_offset_nonstrict(atom.delete, offset))
     }
 }
@@ -74,7 +73,7 @@ impl Edit {
                 break;
             }
             if offset < atom.delete.end() {
-                return None
+                return None;
             }
             res += TextUnit::of_str(&atom.insert);
             res -= atom.delete.len();


@@ -1,7 +1,8 @@
 use ra_syntax::{
-    File, TextRange, SyntaxNodeRef, TextUnit, Direction,
+    algo::{find_covering_node, find_leaf_at_offset, LeafAtOffset},
+    Direction, File,
     SyntaxKind::*,
-    algo::{find_leaf_at_offset, LeafAtOffset, find_covering_node},
+    SyntaxNodeRef, TextRange, TextUnit,
 };

 pub fn extend_selection(file: &File, range: TextRange) -> Option<TextRange> {
@@ -24,7 +25,7 @@ pub(crate) fn extend(root: SyntaxNodeRef, range: TextRange) -> Option<TextRange>
             } else {
                 l.range()
             }
-        },
+        }
         LeafAtOffset::Between(l, r) => pick_best(l, r).range(),
     };
     return Some(leaf_range);
@@ -66,7 +67,7 @@ fn extend_ws(root: SyntaxNodeRef, ws: SyntaxNodeRef, offset: TextUnit) -> TextRa
     if let Some(node) = ws.next_sibling() {
         let start = match ws_prefix.rfind('\n') {
             Some(idx) => ws.range().start() + TextUnit::from((idx + 1) as u32),
-            None => node.range().start()
+            None => node.range().start(),
         };
         let end = if root.text().char_at(node.range().end()) == Some('\n') {
             node.range().end() + TextUnit::of_char('\n')
@@ -94,10 +95,7 @@ fn extend_comments(node: SyntaxNodeRef) -> Option<TextRange> {
     let prev = adj_comments(node, Direction::Prev);
     let next = adj_comments(node, Direction::Next);
     if prev != next {
-        Some(TextRange::from_to(
-            prev.range().start(),
-            next.range().end(),
-        ))
+        Some(TextRange::from_to(prev.range().start(), next.range().end()))
     } else {
         None
     }
@@ -109,7 +107,7 @@ fn adj_comments(node: SyntaxNodeRef, dir: Direction) -> SyntaxNodeRef {
         match node.kind() {
             COMMENT => res = node,
             WHITESPACE if !node.leaf_text().unwrap().as_str().contains("\n\n") => (),
-            _ => break
+            _ => break,
         }
     }
     res
@@ -125,8 +123,7 @@ mod tests {
         let file = File::parse(&before);
         let mut range = TextRange::offset_len(cursor, 0.into());
         for &after in afters {
-            range = extend_selection(&file, range)
-                .unwrap();
+            range = extend_selection(&file, range).unwrap();
             let actual = &before[range];
             assert_eq!(after, actual);
         }
@@ -134,10 +131,7 @@ mod tests {

     #[test]
     fn test_extend_selection_arith() {
-        do_check(
-            r#"fn foo() { <|>1 + 1 }"#,
-            &["1", "1 + 1", "{ 1 + 1 }"],
-        );
+        do_check(r#"fn foo() { <|>1 + 1 }"#, &["1", "1 + 1", "{ 1 + 1 }"]);
     }

     #[test]
@@ -149,7 +143,7 @@
 impl S {
     }
 }"#,
-            &[" fn foo() {\n\n }\n"]
+            &[" fn foo() {\n\n }\n"],
         );
     }
@@ -165,7 +159,11 @@ struct B {
     <|>
 }
 "#,
-            &["\n \n", "{\n \n}", "/// bla\n/// bla\nstruct B {\n \n}"]
+            &[
+                "\n \n",
+                "{\n \n}",
+                "/// bla\n/// bla\nstruct B {\n \n}",
+            ],
         )
     }
@@ -181,7 +179,7 @@ fn bar(){}
 // fn foo(){}
 "#,
-            &["// 1 + 1", "// fn foo() {\n// 1 + 1\n// }"]
+            &["// 1 + 1", "// fn foo() {\n// 1 + 1\n// }"],
         );
     }
@@ -191,42 +189,34 @@ fn bar(){}
             r#"
 fn main() { foo<|>+bar;}
 "#,
-            &["foo", "foo+bar"]
+            &["foo", "foo+bar"],
         );
         do_check(
             r#"
 fn main() { foo+<|>bar;}
 "#,
-            &["bar", "foo+bar"]
+            &["bar", "foo+bar"],
         );
     }

     #[test]
     fn test_extend_selection_prefer_lifetimes() {
-        do_check(
-            r#"fn foo<<|>'a>() {}"#,
-            &["'a", "<'a>"]
-        );
-        do_check(
-            r#"fn foo<'a<|>>() {}"#,
-            &["'a", "<'a>"]
-        );
+        do_check(r#"fn foo<<|>'a>() {}"#, &["'a", "<'a>"]);
+        do_check(r#"fn foo<'a<|>>() {}"#, &["'a", "<'a>"]);
     }

     #[test]
     fn test_extend_selection_select_first_word() {
-        do_check(
-            r#"// foo bar b<|>az quxx"#,
-            &["baz", "// foo bar baz quxx"]
-        );
-        do_check(r#"
+        do_check(r#"// foo bar b<|>az quxx"#, &["baz", "// foo bar baz quxx"]);
+        do_check(
+            r#"
 impl S {
     fn foo() {
         // hel<|>lo world
     }
 }
 "#,
-            &["hello", "// hello world"]
+            &["hello", "// hello world"],
         );
     }
 }


@@ -1,11 +1,9 @@
 use rustc_hash::FxHashSet;
 use ra_syntax::{
-    ast,
-    AstNode,
-    File, TextRange, SyntaxNodeRef,
+    ast, AstNode, Direction, File,
     SyntaxKind::{self, *},
-    Direction,
+    SyntaxNodeRef, TextRange,
 };

 #[derive(Debug, PartialEq, Eq)]
@@ -28,7 +26,10 @@ pub fn folding_ranges(file: &File) -> Vec<Fold> {
         // Fold items that span multiple lines
         if let Some(kind) = fold_kind(node.kind()) {
             if has_newline(node) {
-                res.push(Fold { range: node.range(), kind });
+                res.push(Fold {
+                    range: node.range(),
+                    kind,
+                });
             }
         }
@@ -37,8 +38,12 @@ pub fn folding_ranges(file: &File) -> Vec<Fold> {
             continue;
         }
         if node.kind() == COMMENT {
-            contiguous_range_for_comment(node, &mut visited_comments)
-                .map(|range| res.push(Fold { range, kind: FoldKind::Comment }));
+            contiguous_range_for_comment(node, &mut visited_comments).map(|range| {
+                res.push(Fold {
+                    range,
+                    kind: FoldKind::Comment,
+                })
+            });
         }
     }
@@ -49,13 +54,11 @@ fn fold_kind(kind: SyntaxKind) -> Option<FoldKind> {
     match kind {
         COMMENT => Some(FoldKind::Comment),
         USE_ITEM => Some(FoldKind::Imports),
-        _ => None
+        _ => None,
     }
 }

-fn has_newline(
-    node: SyntaxNodeRef,
-) -> bool {
+fn has_newline(node: SyntaxNodeRef) -> bool {
     for descendant in node.descendants() {
         if let Some(ws) = ast::Whitespace::cast(descendant) {
             if ws.has_newlines() {
@@ -100,9 +103,7 @@ fn contiguous_range_for_comment<'a>(
             // The comment group ends because either:
             // * An element of a different kind was reached
             // * A comment of a different flavor was reached
-            _ => {
-                break
-            }
+            _ => break,
         }
     }
@@ -128,7 +129,11 @@ mod tests {
         let folds = folding_ranges(&file);
         assert_eq!(folds.len(), ranges.len());
-        for ((fold, range), fold_kind) in folds.into_iter().zip(ranges.into_iter()).zip(fold_kinds.into_iter()) {
+        for ((fold, range), fold_kind) in folds
+            .into_iter()
+            .zip(ranges.into_iter())
+            .zip(fold_kinds.into_iter())
+        {
             assert_eq!(fold.range.start(), range.start());
             assert_eq!(fold.range.end(), range.end());
             assert_eq!(&fold.kind, fold_kind);
@@ -181,5 +186,4 @@ fn main() {
         do_check(text, folds);
     }
 }


@@ -1,44 +1,41 @@
-extern crate ra_syntax;
-extern crate superslice;
 extern crate itertools;
 extern crate join_to_string;
+extern crate ra_syntax;
 extern crate rustc_hash;
+extern crate superslice;

 #[cfg(test)]
 #[macro_use]
 extern crate test_utils as _test_utils;

-mod extend_selection;
-mod symbols;
-mod line_index;
-mod edit;
-mod folding_ranges;
 mod code_actions;
-mod typing;
 mod completion;
+mod edit;
+mod extend_selection;
+mod folding_ranges;
+mod line_index;
 mod scope;
+mod symbols;
 #[cfg(test)]
 mod test_utils;
+mod typing;

-use ra_syntax::{
-    File, TextUnit, TextRange, SmolStr, SyntaxNodeRef,
-    ast::{self, AstNode, NameOwner},
-    algo::find_leaf_at_offset,
-    SyntaxKind::{self, *},
-};
-pub use ra_syntax::AtomEdit;
-pub use self::{
-    line_index::{LineIndex, LineCol},
-    extend_selection::extend_selection,
-    symbols::{StructureNode, file_structure, FileSymbol, file_symbols},
-    edit::{EditBuilder, Edit},
-    code_actions::{
-        LocalEdit,
-        flip_comma, add_derive, add_impl,
-        introduce_variable,
-    },
-    typing::{join_lines, on_eq_typed, on_enter},
-    completion::{scope_completion, CompletionItem},
-    folding_ranges::{Fold, FoldKind, folding_ranges}
-};
+pub use self::{
+    code_actions::{add_derive, add_impl, flip_comma, introduce_variable, LocalEdit},
+    completion::{scope_completion, CompletionItem},
+    edit::{Edit, EditBuilder},
+    extend_selection::extend_selection,
+    folding_ranges::{folding_ranges, Fold, FoldKind},
+    line_index::{LineCol, LineIndex},
+    symbols::{file_structure, file_symbols, FileSymbol, StructureNode},
+    typing::{join_lines, on_enter, on_eq_typed},
+};
+pub use ra_syntax::AtomEdit;
+use ra_syntax::{
+    algo::find_leaf_at_offset,
+    ast::{self, AstNode, NameOwner},
+    File, SmolStr,
+    SyntaxKind::{self, *},
+    SyntaxNodeRef, TextRange, TextUnit,
+};

 #[derive(Debug)]
@@ -67,10 +64,7 @@ pub enum RunnableKind {
 pub fn matching_brace(file: &File, offset: TextUnit) -> Option<TextUnit> {
     const BRACES: &[SyntaxKind] = &[
-        L_CURLY, R_CURLY,
-        L_BRACK, R_BRACK,
-        L_PAREN, R_PAREN,
-        L_ANGLE, R_ANGLE,
+        L_CURLY, R_CURLY, L_BRACK, R_BRACK, L_PAREN, R_PAREN, L_ANGLE, R_ANGLE,
     ];
     let (brace_node, brace_idx) = find_leaf_at_offset(file.syntax(), offset)
         .filter_map(|node| {
@@ -80,7 +74,8 @@ pub fn matching_brace(file: &File, offset: TextUnit) -> Option<TextUnit> {
         .next()?;
     let parent = brace_node.parent()?;
     let matching_kind = BRACES[brace_idx ^ 1];
-    let matching_node = parent.children()
+    let matching_node = parent
+        .children()
         .find(|node| node.kind() == matching_kind)?;
     Some(matching_node.range().start())
 }
@@ -108,10 +103,13 @@ pub fn highlight(file: &File) -> Vec<HighlightedRange> {
 }

 pub fn diagnostics(file: &File) -> Vec<Diagnostic> {
-    file.errors().into_iter().map(|err| Diagnostic {
+    file.errors()
+        .into_iter()
+        .map(|err| Diagnostic {
             range: TextRange::offset_len(err.offset, 1.into()),
             msg: "Syntax Error: ".to_string() + &err.msg,
-    }).collect()
+        })
+        .collect()
 }

 pub fn syntax_tree(file: &File) -> String {
@@ -119,7 +117,8 @@ pub fn syntax_tree(file: &File) -> String {
 }

 pub fn runnables(file: &File) -> Vec<Runnable> {
-    file.syntax().descendants()
+    file.syntax()
+        .descendants()
         .filter_map(ast::FnDef::cast)
         .filter_map(|f| {
             let name = f.name()?.text();
@@ -127,7 +126,7 @@ pub fn runnables(file: &File) -> Vec<Runnable> {
                 RunnableKind::Bin
             } else if f.has_atom_attr("test") {
                 RunnableKind::Test {
-                    name: name.to_string()
+                    name: name.to_string(),
                 }
             } else {
                 return None;
@@ -145,15 +144,18 @@ pub fn find_node_at_offset<'a, N: AstNode<'a>>(
     offset: TextUnit,
 ) -> Option<N> {
     let leaves = find_leaf_at_offset(syntax, offset);
-    let leaf = leaves.clone()
+    let leaf = leaves
+        .clone()
         .find(|leaf| !leaf.kind().is_trivia())
         .or_else(|| leaves.right_biased())?;
-    leaf.ancestors()
-        .filter_map(N::cast)
-        .next()
+    leaf.ancestors().filter_map(N::cast).next()
 }

-pub fn resolve_local_name(file: &File, offset: TextUnit, name_ref: ast::NameRef) -> Option<(SmolStr, TextRange)> {
+pub fn resolve_local_name(
+    file: &File,
+    offset: TextUnit,
+    name_ref: ast::NameRef,
+) -> Option<(SmolStr, TextRange)> {
     let fn_def = find_node_at_offset::<ast::FnDef>(file.syntax(), offset)?;
     let scopes = scope::FnScopes::new(fn_def);
     let scope_entry = scope::resolve_local_name(name_ref, &scopes)?;
@@ -164,15 +166,17 @@ pub fn resolve_local_name(file: &File, offset: TextUnit, name_ref: ast::NameRef)
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::test_utils::{assert_eq_dbg, extract_offset, add_cursor};
+    use crate::test_utils::{add_cursor, assert_eq_dbg, extract_offset};

     #[test]
     fn test_highlighting() {
-        let file = File::parse(r#"
+        let file = File::parse(
+            r#"
 // comment
 fn main() {}
 println!("Hello, {}!", 92);
-"#);
+"#,
+        );
         let hls = highlight(&file);
         assert_eq_dbg(
             r#"[HighlightedRange { range: [1; 11), tag: "comment" },
@@ -187,7 +191,8 @@ fn main() {}

     #[test]
     fn test_runnables() {
-        let file = File::parse(r#"
+        let file = File::parse(
+            r#"
 fn main() {}

 #[test]
@@ -196,7 +201,8 @@ fn test_foo() {}
 #[test]
 #[ignore]
 fn test_foo() {}
-"#);
+"#,
+        );
         let runnables = runnables(&file);
         assert_eq_dbg(
             r#"[Runnable { range: [1; 13), kind: Bin },
@@ -219,9 +225,6 @@ fn test_foo() {}
             assert_eq_text!(after, &actual);
         }

-        do_check(
-            "struct Foo { a: i32, }<|>",
-            "struct Foo <|>{ a: i32, }",
-        );
+        do_check("struct Foo { a: i32, }<|>", "struct Foo <|>{ a: i32, }");
     }
 }


@@ -1,5 +1,5 @@
-use superslice::Ext;
 use crate::TextUnit;
+use superslice::Ext;

 #[derive(Clone, Debug, Hash, PartialEq, Eq)]
 pub struct LineIndex {
@@ -29,7 +29,10 @@ impl LineIndex {
         let line = self.newlines.upper_bound(&offset) - 1;
         let line_start_offset = self.newlines[line];
         let col = offset - line_start_offset;
-        return LineCol { line: line as u32, col };
+        return LineCol {
+            line: line as u32,
+            col,
+        };
     }

     pub fn offset(&self, line_col: LineCol) -> TextUnit {
@@ -42,21 +45,105 @@ impl LineIndex {
 fn test_line_index() {
     let text = "hello\nworld";
     let index = LineIndex::new(text);
-    assert_eq!(index.line_col(0.into()), LineCol { line: 0, col: 0.into() });
-    assert_eq!(index.line_col(1.into()), LineCol { line: 0, col: 1.into() });
-    assert_eq!(index.line_col(5.into()), LineCol { line: 0, col: 5.into() });
-    assert_eq!(index.line_col(6.into()), LineCol { line: 1, col: 0.into() });
-    assert_eq!(index.line_col(7.into()), LineCol { line: 1, col: 1.into() });
-    assert_eq!(index.line_col(8.into()), LineCol { line: 1, col: 2.into() });
-    assert_eq!(index.line_col(10.into()), LineCol { line: 1, col: 4.into() });
-    assert_eq!(index.line_col(11.into()), LineCol { line: 1, col: 5.into() });
-    assert_eq!(index.line_col(12.into()), LineCol { line: 1, col: 6.into() });
+    assert_eq!(
+        index.line_col(0.into()),
+        LineCol {
+            line: 0,
+            col: 0.into()
+        }
+    );
+    assert_eq!(
+        index.line_col(1.into()),
+        LineCol {
+            line: 0,
+            col: 1.into()
+        }
+    );
+    assert_eq!(
+        index.line_col(5.into()),
+        LineCol {
+            line: 0,
+            col: 5.into()
+        }
+    );
+    assert_eq!(
+        index.line_col(6.into()),
+        LineCol {
+            line: 1,
+            col: 0.into()
+        }
+    );
+    assert_eq!(
+        index.line_col(7.into()),
+        LineCol {
+            line: 1,
+            col: 1.into()
+        }
+    );
+    assert_eq!(
+        index.line_col(8.into()),
+        LineCol {
+            line: 1,
+            col: 2.into()
+        }
+    );
+    assert_eq!(
+        index.line_col(10.into()),
+        LineCol {
+            line: 1,
+            col: 4.into()
+        }
+    );
+    assert_eq!(
+        index.line_col(11.into()),
+        LineCol {
+            line: 1,
+            col: 5.into()
+        }
+    );
+    assert_eq!(
+        index.line_col(12.into()),
+        LineCol {
+            line: 1,
+            col: 6.into()
+        }
+    );

     let text = "\nhello\nworld";
     let index = LineIndex::new(text);
-    assert_eq!(index.line_col(0.into()), LineCol { line: 0, col: 0.into() });
-    assert_eq!(index.line_col(1.into()), LineCol { line: 1, col: 0.into() });
-    assert_eq!(index.line_col(2.into()), LineCol { line: 1, col: 1.into() });
-    assert_eq!(index.line_col(6.into()), LineCol { line: 1, col: 5.into() });
-    assert_eq!(index.line_col(7.into()), LineCol { line: 2, col: 0.into() });
+    assert_eq!(
+        index.line_col(0.into()),
+        LineCol {
+            line: 0,
+            col: 0.into()
+        }
+    );
+    assert_eq!(
+        index.line_col(1.into()),
+        LineCol {
+            line: 1,
+            col: 0.into()
+        }
+    );
+    assert_eq!(
+        index.line_col(2.into()),
+        LineCol {
+            line: 1,
+            col: 1.into()
+        }
+    );
+    assert_eq!(
+        index.line_col(6.into()),
+        LineCol {
+            line: 1,
+            col: 5.into()
+        }
+    );
+    assert_eq!(
+        index.line_col(7.into()),
+        LineCol {
+            line: 2,
+            col: 0.into()
+        }
+    );
 }
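
The tests above pin down the offset-to-(line, col) mapping: record each line's starting offset once, then binary-search that table per query. A minimal self-contained sketch of the same idea, with illustrative names that are not this crate's API (it uses a superslice upper_bound; standard-library partition_point is the equivalent here):

use std::cmp::PartialEq;

#[derive(Debug, PartialEq)]
struct LineCol {
    line: u32,
    col: u32,
}

struct LineIndex {
    // Offset of the first byte of each line; line 0 always starts at 0.
    line_starts: Vec<u32>,
}

impl LineIndex {
    fn new(text: &str) -> LineIndex {
        let mut line_starts = vec![0];
        for (i, b) in text.bytes().enumerate() {
            if b == b'\n' {
                line_starts.push(i as u32 + 1);
            }
        }
        LineIndex { line_starts }
    }

    fn line_col(&self, offset: u32) -> LineCol {
        // Number of line starts <= offset, minus one, is the containing line.
        let line = self.line_starts.partition_point(|&it| it <= offset) - 1;
        LineCol {
            line: line as u32,
            col: offset - self.line_starts[line],
        }
    }
}

fn main() {
    let index = LineIndex::new("hello\nworld");
    // Offset 6 is the 'w' right after the newline: line 1, column 0,
    // matching the assertion in the test above.
    assert_eq!(index.line_col(6), LineCol { line: 1, col: 0 });
    println!("ok");
}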


@@ -1,10 +1,11 @@
 use std::fmt;
 use rustc_hash::FxHashMap;
 use ra_syntax::{
-    SyntaxNodeRef, SyntaxNode, SmolStr, AstNode,
-    ast::{self, NameOwner, LoopBodyOwner, ArgListOwner},
-    algo::{generate}
+    algo::generate,
+    ast::{self, ArgListOwner, LoopBodyOwner, NameOwner},
+    AstNode, SmolStr, SyntaxNode, SyntaxNodeRef,
 };

 type ScopeId = usize;
@@ -19,11 +20,12 @@ pub struct FnScopes {
 impl FnScopes {
     pub fn new(fn_def: ast::FnDef) -> FnScopes {
         let mut scopes = FnScopes {
-            self_param: fn_def.param_list()
+            self_param: fn_def
+                .param_list()
                 .and_then(|it| it.self_param())
                 .map(|it| it.syntax().owned()),
             scopes: Vec::new(),
-            scope_for: FxHashMap::default()
+            scope_for: FxHashMap::default(),
         };
         let root = scopes.root_scope();
         scopes.add_params_bindings(root, fn_def.param_list());
@@ -36,26 +38,37 @@ impl FnScopes {
         &self.scopes[scope].entries
     }
     pub fn scope_chain<'a>(&'a self, node: SyntaxNodeRef) -> impl Iterator<Item = ScopeId> + 'a {
-        generate(self.scope_for(node), move |&scope| self.scopes[scope].parent)
+        generate(self.scope_for(node), move |&scope| {
+            self.scopes[scope].parent
+        })
     }
     fn root_scope(&mut self) -> ScopeId {
         let res = self.scopes.len();
-        self.scopes.push(ScopeData { parent: None, entries: vec![] });
+        self.scopes.push(ScopeData {
+            parent: None,
+            entries: vec![],
+        });
         res
     }
     fn new_scope(&mut self, parent: ScopeId) -> ScopeId {
         let res = self.scopes.len();
-        self.scopes.push(ScopeData { parent: Some(parent), entries: vec![] });
+        self.scopes.push(ScopeData {
+            parent: Some(parent),
+            entries: vec![],
+        });
         res
     }
     fn add_bindings(&mut self, scope: ScopeId, pat: ast::Pat) {
-        let entries = pat.syntax().descendants()
+        let entries = pat
+            .syntax()
+            .descendants()
             .filter_map(ast::BindPat::cast)
            .filter_map(ScopeEntry::new);
         self.scopes[scope].entries.extend(entries);
     }
     fn add_params_bindings(&mut self, scope: ScopeId, params: Option<ast::ParamList>) {
-        params.into_iter()
+        params
+            .into_iter()
             .flat_map(|it| it.params())
             .filter_map(|it| it.pat())
             .for_each(|it| self.add_bindings(scope, it));
@@ -71,25 +84,24 @@ impl FnScopes {
 }

 pub struct ScopeEntry {
-    syntax: SyntaxNode
+    syntax: SyntaxNode,
 }

 impl ScopeEntry {
     fn new(pat: ast::BindPat) -> Option<ScopeEntry> {
         if pat.name().is_some() {
-            Some(ScopeEntry { syntax: pat.syntax().owned() })
+            Some(ScopeEntry {
+                syntax: pat.syntax().owned(),
+            })
         } else {
             None
         }
     }
     pub fn name(&self) -> SmolStr {
-        self.ast().name()
-            .unwrap()
-            .text()
+        self.ast().name().unwrap().text()
     }
     pub fn ast(&self) -> ast::BindPat {
-        ast::BindPat::cast(self.syntax.borrowed())
-            .unwrap()
+        ast::BindPat::cast(self.syntax.borrowed()).unwrap()
     }
 }
@@ -132,16 +144,16 @@ fn compute_block_scopes(block: ast::Block, scopes: &mut FnScopes, mut scope: Sco
 fn compute_expr_scopes(expr: ast::Expr, scopes: &mut FnScopes, scope: ScopeId) {
     match expr {
         ast::Expr::IfExpr(e) => {
-            let cond_scope = e.condition().and_then(|cond| {
-                compute_cond_scopes(cond, scopes, scope)
-            });
+            let cond_scope = e
+                .condition()
+                .and_then(|cond| compute_cond_scopes(cond, scopes, scope));
             if let Some(block) = e.then_branch() {
                 compute_block_scopes(block, scopes, cond_scope.unwrap_or(scope));
             }
             if let Some(block) = e.else_branch() {
                 compute_block_scopes(block, scopes, scope);
             }
-        },
+        }
         ast::Expr::BlockExpr(e) => {
             if let Some(block) = e.block() {
                 compute_block_scopes(block, scopes, scope);
@@ -153,9 +165,9 @@ fn compute_expr_scopes(expr: ast::Expr, scopes: &mut FnScopes, scope: ScopeId) {
             }
         }
         ast::Expr::WhileExpr(e) => {
-            let cond_scope = e.condition().and_then(|cond| {
-                compute_cond_scopes(cond, scopes, scope)
-            });
+            let cond_scope = e
+                .condition()
+                .and_then(|cond| compute_cond_scopes(cond, scopes, scope));
             if let Some(block) = e.loop_body() {
                 compute_block_scopes(block, scopes, cond_scope.unwrap_or(scope));
             }
@@ -201,25 +213,31 @@ fn compute_expr_scopes(expr: ast::Expr, scopes: &mut FnScopes, scope: ScopeId) {
             }
         }
     }
-        _ => {
-            expr.syntax().children()
-                .filter_map(ast::Expr::cast)
-                .for_each(|expr| compute_expr_scopes(expr, scopes, scope))
-        }
+        _ => expr
+            .syntax()
+            .children()
+            .filter_map(ast::Expr::cast)
+            .for_each(|expr| compute_expr_scopes(expr, scopes, scope)),
     };

     fn compute_call_scopes(
         receiver: Option<ast::Expr>,
         arg_list: Option<ast::ArgList>,
-        scopes: &mut FnScopes, scope: ScopeId,
+        scopes: &mut FnScopes,
+        scope: ScopeId,
     ) {
-        arg_list.into_iter()
+        arg_list
+            .into_iter()
            .flat_map(|it| it.args())
            .chain(receiver)
            .for_each(|expr| compute_expr_scopes(expr, scopes, scope));
     }

-    fn compute_cond_scopes(cond: ast::Condition, scopes: &mut FnScopes, scope: ScopeId) -> Option<ScopeId> {
+    fn compute_cond_scopes(
+        cond: ast::Condition,
+        scopes: &mut FnScopes,
+        scope: ScopeId,
+    ) -> Option<ScopeId> {
         if let Some(expr) = cond.expr() {
             compute_expr_scopes(expr, scopes, scope);
         }
@@ -236,14 +254,18 @@ fn compute_expr_scopes(expr: ast::Expr, scopes: &mut FnScopes, scope: ScopeId) {
 #[derive(Debug)]
 struct ScopeData {
     parent: Option<ScopeId>,
-    entries: Vec<ScopeEntry>
+    entries: Vec<ScopeEntry>,
 }

-pub fn resolve_local_name<'a>(name_ref: ast::NameRef, scopes: &'a FnScopes) -> Option<&'a ScopeEntry> {
+pub fn resolve_local_name<'a>(
+    name_ref: ast::NameRef,
+    scopes: &'a FnScopes,
+) -> Option<&'a ScopeEntry> {
     use rustc_hash::FxHashSet;
     let mut shadowed = FxHashSet::default();
-    let ret = scopes.scope_chain(name_ref.syntax())
+    let ret = scopes
+        .scope_chain(name_ref.syntax())
         .flat_map(|scope| scopes.entries(scope).iter())
         .filter(|entry| shadowed.insert(entry.name()))
         .filter(|entry| entry.name() == name_ref.text())
@@ -255,8 +277,8 @@ pub fn resolve_local_name<'a>(name_ref: ast::NameRef, scopes: &'a FnScopes) -> O
 #[cfg(test)]
 mod tests {
     use super::*;
-    use ra_syntax::File;
     use crate::{find_node_at_offset, test_utils::extract_offset};
+    use ra_syntax::File;

     fn do_check(code: &str, expected: &[&str]) {
         let (off, code) = extract_offset(code);
@@ -272,7 +294,8 @@ mod tests {
         let marker: ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap();
         let fn_def: ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap();
         let scopes = FnScopes::new(fn_def);
-        let actual = scopes.scope_chain(marker.syntax())
+        let actual = scopes
+            .scope_chain(marker.syntax())
             .flat_map(|scope| scopes.entries(scope))
             .map(|it| it.name())
             .collect::<Vec<_>>();
@@ -281,7 +304,8 @@ mod tests {

     #[test]
     fn test_lambda_scope() {
-        do_check(r"
+        do_check(
+            r"
             fn quux(foo: i32) {
                 let f = |bar, baz: i32| {
                 <|>
@@ -293,7 +317,8 @@ mod tests {

     #[test]
     fn test_call_scope() {
-        do_check(r"
+        do_check(
+            r"
             fn quux() {
                 f(|x| <|> );
             }",
@@ -303,7 +328,8 @@ mod tests {

     #[test]
     fn test_metod_call_scope() {
-        do_check(r"
+        do_check(
+            r"
             fn quux() {
                 z.f(|x| <|> );
             }",
@@ -313,7 +339,8 @@ mod tests {

     #[test]
     fn test_loop_scope() {
-        do_check(r"
+        do_check(
+            r"
             fn quux() {
                 loop {
                     let x = ();
@@ -326,7 +353,8 @@ mod tests {

     #[test]
     fn test_match() {
-        do_check(r"
+        do_check(
+            r"
             fn quux() {
                 match () {
                     Some(x) => {
@@ -340,7 +368,8 @@ mod tests {

     #[test]
     fn test_shadow_variable() {
-        do_check(r"
+        do_check(
+            r"
             fn foo(x: String) {
                 let x : &str = &x<|>;
             }",
@@ -356,14 +385,20 @@ mod tests {
         let scopes = FnScopes::new(fn_def);

-        let local_name = resolve_local_name(name_ref, &scopes).unwrap().ast().name().unwrap();
-        let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into()).unwrap();
+        let local_name = resolve_local_name(name_ref, &scopes)
+            .unwrap()
+            .ast()
+            .name()
+            .unwrap();
+        let expected_name =
+            find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into()).unwrap();
         assert_eq!(local_name.syntax().range(), expected_name.syntax().range());
     }

     #[test]
     fn test_resolve_local_name() {
-        do_check_local_name(r#"
+        do_check_local_name(
+            r#"
             fn foo(x: i32, y: u32) {
                 {
                     let z = x * 2;
@@ -372,25 +407,30 @@ mod tests {
                 let t = x<|> * 3;
                 }
             }"#,
-            21);
+            21,
+        );
     }

     #[test]
     fn test_resolve_local_name_declaration() {
-        do_check_local_name(r#"
+        do_check_local_name(
+            r#"
             fn foo(x: String) {
                 let x : &str = &x<|>;
             }"#,
-            21);
+            21,
+        );
     }

     #[test]
     fn test_resolve_local_name_shadow() {
-        do_check_local_name(r"
+        do_check_local_name(
+            r"
             fn foo(x: String) {
                 let x : &str = &x;
                 x<|>
             }",
-            46);
+            46,
+        );
     }
 }
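
The shadowing tests above rely on one rule: walk the scope chain from the innermost scope outward and let the first binding of a name win. A standalone sketch of that resolution strategy, with hypothetical types rather than the crate's real FnScopes/ScopeEntry API:

use std::collections::HashSet;

struct Scope {
    parent: Option<usize>,     // index of the enclosing scope, if any
    entries: Vec<String>,      // names bound directly in this scope
}

// Walk scopes innermost-to-outermost; the first time we see a name it wins,
// and any same-named binding further out is treated as shadowed.
fn resolve(scopes: &[Scope], innermost: usize, name: &str) -> Option<usize> {
    let mut shadowed = HashSet::new();
    let mut current = Some(innermost);
    while let Some(scope) = current {
        for entry in &scopes[scope].entries {
            if shadowed.insert(entry.as_str()) && entry == name {
                return Some(scope);
            }
        }
        current = scopes[scope].parent;
    }
    None
}

fn main() {
    // Mirrors `fn foo(x: String) { let x: &str = &x; x }`:
    // scope 0 binds the parameter `x`, scope 1 the `let` rebinding.
    let scopes = vec![
        Scope { parent: None, entries: vec!["x".to_string()] },
        Scope { parent: Some(0), entries: vec!["x".to_string()] },
    ];
    assert_eq!(resolve(&scopes, 1, "x"), Some(1)); // the `let` shadows the param
    println!("ok");
}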


@@ -2,7 +2,6 @@ mod fn_scope;
 mod mod_scope;

 pub use self::{
-    fn_scope::{FnScopes, resolve_local_name},
+    fn_scope::{resolve_local_name, FnScopes},
     mod_scope::ModuleScope,
 };


@@ -1,6 +1,6 @@
 use ra_syntax::{
-    AstNode, SyntaxNode, SyntaxNodeRef, SmolStr,
     ast::{self, AstChildren},
+    AstNode, SmolStr, SyntaxNode, SyntaxNodeRef,
 };

 pub struct ModuleScope {
@@ -13,7 +13,8 @@ pub struct Entry {
 }

 enum EntryKind {
-    Item, Import,
+    Item,
+    Import,
 }

 impl ModuleScope {
@@ -34,9 +35,8 @@ impl ModuleScope {
                         collect_imports(tree, &mut entries);
                     }
                     continue;
-                },
-                ast::ModuleItem::ExternCrateItem(_) |
-                ast::ModuleItem::ImplItem(_) => continue,
+                }
+                ast::ModuleItem::ExternCrateItem(_) | ast::ModuleItem::ImplItem(_) => continue,
             };
             entries.extend(entry)
         }
@@ -52,20 +52,22 @@ impl ModuleScope {
 impl Entry {
     fn new<'a>(item: impl ast::NameOwner<'a>) -> Option<Entry> {
         let name = item.name()?;
-        Some(Entry { node: name.syntax().owned(), kind: EntryKind::Item })
+        Some(Entry {
+            node: name.syntax().owned(),
+            kind: EntryKind::Item,
+        })
     }
     fn new_import(path: ast::Path) -> Option<Entry> {
         let name_ref = path.segment()?.name_ref()?;
-        Some(Entry { node: name_ref.syntax().owned(), kind: EntryKind::Import })
+        Some(Entry {
+            node: name_ref.syntax().owned(),
+            kind: EntryKind::Import,
+        })
     }
     pub fn name(&self) -> SmolStr {
         match self.kind {
-            EntryKind::Item =>
-                ast::Name::cast(self.node.borrowed()).unwrap()
-                    .text(),
-            EntryKind::Import =>
-                ast::NameRef::cast(self.node.borrowed()).unwrap()
-                    .text(),
+            EntryKind::Item => ast::Name::cast(self.node.borrowed()).unwrap().text(),
+            EntryKind::Import => ast::NameRef::cast(self.node.borrowed()).unwrap().text(),
         }
     }
     pub fn syntax(&self) -> SyntaxNodeRef {
@@ -75,32 +77,31 @@ impl Entry {

 fn collect_imports(tree: ast::UseTree, acc: &mut Vec<Entry>) {
     if let Some(use_tree_list) = tree.use_tree_list() {
-        return use_tree_list.use_trees().for_each(|it| collect_imports(it, acc));
+        return use_tree_list
+            .use_trees()
+            .for_each(|it| collect_imports(it, acc));
     }
     if let Some(path) = tree.path() {
         acc.extend(Entry::new_import(path));
     }
 }

 #[cfg(test)]
 mod tests {
     use super::*;
-    use ra_syntax::{File, ast::ModuleItemOwner};
+    use ra_syntax::{ast::ModuleItemOwner, File};

     fn do_check(code: &str, expected: &[&str]) {
         let file = File::parse(&code);
         let scope = ModuleScope::new(file.ast().items());
-        let actual = scope.entries
-            .iter()
-            .map(|it| it.name())
-            .collect::<Vec<_>>();
+        let actual = scope.entries.iter().map(|it| it.name()).collect::<Vec<_>>();
         assert_eq!(expected, actual.as_slice());
     }

     #[test]
     fn test_module_scope() {
-        do_check("
+        do_check(
+            "
             struct Foo;
             enum Bar {}
             mod baz {}
@@ -110,6 +111,8 @@ mod tests {
                 t,
             };
             type T = ();
-        ", &["Foo", "Bar", "baz", "quux", "z", "t", "T"])
+        ",
+            &["Foo", "Bar", "baz", "quux", "z", "t", "T"],
+        )
     }
 }


@@ -1,12 +1,13 @@
-use crate::TextRange;
 use ra_syntax::{
-    SyntaxKind, SyntaxNodeRef, AstNode, File, SmolStr,
-    ast::{self, NameOwner},
     algo::{
         visit::{visitor, Visitor},
         walk::{walk, WalkEvent},
     },
+    ast::{self, NameOwner},
+    AstNode, File, SmolStr, SyntaxKind, SyntaxNodeRef,
 };
+use crate::TextRange;

 #[derive(Debug, Clone)]
 pub struct StructureNode {
@@ -25,9 +26,7 @@ pub struct FileSymbol {
 }

 pub fn file_symbols(file: &File) -> Vec<FileSymbol> {
-    file.syntax().descendants()
-        .filter_map(to_symbol)
-        .collect()
+    file.syntax().descendants().filter_map(to_symbol).collect()
 }

 fn to_symbol(node: SyntaxNodeRef) -> Option<FileSymbol> {
@@ -51,23 +50,20 @@ fn to_symbol(node: SyntaxNodeRef) -> Option<FileSymbol> {
         .accept(node)?
 }

 pub fn file_structure(file: &File) -> Vec<StructureNode> {
     let mut res = Vec::new();
     let mut stack = Vec::new();

     for event in walk(file.syntax()) {
         match event {
-            WalkEvent::Enter(node) => {
-                match structure_node(node) {
-                    Some(mut symbol) => {
-                        symbol.parent = stack.last().map(|&n| n);
-                        stack.push(res.len());
-                        res.push(symbol);
-                    }
-                    None => (),
-                }
-            }
+            WalkEvent::Enter(node) => match structure_node(node) {
+                Some(mut symbol) => {
+                    symbol.parent = stack.last().map(|&n| n);
+                    stack.push(res.len());
+                    res.push(symbol);
+                }
+                None => (),
+            },
             WalkEvent::Exit(node) => {
                 if structure_node(node).is_some() {
                     stack.pop().unwrap();
@@ -131,7 +127,8 @@ mod tests {

     #[test]
     fn test_file_structure() {
-        let file = File::parse(r#"
+        let file = File::parse(
+            r#"
 struct Foo {
     x: i32
 }
@@ -148,7 +145,8 @@ const C: i32 = 92;

 impl E {}

 impl fmt::Debug for E {}
-"#);
+"#,
+        );
         let symbols = file_structure(&file);
         assert_eq_dbg(
             r#"[StructureNode { parent: None, label: "Foo", navigation_range: [8; 11), node_range: [1; 26), kind: STRUCT_DEF },


@@ -1,12 +1,8 @@
-use ra_syntax::{File, TextUnit, TextRange};
-pub use crate::_test_utils::*;
 use crate::LocalEdit;
+pub use crate::_test_utils::*;
+use ra_syntax::{File, TextRange, TextUnit};

-pub fn check_action<F: Fn(&File, TextUnit) -> Option<LocalEdit>> (
-    before: &str,
-    after: &str,
-    f: F,
-) {
+pub fn check_action<F: Fn(&File, TextUnit) -> Option<LocalEdit>>(before: &str, after: &str, f: F) {
     let (before_cursor_pos, before) = extract_offset(before);
     let file = File::parse(&before);
     let result = f(&file, before_cursor_pos).expect("code action is not applicable");


@ -1,32 +1,30 @@
use std::mem; use std::mem;
use ra_syntax::{ use ra_syntax::{
TextUnit, TextRange, SyntaxNodeRef, File, AstNode, SyntaxKind, algo::{find_covering_node, find_leaf_at_offset, LeafAtOffset},
ast, ast,
algo::{ text_utils::{contains_offset_nonstrict, intersect},
find_covering_node, find_leaf_at_offset, LeafAtOffset, AstNode, File, SyntaxKind,
},
text_utils::{intersect, contains_offset_nonstrict},
SyntaxKind::*, SyntaxKind::*,
SyntaxNodeRef, TextRange, TextUnit,
}; };
use crate::{LocalEdit, EditBuilder, find_node_at_offset}; use crate::{find_node_at_offset, EditBuilder, LocalEdit};
pub fn join_lines(file: &File, range: TextRange) -> LocalEdit { pub fn join_lines(file: &File, range: TextRange) -> LocalEdit {
let range = if range.is_empty() { let range = if range.is_empty() {
let syntax = file.syntax(); let syntax = file.syntax();
let text = syntax.text().slice(range.start()..); let text = syntax.text().slice(range.start()..);
let pos = match text.find('\n') { let pos = match text.find('\n') {
None => return LocalEdit { None => {
return LocalEdit {
edit: EditBuilder::new().finish(), edit: EditBuilder::new().finish(),
cursor_position: None cursor_position: None,
}, }
Some(pos) => pos }
Some(pos) => pos,
}; };
TextRange::offset_len( TextRange::offset_len(range.start() + pos, TextUnit::of_char('\n'))
range.start() + pos,
TextUnit::of_char('\n'),
)
} else { } else {
range range
}; };
@ -58,7 +56,9 @@ pub fn join_lines(file: &File, range: TextRange) -> LocalEdit {
} }
pub fn on_enter(file: &File, offset: TextUnit) -> Option<LocalEdit> { pub fn on_enter(file: &File, offset: TextUnit) -> Option<LocalEdit> {
let comment = find_leaf_at_offset(file.syntax(), offset).left_biased().and_then(|it| ast::Comment::cast(it))?; let comment = find_leaf_at_offset(file.syntax(), offset)
.left_biased()
.and_then(|it| ast::Comment::cast(it))?;
if let ast::CommentFlavor::Multiline = comment.flavor() { if let ast::CommentFlavor::Multiline = comment.flavor() {
return None; return None;
@ -88,7 +88,7 @@ fn node_indent<'a>(file: &'a File, node: SyntaxNodeRef) -> Option<&'a str> {
} }
LeafAtOffset::Single(n) => { LeafAtOffset::Single(n) => {
assert!(n == node); assert!(n == node);
return Some("") return Some("");
} }
LeafAtOffset::None => unreachable!(), LeafAtOffset::None => unreachable!(),
}; };
@ -110,7 +110,12 @@ pub fn on_eq_typed(file: &File, offset: TextUnit) -> Option<LocalEdit> {
if contains_offset_nonstrict(expr_range, offset) && offset != expr_range.start() { if contains_offset_nonstrict(expr_range, offset) && offset != expr_range.start() {
return None; return None;
} }
if file.syntax().text().slice(offset..expr_range.start()).contains('\n') { if file
.syntax()
.text()
.slice(offset..expr_range.start())
.contains('\n')
{
return None; return None;
} }
} else { } else {
@ -125,12 +130,7 @@ pub fn on_eq_typed(file: &File, offset: TextUnit) -> Option<LocalEdit> {
}) })
} }
fn remove_newline( fn remove_newline(edit: &mut EditBuilder, node: SyntaxNodeRef, node_text: &str, offset: TextUnit) {
edit: &mut EditBuilder,
node: SyntaxNodeRef,
node_text: &str,
offset: TextUnit,
) {
if node.kind() != WHITESPACE || node_text.bytes().filter(|&b| b == b'\n').count() != 1 { if node.kind() != WHITESPACE || node_text.bytes().filter(|&b| b == b'\n').count() != 1 {
// The node is either the first or the last in the file // The node is either the first or the last in the file
let suff = &node_text[TextRange::from_to( let suff = &node_text[TextRange::from_to(
@ -156,7 +156,7 @@ fn remove_newline(
// //
// into `my_function(<some-expr>)` // into `my_function(<some-expr>)`
if join_single_expr_block(edit, node).is_some() { if join_single_expr_block(edit, node).is_some() {
return return;
} }
// The node is between two other nodes // The node is between two other nodes
@ -170,34 +170,28 @@ fn remove_newline(
// Adds: a single whitespace // Adds: a single whitespace
edit.replace( edit.replace(
TextRange::from_to(prev.range().start(), node.range().end()), TextRange::from_to(prev.range().start(), node.range().end()),
" ".to_string() " ".to_string(),
); );
} else if let (Some(_), Some(next)) = (ast::Comment::cast(prev), ast::Comment::cast(next)) { } else if let (Some(_), Some(next)) = (ast::Comment::cast(prev), ast::Comment::cast(next)) {
// Removes: newline (incl. surrounding whitespace), start of the next comment // Removes: newline (incl. surrounding whitespace), start of the next comment
edit.delete(TextRange::from_to( edit.delete(TextRange::from_to(
node.range().start(), node.range().start(),
next.syntax().range().start() + TextUnit::of_str(next.prefix()) next.syntax().range().start() + TextUnit::of_str(next.prefix()),
)); ));
} else { } else {
// Remove newline but add a computed amount of whitespace characters // Remove newline but add a computed amount of whitespace characters
edit.replace( edit.replace(node.range(), compute_ws(prev, next).to_string());
node.range(),
compute_ws(prev, next).to_string(),
);
} }
} }
fn is_trailing_comma(left: SyntaxKind, right: SyntaxKind) -> bool { fn is_trailing_comma(left: SyntaxKind, right: SyntaxKind) -> bool {
match (left, right) { match (left, right) {
(COMMA, R_PAREN) | (COMMA, R_BRACK) => true, (COMMA, R_PAREN) | (COMMA, R_BRACK) => true,
_ => false _ => false,
} }
} }
fn join_single_expr_block( fn join_single_expr_block(edit: &mut EditBuilder, node: SyntaxNodeRef) -> Option<()> {
edit: &mut EditBuilder,
node: SyntaxNodeRef,
) -> Option<()> {
let block = ast::Block::cast(node.parent()?)?; let block = ast::Block::cast(node.parent()?)?;
let block_expr = ast::BlockExpr::cast(block.syntax().parent()?)?; let block_expr = ast::BlockExpr::cast(block.syntax().parent()?)?;
let expr = single_expr(block)?; let expr = single_expr(block)?;
@ -244,7 +238,7 @@ fn compute_ws(left: SyntaxNodeRef, right: SyntaxNodeRef) -> &'static str {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use crate::test_utils::{check_action, extract_range, extract_offset, add_cursor}; use crate::test_utils::{add_cursor, check_action, extract_offset, extract_range};
fn check_join_lines(before: &str, after: &str) { fn check_join_lines(before: &str, after: &str) {
check_action(before, after, |file, offset| { check_action(before, after, |file, offset| {
@ -256,118 +250,142 @@ mod tests {
#[test] #[test]
fn test_join_lines_comma() { fn test_join_lines_comma() {
check_join_lines(r" check_join_lines(
r"
fn foo() { fn foo() {
<|>foo(1, <|>foo(1,
) )
} }
", r" ",
r"
fn foo() { fn foo() {
<|>foo(1) <|>foo(1)
} }
"); ",
);
} }
#[test] #[test]
fn test_join_lines_lambda_block() { fn test_join_lines_lambda_block() {
check_join_lines(r" check_join_lines(
r"
pub fn reparse(&self, edit: &AtomEdit) -> File { pub fn reparse(&self, edit: &AtomEdit) -> File {
<|>self.incremental_reparse(edit).unwrap_or_else(|| { <|>self.incremental_reparse(edit).unwrap_or_else(|| {
self.full_reparse(edit) self.full_reparse(edit)
}) })
} }
", r" ",
r"
pub fn reparse(&self, edit: &AtomEdit) -> File { pub fn reparse(&self, edit: &AtomEdit) -> File {
<|>self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit)) <|>self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit))
} }
"); ",
);
} }
#[test] #[test]
fn test_join_lines_block() { fn test_join_lines_block() {
check_join_lines(r" check_join_lines(
r"
fn foo() { fn foo() {
foo(<|>{ foo(<|>{
92 92
}) })
}", r" }",
r"
fn foo() { fn foo() {
foo(<|>92) foo(<|>92)
}"); }",
);
} }
#[test] #[test]
fn test_join_lines_normal_comments() { fn test_join_lines_normal_comments() {
check_join_lines(r" check_join_lines(
r"
fn foo() { fn foo() {
// Hello<|> // Hello<|>
// world! // world!
} }
", r" ",
r"
fn foo() { fn foo() {
// Hello<|> world! // Hello<|> world!
} }
"); ",
);
} }
#[test] #[test]
fn test_join_lines_doc_comments() { fn test_join_lines_doc_comments() {
check_join_lines(r" check_join_lines(
r"
fn foo() { fn foo() {
/// Hello<|> /// Hello<|>
/// world! /// world!
} }
", r" ",
r"
fn foo() { fn foo() {
/// Hello<|> world! /// Hello<|> world!
} }
"); ",
);
} }
#[test] #[test]
fn test_join_lines_mod_comments() { fn test_join_lines_mod_comments() {
check_join_lines(r" check_join_lines(
r"
fn foo() { fn foo() {
//! Hello<|> //! Hello<|>
//! world! //! world!
} }
", r" ",
r"
fn foo() { fn foo() {
//! Hello<|> world! //! Hello<|> world!
} }
"); ",
);
} }
#[test] #[test]
fn test_join_lines_multiline_comments_1() { fn test_join_lines_multiline_comments_1() {
check_join_lines(r" check_join_lines(
r"
fn foo() { fn foo() {
// Hello<|> // Hello<|>
/* world! */ /* world! */
} }
", r" ",
r"
fn foo() { fn foo() {
// Hello<|> world! */ // Hello<|> world! */
} }
"); ",
);
} }
#[test] #[test]
fn test_join_lines_multiline_comments_2() { fn test_join_lines_multiline_comments_2() {
check_join_lines(r" check_join_lines(
r"
fn foo() { fn foo() {
// The<|> // The<|>
/* quick /* quick
brown brown
fox! */ fox! */
} }
", r" ",
r"
fn foo() { fn foo() {
// The<|> quick // The<|> quick
brown brown
fox! */ fox! */
} }
"); ",
);
} }
fn check_join_lines_sel(before: &str, after: &str) { fn check_join_lines_sel(before: &str, after: &str) {
@ -380,59 +398,71 @@ fn foo() {
#[test] #[test]
fn test_join_lines_selection_fn_args() { fn test_join_lines_selection_fn_args() {
check_join_lines_sel(r" check_join_lines_sel(
r"
fn foo() { fn foo() {
<|>foo(1, <|>foo(1,
2, 2,
3, 3,
<|>) <|>)
} }
", r" ",
r"
fn foo() { fn foo() {
foo(1, 2, 3) foo(1, 2, 3)
} }
"); ",
);
} }
#[test] #[test]
fn test_join_lines_selection_struct() { fn test_join_lines_selection_struct() {
check_join_lines_sel(r" check_join_lines_sel(
r"
struct Foo <|>{ struct Foo <|>{
f: u32, f: u32,
}<|> }<|>
", r" ",
r"
struct Foo { f: u32 } struct Foo { f: u32 }
"); ",
);
} }
#[test] #[test]
fn test_join_lines_selection_dot_chain() { fn test_join_lines_selection_dot_chain() {
check_join_lines_sel(r" check_join_lines_sel(
r"
fn foo() { fn foo() {
join(<|>type_params.type_params() join(<|>type_params.type_params()
.filter_map(|it| it.name()) .filter_map(|it| it.name())
.map(|it| it.text())<|>) .map(|it| it.text())<|>)
}", r" }",
r"
fn foo() { fn foo() {
join(type_params.type_params().filter_map(|it| it.name()).map(|it| it.text())) join(type_params.type_params().filter_map(|it| it.name()).map(|it| it.text()))
}"); }",
);
} }
#[test] #[test]
fn test_join_lines_selection_lambda_block_body() { fn test_join_lines_selection_lambda_block_body() {
check_join_lines_sel(r" check_join_lines_sel(
r"
pub fn handle_find_matching_brace() { pub fn handle_find_matching_brace() {
params.offsets params.offsets
.map(|offset| <|>{ .map(|offset| <|>{
world.analysis().matching_brace(&file, offset).unwrap_or(offset) world.analysis().matching_brace(&file, offset).unwrap_or(offset)
}<|>) }<|>)
.collect(); .collect();
}", r" }",
r"
pub fn handle_find_matching_brace() { pub fn handle_find_matching_brace() {
params.offsets params.offsets
.map(|offset| world.analysis().matching_brace(&file, offset).unwrap_or(offset)) .map(|offset| world.analysis().matching_brace(&file, offset).unwrap_or(offset))
.collect(); .collect();
}"); }",
);
} }
#[test] #[test]
@@ -454,15 +484,18 @@ pub fn handle_find_matching_brace() {
// let foo =; // let foo =;
// } // }
// "); // ");
do_check(r" do_check(
r"
fn foo() { fn foo() {
let foo =<|> 1 + 1 let foo =<|> 1 + 1
} }
", r" ",
r"
fn foo() { fn foo() {
let foo = 1 + 1; let foo = 1 + 1;
} }
"); ",
);
// do_check(r" // do_check(r"
// fn foo() { // fn foo() {
// let foo =<|> // let foo =<|>
@@ -496,28 +529,34 @@ fn foo() {
assert!(apply_on_enter(text).is_none()) assert!(apply_on_enter(text).is_none())
} }
do_check(r" do_check(
r"
/// Some docs<|> /// Some docs<|>
fn foo() { fn foo() {
} }
", r" ",
r"
/// Some docs /// Some docs
/// <|> /// <|>
fn foo() { fn foo() {
} }
"); ",
do_check(r" );
do_check(
r"
impl S { impl S {
/// Some<|> docs. /// Some<|> docs.
fn foo() {} fn foo() {}
} }
", r" ",
r"
impl S { impl S {
/// Some /// Some
/// <|> docs. /// <|> docs.
fn foo() {} fn foo() {}
} }
"); ",
);
do_check_noop(r"<|>//! docz"); do_check_noop(r"<|>//! docz");
} }
} }
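The fixtures in the tests above encode the cursor position as a `<|>` marker inside a raw string. A minimal sketch of a helper that strips the marker and recovers its byte offset; the name `extract_offset` and its exact signature are assumptions for illustration, not part of this diff:

// Hypothetical fixture helper: split a `<|>`-marked string into the
// cursor's byte offset and the text with the marker removed.
fn extract_offset(text: &str) -> (usize, String) {
    let cursor = "<|>";
    let offset = text.find(cursor).expect("fixture must contain <|>");
    let mut cleaned = String::with_capacity(text.len() - cursor.len());
    cleaned.push_str(&text[..offset]);
    cleaned.push_str(&text[offset + cursor.len()..]);
    (offset, cleaned)
}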
View file
@@ -1,14 +1,8 @@
use languageserver_types::{ use languageserver_types::{
ServerCapabilities, CodeActionProviderCapability, CompletionOptions, DocumentOnTypeFormattingOptions,
CodeActionProviderCapability, ExecuteCommandOptions, FoldingRangeProviderCapability, ServerCapabilities,
FoldingRangeProviderCapability, SignatureHelpOptions, TextDocumentSyncCapability, TextDocumentSyncKind,
TextDocumentSyncCapability,
TextDocumentSyncOptions, TextDocumentSyncOptions,
TextDocumentSyncKind,
ExecuteCommandOptions,
CompletionOptions,
SignatureHelpOptions,
DocumentOnTypeFormattingOptions,
}; };
pub fn server_capabilities() -> ServerCapabilities { pub fn server_capabilities() -> ServerCapabilities {
@@ -20,7 +14,7 @@ pub fn server_capabilities() -> ServerCapabilities {
will_save: None, will_save: None,
will_save_wait_until: None, will_save_wait_until: None,
save: None, save: None,
} },
)), )),
hover_provider: None, hover_provider: None,
completion_provider: Some(CompletionOptions { completion_provider: Some(CompletionOptions {
@@ -28,7 +22,7 @@ pub fn server_capabilities() -> ServerCapabilities {
trigger_characters: None, trigger_characters: None,
}), }),
signature_help_provider: Some(SignatureHelpOptions { signature_help_provider: Some(SignatureHelpOptions {
trigger_characters: Some(vec!["(".to_string(), ",".to_string()]) trigger_characters: Some(vec!["(".to_string(), ",".to_string()]),
}), }),
definition_provider: Some(true), definition_provider: Some(true),
type_definition_provider: None, type_definition_provider: None,

View file

@@ -1,17 +1,12 @@
use languageserver_types::{ use languageserver_types::{
Range, SymbolKind, Position, TextEdit, Location, Url, Location, Position, Range, SymbolKind, TextDocumentEdit, TextDocumentIdentifier,
TextDocumentIdentifier, VersionedTextDocumentIdentifier, TextDocumentItem, TextDocumentItem, TextDocumentPositionParams, TextEdit, Url, VersionedTextDocumentIdentifier,
TextDocumentPositionParams, TextDocumentEdit,
}; };
use ra_editor::{LineIndex, LineCol, Edit, AtomEdit}; use ra_analysis::{FileId, FileSystemEdit, SourceChange, SourceFileEdit};
use ra_syntax::{SyntaxKind, TextUnit, TextRange}; use ra_editor::{AtomEdit, Edit, LineCol, LineIndex};
use ra_analysis::{FileId, SourceChange, SourceFileEdit, FileSystemEdit}; use ra_syntax::{SyntaxKind, TextRange, TextUnit};
use crate::{ use crate::{req, server_world::ServerWorld, Result};
Result,
server_world::ServerWorld,
req,
};
pub trait Conv { pub trait Conv {
type Output; type Output;
@@ -190,8 +185,12 @@ impl TryConvWith for SourceChange {
None => None, None => None,
Some(pos) => { Some(pos) => {
let line_index = world.analysis().file_line_index(pos.file_id); let line_index = world.analysis().file_line_index(pos.file_id);
let edits = self.source_file_edits.iter().find(|it| it.file_id == pos.file_id) let edits = self
.map(|it| it.edits.as_slice()).unwrap_or(&[]); .source_file_edits
.iter()
.find(|it| it.file_id == pos.file_id)
.map(|it| it.edits.as_slice())
.unwrap_or(&[]);
let line_col = translate_offset_with_edit(&*line_index, pos.offset, edits); let line_col = translate_offset_with_edit(&*line_index, pos.offset, edits);
let position = Position::new(line_col.line as u64, u32::from(line_col.col) as u64); let position = Position::new(line_col.line as u64, u32::from(line_col.col) as u64);
Some(TextDocumentPositionParams { Some(TextDocumentPositionParams {
@@ -224,11 +223,11 @@ fn translate_offset_with_edit(
let fallback = pre_edit_index.line_col(offset); let fallback = pre_edit_index.line_col(offset);
let edit = match edits.first() { let edit = match edits.first() {
None => return fallback, None => return fallback,
Some(edit) => edit Some(edit) => edit,
}; };
let end_offset = edit.delete.start() + TextUnit::of_str(&edit.insert); let end_offset = edit.delete.start() + TextUnit::of_str(&edit.insert);
if !(edit.delete.start() <= offset && offset <= end_offset) { if !(edit.delete.start() <= offset && offset <= end_offset) {
return fallback return fallback;
} }
let rel_offset = offset - edit.delete.start(); let rel_offset = offset - edit.delete.start();
let in_edit_line_col = LineIndex::new(&edit.insert).line_col(rel_offset); let in_edit_line_col = LineIndex::new(&edit.insert).line_col(rel_offset);
@@ -255,11 +254,11 @@ impl TryConvWith for SourceFileEdit {
version: None, version: None,
}; };
let line_index = world.analysis().file_line_index(self.file_id); let line_index = world.analysis().file_line_index(self.file_id);
let edits = self.edits let edits = self.edits.into_iter().map_conv_with(&line_index).collect();
.into_iter() Ok(TextDocumentEdit {
.map_conv_with(&line_index) text_document,
.collect(); edits,
Ok(TextDocumentEdit { text_document, edits }) })
} }
} }
@@ -273,13 +272,13 @@ impl TryConvWith for FileSystemEdit {
let path = &path.as_str()[3..]; // strip `../` b/c url is weird let path = &path.as_str()[3..]; // strip `../` b/c url is weird
let uri = uri.join(path)?; let uri = uri.join(path)?;
req::FileSystemEdit::CreateFile { uri } req::FileSystemEdit::CreateFile { uri }
}, }
FileSystemEdit::MoveFile { file, path } => { FileSystemEdit::MoveFile { file, path } => {
let src = world.file_id_to_uri(file)?; let src = world.file_id_to_uri(file)?;
let path = &path.as_str()[3..]; // strip `../` b/c url is weird let path = &path.as_str()[3..]; // strip `../` b/c url is weird
let dst = src.join(path)?; let dst = src.join(path)?;
req::FileSystemEdit::MoveFile { src, dst } req::FileSystemEdit::MoveFile { src, dst }
}, }
}; };
Ok(res) Ok(res)
} }
@@ -292,10 +291,7 @@ pub fn to_location(
line_index: &LineIndex, line_index: &LineIndex,
) -> Result<Location> { ) -> Result<Location> {
let url = file_id.try_conv_with(world)?; let url = file_id.try_conv_with(world)?;
let loc = Location::new( let loc = Location::new(url, range.conv_with(line_index));
url,
range.conv_with(line_index),
);
Ok(loc) Ok(loc)
} }
@@ -309,8 +305,9 @@ pub trait MapConvWith<'a>: Sized + 'a {
} }
impl<'a, I> MapConvWith<'a> for I impl<'a, I> MapConvWith<'a> for I
where I: Iterator + 'a, where
I::Item: ConvWith I: Iterator + 'a,
I::Item: ConvWith,
{ {
type Ctx = <I::Item as ConvWith>::Ctx; type Ctx = <I::Item as ConvWith>::Ctx;
type Output = <I::Item as ConvWith>::Output; type Output = <I::Item as ConvWith>::Output;
@@ -332,4 +329,3 @@ impl<'a, I, Ctx> Iterator for ConvWithIter<'a, I, Ctx>
self.iter.next().map(|item| item.conv_with(self.ctx)) self.iter.next().map(|item| item.conv_with(self.ctx))
} }
} }
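The `MapConvWith`/`ConvWithIter` pair above is an iterator adapter that threads one borrowed conversion context (a `LineIndex` in practice) through every item. A self-contained toy version of the same shape, with invented stand-in types (`LineIndexStub`, `line_of`), assuming nothing beyond the standard library:

// Toy context-threading adapter: one borrowed context, applied lazily to
// each item, never cloned per element.
struct LineIndexStub;

impl LineIndexStub {
    fn line_of(&self, offset: u32) -> u32 {
        offset / 80 // pretend every line is exactly 80 bytes wide
    }
}

struct ConvIter<'a, I> {
    iter: I,
    ctx: &'a LineIndexStub,
}

impl<'a, I: Iterator<Item = u32>> Iterator for ConvIter<'a, I> {
    type Item = u32;
    fn next(&mut self) -> Option<u32> {
        self.iter.next().map(|offset| self.ctx.line_of(offset))
    }
}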
View file
@@ -2,39 +2,36 @@
extern crate failure; extern crate failure;
#[macro_use] #[macro_use]
extern crate serde_derive; extern crate serde_derive;
extern crate languageserver_types;
extern crate serde; extern crate serde;
extern crate serde_json; extern crate serde_json;
extern crate languageserver_types;
#[macro_use] #[macro_use]
extern crate crossbeam_channel; extern crate crossbeam_channel;
extern crate rayon; extern crate rayon;
#[macro_use] #[macro_use]
extern crate log; extern crate log;
extern crate cargo_metadata;
extern crate drop_bomb; extern crate drop_bomb;
extern crate url_serde;
extern crate walkdir;
extern crate im; extern crate im;
extern crate relative_path; extern crate relative_path;
extern crate cargo_metadata;
extern crate rustc_hash; extern crate rustc_hash;
extern crate url_serde;
extern crate walkdir;
extern crate gen_lsp_server; extern crate gen_lsp_server;
extern crate ra_editor;
extern crate ra_analysis; extern crate ra_analysis;
extern crate ra_editor;
extern crate ra_syntax; extern crate ra_syntax;
mod caps; mod caps;
pub mod req;
mod conv; mod conv;
mod main_loop; mod main_loop;
mod vfs;
mod path_map; mod path_map;
mod server_world;
mod project_model; mod project_model;
pub mod req;
mod server_world;
pub mod thread_watcher; pub mod thread_watcher;
mod vfs;
pub type Result<T> = ::std::result::Result<T, ::failure::Error>; pub type Result<T> = ::std::result::Result<T, ::failure::Error>;
pub use crate::{ pub use crate::{caps::server_capabilities, main_loop::main_loop};
main_loop::main_loop,
caps::server_capabilities,
};
View file
@@ -6,7 +6,7 @@ extern crate flexi_logger;
extern crate gen_lsp_server; extern crate gen_lsp_server;
extern crate ra_lsp_server; extern crate ra_lsp_server;
use flexi_logger::{Logger, Duplicate}; use flexi_logger::{Duplicate, Logger};
use gen_lsp_server::{run_server, stdio_transport}; use gen_lsp_server::{run_server, stdio_transport};
use ra_lsp_server::Result; use ra_lsp_server::Result;
@@ -38,7 +38,8 @@ fn main_inner() -> Result<()> {
receiver, receiver,
sender, sender,
|params, r, s| { |params, r, s| {
let root = params.root_uri let root = params
.root_uri
.and_then(|it| it.to_file_path().ok()) .and_then(|it| it.to_file_path().ok())
.unwrap_or(cwd); .unwrap_or(cwd);
ra_lsp_server::main_loop(false, root, r, s) ra_lsp_server::main_loop(false, root, r, s)
@@ -49,4 +50,3 @@ fn main_inner() -> Result<()> {
info!("... IO is down"); info!("... IO is down");
Ok(()) Ok(())
} }
View file
@@ -1,23 +1,20 @@
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use languageserver_types::{ use languageserver_types::{
Diagnostic, DiagnosticSeverity, DocumentSymbol, CodeActionResponse, Command, CompletionItem, CompletionItemKind, Diagnostic,
CodeActionResponse, Command, TextDocumentIdentifier, DiagnosticSeverity, DocumentSymbol, FoldingRange, FoldingRangeKind, FoldingRangeParams,
SymbolInformation, Position, Location, TextEdit, InsertTextFormat, Location, Position, SymbolInformation, TextDocumentIdentifier, TextEdit,
CompletionItem, InsertTextFormat, CompletionItemKind,
FoldingRange, FoldingRangeParams, FoldingRangeKind
}; };
use ra_analysis::{FileId, FoldKind, JobToken, Query, RunnableKind};
use ra_syntax::text_utils::contains_offset_nonstrict;
use serde_json::to_value; use serde_json::to_value;
use ra_analysis::{Query, FileId, RunnableKind, JobToken, FoldKind};
use ra_syntax::{
text_utils::contains_offset_nonstrict
};
use crate::{ use crate::{
req::{self, Decoration}, Result, conv::{to_location, Conv, ConvWith, MapConvWith, TryConvWith},
conv::{Conv, ConvWith, TryConvWith, MapConvWith, to_location},
server_world::ServerWorld,
project_model::TargetKind, project_model::TargetKind,
req::{self, Decoration},
server_world::ServerWorld,
Result,
}; };
pub fn handle_syntax_tree( pub fn handle_syntax_tree(
@@ -38,7 +35,9 @@ pub fn handle_extend_selection(
let file_id = params.text_document.try_conv_with(&world)?; let file_id = params.text_document.try_conv_with(&world)?;
let file = world.analysis().file_syntax(file_id); let file = world.analysis().file_syntax(file_id);
let line_index = world.analysis().file_line_index(file_id); let line_index = world.analysis().file_line_index(file_id);
let selections = params.selections.into_iter() let selections = params
.selections
.into_iter()
.map_conv_with(&line_index) .map_conv_with(&line_index)
.map(|r| world.analysis().extend_selection(&file, r)) .map(|r| world.analysis().extend_selection(&file, r))
.map_conv_with(&line_index) .map_conv_with(&line_index)
@@ -54,11 +53,15 @@ pub fn handle_find_matching_brace(
let file_id = params.text_document.try_conv_with(&world)?; let file_id = params.text_document.try_conv_with(&world)?;
let file = world.analysis().file_syntax(file_id); let file = world.analysis().file_syntax(file_id);
let line_index = world.analysis().file_line_index(file_id); let line_index = world.analysis().file_line_index(file_id);
let res = params.offsets let res = params
.offsets
.into_iter() .into_iter()
.map_conv_with(&line_index) .map_conv_with(&line_index)
.map(|offset| { .map(|offset| {
world.analysis().matching_brace(&file, offset).unwrap_or(offset) world
.analysis()
.matching_brace(&file, offset)
.unwrap_or(offset)
}) })
.map_conv_with(&line_index) .map_conv_with(&line_index)
.collect(); .collect();
@@ -73,7 +76,9 @@ pub fn handle_join_lines(
let file_id = params.text_document.try_conv_with(&world)?; let file_id = params.text_document.try_conv_with(&world)?;
let line_index = world.analysis().file_line_index(file_id); let line_index = world.analysis().file_line_index(file_id);
let range = params.range.conv_with(&line_index); let range = params.range.conv_with(&line_index);
world.analysis().join_lines(file_id, range) world
.analysis()
.join_lines(file_id, range)
.try_conv_with(&world) .try_conv_with(&world)
} }
@@ -87,7 +92,7 @@ pub fn handle_on_enter(
let offset = params.position.conv_with(&line_index); let offset = params.position.conv_with(&line_index);
match world.analysis().on_enter(file_id, offset) { match world.analysis().on_enter(file_id, offset) {
None => Ok(None), None => Ok(None),
Some(edit) => Ok(Some(edit.try_conv_with(&world)?)) Some(edit) => Ok(Some(edit.try_conv_with(&world)?)),
} }
} }
@@ -158,7 +163,9 @@ pub fn handle_workspace_symbol(
let all_symbols = params.query.contains("#"); let all_symbols = params.query.contains("#");
let libs = params.query.contains("*"); let libs = params.query.contains("*");
let query = { let query = {
let query: String = params.query.chars() let query: String = params
.query
.chars()
.filter(|&c| c != '#' && c != '*') .filter(|&c| c != '#' && c != '*')
.collect(); .collect();
let mut q = Query::new(query); let mut q = Query::new(query);
@@ -180,22 +187,23 @@ pub fn handle_workspace_symbol(
return Ok(Some(res)); return Ok(Some(res));
fn exec_query(world: &ServerWorld, query: Query, token: &JobToken) -> Result<Vec<SymbolInformation>> { fn exec_query(
world: &ServerWorld,
query: Query,
token: &JobToken,
) -> Result<Vec<SymbolInformation>> {
let mut res = Vec::new(); let mut res = Vec::new();
for (file_id, symbol) in world.analysis().symbol_search(query, token) { for (file_id, symbol) in world.analysis().symbol_search(query, token) {
let line_index = world.analysis().file_line_index(file_id); let line_index = world.analysis().file_line_index(file_id);
let info = SymbolInformation { let info = SymbolInformation {
name: symbol.name.to_string(), name: symbol.name.to_string(),
kind: symbol.kind.conv(), kind: symbol.kind.conv(),
location: to_location( location: to_location(file_id, symbol.node_range, world, &line_index)?,
file_id, symbol.node_range,
world, &line_index
)?,
container_name: None, container_name: None,
deprecated: None, deprecated: None,
}; };
res.push(info); res.push(info);
}; }
Ok(res) Ok(res)
} }
} }
@@ -209,12 +217,12 @@ pub fn handle_goto_definition(
let line_index = world.analysis().file_line_index(file_id); let line_index = world.analysis().file_line_index(file_id);
let offset = params.position.conv_with(&line_index); let offset = params.position.conv_with(&line_index);
let mut res = Vec::new(); let mut res = Vec::new();
for (file_id, symbol) in world.analysis().approximately_resolve_symbol(file_id, offset, &token) { for (file_id, symbol) in world
.analysis()
.approximately_resolve_symbol(file_id, offset, &token)
{
let line_index = world.analysis().file_line_index(file_id); let line_index = world.analysis().file_line_index(file_id);
let location = to_location( let location = to_location(file_id, symbol.node_range, &world, &line_index)?;
file_id, symbol.node_range,
&world, &line_index,
)?;
res.push(location) res.push(location)
} }
Ok(Some(req::GotoDefinitionResponse::Array(res))) Ok(Some(req::GotoDefinitionResponse::Array(res)))
@@ -229,10 +237,7 @@ pub fn handle_parent_module(
let mut res = Vec::new(); let mut res = Vec::new();
for (file_id, symbol) in world.analysis().parent_module(file_id) { for (file_id, symbol) in world.analysis().parent_module(file_id) {
let line_index = world.analysis().file_line_index(file_id); let line_index = world.analysis().file_line_index(file_id);
let location = to_location( let location = to_location(file_id, symbol.node_range, &world, &line_index)?;
file_id, symbol.node_range,
&world, &line_index
)?;
res.push(location); res.push(location);
} }
Ok(res) Ok(res)
@@ -259,21 +264,16 @@ pub fn handle_runnables(
let r = req::Runnable { let r = req::Runnable {
range: runnable.range.conv_with(&line_index), range: runnable.range.conv_with(&line_index),
label: match &runnable.kind { label: match &runnable.kind {
RunnableKind::Test { name } => RunnableKind::Test { name } => format!("test {}", name),
format!("test {}", name), RunnableKind::Bin => "run binary".to_string(),
RunnableKind::Bin =>
"run binary".to_string(),
}, },
bin: "cargo".to_string(), bin: "cargo".to_string(),
args, args,
env: { env: {
let mut m = FxHashMap::default(); let mut m = FxHashMap::default();
m.insert( m.insert("RUST_BACKTRACE".to_string(), "short".to_string());
"RUST_BACKTRACE".to_string(),
"short".to_string(),
);
m m
} },
}; };
res.push(r); res.push(r);
} }
@@ -283,10 +283,16 @@ pub fn handle_runnables(
let spec = if let Some(&crate_id) = world.analysis().crate_for(file_id).first() { let spec = if let Some(&crate_id) = world.analysis().crate_for(file_id).first() {
let file_id = world.analysis().crate_root(crate_id); let file_id = world.analysis().crate_root(crate_id);
let path = world.path_map.get_path(file_id); let path = world.path_map.get_path(file_id);
world.workspaces.iter() world
.workspaces
.iter()
.filter_map(|ws| { .filter_map(|ws| {
let tgt = ws.target_by_root(path)?; let tgt = ws.target_by_root(path)?;
Some((tgt.package(ws).name(ws).clone(), tgt.name(ws).clone(), tgt.kind(ws))) Some((
tgt.package(ws).name(ws).clone(),
tgt.name(ws).clone(),
tgt.kind(ws),
))
}) })
.next() .next()
} else { } else {
@@ -362,7 +368,8 @@ pub fn handle_completion(
None => return Ok(None), None => return Ok(None),
Some(items) => items, Some(items) => items,
}; };
let items = items.into_iter() let items = items
.into_iter()
.map(|item| { .map(|item| {
let mut res = CompletionItem { let mut res = CompletionItem {
label: item.label, label: item.label,
@@ -389,13 +396,15 @@ pub fn handle_folding_range(
let file_id = params.text_document.try_conv_with(&world)?; let file_id = params.text_document.try_conv_with(&world)?;
let line_index = world.analysis().file_line_index(file_id); let line_index = world.analysis().file_line_index(file_id);
let res = Some(world.analysis() let res = Some(
world
.analysis()
.folding_ranges(file_id) .folding_ranges(file_id)
.into_iter() .into_iter()
.map(|fold| { .map(|fold| {
let kind = match fold.kind { let kind = match fold.kind {
FoldKind::Comment => FoldingRangeKind::Comment, FoldKind::Comment => FoldingRangeKind::Comment,
FoldKind::Imports => FoldingRangeKind::Imports FoldKind::Imports => FoldingRangeKind::Imports,
}; };
let range = fold.range.conv_with(&line_index); let range = fold.range.conv_with(&line_index);
FoldingRange { FoldingRange {
@@ -403,10 +412,11 @@ pub fn handle_folding_range(
start_character: Some(range.start.character), start_character: Some(range.start.character),
end_line: range.end.line, end_line: range.end.line,
end_character: Some(range.start.character), end_character: Some(range.start.character),
kind: Some(kind) kind: Some(kind),
} }
}) })
.collect()); .collect(),
);
Ok(res) Ok(res)
} }
@@ -422,25 +432,28 @@ pub fn handle_signature_help(
let line_index = world.analysis().file_line_index(file_id); let line_index = world.analysis().file_line_index(file_id);
let offset = params.position.conv_with(&line_index); let offset = params.position.conv_with(&line_index);
if let Some((descriptor, active_param)) = world.analysis().resolve_callable(file_id, offset, &token) { if let Some((descriptor, active_param)) =
let parameters : Vec<ParameterInformation> = world.analysis().resolve_callable(file_id, offset, &token)
descriptor.params.iter().map(|param| {
ParameterInformation { let parameters: Vec<ParameterInformation> = descriptor
.params
.iter()
.map(|param| ParameterInformation {
label: param.clone(), label: param.clone(),
documentation: None documentation: None,
} })
).collect(); .collect();
let sig_info = SignatureInformation { let sig_info = SignatureInformation {
label: descriptor.label, label: descriptor.label,
documentation: None, documentation: None,
parameters: Some(parameters) parameters: Some(parameters),
}; };
Ok(Some(req::SignatureHelp { Ok(Some(req::SignatureHelp {
signatures: vec![sig_info], signatures: vec![sig_info],
active_signature: Some(0), active_signature: Some(0),
active_parameter: active_param.map(|a| a as u64) active_parameter: active_param.map(|a| a as u64),
})) }))
} else { } else {
Ok(None) Ok(None)
@@ -457,7 +470,10 @@ pub fn handle_code_action(
let range = params.range.conv_with(&line_index); let range = params.range.conv_with(&line_index);
let assists = world.analysis().assists(file_id, range).into_iter(); let assists = world.analysis().assists(file_id, range).into_iter();
let fixes = world.analysis().diagnostics(file_id).into_iter() let fixes = world
.analysis()
.diagnostics(file_id)
.into_iter()
.filter_map(|d| Some((d.range, d.fix?))) .filter_map(|d| Some((d.range, d.fix?)))
.filter(|(range, _fix)| contains_offset_nonstrict(*range, range.start())) .filter(|(range, _fix)| contains_offset_nonstrict(*range, range.start()))
.map(|(_range, fix)| fix); .map(|(_range, fix)| fix);
@@ -483,7 +499,9 @@ pub fn publish_diagnostics(
) -> Result<req::PublishDiagnosticsParams> { ) -> Result<req::PublishDiagnosticsParams> {
let uri = world.file_id_to_uri(file_id)?; let uri = world.file_id_to_uri(file_id)?;
let line_index = world.analysis().file_line_index(file_id); let line_index = world.analysis().file_line_index(file_id);
let diagnostics = world.analysis().diagnostics(file_id) let diagnostics = world
.analysis()
.diagnostics(file_id)
.into_iter() .into_iter()
.map(|d| Diagnostic { .map(|d| Diagnostic {
range: d.range.conv_with(&line_index), range: d.range.conv_with(&line_index),
@@ -492,7 +510,8 @@ pub fn publish_diagnostics(
source: Some("rust-analyzer".to_string()), source: Some("rust-analyzer".to_string()),
message: d.message, message: d.message,
related_information: None, related_information: None,
}).collect(); })
.collect();
Ok(req::PublishDiagnosticsParams { uri, diagnostics }) Ok(req::PublishDiagnosticsParams { uri, diagnostics })
} }
@@ -509,10 +528,13 @@ pub fn publish_decorations(
fn highlight(world: &ServerWorld, file_id: FileId) -> Vec<Decoration> { fn highlight(world: &ServerWorld, file_id: FileId) -> Vec<Decoration> {
let line_index = world.analysis().file_line_index(file_id); let line_index = world.analysis().file_line_index(file_id);
world.analysis().highlight(file_id) world
.analysis()
.highlight(file_id)
.into_iter() .into_iter()
.map(|h| Decoration { .map(|h| Decoration {
range: h.range.conv_with(&line_index), range: h.range.conv_with(&line_index),
tag: h.tag, tag: h.tag,
}).collect() })
.collect()
} }
View file
@@ -1,29 +1,26 @@
mod handlers; mod handlers;
mod subscriptions; mod subscriptions;
use std::{ use std::path::PathBuf;
path::PathBuf,
};
use serde::{Serialize, de::DeserializeOwned}; use crossbeam_channel::{unbounded, Receiver, Sender};
use crossbeam_channel::{unbounded, Sender, Receiver};
use rayon::{self, ThreadPool};
use languageserver_types::{NumberOrString};
use ra_analysis::{FileId, JobHandle, JobToken, LibraryData};
use gen_lsp_server::{ use gen_lsp_server::{
RawRequest, RawNotification, RawMessage, RawResponse, ErrorCode, handle_shutdown, ErrorCode, RawMessage, RawNotification, RawRequest, RawResponse,
handle_shutdown,
}; };
use languageserver_types::NumberOrString;
use ra_analysis::{FileId, JobHandle, JobToken, LibraryData};
use rayon::{self, ThreadPool};
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use serde::{de::DeserializeOwned, Serialize};
use crate::{ use crate::{
main_loop::subscriptions::Subscriptions,
project_model::{workspace_loader, CargoWorkspace},
req, req,
Result, server_world::{ServerWorld, ServerWorldState},
vfs::{self, FileEvent},
server_world::{ServerWorldState, ServerWorld},
main_loop::subscriptions::{Subscriptions},
project_model::{CargoWorkspace, workspace_loader},
thread_watcher::Worker, thread_watcher::Worker,
vfs::{self, FileEvent},
Result,
}; };
#[derive(Debug)] #[derive(Debug)]
@@ -147,8 +144,7 @@ fn main_loop_inner(
} }
state_changed = true; state_changed = true;
} }
Event::Ws(ws) => { Event::Ws(ws) => match ws {
match ws {
Ok(ws) => { Ok(ws) => {
let workspaces = vec![ws]; let workspaces = vec![ws];
feedback(internal_mode, "workspace loaded", msg_sender); feedback(internal_mode, "workspace loaded", msg_sender);
@@ -162,14 +158,12 @@ fn main_loop_inner(
state_changed = true; state_changed = true;
} }
Err(e) => warn!("loading workspace failed: {}", e), Err(e) => warn!("loading workspace failed: {}", e),
} },
}
Event::Lib(lib) => { Event::Lib(lib) => {
feedback(internal_mode, "library loaded", msg_sender); feedback(internal_mode, "library loaded", msg_sender);
state.add_lib(lib); state.add_lib(lib);
} }
Event::Msg(msg) => { Event::Msg(msg) => match msg {
match msg {
RawMessage::Request(req) => { RawMessage::Request(req) => {
let req = match handle_shutdown(req, msg_sender) { let req = match handle_shutdown(req, msg_sender) {
Some(req) => req, Some(req) => req,
@@ -192,11 +186,8 @@ fn main_loop_inner(
on_notification(msg_sender, state, pending_requests, subs, not)?; on_notification(msg_sender, state, pending_requests, subs, not)?;
state_changed = true; state_changed = true;
} }
RawMessage::Response(resp) => { RawMessage::Response(resp) => error!("unexpected response: {:?}", resp),
error!("unexpected response: {:?}", resp) },
}
}
}
}; };
if state_changed { if state_changed {
@@ -222,8 +213,7 @@ fn on_task(
} }
msg_sender.send(RawMessage::Response(response)) msg_sender.send(RawMessage::Response(response))
} }
Task::Notify(n) => Task::Notify(n) => msg_sender.send(RawMessage::Notification(n)),
msg_sender.send(RawMessage::Notification(n)),
} }
} }
@@ -237,7 +227,9 @@ fn on_request(
let mut pool_dispatcher = PoolDispatcher { let mut pool_dispatcher = PoolDispatcher {
req: Some(req), req: Some(req),
res: None, res: None,
pool, world, sender pool,
world,
sender,
}; };
let req = pool_dispatcher let req = pool_dispatcher
.on::<req::SyntaxTree>(handlers::handle_syntax_tree)? .on::<req::SyntaxTree>(handlers::handle_syntax_tree)?
@@ -262,7 +254,7 @@ fn on_request(
let inserted = pending_requests.insert(id, handle).is_none(); let inserted = pending_requests.insert(id, handle).is_none();
assert!(inserted, "duplicate request: {}", id); assert!(inserted, "duplicate request: {}", id);
Ok(None) Ok(None)
}, }
Err(req) => Ok(Some(req)), Err(req) => Ok(Some(req)),
} }
} }
@@ -285,45 +277,53 @@ fn on_notification(
if let Some(handle) = pending_requests.remove(&id) { if let Some(handle) = pending_requests.remove(&id) {
handle.cancel(); handle.cancel();
} }
return Ok(()) return Ok(());
} }
Err(not) => not, Err(not) => not,
}; };
let not = match not.cast::<req::DidOpenTextDocument>() { let not = match not.cast::<req::DidOpenTextDocument>() {
Ok(params) => { Ok(params) => {
let uri = params.text_document.uri; let uri = params.text_document.uri;
let path = uri.to_file_path() let path = uri
.to_file_path()
.map_err(|()| format_err!("invalid uri: {}", uri))?; .map_err(|()| format_err!("invalid uri: {}", uri))?;
let file_id = state.add_mem_file(path, params.text_document.text); let file_id = state.add_mem_file(path, params.text_document.text);
subs.add_sub(file_id); subs.add_sub(file_id);
return Ok(()) return Ok(());
} }
Err(not) => not, Err(not) => not,
}; };
let not = match not.cast::<req::DidChangeTextDocument>() { let not = match not.cast::<req::DidChangeTextDocument>() {
Ok(mut params) => { Ok(mut params) => {
let uri = params.text_document.uri; let uri = params.text_document.uri;
let path = uri.to_file_path() let path = uri
.to_file_path()
.map_err(|()| format_err!("invalid uri: {}", uri))?; .map_err(|()| format_err!("invalid uri: {}", uri))?;
let text = params.content_changes.pop() let text = params
.content_changes
.pop()
.ok_or_else(|| format_err!("empty changes"))? .ok_or_else(|| format_err!("empty changes"))?
.text; .text;
state.change_mem_file(path.as_path(), text)?; state.change_mem_file(path.as_path(), text)?;
return Ok(()) return Ok(());
} }
Err(not) => not, Err(not) => not,
}; };
let not = match not.cast::<req::DidCloseTextDocument>() { let not = match not.cast::<req::DidCloseTextDocument>() {
Ok(params) => { Ok(params) => {
let uri = params.text_document.uri; let uri = params.text_document.uri;
let path = uri.to_file_path() let path = uri
.to_file_path()
.map_err(|()| format_err!("invalid uri: {}", uri))?; .map_err(|()| format_err!("invalid uri: {}", uri))?;
let file_id = state.remove_mem_file(path.as_path())?; let file_id = state.remove_mem_file(path.as_path())?;
subs.remove_sub(file_id); subs.remove_sub(file_id);
let params = req::PublishDiagnosticsParams { uri, diagnostics: Vec::new() }; let params = req::PublishDiagnosticsParams {
uri,
diagnostics: Vec::new(),
};
let not = RawNotification::new::<req::PublishDiagnostics>(&params); let not = RawNotification::new::<req::PublishDiagnostics>(&params);
msg_sender.send(RawMessage::Notification(not)); msg_sender.send(RawMessage::Notification(not));
return Ok(()) return Ok(());
} }
Err(not) => not, Err(not) => not,
}; };
@@ -342,9 +342,10 @@ struct PoolDispatcher<'a> {
impl<'a> PoolDispatcher<'a> { impl<'a> PoolDispatcher<'a> {
fn on<'b, R>( fn on<'b, R>(
&'b mut self, &'b mut self,
f: fn(ServerWorld, R::Params, JobToken) -> Result<R::Result> f: fn(ServerWorld, R::Params, JobToken) -> Result<R::Result>,
) -> Result<&'b mut Self> ) -> Result<&'b mut Self>
where R: req::Request, where
R: req::Request,
R::Params: DeserializeOwned + Send + 'static, R::Params: DeserializeOwned + Send + 'static,
R::Result: Serialize + 'static, R::Result: Serialize + 'static,
{ {
@@ -360,16 +361,16 @@ impl<'a> PoolDispatcher<'a> {
self.pool.spawn(move || { self.pool.spawn(move || {
let resp = match f(world, params, token) { let resp = match f(world, params, token) {
Ok(resp) => RawResponse::ok::<R>(id, &resp), Ok(resp) => RawResponse::ok::<R>(id, &resp),
Err(e) => RawResponse::err(id, ErrorCode::InternalError as i32, e.to_string()), Err(e) => {
RawResponse::err(id, ErrorCode::InternalError as i32, e.to_string())
}
}; };
let task = Task::Respond(resp); let task = Task::Respond(resp);
sender.send(task); sender.send(task);
}); });
self.res = Some((id, handle)); self.res = Some((id, handle));
} }
Err(req) => { Err(req) => self.req = Some(req),
self.req = Some(req)
}
} }
Ok(self) Ok(self)
} }
@@ -392,18 +393,14 @@ fn update_file_notifications_on_threadpool(
pool.spawn(move || { pool.spawn(move || {
for file_id in subscriptions { for file_id in subscriptions {
match handlers::publish_diagnostics(&world, file_id) { match handlers::publish_diagnostics(&world, file_id) {
Err(e) => { Err(e) => error!("failed to compute diagnostics: {:?}", e),
error!("failed to compute diagnostics: {:?}", e)
}
Ok(params) => { Ok(params) => {
let not = RawNotification::new::<req::PublishDiagnostics>(&params); let not = RawNotification::new::<req::PublishDiagnostics>(&params);
sender.send(Task::Notify(not)); sender.send(Task::Notify(not));
} }
} }
match handlers::publish_decorations(&world, file_id) { match handlers::publish_decorations(&world, file_id) {
Err(e) => { Err(e) => error!("failed to compute decorations: {:?}", e),
error!("failed to compute decorations: {:?}", e)
}
Ok(params) => { Ok(params) => {
let not = RawNotification::new::<req::PublishDecorations>(&params); let not = RawNotification::new::<req::PublishDecorations>(&params);
sender.send(Task::Notify(not)) sender.send(Task::Notify(not))
View file
@@ -1,5 +1,5 @@
use rustc_hash::FxHashSet;
use ra_analysis::FileId; use ra_analysis::FileId;
use rustc_hash::FxHashSet;
pub struct Subscriptions { pub struct Subscriptions {
subs: FxHashSet<FileId>, subs: FxHashSet<FileId>,
@@ -7,7 +7,9 @@ pub struct Subscriptions {
impl Subscriptions { impl Subscriptions {
pub fn new() -> Subscriptions { pub fn new() -> Subscriptions {
Subscriptions { subs: FxHashSet::default() } Subscriptions {
subs: FxHashSet::default(),
}
} }
pub fn add_sub(&mut self, file_id: FileId) { pub fn add_sub(&mut self, file_id: FileId) {
self.subs.insert(file_id); self.subs.insert(file_id);
View file
@@ -1,11 +1,13 @@
use std::path::{PathBuf, Path, Component};
use im; use im;
use relative_path::RelativePath;
use ra_analysis::{FileId, FileResolver}; use ra_analysis::{FileId, FileResolver};
use relative_path::RelativePath;
use std::path::{Component, Path, PathBuf};
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Root { pub enum Root {
Workspace, Lib Workspace,
Lib,
} }
#[derive(Debug, Default, Clone)] #[derive(Debug, Default, Clone)]
@@ -21,7 +23,8 @@ impl PathMap {
Default::default() Default::default()
} }
pub fn get_or_insert(&mut self, path: PathBuf, root: Root) -> FileId { pub fn get_or_insert(&mut self, path: PathBuf, root: Root) -> FileId {
self.path2id.get(path.as_path()) self.path2id
.get(path.as_path())
.map(|&id| id) .map(|&id| id)
.unwrap_or_else(|| { .unwrap_or_else(|| {
let id = self.new_file_id(); let id = self.new_file_id();
@@ -33,9 +36,7 @@ impl PathMap {
self.path2id.get(path).map(|&id| id) self.path2id.get(path).map(|&id| id)
} }
pub fn get_path(&self, file_id: FileId) -> &Path { pub fn get_path(&self, file_id: FileId) -> &Path {
self.id2path.get(&file_id) self.id2path.get(&file_id).unwrap().as_path()
.unwrap()
.as_path()
} }
pub fn get_root(&self, file_id: FileId) -> Root { pub fn get_root(&self, file_id: FileId) -> Root {
self.id2root[&file_id] self.id2root[&file_id]
@@ -55,7 +56,12 @@ impl PathMap {
impl FileResolver for PathMap { impl FileResolver for PathMap {
fn file_stem(&self, file_id: FileId) -> String { fn file_stem(&self, file_id: FileId) -> String {
self.get_path(file_id).file_stem().unwrap().to_str().unwrap().to_string() self.get_path(file_id)
.file_stem()
.unwrap()
.to_str()
.unwrap()
.to_string()
} }
fn resolve(&self, file_id: FileId, path: &RelativePath) -> Option<FileId> { fn resolve(&self, file_id: FileId, path: &RelativePath) -> Option<FileId> {
@@ -101,10 +107,6 @@ mod test {
let mut m = PathMap::new(); let mut m = PathMap::new();
let id1 = m.get_or_insert(PathBuf::from("/foo"), Root::Workspace); let id1 = m.get_or_insert(PathBuf::from("/foo"), Root::Workspace);
let id2 = m.get_or_insert(PathBuf::from("/foo/bar.rs"), Root::Workspace); let id2 = m.get_or_insert(PathBuf::from("/foo/bar.rs"), Root::Workspace);
assert_eq!( assert_eq!(m.resolve(id1, &RelativePath::new("bar.rs")), Some(id2),)
m.resolve(id1, &RelativePath::new("bar.rs")),
Some(id2),
)
} }
} }
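The test above pins down the resolution semantics: "bar.rs" resolved against the file "/foo" yields "/foo/bar.rs", i.e. the relative path is joined onto the anchoring path itself, with `..` components popping a segment. A sketch of just that behaviour (this is not the `PathMap` implementation, only the semantics the test asserts):

use std::path::{Component, Path, PathBuf};

// Join a relative path onto an anchor, folding away `..` segments.
fn resolve_relative(anchor: &Path, rel: &str) -> PathBuf {
    let mut buf = anchor.to_path_buf();
    for c in Path::new(rel).components() {
        match c {
            Component::ParentDir => {
                buf.pop();
            }
            Component::Normal(seg) => buf.push(seg),
            _ => {} // `.` and prefix components are ignored here
        }
    }
    buf
}
// resolve_relative(Path::new("/foo"), "bar.rs") == PathBuf::from("/foo/bar.rs")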
View file
@@ -1,13 +1,12 @@
use std::{
path::{Path, PathBuf},
};
use rustc_hash::{FxHashMap, FxHashSet};
use cargo_metadata::{metadata_run, CargoOpt}; use cargo_metadata::{metadata_run, CargoOpt};
use ra_syntax::SmolStr; use ra_syntax::SmolStr;
use rustc_hash::{FxHashMap, FxHashSet};
use std::path::{Path, PathBuf};
use crate::{ use crate::{
thread_watcher::{ThreadWatcher, Worker},
Result, Result,
thread_watcher::{Worker, ThreadWatcher},
}; };
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
@@ -39,7 +38,12 @@ struct TargetData {
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TargetKind { pub enum TargetKind {
Bin, Lib, Example, Test, Bench, Other, Bin,
Lib,
Example,
Test,
Bench,
Other,
} }
impl Package { impl Package {
@@ -78,13 +82,15 @@ impl CargoWorkspace {
let meta = metadata_run( let meta = metadata_run(
Some(cargo_toml.as_path()), Some(cargo_toml.as_path()),
true, true,
Some(CargoOpt::AllFeatures) Some(CargoOpt::AllFeatures),
).map_err(|e| format_err!("cargo metadata failed: {}", e))?; )
.map_err(|e| format_err!("cargo metadata failed: {}", e))?;
let mut pkg_by_id = FxHashMap::default(); let mut pkg_by_id = FxHashMap::default();
let mut packages = Vec::new(); let mut packages = Vec::new();
let mut targets = Vec::new(); let mut targets = Vec::new();
let ws_members: FxHashSet<String> = meta.workspace_members let ws_members: FxHashSet<String> = meta
.workspace_members
.into_iter() .into_iter()
.map(|it| it.raw) .map(|it| it.raw)
.collect(); .collect();
@@ -155,7 +161,7 @@ impl TargetKind {
"example" => TargetKind::Example, "example" => TargetKind::Example,
_ if kind.contains("lib") => TargetKind::Lib, _ if kind.contains("lib") => TargetKind::Lib,
_ => continue, _ => continue,
} };
} }
TargetKind::Other TargetKind::Other
} }
@@ -170,6 +176,6 @@ pub fn workspace_loader() -> (Worker<PathBuf, Result<CargoWorkspace>>, ThreadWat
.into_iter() .into_iter()
.map(|path| CargoWorkspace::from_cargo_metadata(path.as_path())) .map(|path| CargoWorkspace::from_cargo_metadata(path.as_path()))
.for_each(|it| output_sender.send(it)) .for_each(|it| output_sender.send(it))
} },
) )
} }
View file
@@ -1,20 +1,13 @@
use languageserver_types::{Location, Position, Range, TextDocumentIdentifier, Url};
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use languageserver_types::{TextDocumentIdentifier, Range, Url, Position, Location};
use url_serde; use url_serde;
pub use languageserver_types::{ pub use languageserver_types::{
request::*, notification::*, notification::*, request::*, ApplyWorkspaceEditParams, CodeActionParams, CompletionParams,
InitializeResult, PublishDiagnosticsParams, CompletionResponse, DocumentOnTypeFormattingParams, DocumentSymbolParams,
DocumentSymbolParams, DocumentSymbolResponse, DocumentSymbolResponse, ExecuteCommandParams, Hover, InitializeResult,
CodeActionParams, ApplyWorkspaceEditParams, PublishDiagnosticsParams, SignatureHelp, TextDocumentEdit, TextDocumentPositionParams,
ExecuteCommandParams, TextEdit, WorkspaceSymbolParams,
WorkspaceSymbolParams,
TextDocumentPositionParams,
TextEdit,
CompletionParams, CompletionResponse,
DocumentOnTypeFormattingParams,
TextDocumentEdit,
SignatureHelp, Hover
}; };
pub enum SyntaxTree {} pub enum SyntaxTree {}
@@ -28,7 +21,7 @@ impl Request for SyntaxTree {
#[derive(Deserialize, Debug)] #[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct SyntaxTreeParams { pub struct SyntaxTreeParams {
pub text_document: TextDocumentIdentifier pub text_document: TextDocumentIdentifier,
} }
pub enum ExtendSelection {} pub enum ExtendSelection {}
@@ -94,7 +87,7 @@ pub struct PublishDecorationsParams {
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct Decoration { pub struct Decoration {
pub range: Range, pub range: Range,
pub tag: &'static str pub tag: &'static str,
} }
pub enum ParentModule {} pub enum ParentModule {}
@@ -167,14 +160,14 @@ pub struct SourceChange {
pub enum FileSystemEdit { pub enum FileSystemEdit {
CreateFile { CreateFile {
#[serde(with = "url_serde")] #[serde(with = "url_serde")]
uri: Url uri: Url,
}, },
MoveFile { MoveFile {
#[serde(with = "url_serde")] #[serde(with = "url_serde")]
src: Url, src: Url,
#[serde(with = "url_serde")] #[serde(with = "url_serde")]
dst: Url, dst: Url,
} },
} }
pub enum InternalFeedback {} pub enum InternalFeedback {}
View file
@@ -1,18 +1,18 @@
use std::{ use std::{
fs, fs,
path::{PathBuf, Path}, path::{Path, PathBuf},
sync::Arc, sync::Arc,
}; };
use rustc_hash::FxHashMap;
use languageserver_types::Url; use languageserver_types::Url;
use ra_analysis::{FileId, AnalysisHost, Analysis, CrateGraph, CrateId, LibraryData, FileResolver}; use ra_analysis::{Analysis, AnalysisHost, CrateGraph, CrateId, FileId, FileResolver, LibraryData};
use rustc_hash::FxHashMap;
use crate::{ use crate::{
Result,
path_map::{PathMap, Root}, path_map::{PathMap, Root},
vfs::{FileEvent, FileEventKind},
project_model::CargoWorkspace, project_model::CargoWorkspace,
vfs::{FileEvent, FileEventKind},
Result,
}; };
#[derive(Debug)] #[derive(Debug)]
@@ -42,16 +42,15 @@ impl ServerWorldState {
{ {
let pm = &mut self.path_map; let pm = &mut self.path_map;
let mm = &mut self.mem_map; let mm = &mut self.mem_map;
let changes = events.into_iter() let changes = events
.into_iter()
.map(|event| { .map(|event| {
let text = match event.kind { let text = match event.kind {
FileEventKind::Add(text) => Some(text), FileEventKind::Add(text) => Some(text),
}; };
(event.path, text) (event.path, text)
}) })
.map(|(path, text)| { .map(|(path, text)| (pm.get_or_insert(path, Root::Workspace), text))
(pm.get_or_insert(path, Root::Workspace), text)
})
.filter_map(|(id, text)| { .filter_map(|(id, text)| {
if mm.contains_key(&id) { if mm.contains_key(&id) {
mm.insert(id, text); mm.insert(id, text);
@@ -62,12 +61,17 @@ impl ServerWorldState {
}); });
self.analysis_host.change_files(changes); self.analysis_host.change_files(changes);
} }
self.analysis_host.set_file_resolver(Arc::new(self.path_map.clone())); self.analysis_host
.set_file_resolver(Arc::new(self.path_map.clone()));
} }
pub fn events_to_files(&mut self, events: Vec<FileEvent>) -> (Vec<(FileId, String)>, Arc<FileResolver>) { pub fn events_to_files(
&mut self,
events: Vec<FileEvent>,
) -> (Vec<(FileId, String)>, Arc<FileResolver>) {
let files = { let files = {
let pm = &mut self.path_map; let pm = &mut self.path_map;
events.into_iter() events
.into_iter()
.map(|event| { .map(|event| {
let text = match event.kind { let text = match event.kind {
FileEventKind::Add(text) => text, FileEventKind::Add(text) => text,
@@ -86,7 +90,8 @@ impl ServerWorldState {
pub fn add_mem_file(&mut self, path: PathBuf, text: String) -> FileId { pub fn add_mem_file(&mut self, path: PathBuf, text: String) -> FileId {
let file_id = self.path_map.get_or_insert(path, Root::Workspace); let file_id = self.path_map.get_or_insert(path, Root::Workspace);
self.analysis_host.set_file_resolver(Arc::new(self.path_map.clone())); self.analysis_host
.set_file_resolver(Arc::new(self.path_map.clone()));
self.mem_map.insert(file_id, None); self.mem_map.insert(file_id, None);
if self.path_map.get_root(file_id) != Root::Lib { if self.path_map.get_root(file_id) != Root::Lib {
self.analysis_host.change_file(file_id, Some(text)); self.analysis_host.change_file(file_id, Some(text));
@@ -95,9 +100,10 @@ impl ServerWorldState {
} }
pub fn change_mem_file(&mut self, path: &Path, text: String) -> Result<()> { pub fn change_mem_file(&mut self, path: &Path, text: String) -> Result<()> {
let file_id = self.path_map.get_id(path).ok_or_else(|| { let file_id = self
format_err!("change to unknown file: {}", path.display()) .path_map
})?; .get_id(path)
.ok_or_else(|| format_err!("change to unknown file: {}", path.display()))?;
if self.path_map.get_root(file_id) != Root::Lib { if self.path_map.get_root(file_id) != Root::Lib {
self.analysis_host.change_file(file_id, Some(text)); self.analysis_host.change_file(file_id, Some(text));
} }
@@ -105,9 +111,10 @@ impl ServerWorldState {
} }
pub fn remove_mem_file(&mut self, path: &Path) -> Result<FileId> { pub fn remove_mem_file(&mut self, path: &Path) -> Result<FileId> {
let file_id = self.path_map.get_id(path).ok_or_else(|| { let file_id = self
format_err!("change to unknown file: {}", path.display()) .path_map
})?; .get_id(path)
.ok_or_else(|| format_err!("change to unknown file: {}", path.display()))?;
match self.mem_map.remove(&file_id) { match self.mem_map.remove(&file_id) {
Some(_) => (), Some(_) => (),
None => bail!("unmatched close notification"), None => bail!("unmatched close notification"),
@@ -141,7 +148,7 @@ impl ServerWorldState {
ServerWorld { ServerWorld {
workspaces: Arc::clone(&self.workspaces), workspaces: Arc::clone(&self.workspaces),
analysis: self.analysis_host.analysis(), analysis: self.analysis_host.analysis(),
path_map: self.path_map.clone() path_map: self.path_map.clone(),
} }
} }
} }
@@ -152,9 +159,12 @@ impl ServerWorld {
} }
pub fn uri_to_file_id(&self, uri: &Url) -> Result<FileId> { pub fn uri_to_file_id(&self, uri: &Url) -> Result<FileId> {
let path = uri.to_file_path() let path = uri
.to_file_path()
.map_err(|()| format_err!("invalid uri: {}", uri))?; .map_err(|()| format_err!("invalid uri: {}", uri))?;
self.path_map.get_id(&path).ok_or_else(|| format_err!("unknown file: {}", path.display())) self.path_map
.get_id(&path)
.ok_or_else(|| format_err!("unknown file: {}", path.display()))
} }
pub fn file_id_to_uri(&self, id: FileId) -> Result<Url> { pub fn file_id_to_uri(&self, id: FileId) -> Result<Url> {
View file
@@ -1,7 +1,8 @@
use std::thread;
use crossbeam_channel::{bounded, unbounded, Sender, Receiver};
use drop_bomb::DropBomb;
use crate::Result; use crate::Result;
use crossbeam_channel::{bounded, unbounded, Receiver, Sender};
use drop_bomb::DropBomb;
use std::thread;
pub struct Worker<I, O> { pub struct Worker<I, O> {
pub inp: Sender<I>, pub inp: Sender<I>,
@@ -50,11 +51,13 @@ impl ThreadWatcher {
info!("waiting for {} to finish ...", self.name); info!("waiting for {} to finish ...", self.name);
let name = self.name; let name = self.name;
self.bomb.defuse(); self.bomb.defuse();
let res = self.thread.join() let res = self
.thread
.join()
.map_err(|_| format_err!("ThreadWatcher {} died", name)); .map_err(|_| format_err!("ThreadWatcher {} died", name));
match &res { match &res {
Ok(()) => info!("... {} terminated with ok", name), Ok(()) => info!("... {} terminated with ok", name),
Err(_) => error!("... {} terminated with err", name) Err(_) => error!("... {} terminated with err", name),
} }
res res
} }
@@ -66,5 +69,9 @@ impl ThreadWatcher {
fn worker_chan<I, O>(buf: usize) -> ((Sender<I>, Receiver<O>), Receiver<I>, Sender<O>) { fn worker_chan<I, O>(buf: usize) -> ((Sender<I>, Receiver<O>), Receiver<I>, Sender<O>) {
let (input_sender, input_receiver) = bounded::<I>(buf); let (input_sender, input_receiver) = bounded::<I>(buf);
let (output_sender, output_receiver) = unbounded::<O>(); let (output_sender, output_receiver) = unbounded::<O>();
((input_sender, output_receiver), input_receiver, output_sender) (
(input_sender, output_receiver),
input_receiver,
output_sender,
)
} }
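A sketch of how the channel triple returned by `worker_chan` is meant to be wired, assuming the crossbeam-channel API used throughout this diff (`send` returns `()`, `recv` returns `Option<T>`): the bounded input side gives backpressure, while the unbounded output side never blocks the worker thread.

fn demo_worker() {
    let ((inp, out), input_receiver, output_sender) = worker_chan::<u32, u32>(128);
    let thread = std::thread::spawn(move || {
        // The loop ends once every input sender has been dropped.
        for x in input_receiver {
            output_sender.send(x * 2);
        }
    });
    inp.send(21);
    drop(inp); // close the input channel so the worker loop terminates
    assert_eq!(out.recv(), Some(42));
    thread.join().unwrap();
}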
View file
@@ -1,14 +1,11 @@
use std::{ use std::{
path::{PathBuf, Path},
fs, fs,
path::{Path, PathBuf},
}; };
use walkdir::WalkDir; use walkdir::WalkDir;
use crate::{ use crate::thread_watcher::{ThreadWatcher, Worker};
thread_watcher::{Worker, ThreadWatcher},
};
#[derive(Debug)] #[derive(Debug)]
pub struct FileEvent { pub struct FileEvent {
@@ -24,7 +21,8 @@ pub enum FileEventKind {
pub fn roots_loader() -> (Worker<PathBuf, (PathBuf, Vec<FileEvent>)>, ThreadWatcher) { pub fn roots_loader() -> (Worker<PathBuf, (PathBuf, Vec<FileEvent>)>, ThreadWatcher) {
Worker::<PathBuf, (PathBuf, Vec<FileEvent>)>::spawn( Worker::<PathBuf, (PathBuf, Vec<FileEvent>)>::spawn(
"roots loader", "roots loader",
128, |input_receiver, output_sender| { 128,
|input_receiver, output_sender| {
input_receiver input_receiver
.into_iter() .into_iter()
.map(|path| { .map(|path| {
@@ -34,7 +32,7 @@ pub fn roots_loader() -> (Worker<PathBuf, (PathBuf, Vec<FileEvent>)>, ThreadWatc
(path, events) (path, events)
}) })
.for_each(|it| output_sender.send(it)) .for_each(|it| output_sender.send(it))
} },
) )
} }
View file
@@ -1,12 +1,12 @@
#[macro_use] #[macro_use]
extern crate crossbeam_channel; extern crate crossbeam_channel;
extern crate tempdir; extern crate flexi_logger;
extern crate gen_lsp_server;
extern crate languageserver_types; extern crate languageserver_types;
extern crate ra_lsp_server;
extern crate serde; extern crate serde;
extern crate serde_json; extern crate serde_json;
extern crate gen_lsp_server; extern crate tempdir;
extern crate flexi_logger;
extern crate ra_lsp_server;
mod support; mod support;
@@ -14,17 +14,18 @@ use ra_lsp_server::req::{Runnables, RunnablesParams};
use crate::support::project; use crate::support::project;
const LOG: &'static str = ""; const LOG: &'static str = "";
#[test] #[test]
fn test_runnables_no_project() { fn test_runnables_no_project() {
let server = project(r" let server = project(
r"
//- lib.rs //- lib.rs
#[test] #[test]
fn foo() { fn foo() {
} }
"); ",
);
server.request::<Runnables>( server.request::<Runnables>(
RunnablesParams { RunnablesParams {
text_document: server.doc_id("lib.rs"), text_document: server.doc_id("lib.rs"),
@@ -41,13 +42,14 @@ fn foo() {
"start": { "character": 0, "line": 0 } "start": { "character": 0, "line": 0 }
} }
} }
]"# ]"#,
); );
} }
#[test] #[test]
fn test_runnables_project() { fn test_runnables_project() {
let server = project(r#" let server = project(
r#"
//- Cargo.toml //- Cargo.toml
[package] [package]
name = "foo" name = "foo"
@@ -59,7 +61,8 @@ pub fn foo() {}
//- tests/spam.rs //- tests/spam.rs
#[test] #[test]
fn test_eggs() {} fn test_eggs() {}
"#); "#,
);
server.wait_for_feedback("workspace loaded"); server.wait_for_feedback("workspace loaded");
server.request::<Runnables>( server.request::<Runnables>(
RunnablesParams { RunnablesParams {
View file
@@ -1,34 +1,33 @@
use std::{ use std::{
fs,
cell::{Cell, RefCell}, cell::{Cell, RefCell},
fs,
path::PathBuf, path::PathBuf,
time::Duration,
sync::Once, sync::Once,
time::Duration,
}; };
use tempdir::TempDir;
use crossbeam_channel::{after, Receiver}; use crossbeam_channel::{after, Receiver};
use flexi_logger::Logger; use flexi_logger::Logger;
use gen_lsp_server::{RawMessage, RawNotification, RawRequest};
use languageserver_types::{ use languageserver_types::{
Url,
TextDocumentIdentifier,
request::{Request, Shutdown},
notification::DidOpenTextDocument, notification::DidOpenTextDocument,
DidOpenTextDocumentParams, request::{Request, Shutdown},
TextDocumentItem, DidOpenTextDocumentParams, TextDocumentIdentifier, TextDocumentItem, Url,
}; };
use serde::Serialize; use serde::Serialize;
use serde_json::{Value, from_str, to_string_pretty}; use serde_json::{from_str, to_string_pretty, Value};
use gen_lsp_server::{RawMessage, RawRequest, RawNotification}; use tempdir::TempDir;
use ra_lsp_server::{main_loop, req, thread_watcher::{ThreadWatcher, Worker}}; use ra_lsp_server::{
main_loop, req,
thread_watcher::{ThreadWatcher, Worker},
};
pub fn project(fixture: &str) -> Server { pub fn project(fixture: &str) -> Server {
static INIT: Once = Once::new(); static INIT: Once = Once::new();
INIT.call_once(|| Logger::with_env_or_str(crate::LOG).start().unwrap()); INIT.call_once(|| Logger::with_env_or_str(crate::LOG).start().unwrap());
let tmp_dir = TempDir::new("test-project") let tmp_dir = TempDir::new("test-project").unwrap();
.unwrap();
let mut buf = String::new(); let mut buf = String::new();
let mut file_name = None; let mut file_name = None;
let mut paths = vec![]; let mut paths = vec![];
@@ -40,7 +39,7 @@ pub fn project(fixture: &str) -> Server {
fs::write(path.as_path(), buf.as_bytes()).unwrap(); fs::write(path.as_path(), buf.as_bytes()).unwrap();
paths.push((path, buf.clone())); paths.push((path, buf.clone()));
} }
} };
}; };
for line in fixture.lines() { for line in fixture.lines() {
if line.starts_with("//-") { if line.starts_with("//-") {
@@ -71,9 +70,8 @@ impl Server {
"test server", "test server",
128, 128,
move |mut msg_receiver, mut msg_sender| { move |mut msg_receiver, mut msg_sender| {
main_loop(true, path, &mut msg_receiver, &mut msg_sender) main_loop(true, path, &mut msg_receiver, &mut msg_sender).unwrap()
.unwrap() },
}
); );
let res = Server { let res = Server {
req_id: Cell::new(1), req_id: Cell::new(1),
@@ -91,8 +89,8 @@ impl Server {
language_id: "rust".to_string(), language_id: "rust".to_string(),
version: 0, version: 0,
text, text,
} },
} },
)) ))
} }
res res
@@ -105,11 +103,7 @@ impl Server {
} }
} }
pub fn request<R>( pub fn request<R>(&self, params: R::Params, expected_resp: &str)
&self,
params: R::Params,
expected_resp: &str,
)
where where
R: Request, R: Request,
R::Params: Serialize, R::Params: Serialize,
@@ -119,7 +113,8 @@ impl Server {
let expected_resp: Value = from_str(expected_resp).unwrap(); let expected_resp: Value = from_str(expected_resp).unwrap();
let actual = self.send_request::<R>(id, params); let actual = self.send_request::<R>(id, params);
assert_eq!( assert_eq!(
expected_resp, actual, expected_resp,
actual,
"Expected:\n{}\n\ "Expected:\n{}\n\
Actual:\n{}\n", Actual:\n{}\n",
to_string_pretty(&expected_resp).unwrap(), to_string_pretty(&expected_resp).unwrap(),
@@ -135,12 +130,9 @@ impl Server {
let r = RawRequest::new::<R>(id, &params); let r = RawRequest::new::<R>(id, &params);
self.send_request_(r) self.send_request_(r)
} }
fn send_request_(&self, r: RawRequest) -> Value fn send_request_(&self, r: RawRequest) -> Value {
{
let id = r.id; let id = r.id;
self.worker.as_ref() self.worker.as_ref().unwrap().send(RawMessage::Request(r));
.unwrap()
.send(RawMessage::Request(r));
while let Some(msg) = self.recv() { while let Some(msg) = self.recv() {
match msg { match msg {
RawMessage::Request(req) => panic!("unexpected request: {:?}", req), RawMessage::Request(req) => panic!("unexpected request: {:?}", req),
@@ -162,8 +154,7 @@ impl Server {
pub fn wait_for_feedback_n(&self, feedback: &str, n: usize) { pub fn wait_for_feedback_n(&self, feedback: &str, n: usize) {
let f = |msg: &RawMessage| match msg { let f = |msg: &RawMessage| match msg {
RawMessage::Notification(n) if n.method == "internalFeedback" => { RawMessage::Notification(n) if n.method == "internalFeedback" => {
return n.clone().cast::<req::InternalFeedback>() return n.clone().cast::<req::InternalFeedback>().unwrap() == feedback
.unwrap() == feedback
} }
_ => false, _ => false,
}; };
@@ -181,14 +172,14 @@ impl Server {
} }
} }
fn recv(&self) -> Option<RawMessage> { fn recv(&self) -> Option<RawMessage> {
recv_timeout(&self.worker.as_ref().unwrap().out) recv_timeout(&self.worker.as_ref().unwrap().out).map(|msg| {
.map(|msg| {
self.messages.borrow_mut().push(msg.clone()); self.messages.borrow_mut().push(msg.clone());
msg msg
}) })
} }
fn send_notification(&self, not: RawNotification) { fn send_notification(&self, not: RawNotification) {
self.worker.as_ref() self.worker
.as_ref()
.unwrap() .unwrap()
.send(RawMessage::Notification(not)); .send(RawMessage::Notification(not));
} }
@@ -201,10 +192,7 @@ impl Drop for Server {
while let Some(msg) = recv_timeout(&receiver) { while let Some(msg) = recv_timeout(&receiver) {
drop(msg); drop(msg);
} }
self.watcher.take() self.watcher.take().unwrap().stop().unwrap();
.unwrap()
.stop()
.unwrap();
} }
} }
View file
@@ -1,16 +1,18 @@
pub mod walk;
pub mod visit; pub mod visit;
pub mod walk;
use crate::{ use crate::{
SyntaxNodeRef, TextUnit, TextRange,
text_utils::{contains_offset_nonstrict, is_subrange}, text_utils::{contains_offset_nonstrict, is_subrange},
SyntaxNodeRef, TextRange, TextUnit,
}; };
pub fn find_leaf_at_offset(node: SyntaxNodeRef, offset: TextUnit) -> LeafAtOffset { pub fn find_leaf_at_offset(node: SyntaxNodeRef, offset: TextUnit) -> LeafAtOffset {
let range = node.range(); let range = node.range();
assert!( assert!(
contains_offset_nonstrict(range, offset), contains_offset_nonstrict(range, offset),
"Bad offset: range {:?} offset {:?}", range, offset "Bad offset: range {:?} offset {:?}",
range,
offset
); );
if range.is_empty() { if range.is_empty() {
return LeafAtOffset::None; return LeafAtOffset::None;
@@ -20,8 +22,7 @@ pub fn find_leaf_at_offset(node: SyntaxNodeRef, offset: TextUnit) -> LeafAtOffse
return LeafAtOffset::Single(node); return LeafAtOffset::Single(node);
} }
let mut children = node.children() let mut children = node.children().filter(|child| {
.filter(|child| {
let child_range = child.range(); let child_range = child.range();
!child_range.is_empty() && contains_offset_nonstrict(child_range, offset) !child_range.is_empty() && contains_offset_nonstrict(child_range, offset)
}); });
@@ -30,10 +31,14 @@ pub fn find_leaf_at_offset(node: SyntaxNodeRef, offset: TextUnit) -> LeafAtOffse
let right = children.next(); let right = children.next();
assert!(children.next().is_none()); assert!(children.next().is_none());
return if let Some(right) = right { return if let Some(right) = right {
match (find_leaf_at_offset(left, offset), find_leaf_at_offset(right, offset)) { match (
(LeafAtOffset::Single(left), LeafAtOffset::Single(right)) => find_leaf_at_offset(left, offset),
LeafAtOffset::Between(left, right), find_leaf_at_offset(right, offset),
_ => unreachable!() ) {
(LeafAtOffset::Single(left), LeafAtOffset::Single(right)) => {
LeafAtOffset::Between(left, right)
}
_ => unreachable!(),
} }
} else { } else {
find_leaf_at_offset(left, offset) find_leaf_at_offset(left, offset)
@ -44,7 +49,7 @@ pub fn find_leaf_at_offset(node: SyntaxNodeRef, offset: TextUnit) -> LeafAtOffse
pub enum LeafAtOffset<'a> { pub enum LeafAtOffset<'a> {
None, None,
Single(SyntaxNodeRef<'a>), Single(SyntaxNodeRef<'a>),
Between(SyntaxNodeRef<'a>, SyntaxNodeRef<'a>) Between(SyntaxNodeRef<'a>, SyntaxNodeRef<'a>),
} }
impl<'a> LeafAtOffset<'a> { impl<'a> LeafAtOffset<'a> {
@ -52,7 +57,7 @@ impl<'a> LeafAtOffset<'a> {
match self { match self {
LeafAtOffset::None => None, LeafAtOffset::None => None,
LeafAtOffset::Single(node) => Some(node), LeafAtOffset::Single(node) => Some(node),
LeafAtOffset::Between(_, right) => Some(right) LeafAtOffset::Between(_, right) => Some(right),
} }
} }
@ -60,7 +65,7 @@ impl<'a> LeafAtOffset<'a> {
match self { match self {
LeafAtOffset::None => None, LeafAtOffset::None => None,
LeafAtOffset::Single(node) => Some(node), LeafAtOffset::Single(node) => Some(node),
LeafAtOffset::Between(left, _) => Some(left) LeafAtOffset::Between(left, _) => Some(left),
} }
} }
} }
@ -71,8 +76,14 @@ impl<'f> Iterator for LeafAtOffset<'f> {
fn next(&mut self) -> Option<SyntaxNodeRef<'f>> { fn next(&mut self) -> Option<SyntaxNodeRef<'f>> {
match *self { match *self {
LeafAtOffset::None => None, LeafAtOffset::None => None,
LeafAtOffset::Single(node) => { *self = LeafAtOffset::None; Some(node) } LeafAtOffset::Single(node) => {
LeafAtOffset::Between(left, right) => { *self = LeafAtOffset::Single(right); Some(left) } *self = LeafAtOffset::None;
Some(node)
}
LeafAtOffset::Between(left, right) => {
*self = LeafAtOffset::Single(right);
Some(left)
}
} }
} }
} }
@ -81,14 +92,15 @@ pub fn find_covering_node(root: SyntaxNodeRef, range: TextRange) -> SyntaxNodeRe
assert!( assert!(
is_subrange(root.range(), range), is_subrange(root.range(), range),
"node range: {:?}, target range: {:?}", "node range: {:?}, target range: {:?}",
root.range(), range, root.range(),
range,
); );
let (left, right) = match ( let (left, right) = match (
find_leaf_at_offset(root, range.start()).right_biased(), find_leaf_at_offset(root, range.start()).right_biased(),
find_leaf_at_offset(root, range.end()).left_biased() find_leaf_at_offset(root, range.end()).left_biased(),
) { ) {
(Some(l), Some(r)) => (l, r), (Some(l), Some(r)) => (l, r),
_ => return root _ => return root,
}; };
common_ancestor(left, right) common_ancestor(left, right)
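
The `LeafAtOffset` returned by `find_leaf_at_offset` is also an iterator (see the `Iterator` impl above): `Single` yields one leaf, `Between` yields the left leaf and then the right one. A self-contained sketch of that contract, with `&str` leaves standing in for `SyntaxNodeRef` (an assumption to keep the example runnable):

```rust
#[derive(Clone, Copy)]
enum LeafAtOffset<'a> {
    None,
    Single(&'a str),
    Between(&'a str, &'a str),
}

impl<'a> Iterator for LeafAtOffset<'a> {
    type Item = &'a str;
    fn next(&mut self) -> Option<&'a str> {
        match *self {
            LeafAtOffset::None => None,
            LeafAtOffset::Single(leaf) => {
                *self = LeafAtOffset::None;
                Some(leaf)
            }
            LeafAtOffset::Between(left, right) => {
                // Yield the left leaf first, then degrade to Single.
                *self = LeafAtOffset::Single(right);
                Some(left)
            }
        }
    }
}

fn main() {
    // An offset sitting exactly between two tokens sees both leaves.
    let at_boundary = LeafAtOffset::Between("foo", "+");
    assert_eq!(at_boundary.collect::<Vec<_>>(), ["foo", "+"]);
    // An offset inside a single token sees just that token.
    let inside = LeafAtOffset::Single("bar");
    assert_eq!(inside.collect::<Vec<_>>(), ["bar"]);
}
```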

@ -1,23 +1,31 @@
use std::marker::PhantomData; use crate::{AstNode, SyntaxNodeRef};
use crate::{SyntaxNodeRef, AstNode};
use std::marker::PhantomData;
pub fn visitor<'a, T>() -> impl Visitor<'a, Output = T> { pub fn visitor<'a, T>() -> impl Visitor<'a, Output = T> {
EmptyVisitor { ph: PhantomData } EmptyVisitor { ph: PhantomData }
} }
pub fn visitor_ctx<'a, T, C>(ctx: C) -> impl VisitorCtx<'a, Output = T, Ctx = C> { pub fn visitor_ctx<'a, T, C>(ctx: C) -> impl VisitorCtx<'a, Output = T, Ctx = C> {
EmptyVisitorCtx { ph: PhantomData, ctx } EmptyVisitorCtx {
ph: PhantomData,
ctx,
}
} }
pub trait Visitor<'a>: Sized { pub trait Visitor<'a>: Sized {
type Output; type Output;
fn accept(self, node: SyntaxNodeRef<'a>) -> Option<Self::Output>; fn accept(self, node: SyntaxNodeRef<'a>) -> Option<Self::Output>;
fn visit<N, F>(self, f: F) -> Vis<Self, N, F> fn visit<N, F>(self, f: F) -> Vis<Self, N, F>
where N: AstNode<'a>, where
N: AstNode<'a>,
F: FnOnce(N) -> Self::Output, F: FnOnce(N) -> Self::Output,
{ {
Vis { inner: self, f, ph: PhantomData } Vis {
inner: self,
f,
ph: PhantomData,
}
} }
} }
@ -26,16 +34,21 @@ pub trait VisitorCtx<'a>: Sized {
type Ctx; type Ctx;
fn accept(self, node: SyntaxNodeRef<'a>) -> Result<Self::Output, Self::Ctx>; fn accept(self, node: SyntaxNodeRef<'a>) -> Result<Self::Output, Self::Ctx>;
fn visit<N, F>(self, f: F) -> VisCtx<Self, N, F> fn visit<N, F>(self, f: F) -> VisCtx<Self, N, F>
where N: AstNode<'a>, where
N: AstNode<'a>,
F: FnOnce(N, Self::Ctx) -> Self::Output, F: FnOnce(N, Self::Ctx) -> Self::Output,
{ {
VisCtx { inner: self, f, ph: PhantomData } VisCtx {
inner: self,
f,
ph: PhantomData,
}
} }
} }
#[derive(Debug)] #[derive(Debug)]
struct EmptyVisitor<T> { struct EmptyVisitor<T> {
ph: PhantomData<fn() -> T> ph: PhantomData<fn() -> T>,
} }
impl<'a, T> Visitor<'a> for EmptyVisitor<T> { impl<'a, T> Visitor<'a> for EmptyVisitor<T> {
@ -100,11 +113,9 @@ impl<'a, V, N, F> VisitorCtx<'a> for VisCtx<V, N, F>
fn accept(self, node: SyntaxNodeRef<'a>) -> Result<Self::Output, Self::Ctx> { fn accept(self, node: SyntaxNodeRef<'a>) -> Result<Self::Output, Self::Ctx> {
let VisCtx { inner, f, .. } = self; let VisCtx { inner, f, .. } = self;
inner.accept(node).or_else(|ctx| inner.accept(node).or_else(|ctx| match N::cast(node) {
match N::cast(node) {
None => Err(ctx), None => Err(ctx),
Some(node) => Ok(f(node, ctx)) Some(node) => Ok(f(node, ctx)),
} })
)
} }
} }
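
The `Visitor`/`VisitorCtx` chain above boils down to `Option`/`Result` fallback dispatch: each `visit` layer tries to `cast` the node to its own AST type and otherwise defers to the layers added before it. A toy sketch of that `or_else` shape, using a stand-in `Node` enum rather than real syntax nodes (all names here are illustrative assumptions):

```rust
#[derive(Clone, Copy)]
enum Node {
    Fn(&'static str),
    Struct(&'static str),
    Comment,
}

// One "visit" layer: accept only function nodes.
fn try_fn(node: Node) -> Option<String> {
    match node {
        Node::Fn(name) => Some(format!("fn {}", name)),
        _ => None,
    }
}

// Another layer: accept only struct nodes.
fn try_struct(node: Node) -> Option<String> {
    match node {
        Node::Struct(name) => Some(format!("struct {}", name)),
        _ => None,
    }
}

// The chained visitor: the first layer that accepts the node wins.
fn describe(node: Node) -> Option<String> {
    try_fn(node).or_else(|| try_struct(node))
}

fn main() {
    assert_eq!(describe(Node::Fn("foo")), Some("fn foo".to_string()));
    assert_eq!(describe(Node::Struct("Bar")), Some("struct Bar".to_string()));
    assert_eq!(describe(Node::Comment), None); // no layer accepted it
}
```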

@ -1,8 +1,4 @@
use crate::{ use crate::{algo::generate, SyntaxNodeRef};
SyntaxNodeRef,
algo::generate,
};
#[derive(Debug, Copy, Clone)] #[derive(Debug, Copy, Clone)]
pub enum WalkEvent<'a> { pub enum WalkEvent<'a> {

@ -1,6 +1,8 @@
// This file is automatically generated based on the file `./generated.rs.tera` when `cargo gen-kinds` is run // This file is automatically generated based on the file `./generated.rs.tera` when `cargo gen-kinds` is run
// Do not edit manually // Do not edit manually
#![cfg_attr(rustfmt, rustfmt_skip)]
use crate::{ use crate::{
ast, ast,
SyntaxNodeRef, AstNode, SyntaxNodeRef, AstNode,

@ -3,6 +3,8 @@ the below applies to the result of this template
#}// This file is automatically generated based on the file `./generated.rs.tera` when `cargo gen-kinds` is run #}// This file is automatically generated based on the file `./generated.rs.tera` when `cargo gen-kinds` is run
// Do not edit manually // Do not edit manually
#![cfg_attr(rustfmt, rustfmt_skip)]
use crate::{ use crate::{
ast, ast,
SyntaxNodeRef, AstNode, SyntaxNodeRef, AstNode,

@ -4,15 +4,18 @@ use std::marker::PhantomData;
use itertools::Itertools; use itertools::Itertools;
use crate::{
SmolStr, SyntaxNodeRef, SyntaxKind::*,
yellow::{RefRoot, SyntaxNodeChildren},
};
pub use self::generated::*; pub use self::generated::*;
use crate::{
yellow::{RefRoot, SyntaxNodeChildren},
SmolStr,
SyntaxKind::*,
SyntaxNodeRef,
};
pub trait AstNode<'a>: Clone + Copy + 'a { pub trait AstNode<'a>: Clone + Copy + 'a {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self>
where Self: Sized; where
Self: Sized;
fn syntax(self) -> SyntaxNodeRef<'a>; fn syntax(self) -> SyntaxNodeRef<'a>;
} }
@ -64,9 +67,7 @@ pub trait AttrsOwner<'a>: AstNode<'a> {
impl<'a> FnDef<'a> { impl<'a> FnDef<'a> {
pub fn has_atom_attr(&self, atom: &str) -> bool { pub fn has_atom_attr(&self, atom: &str) -> bool {
self.attrs() self.attrs().filter_map(|x| x.as_atom()).any(|x| x == atom)
.filter_map(|x| x.as_atom())
.any(|x| x == atom)
} }
} }
@ -135,7 +136,7 @@ pub enum CommentFlavor {
Line, Line,
Doc, Doc,
ModuleDoc, ModuleDoc,
Multiline Multiline,
} }
impl CommentFlavor { impl CommentFlavor {
@ -145,7 +146,7 @@ impl CommentFlavor {
Line => "//", Line => "//",
Doc => "///", Doc => "///",
ModuleDoc => "//!", ModuleDoc => "//!",
Multiline => "/*" Multiline => "/*",
} }
} }
} }
@ -166,16 +167,14 @@ impl<'a> Whitespace<'a> {
impl<'a> Name<'a> { impl<'a> Name<'a> {
pub fn text(&self) -> SmolStr { pub fn text(&self) -> SmolStr {
let ident = self.syntax().first_child() let ident = self.syntax().first_child().unwrap();
.unwrap();
ident.leaf_text().unwrap().clone() ident.leaf_text().unwrap().clone()
} }
} }
impl<'a> NameRef<'a> { impl<'a> NameRef<'a> {
pub fn text(&self) -> SmolStr { pub fn text(&self) -> SmolStr {
let ident = self.syntax().first_child() let ident = self.syntax().first_child().unwrap();
.unwrap();
ident.leaf_text().unwrap().clone() ident.leaf_text().unwrap().clone()
} }
} }
@ -241,7 +240,6 @@ fn children<'a, P: AstNode<'a>, C: AstNode<'a>>(parent: P) -> AstChildren<'a, C>
AstChildren::new(parent.syntax()) AstChildren::new(parent.syntax())
} }
#[derive(Debug)] #[derive(Debug)]
pub struct AstChildren<'a, N> { pub struct AstChildren<'a, N> {
inner: SyntaxNodeChildren<RefRoot<'a>>, inner: SyntaxNodeChildren<RefRoot<'a>>,

@ -13,9 +13,18 @@ use super::*;
// let _ = b"e"; // let _ = b"e";
// let _ = br"f"; // let _ = br"f";
// } // }
pub(crate) const LITERAL_FIRST: TokenSet = pub(crate) const LITERAL_FIRST: TokenSet = token_set![
token_set![TRUE_KW, FALSE_KW, INT_NUMBER, FLOAT_NUMBER, BYTE, CHAR, TRUE_KW,
STRING, RAW_STRING, BYTE_STRING, RAW_BYTE_STRING]; FALSE_KW,
INT_NUMBER,
FLOAT_NUMBER,
BYTE,
CHAR,
STRING,
RAW_STRING,
BYTE_STRING,
RAW_BYTE_STRING
];
pub(crate) fn literal(p: &mut Parser) -> Option<CompletedMarker> { pub(crate) fn literal(p: &mut Parser) -> Option<CompletedMarker> {
if !p.at_ts(LITERAL_FIRST) { if !p.at_ts(LITERAL_FIRST) {
@ -26,15 +35,31 @@ pub(crate) fn literal(p: &mut Parser) -> Option<CompletedMarker> {
Some(m.complete(p, LITERAL)) Some(m.complete(p, LITERAL))
} }
pub(super) const ATOM_EXPR_FIRST: TokenSet = pub(super) const ATOM_EXPR_FIRST: TokenSet = token_set_union![
token_set_union![
LITERAL_FIRST, LITERAL_FIRST,
token_set![L_CURLY, L_PAREN, L_BRACK, PIPE, MOVE_KW, IF_KW, WHILE_KW, MATCH_KW, UNSAFE_KW, token_set![
RETURN_KW, IDENT, SELF_KW, SUPER_KW, CRATE_KW, COLONCOLON, BREAK_KW, CONTINUE_KW, LIFETIME ], L_CURLY,
L_PAREN,
L_BRACK,
PIPE,
MOVE_KW,
IF_KW,
WHILE_KW,
MATCH_KW,
UNSAFE_KW,
RETURN_KW,
IDENT,
SELF_KW,
SUPER_KW,
CRATE_KW,
COLONCOLON,
BREAK_KW,
CONTINUE_KW,
LIFETIME
],
]; ];
const EXPR_RECOVERY_SET: TokenSet = const EXPR_RECOVERY_SET: TokenSet = token_set![LET_KW];
token_set![LET_KW];
pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<CompletedMarker> { pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<CompletedMarker> {
match literal(p) { match literal(p) {
@ -80,7 +105,7 @@ pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<CompletedMark
let m = p.start(); let m = p.start();
p.bump(); p.bump();
block_expr(p, Some(m)) block_expr(p, Some(m))
}, }
L_CURLY => block_expr(p, None), L_CURLY => block_expr(p, None),
RETURN_KW => return_expr(p), RETURN_KW => return_expr(p),
CONTINUE_KW => continue_expr(p), CONTINUE_KW => continue_expr(p),
@ -119,7 +144,14 @@ fn tuple_expr(p: &mut Parser) -> CompletedMarker {
} }
} }
p.expect(R_PAREN); p.expect(R_PAREN);
m.complete(p, if saw_expr && !saw_comma { PAREN_EXPR } else { TUPLE_EXPR }) m.complete(
p,
if saw_expr && !saw_comma {
PAREN_EXPR
} else {
TUPLE_EXPR
},
)
} }
// test array_expr // test array_expr
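
`LITERAL_FIRST` and `ATOM_EXPR_FIRST` above are `const` first-token sets, which is why `token_set!` and `token_set_union!` must work at compile time. A plausible sketch of such a bitset (the crate's actual `TokenSet` representation may differ; the `u128` mask and the helper names are assumptions):

```rust
#[derive(Clone, Copy)]
struct TokenSet(u128);

impl TokenSet {
    // What a `token_set![..]` invocation could expand to.
    const fn new(kinds: &[u8]) -> TokenSet {
        let mut bits = 0u128;
        let mut i = 0;
        while i < kinds.len() {
            bits |= 1u128 << kinds[i];
            i += 1;
        }
        TokenSet(bits)
    }
    // What `token_set_union![a, b]` could expand to.
    const fn union(self, other: TokenSet) -> TokenSet {
        TokenSet(self.0 | other.0)
    }
    // The check behind `p.at_ts(..)`.
    fn contains(self, kind: u8) -> bool {
        self.0 & (1u128 << kind) != 0
    }
}

// Toy discriminants standing in for SyntaxKind values.
const TRUE_KW: u8 = 0;
const INT_NUMBER: u8 = 1;
const L_PAREN: u8 = 2;

const LITERAL_FIRST: TokenSet = TokenSet::new(&[TRUE_KW, INT_NUMBER]);
const ATOM_EXPR_FIRST: TokenSet = LITERAL_FIRST.union(TokenSet::new(&[L_PAREN]));

fn main() {
    assert!(ATOM_EXPR_FIRST.contains(L_PAREN));
    assert!(ATOM_EXPR_FIRST.contains(INT_NUMBER));
    assert!(!LITERAL_FIRST.contains(L_PAREN));
}
```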

@ -1,23 +1,32 @@
mod atom; mod atom;
use super::*;
pub(super) use self::atom::{literal, LITERAL_FIRST};
pub(crate) use self::atom::match_arm_list; pub(crate) use self::atom::match_arm_list;
pub(super) use self::atom::{literal, LITERAL_FIRST};
use super::*;
const EXPR_FIRST: TokenSet = LHS_FIRST; const EXPR_FIRST: TokenSet = LHS_FIRST;
pub(super) fn expr(p: &mut Parser) -> BlockLike { pub(super) fn expr(p: &mut Parser) -> BlockLike {
let r = Restrictions { forbid_structs: false, prefer_stmt: false }; let r = Restrictions {
forbid_structs: false,
prefer_stmt: false,
};
expr_bp(p, r, 1) expr_bp(p, r, 1)
} }
pub(super) fn expr_stmt(p: &mut Parser) -> BlockLike { pub(super) fn expr_stmt(p: &mut Parser) -> BlockLike {
let r = Restrictions { forbid_structs: false, prefer_stmt: true }; let r = Restrictions {
forbid_structs: false,
prefer_stmt: true,
};
expr_bp(p, r, 1) expr_bp(p, r, 1)
} }
fn expr_no_struct(p: &mut Parser) { fn expr_no_struct(p: &mut Parser) {
let r = Restrictions { forbid_structs: true, prefer_stmt: false }; let r = Restrictions {
forbid_structs: true,
prefer_stmt: false,
};
expr_bp(p, r, 1); expr_bp(p, r, 1);
} }
@ -107,10 +116,8 @@ enum Op {
fn current_op(p: &Parser) -> (u8, Op) { fn current_op(p: &Parser) -> (u8, Op) {
if let Some(t) = p.next3() { if let Some(t) = p.next3() {
match t { match t {
(L_ANGLE, L_ANGLE, EQ) => (L_ANGLE, L_ANGLE, EQ) => return (1, Op::Composite(SHLEQ, 3)),
return (1, Op::Composite(SHLEQ, 3)), (R_ANGLE, R_ANGLE, EQ) => return (1, Op::Composite(SHREQ, 3)),
(R_ANGLE, R_ANGLE, EQ) =>
return (1, Op::Composite(SHREQ, 3)),
_ => (), _ => (),
} }
} }
@ -201,8 +208,7 @@ fn is_block(kind: SyntaxKind) -> bool {
} }
} }
const LHS_FIRST: TokenSet = const LHS_FIRST: TokenSet = token_set_union![
token_set_union![
token_set![AMP, STAR, EXCL, DOTDOT, MINUS], token_set![AMP, STAR, EXCL, DOTDOT, MINUS],
atom::ATOM_EXPR_FIRST, atom::ATOM_EXPR_FIRST,
]; ];
@ -265,11 +271,13 @@ fn postfix_expr(p: &mut Parser, r: Restrictions, mut lhs: CompletedMarker) -> Co
// } // }
L_PAREN if allow_calls => call_expr(p, lhs), L_PAREN if allow_calls => call_expr(p, lhs),
L_BRACK if allow_calls => index_expr(p, lhs), L_BRACK if allow_calls => index_expr(p, lhs),
DOT if p.nth(1) == IDENT => if p.nth(2) == L_PAREN || p.nth(2) == COLONCOLON { DOT if p.nth(1) == IDENT => {
if p.nth(2) == L_PAREN || p.nth(2) == COLONCOLON {
method_call_expr(p, lhs) method_call_expr(p, lhs)
} else { } else {
field_expr(p, lhs) field_expr(p, lhs)
}, }
}
DOT if p.nth(1) == INT_NUMBER => field_expr(p, lhs), DOT if p.nth(1) == INT_NUMBER => field_expr(p, lhs),
// test postfix_range // test postfix_range
// fn foo() { let x = 1..; } // fn foo() { let x = 1..; }
@ -318,10 +326,7 @@ fn index_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker {
// y.bar::<T>(1, 2,); // y.bar::<T>(1, 2,);
// } // }
fn method_call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { fn method_call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker {
assert!( assert!(p.at(DOT) && p.nth(1) == IDENT && (p.nth(2) == L_PAREN || p.nth(2) == COLONCOLON));
p.at(DOT) && p.nth(1) == IDENT
&& (p.nth(2) == L_PAREN || p.nth(2) == COLONCOLON)
);
let m = lhs.precede(p); let m = lhs.precede(p);
p.bump(); p.bump();
name_ref(p); name_ref(p);
@ -410,7 +415,7 @@ fn path_expr(p: &mut Parser, r: Restrictions) -> CompletedMarker {
items::macro_call_after_excl(p); items::macro_call_after_excl(p);
m.complete(p, MACRO_CALL) m.complete(p, MACRO_CALL)
} }
_ => m.complete(p, PATH_EXPR) _ => m.complete(p, PATH_EXPR),
} }
} }

@ -1,16 +1,15 @@
mod consts; mod consts;
mod nominal; mod nominal;
mod traits; mod traits;
mod use_item; mod use_item;
use super::*;
pub(crate) use self::{ pub(crate) use self::{
expressions::{named_field_list, match_arm_list}, expressions::{match_arm_list, named_field_list},
nominal::{enum_variant_list, named_field_def_list}, nominal::{enum_variant_list, named_field_def_list},
traits::{trait_item_list, impl_item_list}, traits::{impl_item_list, trait_item_list},
use_item::use_tree_list, use_item::use_tree_list,
}; };
use super::*;
// test mod_contents // test mod_contents
// fn foo() {} // fn foo() {}
@ -26,12 +25,14 @@ pub(super) fn mod_contents(p: &mut Parser, stop_on_r_curly: bool) {
} }
pub(super) enum ItemFlavor { pub(super) enum ItemFlavor {
Mod, Trait Mod,
Trait,
} }
const ITEM_RECOVERY_SET: TokenSet = const ITEM_RECOVERY_SET: TokenSet = token_set![
token_set![FN_KW, STRUCT_KW, ENUM_KW, IMPL_KW, TRAIT_KW, CONST_KW, STATIC_KW, LET_KW, FN_KW, STRUCT_KW, ENUM_KW, IMPL_KW, TRAIT_KW, CONST_KW, STATIC_KW, LET_KW, MOD_KW, PUB_KW,
MOD_KW, PUB_KW, CRATE_KW]; CRATE_KW
];
pub(super) fn item_or_macro(p: &mut Parser, stop_on_r_curly: bool, flavor: ItemFlavor) { pub(super) fn item_or_macro(p: &mut Parser, stop_on_r_curly: bool, flavor: ItemFlavor) {
let m = p.start(); let m = p.start();
@ -153,11 +154,13 @@ pub(super) fn maybe_item(p: &mut Parser, flavor: ItemFlavor) -> MaybeItem {
traits::impl_item(p); traits::impl_item(p);
IMPL_ITEM IMPL_ITEM
} }
_ => return if has_mods { _ => {
return if has_mods {
MaybeItem::Modifiers MaybeItem::Modifiers
} else { } else {
MaybeItem::None MaybeItem::None
} }
}
}; };
MaybeItem::Item(kind) MaybeItem::Item(kind)
@ -194,7 +197,7 @@ fn items_without_modifiers(p: &mut Parser) -> Option<SyntaxKind> {
if p.at(SEMI) { if p.at(SEMI) {
p.err_and_bump( p.err_and_bump(
"expected item, found `;`\n\ "expected item, found `;`\n\
consider removing this semicolon" consider removing this semicolon",
); );
} }
STRUCT_DEF STRUCT_DEF
@ -227,7 +230,9 @@ fn items_without_modifiers(p: &mut Parser) -> Option<SyntaxKind> {
} }
// test extern_block // test extern_block
// extern {} // extern {}
EXTERN_KW if la == L_CURLY || ((la == STRING || la == RAW_STRING) && p.nth(2) == L_CURLY) => { EXTERN_KW
if la == L_CURLY || ((la == STRING || la == RAW_STRING) && p.nth(2) == L_CURLY) =>
{
abi(p); abi(p);
extern_item_list(p); extern_item_list(p);
EXTERN_BLOCK EXTERN_BLOCK
@ -267,10 +272,8 @@ fn fn_def(p: &mut Parser, flavor: ItemFlavor) {
if p.at(L_PAREN) { if p.at(L_PAREN) {
match flavor { match flavor {
ItemFlavor::Mod => ItemFlavor::Mod => params::param_list(p),
params::param_list(p), ItemFlavor::Trait => params::param_list_opt_patterns(p),
ItemFlavor::Trait =>
params::param_list_opt_patterns(p),
} }
} else { } else {
p.error("expected function arguments"); p.error("expected function arguments");
@ -361,7 +364,7 @@ pub(super) fn macro_call_after_excl(p: &mut Parser) -> BlockLike {
_ => { _ => {
p.error("expected `{`, `[`, `(`"); p.error("expected `{`, `[`, `(`");
BlockLike::NotBlock BlockLike::NotBlock
}, }
}; };
flavor flavor
@ -385,9 +388,9 @@ pub(crate) fn token_tree(p: &mut Parser) {
return; return;
} }
R_PAREN | R_BRACK => p.err_and_bump("unmatched brace"), R_PAREN | R_BRACK => p.err_and_bump("unmatched brace"),
_ => p.bump() _ => p.bump(),
}
} }
};
p.expect(closing_paren_kind); p.expect(closing_paren_kind);
m.complete(p, TOKEN_TREE); m.complete(p, TOKEN_TREE);
} }
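
`token_tree` above consumes tokens until the opening delimiter's mate is found, treating stray `)` and `]` as errors rather than walking past them. A small self-contained sketch of that matching loop over plain chars (a simplification: the real parser recurses per delimiter and reports errors instead of returning `None`):

```rust
// Length, in tokens, of the token tree starting at `tokens[0]`.
fn token_tree_len(tokens: &[char]) -> Option<usize> {
    let (open, close) = match tokens.first()? {
        '(' => ('(', ')'),
        '[' => ('[', ']'),
        '{' => ('{', '}'),
        _ => return None, // expected `{`, `[`, `(`
    };
    let mut depth = 0usize;
    for (i, &t) in tokens.iter().enumerate() {
        if t == open {
            depth += 1;
        } else if t == close {
            depth -= 1;
            if depth == 0 {
                return Some(i + 1); // mate found; tree is tokens[..=i]
            }
        }
    }
    None // ran out of input with the tree still open
}

fn main() {
    let toks: Vec<char> = "(a(b)c)d".chars().collect();
    assert_eq!(token_tree_len(&toks), Some(7)); // `(a(b)c)`
    let unbalanced: Vec<char> = "(a(b".chars().collect();
    assert_eq!(token_tree_len(&unbalanced), None);
}
```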

@ -128,4 +128,3 @@ pub(crate) fn impl_type(p: &mut Parser) {
} }
types::type_(p); types::type_(p);
} }

@ -31,28 +31,18 @@ mod type_args;
mod type_params; mod type_params;
mod types; mod types;
use crate::{
token_set::TokenSet,
parser_api::{Marker, CompletedMarker, Parser},
SyntaxKind::{self, *},
};
pub(crate) use self::{ pub(crate) use self::{
expressions::{ expressions::block,
block,
},
items::{ items::{
enum_variant_list, enum_variant_list, extern_item_list, impl_item_list, match_arm_list, mod_item_list,
extern_item_list, named_field_def_list, named_field_list, token_tree, trait_item_list, use_tree_list,
impl_item_list,
match_arm_list,
mod_item_list,
named_field_def_list,
named_field_list,
token_tree,
trait_item_list,
use_tree_list,
}, },
}; };
use crate::{
parser_api::{CompletedMarker, Marker, Parser},
token_set::TokenSet,
SyntaxKind::{self, *},
};
pub(crate) fn root(p: &mut Parser) { pub(crate) fn root(p: &mut Parser) {
let m = p.start(); let m = p.start();
@ -61,7 +51,6 @@ pub(crate) fn root(p: &mut Parser) {
m.complete(p, ROOT); m.complete(p, ROOT);
} }
#[derive(Clone, Copy, PartialEq, Eq)] #[derive(Clone, Copy, PartialEq, Eq)]
enum BlockLike { enum BlockLike {
Block, Block,
@ -69,7 +58,9 @@ enum BlockLike {
} }
impl BlockLike { impl BlockLike {
fn is_block(self) -> bool { self == BlockLike::Block } fn is_block(self) -> bool {
self == BlockLike::Block
}
} }
fn opt_visibility(p: &mut Parser) { fn opt_visibility(p: &mut Parser) {

@ -61,12 +61,8 @@ fn list_(p: &mut Parser, flavor: Flavor) {
m.complete(p, PARAM_LIST); m.complete(p, PARAM_LIST);
} }
const VALUE_PARAMETER_FIRST: TokenSet = const VALUE_PARAMETER_FIRST: TokenSet =
token_set_union![ token_set_union![patterns::PATTERN_FIRST, types::TYPE_FIRST,];
patterns::PATTERN_FIRST,
types::TYPE_FIRST,
];
fn value_parameter(p: &mut Parser, flavor: Flavor) { fn value_parameter(p: &mut Parser, flavor: Flavor) {
let m = p.start(); let m = p.start();
@ -76,7 +72,7 @@ fn value_parameter(p: &mut Parser, flavor: Flavor) {
if p.at(COLON) || flavor.type_required() { if p.at(COLON) || flavor.type_required() {
types::ascription(p) types::ascription(p)
} }
}, }
// test value_parameters_no_patterns // test value_parameters_no_patterns
// type F = Box<Fn(a: i32, &b: &i32, &mut c: &i32, ())>; // type F = Box<Fn(a: i32, &b: &i32, &mut c: &i32, ())>;
Flavor::OptionalPattern => { Flavor::OptionalPattern => {
@ -86,13 +82,14 @@ fn value_parameter(p: &mut Parser, flavor: Flavor) {
let la3 = p.nth(3); let la3 = p.nth(3);
if la0 == IDENT && la1 == COLON if la0 == IDENT && la1 == COLON
|| la0 == AMP && la1 == IDENT && la2 == COLON || la0 == AMP && la1 == IDENT && la2 == COLON
|| la0 == AMP && la1 == MUT_KW && la2 == IDENT && la3 == COLON { || la0 == AMP && la1 == MUT_KW && la2 == IDENT && la3 == COLON
{
patterns::pattern(p); patterns::pattern(p);
types::ascription(p); types::ascription(p);
} else { } else {
types::type_(p); types::type_(p);
} }
}, }
} }
m.complete(p, PARAM); m.complete(p, PARAM);
} }

@ -97,7 +97,7 @@ fn opt_path_type_args(p: &mut Parser, mode: Mode) {
} else { } else {
type_args::opt_type_arg_list(p, false) type_args::opt_type_arg_list(p, false)
} }
}, }
Mode::Expr => type_args::opt_type_arg_list(p, true), Mode::Expr => type_args::opt_type_arg_list(p, true),
} }
} }

@ -1,7 +1,6 @@
use super::*; use super::*;
pub(super) const PATTERN_FIRST: TokenSet = pub(super) const PATTERN_FIRST: TokenSet = token_set_union![
token_set_union![
token_set![REF_KW, MUT_KW, L_PAREN, L_BRACK, AMP, UNDERSCORE], token_set![REF_KW, MUT_KW, L_PAREN, L_BRACK, AMP, UNDERSCORE],
expressions::LITERAL_FIRST, expressions::LITERAL_FIRST,
paths::PATH_FIRST, paths::PATH_FIRST,
@ -29,12 +28,13 @@ pub(super) fn pattern_r(p: &mut Parser, recovery_set: TokenSet) {
const PAT_RECOVERY_SET: TokenSet = const PAT_RECOVERY_SET: TokenSet =
token_set![LET_KW, IF_KW, WHILE_KW, LOOP_KW, MATCH_KW, R_PAREN, COMMA]; token_set![LET_KW, IF_KW, WHILE_KW, LOOP_KW, MATCH_KW, R_PAREN, COMMA];
fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option<CompletedMarker> { fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option<CompletedMarker> {
let la0 = p.nth(0); let la0 = p.nth(0);
let la1 = p.nth(1); let la1 = p.nth(1);
if la0 == REF_KW || la0 == MUT_KW if la0 == REF_KW
|| (la0 == IDENT && !(la1 == COLONCOLON || la1 == L_PAREN || la1 == L_CURLY)) { || la0 == MUT_KW
|| (la0 == IDENT && !(la1 == COLONCOLON || la1 == L_PAREN || la1 == L_CURLY))
{
return Some(bind_pat(p, true)); return Some(bind_pat(p, true));
} }
if paths::is_path_start(p) { if paths::is_path_start(p) {
@ -87,7 +87,7 @@ fn path_pat(p: &mut Parser) -> CompletedMarker {
field_pat_list(p); field_pat_list(p);
STRUCT_PAT STRUCT_PAT
} }
_ => PATH_PAT _ => PATH_PAT,
}; };
m.complete(p, kind) m.complete(p, kind)
} }
@ -195,7 +195,7 @@ fn pat_list(p: &mut Parser, ket: SyntaxKind) {
break; break;
} }
pattern(p) pattern(p)
}, }
} }
if !p.at(ket) { if !p.at(ket) {
p.expect(COMMA); p.expect(COMMA);

@ -72,12 +72,8 @@ pub(super) fn bounds_without_colon(p: &mut Parser) {
p.eat(QUESTION); p.eat(QUESTION);
match p.current() { match p.current() {
LIFETIME => p.bump(), LIFETIME => p.bump(),
FOR_KW => { FOR_KW => types::for_type(p),
types::for_type(p) _ if paths::is_path_start(p) => types::path_type(p),
}
_ if paths::is_path_start(p) => {
types::path_type(p)
}
_ => break, _ => break,
} }
if has_paren { if has_paren {
@ -104,7 +100,7 @@ pub(super) fn opt_where_clause(p: &mut Parser) {
p.bump(); p.bump();
loop { loop {
if !(paths::is_path_start(p) || p.current() == LIFETIME) { if !(paths::is_path_start(p) || p.current() == LIFETIME) {
break break;
} }
where_predicate(p); where_predicate(p);
if p.current() != L_CURLY && p.current() != SEMI { if p.current() != L_CURLY && p.current() != SEMI {
@ -130,7 +126,6 @@ fn where_predicate(p: &mut Parser) {
} else { } else {
p.error("expected colon") p.error("expected colon")
} }
} }
m.complete(p, WHERE_PRED); m.complete(p, WHERE_PRED);
} }

@ -1,15 +1,14 @@
use super::*; use super::*;
pub(super) const TYPE_FIRST: TokenSet = pub(super) const TYPE_FIRST: TokenSet = token_set_union![
token_set_union![
token_set![ token_set![
L_PAREN, EXCL, STAR, L_BRACK, AMP, UNDERSCORE, FN_KW, UNSAFE_KW, EXTERN_KW, FOR_KW, IMPL_KW, DYN_KW, L_ANGLE, L_PAREN, EXCL, STAR, L_BRACK, AMP, UNDERSCORE, FN_KW, UNSAFE_KW, EXTERN_KW, FOR_KW,
IMPL_KW, DYN_KW, L_ANGLE,
], ],
paths::PATH_FIRST, paths::PATH_FIRST,
]; ];
const TYPE_RECOVERY_SET: TokenSet = const TYPE_RECOVERY_SET: TokenSet = token_set![R_PAREN, COMMA];
token_set![R_PAREN, COMMA];
pub(super) fn type_(p: &mut Parser) { pub(super) fn type_(p: &mut Parser) {
match p.current() { match p.current() {
@ -200,7 +199,6 @@ pub(super) fn for_type(p: &mut Parser) {
FN_KW | UNSAFE_KW | EXTERN_KW => fn_pointer_type(p), FN_KW | UNSAFE_KW | EXTERN_KW => fn_pointer_type(p),
_ if paths::is_path_start(p) => path_type_(p, false), _ if paths::is_path_start(p) => path_type_(p, false),
_ => p.error("expected a path"), _ => p.error("expected a path"),
} }
m.complete(p, FOR_TYPE); m.complete(p, FOR_TYPE);
} }

@ -58,12 +58,16 @@ fn next_token_inner(c: char, ptr: &mut Ptr) -> SyntaxKind {
} }
match c { match c {
'#' => if scan_shebang(ptr) { '#' => {
if scan_shebang(ptr) {
return SHEBANG; return SHEBANG;
}, }
'/' => if let Some(kind) = scan_comment(ptr) { }
'/' => {
if let Some(kind) = scan_comment(ptr) {
return kind; return kind;
}, }
}
_ => (), _ => (),
} }

@ -71,7 +71,7 @@ pub(crate) fn scan_string(ptr: &mut Ptr) {
} }
_ => { _ => {
ptr.bump(); ptr.bump();
}, }
} }
} }
} }
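
`next_token_inner` above peeks before committing: `#` only becomes a `SHEBANG` when `scan_shebang` recognizes what follows. A sketch of that lookahead-then-bump style; the `Ptr` methods and the exact shebang rule shown here are assumptions, not the crate's actual API:

```rust
// A cursor over the remaining input: peek freely, `bump` to commit.
struct Ptr<'s> {
    text: &'s str,
    consumed: usize, // bytes already eaten
}

impl<'s> Ptr<'s> {
    fn new(text: &'s str) -> Ptr<'s> {
        Ptr { text, consumed: 0 }
    }
    fn current(&self) -> Option<char> {
        self.text[self.consumed..].chars().next()
    }
    fn nth(&self, n: usize) -> Option<char> {
        self.text[self.consumed..].chars().nth(n)
    }
    fn bump(&mut self) -> Option<char> {
        let c = self.current()?;
        self.consumed += c.len_utf8();
        Some(c)
    }
}

// Called with the leading `#` already consumed; eat `!...` to end of line.
fn scan_shebang(ptr: &mut Ptr) -> bool {
    if ptr.current() == Some('!') && ptr.nth(1) == Some('/') {
        while ptr.bump().map_or(false, |c| c != '\n') {}
        true
    } else {
        false
    }
}

fn main() {
    let mut ptr = Ptr::new("!/usr/bin/env rust\nfn main() {}");
    assert!(scan_shebang(&mut ptr));
    assert_eq!(ptr.current(), Some('f')); // stopped after the newline
}
```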

@ -20,11 +20,11 @@
#![allow(missing_docs)] #![allow(missing_docs)]
//#![warn(unreachable_pub)] // rust-lang/rust#47816 //#![warn(unreachable_pub)] // rust-lang/rust#47816
extern crate itertools;
extern crate unicode_xid;
extern crate drop_bomb; extern crate drop_bomb;
extern crate itertools;
extern crate parking_lot; extern crate parking_lot;
extern crate rowan; extern crate rowan;
extern crate unicode_xid;
#[cfg(test)] #[cfg(test)]
#[macro_use] #[macro_use]
@ -35,33 +35,31 @@ pub mod ast;
mod lexer; mod lexer;
#[macro_use] #[macro_use]
mod token_set; mod token_set;
mod parser_api;
mod grammar; mod grammar;
mod parser_api;
mod parser_impl; mod parser_impl;
mod reparsing; mod reparsing;
mod syntax_kinds; mod syntax_kinds;
mod yellow; pub mod text_utils;
/// Utilities for simple uses of the parser. /// Utilities for simple uses of the parser.
pub mod utils; pub mod utils;
pub mod text_utils; mod yellow;
pub use crate::{ pub use crate::{
rowan::{SmolStr, TextRange, TextUnit},
ast::AstNode, ast::AstNode,
lexer::{tokenize, Token}, lexer::{tokenize, Token},
syntax_kinds::SyntaxKind,
yellow::{SyntaxNode, SyntaxNodeRef, OwnedRoot, RefRoot, TreeRoot, SyntaxError, Direction},
reparsing::AtomEdit, reparsing::AtomEdit,
rowan::{SmolStr, TextRange, TextUnit},
syntax_kinds::SyntaxKind,
yellow::{Direction, OwnedRoot, RefRoot, SyntaxError, SyntaxNode, SyntaxNodeRef, TreeRoot},
}; };
use crate::{ use crate::yellow::GreenNode;
yellow::{GreenNode},
};
#[derive(Clone, Debug, Hash, PartialEq, Eq)] #[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub struct File { pub struct File {
root: SyntaxNode root: SyntaxNode,
} }
impl File { impl File {
@ -74,21 +72,21 @@ impl File {
} }
pub fn parse(text: &str) -> File { pub fn parse(text: &str) -> File {
let tokens = tokenize(&text); let tokens = tokenize(&text);
let (green, errors) = parser_impl::parse_with( let (green, errors) =
yellow::GreenBuilder::new(), parser_impl::parse_with(yellow::GreenBuilder::new(), text, &tokens, grammar::root);
text, &tokens, grammar::root,
);
File::new(green, errors) File::new(green, errors)
} }
pub fn reparse(&self, edit: &AtomEdit) -> File { pub fn reparse(&self, edit: &AtomEdit) -> File {
self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit)) self.incremental_reparse(edit)
.unwrap_or_else(|| self.full_reparse(edit))
} }
pub fn incremental_reparse(&self, edit: &AtomEdit) -> Option<File> { pub fn incremental_reparse(&self, edit: &AtomEdit) -> Option<File> {
reparsing::incremental_reparse(self.syntax(), edit, self.errors()) reparsing::incremental_reparse(self.syntax(), edit, self.errors())
.map(|(green_node, errors)| File::new(green_node, errors)) .map(|(green_node, errors)| File::new(green_node, errors))
} }
fn full_reparse(&self, edit: &AtomEdit) -> File { fn full_reparse(&self, edit: &AtomEdit) -> File {
let text = text_utils::replace_range(self.syntax().text().to_string(), edit.delete, &edit.insert); let text =
text_utils::replace_range(self.syntax().text().to_string(), edit.delete, &edit.insert);
File::parse(&text) File::parse(&text)
} }
pub fn ast(&self) -> ast::Root { pub fn ast(&self) -> ast::Root {

@ -1,8 +1,8 @@
use crate::{ use crate::{
token_set::TokenSet,
parser_impl::ParserImpl,
SyntaxKind::{self, ERROR},
drop_bomb::DropBomb, drop_bomb::DropBomb,
parser_impl::ParserImpl,
token_set::TokenSet,
SyntaxKind::{self, ERROR},
}; };
/// `Parser` struct provides the low-level API for /// `Parser` struct provides the low-level API for
@ -116,9 +116,7 @@ impl<'t> Parser<'t> {
/// Create an error node and consume the next token. /// Create an error node and consume the next token.
pub(crate) fn err_recover(&mut self, message: &str, recovery: TokenSet) { pub(crate) fn err_recover(&mut self, message: &str, recovery: TokenSet) {
if self.at(SyntaxKind::L_CURLY) if self.at(SyntaxKind::L_CURLY) || self.at(SyntaxKind::R_CURLY) || self.at_ts(recovery) {
|| self.at(SyntaxKind::R_CURLY)
|| self.at_ts(recovery) {
self.error(message); self.error(message);
} else { } else {
let m = self.start(); let m = self.start();

@ -7,14 +7,14 @@
//! tree builder: the parser produces a stream of events like //! tree builder: the parser produces a stream of events like
//! `start node`, `finish node`, and `FileBuilder` converts //! `start node`, `finish node`, and `FileBuilder` converts
//! this stream to a real tree. //! this stream to a real tree.
use std::mem;
use crate::{ use crate::{
TextUnit, TextRange, SmolStr,
lexer::Token, lexer::Token,
parser_impl::Sink, parser_impl::Sink,
SmolStr,
SyntaxKind::{self, *}, SyntaxKind::{self, *},
TextRange, TextUnit,
}; };
use std::mem;
/// `Parser` produces a flat list of `Event`s. /// `Parser` produces a flat list of `Event`s.
/// They are converted to a tree-structure in /// They are converted to a tree-structure in
@ -89,20 +89,28 @@ pub(super) struct EventProcessor<'a, S: Sink> {
} }
impl<'a, S: Sink> EventProcessor<'a, S> { impl<'a, S: Sink> EventProcessor<'a, S> {
pub(super) fn new(sink: S, text: &'a str, tokens: &'a[Token], events: &'a mut [Event]) -> EventProcessor<'a, S> { pub(super) fn new(
sink: S,
text: &'a str,
tokens: &'a [Token],
events: &'a mut [Event],
) -> EventProcessor<'a, S> {
EventProcessor { EventProcessor {
sink, sink,
text_pos: 0.into(), text_pos: 0.into(),
text, text,
token_pos: 0, token_pos: 0,
tokens, tokens,
events events,
} }
} }
pub(super) fn process(mut self) -> S { pub(super) fn process(mut self) -> S {
fn tombstone() -> Event { fn tombstone() -> Event {
Event::Start { kind: TOMBSTONE, forward_parent: None } Event::Start {
kind: TOMBSTONE,
forward_parent: None,
}
} }
let mut forward_parents = Vec::new(); let mut forward_parents = Vec::new();
@ -112,7 +120,10 @@ impl<'a, S: Sink> EventProcessor<'a, S> {
kind: TOMBSTONE, .. kind: TOMBSTONE, ..
} => (), } => (),
Event::Start { kind, forward_parent } => { Event::Start {
kind,
forward_parent,
} => {
forward_parents.push(kind); forward_parents.push(kind);
let mut idx = i; let mut idx = i;
let mut fp = forward_parent; let mut fp = forward_parent;
@ -125,7 +136,7 @@ impl<'a, S: Sink> EventProcessor<'a, S> {
} => { } => {
forward_parents.push(kind); forward_parents.push(kind);
forward_parent forward_parent
}, }
_ => unreachable!(), _ => unreachable!(),
}; };
} }
@ -136,7 +147,7 @@ impl<'a, S: Sink> EventProcessor<'a, S> {
Event::Finish => { Event::Finish => {
let last = i == self.events.len() - 1; let last = i == self.events.len() - 1;
self.finish(last); self.finish(last);
}, }
Event::Token { kind, n_raw_tokens } => { Event::Token { kind, n_raw_tokens } => {
self.eat_ws(); self.eat_ws();
let n_raw_tokens = n_raw_tokens as usize; let n_raw_tokens = n_raw_tokens as usize;
@ -162,14 +173,11 @@ impl<'a, S: Sink> EventProcessor<'a, S> {
.take_while(|it| it.kind.is_trivia()) .take_while(|it| it.kind.is_trivia())
.count(); .count();
let leading_trivias = &self.tokens[self.token_pos..self.token_pos + n_trivias]; let leading_trivias = &self.tokens[self.token_pos..self.token_pos + n_trivias];
let mut trivia_end = self.text_pos + leading_trivias let mut trivia_end =
.iter() self.text_pos + leading_trivias.iter().map(|it| it.len).sum::<TextUnit>();
.map(|it| it.len)
.sum::<TextUnit>();
let n_attached_trivias = { let n_attached_trivias = {
let leading_trivias = leading_trivias.iter().rev() let leading_trivias = leading_trivias.iter().rev().map(|it| {
.map(|it| {
let next_end = trivia_end - it.len; let next_end = trivia_end - it.len;
let range = TextRange::from_to(next_end, trivia_end); let range = TextRange::from_to(next_end, trivia_end);
trivia_end = next_end; trivia_end = next_end;
@ -215,7 +223,10 @@ impl<'a, S: Sink> EventProcessor<'a, S> {
} }
} }
fn n_attached_trivias<'a>(kind: SyntaxKind, trivias: impl Iterator<Item=(SyntaxKind, &'a str)>) -> usize { fn n_attached_trivias<'a>(
kind: SyntaxKind,
trivias: impl Iterator<Item = (SyntaxKind, &'a str)>,
) -> usize {
match kind { match kind {
STRUCT_DEF | ENUM_DEF | FN_DEF | TRAIT_DEF | MODULE => { STRUCT_DEF | ENUM_DEF | FN_DEF | TRAIT_DEF | MODULE => {
let mut res = 0; let mut res = 0;
@ -236,5 +247,4 @@ fn n_attached_trivias<'a>(kind: SyntaxKind, trivias: impl Iterator<Item=(SyntaxK
} }
_ => 0, _ => 0,
} }
} }
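
The `EventProcessor` above turns the parser's flat `Start`/`Token`/`Finish` stream back into a tree. A toy sketch of that replay, with the `forward_parent` bookkeeping and trivia attachment omitted (both are handled in the real code shown above):

```rust
enum Event {
    Start(&'static str),
    Token(&'static str),
    Finish,
}

// Replay the flat event stream as indentation-based nesting.
fn process(events: &[Event]) {
    let mut depth = 0usize;
    for e in events {
        match e {
            Event::Start(kind) => {
                println!("{:indent$}{}", "", kind, indent = depth * 2);
                depth += 1;
            }
            Event::Token(text) => {
                println!("{:indent$}token {:?}", "", text, indent = depth * 2);
            }
            Event::Finish => depth -= 1,
        }
    }
    assert_eq!(depth, 0, "every Start must have a matching Finish");
}

fn main() {
    process(&[
        Event::Start("FN_DEF"),
        Event::Token("fn"),
        Event::Start("NAME"),
        Event::Token("main"),
        Event::Finish,
        Event::Finish,
    ]);
}
```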

@ -4,13 +4,13 @@ mod input;
use std::cell::Cell; use std::cell::Cell;
use crate::{ use crate::{
TextUnit, SmolStr,
lexer::Token, lexer::Token,
parser_api::Parser, parser_api::Parser,
parser_impl::{ parser_impl::{
event::{EventProcessor, Event}, event::{Event, EventProcessor},
input::{InputPosition, ParserInput}, input::{InputPosition, ParserInput},
}, },
SmolStr, TextUnit,
}; };
use crate::SyntaxKind::{self, EOF, TOMBSTONE}; use crate::SyntaxKind::{self, EOF, TOMBSTONE};
@ -86,7 +86,9 @@ impl<'t> ParserImpl<'t> {
let c2 = self.inp.kind(self.pos + 1); let c2 = self.inp.kind(self.pos + 1);
let c3 = self.inp.kind(self.pos + 2); let c3 = self.inp.kind(self.pos + 2);
if self.inp.start(self.pos + 1) == self.inp.start(self.pos) + self.inp.len(self.pos) if self.inp.start(self.pos + 1) == self.inp.start(self.pos) + self.inp.len(self.pos)
&& self.inp.start(self.pos + 2) == self.inp.start(self.pos + 1) + self.inp.len(self.pos + 1){ && self.inp.start(self.pos + 2)
== self.inp.start(self.pos + 1) + self.inp.len(self.pos + 1)
{
Some((c1, c2, c3)) Some((c1, c2, c3))
} else { } else {
None None
@ -138,10 +140,7 @@ impl<'t> ParserImpl<'t> {
fn do_bump(&mut self, kind: SyntaxKind, n_raw_tokens: u8) { fn do_bump(&mut self, kind: SyntaxKind, n_raw_tokens: u8) {
self.pos += u32::from(n_raw_tokens); self.pos += u32::from(n_raw_tokens);
self.event(Event::Token { self.event(Event::Token { kind, n_raw_tokens });
kind,
n_raw_tokens,
});
} }
pub(super) fn error(&mut self, msg: String) { pub(super) fn error(&mut self, msg: String) {

@ -1,14 +1,11 @@
use crate::algo; use crate::algo;
use crate::grammar; use crate::grammar;
use crate::lexer::{tokenize, Token}; use crate::lexer::{tokenize, Token};
use crate::yellow::{self, GreenNode, SyntaxNodeRef, SyntaxError};
use crate::parser_impl;
use crate::parser_api::Parser; use crate::parser_api::Parser;
use crate::{ use crate::parser_impl;
TextUnit, TextRange,
SyntaxKind::*,
};
use crate::text_utils::replace_range; use crate::text_utils::replace_range;
use crate::yellow::{self, GreenNode, SyntaxError, SyntaxNodeRef};
use crate::{SyntaxKind::*, TextRange, TextUnit};
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct AtomEdit { pub struct AtomEdit {
@ -18,7 +15,10 @@ pub struct AtomEdit {
impl AtomEdit { impl AtomEdit {
pub fn replace(range: TextRange, replace_with: String) -> AtomEdit { pub fn replace(range: TextRange, replace_with: String) -> AtomEdit {
AtomEdit { delete: range, insert: replace_with } AtomEdit {
delete: range,
insert: replace_with,
}
} }
pub fn delete(range: TextRange) -> AtomEdit { pub fn delete(range: TextRange) -> AtomEdit {
@ -48,12 +48,7 @@ fn reparse_leaf<'node>(
) -> Option<(SyntaxNodeRef<'node>, GreenNode, Vec<SyntaxError>)> { ) -> Option<(SyntaxNodeRef<'node>, GreenNode, Vec<SyntaxError>)> {
let node = algo::find_covering_node(node, edit.delete); let node = algo::find_covering_node(node, edit.delete);
match node.kind() { match node.kind() {
| WHITESPACE WHITESPACE | COMMENT | DOC_COMMENT | IDENT | STRING | RAW_STRING => {
| COMMENT
| DOC_COMMENT
| IDENT
| STRING
| RAW_STRING => {
let text = get_text_after_edit(node, &edit); let text = get_text_after_edit(node, &edit);
let tokens = tokenize(&text); let tokens = tokenize(&text);
let token = match tokens[..] { let token = match tokens[..] {
@ -84,10 +79,7 @@ fn reparse_block<'node>(
return None; return None;
} }
let (green, new_errors) = let (green, new_errors) =
parser_impl::parse_with( parser_impl::parse_with(yellow::GreenBuilder::new(), &text, &tokens, reparser);
yellow::GreenBuilder::new(),
&text, &tokens, reparser,
);
Some((node, green, new_errors)) Some((node, green, new_errors))
} }
@ -101,9 +93,7 @@ fn get_text_after_edit(node: SyntaxNodeRef, edit: &AtomEdit) -> String {
fn is_contextual_kw(text: &str) -> bool { fn is_contextual_kw(text: &str) -> bool {
match text { match text {
| "auto" "auto" | "default" | "union" => true,
| "default"
| "union" => true,
_ => false, _ => false,
} }
} }
@ -113,7 +103,8 @@ fn find_reparsable_node<'node>(
range: TextRange, range: TextRange,
) -> Option<(SyntaxNodeRef<'node>, fn(&mut Parser))> { ) -> Option<(SyntaxNodeRef<'node>, fn(&mut Parser))> {
let node = algo::find_covering_node(node, range); let node = algo::find_covering_node(node, range);
return node.ancestors() return node
.ancestors()
.filter_map(|node| reparser(node).map(|r| (node, r))) .filter_map(|node| reparser(node).map(|r| (node, r)))
.next(); .next();
@ -145,17 +136,20 @@ fn find_reparsable_node<'node>(
fn is_balanced(tokens: &[Token]) -> bool { fn is_balanced(tokens: &[Token]) -> bool {
if tokens.len() == 0 if tokens.len() == 0
|| tokens.first().unwrap().kind != L_CURLY || tokens.first().unwrap().kind != L_CURLY
|| tokens.last().unwrap().kind != R_CURLY { || tokens.last().unwrap().kind != R_CURLY
{
return false; return false;
} }
let mut balance = 0usize; let mut balance = 0usize;
for t in tokens.iter() { for t in tokens.iter() {
match t.kind { match t.kind {
L_CURLY => balance += 1, L_CURLY => balance += 1,
R_CURLY => balance = match balance.checked_sub(1) { R_CURLY => {
balance = match balance.checked_sub(1) {
Some(b) => b, Some(b) => b,
None => return false, None => return false,
}, }
}
_ => (), _ => (),
} }
} }
@ -191,24 +185,14 @@ fn merge_errors(
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::{ use super::{
super::{ super::{test_utils::extract_range, text_utils::replace_range, utils::dump_tree, File},
File, reparse_block, reparse_leaf, AtomEdit, GreenNode, SyntaxError, SyntaxNodeRef,
test_utils::extract_range,
text_utils::replace_range,
utils::dump_tree,
},
reparse_leaf, reparse_block, AtomEdit, GreenNode, SyntaxError, SyntaxNodeRef,
}; };
fn do_check<F>( fn do_check<F>(before: &str, replace_with: &str, reparser: F)
before: &str, where
replace_with: &str, for<'a> F: Fn(SyntaxNodeRef<'a>, &AtomEdit)
reparser: F, -> Option<(SyntaxNodeRef<'a>, GreenNode, Vec<SyntaxError>)>,
) where
for<'a> F: Fn(
SyntaxNodeRef<'a>,
&AtomEdit,
) -> Option<(SyntaxNodeRef<'a>, GreenNode, Vec<SyntaxError>)>
{ {
let (range, before) = extract_range(before); let (range, before) = extract_range(before);
let after = replace_range(before.clone(), range, replace_with); let after = replace_range(before.clone(), range, replace_with);
@ -216,7 +200,10 @@ mod tests {
let fully_reparsed = File::parse(&after); let fully_reparsed = File::parse(&after);
let incrementally_reparsed = { let incrementally_reparsed = {
let f = File::parse(&before); let f = File::parse(&before);
let edit = AtomEdit { delete: range, insert: replace_with.to_string() }; let edit = AtomEdit {
delete: range,
insert: replace_with.to_string(),
};
let (node, green, new_errors) = let (node, green, new_errors) =
reparser(f.syntax(), &edit).expect("cannot incrementally reparse"); reparser(f.syntax(), &edit).expect("cannot incrementally reparse");
let green_root = node.replace_with(green); let green_root = node.replace_with(green);
@ -232,113 +219,183 @@ mod tests {
#[test] #[test]
fn reparse_block_tests() { fn reparse_block_tests() {
let do_check = |before, replace_to| let do_check = |before, replace_to| do_check(before, replace_to, reparse_block);
do_check(before, replace_to, reparse_block);
do_check(r" do_check(
r"
fn foo() { fn foo() {
let x = foo + <|>bar<|> let x = foo + <|>bar<|>
} }
", "baz"); ",
do_check(r" "baz",
);
do_check(
r"
fn foo() { fn foo() {
let x = foo<|> + bar<|> let x = foo<|> + bar<|>
} }
", "baz"); ",
do_check(r" "baz",
);
do_check(
r"
struct Foo { struct Foo {
f: foo<|><|> f: foo<|><|>
} }
", ",\n g: (),"); ",
do_check(r" ",\n g: (),",
);
do_check(
r"
fn foo { fn foo {
let; let;
1 + 1; 1 + 1;
<|>92<|>; <|>92<|>;
} }
", "62"); ",
do_check(r" "62",
);
do_check(
r"
mod foo { mod foo {
fn <|><|> fn <|><|>
} }
", "bar"); ",
do_check(r" "bar",
);
do_check(
r"
trait Foo { trait Foo {
type <|>Foo<|>; type <|>Foo<|>;
} }
", "Output"); ",
do_check(r" "Output",
);
do_check(
r"
impl IntoIterator<Item=i32> for Foo { impl IntoIterator<Item=i32> for Foo {
f<|><|> f<|><|>
} }
", "n next("); ",
do_check(r" "n next(",
);
do_check(
r"
use a::b::{foo,<|>,bar<|>}; use a::b::{foo,<|>,bar<|>};
", "baz"); ",
do_check(r" "baz",
);
do_check(
r"
pub enum A { pub enum A {
Foo<|><|> Foo<|><|>
} }
", "\nBar;\n"); ",
do_check(r" "\nBar;\n",
);
do_check(
r"
foo!{a, b<|><|> d} foo!{a, b<|><|> d}
", ", c[3]"); ",
do_check(r" ", c[3]",
);
do_check(
r"
fn foo() { fn foo() {
vec![<|><|>] vec![<|><|>]
} }
", "123"); ",
do_check(r" "123",
);
do_check(
r"
extern { extern {
fn<|>;<|> fn<|>;<|>
} }
", " exit(code: c_int)"); ",
" exit(code: c_int)",
);
} }
#[test] #[test]
fn reparse_leaf_tests() { fn reparse_leaf_tests() {
let do_check = |before, replace_to| let do_check = |before, replace_to| do_check(before, replace_to, reparse_leaf);
do_check(before, replace_to, reparse_leaf);
do_check(r"<|><|> do_check(
r"<|><|>
fn foo() -> i32 { 1 } fn foo() -> i32 { 1 }
", "\n\n\n \n"); ",
do_check(r" "\n\n\n \n",
);
do_check(
r"
fn foo() -> <|><|> {} fn foo() -> <|><|> {}
", " \n"); ",
do_check(r" " \n",
);
do_check(
r"
fn <|>foo<|>() -> i32 { 1 } fn <|>foo<|>() -> i32 { 1 }
", "bar"); ",
do_check(r" "bar",
);
do_check(
r"
fn foo<|><|>foo() { } fn foo<|><|>foo() { }
", "bar"); ",
do_check(r" "bar",
);
do_check(
r"
fn foo /* <|><|> */ () {} fn foo /* <|><|> */ () {}
", "some comment"); ",
do_check(r" "some comment",
);
do_check(
r"
fn baz <|><|> () {} fn baz <|><|> () {}
", " \t\t\n\n"); ",
do_check(r" " \t\t\n\n",
);
do_check(
r"
fn baz <|><|> () {} fn baz <|><|> () {}
", " \t\t\n\n"); ",
do_check(r" " \t\t\n\n",
);
do_check(
r"
/// foo <|><|>omment /// foo <|><|>omment
mod { } mod { }
", "c"); ",
do_check(r#" "c",
);
do_check(
r#"
fn -> &str { "Hello<|><|>" } fn -> &str { "Hello<|><|>" }
"#, ", world"); "#,
do_check(r#" ", world",
);
do_check(
r#"
fn -> &str { // "Hello<|><|>" fn -> &str { // "Hello<|><|>"
"#, ", world"); "#,
do_check(r##" ", world",
);
do_check(
r##"
fn -> &str { r#"Hello<|><|>"# fn -> &str { r#"Hello<|><|>"#
"##, ", world"); "##,
do_check(r" ", world",
);
do_check(
r"
#[derive(<|>Copy<|>)] #[derive(<|>Copy<|>)]
enum Foo { enum Foo {
} }
", "Clone"); ",
"Clone",
);
} }
} }
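
Every test above builds an `AtomEdit` and splices it into the old text before reparsing. A self-contained sketch of that splice, with `(usize, usize)` standing in for `TextRange` (an assumption; the real `replace_range` helper lives in `text_utils`):

```rust
struct AtomEdit {
    delete: (usize, usize), // byte range to remove
    insert: String,         // text to put in its place
}

fn replace_range(mut text: String, edit: &AtomEdit) -> String {
    let (start, end) = edit.delete;
    text.replace_range(start..end, &edit.insert);
    text
}

fn main() {
    let before = "let x = foo + bar".to_string();
    // Replace `bar` with `baz`, as in the first reparse_block test.
    let edit = AtomEdit {
        delete: (14, 17),
        insert: "baz".to_string(),
    };
    assert_eq!(replace_range(before, &edit), "let x = foo + baz");
}
```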

@ -1,7 +1,7 @@
mod generated; mod generated;
use std::fmt;
use crate::SyntaxKind::*; use crate::SyntaxKind::*;
use std::fmt;
pub use self::generated::SyntaxKind; pub use self::generated::SyntaxKind;

@ -1,8 +1,8 @@
use std::fmt::Write;
use crate::{ use crate::{
algo::walk::{walk, WalkEvent}, algo::walk::{walk, WalkEvent},
SyntaxKind, File, SyntaxNodeRef File, SyntaxKind, SyntaxNodeRef,
}; };
use std::fmt::Write;
/// Parse a file and create a string representation of the resulting parse tree. /// Parse a file and create a string representation of the resulting parse tree.
pub fn dump_tree(syntax: SyntaxNodeRef) -> String { pub fn dump_tree(syntax: SyntaxNodeRef) -> String {
@ -58,9 +58,7 @@ pub(crate) fn validate_block_structure(root: SyntaxNodeRef) {
let mut stack = Vec::new(); let mut stack = Vec::new();
for node in root.descendants() { for node in root.descendants() {
match node.kind() { match node.kind() {
SyntaxKind::L_CURLY => { SyntaxKind::L_CURLY => stack.push(node),
stack.push(node)
}
SyntaxKind::R_CURLY => { SyntaxKind::R_CURLY => {
if let Some(pair) = stack.pop() { if let Some(pair) = stack.pop() {
assert_eq!( assert_eq!(

@ -1,10 +1,9 @@
use rowan::GreenNodeBuilder;
use crate::{ use crate::{
TextUnit, SmolStr,
parser_impl::Sink, parser_impl::Sink,
yellow::{GreenNode, SyntaxError, RaTypes}, yellow::{GreenNode, RaTypes, SyntaxError},
SyntaxKind, SmolStr, SyntaxKind, TextUnit,
}; };
use rowan::GreenNodeBuilder;
pub(crate) struct GreenBuilder { pub(crate) struct GreenBuilder {
errors: Vec<SyntaxError>, errors: Vec<SyntaxError>,
@ -36,7 +35,10 @@ impl Sink for GreenBuilder {
} }
fn error(&mut self, message: String, offset: TextUnit) { fn error(&mut self, message: String, offset: TextUnit) {
let error = SyntaxError { msg: message, offset }; let error = SyntaxError {
msg: message,
offset,
};
self.errors.push(error) self.errors.push(error)
} }

@ -1,16 +1,16 @@
mod builder; mod builder;
mod syntax_text; mod syntax_text;
use self::syntax_text::SyntaxText;
use crate::{SmolStr, SyntaxKind, TextRange, TextUnit};
use rowan::Types;
use std::{ use std::{
fmt, fmt,
hash::{Hash, Hasher}, hash::{Hash, Hasher},
}; };
use rowan::Types;
use crate::{SyntaxKind, TextUnit, TextRange, SmolStr};
use self::syntax_text::SyntaxText;
pub use rowan::{TreeRoot};
pub(crate) use self::builder::GreenBuilder; pub(crate) use self::builder::GreenBuilder;
pub use rowan::TreeRoot;
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
pub enum RaTypes {} pub enum RaTypes {}
@ -31,9 +31,7 @@ pub struct SyntaxError {
} }
#[derive(Clone, Copy)] #[derive(Clone, Copy)]
pub struct SyntaxNode<R: TreeRoot<RaTypes> = OwnedRoot>( pub struct SyntaxNode<R: TreeRoot<RaTypes> = OwnedRoot>(::rowan::SyntaxNode<RaTypes, R>);
::rowan::SyntaxNode<RaTypes, R>,
);
pub type SyntaxNodeRef<'a> = SyntaxNode<RefRoot<'a>>; pub type SyntaxNodeRef<'a> = SyntaxNode<RefRoot<'a>>;
impl<R1, R2> PartialEq<SyntaxNode<R1>> for SyntaxNode<R2> impl<R1, R2> PartialEq<SyntaxNode<R1>> for SyntaxNode<R2>
@ -142,9 +140,7 @@ impl<R: TreeRoot<RaTypes>> fmt::Debug for SyntaxNode<R> {
} }
#[derive(Debug)] #[derive(Debug)]
pub struct SyntaxNodeChildren<R: TreeRoot<RaTypes>>( pub struct SyntaxNodeChildren<R: TreeRoot<RaTypes>>(::rowan::SyntaxNodeChildren<RaTypes, R>);
::rowan::SyntaxNodeChildren<RaTypes, R>
);
impl<R: TreeRoot<RaTypes>> Iterator for SyntaxNodeChildren<R> { impl<R: TreeRoot<RaTypes>> Iterator for SyntaxNodeChildren<R> {
type Item = SyntaxNode<R>; type Item = SyntaxNode<R>;
@ -154,7 +150,6 @@ impl<R: TreeRoot<RaTypes>> Iterator for SyntaxNodeChildren<R> {
} }
} }
fn has_short_text(kind: SyntaxKind) -> bool { fn has_short_text(kind: SyntaxKind) -> bool {
use crate::SyntaxKind::*; use crate::SyntaxKind::*;
match kind { match kind {

@ -1,10 +1,8 @@
use std::{ use std::{fmt, ops};
fmt, ops,
};
use crate::{ use crate::{
text_utils::{contains_offset_nonstrict, intersect},
SyntaxNodeRef, TextRange, TextUnit, SyntaxNodeRef, TextRange, TextUnit,
text_utils::{intersect, contains_offset_nonstrict},
}; };
#[derive(Clone)] #[derive(Clone)]
@ -17,14 +15,12 @@ impl<'a> SyntaxText<'a> {
pub(crate) fn new(node: SyntaxNodeRef<'a>) -> SyntaxText<'a> { pub(crate) fn new(node: SyntaxNodeRef<'a>) -> SyntaxText<'a> {
SyntaxText { SyntaxText {
node, node,
range: node.range() range: node.range(),
} }
} }
pub fn chunks(&self) -> impl Iterator<Item = &'a str> { pub fn chunks(&self) -> impl Iterator<Item = &'a str> {
let range = self.range; let range = self.range;
self.node self.node.descendants().filter_map(move |node| {
.descendants()
.filter_map(move |node| {
let text = node.leaf_text()?; let text = node.leaf_text()?;
let range = intersect(range, node.range())?; let range = intersect(range, node.range())?;
let range = range - node.range().start(); let range = range - node.range().start();
@ -55,11 +51,13 @@ impl<'a> SyntaxText<'a> {
self.range.len() self.range.len()
} }
pub fn slice(&self, range: impl SyntaxTextSlice) -> SyntaxText<'a> { pub fn slice(&self, range: impl SyntaxTextSlice) -> SyntaxText<'a> {
let range = range.restrict(self.range) let range = range.restrict(self.range).unwrap_or_else(|| {
.unwrap_or_else(|| {
panic!("invalid slice, range: {:?}, slice: {:?}", self.range, range) panic!("invalid slice, range: {:?}, slice: {:?}", self.range, range)
}); });
SyntaxText { node: self.node, range } SyntaxText {
node: self.node,
range,
}
} }
pub fn char_at(&self, offset: TextUnit) -> Option<char> { pub fn char_at(&self, offset: TextUnit) -> Option<char> {
let mut start: TextUnit = 0.into(); let mut start: TextUnit = 0.into();

@ -4,14 +4,14 @@ extern crate test_utils;
extern crate walkdir; extern crate walkdir;
use std::{ use std::{
fmt::Write,
fs, fs,
path::{Path, PathBuf}, path::{Path, PathBuf},
fmt::Write,
}; };
use ra_syntax::{ use ra_syntax::{
utils::{check_fuzz_invariants, dump_tree},
File, File,
utils::{dump_tree, check_fuzz_invariants},
}; };
#[test] #[test]
@ -37,7 +37,6 @@ fn parser_fuzz_tests() {
} }
} }
/// Read file and normalize newlines. /// Read file and normalize newlines.
/// ///
/// `rustc` seems to always normalize `\r\n` newlines to `\n`: /// `rustc` seems to always normalize `\r\n` newlines to `\n`:

@ -2,9 +2,9 @@ extern crate difference;
extern crate itertools; extern crate itertools;
extern crate text_unit; extern crate text_unit;
use std::fmt;
use itertools::Itertools; use itertools::Itertools;
use text_unit::{TextUnit, TextRange}; use std::fmt;
use text_unit::{TextRange, TextUnit};
pub use self::difference::Changeset as __Changeset; pub use self::difference::Changeset as __Changeset;

@ -1,17 +1,17 @@
extern crate itertools; extern crate itertools;
#[macro_use] #[macro_use]
extern crate failure; extern crate failure;
extern crate heck;
extern crate ron; extern crate ron;
extern crate tera; extern crate tera;
extern crate heck;
use heck::{CamelCase, ShoutySnakeCase, SnakeCase};
use itertools::Itertools;
use std::{ use std::{
collections::HashMap, collections::HashMap,
fs, fs,
path::{Path, PathBuf}, path::{Path, PathBuf},
}; };
use itertools::Itertools;
use heck::{CamelCase, ShoutySnakeCase, SnakeCase};
pub type Result<T> = ::std::result::Result<T, failure::Error>; pub type Result<T> = ::std::result::Result<T, failure::Error>;
@ -61,7 +61,6 @@ pub fn collect_tests(s: &str) -> Vec<(usize, Test)> {
res res
} }
pub fn update(path: &Path, contents: &str, verify: bool) -> Result<()> { pub fn update(path: &Path, contents: &str, verify: bool) -> Result<()> {
match fs::read_to_string(path) { match fs::read_to_string(path) {
Ok(ref old_contents) if old_contents == contents => { Ok(ref old_contents) if old_contents == contents => {
@ -116,5 +115,8 @@ pub fn render_template(template: &Path) -> Result<String> {
} }
pub fn project_root() -> PathBuf { pub fn project_root() -> PathBuf {
Path::new(&std::env::var("CARGO_MANIFEST_DIR").unwrap()).parent().unwrap().to_path_buf() Path::new(&std::env::var("CARGO_MANIFEST_DIR").unwrap())
.parent()
.unwrap()
.to_path_buf()
} }
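
`project_root` above climbs out of the `tools` crate with `parent`, and the task runner further down uses `ancestors().nth(2)` for the same trick. Since `ancestors()` yields the path itself first, `nth(2)` climbs exactly two levels; a quick demonstration (the directory layout is an assumed example):

```rust
use std::path::Path;

fn main() {
    let manifest_dir = "/work/repo/crates/tools";
    // ancestors(): "/work/repo/crates/tools", "/work/repo/crates",
    // "/work/repo", ... so nth(2) is the repository root.
    let project_dir = Path::new(manifest_dir).ancestors().nth(2).unwrap();
    assert_eq!(project_dir, Path::new("/work/repo"));
}
```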

@ -11,7 +11,10 @@ use std::{
path::{Path, PathBuf}, path::{Path, PathBuf},
process::Command, process::Command,
}; };
use tools::{AST, AST_TEMPLATE, Result, SYNTAX_KINDS, SYNTAX_KINDS_TEMPLATE, Test, collect_tests, render_template, update, project_root}; use tools::{
collect_tests, project_root, render_template, update, Result, Test, AST, AST_TEMPLATE,
SYNTAX_KINDS, SYNTAX_KINDS_TEMPLATE,
};
const GRAMMAR_DIR: &str = "./crates/ra_syntax/src/grammar"; const GRAMMAR_DIR: &str = "./crates/ra_syntax/src/grammar";
const INLINE_TESTS_DIR: &str = "./crates/ra_syntax/tests/data/parser/inline"; const INLINE_TESTS_DIR: &str = "./crates/ra_syntax/tests/data/parser/inline";
@ -40,18 +43,23 @@ fn main() -> Result<()> {
fn run_gen_command(name: &str, verify: bool) -> Result<()> { fn run_gen_command(name: &str, verify: bool) -> Result<()> {
match name { match name {
"gen-kinds" => { "gen-kinds" => {
update(&project_root().join(SYNTAX_KINDS), &render_template(&project_root().join(SYNTAX_KINDS_TEMPLATE))?, verify)?; update(
update(&project_root().join(AST), &render_template(&project_root().join(AST_TEMPLATE))?, verify)?; &project_root().join(SYNTAX_KINDS),
}, &render_template(&project_root().join(SYNTAX_KINDS_TEMPLATE))?,
"gen-tests" => { verify,
gen_tests(verify)? )?;
}, update(
&project_root().join(AST),
&render_template(&project_root().join(AST_TEMPLATE))?,
verify,
)?;
}
"gen-tests" => gen_tests(verify)?,
_ => unreachable!(), _ => unreachable!(),
} }
Ok(()) Ok(())
} }
fn gen_tests(verify: bool) -> Result<()> { fn gen_tests(verify: bool) -> Result<()> {
let tests = tests_from_dir(Path::new(GRAMMAR_DIR))?; let tests = tests_from_dir(Path::new(GRAMMAR_DIR))?;
@ -133,11 +141,20 @@ fn install_code_extension() -> Result<()> {
} else { } else {
run(r"npm install", "./editors/code")?; run(r"npm install", "./editors/code")?;
} }
run(r"node ./node_modules/vsce/out/vsce package", "./editors/code")?; run(
r"node ./node_modules/vsce/out/vsce package",
"./editors/code",
)?;
if cfg!(windows) { if cfg!(windows) {
run(r"cmd.exe /c code.cmd --install-extension ./ra-lsp-0.0.1.vsix", "./editors/code")?; run(
r"cmd.exe /c code.cmd --install-extension ./ra-lsp-0.0.1.vsix",
"./editors/code",
)?;
} else { } else {
run(r"code --install-extension ./ra-lsp-0.0.1.vsix", "./editors/code")?; run(
r"code --install-extension ./ra-lsp-0.0.1.vsix",
"./editors/code",
)?;
} }
Ok(()) Ok(())
} }
@ -145,7 +162,11 @@ fn install_code_extension() -> Result<()> {
fn run(cmdline: &'static str, dir: &str) -> Result<()> { fn run(cmdline: &'static str, dir: &str) -> Result<()> {
eprintln!("\nwill run: {}", cmdline); eprintln!("\nwill run: {}", cmdline);
let manifest_dir = env!("CARGO_MANIFEST_DIR"); let manifest_dir = env!("CARGO_MANIFEST_DIR");
let project_dir = Path::new(manifest_dir).ancestors().nth(2).unwrap().join(dir); let project_dir = Path::new(manifest_dir)
.ancestors()
.nth(2)
.unwrap()
.join(dir);
let mut args = cmdline.split_whitespace(); let mut args = cmdline.split_whitespace();
let exec = args.next().unwrap(); let exec = args.next().unwrap();
let status = Command::new(exec) let status = Command::new(exec)

@ -1,13 +1,23 @@
extern crate tools; extern crate tools;
use tools::{AST, AST_TEMPLATE, SYNTAX_KINDS, SYNTAX_KINDS_TEMPLATE, render_template, update, project_root}; use tools::{
project_root, render_template, update, AST, AST_TEMPLATE, SYNTAX_KINDS, SYNTAX_KINDS_TEMPLATE,
};
#[test] #[test]
fn verify_template_generation() { fn verify_template_generation() {
if let Err(error) = update(&project_root().join(SYNTAX_KINDS), &render_template(&project_root().join(SYNTAX_KINDS_TEMPLATE)).unwrap(), true) { if let Err(error) = update(
&project_root().join(SYNTAX_KINDS),
&render_template(&project_root().join(SYNTAX_KINDS_TEMPLATE)).unwrap(),
true,
) {
panic!("{}. Please update it by running `cargo gen-kinds`", error); panic!("{}. Please update it by running `cargo gen-kinds`", error);
} }
if let Err(error) = update(&project_root().join(AST), &render_template(&project_root().join(AST_TEMPLATE)).unwrap(), true) { if let Err(error) = update(
&project_root().join(AST),
&render_template(&project_root().join(AST_TEMPLATE)).unwrap(),
true,
) {
panic!("{}. Please update it by running `cargo gen-kinds`", error); panic!("{}. Please update it by running `cargo gen-kinds`", error);
} }
} }
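
Both tests above rely on the verify mode of `update`: with `verify == true` a content mismatch is an error, otherwise the file is rewritten. A sketch of that contract as suggested by the `update` hunk earlier (the error type and message are simplified assumptions):

```rust
use std::fs;
use std::path::Path;

fn update(path: &Path, contents: &str, verify: bool) -> Result<(), String> {
    match fs::read_to_string(path) {
        Ok(old) if old == contents => return Ok(()), // already up to date
        _ => {}
    }
    if verify {
        return Err(format!("`{}` is not up-to-date", path.display()));
    }
    fs::write(path, contents).map_err(|e| e.to_string())
}

fn main() {
    let path = std::env::temp_dir().join("generated.rs");
    update(&path, "// generated\n", false).unwrap(); // writes the file
    update(&path, "// generated\n", true).unwrap(); // verify passes
    assert!(update(&path, "// changed\n", true).is_err()); // verify fails
}
```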