Merge branch 'master' of https://github.com/rust-analyzer/rust-analyzer into feature/themes

Seivan Heidari 2019-11-20 03:56:43 +01:00
commit 1e2d090ab8
17 changed files with 358 additions and 27 deletions

View file

@@ -5,6 +5,7 @@ on:
branches:
- master
- staging
- trying
jobs:
rust:

Cargo.lock (generated)
View file

@@ -1180,7 +1180,7 @@ dependencies = [
"once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_parser 0.1.0",
"ra_text_edit 0.1.0",
"rowan 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
"rowan 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_lexer 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"smol_str 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1445,7 +1445,7 @@ dependencies = [
[[package]]
name = "rowan"
version = "0.6.3"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1993,7 +1993,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum relative-path 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bedde000f40f2921ce439ea165c9c53fd629bfa115140c72e22aceacb4a21954"
"checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e"
"checksum ron 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2ece421e0c4129b90e4a35b6f625e472e96c552136f5093a2f4fa2bbb75a62d5"
"checksum rowan 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "fc3a6fb2a35518af7cab43ec4e21ca82eb086a8b3bb1739e426dc3923d459607"
"checksum rowan 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d3a241900475bf2ba302061550ff50c82b45095ca95d23d1872345793fd42407"
"checksum rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "4c691c0e608126e00913e33f0ccf3727d5fc84573623b8d65b2df340b5201783"
"checksum rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7540fc8b0c49f096ee9c961cda096467dce8084bec6bdca2fc83895fd9b28cb8"
"checksum rustc_lexer 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c86aae0c77166108c01305ee1a36a1e77289d7dc6ca0a3cd91ff4992de2d16a5"

View file

@@ -140,7 +140,7 @@ impl Expansion {
exp_info.map_token_down(token)
}
fn file_id(&self) -> HirFileId {
pub fn file_id(&self) -> HirFileId {
self.macro_call_id.as_file(MacroFileKind::Items)
}
}

View file

@@ -172,12 +172,12 @@ impl_to_to_tokentrees! {
u32 => self { tt::Literal{text: self.to_string().into()} };
usize => self { tt::Literal{text: self.to_string().into()}};
i32 => self { tt::Literal{text: self.to_string().into()}};
&str => self { tt::Literal{text: self.to_string().into()}};
String => self { tt::Literal{text: self.into()}};
tt::Leaf => self { self };
tt::Literal => self { self };
tt::Ident => self { self };
tt::Punct => self { self }
tt::Punct => self { self };
&str => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into()}};
String => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into()}}
}
#[cfg(test)]
@@ -200,7 +200,7 @@ mod tests {
let a = 20;
assert_eq!(quote!(#a).to_string(), "20");
let s: String = "hello".into();
assert_eq!(quote!(#s).to_string(), "hello");
assert_eq!(quote!(#s).to_string(), "\"hello\"");
}
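The test expectation changes from `hello` to `"\"hello\""` because `&str` and `String` now interpolate as escaped string literals rather than bare text. A minimal sketch of that escaping step using only the standard library (this is not rust-analyzer's actual `quote!` machinery, just the `escape_default` + `{:?}` combination the new arms rely on):

```rust
fn main() {
    // Escape the contents first, then let Debug formatting (`{:?}`)
    // add the surrounding quotes that make it a string literal.
    let s: String = "hello".into();
    let literal_text = format!("{:?}", s.escape_default().to_string());
    assert_eq!(literal_text, "\"hello\"");
}
```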
fn mk_ident(name: &str) -> tt::Ident {

View file

@@ -0,0 +1,178 @@
//! This module implements the "expand macro" functionality in the IDE.
use crate::{db::RootDatabase, FilePosition};
use hir::db::AstDatabase;
use ra_db::SourceDatabase;
use rustc_hash::FxHashMap;
use ra_syntax::{
algo::{find_node_at_offset, replace_descendants},
ast::{self},
AstNode, NodeOrToken, SyntaxKind, SyntaxNode, WalkEvent, T,
};
pub struct ExpandedMacro {
pub name: String,
pub expansion: String,
}
pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<ExpandedMacro> {
let parse = db.parse(position.file_id);
let file = parse.tree();
let name_ref = find_node_at_offset::<ast::NameRef>(file.syntax(), position.offset)?;
let mac = name_ref.syntax().ancestors().find_map(ast::MacroCall::cast)?;
let source = hir::Source::new(position.file_id.into(), mac.syntax());
let expanded = expand_macro_recur(db, source, &mac)?;
// FIXME:
// Macro expansion may lose all whitespace information.
// We hope someday we can use ra_fmt for that.
let expansion = insert_whitespaces(expanded);
Some(ExpandedMacro { name: name_ref.text().to_string(), expansion })
}
fn expand_macro_recur(
db: &RootDatabase,
source: hir::Source<&SyntaxNode>,
macro_call: &ast::MacroCall,
) -> Option<SyntaxNode> {
let analyzer = hir::SourceAnalyzer::new(db, source, None);
let expansion = analyzer.expand(db, &macro_call)?;
let macro_file_id = expansion.file_id();
let expanded: SyntaxNode = db.parse_or_expand(macro_file_id)?;
let children = expanded.descendants().filter_map(ast::MacroCall::cast);
let mut replaces = FxHashMap::default();
for child in children.into_iter() {
let source = hir::Source::new(macro_file_id, source.ast);
let new_node = expand_macro_recur(db, source, &child)?;
replaces.insert(child.syntax().clone().into(), new_node.into());
}
Some(replace_descendants(&expanded, &replaces))
}
// FIXME: It would also be cool to share logic here and in the mbe tests,
// which are pretty unreadable at the moment.
fn insert_whitespaces(syn: SyntaxNode) -> String {
use SyntaxKind::*;
let mut res = String::new();
let mut token_iter = syn
.preorder_with_tokens()
.filter_map(|event| {
if let WalkEvent::Enter(NodeOrToken::Token(token)) = event {
Some(token)
} else {
None
}
})
.peekable();
let mut indent = 0;
let mut last: Option<SyntaxKind> = None;
while let Some(token) = token_iter.next() {
let mut is_next = |f: fn(SyntaxKind) -> bool, default| -> bool {
token_iter.peek().map(|it| f(it.kind())).unwrap_or(default)
};
let is_last = |f: fn(SyntaxKind) -> bool, default| -> bool {
last.map(|it| f(it)).unwrap_or(default)
};
res += &match token.kind() {
k @ _
if (k.is_keyword() || k.is_literal() || k == IDENT)
&& is_next(|it| !it.is_punct(), true) =>
{
token.text().to_string() + " "
}
L_CURLY if is_next(|it| it != R_CURLY, true) => {
indent += 1;
format!(" {{\n{}", " ".repeat(indent))
}
R_CURLY if is_last(|it| it != L_CURLY, true) => {
indent = indent.checked_sub(1).unwrap_or(0);
format!("\n}}{}", " ".repeat(indent))
}
R_CURLY => {
indent = indent.checked_sub(1).unwrap_or(0);
format!("}}\n{}", " ".repeat(indent))
}
T![;] => format!(";\n{}", " ".repeat(indent)),
T![->] => " -> ".to_string(),
T![=] => " = ".to_string(),
T![=>] => " => ".to_string(),
_ => token.text().to_string(),
};
last = Some(token.kind());
}
res
}
#[cfg(test)]
mod tests {
use super::*;
use crate::mock_analysis::analysis_and_position;
use insta::assert_snapshot;
fn check_expand_macro(fixture: &str) -> ExpandedMacro {
let (analysis, pos) = analysis_and_position(fixture);
analysis.expand_macro(pos).unwrap().unwrap()
}
#[test]
fn macro_expand_recursive_expansion() {
let res = check_expand_macro(
r#"
//- /lib.rs
macro_rules! bar {
() => { fn b() {} }
}
macro_rules! foo {
() => { bar!(); }
}
macro_rules! baz {
() => { foo!(); }
}
f<|>oo!();
"#,
);
assert_eq!(res.name, "foo");
assert_snapshot!(res.expansion, @r###"
fn b(){}
"###);
}
#[test]
fn macro_expand_multiple_lines() {
let res = check_expand_macro(
r#"
//- /lib.rs
macro_rules! foo {
() => {
fn some_thing() -> u32 {
let a = 0;
a + 10
}
}
}
f<|>oo!();
"#,
);
assert_eq!(res.name, "foo");
assert_snapshot!(res.expansion, @r###"
fn some_thing() -> u32 {
let a = 0;
a+10
}
"###);
}
}

View file

@@ -42,6 +42,7 @@ mod display;
mod inlay_hints;
mod wasm_shims;
mod expand;
mod expand_macro;
#[cfg(test)]
mod marks;
@@ -65,6 +66,7 @@ pub use crate::{
completion::{CompletionItem, CompletionItemKind, InsertTextFormat},
diagnostics::Severity,
display::{file_structure, FunctionSignature, NavigationTarget, StructureNode},
expand_macro::ExpandedMacro,
feature_flags::FeatureFlags,
folding_ranges::{Fold, FoldKind},
hover::HoverResult,
@@ -296,6 +298,10 @@ impl Analysis {
self.with_db(|db| syntax_tree::syntax_tree(&db, file_id, text_range))
}
pub fn expand_macro(&self, position: FilePosition) -> Cancelable<Option<ExpandedMacro>> {
self.with_db(|db| expand_macro::expand_macro(db, position))
}
/// Returns an edit to remove all newlines in the range, cleaning up minor
/// stuff like trailing commas.
pub fn join_lines(&self, frange: FileRange) -> Cancelable<SourceChange> {

View file

@@ -436,6 +436,7 @@ fn on_request(
})?
.on::<req::AnalyzerStatus>(handlers::handle_analyzer_status)?
.on::<req::SyntaxTree>(handlers::handle_syntax_tree)?
.on::<req::ExpandMacro>(handlers::handle_expand_macro)?
.on::<req::OnTypeFormatting>(handlers::handle_on_type_formatting)?
.on::<req::DocumentSymbolRequest>(handlers::handle_document_symbol)?
.on::<req::WorkspaceSymbol>(handlers::handle_workspace_symbol)?

View file

@@ -47,6 +47,24 @@ pub fn handle_syntax_tree(world: WorldSnapshot, params: req::SyntaxTreeParams) -
Ok(res)
}
pub fn handle_expand_macro(
world: WorldSnapshot,
params: req::ExpandMacroParams,
) -> Result<Option<req::ExpandedMacro>> {
let _p = profile("handle_expand_macro");
let file_id = params.text_document.try_conv_with(&world)?;
let line_index = world.analysis().file_line_index(file_id)?;
let offset = params.position.map(|p| p.conv_with(&line_index));
match offset {
None => Ok(None),
Some(offset) => {
let res = world.analysis().expand_macro(FilePosition { file_id, offset })?;
Ok(res.map(|it| req::ExpandedMacro { name: it.name, expansion: it.expansion }))
}
}
}
pub fn handle_selection_range(
world: WorldSnapshot,
params: req::SelectionRangeParams,

View file

@@ -45,6 +45,28 @@ pub struct SyntaxTreeParams {
pub range: Option<Range>,
}
#[derive(Serialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ExpandedMacro {
pub name: String,
pub expansion: String,
}
pub enum ExpandMacro {}
impl Request for ExpandMacro {
type Params = ExpandMacroParams;
type Result = Option<ExpandedMacro>;
const METHOD: &'static str = "rust-analyzer/expandMacro";
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ExpandMacroParams {
pub text_document: TextDocumentIdentifier,
pub position: Option<Position>,
}
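For illustration only, a self-contained sketch of the JSON these definitions put on the wire for the `rust-analyzer/expandMacro` method. It assumes `serde` (with derive) and `serde_json` are available; the struct bodies are simplified stand-ins (`serde_json::Value` fields instead of the real `TextDocumentIdentifier` and `Position` types), and the URI and coordinates are made up.

```rust
use serde::Serialize;
use serde_json::json;

// Simplified stand-ins mirroring the shapes defined above.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct ExpandMacroParams {
    text_document: serde_json::Value,
    position: Option<serde_json::Value>,
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct ExpandedMacro {
    name: String,
    expansion: String,
}

fn main() {
    // Client -> server request params; note the camelCase "textDocument" key
    // produced by the serde rename.
    let params = ExpandMacroParams {
        text_document: json!({ "uri": "file:///example/src/lib.rs" }),
        position: Some(json!({ "line": 10, "character": 3 })),
    };
    println!("{}", serde_json::to_string_pretty(&params).unwrap());

    // Server -> client result (or `null` when the cursor is not on a macro call).
    let result = ExpandedMacro { name: "foo".into(), expansion: "fn b(){}\n".into() };
    println!("{}", serde_json::to_string_pretty(&result).unwrap());
}
```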
pub enum SelectionRangeRequest {}
impl Request for SelectionRangeRequest {

View file

@@ -12,7 +12,7 @@ doctest = false
[dependencies]
itertools = "0.8.0"
rowan = "0.6.1"
rowan = "0.7.0"
rustc_lexer = "0.1.0"
rustc-hash = "1.0.1"
arrayvec = "0.5.1"

View file

@@ -134,23 +134,19 @@ pub fn insert_children(
to_green_element(element)
});
let old_children = parent.green().children();
let mut old_children = parent.green().children().map(|it| match it {
NodeOrToken::Token(it) => NodeOrToken::Token(it.clone()),
NodeOrToken::Node(it) => NodeOrToken::Node(it.clone()),
});
let new_children = match &position {
InsertPosition::First => {
to_insert.chain(old_children.iter().cloned()).collect::<Box<[_]>>()
}
InsertPosition::Last => old_children.iter().cloned().chain(to_insert).collect::<Box<[_]>>(),
InsertPosition::First => to_insert.chain(old_children).collect::<Box<[_]>>(),
InsertPosition::Last => old_children.chain(to_insert).collect::<Box<[_]>>(),
InsertPosition::Before(anchor) | InsertPosition::After(anchor) => {
let take_anchor = if let InsertPosition::After(_) = position { 1 } else { 0 };
let split_at = position_of_child(parent, anchor.clone()) + take_anchor;
let (before, after) = old_children.split_at(split_at);
before
.iter()
.cloned()
.chain(to_insert)
.chain(after.iter().cloned())
.collect::<Box<[_]>>()
let before = old_children.by_ref().take(split_at).collect::<Vec<_>>();
before.into_iter().chain(to_insert).chain(old_children).collect::<Box<[_]>>()
}
};
@@ -168,13 +164,16 @@ pub fn replace_children(
) -> SyntaxNode {
let start = position_of_child(parent, to_delete.start().clone());
let end = position_of_child(parent, to_delete.end().clone());
let old_children = parent.green().children();
let mut old_children = parent.green().children().map(|it| match it {
NodeOrToken::Token(it) => NodeOrToken::Token(it.clone()),
NodeOrToken::Node(it) => NodeOrToken::Node(it.clone()),
});
let new_children = old_children[..start]
.iter()
.cloned()
let before = old_children.by_ref().take(start).collect::<Vec<_>>();
let new_children = before
.into_iter()
.chain(to_insert.map(to_green_element))
.chain(old_children[end + 1..].iter().cloned())
.chain(old_children.skip(end + 1 - start))
.collect::<Box<[_]>>();
with_children(parent, new_children)
}
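The rewrite above adapts to the rowan 0.7 bump earlier in this commit, where `green().children()` yields an iterator rather than a slice (hence also `.first()`/`.as_token()` becoming `.next()`/`.into_token()` in the next file), so the old `split_at`/range indexing is replaced with `by_ref().take(..)` plus `skip(..)`. A standalone sketch of that splitting pattern, with plain integers standing in for green elements:

```rust
fn main() {
    // Stand-in for a children iterator that can only be walked once.
    let mut children = (0..6).map(|i| i * 10); // 0, 10, 20, 30, 40, 50

    let start = 2; // first index to replace
    let end = 3;   // last index to replace

    // Take the prefix without giving up ownership of the iterator...
    let before: Vec<_> = children.by_ref().take(start).collect();
    // ...then resume from where `take` stopped, skipping the replaced range.
    let replacement = [100, 101];
    let rebuilt: Vec<_> = before
        .into_iter()
        .chain(replacement)
        .chain(children.skip(end + 1 - start))
        .collect();

    // Elements at indices 2..=3 of the original sequence were swapped out.
    assert_eq!(rebuilt, vec![0, 10, 100, 101, 40, 50]);
}
```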

View file

@@ -32,7 +32,7 @@ impl ast::NameRef {
}
fn text_of_first_token(node: &SyntaxNode) -> &SmolStr {
node.green().children().first().and_then(|it| it.as_token()).unwrap().text()
node.green().children().next().and_then(|it| it.into_token()).unwrap().text()
}
impl ast::Attr {

View file

@@ -81,6 +81,10 @@ Join selected lines into one, smartly fixing up whitespace and trailing commas.
Shows the parse tree of the current file. It exists mostly for debugging
rust-analyzer itself.
#### Expand Macro Recursively
Shows the full, recursive macro expansion of the macro at the current cursor position, as in the example below.
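For example (adapted from the tests added in this commit), invoking the command with the cursor on `foo` expands both nested macros:

```rust
macro_rules! bar { () => { fn b() {} } }
macro_rules! foo { () => { bar!(); } }

foo!(); // "Expand Macro Recursively" here displays: fn b(){}
```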
#### Status
Shows internal statistics about the memory usage of rust-analyzer.

View file

@@ -91,6 +91,11 @@
"title": "Show Syntax Tree",
"category": "Rust Analyzer"
},
{
"command": "rust-analyzer.expandMacro",
"title": "Expand macro recursively",
"category": "Rust Analyzer"
},
{
"command": "rust-analyzer.matchingBrace",
"title": "Find matching brace",

View file

@@ -0,0 +1,83 @@
import * as vscode from 'vscode';
import { Position, TextDocumentIdentifier } from 'vscode-languageclient';
import { Server } from '../server';
export const expandMacroUri = vscode.Uri.parse(
'rust-analyzer://expandMacro/[EXPANSION].rs'
);
export class ExpandMacroContentProvider
implements vscode.TextDocumentContentProvider {
public eventEmitter = new vscode.EventEmitter<vscode.Uri>();
public provideTextDocumentContent(
uri: vscode.Uri
): vscode.ProviderResult<string> {
async function handle() {
const editor = vscode.window.activeTextEditor;
if (editor == null) {
return '';
}
const position = editor.selection.active;
const request: MacroExpandParams = {
textDocument: { uri: editor.document.uri.toString() },
position
};
const expanded = await Server.client.sendRequest<ExpandedMacro>(
'rust-analyzer/expandMacro',
request
);
if (expanded == null) {
return 'Not available';
}
return code_format(expanded);
}
return handle();
}
get onDidChange(): vscode.Event<vscode.Uri> {
return this.eventEmitter.event;
}
}
// Opens the virtual file that will show the recursive macro expansion
//
// The contents of the file come from the `TextDocumentContentProvider`
export function createHandle(provider: ExpandMacroContentProvider) {
return async () => {
const uri = expandMacroUri;
const document = await vscode.workspace.openTextDocument(uri);
provider.eventEmitter.fire(uri);
return vscode.window.showTextDocument(
document,
vscode.ViewColumn.Two,
true
);
};
}
interface MacroExpandParams {
textDocument: TextDocumentIdentifier;
position: Position;
}
interface ExpandedMacro {
name: string;
expansion: string;
}
function code_format(expanded: ExpandedMacro): string {
let result = `// Recursive expansion of ${expanded.name}! macro\n`;
result += '// ' + '='.repeat(result.length - 3);
result += '\n\n';
result += expanded.expansion;
return result;
}

View file

@@ -1,5 +1,6 @@
import * as analyzerStatus from './analyzer_status';
import * as applySourceChange from './apply_source_change';
import * as expandMacro from './expand_macro';
import * as inlayHints from './inlay_hints';
import * as joinLines from './join_lines';
import * as matchingBrace from './matching_brace';
@@ -11,6 +12,7 @@ import * as syntaxTree from './syntaxTree';
export {
analyzerStatus,
applySourceChange,
expandMacro,
joinLines,
matchingBrace,
parentModule,

View file

@@ -3,6 +3,7 @@ import * as lc from 'vscode-languageclient';
import * as commands from './commands';
import { CargoWatchProvider } from './commands/cargo_watch';
import { ExpandMacroContentProvider } from './commands/expand_macro';
import { HintsUpdater } from './commands/inlay_hints';
import {
interactivelyStartCargoWatch,
@@ -97,6 +98,7 @@ export function activate(context: vscode.ExtensionContext) {
]
];
const syntaxTreeContentProvider = new SyntaxTreeContentProvider();
const expandMacroContentProvider = new ExpandMacroContentProvider();
// The events below are plain old javascript events, triggered and handled by vscode
vscode.window.onDidChangeActiveTextEditor(
@@ -109,11 +111,21 @@ export function activate(context: vscode.ExtensionContext) {
syntaxTreeContentProvider
)
);
disposeOnDeactivation(
vscode.workspace.registerTextDocumentContentProvider(
'rust-analyzer',
expandMacroContentProvider
)
);
registerCommand(
'rust-analyzer.syntaxTree',
commands.syntaxTree.createHandle(syntaxTreeContentProvider)
);
registerCommand(
'rust-analyzer.expandMacro',
commands.expandMacro.createHandle(expandMacroContentProvider)
);
vscode.workspace.onDidChangeTextDocument(
events.changeTextDocument.createHandler(syntaxTreeContentProvider),