always use \n newlines

This commit is contained in:
Aleksey Kladov 2019-09-05 17:50:08 +03:00
parent 36d7b75c95
commit 8b8a11ded7
2 changed files with 358 additions and 357 deletions

View file

@ -1,357 +1,357 @@
use crate::db::RootDatabase;
use ra_db::SourceDatabase;
use ra_syntax::{
    algo, AstNode, NodeOrToken, SourceFile,
    SyntaxKind::{RAW_STRING, STRING},
    SyntaxToken, TextRange,
};

pub use ra_db::FileId;
pub(crate) fn syntax_tree( pub(crate) fn syntax_tree(
db: &RootDatabase, db: &RootDatabase,
file_id: FileId, file_id: FileId,
text_range: Option<TextRange>, text_range: Option<TextRange>,
) -> String { ) -> String {
let parse = db.parse(file_id); let parse = db.parse(file_id);
if let Some(text_range) = text_range { if let Some(text_range) = text_range {
let node = match algo::find_covering_element(parse.tree().syntax(), text_range) { let node = match algo::find_covering_element(parse.tree().syntax(), text_range) {
NodeOrToken::Node(node) => node, NodeOrToken::Node(node) => node,
NodeOrToken::Token(token) => { NodeOrToken::Token(token) => {
if let Some(tree) = syntax_tree_for_string(&token, text_range) { if let Some(tree) = syntax_tree_for_string(&token, text_range) {
return tree; return tree;
} }
token.parent() token.parent()
} }
}; };
format!("{:#?}", node) format!("{:#?}", node)
} else { } else {
format!("{:#?}", parse.tree().syntax()) format!("{:#?}", parse.tree().syntax())
} }
} }
/// Attempts parsing the selected contents of a string literal /// Attempts parsing the selected contents of a string literal
/// as rust syntax and returns its syntax tree /// as rust syntax and returns its syntax tree
fn syntax_tree_for_string(token: &SyntaxToken, text_range: TextRange) -> Option<String> { fn syntax_tree_for_string(token: &SyntaxToken, text_range: TextRange) -> Option<String> {
// When the range is inside a string // When the range is inside a string
// we'll attempt parsing it as rust syntax // we'll attempt parsing it as rust syntax
// to provide the syntax tree of the contents of the string // to provide the syntax tree of the contents of the string
match token.kind() { match token.kind() {
STRING | RAW_STRING => syntax_tree_for_token(token, text_range), STRING | RAW_STRING => syntax_tree_for_token(token, text_range),
_ => None, _ => None,
} }
} }
fn syntax_tree_for_token(node: &SyntaxToken, text_range: TextRange) -> Option<String> { fn syntax_tree_for_token(node: &SyntaxToken, text_range: TextRange) -> Option<String> {
// Range of the full node // Range of the full node
let node_range = node.text_range(); let node_range = node.text_range();
let text = node.text().to_string(); let text = node.text().to_string();
// We start at some point inside the node // We start at some point inside the node
// Either we have selected the whole string // Either we have selected the whole string
// or our selection is inside it // or our selection is inside it
let start = text_range.start() - node_range.start(); let start = text_range.start() - node_range.start();
// how many characters we have selected // how many characters we have selected
let len = text_range.len().to_usize(); let len = text_range.len().to_usize();
let node_len = node_range.len().to_usize(); let node_len = node_range.len().to_usize();
let start = start.to_usize(); let start = start.to_usize();
// We want to cap our length // We want to cap our length
let len = len.min(node_len); let len = len.min(node_len);
// Ensure our slice is inside the actual string // Ensure our slice is inside the actual string
let end = if start + len < text.len() { start + len } else { text.len() - start }; let end = if start + len < text.len() { start + len } else { text.len() - start };
let text = &text[start..end]; let text = &text[start..end];
// Remove possible extra string quotes from the start // Remove possible extra string quotes from the start
// and the end of the string // and the end of the string
let text = text let text = text
.trim_start_matches('r') .trim_start_matches('r')
.trim_start_matches('#') .trim_start_matches('#')
.trim_start_matches('"') .trim_start_matches('"')
.trim_end_matches('#') .trim_end_matches('#')
.trim_end_matches('"') .trim_end_matches('"')
.trim() .trim()
// Remove custom markers // Remove custom markers
.replace("<|>", ""); .replace("<|>", "");
let parsed = SourceFile::parse(&text); let parsed = SourceFile::parse(&text);
// If the "file" parsed without errors, // If the "file" parsed without errors,
// return its syntax // return its syntax
if parsed.errors().is_empty() { if parsed.errors().is_empty() {
return Some(format!("{:#?}", parsed.tree().syntax())); return Some(format!("{:#?}", parsed.tree().syntax()));
} }
None None
} }
#[cfg(test)]
mod tests {
    use test_utils::assert_eq_text;

    use crate::mock_analysis::{single_file, single_file_with_range};

    #[test]
    fn test_syntax_tree_without_range() {
        // Basic syntax
        let (analysis, file_id) = single_file(r#"fn foo() {}"#);
        let syn = analysis.syntax_tree(file_id, None).unwrap();

        assert_eq_text!(
            syn.trim(),
            r#"
SOURCE_FILE@[0; 11)
  FN_DEF@[0; 11)
    FN_KW@[0; 2) "fn"
    WHITESPACE@[2; 3) " "
    NAME@[3; 6)
      IDENT@[3; 6) "foo"
    PARAM_LIST@[6; 8)
      L_PAREN@[6; 7) "("
      R_PAREN@[7; 8) ")"
    WHITESPACE@[8; 9) " "
    BLOCK_EXPR@[9; 11)
      BLOCK@[9; 11)
        L_CURLY@[9; 10) "{"
        R_CURLY@[10; 11) "}"
"#
            .trim()
        );

        let (analysis, file_id) = single_file(
            r#"
fn test() {
    assert!("
    fn foo() {
    }
    ", "");
}"#
            .trim(),
        );
        let syn = analysis.syntax_tree(file_id, None).unwrap();

        assert_eq_text!(
            syn.trim(),
            r#"
SOURCE_FILE@[0; 60)
  FN_DEF@[0; 60)
    FN_KW@[0; 2) "fn"
    WHITESPACE@[2; 3) " "
    NAME@[3; 7)
      IDENT@[3; 7) "test"
    PARAM_LIST@[7; 9)
      L_PAREN@[7; 8) "("
      R_PAREN@[8; 9) ")"
    WHITESPACE@[9; 10) " "
    BLOCK_EXPR@[10; 60)
      BLOCK@[10; 60)
        L_CURLY@[10; 11) "{"
        WHITESPACE@[11; 16) "\n    "
        EXPR_STMT@[16; 58)
          MACRO_CALL@[16; 57)
            PATH@[16; 22)
              PATH_SEGMENT@[16; 22)
                NAME_REF@[16; 22)
                  IDENT@[16; 22) "assert"
            EXCL@[22; 23) "!"
            TOKEN_TREE@[23; 57)
              L_PAREN@[23; 24) "("
              STRING@[24; 52) "\"\n    fn foo() {\n ..."
              COMMA@[52; 53) ","
              WHITESPACE@[53; 54) " "
              STRING@[54; 56) "\"\""
              R_PAREN@[56; 57) ")"
          SEMI@[57; 58) ";"
        WHITESPACE@[58; 59) "\n"
        R_CURLY@[59; 60) "}"
"#
            .trim()
        );
    }

    #[test]
    fn test_syntax_tree_with_range() {
        let (analysis, range) = single_file_with_range(r#"<|>fn foo() {}<|>"#.trim());
        let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap();

        assert_eq_text!(
            syn.trim(),
            r#"
FN_DEF@[0; 11)
  FN_KW@[0; 2) "fn"
  WHITESPACE@[2; 3) " "
  NAME@[3; 6)
    IDENT@[3; 6) "foo"
  PARAM_LIST@[6; 8)
    L_PAREN@[6; 7) "("
    R_PAREN@[7; 8) ")"
  WHITESPACE@[8; 9) " "
  BLOCK_EXPR@[9; 11)
    BLOCK@[9; 11)
      L_CURLY@[9; 10) "{"
      R_CURLY@[10; 11) "}"
"#
            .trim()
        );

        let (analysis, range) = single_file_with_range(
            r#"fn test() {
    <|>assert!("
    fn foo() {
    }
    ", "");<|>
}"#
            .trim(),
        );
        let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap();

        assert_eq_text!(
            syn.trim(),
            r#"
EXPR_STMT@[16; 58)
  MACRO_CALL@[16; 57)
    PATH@[16; 22)
      PATH_SEGMENT@[16; 22)
        NAME_REF@[16; 22)
          IDENT@[16; 22) "assert"
    EXCL@[22; 23) "!"
    TOKEN_TREE@[23; 57)
      L_PAREN@[23; 24) "("
      STRING@[24; 52) "\"\n    fn foo() {\n ..."
      COMMA@[52; 53) ","
      WHITESPACE@[53; 54) " "
      STRING@[54; 56) "\"\""
      R_PAREN@[56; 57) ")"
  SEMI@[57; 58) ";"
"#
            .trim()
        );
    }

    #[test]
    fn test_syntax_tree_inside_string() {
        let (analysis, range) = single_file_with_range(
            r#"fn test() {
    assert!("
<|>fn foo() {
}<|>
fn bar() {
}
", "");
}"#
            .trim(),
        );
        let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap();
        assert_eq_text!(
            syn.trim(),
            r#"
SOURCE_FILE@[0; 12)
  FN_DEF@[0; 12)
    FN_KW@[0; 2) "fn"
    WHITESPACE@[2; 3) " "
    NAME@[3; 6)
      IDENT@[3; 6) "foo"
    PARAM_LIST@[6; 8)
      L_PAREN@[6; 7) "("
      R_PAREN@[7; 8) ")"
    WHITESPACE@[8; 9) " "
    BLOCK_EXPR@[9; 12)
      BLOCK@[9; 12)
        L_CURLY@[9; 10) "{"
        WHITESPACE@[10; 11) "\n"
        R_CURLY@[11; 12) "}"
"#
            .trim()
        );

        // With a raw string
        let (analysis, range) = single_file_with_range(
            r###"fn test() {
    assert!(r#"
<|>fn foo() {
}<|>
fn bar() {
}
"#, "");
}"###
            .trim(),
        );
        let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap();
        assert_eq_text!(
            syn.trim(),
            r#"
SOURCE_FILE@[0; 12)
  FN_DEF@[0; 12)
    FN_KW@[0; 2) "fn"
    WHITESPACE@[2; 3) " "
    NAME@[3; 6)
      IDENT@[3; 6) "foo"
    PARAM_LIST@[6; 8)
      L_PAREN@[6; 7) "("
      R_PAREN@[7; 8) ")"
    WHITESPACE@[8; 9) " "
    BLOCK_EXPR@[9; 12)
      BLOCK@[9; 12)
        L_CURLY@[9; 10) "{"
        WHITESPACE@[10; 11) "\n"
        R_CURLY@[11; 12) "}"
"#
            .trim()
        );

        // With a raw string
        let (analysis, range) = single_file_with_range(
            r###"fn test() {
    assert!(r<|>#"
fn foo() {
}
fn bar() {
}"<|>#, "");
}"###
            .trim(),
        );
        let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap();
        assert_eq_text!(
            syn.trim(),
            r#"
SOURCE_FILE@[0; 25)
  FN_DEF@[0; 12)
    FN_KW@[0; 2) "fn"
    WHITESPACE@[2; 3) " "
    NAME@[3; 6)
      IDENT@[3; 6) "foo"
    PARAM_LIST@[6; 8)
      L_PAREN@[6; 7) "("
      R_PAREN@[7; 8) ")"
    WHITESPACE@[8; 9) " "
    BLOCK_EXPR@[9; 12)
      BLOCK@[9; 12)
        L_CURLY@[9; 10) "{"
        WHITESPACE@[10; 11) "\n"
        R_CURLY@[11; 12) "}"
  WHITESPACE@[12; 13) "\n"
  FN_DEF@[13; 25)
    FN_KW@[13; 15) "fn"
    WHITESPACE@[15; 16) " "
    NAME@[16; 19)
      IDENT@[16; 19) "bar"
    PARAM_LIST@[19; 21)
      L_PAREN@[19; 20) "("
      R_PAREN@[20; 21) ")"
    WHITESPACE@[21; 22) " "
    BLOCK_EXPR@[22; 25)
      BLOCK@[22; 25)
        L_CURLY@[22; 23) "{"
        WHITESPACE@[23; 24) "\n"
        R_CURLY@[24; 25) "}"
"#
            .trim()
        );
    }
}

View file

@ -1,2 +1,3 @@
reorder_modules = false
use_small_heuristics = "Max"
newline_style = "Unix"