mirror of https://github.com/rust-lang/rust-analyzer
synced 2025-01-12 21:28:51 +00:00
Add testing infrastructure for type inference
- move dir_tests to test_utils for that.
parent 3899898d75
commit 7348f7883f

9 changed files with 223 additions and 125 deletions
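
The change introduces golden-file ("directory") testing: `dir_tests` (moved into `test_utils` in the hunks below) walks directories of `.rs` fixtures, feeds each file's text to a caller-supplied closure, and compares the closure's output against a sibling `.txt` file, creating it on first run. A minimal sketch of the calling pattern this enables, using only names introduced in this commit:

    // Each crate points dir_tests at its own fixture root and supplies a
    // text -> text function; discovery, comparison, and creation of missing
    // .txt expectations are handled by the shared helper.
    dir_tests(&test_data_dir(), &["."], |text, _path| infer_file(text));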
@@ -5,7 +5,8 @@ use std::{
 use ra_editor::{self, find_node_at_offset, FileSymbol, LineIndex, LocalEdit};
 use ra_syntax::{
-    ast::{self, ArgListOwner, Expr, NameOwner},
+    ast::{self, ArgListOwner, Expr, NameOwner, FnDef},
+    algo::find_covering_node,
     AstNode, SourceFileNode,
     SyntaxKind::*,
     SyntaxNodeRef, TextRange, TextUnit,
@@ -510,6 +511,17 @@ impl AnalysisImpl {
         Ok(None)
     }
 
+    pub fn type_of(&self, file_id: FileId, range: TextRange) -> Cancelable<Option<String>> {
+        let file = self.db.source_file(file_id);
+        let syntax = file.syntax();
+        let node = find_covering_node(syntax, range);
+        let parent_fn = node.ancestors().filter_map(FnDef::cast).next();
+        let parent_fn = if let Some(p) = parent_fn { p } else { return Ok(None) };
+        let function = ctry!(source_binder::function_from_source(&*self.db, file_id, parent_fn)?);
+        let infer = function.infer(&*self.db);
+        Ok(infer.type_of_node(node).map(|t| t.to_string()))
+    }
+
     fn index_resolve(&self, name_ref: ast::NameRef) -> Cancelable<Vec<(FileId, FileSymbol)>> {
         let name = name_ref.text();
         let mut query = Query::new(name.to_string());
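
A hypothetical call through the public facade (the variable names here are illustrative, not from the diff):

    // Given a selection inside a function body, ask for the inferred type of
    // the smallest syntax node covering it; Ok(None) outside any function.
    let ty: Option<String> = analysis.type_of(file_id, selection_range)?;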
@@ -366,6 +366,9 @@ impl Analysis {
     ) -> Cancelable<Option<(FnSignatureInfo, Option<usize>)>> {
         self.imp.resolve_callable(position)
     }
+    pub fn type_of(&self, file_id: FileId, range: TextRange) -> Cancelable<Option<String>> {
+        self.imp.type_of(file_id, range)
+    }
 }
 
 pub struct LibraryData {
@@ -24,6 +24,15 @@ impl MockDatabase {
         (db, source_root)
     }
 
+    pub(crate) fn with_single_file(text: &str) -> (MockDatabase, SourceRoot, FileId) {
+        let mut db = MockDatabase::default();
+        let mut source_root = SourceRoot::default();
+        let file_id = db.add_file(&mut source_root, "/main.rs", text);
+        db.query_mut(ra_db::SourceRootQuery)
+            .set(WORKSPACE, Arc::new(source_root.clone()));
+        (db, source_root, file_id)
+    }
+
     pub(crate) fn with_position(fixture: &str) -> (MockDatabase, FilePosition) {
         let (db, _, position) = MockDatabase::from_fixture(fixture);
         let position = position.expect("expected a marker ( <|> )");
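
Usage of the new constructor, as the type-inference tests below rely on it:

    // A fresh database with a single file at "/main.rs" in the workspace root.
    let (db, _source_root, file_id) = MockDatabase::with_single_file("fn foo() {}");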
@@ -6,6 +6,7 @@ use rustc_hash::{FxHashMap, FxHashSet};
 
 use std::sync::Arc;
 use std::collections::HashMap;
+use std::fmt;
 
 use ra_db::LocalSyntaxPtr;
 use ra_syntax::{
@@ -184,11 +185,40 @@ impl Ty {
     }
 }
 
+impl fmt::Display for Ty {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match self {
+            Ty::Bool => write!(f, "bool"),
+            Ty::Char => write!(f, "char"),
+            Ty::Int(t) => write!(f, "{}", t.ty_to_string()),
+            Ty::Uint(t) => write!(f, "{}", t.ty_to_string()),
+            Ty::Float(t) => write!(f, "{}", t.ty_to_string()),
+            Ty::Str => write!(f, "str"),
+            Ty::Slice(t) => write!(f, "[{}]", t),
+            Ty::Never => write!(f, "!"),
+            Ty::Tuple(ts) => {
+                write!(f, "(")?;
+                for t in ts {
+                    write!(f, "{},", t)?;
+                }
+                write!(f, ")")
+            }
+            Ty::Unknown => write!(f, "[unknown]")
+        }
+    }
+}
+
 #[derive(Clone, PartialEq, Eq, Debug)]
 pub struct InferenceResult {
     type_for: FxHashMap<LocalSyntaxPtr, Ty>,
 }
 
+impl InferenceResult {
+    pub fn type_of_node(&self, node: SyntaxNodeRef) -> Option<Ty> {
+        self.type_for.get(&LocalSyntaxPtr::new(node)).cloned()
+    }
+}
+
 #[derive(Clone, PartialEq, Eq, Debug)]
 pub struct InferenceContext {
     scopes: Arc<FnScopes>,
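
One quirk worth noting: the tuple arm writes `"{},"` per element with no separator logic, so even a two-element tuple renders with a trailing comma and no spaces. A sketch of the resulting output, assuming `Ty::Tuple` wraps a `Vec<Ty>` (the inner type is not shown in this diff):

    // Illustrative only: prints "(bool,char,)", not "(bool, char)".
    let t = Ty::Tuple(vec![Ty::Bool, Ty::Char]);
    assert_eq!(t.to_string(), "(bool,char,)");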
@@ -1,8 +1,11 @@
+use std::fmt::Write;
 use std::sync::Arc;
+use std::path::{Path, PathBuf};
 
 use salsa::Database;
 use ra_db::{FilesDatabase, CrateGraph, SyntaxDatabase};
 use ra_syntax::{SmolStr, algo::visit::{visitor, Visitor}, ast::{self, AstNode}};
+use test_utils::{project_dir, dir_tests};
 use relative_path::RelativePath;
 
 use crate::{source_binder, mock::WORKSPACE, module::ModuleSourceNode};
@@ -13,33 +16,46 @@ use crate::{
     mock::MockDatabase,
 };
 
-fn infer_all_fns(fixture: &str) -> () {
-    let (db, source_root) = MockDatabase::with_files(fixture);
-    for &file_id in source_root.files.values() {
-        let source_file = db.source_file(file_id);
-        for fn_def in source_file.syntax().descendants().filter_map(ast::FnDef::cast) {
-            let func = source_binder::function_from_source(&db, file_id, fn_def).unwrap().unwrap();
-            let inference_result = func.infer(&db);
-            for (syntax_ptr, ty) in &inference_result.type_for {
-                let node = syntax_ptr.resolve(&source_file);
-                eprintln!("{} '{}': {:?}", syntax_ptr.range(), node.text(), ty);
-            }
-        }
-    }
-}
+fn infer_file(content: &str) -> String {
+    let (db, source_root, file_id) = MockDatabase::with_single_file(content);
+    let source_file = db.source_file(file_id);
+    let mut acc = String::new();
+    for fn_def in source_file.syntax().descendants().filter_map(ast::FnDef::cast) {
+        let func = source_binder::function_from_source(&db, file_id, fn_def).unwrap().unwrap();
+        let inference_result = func.infer(&db);
+        for (syntax_ptr, ty) in &inference_result.type_for {
+            let node = syntax_ptr.resolve(&source_file);
+            write!(acc, "{} '{}': {}\n", syntax_ptr.range(), ellipsize(node.text().to_string().replace("\n", " "), 15), ty);
+        }
+    }
+    acc
+}
+
+fn ellipsize(mut text: String, max_len: usize) -> String {
+    if text.len() <= max_len {
+        return text;
+    }
+    let ellipsis = "...";
+    let e_len = ellipsis.len();
+    let mut prefix_len = (max_len - e_len) / 2;
+    while !text.is_char_boundary(prefix_len) {
+        prefix_len += 1;
+    }
+    let mut suffix_len = max_len - e_len - prefix_len;
+    while !text.is_char_boundary(text.len() - suffix_len) {
+        suffix_len += 1;
+    }
+    text.replace_range(prefix_len..text.len() - suffix_len, ellipsis);
+    text
+}
 
 #[test]
-fn infer_smoke_test() {
-    let text = "
-//- /lib.rs
-fn foo(x: u32, y: !) -> i128 {
-    x;
-    y;
-    return 1;
-    \"hello\";
-    0
-}
-";
-    infer_all_fns(text);
-}
+pub fn infer_tests() {
+    dir_tests(&test_data_dir(), &["."], |text, _path| {
+        infer_file(text)
+    });
+}
+
+fn test_data_dir() -> PathBuf {
+    project_dir().join("crates/ra_hir/src/ty/tests/data")
+}
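
`ellipsize` keeps a prefix and a suffix of roughly equal length and replaces the middle with "...", widening the cut only as needed to land on `char` boundaries. A worked example with max_len = 15: prefix_len = (15 - 3) / 2 = 6 and suffix_len = 15 - 3 - 6 = 6, so:

    // 18 chars in, 15 out: 6-char prefix + "..." + 6-char suffix.
    assert_eq!(ellipsize("hello world foobar".to_string(), 15), "hello ...foobar");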

crates/ra_hir/src/ty/tests/data/0001_basics.rs (new file, 11 lines)
@@ -0,0 +1,11 @@
+
+fn test(a: u32, b: isize, c: !, d: &str) {
+    a;
+    b;
+    c;
+    d;
+    1usize;
+    1isize;
+    "test";
+    1.0f32;
+}

crates/ra_hir/src/ty/tests/data/0001_basics.txt (new file, 13 lines)
@@ -0,0 +1,13 @@
+[33; 34) 'd': [unknown]
+[88; 94) '1isize': [unknown]
+[48; 49) 'a': u32
+[55; 56) 'b': isize
+[112; 118) '1.0f32': [unknown]
+[76; 82) '1usize': [unknown]
+[9; 10) 'a': u32
+[27; 28) 'c': !
+[62; 63) 'c': !
+[17; 18) 'b': isize
+[100; 106) '"test"': [unknown]
+[42; 121) '{ ...f32; }': ()
+[69; 70) 'd': [unknown]
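
Reading the expectations: each line is `[start; end) 'source text': type`. The order is the iteration order of the FxHashMap in InferenceResult, not source order, which is why `[33; 34)` precedes `[9; 10)`; types the checker cannot infer yet (string literals, floats, references) print as `[unknown]`. For example:

    [42; 121) '{ ...f32; }': ()   // the whole (ellipsized) fn body, inferred as unit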
@@ -9,6 +9,7 @@ use std::{
     path::{Path, PathBuf, Component},
 };
 
+use test_utils::{project_dir, dir_tests, read_text, collect_tests};
 use ra_syntax::{
     utils::{check_fuzz_invariants, dump_tree},
     SourceFileNode,
@@ -16,7 +17,7 @@ use ra_syntax::{
 
 #[test]
 fn lexer_tests() {
-    dir_tests(&["lexer"], |text, _| {
+    dir_tests(&test_data_dir(), &["lexer"], |text, _| {
         let tokens = ra_syntax::tokenize(text);
         dump_tokens(&tokens, text)
     })
@@ -24,7 +25,7 @@ fn lexer_tests() {
 
 #[test]
 fn parser_tests() {
-    dir_tests(&["parser/inline/ok", "parser/ok"], |text, path| {
+    dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], |text, path| {
         let file = SourceFileNode::parse(text);
         let errors = file.errors();
         assert_eq!(
@@ -35,7 +36,7 @@ fn parser_tests() {
         );
         dump_tree(file.syntax())
     });
-    dir_tests(&["parser/err", "parser/inline/err"], |text, path| {
+    dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], |text, path| {
         let file = SourceFileNode::parse(text);
         let errors = file.errors();
         assert_ne!(
@@ -50,7 +51,7 @@ fn parser_tests() {
 
 #[test]
 fn parser_fuzz_tests() {
-    for (_, text) in collect_tests(&["parser/fuzz-failures"]) {
+    for (_, text) in collect_tests(&test_data_dir(), &["parser/fuzz-failures"]) {
         check_fuzz_invariants(&text)
     }
 }
@@ -92,102 +93,6 @@ fn self_hosting_parsing() {
         "self_hosting_parsing found too few files - is it running in the right directory?"
     )
 }
-/// Read file and normalize newlines.
-///
-/// `rustc` seems to always normalize `\r\n` newlines to `\n`:
-///
-/// ```
-/// let s = "
-/// ";
-/// assert_eq!(s.as_bytes(), &[10]);
-/// ```
-///
-/// so this should always be correct.
-fn read_text(path: &Path) -> String {
-    fs::read_to_string(path)
-        .expect(&format!("File at {:?} should be valid", path))
-        .replace("\r\n", "\n")
-}
-
-fn dir_tests<F>(paths: &[&str], f: F)
-where
-    F: Fn(&str, &Path) -> String,
-{
-    for (path, input_code) in collect_tests(paths) {
-        let parse_tree = f(&input_code, &path);
-        let path = path.with_extension("txt");
-        if !path.exists() {
-            println!("\nfile: {}", path.display());
-            println!("No .txt file with expected result, creating...\n");
-            println!("{}\n{}", input_code, parse_tree);
-            fs::write(&path, &parse_tree).unwrap();
-            panic!("No expected result")
-        }
-        let expected = read_text(&path);
-        let expected = expected.as_str();
-        let parse_tree = parse_tree.as_str();
-        assert_equal_text(expected, parse_tree, &path);
-    }
-}
-
-const REWRITE: bool = false;
-
-fn assert_equal_text(expected: &str, actual: &str, path: &Path) {
-    if expected == actual {
-        return;
-    }
-    let dir = project_dir();
-    let pretty_path = path.strip_prefix(&dir).unwrap_or_else(|_| path);
-    if expected.trim() == actual.trim() {
-        println!("whitespace difference, rewriting");
-        println!("file: {}\n", pretty_path.display());
-        fs::write(path, actual).unwrap();
-        return;
-    }
-    if REWRITE {
-        println!("rewriting {}", pretty_path.display());
-        fs::write(path, actual).unwrap();
-        return;
-    }
-    assert_eq_text!(expected, actual, "file: {}", pretty_path.display());
-}
-
-fn collect_tests(paths: &[&str]) -> Vec<(PathBuf, String)> {
-    paths
-        .iter()
-        .flat_map(|path| {
-            let path = test_data_dir().join(path);
-            test_from_dir(&path).into_iter()
-        })
-        .map(|path| {
-            let text = read_text(&path);
-            (path, text)
-        })
-        .collect()
-}
-
-fn test_from_dir(dir: &Path) -> Vec<PathBuf> {
-    let mut acc = Vec::new();
-    for file in fs::read_dir(&dir).unwrap() {
-        let file = file.unwrap();
-        let path = file.path();
-        if path.extension().unwrap_or_default() == "rs" {
-            acc.push(path);
-        }
-    }
-    acc.sort();
-    acc
-}
-
-fn project_dir() -> PathBuf {
-    let dir = env!("CARGO_MANIFEST_DIR");
-    PathBuf::from(dir)
-        .parent()
-        .unwrap()
-        .parent()
-        .unwrap()
-        .to_owned()
-}
-
 fn test_data_dir() -> PathBuf {
     project_dir().join("crates/ra_syntax/tests/data")
@@ -1,4 +1,6 @@
 use std::fmt;
+use std::fs;
+use std::path::{Path, PathBuf};
 
 use itertools::Itertools;
 use text_unit::{TextRange, TextUnit};
@@ -262,3 +264,100 @@ pub fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a
         _ => Some((expected, actual)),
     }
 }
+
+pub fn dir_tests<F>(test_data_dir: &Path, paths: &[&str], f: F)
+where
+    F: Fn(&str, &Path) -> String,
+{
+    for (path, input_code) in collect_tests(test_data_dir, paths) {
+        let parse_tree = f(&input_code, &path);
+        let path = path.with_extension("txt");
+        if !path.exists() {
+            println!("\nfile: {}", path.display());
+            println!("No .txt file with expected result, creating...\n");
+            println!("{}\n{}", input_code, parse_tree);
+            fs::write(&path, &parse_tree).unwrap();
+            panic!("No expected result")
+        }
+        let expected = read_text(&path);
+        let expected = expected.as_str();
+        let parse_tree = parse_tree.as_str();
+        assert_equal_text(expected, parse_tree, &path);
+    }
+}
+
+pub fn collect_tests(test_data_dir: &Path, paths: &[&str]) -> Vec<(PathBuf, String)> {
+    paths
+        .iter()
+        .flat_map(|path| {
+            let path = test_data_dir.to_owned().join(path);
+            test_from_dir(&path).into_iter()
+        })
+        .map(|path| {
+            let text = read_text(&path);
+            (path, text)
+        })
+        .collect()
+}
+
+fn test_from_dir(dir: &Path) -> Vec<PathBuf> {
+    let mut acc = Vec::new();
+    for file in fs::read_dir(&dir).unwrap() {
+        let file = file.unwrap();
+        let path = file.path();
+        if path.extension().unwrap_or_default() == "rs" {
+            acc.push(path);
+        }
+    }
+    acc.sort();
+    acc
+}
+
+pub fn project_dir() -> PathBuf {
+    let dir = env!("CARGO_MANIFEST_DIR");
+    PathBuf::from(dir)
+        .parent()
+        .unwrap()
+        .parent()
+        .unwrap()
+        .to_owned()
+}
+
+/// Read file and normalize newlines.
+///
+/// `rustc` seems to always normalize `\r\n` newlines to `\n`:
+///
+/// ```
+/// let s = "
+/// ";
+/// assert_eq!(s.as_bytes(), &[10]);
+/// ```
+///
+/// so this should always be correct.
+pub fn read_text(path: &Path) -> String {
+    fs::read_to_string(path)
+        .expect(&format!("File at {:?} should be valid", path))
+        .replace("\r\n", "\n")
+}
+
+const REWRITE: bool = false;
+
+fn assert_equal_text(expected: &str, actual: &str, path: &Path) {
+    if expected == actual {
+        return;
+    }
+    let dir = project_dir();
+    let pretty_path = path.strip_prefix(&dir).unwrap_or_else(|_| path);
+    if expected.trim() == actual.trim() {
+        println!("whitespace difference, rewriting");
+        println!("file: {}\n", pretty_path.display());
+        fs::write(path, actual).unwrap();
+        return;
+    }
+    if REWRITE {
+        println!("rewriting {}", pretty_path.display());
+        fs::write(path, actual).unwrap();
+        return;
+    }
+    assert_eq_text!(expected, actual, "file: {}", pretty_path.display());
+}
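
A note on updating golden files: `dir_tests` writes a missing `.txt` automatically and then panics so the first run fails visibly; for intentional changes to existing output, flipping the constant re-blesses every expectation on the next run:

    // Temporarily set to true to overwrite all .txt expectations in place.
    const REWRITE: bool = true;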