Mirror of https://github.com/getzola/zola, synced 2024-12-13 22:02:29 +00:00
Merge pull request #609 from mziter/next
Handle csv parsing error when encountering rows with different lengths
Commit 42089a18ba
2 changed files with 38 additions and 1 deletion
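For context: load_csv iterates over records that, as the hunk below suggests, come from the csv crate, and with the default (non-flexible) reader a row whose field count differs from the header is yielded as an Err rather than a record. A minimal standalone sketch of that behaviour, not Zola code, assuming only the csv crate with default ReaderBuilder settings:

    use csv::ReaderBuilder;

    fn main() {
        // The last row has three fields while the header declares two,
        // which the default (non-flexible) reader reports as an error.
        let data = "Number,Title\n1,Gutenberg\n2,Printing\n3,Typewriter,ExtraBadColumn\n";
        let mut reader = ReaderBuilder::new().from_reader(data.as_bytes());
        for result in reader.records() {
            match result {
                Ok(record) => println!("ok: {:?}", record),
                // For the uneven row this is a csv::Error of kind UnequalLengths.
                Err(e) => println!("csv error: {}", e),
            }
        }
    }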
@@ -291,7 +291,16 @@ fn load_csv(csv_data: String) -> Result<Value> {
     let mut records_array: Vec<Value> = Vec::new();
 
     for result in records {
-        let record = result.unwrap();
+        let record = match result {
+            Ok(r) => r,
+            Err(e) => {
+                return Err(tera::Error::chain(
+                    String::from("Error encountered when parsing csv records"),
+                    e,
+                ));
+            }
+        };
+
         let mut elements_array: Vec<Value> = Vec::new();
 
         for e in record.into_iter() {
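The change above swaps the panicking unwrap() for a match that converts the csv error into a tera::Error via Error::chain, so callers of load_data get an ordinary error instead of a crash. A minimal sketch of the same conversion in isolation (first_record is a hypothetical helper; it assumes the csv and tera crates as used in the hunk above):

    // Hypothetical helper illustrating the pattern: propagate a failed
    // csv record as a chained tera::Error instead of unwrapping it.
    fn first_record(csv_data: &str) -> tera::Result<csv::StringRecord> {
        let mut reader = csv::Reader::from_reader(csv_data.as_bytes());
        match reader.records().next() {
            Some(Ok(record)) => Ok(record),
            Some(Err(e)) => Err(tera::Error::chain(
                String::from("Error encountered when parsing csv records"),
                e,
            )),
            None => Err(tera::Error::msg("csv data contained no records")),
        }
    }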
@@ -455,6 +464,30 @@ mod tests {
         )
     }
 
+    // Test points to bad csv file with uneven row lengths
+    #[test]
+    fn bad_csv_should_result_in_error() {
+        let static_fn = LoadData::new(
+            PathBuf::from("../utils/test-files"),
+            PathBuf::from("../utils/test-files"),
+        );
+        let mut args = HashMap::new();
+        args.insert("path".to_string(), to_value("uneven_rows.csv").unwrap());
+        let result = static_fn.call(&args.clone());
+
+        assert!(result.is_err());
+
+        let error_kind = result.err().unwrap().kind;
+        match error_kind {
+            tera::ErrorKind::Msg(msg) => {
+                if msg != String::from("Error encountered when parsing csv records") {
+                    panic!("Error message is wrong. Perhaps wrong error is being returned?");
+                }
+            }
+            _ => panic!("Error encountered was not expected CSV error"),
+        }
+    }
+
     #[test]
     fn can_load_json() {
         let static_fn = LoadData::new(
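One note on the design: Error::chain keeps the original csv error as the source of the returned tera::Error, so the message the test asserts on is only the outer layer and the csv crate's own description of the uneven row remains reachable. A small sketch of walking that chain, assuming tera's Error exposes its cause through std::error::Error::source as tera 1.x does:

    use std::error::Error as StdError;

    // Collect the outer message plus every underlying cause: first the
    // "Error encountered when parsing csv records" text, then whatever
    // detail the source error (here, the csv error) reports.
    fn error_chain_messages(err: &tera::Error) -> Vec<String> {
        let mut messages = vec![err.to_string()];
        let mut source = err.source();
        while let Some(cause) = source {
            messages.push(cause.to_string());
            source = cause.source();
        }
        messages
    }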
4  components/utils/test-files/uneven_rows.csv  Normal file
@@ -0,0 +1,4 @@
+Number,Title
+1,Gutenberg
+2,Printing
+3,Typewriter,ExtraBadColumn