use crate::prelude::*;
use csv::{ErrorKind, ReaderBuilder};
use nu_errors::ShellError;
use nu_protocol::{ReturnSuccess, TaggedDictBuilder, UntaggedValue, Value};
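
/// Parse a delimited string (comma-, tab-, or otherwise separated) into a
/// table `Value`: one record per row, keyed by the header names, with each
/// cell stored as an int, decimal, or string.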
fn from_delimited_string_to_value(
    s: String,
    headerless: bool,
    separator: char,
    tag: impl Into<Tag>,
) -> Result<Value, csv::Error> {
    let mut reader = ReaderBuilder::new()
        .has_headers(!headerless)
        .delimiter(separator as u8)
        .from_reader(s.as_bytes());
    let tag = tag.into();
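
    // Without a header row, synthesize "Column1".."ColumnN" names so each row
    // still becomes a keyed record.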
    let headers = if headerless {
        (1..=reader.headers()?.len())
            .map(|i| format!("Column{}", i))
            .collect::<Vec<String>>()
    } else {
        reader.headers()?.iter().map(String::from).collect()
    };

    let mut rows = vec![];
    for row in reader.records() {
        let mut tagged_row = TaggedDictBuilder::new(&tag);
        for (value, header) in row?.iter().zip(headers.iter()) {
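            // Prefer the narrowest type that parses: integer, then decimal,
            // then fall back to a plain string.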
            if let Ok(i) = value.parse::<i64>() {
                tagged_row.insert_value(header, UntaggedValue::int(i).into_value(&tag))
            } else if let Ok(f) = value.parse::<f64>() {
                tagged_row.insert_value(header, UntaggedValue::decimal(f).into_value(&tag))
            } else {
                tagged_row.insert_value(header, UntaggedValue::string(value).into_value(&tag))
            }
        }
        rows.push(tagged_row.into_value());
    }

    Ok(UntaggedValue::Table(rows).into_value(&tag))
}
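
/// Convert delimited text from `input` into a stream of row values, turning
/// parse failures into labeled `ShellError`s.
///
/// Rough usage sketch (the `input` stream and `name_tag` are assumed to come
/// from the calling command, e.g. a `from csv`/`from tsv` implementation):
///
/// ```ignore
/// let output = from_delimited_data(false, ',', "CSV", input, name_tag)?;
/// ```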
pub fn from_delimited_data(
    headerless: bool,
    sep: char,
    format_name: &'static str,
    input: InputStream,
    name: Tag,
) -> Result<OutputStream, ShellError> {
    let name_tag = name;
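
    // Collect the entire input into one string, parse it in a single pass, and
    // yield each resulting row (or a labeled error) onto the output stream.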
    let stream = async_stream! {
        let concat_string = input.collect_string(name_tag.clone()).await?;

        match from_delimited_string_to_value(concat_string.item, headerless, sep, name_tag.clone()) {
            Ok(x) => match x {
                Value { value: UntaggedValue::Table(list), .. } => {
                    for l in list {
                        yield ReturnSuccess::value(l);
                    }
                }
                x => yield ReturnSuccess::value(x),
            },
            Err(err) => {
                let line_one = match pretty_csv_error(err) {
                    Some(pretty) => format!("Could not parse as {} ({})", format_name, pretty),
                    None => format!("Could not parse as {}", format_name),
                };
                let line_two = format!("input cannot be parsed as {}", format_name);
                yield Err(ShellError::labeled_error_with_secondary(
                    line_one,
                    line_two,
                    name_tag.clone(),
                    "value originates from here",
                    concat_string.tag,
                ))
            },
        }
    };

    Ok(stream.to_output_stream())
}
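
/// Build a human-friendly description for csv errors we can explain in more
/// detail; returns `None` when there is nothing better than the generic
/// "Could not parse" message.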
fn pretty_csv_error(err: csv::Error) -> Option<String> {
    match err.kind() {
        ErrorKind::UnequalLengths {
            pos,
            expected_len,
            len,
        } => {
            if let Some(pos) = pos {
                Some(format!(
                    "Line {}: expected {} fields, found {}",
                    pos.line(),
                    expected_len,
                    len
                ))
            } else {
                Some(format!("Expected {} fields, found {}", expected_len, len))
            }
        }
        ErrorKind::Seek => Some("Internal error while parsing csv".to_string()),
        _ => None,
    }
}