Mirror of https://github.com/nushell/nushell, synced 2024-12-26 13:03:07 +00:00
Apply more recent/nightly clippy lints (#7916)
# Description

- Use inline format strings in dataframe code
- Fix manual `.is_ascii_digit()` check
- Remove unnecessary `.into_iter()` calls
parent 7402589775
commit 1ea39abcff
20 changed files with 56 additions and 81 deletions
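Most of the hunks below apply clippy's `uninlined_format_args` suggestion: format arguments that are plain identifiers move inside the braces, so `format!("{:?}", e)` becomes `format!("{e:?}")` with identical output. The other two changes remove `.into_iter()` calls on values that are already iterators and replace a hand-written digit-range test. A minimal standalone sketch of all three cleanups (the function and variable names here are made up for illustration and are not taken from the nushell sources):

```rust
fn describe(name: &str, count: usize) -> String {
    // Before: format!("{} has {} items", name, count)
    // After (uninlined_format_args): identifiers are captured inline;
    // format specs still apply, e.g. "{err:?}" keeps Debug formatting.
    format!("{name} has {count} items")
}

fn main() {
    // Before: (0..3).into_iter().map(...) -- a range is already an
    // iterator, so the extra .into_iter() is a no-op that clippy flags.
    let doubled: Vec<i32> = (0..3).map(|x| x * 2).collect();

    // Before: (b'0'..=b'9').contains(&byte)
    // After: the standard helper states the intent directly.
    let byte = b'7';
    assert!(byte.is_ascii_digit());

    println!("{}", describe("doubled", doubled.len()));
}
```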
@@ -123,7 +123,6 @@ pub fn version(
     // Get a list of command names and check for plugins
     let installed_plugins = engine_state
         .plugin_decls()
-        .into_iter()
         .filter(|x| x.is_plugin().is_some())
         .map(|x| x.name())
         .collect::<Vec<_>>();
@@ -119,10 +119,7 @@ fn command(
         "ipc" | "arrow" => from_ipc(engine_state, stack, call),
         "json" => from_json(engine_state, stack, call),
         _ => Err(ShellError::FileNotFoundCustom(
-            format!(
-                "{}. Supported values: csv, tsv, parquet, ipc, arrow, json",
-                msg
-            ),
+            format!("{msg}. Supported values: csv, tsv, parquet, ipc, arrow, json"),
             blamed,
         )),
     },

@@ -154,7 +151,7 @@ fn from_parquet(
        .map_err(|e| {
            ShellError::GenericError(
                "Parquet reader error".into(),
-               format!("{:?}", e),
+               format!("{e:?}"),
                Some(call.head),
                None,
                Vec::new(),

@@ -188,7 +185,7 @@ fn from_parquet(
        .map_err(|e| {
            ShellError::GenericError(
                "Parquet reader error".into(),
-               format!("{:?}", e),
+               format!("{e:?}"),
                Some(call.head),
                None,
                Vec::new(),

@@ -219,7 +216,7 @@ fn from_ipc(
        .map_err(|e| {
            ShellError::GenericError(
                "IPC reader error".into(),
-               format!("{:?}", e),
+               format!("{e:?}"),
                Some(call.head),
                None,
                Vec::new(),

@@ -253,7 +250,7 @@ fn from_ipc(
        .map_err(|e| {
            ShellError::GenericError(
                "IPC reader error".into(),
-               format!("{:?}", e),
+               format!("{e:?}"),
                Some(call.head),
                None,
                Vec::new(),

@@ -289,7 +286,7 @@ fn from_json(
        .map_err(|e| {
            ShellError::GenericError(
                "Json reader error".into(),
-               format!("{:?}", e),
+               format!("{e:?}"),
                Some(call.head),
                None,
                Vec::new(),

@@ -353,7 +350,7 @@ fn from_csv(
        .map_err(|e| {
            ShellError::GenericError(
                "Parquet reader error".into(),
-               format!("{:?}", e),
+               format!("{e:?}"),
                Some(call.head),
                None,
                Vec::new(),

@@ -419,7 +416,7 @@ fn from_csv(
        .map_err(|e| {
            ShellError::GenericError(
                "Parquet reader error".into(),
-               format!("{:?}", e),
+               format!("{e:?}"),
                Some(call.head),
                None,
                Vec::new(),
@@ -77,12 +77,12 @@ impl SQLContext {
         Ok(match select_item {
             SelectItem::UnnamedExpr(expr) => {
                 let expr = parse_sql_expr(expr)?;
-                raw_projection_before_alias.insert(format!("{:?}", expr), i);
+                raw_projection_before_alias.insert(format!("{expr:?}"), i);
                 expr
             }
             SelectItem::ExprWithAlias { expr, alias } => {
                 let expr = parse_sql_expr(expr)?;
-                raw_projection_before_alias.insert(format!("{:?}", expr), i);
+                raw_projection_before_alias.insert(format!("{expr:?}"), i);
                 expr.alias(&alias.value)
             }
             SelectItem::QualifiedWildcard(_, _) | SelectItem::Wildcard(_) => {

@@ -133,7 +133,7 @@ impl SQLContext {
         // and its projections columns, keeping the original index
         let (exclude_expr, groupby_pos): (Vec<_>, Vec<_>) = group_by
             .iter()
-            .map(|expr| raw_projection_before_alias.get(&format!("{:?}", expr)))
+            .map(|expr| raw_projection_before_alias.get(&format!("{expr:?}")))
             .enumerate()
             .filter(|(_, proj_p)| proj_p.is_some())
             .map(|(gb_p, proj_p)| (*proj_p.unwrap_or(&0), (*proj_p.unwrap_or(&0), gb_p)))

@@ -173,7 +173,7 @@ impl SQLContext {

     pub fn execute(&self, query: &str) -> Result<LazyFrame, PolarsError> {
         let ast = Parser::parse_sql(&self.dialect, query)
-            .map_err(|e| PolarsError::ComputeError(format!("{:?}", e).into()))?;
+            .map_err(|e| PolarsError::ComputeError(format!("{e:?}").into()))?;
         if ast.len() != 1 {
             Err(PolarsError::ComputeError(
                 "One and only one statement at a time please".into(),

@@ -196,7 +196,7 @@ impl SQLContext {
             Some(SqlExpr::Value(SQLValue::Number(nrow, _))) => {
                 let nrow = nrow.parse().map_err(|err| {
                     PolarsError::ComputeError(
-                        format!("Conversion Error: {:?}", err).into(),
+                        format!("Conversion Error: {err:?}").into(),
                     )
                 })?;
                 rs.limit(nrow)

@@ -211,7 +211,7 @@ impl SQLContext {
             }
             _ => {
                 return Err(PolarsError::ComputeError(
-                    format!("Statement type {:?} is not supported", ast).into(),
+                    format!("Statement type {ast:?} is not supported").into(),
                 ))
             }
         })
@@ -41,11 +41,7 @@ fn map_sql_polars_datatype(data_type: &SQLDataType) -> Result<DataType> {
         },
         _ => {
             return Err(PolarsError::ComputeError(
-                format!(
-                    "SQL Datatype {:?} was not supported in polars-sql yet!",
-                    data_type
-                )
-                .into(),
+                format!("SQL Datatype {data_type:?} was not supported in polars-sql yet!").into(),
             ))
         }
     })

@@ -75,7 +71,7 @@ fn binary_op_(left: Expr, right: Expr, op: &SQLBinaryOperator) -> Result<Expr> {
         SQLBinaryOperator::Xor => left.xor(right),
         _ => {
             return Err(PolarsError::ComputeError(
-                format!("SQL Operator {:?} was not supported in polars-sql yet!", op).into(),
+                format!("SQL Operator {op:?} was not supported in polars-sql yet!").into(),
             ))
         }
     })

@@ -87,11 +83,11 @@ fn literal_expr(value: &SqlValue) -> Result<Expr> {
             // Check for existence of decimal separator dot
             if s.contains('.') {
                 s.parse::<f64>().map(lit).map_err(|_| {
-                    PolarsError::ComputeError(format!("Can't parse literal {:?}", s).into())
+                    PolarsError::ComputeError(format!("Can't parse literal {s:?}").into())
                 })
             } else {
                 s.parse::<i64>().map(lit).map_err(|_| {
-                    PolarsError::ComputeError(format!("Can't parse literal {:?}", s).into())
+                    PolarsError::ComputeError(format!("Can't parse literal {s:?}").into())
                 })
             }?
         }

@@ -103,11 +99,7 @@ fn literal_expr(value: &SqlValue) -> Result<Expr> {
         SqlValue::Null => Expr::Literal(LiteralValue::Null),
         _ => {
             return Err(PolarsError::ComputeError(
-                format!(
-                    "Parsing SQL Value {:?} was not supported in polars-sql yet!",
-                    value
-                )
-                .into(),
+                format!("Parsing SQL Value {value:?} was not supported in polars-sql yet!").into(),
             ))
         }
     })

@@ -127,11 +119,7 @@ pub fn parse_sql_expr(expr: &SqlExpr) -> Result<Expr> {
         SqlExpr::Value(value) => literal_expr(value)?,
         _ => {
             return Err(PolarsError::ComputeError(
-                format!(
-                    "Expression: {:?} was not supported in polars-sql yet!",
-                    expr
-                )
-                .into(),
+                format!("Expression: {expr:?} was not supported in polars-sql yet!").into(),
             ))
         }
     })

@@ -185,8 +173,7 @@ fn parse_sql_function(sql_function: &SQLFunction) -> Result<Expr> {
         _ => {
             return Err(PolarsError::ComputeError(
                 format!(
-                    "Function {:?} with args {:?} was not supported in polars-sql yet!",
-                    function_name, args
+                    "Function {function_name:?} with args {args:?} was not supported in polars-sql yet!"
                 )
                 .into(),
             ))
@@ -128,7 +128,7 @@ impl Command for LazyAggregate {
                 if matches!(dtype, Some(DataType::Object(..))) {
                     return Err(ShellError::GenericError(
                         "Object type column not supported for aggregation".into(),
-                        format!("Column '{}' is type Object", name),
+                        format!("Column '{name}' is type Object"),
                         Some(call.head),
                         Some("Aggregations cannot be performed on Object type columns. Use dtype command to check column types".into()),
                         Vec::new(),
@@ -83,7 +83,7 @@ pub fn test_dataframe(cmds: Vec<Box<dyn Command + 'static>>) {
             .into_value(Span::test_data());

         println!("input: {}", example.example);
-        println!("result: {:?}", result);
+        println!("result: {result:?}");
         println!("done: {:?}", start.elapsed());

         // Note. Value implements PartialEq for Bool, Int, Float, String and Block

@@ -92,8 +92,7 @@ pub fn test_dataframe(cmds: Vec<Box<dyn Command + 'static>>) {
         if let Some(expected) = example.result {
             if result != expected {
                 panic!(
-                    "the example result is different to expected value: {:?} != {:?}",
-                    result, expected
+                    "the example result is different to expected value: {result:?} != {expected:?}"
                 )
             }
         }
@@ -420,7 +420,7 @@ pub fn create_column(
             "Error casting object from series".into(),
             "".to_string(),
             None,
-            Some(format!("Object not supported for conversion: {}", x)),
+            Some(format!("Object not supported for conversion: {x}")),
             Vec::new(),
         )),
         Some(ca) => {

@@ -464,7 +464,7 @@ pub fn create_column(
                     error: ShellError::UnsupportedInput(
                         "The given local datetime representation is invalid."
                             .to_string(),
-                        format!("timestamp is {:?}", a),
+                        format!("timestamp is {a:?}"),
                         span,
                         Span::unknown(),
                     ),

@@ -479,7 +479,7 @@ pub fn create_column(
                     error: ShellError::UnsupportedInput(
                         "The given local datetime representation is invalid."
                             .to_string(),
-                        format!("timestamp is {:?}", a),
+                        format!("timestamp is {a:?}"),
                         span,
                         Span::unknown(),
                     ),

@@ -529,7 +529,7 @@ pub fn create_column(
                     error: ShellError::UnsupportedInput(
                         "The given local datetime representation is invalid."
                             .to_string(),
-                        format!("timestamp is {:?}", a),
+                        format!("timestamp is {a:?}"),
                         span,
                         Span::unknown(),
                     ),

@@ -544,7 +544,7 @@ pub fn create_column(
                     error: ShellError::UnsupportedInput(
                         "The given local datetime representation is invalid."
                             .to_string(),
-                        format!("timestamp is {:?}", a),
+                        format!("timestamp is {a:?}"),
                         span,
                         Span::unknown(),
                     ),

@@ -594,7 +594,7 @@ pub fn create_column(
             "Error creating Dataframe".into(),
             "".to_string(),
             None,
-            Some(format!("Value not supported in nushell: {}", e)),
+            Some(format!("Value not supported in nushell: {e}")),
             Vec::new(),
         )),
     }
@@ -159,7 +159,7 @@ impl NuDataFrame {
             Value::CustomValue { .. } => return Self::try_from_value(value),
             Value::List { vals, .. } => {
                 let cols = (0..vals.len())
-                    .map(|i| format!("{}", i))
+                    .map(|i| format!("{i}"))
                     .collect::<Vec<String>>();

                 conversion::insert_record(&mut column_values, &cols, &vals)?

@@ -181,7 +181,7 @@ impl NuDataFrame {
         let dataframe = DataFrame::new(columns).map_err(|e| {
             ShellError::GenericError(
                 "Error creating dataframe".into(),
-                format!("Unable to create DataFrame: {}", e),
+                format!("Unable to create DataFrame: {e}"),
                 Some(span),
                 None,
                 Vec::new(),

@@ -426,7 +426,6 @@ impl NuDataFrame {
             .collect::<Vec<(String, std::vec::IntoIter<Value>)>>();

         let values = (0..size)
-            .into_iter()
             .map(|i| {
                 let mut cols = vec![];
                 let mut vals = vec![];
@@ -187,7 +187,7 @@ impl NuDataFrame {
             Err(e) => Err({
                 ShellError::GenericError(
                     "Error appending dataframe".into(),
-                    format!("Unable to append: {}", e),
+                    format!("Unable to append: {e}"),
                     Some(span),
                     None,
                     Vec::new(),

@@ -200,7 +200,7 @@ impl NuDataFrame {
         let df_new = DataFrame::new(new_cols).map_err(|e| {
             ShellError::GenericError(
                 "Error appending dataframe".into(),
-                format!("Unable to append dataframes: {}", e),
+                format!("Unable to append dataframes: {e}"),
                 Some(span),
                 None,
                 Vec::new(),
@@ -227,7 +227,7 @@ pub fn expr_to_value(expr: &Expr, span: Span) -> Value {
                 span,
             };
             let value = Value::String {
-                val: format!("{:?}", literal),
+                val: format!("{literal:?}"),
                 span,
             };

@@ -239,7 +239,7 @@ pub fn expr_to_value(expr: &Expr, span: Span) -> Value {
             let right_val = expr_to_value(right, span);

             let operator = Value::String {
-                val: format!("{:?}", op),
+                val: format!("{op:?}"),
                 span,
             };

@@ -291,7 +291,7 @@ pub fn expr_to_value(expr: &Expr, span: Span) -> Value {
             let expr = expr_to_value(expr.as_ref(), span);
             let quantile = expr_to_value(quantile.as_ref(), span);
             let interpol = Value::String {
-                val: format!("{:?}", interpol),
+                val: format!("{interpol:?}"),
                 span,
             };

@@ -373,7 +373,7 @@ pub fn expr_to_value(expr: &Expr, span: Span) -> Value {
             let vals = dtypes
                 .iter()
                 .map(|d| Value::String {
-                    val: format!("{}", d),
+                    val: format!("{d}"),
                     span,
                 })
                 .collect();

@@ -383,7 +383,7 @@ pub fn expr_to_value(expr: &Expr, span: Span) -> Value {
         Expr::Sort { expr, options } => {
             let expr = expr_to_value(expr.as_ref(), span);
             let options = Value::String {
-                val: format!("{:?}", options),
+                val: format!("{options:?}"),
                 span,
             };
             let cols = vec!["expr".into(), "options".into()];

@@ -401,7 +401,7 @@ pub fn expr_to_value(expr: &Expr, span: Span) -> Value {
         } => {
             let expr = expr_to_value(expr.as_ref(), span);
             let dtype = Value::String {
-                val: format!("{:?}", data_type),
+                val: format!("{data_type:?}"),
                 span,
             };
             let strict = Value::Bool { val: *strict, span };

@@ -482,7 +482,7 @@ pub fn expr_to_value(expr: &Expr, span: Span) -> Value {
             let excluded = excluded
                 .iter()
                 .map(|e| Value::String {
-                    val: format!("{:?}", e),
+                    val: format!("{e:?}"),
                     span,
                 })
                 .collect();
@@ -502,7 +502,7 @@ pub fn expr_to_value(expr: &Expr, span: Span) -> Value {
         Expr::RenameAlias { expr, function } => {
             let expr = expr_to_value(expr.as_ref(), span);
             let function = Value::String {
-                val: format!("{:?}", function),
+                val: format!("{function:?}"),
                 span,
             };

@@ -524,15 +524,15 @@ pub fn expr_to_value(expr: &Expr, span: Span) -> Value {
             let input = Value::List { vals: input, span };

             let function = Value::String {
-                val: format!("{:?}", function),
+                val: format!("{function:?}"),
                 span,
             };
             let output_type = Value::String {
-                val: format!("{:?}", output_type),
+                val: format!("{output_type:?}"),
                 span,
             };
             let options = Value::String {
-                val: format!("{:?}", options),
+                val: format!("{options:?}"),
                 span,
             };

@@ -558,11 +558,11 @@ pub fn expr_to_value(expr: &Expr, span: Span) -> Value {
             let input = Value::List { vals: input, span };

             let function = Value::String {
-                val: format!("{:?}", function),
+                val: format!("{function:?}"),
                 span,
             };
             let options = Value::String {
-                val: format!("{:?}", options),
+                val: format!("{options:?}"),
                 span,
             };

@@ -597,7 +597,7 @@ pub fn expr_to_value(expr: &Expr, span: Span) -> Value {
             .unwrap_or_else(|| Value::nothing(span));

             let options = Value::String {
-                val: format!("{:?}", options),
+                val: format!("{options:?}"),
                 span,
             };
@@ -46,7 +46,6 @@ impl Command for SubCommand {
             }];
             Value::Record { cols, vals, span }
         })
-        .into_iter()
         .into_pipeline_data(engine_state.ctrlc.clone()))
     }
@@ -178,7 +178,6 @@ impl Command for Ls {
         let mut hidden_dirs = vec![];

         Ok(paths_peek
-            .into_iter()
             .filter_map(move |x| match x {
                 Ok(path) => {
                     let metadata = match std::fs::symlink_metadata(&path) {
@@ -101,7 +101,6 @@ only unwrap the outer list, and leave the variable's contents untouched."#
         Ok(input
             .into_iter()
             .chain(vec)
-            .into_iter()
             .into_pipeline_data(engine_state.ctrlc.clone())
             .set_metadata(metadata))
     }
@@ -126,7 +126,6 @@ impl Command for Lines {
                 stdout: Some(stream),
                 ..
             } => Ok(RawStreamLinesAdapter::new(stream, head, skip_empty)
-                .into_iter()
                 .enumerate()
                 .map(move |(_idx, x)| match x {
                     Ok(x) => x,
@@ -106,7 +106,6 @@ only unwrap the outer list, and leave the variable's contents untouched."#
         Ok(vec
             .into_iter()
             .chain(input.into_iter())
-            .into_iter()
             .into_pipeline_data(engine_state.ctrlc.clone())
             .set_metadata(metadata))
     }
@@ -286,7 +286,6 @@ pub fn uniq(
                 index,
             }))
         })
-        .into_iter()
         .try_fold(
             HashMap::<String, ValueCounter>::new(),
             |mut counter, item| {
@@ -87,7 +87,7 @@ impl Command for NuCheck {
                 stdout: Some(stream),
                 ..
             } => {
-                let raw_stream: Vec<_> = stream.stream.into_iter().collect();
+                let raw_stream: Vec<_> = stream.stream.collect();
                 for r in raw_stream {
                     match r {
                         Ok(v) => contents.extend(v),
@@ -1007,7 +1007,7 @@ fn convert_to_table2<'a>(
     }

     let mut last_index = 0;
-    for (row, item) in input.clone().into_iter().enumerate() {
+    for (row, item) in input.clone().enumerate() {
         if nu_utils::ctrl_c::was_pressed(&ctrlc) {
             return Ok(None);
         }

@@ -1134,7 +1134,7 @@ fn convert_to_table2<'a>(
             NuTable::create_cell(header.clone(), header_style(style_computer, header.clone()));
         data[0].push(head_cell);

-        for (row, item) in input.clone().into_iter().enumerate() {
+        for (row, item) in input.clone().enumerate() {
             if nu_utils::ctrl_c::was_pressed(&ctrlc) {
                 return Ok(None);
             }
@@ -339,7 +339,7 @@ fn convert_to_table2<'a>(
             ));
         }

-        for (row, item) in input.clone().into_iter().enumerate() {
+        for (row, item) in input.clone().enumerate() {
             if let Some(ctrlc) = &ctrlc {
                 if ctrlc.load(Ordering::SeqCst) {
                     return Ok(None);

@@ -436,7 +436,7 @@ fn convert_to_table2<'a>(
             header_style(style_computer, header.clone()),
         ));

-        for (row, item) in input.clone().into_iter().enumerate() {
+        for (row, item) in input.clone().enumerate() {
            if let Some(ctrlc) = &ctrlc {
                if ctrlc.load(Ordering::SeqCst) {
                    return Ok(None);

@@ -476,7 +476,7 @@ fn convert_to_table2<'a>(

        column_width = string_width(&header);

-        for (row, item) in input.clone().into_iter().enumerate() {
+        for (row, item) in input.clone().enumerate() {
            if let Some(ctrlc) = &ctrlc {
                if ctrlc.load(Ordering::SeqCst) {
                    return Ok(None);

@@ -503,7 +503,7 @@ fn convert_to_table2<'a>(

        column_width = string_width(&header);

-        for (row, item) in input.clone().into_iter().enumerate() {
+        for (row, item) in input.clone().enumerate() {
            if let Some(ctrlc) = &ctrlc {
                if ctrlc.load(Ordering::SeqCst) {
                    return Ok(None);
@@ -268,7 +268,7 @@ where
            }
        }
        _ => {
-           if chf == b'-' || (b'0'..=b'9').contains(&chf) {
+           if chf == b'-' || chf.is_ascii_digit() {
                let mut pn = ParseNumber::new(self.str_buf.iter().copied());
                match pn.parse(false) {
                    Ok(Number::F64(v)) => {
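The final hunk above is the `.is_ascii_digit()` item from the summary: a hand-written byte-range test is replaced by the standard `u8::is_ascii_digit` method, which newer clippy suggests (likely via the `manual_is_ascii_check` lint). A small standalone check, not part of the commit, showing the two forms agree:

```rust
fn main() {
    // The manual range check and the standard helper agree for every byte.
    for b in 0u8..=255 {
        assert_eq!((b'0'..=b'9').contains(&b), b.is_ascii_digit());
    }
    println!("manual range check and is_ascii_digit agree on all 256 bytes");
}
```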