Dataframe feature (#361)

* custom value trait

* functions for custom value trait

* custom trait behind flag

* open dataframe command

* command to-df for basic types

* follow path for dataframe

* dataframe operations

* dataframe not default feature

* custom as default feature

* corrected examples in command
Fernando Herrera 2021-11-23 08:14:40 +00:00 committed by GitHub
parent a2aaeb38ed
commit 76019f434e
27 changed files with 3375 additions and 53 deletions

Cargo.lock generated

File diff suppressed because it is too large

View file

@ -13,6 +13,7 @@ members = [
"crates/nu-command",
"crates/nu-protocol",
"crates/nu-plugin",
"crates/nu-dataframe",
"crates/nu_plugin_inc",
]
@ -37,7 +38,9 @@ ctrlc = "3.2.1"
[features]
plugin = ["nu-plugin", "nu-parser/plugin", "nu-command/plugin", "nu-protocol/plugin"]
default = ["plugin"]
custom = ["nu-command/custom", "nu-protocol/custom"]
dataframe = ["custom", "nu-command/dataframe"]
default = ["plugin", "custom"]
[dev-dependencies]
tempfile = "3.2.0"

View file

@ -13,8 +13,8 @@ nu-protocol = { path = "../nu-protocol" }
nu-table = { path = "../nu-table" }
nu-term-grid = { path = "../nu-term-grid" }
nu-parser = { path = "../nu-parser" }
nu-dataframe = { path = "../nu-dataframe", optional = true }
nu-ansi-term = { path = "../nu-ansi-term" }
trash = { version = "1.3.0", optional = true }
unicode-segmentation = "1.8.0"
@ -45,6 +45,13 @@ itertools = "0.10.0"
calamine = "0.18.0"
rand = "0.8"
[dependencies.polars]
version = "0.17.0"
optional = true
features = ["default", "parquet", "json"]
[features]
trash-support = ["trash"]
plugin = ["nu-parser/plugin"]
custom = ["nu-protocol/custom"]
dataframe = ["custom", "nu-dataframe", "polars"]

View file

@ -0,0 +1,5 @@
mod open;
mod to_df;
pub use open::OpenDataFrame;
pub use to_df::ToDataFrame;

View file

@ -0,0 +1,195 @@
use std::{fs::File, path::PathBuf};
use nu_dataframe::NuDataFrame;
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Spanned, SyntaxShape,
};
use polars::prelude::{CsvEncoding, CsvReader, JsonReader, ParquetReader, SerReader};
#[derive(Clone)]
pub struct OpenDataFrame;
impl Command for OpenDataFrame {
fn name(&self) -> &str {
"open-df"
}
fn usage(&self) -> &str {
"Opens a CSV, JSON or Parquet file to create a dataframe"
}
fn signature(&self) -> Signature {
Signature::build("open-df")
.required(
"file",
SyntaxShape::Filepath,
"file path to load values from",
)
.named(
"delimiter",
SyntaxShape::String,
"file delimiter character. CSV file",
Some('d'),
)
.switch(
"no-header",
"Indicates if file doesn't have header. CSV file",
None,
)
.named(
"infer-schema",
SyntaxShape::Number,
"Number of rows to infer the schema of the file. CSV file",
None,
)
.named(
"skip-rows",
SyntaxShape::Number,
"Number of rows to skip from file. CSV file",
None,
)
.named(
"columns",
SyntaxShape::List(Box::new(SyntaxShape::String)),
"Columns to be selected from csv file. CSV file",
None,
)
.category(Category::Custom("dataframe".into()))
}
fn examples(&self) -> Vec<Example> {
vec![Example {
description: "Takes a file name and creates a dataframe",
example: "open-df test.csv",
result: None,
}]
}
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
_input: PipelineData,
) -> Result<PipelineData, ShellError> {
command(engine_state, stack, call)
}
}
fn command(
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
) -> Result<PipelineData, ShellError> {
let span = call.head;
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
let df = match file.item.extension() {
Some(e) => match e.to_str() {
Some("csv") => from_csv(engine_state, stack, call),
Some("parquet") => from_parquet(engine_state, stack, call),
Some("json") => from_json(engine_state, stack, call),
_ => Err(ShellError::FileNotFoundCustom(
"Not a csv, parquet or json file".into(),
file.span,
)),
},
None => Err(ShellError::FileNotFoundCustom(
"File without extension".into(),
file.span,
)),
}?;
Ok(PipelineData::Value(NuDataFrame::dataframe_into_value(
df, span,
)))
}
fn from_parquet(
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
) -> Result<polars::prelude::DataFrame, ShellError> {
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
let r = File::open(&file.item).map_err(|e| ShellError::InternalError(e.to_string()))?;
let reader = ParquetReader::new(r);
reader
.finish()
.map_err(|e| ShellError::InternalError(format!("{:?}", e)))
}
fn from_json(
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
) -> Result<polars::prelude::DataFrame, ShellError> {
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
let r = File::open(&file.item).map_err(|e| ShellError::InternalError(e.to_string()))?;
let reader = JsonReader::new(r);
reader
.finish()
.map_err(|e| ShellError::InternalError(e.to_string()))
}
fn from_csv(
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
) -> Result<polars::prelude::DataFrame, ShellError> {
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
let delimiter: Option<Spanned<String>> = call.get_flag(engine_state, stack, "delimiter")?;
let no_header: bool = call.has_flag("no-header");
let infer_schema: Option<usize> = call.get_flag(engine_state, stack, "infer-schema")?;
let skip_rows: Option<usize> = call.get_flag(engine_state, stack, "skip-rows")?;
let columns: Option<Vec<String>> = call.get_flag(engine_state, stack, "columns")?;
let csv_reader = CsvReader::from_path(&file.item)
.map_err(|e| ShellError::InternalError(e.to_string()))?
.with_encoding(CsvEncoding::LossyUtf8);
let csv_reader = match delimiter {
None => csv_reader,
Some(d) => {
if d.item.len() != 1 {
return Err(ShellError::InternalError(
"Delimiter has to be one char".into(),
));
} else {
let delimiter = match d.item.chars().next() {
Some(d) => d as u8,
None => unreachable!(),
};
csv_reader.with_delimiter(delimiter)
}
}
};
let csv_reader = csv_reader.has_header(!no_header);
let csv_reader = match infer_schema {
None => csv_reader,
Some(r) => csv_reader.infer_schema(Some(r)),
};
let csv_reader = match skip_rows {
None => csv_reader,
Some(r) => csv_reader.with_skip_rows(r),
};
let csv_reader = match columns {
None => csv_reader,
Some(columns) => csv_reader.with_columns(Some(columns)),
};
csv_reader
.finish()
.map_err(|e| ShellError::InternalError(e.to_string()))
}

View file

@ -0,0 +1,59 @@
use nu_dataframe::NuDataFrame;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature,
};
#[derive(Clone)]
pub struct ToDataFrame;
impl Command for ToDataFrame {
fn name(&self) -> &str {
"to-df"
}
fn usage(&self) -> &str {
"Converts a List, Table or Dictionary into a dataframe"
}
fn signature(&self) -> Signature {
Signature::build("to-df").category(Category::Custom("dataframe".into()))
}
fn examples(&self) -> Vec<Example> {
vec![
Example {
description: "Takes a dictionary and creates a dataframe",
example: "[[a b];[1 2] [3 4]] | to-df",
result: None,
},
Example {
description: "Takes a list of tables and creates a dataframe",
example: "[[1 2 a] [3 4 b] [5 6 c]] | to-df",
result: None,
},
Example {
description: "Takes a list and creates a dataframe",
example: "[a b c] | to-df",
result: None,
},
Example {
description: "Takes a list of booleans and creates a dataframe",
example: "[$true $true $false] | to-df",
result: None,
},
]
}
fn run(
&self,
_engine_state: &EngineState,
_stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let df = NuDataFrame::try_from_iter(input.into_iter())?;
Ok(PipelineData::Value(NuDataFrame::to_value(df, call.head)))
}
}
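
The conversion behind to-df is NuDataFrame::try_from_iter (defined in the nu-dataframe crate below). A rough, illustrative sketch of that path outside the command, assuming the dataframe and custom features are enabled (the demo function is a hypothetical name, not part of this commit): plain, non-record values are collected into a single column named "0", mirroring [a b c] | to-df.

use nu_dataframe::NuDataFrame;
use nu_protocol::{ShellError, Span, Value};

// Illustrative only: mirrors what ToDataFrame::run does with its pipeline input.
fn demo() -> Result<Value, ShellError> {
    let input = vec![
        Value::String { val: "a".into(), span: Span::unknown() },
        Value::String { val: "b".into(), span: Span::unknown() },
        Value::String { val: "c".into(), span: Span::unknown() },
    ];
    // Non-record values end up in a single column named "0"
    let df = NuDataFrame::try_from_iter(input.into_iter())?;
    Ok(df.to_value(Span::unknown()))
}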

View file

@ -143,6 +143,9 @@ pub fn create_default_context() -> EngineState {
#[cfg(feature = "plugin")]
bind_command!(Register);
#[cfg(feature = "dataframe")]
bind_command!(OpenDataFrame, ToDataFrame);
// This is a WIP proof of concept
bind_command!(ListGitBranches, Git, GitCheckout, Source);

View file

@ -70,8 +70,6 @@ pub fn value_to_json_value(v: &Value) -> Result<nu_json::Value, ShellError> {
Value::List { vals, .. } => nu_json::Value::Array(json_list(vals)?),
Value::Error { error } => return Err(error.clone()),
Value::Block { .. } | Value::Range { .. } => nu_json::Value::Null,
#[cfg(feature = "dataframe")]
UntaggedValue::DataFrame(_) | UntaggedValue::FrameStruct(_) => serde_json::Value::Null,
Value::Binary { val, .. } => {
nu_json::Value::Array(val.iter().map(|x| nu_json::Value::U64(*x as u64)).collect())
}
@ -82,6 +80,8 @@ pub fn value_to_json_value(v: &Value) -> Result<nu_json::Value, ShellError> {
}
nu_json::Value::Object(m)
}
#[cfg(feature = "custom")]
Value::CustomValue { val, .. } => val.to_json(),
})
}

View file

@ -13,6 +13,9 @@ mod strings;
mod system;
mod viewers;
#[cfg(feature = "dataframe")]
mod dataframe;
pub use conversions::*;
pub use core_commands::*;
pub use date::*;
@ -27,3 +30,6 @@ pub use math::*;
pub use strings::*;
pub use system::*;
pub use viewers::*;
#[cfg(feature = "dataframe")]
pub use dataframe::*;

View file

@ -107,6 +107,11 @@ impl Command for Table {
.into_pipeline_data())
}
PipelineData::Value(Value::Error { error }) => Err(error),
#[cfg(feature = "custom")]
PipelineData::Value(Value::CustomValue { val, span }) => {
let base_pipeline = val.to_base_value(span)?.into_pipeline_data();
self.run(engine_state, stack, call, base_pipeline)
}
x => Ok(x),
}
}

View file

@ -0,0 +1,14 @@
[package]
name = "nu-dataframe"
version = "0.1.0"
edition = "2018"
[dependencies]
chrono = { version="0.4.19", features=["serde"] }
serde = {version = "1.0.130", features = ["derive"]}
num = "0.4.0"
nu-protocol = { path = "../nu-protocol", features = ["custom"] }
nu-json = { path = "../nu-json"}
indexmap = { version="1.7.0", features=["serde-1"] }
polars = { version = "0.17.0", features = ["default", "serde", "object", "checked_arithmetic", "strings"] }

View file

@ -0,0 +1,3 @@
# nu-dataframe
The nu-dataframe crate holds the definitions of the dataframe structure

View file

@ -0,0 +1,568 @@
use super::{operations::Axis, NuDataFrame};
use nu_protocol::{ast::Operator, ShellError, Span, Spanned, Value};
use num::Zero;
use polars::prelude::{
BooleanType, ChunkCompare, ChunkedArray, DataType, Float64Type, Int64Type, IntoSeries,
NumOpsDispatchChecked, PolarsError, Series,
};
use std::ops::{Add, BitAnd, BitOr, Div, Mul, Sub};
pub fn between_dataframes(
operator: Spanned<Operator>,
left: Value,
lhs: &NuDataFrame,
right: &Value,
rhs: &NuDataFrame,
operation_span: Span,
) -> Result<Value, ShellError> {
match operator.item {
Operator::Plus => match lhs.append_df(rhs, Axis::Row, operation_span) {
Ok(df) => Ok(df.to_value(operation_span)),
Err(e) => Err(e),
},
_ => Err(ShellError::OperatorMismatch {
op_span: operator.span,
lhs_ty: left.get_type(),
lhs_span: left.span()?,
rhs_ty: right.get_type(),
rhs_span: right.span()?,
}),
}
}
pub fn compute_between_series(
operator: Spanned<Operator>,
left: Value,
lhs: &Series,
right: &Value,
rhs: &Series,
operation_span: Span,
) -> Result<Value, ShellError> {
match operator.item {
Operator::Plus => {
let mut res = lhs + rhs;
let name = format!("sum_{}_{}", lhs.name(), rhs.name());
res.rename(&name);
NuDataFrame::series_to_value(res, operation_span)
}
Operator::Minus => {
let mut res = lhs - rhs;
let name = format!("sub_{}_{}", lhs.name(), rhs.name());
res.rename(&name);
NuDataFrame::series_to_value(res, operation_span)
}
Operator::Multiply => {
let mut res = lhs * rhs;
let name = format!("mul_{}_{}", lhs.name(), rhs.name());
res.rename(&name);
NuDataFrame::series_to_value(res, operation_span)
}
Operator::Divide => {
let res = lhs.checked_div(rhs);
match res {
Ok(mut res) => {
let name = format!("div_{}_{}", lhs.name(), rhs.name());
res.rename(&name);
NuDataFrame::series_to_value(res, operation_span)
}
Err(e) => Err(ShellError::InternalError(e.to_string())),
}
}
Operator::Equal => {
let mut res = Series::eq(lhs, rhs).into_series();
let name = format!("eq_{}_{}", lhs.name(), rhs.name());
res.rename(&name);
NuDataFrame::series_to_value(res, operation_span)
}
Operator::NotEqual => {
let mut res = Series::neq(lhs, rhs).into_series();
let name = format!("neq_{}_{}", lhs.name(), rhs.name());
res.rename(&name);
NuDataFrame::series_to_value(res, operation_span)
}
Operator::LessThan => {
let mut res = Series::lt(lhs, rhs).into_series();
let name = format!("lt_{}_{}", lhs.name(), rhs.name());
res.rename(&name);
NuDataFrame::series_to_value(res, operation_span)
}
Operator::LessThanOrEqual => {
let mut res = Series::lt_eq(lhs, rhs).into_series();
let name = format!("lte_{}_{}", lhs.name(), rhs.name());
res.rename(&name);
NuDataFrame::series_to_value(res, operation_span)
}
Operator::GreaterThan => {
let mut res = Series::gt(lhs, rhs).into_series();
let name = format!("gt_{}_{}", lhs.name(), rhs.name());
res.rename(&name);
NuDataFrame::series_to_value(res, operation_span)
}
Operator::GreaterThanOrEqual => {
let mut res = Series::gt_eq(lhs, rhs).into_series();
let name = format!("gte_{}_{}", lhs.name(), rhs.name());
res.rename(&name);
NuDataFrame::series_to_value(res, operation_span)
}
Operator::And => match lhs.dtype() {
DataType::Boolean => {
let lhs_cast = lhs.bool();
let rhs_cast = rhs.bool();
match (lhs_cast, rhs_cast) {
(Ok(l), Ok(r)) => {
let mut res = l.bitand(r).into_series();
let name = format!("and_{}_{}", lhs.name(), rhs.name());
res.rename(&name);
NuDataFrame::series_to_value(res, operation_span)
}
_ => Err(ShellError::InternalError(
"unable to cast to boolean".into(),
)),
}
}
_ => Err(ShellError::IncompatibleParametersSingle(
format!(
"Operation {} can only be done with boolean values",
operator.item
),
operation_span,
)),
},
Operator::Or => match lhs.dtype() {
DataType::Boolean => {
let lhs_cast = lhs.bool();
let rhs_cast = rhs.bool();
match (lhs_cast, rhs_cast) {
(Ok(l), Ok(r)) => {
let mut res = l.bitor(r).into_series();
let name = format!("or_{}_{}", lhs.name(), rhs.name());
res.rename(&name);
NuDataFrame::series_to_value(res, operation_span)
}
_ => Err(ShellError::InternalError(
"unable to cast to boolean".into(),
)),
}
}
_ => Err(ShellError::IncompatibleParametersSingle(
format!(
"Operation {} can only be done with boolean values",
operator.item
),
operation_span,
)),
},
_ => Err(ShellError::OperatorMismatch {
op_span: operator.span,
lhs_ty: left.get_type(),
lhs_span: left.span()?,
rhs_ty: right.get_type(),
rhs_span: right.span()?,
}),
}
}
pub fn compute_series_single_value(
operator: Spanned<Operator>,
lhs: &NuDataFrame,
lhs_span: &Span,
left: Value,
right: &Value,
) -> Result<Value, ShellError> {
if !lhs.is_series() {
return Err(ShellError::OperatorMismatch {
op_span: operator.span,
lhs_ty: left.get_type(),
lhs_span: left.span()?,
rhs_ty: right.get_type(),
rhs_span: right.span()?,
});
}
let lhs = lhs.as_series(*lhs_span)?;
match operator.item {
Operator::Plus => match &right {
Value::Int { val, .. } => {
compute_series_i64(&lhs, *val, <ChunkedArray<Int64Type>>::add, *lhs_span)
}
Value::Float { val, .. } => {
compute_series_decimal(&lhs, *val, <ChunkedArray<Float64Type>>::add, *lhs_span)
}
_ => Err(ShellError::OperatorMismatch {
op_span: operator.span,
lhs_ty: left.get_type(),
lhs_span: left.span()?,
rhs_ty: right.get_type(),
rhs_span: right.span()?,
}),
},
Operator::Minus => match &right {
Value::Int { val, .. } => {
compute_series_i64(&lhs, *val, <ChunkedArray<Int64Type>>::sub, *lhs_span)
}
Value::Float { val, .. } => {
compute_series_decimal(&lhs, *val, <ChunkedArray<Float64Type>>::sub, *lhs_span)
}
_ => Err(ShellError::OperatorMismatch {
op_span: operator.span,
lhs_ty: left.get_type(),
lhs_span: left.span()?,
rhs_ty: right.get_type(),
rhs_span: right.span()?,
}),
},
Operator::Multiply => match &right {
Value::Int { val, .. } => {
compute_series_i64(&lhs, *val, <ChunkedArray<Int64Type>>::mul, *lhs_span)
}
Value::Float { val, .. } => {
compute_series_decimal(&lhs, *val, <ChunkedArray<Float64Type>>::mul, *lhs_span)
}
_ => Err(ShellError::OperatorMismatch {
op_span: operator.span,
lhs_ty: left.get_type(),
lhs_span: left.span()?,
rhs_ty: right.get_type(),
rhs_span: right.span()?,
}),
},
Operator::Divide => match &right {
Value::Int { val, span } => {
if *val == 0 {
Err(ShellError::DivisionByZero(*span))
} else {
compute_series_i64(&lhs, *val, <ChunkedArray<Int64Type>>::div, *lhs_span)
}
}
Value::Float { val, span } => {
if val.is_zero() {
Err(ShellError::DivisionByZero(*span))
} else {
compute_series_decimal(&lhs, *val, <ChunkedArray<Float64Type>>::div, *lhs_span)
}
}
_ => Err(ShellError::OperatorMismatch {
op_span: operator.span,
lhs_ty: left.get_type(),
lhs_span: left.span()?,
rhs_ty: right.get_type(),
rhs_span: right.span()?,
}),
},
Operator::Equal => match &right {
Value::Int { val, .. } => compare_series_i64(&lhs, *val, ChunkedArray::eq, *lhs_span),
Value::Float { val, .. } => {
compare_series_decimal(&lhs, *val, ChunkedArray::eq, *lhs_span)
}
_ => Err(ShellError::OperatorMismatch {
op_span: operator.span,
lhs_ty: left.get_type(),
lhs_span: left.span()?,
rhs_ty: right.get_type(),
rhs_span: right.span()?,
}),
},
Operator::NotEqual => match &right {
Value::Int { val, .. } => compare_series_i64(&lhs, *val, ChunkedArray::neq, *lhs_span),
Value::Float { val, .. } => {
compare_series_decimal(&lhs, *val, ChunkedArray::neq, *lhs_span)
}
_ => Err(ShellError::OperatorMismatch {
op_span: operator.span,
lhs_ty: left.get_type(),
lhs_span: left.span()?,
rhs_ty: right.get_type(),
rhs_span: right.span()?,
}),
},
Operator::LessThan => match &right {
Value::Int { val, .. } => compare_series_i64(&lhs, *val, ChunkedArray::lt, *lhs_span),
Value::Float { val, .. } => {
compare_series_decimal(&lhs, *val, ChunkedArray::lt, *lhs_span)
}
_ => Err(ShellError::OperatorMismatch {
op_span: operator.span,
lhs_ty: left.get_type(),
lhs_span: left.span()?,
rhs_ty: right.get_type(),
rhs_span: right.span()?,
}),
},
Operator::LessThanOrEqual => match &right {
Value::Int { val, .. } => {
compare_series_i64(&lhs, *val, ChunkedArray::lt_eq, *lhs_span)
}
Value::Float { val, .. } => {
compare_series_decimal(&lhs, *val, ChunkedArray::lt_eq, *lhs_span)
}
_ => Err(ShellError::OperatorMismatch {
op_span: operator.span,
lhs_ty: left.get_type(),
lhs_span: left.span()?,
rhs_ty: right.get_type(),
rhs_span: right.span()?,
}),
},
Operator::GreaterThan => match &right {
Value::Int { val, .. } => compare_series_i64(&lhs, *val, ChunkedArray::gt, *lhs_span),
Value::Float { val, .. } => {
compare_series_decimal(&lhs, *val, ChunkedArray::gt, *lhs_span)
}
_ => Err(ShellError::OperatorMismatch {
op_span: operator.span,
lhs_ty: left.get_type(),
lhs_span: left.span()?,
rhs_ty: right.get_type(),
rhs_span: right.span()?,
}),
},
Operator::GreaterThanOrEqual => match &right {
Value::Int { val, .. } => {
compare_series_i64(&lhs, *val, ChunkedArray::gt_eq, *lhs_span)
}
Value::Float { val, .. } => {
compare_series_decimal(&lhs, *val, ChunkedArray::gt_eq, *lhs_span)
}
_ => Err(ShellError::OperatorMismatch {
op_span: operator.span,
lhs_ty: left.get_type(),
lhs_span: left.span()?,
rhs_ty: right.get_type(),
rhs_span: right.span()?,
}),
},
Operator::Contains => match &right {
Value::String { val, .. } => contains_series_pat(&lhs, val, *lhs_span),
_ => Err(ShellError::OperatorMismatch {
op_span: operator.span,
lhs_ty: left.get_type(),
lhs_span: left.span()?,
rhs_ty: right.get_type(),
rhs_span: right.span()?,
}),
},
_ => Err(ShellError::OperatorMismatch {
op_span: operator.span,
lhs_ty: left.get_type(),
lhs_span: left.span()?,
rhs_ty: right.get_type(),
rhs_span: right.span()?,
}),
}
}
fn compute_series_i64<F>(series: &Series, val: i64, f: F, span: Span) -> Result<Value, ShellError>
where
F: Fn(ChunkedArray<Int64Type>, i64) -> ChunkedArray<Int64Type>,
{
match series.dtype() {
DataType::UInt32 | DataType::Int32 | DataType::UInt64 => {
let to_i64 = series.cast(&DataType::Int64);
match to_i64 {
Ok(series) => {
let casted = series.i64();
compute_casted_i64(casted, val, f, span)
}
Err(e) => Err(ShellError::InternalError(e.to_string())),
}
}
DataType::Int64 => {
let casted = series.i64();
compute_casted_i64(casted, val, f, span)
}
_ => Err(ShellError::InternalError(format!(
"Series of type {} can not be used for operations with an i64 value",
series.dtype()
))),
}
}
fn compute_casted_i64<F>(
casted: Result<&ChunkedArray<Int64Type>, PolarsError>,
val: i64,
f: F,
span: Span,
) -> Result<Value, ShellError>
where
F: Fn(ChunkedArray<Int64Type>, i64) -> ChunkedArray<Int64Type>,
{
match casted {
Ok(casted) => {
let res = f(casted.clone(), val);
let res = res.into_series();
NuDataFrame::series_to_value(res, span)
}
Err(e) => Err(ShellError::InternalError(e.to_string())),
}
}
fn compute_series_decimal<F>(
series: &Series,
val: f64,
f: F,
span: Span,
) -> Result<Value, ShellError>
where
F: Fn(ChunkedArray<Float64Type>, f64) -> ChunkedArray<Float64Type>,
{
match series.dtype() {
DataType::Float32 => {
let to_f64 = series.cast(&DataType::Float64);
match to_f64 {
Ok(series) => {
let casted = series.f64();
compute_casted_f64(casted, val, f, span)
}
Err(e) => Err(ShellError::InternalError(e.to_string())),
}
}
DataType::Float64 => {
let casted = series.f64();
compute_casted_f64(casted, val, f, span)
}
_ => Err(ShellError::InternalError(format!(
"Series of type {} can not be used for operations with a decimal value",
series.dtype()
))),
}
}
fn compute_casted_f64<F>(
casted: Result<&ChunkedArray<Float64Type>, PolarsError>,
val: f64,
f: F,
span: Span,
) -> Result<Value, ShellError>
where
F: Fn(ChunkedArray<Float64Type>, f64) -> ChunkedArray<Float64Type>,
{
match casted {
Ok(casted) => {
let res = f(casted.clone(), val);
let res = res.into_series();
NuDataFrame::series_to_value(res, span)
}
Err(e) => Err(ShellError::InternalError(e.to_string())),
}
}
fn compare_series_i64<F>(series: &Series, val: i64, f: F, span: Span) -> Result<Value, ShellError>
where
F: Fn(&ChunkedArray<Int64Type>, i64) -> ChunkedArray<BooleanType>,
{
match series.dtype() {
DataType::UInt32 | DataType::Int32 | DataType::UInt64 => {
let to_i64 = series.cast(&DataType::Int64);
match to_i64 {
Ok(series) => {
let casted = series.i64();
compare_casted_i64(casted, val, f, span)
}
Err(e) => Err(ShellError::InternalError(e.to_string())),
}
}
DataType::Int64 => {
let casted = series.i64();
compare_casted_i64(casted, val, f, span)
}
_ => Err(ShellError::InternalError(format!(
"Series of type {} can not be used for operations with an i64 value",
series.dtype()
))),
}
}
fn compare_casted_i64<F>(
casted: Result<&ChunkedArray<Int64Type>, PolarsError>,
val: i64,
f: F,
span: Span,
) -> Result<Value, ShellError>
where
F: Fn(&ChunkedArray<Int64Type>, i64) -> ChunkedArray<BooleanType>,
{
match casted {
Ok(casted) => {
let res = f(casted, val);
let res = res.into_series();
NuDataFrame::series_to_value(res, span)
}
Err(e) => Err(ShellError::InternalError(e.to_string())),
}
}
fn compare_series_decimal<F>(
series: &Series,
val: f64,
f: F,
span: Span,
) -> Result<Value, ShellError>
where
F: Fn(&ChunkedArray<Float64Type>, f64) -> ChunkedArray<BooleanType>,
{
match series.dtype() {
DataType::Float32 => {
let to_f64 = series.cast(&DataType::Float64);
match to_f64 {
Ok(series) => {
let casted = series.f64();
compare_casted_f64(casted, val, f, span)
}
Err(e) => Err(ShellError::InternalError(e.to_string())),
}
}
DataType::Float64 => {
let casted = series.f64();
compare_casted_f64(casted, val, f, span)
}
_ => Err(ShellError::InternalError(format!(
"Series of type {} can not be used for operations with a decimal value",
series.dtype()
))),
}
}
fn compare_casted_f64<F>(
casted: Result<&ChunkedArray<Float64Type>, PolarsError>,
val: f64,
f: F,
span: Span,
) -> Result<Value, ShellError>
where
F: Fn(&ChunkedArray<Float64Type>, f64) -> ChunkedArray<BooleanType>,
{
match casted {
Ok(casted) => {
let res = f(casted, val);
let res = res.into_series();
NuDataFrame::series_to_value(res, span)
}
Err(e) => Err(ShellError::InternalError(e.to_string())),
}
}
fn contains_series_pat(series: &Series, pat: &str, span: Span) -> Result<Value, ShellError> {
let casted = series.utf8();
match casted {
Ok(casted) => {
let res = casted.contains(pat);
match res {
Ok(res) => {
let res = res.into_series();
NuDataFrame::series_to_value(res, span)
}
Err(e) => Err(ShellError::InternalError(e.to_string())),
}
}
Err(e) => Err(ShellError::InternalError(e.to_string())),
}
}
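
All of the arithmetic helpers above share one pattern: the operator method itself is passed in as the closure f, so a single generic helper covers addition, subtraction, multiplication and division. A minimal self-contained sketch of that pattern, assuming the polars 0.17 API pinned in the workspace (apply_i64 and demo are illustrative names, not part of this commit):

use polars::prelude::{ChunkedArray, Int64Type, IntoSeries, NamedFrom, Series};
use std::ops::Add;

// One helper serves several operators because the operator method is the `f` argument,
// just like compute_series_i64 above.
fn apply_i64<F>(ca: ChunkedArray<Int64Type>, val: i64, f: F) -> Series
where
    F: Fn(ChunkedArray<Int64Type>, i64) -> ChunkedArray<Int64Type>,
{
    f(ca, val).into_series()
}

fn demo() -> Series {
    let s = Series::new("a", &[1i64, 2, 3]);
    let ca = s.i64().expect("just built as i64").clone();
    // Same call shape as compute_series_i64(&series, 5, <ChunkedArray<Int64Type>>::add, span)
    apply_i64(ca, 5, <ChunkedArray<Int64Type>>::add)
}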

View file

@ -0,0 +1,537 @@
use super::NuDataFrame;
use crate::DataFrameValue;
use chrono::{DateTime, FixedOffset, NaiveDateTime};
use indexmap::map::{Entry, IndexMap};
use nu_protocol::{ShellError, Span, Value};
use polars::chunked_array::object::builder::ObjectChunkedBuilder;
use polars::chunked_array::ChunkedArray;
use polars::prelude::{
DataFrame, DataType, DatetimeChunked, Int64Type, IntoSeries, NamedFrom, NewChunkedArray,
ObjectType, PolarsNumericType, Series,
};
use std::ops::{Deref, DerefMut};
const SECS_PER_DAY: i64 = 86_400;
#[derive(Debug)]
pub struct Column {
name: String,
values: Vec<Value>,
}
impl Column {
pub fn new(name: String, values: Vec<Value>) -> Self {
Self { name, values }
}
pub fn new_empty(name: String) -> Self {
Self {
name,
values: Vec::new(),
}
}
pub fn name(&self) -> &str {
self.name.as_str()
}
pub fn iter(&self) -> impl Iterator<Item = &Value> {
self.values.iter()
}
}
impl IntoIterator for Column {
type Item = Value;
type IntoIter = std::vec::IntoIter<Self::Item>;
fn into_iter(self) -> Self::IntoIter {
self.values.into_iter()
}
}
impl Deref for Column {
type Target = Vec<Value>;
fn deref(&self) -> &Self::Target {
&self.values
}
}
impl DerefMut for Column {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.values
}
}
#[derive(Debug)]
pub enum InputType {
Integer,
Float,
String,
Boolean,
Object,
Date,
Duration,
}
#[derive(Debug)]
pub struct TypedColumn {
column: Column,
column_type: Option<InputType>,
}
impl TypedColumn {
fn new_empty(name: String) -> Self {
Self {
column: Column::new_empty(name),
column_type: None,
}
}
}
impl Deref for TypedColumn {
type Target = Column;
fn deref(&self) -> &Self::Target {
&self.column
}
}
impl DerefMut for TypedColumn {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.column
}
}
pub type ColumnMap = IndexMap<String, TypedColumn>;
pub fn create_column(
series: &Series,
from_row: usize,
to_row: usize,
) -> Result<Column, ShellError> {
let size = to_row - from_row;
match series.dtype() {
DataType::Null => {
let values = std::iter::repeat(Value::Nothing {
span: Span::unknown(),
})
.take(size)
.collect::<Vec<Value>>();
Ok(Column::new(series.name().into(), values))
}
DataType::UInt8 => {
let casted = series
.u8()
.map_err(|e| ShellError::InternalError(e.to_string()))?;
Ok(column_from_casted(casted, from_row, size))
}
DataType::UInt16 => {
let casted = series
.u16()
.map_err(|e| ShellError::InternalError(e.to_string()))?;
Ok(column_from_casted(casted, from_row, size))
}
DataType::UInt32 => {
let casted = series
.u32()
.map_err(|e| ShellError::InternalError(e.to_string()))?;
Ok(column_from_casted(casted, from_row, size))
}
DataType::UInt64 => {
let casted = series
.u64()
.map_err(|e| ShellError::InternalError(e.to_string()))?;
Ok(column_from_casted(casted, from_row, size))
}
DataType::Int8 => {
let casted = series
.i8()
.map_err(|e| ShellError::InternalError(e.to_string()))?;
Ok(column_from_casted(casted, from_row, size))
}
DataType::Int16 => {
let casted = series
.i16()
.map_err(|e| ShellError::InternalError(e.to_string()))?;
Ok(column_from_casted(casted, from_row, size))
}
DataType::Int32 => {
let casted = series
.i32()
.map_err(|e| ShellError::InternalError(e.to_string()))?;
Ok(column_from_casted(casted, from_row, size))
}
DataType::Int64 => {
let casted = series
.i64()
.map_err(|e| ShellError::InternalError(e.to_string()))?;
Ok(column_from_casted(casted, from_row, size))
}
DataType::Float32 => {
let casted = series
.f32()
.map_err(|e| ShellError::InternalError(e.to_string()))?;
Ok(column_from_casted(casted, from_row, size))
}
DataType::Float64 => {
let casted = series
.f64()
.map_err(|e| ShellError::InternalError(e.to_string()))?;
Ok(column_from_casted(casted, from_row, size))
}
DataType::Boolean => {
let casted = series
.bool()
.map_err(|e| ShellError::InternalError(e.to_string()))?;
let values = casted
.into_iter()
.skip(from_row)
.take(size)
.map(|v| match v {
Some(a) => Value::Bool {
val: a,
span: Span::unknown(),
},
None => Value::Nothing {
span: Span::unknown(),
},
})
.collect::<Vec<Value>>();
Ok(Column::new(casted.name().into(), values))
}
DataType::Utf8 => {
let casted = series
.utf8()
.map_err(|e| ShellError::InternalError(e.to_string()))?;
let values = casted
.into_iter()
.skip(from_row)
.take(size)
.map(|v| match v {
Some(a) => Value::String {
val: a.into(),
span: Span::unknown(),
},
None => Value::Nothing {
span: Span::unknown(),
},
})
.collect::<Vec<Value>>();
Ok(Column::new(casted.name().into(), values))
}
DataType::Object(x) => {
let casted = series
.as_any()
.downcast_ref::<ChunkedArray<ObjectType<DataFrameValue>>>();
match casted {
None => Err(ShellError::InternalError(format!(
"Object not supported for conversion: {}",
x
))),
Some(ca) => {
let values = ca
.into_iter()
.skip(from_row)
.take(size)
.map(|v| match v {
Some(a) => a.get_value(),
None => Value::Nothing {
span: Span::unknown(),
},
})
.collect::<Vec<Value>>();
Ok(Column::new(ca.name().into(), values))
}
}
}
DataType::Date => {
let casted = series
.date()
.map_err(|e| ShellError::InternalError(e.to_string()))?;
let values = casted
.into_iter()
.skip(from_row)
.take(size)
.map(|v| match v {
Some(a) => {
// elapsed time in days since 1970-01-01
let seconds = a as i64 * SECS_PER_DAY;
let naive_datetime = NaiveDateTime::from_timestamp(seconds, 0);
// Zero length offset
let offset = FixedOffset::east(0);
let datetime = DateTime::<FixedOffset>::from_utc(naive_datetime, offset);
Value::Date {
val: datetime,
span: Span::unknown(),
}
}
None => Value::Nothing {
span: Span::unknown(),
},
})
.collect::<Vec<Value>>();
Ok(Column::new(casted.name().into(), values))
}
DataType::Datetime => {
let casted = series
.datetime()
.map_err(|e| ShellError::InternalError(e.to_string()))?;
let values = casted
.into_iter()
.skip(from_row)
.take(size)
.map(|v| match v {
Some(a) => {
// elapsed time in milliseconds since 1970-01-01
let seconds = a / 1000;
let naive_datetime = NaiveDateTime::from_timestamp(seconds, 0);
// Zero length offset
let offset = FixedOffset::east(0);
let datetime = DateTime::<FixedOffset>::from_utc(naive_datetime, offset);
Value::Date {
val: datetime,
span: Span::unknown(),
}
}
None => Value::Nothing {
span: Span::unknown(),
},
})
.collect::<Vec<Value>>();
Ok(Column::new(casted.name().into(), values))
}
DataType::Time => {
let casted = series
.time()
.map_err(|e| ShellError::InternalError(e.to_string()))?;
let values = casted
.into_iter()
.skip(from_row)
.take(size)
.map(|v| match v {
Some(nanoseconds) => Value::Duration {
val: nanoseconds,
span: Span::unknown(),
},
None => Value::Nothing {
span: Span::unknown(),
},
})
.collect::<Vec<Value>>();
Ok(Column::new(casted.name().into(), values))
}
e => Err(ShellError::InternalError(format!(
"Value not supported in nushell: {}",
e
))),
}
}
fn column_from_casted<T>(casted: &ChunkedArray<T>, from_row: usize, size: usize) -> Column
where
T: PolarsNumericType,
T::Native: Into<Value>,
{
let values = casted
.into_iter()
.skip(from_row)
.take(size)
.map(|v| match v {
Some(a) => a.into(),
None => Value::Nothing {
span: Span::unknown(),
},
})
.collect::<Vec<Value>>();
Column::new(casted.name().into(), values)
}
// Adds a separator to the vector of values using the column names from the
// dataframe to create the Values Row
pub fn add_separator(values: &mut Vec<Value>, df: &DataFrame) {
let mut cols = vec![];
let mut vals = vec![];
for name in df.get_column_names() {
cols.push(name.to_string());
vals.push(Value::String {
val: "...".into(),
span: Span::unknown(),
})
}
let extra_record = Value::Record {
cols,
vals,
span: Span::unknown(),
};
values.push(extra_record);
}
// Inserting the values found in a Value::List
pub fn insert_record(
column_values: &mut ColumnMap,
cols: &[String],
values: &[Value],
) -> Result<(), ShellError> {
for (col, value) in cols.iter().zip(values.iter()) {
insert_value(value.clone(), col.clone(), column_values)?;
}
Ok(())
}
pub fn insert_value(
value: Value,
key: String,
column_values: &mut ColumnMap,
) -> Result<(), ShellError> {
let col_val = match column_values.entry(key.clone()) {
Entry::Vacant(entry) => entry.insert(TypedColumn::new_empty(key)),
Entry::Occupied(entry) => entry.into_mut(),
};
// Check that the type of the value is the same
// as that of the previous value in the column
if col_val.values.is_empty() {
match &value {
Value::Int { .. } => {
col_val.column_type = Some(InputType::Integer);
}
Value::Float { .. } => {
col_val.column_type = Some(InputType::Float);
}
Value::String { .. } => {
col_val.column_type = Some(InputType::String);
}
Value::Bool { .. } => {
col_val.column_type = Some(InputType::Boolean);
}
Value::Date { .. } => {
col_val.column_type = Some(InputType::Date);
}
Value::Duration { .. } => {
col_val.column_type = Some(InputType::Duration);
}
_ => col_val.column_type = Some(InputType::Object),
}
col_val.values.push(value);
} else {
let prev_value = &col_val.values[col_val.values.len() - 1];
match (&prev_value, &value) {
(Value::Int { .. }, Value::Int { .. })
| (Value::Float { .. }, Value::Float { .. })
| (Value::String { .. }, Value::String { .. })
| (Value::Bool { .. }, Value::Bool { .. })
| (Value::Date { .. }, Value::Date { .. })
| (Value::Duration { .. }, Value::Duration { .. }) => col_val.values.push(value),
_ => {
col_val.column_type = Some(InputType::Object);
col_val.values.push(value);
}
}
}
Ok(())
}
// The ColumnMap has the parsed data from the StreamInput
// This data can be used to create a Series object that can initialize
// the dataframe based on the type of data that is found
pub fn from_parsed_columns(column_values: ColumnMap) -> Result<NuDataFrame, ShellError> {
let mut df_series: Vec<Series> = Vec::new();
for (name, column) in column_values {
if let Some(column_type) = &column.column_type {
match column_type {
InputType::Float => {
let series_values: Result<Vec<_>, _> =
column.values.iter().map(|v| v.as_f64()).collect();
let series = Series::new(&name, series_values?);
df_series.push(series)
}
InputType::Integer => {
let series_values: Result<Vec<_>, _> =
column.values.iter().map(|v| v.as_i64()).collect();
let series = Series::new(&name, series_values?);
df_series.push(series)
}
InputType::String => {
let series_values: Result<Vec<_>, _> =
column.values.iter().map(|v| v.as_string()).collect();
let series = Series::new(&name, series_values?);
df_series.push(series)
}
InputType::Boolean => {
let series_values: Result<Vec<_>, _> =
column.values.iter().map(|v| v.as_bool()).collect();
let series = Series::new(&name, series_values?);
df_series.push(series)
}
InputType::Object => {
let mut builder =
ObjectChunkedBuilder::<DataFrameValue>::new(&name, column.values.len());
for v in &column.values {
builder.append_value(DataFrameValue::new(v.clone()));
}
let res = builder.finish();
df_series.push(res.into_series())
}
InputType::Date => {
let it = column.values.iter().map(|v| {
if let Value::Date { val, .. } = &v {
Some(val.timestamp_millis())
} else {
None
}
});
let res: DatetimeChunked =
ChunkedArray::<Int64Type>::new_from_opt_iter(&name, it).into();
df_series.push(res.into_series())
}
InputType::Duration => {
let it = column.values.iter().map(|v| {
if let Value::Duration { val, .. } = &v {
Some(*val)
} else {
None
}
});
let res = ChunkedArray::<Int64Type>::new_from_opt_iter(&name, it);
df_series.push(res.into_series())
}
}
}
}
match DataFrame::new(df_series) {
Ok(df) => Ok(NuDataFrame::new(df)),
Err(e) => Err(ShellError::InternalError(e.to_string())),
}
}

View file

@ -0,0 +1,59 @@
use crate::NuDataFrame;
use nu_protocol::{ast::Operator, CustomValue, ShellError, Span, Value};
// CustomValue implementation for NuDataFrame
impl CustomValue for NuDataFrame {
fn typetag_name(&self) -> &'static str {
"dataframe"
}
fn typetag_deserialize(&self) {
unimplemented!("typetag_deserialize")
}
fn clone_value(&self, span: nu_protocol::Span) -> Value {
let cloned = NuDataFrame(self.0.clone());
Value::CustomValue {
val: Box::new(cloned),
span,
}
}
fn value_string(&self) -> String {
self.typetag_name().to_string()
}
fn to_base_value(&self, span: Span) -> Result<Value, ShellError> {
let vals = self.print()?;
Ok(Value::List { vals, span })
}
fn to_json(&self) -> nu_json::Value {
nu_json::Value::Null
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
fn follow_path_int(&self, count: usize, span: Span) -> Result<Value, ShellError> {
self.get_value(count, span)
}
fn follow_path_string(&self, column_name: String, span: Span) -> Result<Value, ShellError> {
let column = self.column(&column_name, span)?;
Ok(column.to_value(span))
}
fn operation(
&self,
lhs_span: Span,
operator: Operator,
op: Span,
right: &Value,
) -> Result<Value, ShellError> {
self.compute_with_value(lhs_span, operator, op, right)
}
}

View file

@ -0,0 +1,298 @@
mod between_values;
mod conversion;
mod custom_value;
mod operations;
use std::{cmp::Ordering, fmt::Display, hash::Hasher};
use conversion::{Column, ColumnMap};
use indexmap::map::IndexMap;
use nu_protocol::{did_you_mean, ShellError, Span, Value};
use polars::prelude::{DataFrame, PolarsObject, Series};
use serde::{Deserialize, Serialize};
// DataFrameValue is an encapsulation of a Nushell Value that is used
// to implement the PolarsObject trait. The PolarsObject trait allows
// creating dataframes with mixed datatypes
#[derive(Clone, Debug)]
pub struct DataFrameValue(Value);
impl DataFrameValue {
fn new(value: Value) -> Self {
Self(value)
}
fn get_value(&self) -> Value {
self.0.clone()
}
}
impl Display for DataFrameValue {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.0.get_type())
}
}
impl Default for DataFrameValue {
fn default() -> Self {
Self(Value::Nothing {
span: Span::unknown(),
})
}
}
impl PartialEq for DataFrameValue {
fn eq(&self, other: &Self) -> bool {
self.0.partial_cmp(&other.0).map_or(false, Ordering::is_eq)
}
}
impl Eq for DataFrameValue {}
impl std::hash::Hash for DataFrameValue {
fn hash<H: Hasher>(&self, state: &mut H) {
match &self.0 {
Value::Nothing { .. } => 0.hash(state),
Value::Int { val, .. } => val.hash(state),
Value::String { val, .. } => val.hash(state),
// TODO: Define hash for the rest of the types
_ => {}
}
}
}
impl PolarsObject for DataFrameValue {
fn type_name() -> &'static str {
"value"
}
}
#[derive(Debug, Serialize, Deserialize)]
pub struct NuDataFrame(DataFrame);
impl NuDataFrame {
pub fn new(dataframe: DataFrame) -> Self {
Self(dataframe)
}
fn default_value(span: Span) -> Value {
let dataframe = DataFrame::default();
NuDataFrame::dataframe_into_value(dataframe, span)
}
pub fn dataframe_into_value(dataframe: DataFrame, span: Span) -> Value {
Value::CustomValue {
val: Box::new(Self::new(dataframe)),
span,
}
}
pub fn to_value(self, span: Span) -> Value {
Value::CustomValue {
val: Box::new(self),
span,
}
}
pub fn series_to_value(series: Series, span: Span) -> Result<Value, ShellError> {
match DataFrame::new(vec![series]) {
Ok(dataframe) => Ok(NuDataFrame::dataframe_into_value(dataframe, span)),
Err(e) => Err(ShellError::InternalError(e.to_string())),
}
}
pub fn try_from_iter<T>(iter: T) -> Result<Self, ShellError>
where
T: Iterator<Item = Value>,
{
// Dictionary to store the columnar data extracted from
// the input. During the iteration we check whether the values
// have different types
let mut column_values: ColumnMap = IndexMap::new();
for value in iter {
match value {
Value::List { vals, .. } => {
let cols = (0..vals.len())
.map(|i| format!("{}", i))
.collect::<Vec<String>>();
conversion::insert_record(&mut column_values, &cols, &vals)?
}
Value::Record { cols, vals, .. } => {
conversion::insert_record(&mut column_values, &cols, &vals)?
}
_ => {
let key = "0".to_string();
conversion::insert_value(value, key, &mut column_values)?
}
}
}
conversion::from_parsed_columns(column_values)
}
pub fn try_from_series(columns: Vec<Series>) -> Result<Self, ShellError> {
let dataframe = DataFrame::new(columns)
.map_err(|e| ShellError::InternalError(format!("Unable to create DataFrame: {}", e)))?;
Ok(Self::new(dataframe))
}
pub fn try_from_columns(columns: Vec<Column>) -> Result<Self, ShellError> {
let mut column_values: ColumnMap = IndexMap::new();
for column in columns {
let name = column.name().to_string();
for value in column {
conversion::insert_value(value, name.clone(), &mut column_values)?;
}
}
conversion::from_parsed_columns(column_values)
}
pub fn column(&self, column: &str, span: Span) -> Result<Self, ShellError> {
let s = self.0.column(column).map_err(|_| {
let possibilities = self
.0
.get_column_names()
.iter()
.map(|name| name.to_string())
.collect::<Vec<String>>();
let option = did_you_mean(&possibilities, column).unwrap_or_else(|| column.to_string());
ShellError::DidYouMean(option, span)
})?;
let dataframe = DataFrame::new(vec![s.clone()])
.map_err(|e| ShellError::InternalError(e.to_string()))?;
Ok(Self(dataframe))
}
pub fn is_series(&self) -> bool {
self.0.width() == 1
}
pub fn as_series(&self, _span: Span) -> Result<Series, ShellError> {
if !self.is_series() {
return Err(ShellError::InternalError(
"DataFrame cannot be used as Series".into(),
));
}
let series = self
.0
.get_columns()
.get(0)
.expect("We have already checked that the width is 1");
Ok(series.clone())
}
pub fn get_value(&self, row: usize, span: Span) -> Result<Value, ShellError> {
let series = self.as_series(Span::unknown())?;
let column = conversion::create_column(&series, row, row + 1)?;
if column.len() == 0 {
Err(ShellError::AccessBeyondEnd(series.len(), span))
} else {
let value = column
.into_iter()
.next()
.expect("already checked there is a value");
Ok(value)
}
}
// Print is made of a head and, if the dataframe is too large, a tail
pub fn print(&self) -> Result<Vec<Value>, ShellError> {
let df = &self.0;
let size: usize = 20;
if df.height() > size {
let sample_size = size / 2;
let mut values = self.head(Some(sample_size))?;
conversion::add_separator(&mut values, df);
let remaining = df.height() - sample_size;
let tail_size = remaining.min(sample_size);
let mut tail_values = self.tail(Some(tail_size))?;
values.append(&mut tail_values);
Ok(values)
} else {
Ok(self.head(Some(size))?)
}
}
pub fn head(&self, rows: Option<usize>) -> Result<Vec<Value>, ShellError> {
let to_row = rows.unwrap_or(5);
let values = self.to_rows(0, to_row)?;
Ok(values)
}
pub fn tail(&self, rows: Option<usize>) -> Result<Vec<Value>, ShellError> {
let df = &self.0;
let to_row = df.height();
let size = rows.unwrap_or(5);
let from_row = to_row.saturating_sub(size);
let values = self.to_rows(from_row, to_row)?;
Ok(values)
}
pub fn to_rows(&self, from_row: usize, to_row: usize) -> Result<Vec<Value>, ShellError> {
let df = &self.0;
let upper_row = to_row.min(df.height());
let mut size: usize = 0;
let columns = self
.0
.get_columns()
.iter()
.map(
|col| match conversion::create_column(col, from_row, upper_row) {
Ok(col) => {
size = col.len();
Ok(col)
}
Err(e) => Err(e),
},
)
.collect::<Result<Vec<Column>, ShellError>>()?;
let mut iterators = columns
.into_iter()
.map(|col| (col.name().to_string(), col.into_iter()))
.collect::<Vec<(String, std::vec::IntoIter<Value>)>>();
let values = (0..size)
.into_iter()
.map(|_| {
let mut cols = vec![];
let mut vals = vec![];
for (name, col) in &mut iterators {
cols.push(name.clone());
match col.next() {
Some(v) => vals.push(v),
None => vals.push(Value::Nothing {
span: Span::unknown(),
}),
};
}
Value::Record {
cols,
vals,
span: Span::unknown(),
}
})
.collect::<Vec<Value>>();
Ok(values)
}
}
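
A short illustrative sketch of try_from_iter and column working together, assuming the dataframe and custom features are enabled (demo is a hypothetical name, not part of this commit): record values become named columns, and column() is what follow_path_string calls for cell-path access such as $df.b.

use nu_dataframe::NuDataFrame;
use nu_protocol::{ShellError, Span, Value};

fn demo() -> Result<Value, ShellError> {
    // Two records become a two-column dataframe, like `[[a b]; [1 2] [3 4]] | to-df`
    let rows = (0i64..2).map(|i| Value::Record {
        cols: vec!["a".into(), "b".into()],
        vals: vec![
            Value::Int { val: i, span: Span::unknown() },
            Value::Int { val: i * 10, span: Span::unknown() },
        ],
        span: Span::unknown(),
    });
    let df = NuDataFrame::try_from_iter(rows)?;
    // Cell-path access (e.g. `$df.b`) goes through follow_path_string and column()
    let col_b = df.column("b", Span::unknown())?;
    Ok(col_b.to_value(Span::unknown()))
}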

View file

@ -0,0 +1,220 @@
use nu_protocol::{ast::Operator, span, ShellError, Span, Spanned, Value};
use polars::prelude::{DataFrame, Series};
use crate::between_values::{
between_dataframes, compute_between_series, compute_series_single_value,
};
use super::NuDataFrame;
pub enum Axis {
Row,
Column,
}
impl Axis {
pub fn try_from_str(axis: &str, span: Span) -> Result<Axis, ShellError> {
match axis {
"row" => Ok(Axis::Row),
"col" => Ok(Axis::Column),
_ => Err(ShellError::DidYouMean("'row' or 'col'".into(), span)),
}
}
}
impl NuDataFrame {
pub fn compute_with_value(
&self,
lhs_span: Span,
operator: Operator,
op_span: Span,
right: &Value,
) -> Result<Value, ShellError> {
match right {
Value::CustomValue {
val: rhs,
span: rhs_span,
} => {
let rhs = rhs.as_any().downcast_ref::<NuDataFrame>().ok_or_else(|| {
ShellError::DowncastNotPossible(
"Unable to create dataframe".to_string(),
*rhs_span,
)
})?;
let operation_span = span(&[lhs_span, *rhs_span]);
match (self.is_series(), rhs.is_series()) {
(true, true) => {
let lhs = &self
.as_series(lhs_span)
.expect("Already checked that is a series");
let rhs = &rhs
.as_series(*rhs_span)
.expect("Already checked that is a series");
if lhs.dtype() != rhs.dtype() {
return Err(ShellError::IncompatibleParameters {
left_message: format!("datatype {}", lhs.dtype()),
left_span: lhs_span,
right_message: format!("datatype {}", rhs.dtype()),
right_span: *rhs_span,
});
}
if lhs.len() != rhs.len() {
return Err(ShellError::IncompatibleParameters {
left_message: format!("len {}", lhs.len()),
left_span: lhs_span,
right_message: format!("len {}", rhs.len()),
right_span: *rhs_span,
});
}
let op = Spanned {
item: operator,
span: op_span,
};
compute_between_series(
op,
NuDataFrame::default_value(lhs_span),
lhs,
right,
rhs,
operation_span,
)
}
_ => {
if self.0.height() != rhs.0.height() {
return Err(ShellError::IncompatibleParameters {
left_message: format!("rows {}", self.0.height()),
left_span: lhs_span,
right_message: format!("rows {}", rhs.0.height()),
right_span: *rhs_span,
});
}
let op = Spanned {
item: operator,
span: op_span,
};
between_dataframes(
op,
NuDataFrame::default_value(lhs_span),
self,
right,
rhs,
operation_span,
)
}
}
}
_ => {
let op = Spanned {
item: operator,
span: op_span,
};
compute_series_single_value(
op,
self,
&lhs_span,
NuDataFrame::default_value(lhs_span),
right,
)
}
}
}
pub fn append_df(
&self,
other: &NuDataFrame,
axis: Axis,
span: Span,
) -> Result<Self, ShellError> {
match axis {
Axis::Row => {
let mut columns: Vec<&str> = Vec::new();
let new_cols = self
.0
.get_columns()
.iter()
.chain(other.0.get_columns())
.map(|s| {
let name = if columns.contains(&s.name()) {
format!("{}_{}", s.name(), "x")
} else {
columns.push(s.name());
s.name().to_string()
};
let mut series = s.clone();
series.rename(&name);
series
})
.collect::<Vec<Series>>();
let df_new = DataFrame::new(new_cols)
.map_err(|e| ShellError::InternalError(e.to_string()))?;
Ok(NuDataFrame::new(df_new))
}
Axis::Column => {
if self.0.width() != other.0.width() {
return Err(ShellError::IncompatibleParametersSingle(
"Dataframes with different number of columns".into(),
span,
));
}
if !self
.0
.get_column_names()
.iter()
.all(|col| other.0.get_column_names().contains(col))
{
return Err(ShellError::IncompatibleParametersSingle(
"Dataframes with different columns names".into(),
span,
));
}
let new_cols = self
.0
.get_columns()
.iter()
.map(|s| {
let other_col = other
.0
.column(s.name())
.expect("Already checked that dataframes have same columns");
let mut tmp = s.clone();
let res = tmp.append(other_col);
match res {
Ok(s) => Ok(s.clone()),
Err(e) => Err({
ShellError::InternalError(format!(
"Unable to append dataframes: {}",
e
))
}),
}
})
.collect::<Result<Vec<Series>, ShellError>>()?;
let df_new = DataFrame::new(new_cols).map_err(|e| {
ShellError::InternalError(format!(
"Unable to append dataframes: {}",
e.to_string()
))
})?;
Ok(NuDataFrame::new(df_new))
}
}
}
}
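
compute_with_value is the entry point invoked from the CustomValue operation hook. A one-column dataframe is treated as a series, so a scalar right-hand side takes the compute_series_single_value branch. A minimal sketch of that dispatch, assuming the features above are enabled (demo is an illustrative name, not part of this commit):

use nu_dataframe::NuDataFrame;
use nu_protocol::{ast::Operator, ShellError, Span, Value};

fn demo() -> Result<Value, ShellError> {
    let ints = (1i64..=3).map(|i| Value::Int { val: i, span: Span::unknown() });
    let series = NuDataFrame::try_from_iter(ints)?;
    // Same dispatch the engine performs for `<series> + 5`
    series.compute_with_value(
        Span::unknown(),
        Operator::Plus,
        Span::unknown(),
        &Value::Int { val: 5, span: Span::unknown() },
    )
}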

View file

@ -1,5 +1,8 @@
// use std::path::PathBuf;
use std::path::PathBuf;
use std::str::FromStr;
use chrono::{DateTime, FixedOffset};
// use nu_path::expand_path;
use nu_protocol::ast::{CellPath, PathMember};
@ -92,6 +95,47 @@ impl FromValue for f64 {
}
}
impl FromValue for Spanned<usize> {
fn from_value(v: &Value) -> Result<Self, ShellError> {
match v {
Value::Int { val, span } => Ok(Spanned {
item: *val as usize,
span: *span,
}),
Value::Filesize { val, span } => Ok(Spanned {
item: *val as usize,
span: *span,
}),
Value::Duration { val, span } => Ok(Spanned {
item: *val as usize,
span: *span,
}),
v => Err(ShellError::CantConvert(
"integer".into(),
v.get_type().to_string(),
v.span()?,
)),
}
}
}
impl FromValue for usize {
fn from_value(v: &Value) -> Result<Self, ShellError> {
match v {
Value::Int { val, .. } => Ok(*val as usize),
Value::Filesize { val, .. } => Ok(*val as usize),
Value::Duration { val, .. } => Ok(*val as usize),
v => Err(ShellError::CantConvert(
"integer".into(),
v.get_type().to_string(),
v.span()?,
)),
}
}
}
impl FromValue for String {
fn from_value(v: &Value) -> Result<Self, ShellError> {
// FIXME: we may want to fail a little nicer here
@ -126,6 +170,30 @@ impl FromValue for Spanned<String> {
}
}
impl FromValue for Vec<String> {
fn from_value(v: &Value) -> Result<Self, ShellError> {
// FIXME: we may want to fail a little nicer here
match v {
Value::List { vals, .. } => vals
.iter()
.map(|val| match val {
Value::String { val, .. } => Ok(val.clone()),
c => Err(ShellError::CantConvert(
"string".into(),
c.get_type().to_string(),
c.span()?,
)),
})
.collect::<Result<Vec<String>, ShellError>>(),
v => Err(ShellError::CantConvert(
"string".into(),
v.get_type().to_string(),
v.span()?,
)),
}
}
}
impl FromValue for CellPath {
fn from_value(v: &Value) -> Result<Self, ShellError> {
let span = v.span()?;
@ -253,6 +321,23 @@ impl FromValue for Vec<u8> {
}
}
impl FromValue for Spanned<PathBuf> {
fn from_value(v: &Value) -> Result<Self, ShellError> {
match v {
Value::String { val, span } => Ok(Spanned {
item: PathBuf::from_str(val)
.map_err(|err| ShellError::FileNotFoundCustom(err.to_string(), *span))?,
span: *span,
}),
v => Err(ShellError::CantConvert(
"path".into(),
v.get_type().to_string(),
v.span()?,
)),
}
}
}
// impl FromValue for Dictionary {
// fn from_value(v: &Value) -> Result<Self, ShellError> {
// match v {

View file

@ -22,6 +22,7 @@ pub(crate) fn serialize_signature(signature: &Signature, mut builder: signature:
Category::Strings => builder.set_category(PluginCategory::Strings),
Category::System => builder.set_category(PluginCategory::System),
Category::Viewers => builder.set_category(PluginCategory::Viewers),
_ => builder.set_category(PluginCategory::Default),
}
// Serializing list of required arguments

View file

@ -14,9 +14,12 @@ chrono-humanize = "0.2.1"
byte-unit = "4.0.9"
im = "15.0.0"
serde_json = { version = "1.0", optional = true }
nu-json = { path = "../nu-json" }
typetag = "0.1.8"
[features]
plugin = ["serde_json"]
custom = []
[dev-dependencies]
serde_json = "1.0"

View file

@ -27,3 +27,6 @@ pub use span::*;
pub use syntax_shape::*;
pub use ty::*;
pub use value::*;
#[cfg(feature = "custom")]
pub use value::CustomValue;

View file

@ -203,9 +203,13 @@ pub enum ShellError {
#[diagnostic(code(nu::shell::name_not_found), url(docsrs))]
DidYouMean(String, #[label("did you mean '{0}'?")] Span),
#[error("Non-UTF8 string.")]
#[error("Non-UTF8 string")]
#[diagnostic(code(nu::parser::non_utf8), url(docsrs))]
NonUtf8(#[label = "non-UTF8 string"] Span),
#[error("Casting error")]
#[diagnostic(code(nu::parser::downcast_not_possible), url(docsrs))]
DowncastNotPossible(String, #[label("{0}")] Span),
}
impl From<std::io::Error> for ShellError {

View file

@ -45,6 +45,7 @@ pub enum Category {
Strings,
System,
Viewers,
Custom(String),
}
impl std::fmt::Display for Category {
@ -63,6 +64,7 @@ impl std::fmt::Display for Category {
Category::Strings => "strings",
Category::System => "system",
Category::Viewers => "viewers",
Category::Custom(name) => name,
};
write!(f, "{}", msg)

View file

@ -23,6 +23,7 @@ pub enum Type {
Unknown,
Error,
Binary,
Custom,
}
impl Display for Type {
@ -55,6 +56,7 @@ impl Display for Type {
Type::Unknown => write!(f, "unknown"),
Type::Error => write!(f, "error"),
Type::Binary => write!(f, "binary"),
Type::Custom => write!(f, "custom"),
}
}
}

View file

@ -0,0 +1,42 @@
use std::fmt;
use crate::{ast::Operator, ShellError, Span, Value};
// Trait definition for a custom value
#[typetag::serde(tag = "type")]
pub trait CustomValue: fmt::Debug + Send + Sync {
fn clone_value(&self, span: Span) -> Value;
// Define string representation of the custom value
fn value_string(&self) -> String;
// Converts the custom value to a base nushell value
// This is used to represent the custom value using the table representations
// that already exist in nushell
fn to_base_value(&self, span: Span) -> Result<Value, ShellError>;
// Json representation of custom value
fn to_json(&self) -> nu_json::Value {
nu_json::Value::Null
}
// Any representation used to downcast object to its original type
fn as_any(&self) -> &dyn std::any::Any;
// Follow cell path functions
fn follow_path_int(&self, count: usize, span: Span) -> Result<Value, ShellError>;
fn follow_path_string(&self, column_name: String, span: Span) -> Result<Value, ShellError>;
// Definition of an operation between the object that implements the trait
// and another Value.
// The Operator enum is used to indicate the expected operation
fn operation(
&self,
_lhs_span: Span,
operator: Operator,
op: Span,
_right: &Value,
) -> Result<Value, ShellError> {
Err(ShellError::UnsupportedOperator(operator, op))
}
}
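
On the consuming side, as_any is what lets a command recover the concrete type behind a Value::CustomValue; the dataframe operations code uses exactly this downcast for the right-hand side of an operator. A sketch assuming both the custom and dataframe features are enabled (unwrap_df and demo are illustrative names, not part of this commit):

use nu_dataframe::NuDataFrame;
use nu_protocol::{CustomValue, Span, Value};
use polars::prelude::DataFrame;

// Recover the concrete type behind a Value::CustomValue
fn unwrap_df(value: &Value) -> Option<&NuDataFrame> {
    match value {
        Value::CustomValue { val, .. } => val.as_any().downcast_ref::<NuDataFrame>(),
        _ => None,
    }
}

fn demo() {
    let value = NuDataFrame::dataframe_into_value(DataFrame::default(), Span::unknown());
    assert!(unwrap_df(&value).is_some());
}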

View file

@ -0,0 +1,115 @@
use crate::{ShellError, Span, Value};
impl From<u8> for Value {
fn from(val: u8) -> Self {
Value::Int {
val: val as i64,
span: Span::unknown(),
}
}
}
impl From<u16> for Value {
fn from(val: u16) -> Self {
Value::Int {
val: val as i64,
span: Span::unknown(),
}
}
}
impl From<u32> for Value {
fn from(val: u32) -> Self {
Value::Int {
val: val as i64,
span: Span::unknown(),
}
}
}
impl From<u64> for Value {
fn from(val: u64) -> Self {
Value::Int {
val: val as i64,
span: Span::unknown(),
}
}
}
impl From<i8> for Value {
fn from(val: i8) -> Self {
Value::Int {
val: val as i64,
span: Span::unknown(),
}
}
}
impl From<i16> for Value {
fn from(val: i16) -> Self {
Value::Int {
val: val as i64,
span: Span::unknown(),
}
}
}
impl From<i32> for Value {
fn from(val: i32) -> Self {
Value::Int {
val: val as i64,
span: Span::unknown(),
}
}
}
impl From<i64> for Value {
fn from(val: i64) -> Self {
Value::Int {
val,
span: Span::unknown(),
}
}
}
impl From<f32> for Value {
fn from(val: f32) -> Self {
Value::Float {
val: val as f64,
span: Span::unknown(),
}
}
}
impl From<f64> for Value {
fn from(val: f64) -> Self {
Value::Float {
val,
span: Span::unknown(),
}
}
}
impl Value {
pub fn as_f64(&self) -> Result<f64, ShellError> {
match self {
Value::Float { val, .. } => Ok(*val),
x => Err(ShellError::CantConvert(
"f64".into(),
x.get_type().to_string(),
self.span()?,
)),
}
}
pub fn as_i64(&self) -> Result<i64, ShellError> {
match self {
Value::Int { val, .. } => Ok(*val),
x => Err(ShellError::CantConvert(
"i64".into(),
x.get_type().to_string(),
self.span()?,
)),
}
}
}
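
These impls let polars native values be converted into nushell Values generically; nu-dataframe relies on them through the T::Native: Into<Value> bound on column_from_casted. A trivial sketch (demo is an illustrative name, not part of this commit):

use nu_protocol::Value;

fn demo() -> (Value, Value) {
    // Satisfies the `T::Native: Into<Value>` bound used by column_from_casted
    let int: Value = 7u32.into();     // becomes Value::Int { val: 7, .. }
    let float: Value = 2.5f32.into(); // becomes Value::Float { val: 2.5, .. }
    (int, float)
}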

View file

@ -1,3 +1,5 @@
mod custom_value;
mod from;
mod range;
mod stream;
mod unit;
@ -15,10 +17,16 @@ use std::{cmp::Ordering, fmt::Debug};
use crate::ast::{CellPath, PathMember};
use crate::{did_you_mean, span, BlockId, Config, Span, Spanned, Type};
#[cfg(feature = "custom")]
use crate::ast::Operator;
#[cfg(feature = "custom")]
pub use custom_value::CustomValue;
use crate::ShellError;
/// Core structured values that pass through the pipeline in engine-q
#[derive(Debug, Clone, Serialize, Deserialize)]
#[derive(Debug, Serialize, Deserialize)]
pub enum Value {
Bool {
val: bool,
@ -79,6 +87,77 @@ pub enum Value {
val: CellPath,
span: Span,
},
#[cfg(feature = "custom")]
CustomValue {
val: Box<dyn CustomValue>,
span: Span,
},
}
impl Clone for Value {
fn clone(&self) -> Self {
match self {
Value::Bool { val, span } => Value::Bool {
val: *val,
span: *span,
},
Value::Int { val, span } => Value::Int {
val: *val,
span: *span,
},
Value::Filesize { val, span } => Value::Filesize {
val: *val,
span: *span,
},
Value::Duration { val, span } => Value::Duration {
val: *val,
span: *span,
},
Value::Date { val, span } => Value::Date {
val: *val,
span: *span,
},
Value::Range { val, span } => Value::Range {
val: val.clone(),
span: *span,
},
Value::Float { val, span } => Value::Float {
val: *val,
span: *span,
},
Value::String { val, span } => Value::String {
val: val.clone(),
span: *span,
},
Value::Record { cols, vals, span } => Value::Record {
cols: cols.clone(),
vals: vals.clone(),
span: *span,
},
Value::List { vals, span } => Value::List {
vals: vals.clone(),
span: *span,
},
Value::Block { val, span } => Value::Block {
val: *val,
span: *span,
},
Value::Nothing { span } => Value::Nothing { span: *span },
Value::Error { error } => Value::Error {
error: error.clone(),
},
Value::Binary { val, span } => Value::Binary {
val: val.clone(),
span: *span,
},
Value::CellPath { val, span } => Value::CellPath {
val: val.clone(),
span: *span,
},
#[cfg(feature = "custom")]
Value::CustomValue { val, span } => val.clone_value(*span),
}
}
}
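
The manual Clone impl above exists because Box<dyn CustomValue> cannot be handled by #[derive(Clone)]; cloning is delegated to the trait object via clone_value, which re-wraps the value with its span. From the call sites added in this file, the custom value trait has roughly the following shape. This is an inferred sketch, not the definition in custom_value.rs: parameter names, the index type, and the supertrait bounds are assumptions, and the serde support required by Value's Serialize/Deserialize derives is omitted.

```rust
use crate::ast::Operator;
use crate::{ShellError, Span, Value};

// Method names come from the call sites in this diff; signatures are inferred.
pub trait CustomValue: std::fmt::Debug + Send + Sync {
    /// Clone the boxed value and re-wrap it as a Value with the given span
    /// (used by the manual Clone impl above).
    fn clone_value(&self, span: Span) -> Value;

    /// Human-readable representation used by into_string and debug output.
    fn value_string(&self) -> String;

    /// Cell-path access by index, e.g. `$custom.0` (index type assumed).
    fn follow_path_int(&self, count: usize, span: Span) -> Result<Value, ShellError>;

    /// Cell-path access by column name, e.g. `$custom.column_a`.
    fn follow_path_string(&self, column_name: String, span: Span)
        -> Result<Value, ShellError>;

    /// Handle a binary operator with this value on the left-hand side.
    fn operation(
        &self,
        lhs_span: Span,
        operator: Operator,
        op_span: Span,
        rhs: &Value,
    ) -> Result<Value, ShellError>;
}
```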
impl Value {
@ -144,6 +223,8 @@ impl Value {
Value::Nothing { span, .. } => Ok(*span),
Value::Binary { span, .. } => Ok(*span),
Value::CellPath { span, .. } => Ok(*span),
#[cfg(feature = "custom")]
Value::CustomValue { span, .. } => Ok(*span),
}
}
@ -165,6 +246,8 @@ impl Value {
Value::Error { .. } => {}
Value::Binary { span, .. } => *span = new_span,
Value::CellPath { span, .. } => *span = new_span,
#[cfg(feature = "custom")]
Value::CustomValue { span, .. } => *span = new_span,
}
self
@ -193,6 +276,8 @@ impl Value {
Value::Error { .. } => Type::Error,
Value::Binary { .. } => Type::Binary,
Value::CellPath { .. } => Type::CellPath,
#[cfg(feature = "custom")]
Value::CustomValue { .. } => Type::Custom,
}
}
@ -233,6 +318,8 @@ impl Value {
Value::Error { error } => format!("{:?}", error),
Value::Binary { val, .. } => format!("{:?}", val),
Value::CellPath { val, .. } => val.into_string(),
#[cfg(feature = "custom")]
Value::CustomValue { val, .. } => val.value_string(),
}
}
@ -273,6 +360,8 @@ impl Value {
Value::Error { error } => format!("{:?}", error),
Value::Binary { val, .. } => format!("{:?}", val),
Value::CellPath { val, .. } => val.into_string(),
#[cfg(feature = "custom")]
Value::CustomValue { val, .. } => val.value_string(),
}
}
@ -318,6 +407,10 @@ impl Value {
return Err(ShellError::AccessBeyondEndOfStream(*origin_span));
}
}
#[cfg(feature = "custom")]
Value::CustomValue { val, .. } => {
current = val.follow_path_int(*count, *origin_span)?;
}
x => {
return Err(ShellError::IncompatiblePathAccess(
format!("{}", x.get_type()),
@ -365,6 +458,10 @@ impl Value {
span: *span,
};
}
#[cfg(feature = "custom")]
Value::CustomValue { val, .. } => {
current = val.follow_path_string(column_name.clone(), *origin_span)?;
}
x => {
return Err(ShellError::IncompatiblePathAccess(
format!("{}", x.get_type()),
@ -627,6 +724,11 @@ impl Value {
}
}
#[cfg(feature = "custom")]
(Value::CustomValue { val: lhs, span }, rhs) => {
lhs.operation(*span, Operator::Plus, op, rhs)
}
_ => Err(ShellError::OperatorMismatch {
op_span: op,
lhs_ty: self.get_type(),
@ -692,6 +794,11 @@ impl Value {
}
}
#[cfg(feature = "custom")]
(Value::CustomValue { val: lhs, span }, rhs) => {
lhs.operation(*span, Operator::Minus, op, rhs)
}
_ => Err(ShellError::OperatorMismatch {
op_span: op,
lhs_ty: self.get_type(),
@ -727,6 +834,10 @@ impl Value {
val: lhs * rhs,
span,
}),
#[cfg(feature = "custom")]
(Value::CustomValue { val: lhs, span }, rhs) => {
lhs.operation(*span, Operator::Multiply, op, rhs)
}
_ => Err(ShellError::OperatorMismatch {
op_span: op,
@ -788,6 +899,10 @@ impl Value {
Err(ShellError::DivisionByZero(op))
}
}
#[cfg(feature = "custom")]
(Value::CustomValue { val: lhs, span }, rhs) => {
lhs.operation(*span, Operator::Divide, op, rhs)
}
_ => Err(ShellError::OperatorMismatch {
op_span: op,
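
The hunks above and below add the same dispatch arm to every binary operator: when the left-hand side is a CustomValue, the operator, both spans, and the right-hand Value are forwarded to the value's operation method, so a custom type such as a dataframe decides for itself what `+`, `==`, `in`, and the rest mean. Below is a minimal sketch of such a method for a hypothetical custom type (illustrative only; the real dataframe behaviour lives in the nu-dataframe crate):

```rust
use crate::ast::Operator;
use crate::{ShellError, Span, Value};

// Hypothetical custom value: a counter that only understands `+` with an
// integer on the right-hand side.
#[derive(Debug, Clone)]
struct Counter(i64);

impl Counter {
    fn operation(
        &self,
        lhs_span: Span,
        operator: Operator,
        op_span: Span,
        rhs: &Value,
    ) -> Result<Value, ShellError> {
        match (operator, rhs) {
            (Operator::Plus, Value::Int { val, .. }) => Ok(Value::Int {
                val: self.0 + *val,
                span: lhs_span,
            }),
            // Everything else is rejected. CantConvert is reused here only to
            // stick to error variants visible in this diff; a real implementation
            // would return a more specific error such as OperatorMismatch.
            _ => Err(ShellError::CantConvert(
                "operation supported by Counter".into(),
                rhs.get_type().to_string(),
                op_span,
            )),
        }
    }
}
```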
@ -801,6 +916,11 @@ impl Value {
pub fn lt(&self, op: Span, rhs: &Value) -> Result<Value, ShellError> {
let span = span(&[self.span()?, rhs.span()?]);
#[cfg(feature = "custom")]
if let (Value::CustomValue { val: lhs, span }, rhs) = (self, rhs) {
return lhs.operation(*span, Operator::LessThan, op, rhs);
}
match self.partial_cmp(rhs) {
Some(ordering) => Ok(Value::Bool {
val: matches!(ordering, Ordering::Less),
@ -818,6 +938,11 @@ impl Value {
pub fn lte(&self, op: Span, rhs: &Value) -> Result<Value, ShellError> {
let span = span(&[self.span()?, rhs.span()?]);
#[cfg(feature = "custom")]
if let (Value::CustomValue { val: lhs, span }, rhs) = (self, rhs) {
return lhs.operation(*span, Operator::LessThanOrEqual, op, rhs);
}
match self.partial_cmp(rhs) {
Some(ordering) => Ok(Value::Bool {
val: matches!(ordering, Ordering::Less | Ordering::Equal),
@ -835,6 +960,11 @@ impl Value {
pub fn gt(&self, op: Span, rhs: &Value) -> Result<Value, ShellError> {
let span = span(&[self.span()?, rhs.span()?]);
#[cfg(feature = "custom")]
if let (Value::CustomValue { val: lhs, span }, rhs) = (self, rhs) {
return lhs.operation(*span, Operator::GreaterThan, op, rhs);
}
match self.partial_cmp(rhs) {
Some(ordering) => Ok(Value::Bool {
val: matches!(ordering, Ordering::Greater),
@ -852,6 +982,11 @@ impl Value {
pub fn gte(&self, op: Span, rhs: &Value) -> Result<Value, ShellError> {
let span = span(&[self.span()?, rhs.span()?]);
#[cfg(feature = "custom")]
if let (Value::CustomValue { val: lhs, span }, rhs) = (self, rhs) {
return lhs.operation(*span, Operator::GreaterThanOrEqual, op, rhs);
}
match self.partial_cmp(rhs) {
Some(ordering) => Ok(Value::Bool {
val: matches!(ordering, Ordering::Greater | Ordering::Equal),
@ -869,6 +1004,11 @@ impl Value {
pub fn eq(&self, op: Span, rhs: &Value) -> Result<Value, ShellError> {
let span = span(&[self.span()?, rhs.span()?]);
#[cfg(feature = "custom")]
if let (Value::CustomValue { val: lhs, span }, rhs) = (self, rhs) {
return lhs.operation(*span, Operator::Equal, op, rhs);
}
match self.partial_cmp(rhs) {
Some(ordering) => Ok(Value::Bool {
val: matches!(ordering, Ordering::Equal),
@ -886,6 +1026,11 @@ impl Value {
pub fn ne(&self, op: Span, rhs: &Value) -> Result<Value, ShellError> {
let span = span(&[self.span()?, rhs.span()?]);
#[cfg(feature = "custom")]
if let (Value::CustomValue { val: lhs, span }, rhs) = (self, rhs) {
return lhs.operation(*span, Operator::NotEqual, op, rhs);
}
match self.partial_cmp(rhs) {
Some(ordering) => Ok(Value::Bool {
val: !matches!(ordering, Ordering::Equal),
@ -921,6 +1066,10 @@ impl Value {
val: rhs.contains(lhs),
span,
}),
#[cfg(feature = "custom")]
(Value::CustomValue { val: lhs, span }, rhs) => {
lhs.operation(*span, Operator::In, op, rhs)
}
_ => Err(ShellError::OperatorMismatch {
op_span: op,
lhs_ty: self.get_type(),
@ -951,6 +1100,10 @@ impl Value {
val: !rhs.contains(lhs),
span,
}),
#[cfg(feature = "custom")]
(Value::CustomValue { val: lhs, span }, rhs) => {
lhs.operation(*span, Operator::NotIn, op, rhs)
}
_ => Err(ShellError::OperatorMismatch {
op_span: op,
lhs_ty: self.get_type(),
@ -969,6 +1122,10 @@ impl Value {
val: lhs.contains(rhs),
span,
}),
#[cfg(feature = "custom")]
(Value::CustomValue { val: lhs, span }, rhs) => {
lhs.operation(*span, Operator::Contains, op, rhs)
}
_ => Err(ShellError::OperatorMismatch {
op_span: op,
lhs_ty: self.get_type(),
@ -987,6 +1144,10 @@ impl Value {
val: !lhs.contains(rhs),
span,
}),
#[cfg(feature = "custom")]
(Value::CustomValue { val: lhs, span }, rhs) => {
lhs.operation(*span, Operator::NotContains, op, rhs)
}
_ => Err(ShellError::OperatorMismatch {
op_span: op,
lhs_ty: self.get_type(),
@ -1041,6 +1202,10 @@ impl Value {
Err(ShellError::DivisionByZero(op))
}
}
#[cfg(feature = "custom")]
(Value::CustomValue { val: lhs, span }, rhs) => {
lhs.operation(*span, Operator::Modulo, op, rhs)
}
_ => Err(ShellError::OperatorMismatch {
op_span: op,
@ -1060,6 +1225,10 @@ impl Value {
val: *lhs && *rhs,
span,
}),
#[cfg(feature = "custom")]
(Value::CustomValue { val: lhs, span }, rhs) => {
lhs.operation(*span, Operator::And, op, rhs)
}
_ => Err(ShellError::OperatorMismatch {
op_span: op,
lhs_ty: self.get_type(),
@ -1078,6 +1247,10 @@ impl Value {
val: *lhs || *rhs,
span,
}),
#[cfg(feature = "custom")]
(Value::CustomValue { val: lhs, span }, rhs) => {
lhs.operation(*span, Operator::Or, op, rhs)
}
_ => Err(ShellError::OperatorMismatch {
op_span: op,
lhs_ty: self.get_type(),
@ -1114,6 +1287,10 @@ impl Value {
val: lhs.powf(*rhs),
span,
}),
#[cfg(feature = "custom")]
(Value::CustomValue { val: lhs, span }, rhs) => {
lhs.operation(*span, Operator::Pow, op, rhs)
}
_ => Err(ShellError::OperatorMismatch {
op_span: op,