Mirror of https://github.com/nushell/nushell (synced 2024-12-28 05:53:09 +00:00)

commit 9aab884db0
Merge branch 'master' into split-with-empty-cols
59 changed files with 909 additions and 432 deletions
Cargo.lock (generated): 7 changes

@@ -1733,6 +1733,7 @@ dependencies = [
  "serde_ini 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde_yaml 0.8.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "shellexpand 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "subprocess 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)",
  "surf 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "syntect 3.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2646,6 +2647,11 @@ dependencies = [
  "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
+[[package]]
+name = "shellexpand"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "slab"
 version = "0.4.2"
@@ -3568,6 +3574,7 @@ dependencies = [
 "checksum serde_yaml 0.8.9 (registry+https://github.com/rust-lang/crates.io-index)" = "38b08a9a90e5260fe01c6480ec7c811606df6d3a660415808c3c3fa8ed95b582"
 "checksum shell-words 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "39acde55a154c4cd3ae048ac78cc21c25f3a0145e44111b523279113dce0d94a"
 "checksum shell32-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9ee04b46101f57121c9da2b151988283b6beb79b34f5bb29a58ee48cb695122c"
+"checksum shellexpand 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de7a5b5a9142fd278a10e0209b021a1b85849352e6951f4f914735c976737564"
 "checksum slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8"
 "checksum sluice 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ec70d7c3b17c262d4a18f7291c6ce62bf47170915f3b795434d3c5c49a4e59b7"
 "checksum smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)" = "ab606a9c5e214920bb66c458cd7be8ef094f813f20fe77a54cc7dbfff220d4b7"

@@ -78,6 +78,7 @@ battery = "0.7.4"
 textwrap = {version = "0.11.0", features = ["term_size"]}
 rawkey = {version = "0.1.2", optional = true }
 clipboard = {version = "0.5", optional = true }
+shellexpand = "1.0.0"
 
 [dev-dependencies]
 pretty_assertions = "0.6.1"

src/cli.rs: 50 changes

@@ -58,12 +58,17 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel
 
     let mut input = String::new();
     match reader.read_line(&mut input) {
-        Ok(_) => {
+        Ok(count) => {
+            trace!("processing response ({} bytes)", count);
+
             let response = serde_json::from_str::<JsonRpc<Result<Signature, ShellError>>>(&input);
             match response {
                 Ok(jrpc) => match jrpc.params {
                     Ok(params) => {
                         let fname = path.to_string_lossy();
 
+                        trace!("processing {:?}", params);
+
                         if params.is_filter {
                             let fname = fname.to_string();
                             let name = params.name.clone();
@@ -91,7 +96,9 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel
 
 fn load_plugins_in_dir(path: &std::path::PathBuf, context: &mut Context) -> Result<(), ShellError> {
     let re_bin = Regex::new(r"^nu_plugin_[A-Za-z_]+$")?;
-    let re_exe = Regex::new(r"^nu_plugin_[A-Za-z_]+\.exe$")?;
+    let re_exe = Regex::new(r"^nu_plugin_[A-Za-z_]+\.(exe|bat)$")?;
+
+    trace!("Looking for plugins in {:?}", path);
 
     match std::fs::read_dir(path) {
         Ok(p) => {
@@ -99,8 +106,10 @@ fn load_plugins_in_dir(path: &std::path::PathBuf, context: &mut Context) -> Resu
                 let entry = entry?;
                 let filename = entry.file_name();
                 let f_name = filename.to_string_lossy();
 
                 if re_bin.is_match(&f_name) || re_exe.is_match(&f_name) {
                     let mut load_path = path.clone();
+
+                    trace!("Found {:?}", f_name);
                     load_path.push(f_name.to_string());
                     load_plugin(&load_path, context)?;
                 }
@@ -121,19 +130,24 @@ fn load_plugins(context: &mut Context) -> Result<(), ShellError> {
         None => println!("PATH is not defined in the environment."),
     }
 
-    // Also use our debug output for now
-    let mut path = std::path::PathBuf::from(".");
-    path.push("target");
-    path.push("debug");
-
-    let _ = load_plugins_in_dir(&path, context);
-
-    // Also use our release output for now
-    let mut path = std::path::PathBuf::from(".");
-    path.push("target");
-    path.push("release");
-
-    let _ = load_plugins_in_dir(&path, context);
+    #[cfg(debug_assertions)]
+    {
+        // Use our debug plugins in debug mode
+        let mut path = std::path::PathBuf::from(".");
+        path.push("target");
+        path.push("debug");
+        let _ = load_plugins_in_dir(&path, context);
+    }
+
+    #[cfg(not(debug_assertions))]
+    {
+        // Use our release plugins in release mode
+        let mut path = std::path::PathBuf::from(".");
+        path.push("target");
+        path.push("release");
+
+        let _ = load_plugins_in_dir(&path, context);
+    }
 
     Ok(())
 }
@@ -162,6 +176,7 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
             whole_stream_command(Reverse),
             whole_stream_command(Trim),
             whole_stream_command(ToArray),
+            whole_stream_command(ToBSON),
             whole_stream_command(ToCSV),
             whole_stream_command(ToJSON),
             whole_stream_command(ToTOML),
@@ -217,6 +232,7 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
         let _ = ansi_term::enable_ansi_support();
     }
 
+    // we are ok if history does not exist
     let _ = rl.load_history("history.txt");
 
     let ctrl_c = Arc::new(AtomicBool::new(false));
@@ -297,7 +313,9 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
             }
             ctrlcbreak = false;
         }
-        rl.save_history("history.txt")?;
+
+        // we are ok if we can not save history
+        let _ = rl.save_history("history.txt");
 
     Ok(())
 }
@@ -504,7 +522,9 @@ fn classify_command(
 
     trace!(target: "nu::build_pipeline", "classifying {:?}", config);
 
-    let args: hir::Call = config.parse_args(call, context.registry(), source)?;
+    let args: hir::Call = config.parse_args(call, &context, source)?;
+
+    trace!(target: "nu::build_pipeline", "args :: {}", args.debug(source));
 
     Ok(ClassifiedCommand::Internal(InternalCommand {
         command,

@@ -35,8 +35,8 @@ crate mod pick;
 crate mod plugin;
 crate mod prev;
 crate mod ps;
-crate mod reverse;
 crate mod reject;
+crate mod reverse;
 crate mod rm;
 crate mod save;
 crate mod shells;
@@ -48,6 +48,7 @@ crate mod split_row;
 crate mod table;
 crate mod tags;
 crate mod to_array;
+crate mod to_bson;
 crate mod to_csv;
 crate mod to_json;
 crate mod to_toml;
@@ -104,6 +105,7 @@ crate use split_row::SplitRow;
 crate use table::Table;
 crate use tags::Tags;
 crate use to_array::ToArray;
+crate use to_bson::ToBSON;
 crate use to_csv::ToCSV;
 crate use to_json::ToJSON;
 crate use to_toml::ToTOML;

@@ -122,10 +122,11 @@ impl InternalCommand {
             self.name_span.clone(),
             context.source_map.clone(),
             self.args,
-            source,
+            &source,
             objects,
         );
 
+        let result = trace_out_stream!(target: "nu::trace_stream::internal", source: &source, "output" = result);
         let mut result = result.values;
 
         let mut stream = VecDeque::new();

@@ -422,6 +422,25 @@ pub enum CommandAction {
     LeaveShell,
 }
 
+impl ToDebug for CommandAction {
+    fn fmt_debug(&self, f: &mut fmt::Formatter, _source: &str) -> fmt::Result {
+        match self {
+            CommandAction::ChangePath(s) => write!(f, "action:change-path={}", s),
+            CommandAction::AddSpanSource(u, source) => {
+                write!(f, "action:add-span-source={}@{:?}", u, source)
+            }
+            CommandAction::Exit => write!(f, "action:exit"),
+            CommandAction::EnterShell(s) => write!(f, "action:enter-shell={}", s),
+            CommandAction::EnterValueShell(t) => {
+                write!(f, "action:enter-value-shell={:?}", t.debug())
+            }
+            CommandAction::PreviousShell => write!(f, "action:previous-shell"),
+            CommandAction::NextShell => write!(f, "action:next-shell"),
+            CommandAction::LeaveShell => write!(f, "action:leave-shell"),
+        }
+    }
+}
+
 #[derive(Debug, Serialize, Deserialize)]
 pub enum ReturnSuccess {
     Value(Tagged<Value>),
@@ -430,6 +449,16 @@ pub enum ReturnSuccess {
 
 pub type ReturnValue = Result<ReturnSuccess, ShellError>;
 
+impl ToDebug for ReturnValue {
+    fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
+        match self {
+            Err(err) => write!(f, "{}", err.debug(source)),
+            Ok(ReturnSuccess::Value(v)) => write!(f, "{:?}", v.debug()),
+            Ok(ReturnSuccess::Action(a)) => write!(f, "{}", a.debug(source)),
+        }
+    }
+}
+
 impl From<Tagged<Value>> for ReturnValue {
     fn from(input: Tagged<Value>) -> ReturnValue {
         Ok(ReturnSuccess::Value(input))
@@ -469,7 +498,7 @@ pub trait WholeStreamCommand: Send + Sync {
         Signature {
             name: self.name().to_string(),
             positional: vec![],
-            rest_positional: true,
+            rest_positional: None,
             named: indexmap::IndexMap::new(),
             is_filter: true,
         }
@@ -491,7 +520,7 @@ pub trait PerItemCommand: Send + Sync {
         Signature {
             name: self.name().to_string(),
             positional: vec![],
-            rest_positional: true,
+            rest_positional: None,
             named: indexmap::IndexMap::new(),
             is_filter: true,
         }

@@ -2,7 +2,7 @@ use crate::commands::WholeStreamCommand;
 use crate::object::base::OF64;
 use crate::object::{Primitive, TaggedDictBuilder, Value};
 use crate::prelude::*;
-use bson::{decode_document, Bson, spec::BinarySubtype};
+use bson::{decode_document, spec::BinarySubtype, Bson};
 
 pub struct FromBSON;
 
@@ -47,71 +47,80 @@ fn convert_bson_value_to_nu_value(v: &Bson, tag: impl Into<Tag>) -> Tagged<Value
         Bson::Boolean(b) => Value::Primitive(Primitive::Boolean(*b)).tagged(tag),
         Bson::Null => Value::Primitive(Primitive::String(String::from(""))).tagged(tag),
         Bson::RegExp(r, opts) => {
             let mut collected = TaggedDictBuilder::new(tag);
             collected.insert_tagged(
                 "$regex".to_string(),
                 Value::Primitive(Primitive::String(String::from(r))).tagged(tag),
             );
             collected.insert_tagged(
                 "$options".to_string(),
                 Value::Primitive(Primitive::String(String::from(opts))).tagged(tag),
             );
             collected.into_tagged_value()
         }
+        // TODO: Add Int32 to nushell?
         Bson::I32(n) => Value::Primitive(Primitive::Int(*n as i64)).tagged(tag),
         Bson::I64(n) => Value::Primitive(Primitive::Int(*n as i64)).tagged(tag),
         Bson::JavaScriptCode(js) => {
             let mut collected = TaggedDictBuilder::new(tag);
             collected.insert_tagged(
                 "$javascript".to_string(),
                 Value::Primitive(Primitive::String(String::from(js))).tagged(tag),
             );
             collected.into_tagged_value()
         }
         Bson::JavaScriptCodeWithScope(js, doc) => {
             let mut collected = TaggedDictBuilder::new(tag);
             collected.insert_tagged(
                 "$javascript".to_string(),
                 Value::Primitive(Primitive::String(String::from(js))).tagged(tag),
             );
             collected.insert_tagged(
                 "$scope".to_string(),
                 convert_bson_value_to_nu_value(&Bson::Document(doc.to_owned()), tag),
             );
             collected.into_tagged_value()
         }
         Bson::TimeStamp(ts) => {
             let mut collected = TaggedDictBuilder::new(tag);
             collected.insert_tagged(
                 "$timestamp".to_string(),
                 Value::Primitive(Primitive::Int(*ts as i64)).tagged(tag),
             );
             collected.into_tagged_value()
         }
         Bson::Binary(bst, bytes) => {
             let mut collected = TaggedDictBuilder::new(tag);
             collected.insert_tagged(
                 "$binary_subtype".to_string(),
                 match bst {
                     BinarySubtype::UserDefined(u) => Value::Primitive(Primitive::Int(*u as i64)),
                     _ => Value::Primitive(Primitive::String(binary_subtype_to_string(*bst))),
-                }.tagged(tag)
-            );
-            collected.insert_tagged(
-                "$binary".to_string(),
-                Value::Binary(bytes.to_owned()).tagged(tag),
-            );
-            collected.into_tagged_value()
+                }
+                .tagged(tag),
+            );
+            collected.insert_tagged(
+                "$binary".to_string(),
+                Value::Binary(bytes.to_owned()).tagged(tag),
+            );
+            collected.into_tagged_value()
+        }
+        Bson::ObjectId(obj_id) => {
+            let mut collected = TaggedDictBuilder::new(tag);
+            collected.insert_tagged(
+                "$object_id".to_string(),
+                Value::Primitive(Primitive::String(obj_id.to_hex())).tagged(tag),
+            );
+            collected.into_tagged_value()
         }
-        Bson::ObjectId(obj_id) => Value::Primitive(Primitive::String(obj_id.to_hex())).tagged(tag),
         Bson::UtcDatetime(dt) => Value::Primitive(Primitive::Date(*dt)).tagged(tag),
         Bson::Symbol(s) => {
             let mut collected = TaggedDictBuilder::new(tag);
             collected.insert_tagged(
                 "$symbol".to_string(),
                 Value::Primitive(Primitive::String(String::from(s))).tagged(tag),
             );
             collected.into_tagged_value()
         }
     }
 }
@@ -125,7 +134,8 @@ fn binary_subtype_to_string(bst: BinarySubtype) -> String {
         BinarySubtype::Uuid => "uuid",
         BinarySubtype::Md5 => "md5",
         _ => unreachable!(),
-    }.to_string()
+    }
+    .to_string()
 }
 
 #[derive(Debug)]

@@ -22,33 +22,41 @@ impl WholeStreamCommand for Get {
     ) -> Result<OutputStream, ShellError> {
         args.process(registry, get)?.run()
     }
 
     fn signature(&self) -> Signature {
-        Signature::build("get").rest()
+        Signature::build("get").rest(SyntaxType::Member)
     }
 }
 
 fn get_member(path: &Tagged<String>, obj: &Tagged<Value>) -> Result<Tagged<Value>, ShellError> {
-    let mut current = obj;
+    let mut current = Some(obj);
     for p in path.split(".") {
-        match current.get_data_by_key(p) {
-            Some(v) => current = v,
-            None => {
-                // Before we give up, see if they gave us a path that matches a field name by itself
-                match obj.get_data_by_key(&path.item) {
-                    Some(v) => return Ok(v.clone()),
-                    None => {
-                        return Err(ShellError::labeled_error(
-                            "Unknown column",
-                            "table missing column",
-                            path.span(),
-                        ));
+        if let Some(obj) = current {
+            current = match obj.get_data_by_key(p) {
+                Some(v) => Some(v),
+                None =>
+                // Before we give up, see if they gave us a path that matches a field name by itself
+                {
+                    match obj.get_data_by_key(&path.item) {
+                        Some(v) => return Ok(v.clone()),
+                        None => {
+                            return Err(ShellError::labeled_error(
+                                "Unknown column",
+                                "table missing column",
+                                path.span(),
+                            ));
+                        }
                     }
                 }
             }
         }
     }
 
-    Ok(current.clone())
+    match current {
+        Some(v) => Ok(v.clone()),
+        None => Ok(Value::nothing().tagged(obj.tag)),
+    }
+    // Ok(current.clone())
 }
 
 pub fn get(

@@ -44,7 +44,7 @@ fn last(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, S
             "Value is too low",
             "expected a positive integer",
             args.expect_nth(0)?.span(),
-        ))
+        ));
     }
 
     let stream = async_stream_block! {

@@ -27,7 +27,7 @@ impl PerItemCommand for Mkdir {
     }
 
     fn signature(&self) -> Signature {
-        Signature::build("mkdir").rest()
+        Signature::build("mkdir").rest(SyntaxType::Path)
     }
 }
 

@@ -44,7 +44,8 @@ fn run(call_info: &CallInfo, shell_manager: &ShellManager) -> Result<OutputStrea
     {
         file => file,
     };
-    let path_str = path.as_string()?;
+    let path_buf = path.as_path()?;
+    let path_str = path_buf.display().to_string();
     let path_span = path.span();
     let name_span = call_info.name_span;
     let has_raw = call_info.args.has("raw");
@@ -426,14 +427,16 @@ pub fn parse_string_as_value(
     name_span: Span,
 ) -> Result<Tagged<Value>, ShellError> {
     match extension {
-        Some(x) if x == "csv" => crate::commands::from_csv::from_csv_string_to_value(
-            contents,
-            false,
-            contents_tag,
-        )
-        .map_err(move |_| {
-            ShellError::labeled_error("Could not open as CSV", "could not open as CSV", name_span)
-        }),
+        Some(x) if x == "csv" => {
+            crate::commands::from_csv::from_csv_string_to_value(contents, false, contents_tag)
+                .map_err(move |_| {
+                    ShellError::labeled_error(
+                        "Could not open as CSV",
+                        "could not open as CSV",
+                        name_span,
+                    )
+                })
+        }
         Some(x) if x == "toml" => {
             crate::commands::from_toml::from_toml_string_to_value(contents, contents_tag).map_err(
                 move |_| {
@@ -507,9 +510,9 @@ pub fn parse_binary_as_value(
             crate::commands::from_bson::from_bson_bytes_to_value(contents, contents_tag).map_err(
                 move |_| {
                     ShellError::labeled_error(
                         "Could not open as BSON",
                         "could not open as BSON",
                         name_span,
                     )
                 },
             )

@@ -17,7 +17,7 @@ impl WholeStreamCommand for Pick {
     }
 
     fn signature(&self) -> Signature {
-        Signature::build("pick").rest()
+        Signature::build("pick").rest(SyntaxType::Any)
     }
 
     fn run(

@@ -3,6 +3,7 @@ use crate::errors::ShellError;
 use crate::parser::registry;
 use crate::prelude::*;
 use derive_new::new;
+use log::trace;
 use serde::{self, Deserialize, Serialize};
 use std::io::prelude::*;
 use std::io::BufReader;
@@ -64,6 +65,8 @@ pub fn filter_plugin(
     args: CommandArgs,
     registry: &CommandRegistry,
 ) -> Result<OutputStream, ShellError> {
+    trace!("filter_plugin :: {}", path);
+
     let args = args.evaluate_once(registry)?;
 
     let mut child = std::process::Command::new(path)
@@ -80,6 +83,8 @@ pub fn filter_plugin(
 
     let call_info = args.call_info.clone();
 
+    trace!("filtering :: {:?}", call_info);
+
     let stream = bos
         .chain(args.input.values)
         .chain(eos)
@@ -95,7 +100,14 @@ pub fn filter_plugin(
 
             let request = JsonRpc::new("begin_filter", call_info.clone());
             let request_raw = serde_json::to_string(&request).unwrap();
-            let _ = stdin.write(format!("{}\n", request_raw).as_bytes()); // TODO: Handle error
+            match stdin.write(format!("{}\n", request_raw).as_bytes()) {
+                Ok(_) => {}
+                Err(err) => {
+                    let mut result = VecDeque::new();
+                    result.push_back(Err(ShellError::unexpected(format!("{}", err))));
+                    return result;
+                }
+            }
 
             let mut input = String::new();
             match reader.read_line(&mut input) {
@@ -140,7 +152,15 @@ pub fn filter_plugin(
             let mut reader = BufReader::new(stdout);
 
             let request: JsonRpc<std::vec::Vec<Value>> = JsonRpc::new("end_filter", vec![]);
-            let request_raw = serde_json::to_string(&request).unwrap();
+            let request_raw = match serde_json::to_string(&request) {
+                Ok(req) => req,
+                Err(err) => {
+                    let mut result = VecDeque::new();
+                    result.push_back(Err(ShellError::unexpected(format!("{}", err))));
+                    return result;
+                }
+            };
 
             let _ = stdin.write(format!("{}\n", request_raw).as_bytes()); // TODO: Handle error
 
             let mut input = String::new();

@@ -24,7 +24,7 @@ impl WholeStreamCommand for Reject {
     }
 
     fn signature(&self) -> Signature {
-        Signature::build("reject").rest()
+        Signature::build("reject").rest(SyntaxType::Member)
     }
 }
 

@@ -59,7 +59,7 @@ fn save(
     // If there is no filename, check the metadata for the origin filename
     if input.len() > 0 {
         let origin = input[0].origin();
-        match origin.map(|x| source_map.get(&x)).flatten() {
+        match origin.and_then(|x| source_map.get(&x)) {
             Some(path) => match path {
                 SpanSource::File(file) => {
                     full_path.push(Path::new(file));

@@ -4,13 +4,19 @@ use crate::prelude::*;
 
 pub struct SortBy;
 
+#[derive(Deserialize)]
+pub struct SortByArgs {
+    rest: Vec<Tagged<String>>,
+    reverse: bool,
+}
+
 impl WholeStreamCommand for SortBy {
     fn run(
         &self,
         args: CommandArgs,
         registry: &CommandRegistry,
     ) -> Result<OutputStream, ShellError> {
-        sort_by(args, registry)
+        args.process(registry, sort_by)?.run()
     }
 
     fn name(&self) -> &str {
@@ -18,43 +24,32 @@ impl WholeStreamCommand for SortBy {
     }
 
     fn signature(&self) -> Signature {
-        Signature::build("sort-by").switch("reverse")
+        Signature::build("sort-by")
+            .rest(SyntaxType::String)
+            .switch("reverse")
     }
 }
 
-fn sort_by(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
-    let args = args.evaluate_once(registry)?;
-    let (input, args) = args.parts();
-
-    let fields: Result<Vec<_>, _> = args
-        .positional
-        .iter()
-        .flatten()
-        .map(|a| a.as_string())
-        .collect();
-
-    let fields = fields?;
-
-    let output = input.values.collect::<Vec<_>>();
-
-    let reverse = args.has("reverse");
-    let output = output.map(move |mut vec| {
-        let calc_key = |item: &Tagged<Value>| {
-            fields
-                .iter()
-                .map(|f| item.get_data_by_key(f).map(|i| i.clone()))
-                .collect::<Vec<Option<Tagged<Value>>>>()
-        };
-        if reverse {
-            vec.sort_by_cached_key(|item| {
-                std::cmp::Reverse(calc_key(item))
-            });
-        } else {
-            vec.sort_by_cached_key(calc_key);
-        }
-
-        vec.into_iter().collect::<VecDeque<_>>()
-    });
-
-    Ok(output.flatten_stream().from_input_stream())
+fn sort_by(
+    SortByArgs { reverse, rest }: SortByArgs,
+    mut context: RunnableContext,
+) -> Result<OutputStream, ShellError> {
+    Ok(OutputStream::new(async_stream_block! {
+        let mut vec = context.input.drain_vec().await;
+
+        let calc_key = |item: &Tagged<Value>| {
+            rest.iter()
+                .map(|f| item.get_data_by_key(f).map(|i| i.clone()))
+                .collect::<Vec<Option<Tagged<Value>>>>()
+        };
+        if reverse {
+            vec.sort_by_cached_key(|item| std::cmp::Reverse(calc_key(item)));
+        } else {
+            vec.sort_by_cached_key(calc_key);
+        };
+
+        for item in vec {
+            yield item.into();
+        }
+    }))
 }

@@ -31,7 +31,7 @@ impl WholeStreamCommand for SplitColumn {
         Signature::build("split-column")
             .required("separator", SyntaxType::Any)
             .switch("collapse-empty")
-            .rest()
+            .rest(SyntaxType::Member)
     }
 }
 

@@ -38,7 +38,7 @@ fn tags(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream,
             dict.insert("end", Value::int(span.end as i64));
             tags.insert_tagged("span", dict.into_tagged_value());
 
-            match origin.map(|x| source_map.get(&x)).flatten() {
+            match origin.and_then(|x| source_map.get(&x)) {
                 Some(SpanSource::File(source)) => {
                     tags.insert("origin", Value::string(source));
                 }

src/commands/to_bson.rs (new file): 231 lines

@@ -0,0 +1,231 @@
+use crate::commands::WholeStreamCommand;
+use crate::object::{Dictionary, Primitive, Value};
+use crate::prelude::*;
+use bson::{encode_document, oid::ObjectId, spec::BinarySubtype, Bson, Document};
+use std::convert::TryInto;
+
+pub struct ToBSON;
+
+impl WholeStreamCommand for ToBSON {
+    fn run(
+        &self,
+        args: CommandArgs,
+        registry: &CommandRegistry,
+    ) -> Result<OutputStream, ShellError> {
+        to_bson(args, registry)
+    }
+
+    fn name(&self) -> &str {
+        "to-bson"
+    }
+
+    fn signature(&self) -> Signature {
+        Signature::build("to-bson")
+    }
+}
+
+pub fn value_to_bson_value(v: &Value) -> Bson {
+    match v {
+        Value::Primitive(Primitive::Boolean(b)) => Bson::Boolean(*b),
+        Value::Primitive(Primitive::Bytes(b)) => Bson::I64(*b as i64),
+        Value::Primitive(Primitive::Date(d)) => Bson::UtcDatetime(*d),
+        Value::Primitive(Primitive::EndOfStream) => Bson::Null,
+        Value::Primitive(Primitive::BeginningOfStream) => Bson::Null,
+        Value::Primitive(Primitive::Float(f)) => Bson::FloatingPoint(f.into_inner()),
+        Value::Primitive(Primitive::Int(i)) => Bson::I64(*i),
+        Value::Primitive(Primitive::Nothing) => Bson::Null,
+        Value::Primitive(Primitive::String(s)) => Bson::String(s.clone()),
+        Value::Primitive(Primitive::Path(s)) => Bson::String(s.display().to_string()),
+        Value::List(l) => Bson::Array(l.iter().map(|x| value_to_bson_value(x)).collect()),
+        Value::Block(_) => Bson::Null,
+        Value::Binary(b) => Bson::Binary(BinarySubtype::Generic, b.clone()),
+        Value::Object(o) => object_value_to_bson(o),
+    }
+}
+
+// object_value_to_bson handles all Objects, even those that correspond to special
+// types (things like regex or javascript code).
+fn object_value_to_bson(o: &Dictionary) -> Bson {
+    let mut it = o.entries.iter();
+    if it.len() > 2 {
+        return generic_object_value_to_bson(o);
+    }
+    match it.next() {
+        Some((regex, tagged_regex_value)) if regex == "$regex" => match it.next() {
+            Some((options, tagged_opts_value)) if options == "$options" => {
+                let r: Result<String, _> = tagged_regex_value.try_into();
+                let opts: Result<String, _> = tagged_opts_value.try_into();
+                if r.is_err() || opts.is_err() {
+                    generic_object_value_to_bson(o)
+                } else {
+                    Bson::RegExp(r.unwrap(), opts.unwrap())
+                }
+            }
+            _ => generic_object_value_to_bson(o),
+        },
+        Some((javascript, tagged_javascript_value)) if javascript == "$javascript" => {
+            match it.next() {
+                Some((scope, tagged_scope_value)) if scope == "$scope" => {
+                    let js: Result<String, _> = tagged_javascript_value.try_into();
+                    let s: Result<&Dictionary, _> = tagged_scope_value.try_into();
+                    if js.is_err() || s.is_err() {
+                        generic_object_value_to_bson(o)
+                    } else {
+                        if let Bson::Document(doc) = object_value_to_bson(s.unwrap()) {
+                            Bson::JavaScriptCodeWithScope(js.unwrap(), doc)
+                        } else {
+                            generic_object_value_to_bson(o)
+                        }
+                    }
+                }
+                None => {
+                    let js: Result<String, _> = tagged_javascript_value.try_into();
+                    if js.is_err() {
+                        generic_object_value_to_bson(o)
+                    } else {
+                        Bson::JavaScriptCode(js.unwrap())
+                    }
+                }
+                _ => generic_object_value_to_bson(o),
+            }
+        }
+        Some((timestamp, tagged_timestamp_value)) if timestamp == "$timestamp" => {
+            let ts: Result<i64, _> = tagged_timestamp_value.try_into();
+            if ts.is_err() {
+                generic_object_value_to_bson(o)
+            } else {
+                Bson::TimeStamp(ts.unwrap())
+            }
+        }
+        Some((binary_subtype, tagged_binary_subtype_value))
+            if binary_subtype == "$binary_subtype" =>
+        {
+            match it.next() {
+                Some((binary, tagged_bin_value)) if binary == "$binary" => {
+                    let bst = get_binary_subtype(tagged_binary_subtype_value);
+                    let bin: Result<Vec<u8>, _> = tagged_bin_value.try_into();
+                    if bst.is_none() || bin.is_err() {
+                        generic_object_value_to_bson(o)
+                    } else {
+                        Bson::Binary(bst.unwrap(), bin.unwrap())
+                    }
+                }
+                _ => generic_object_value_to_bson(o),
+            }
+        }
+        Some((object_id, tagged_object_id_value)) if object_id == "$object_id" => {
+            let obj_id: Result<String, _> = tagged_object_id_value.try_into();
+            if obj_id.is_err() {
+                generic_object_value_to_bson(o)
+            } else {
+                let obj_id = ObjectId::with_string(&obj_id.unwrap());
+                if obj_id.is_err() {
+                    generic_object_value_to_bson(o)
+                } else {
+                    Bson::ObjectId(obj_id.unwrap())
+                }
+            }
+        }
+        Some((symbol, tagged_symbol_value)) if symbol == "$symbol" => {
+            let sym: Result<String, _> = tagged_symbol_value.try_into();
+            if sym.is_err() {
+                generic_object_value_to_bson(o)
+            } else {
+                Bson::Symbol(sym.unwrap())
+            }
+        }
+        _ => generic_object_value_to_bson(o),
+    }
+}
+
+fn get_binary_subtype<'a>(tagged_value: &'a Tagged<Value>) -> Option<BinarySubtype> {
+    match tagged_value.item() {
+        Value::Primitive(Primitive::String(s)) => Some(match s.as_ref() {
+            "generic" => BinarySubtype::Generic,
+            "function" => BinarySubtype::Function,
+            "binary_old" => BinarySubtype::BinaryOld,
+            "uuid_old" => BinarySubtype::UuidOld,
+            "uuid" => BinarySubtype::Uuid,
+            "md5" => BinarySubtype::Md5,
+            _ => unreachable!(),
+        }),
+        Value::Primitive(Primitive::Int(i)) => Some(BinarySubtype::UserDefined(*i as u8)),
+        _ => None,
+    }
+}
+
+// generic_object_value_bson handles any Object that does not
+// correspond to a special bson type (things like regex or javascript code).
+fn generic_object_value_to_bson(o: &Dictionary) -> Bson {
+    let mut doc = Document::new();
+    for (k, v) in o.entries.iter() {
+        doc.insert(k.clone(), value_to_bson_value(v));
+    }
+    Bson::Document(doc)
+}
+
+fn shell_encode_document(
+    writer: &mut Vec<u8>,
+    doc: Document,
+    span: Span,
+) -> Result<(), ShellError> {
+    match encode_document(writer, &doc) {
+        Err(e) => Err(ShellError::labeled_error(
+            format!("Failed to encode document due to: {:?}", e),
+            "requires BSON-compatible document",
+            span,
+        )),
+        _ => Ok(()),
+    }
+}
+
+fn bson_value_to_bytes(bson: Bson, span: Span) -> Result<Vec<u8>, ShellError> {
+    let mut out = Vec::new();
+    match bson {
+        Bson::Array(a) => {
+            for v in a.into_iter() {
+                match v {
+                    Bson::Document(d) => shell_encode_document(&mut out, d, span)?,
+                    _ => {
+                        return Err(ShellError::labeled_error(
+                            format!("All top level values must be Documents, got {:?}", v),
+                            "requires BSON-compatible document",
+                            span,
+                        ))
+                    }
+                }
+            }
+        }
+        Bson::Document(d) => shell_encode_document(&mut out, d, span)?,
+        _ => {
+            return Err(ShellError::labeled_error(
+                format!("All top level values must be Documents, got {:?}", bson),
+                "requires BSON-compatible document",
+                span,
+            ))
+        }
+    }
+    Ok(out)
+}
+
+fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
+    let args = args.evaluate_once(registry)?;
+    let name_span = args.name_span();
+    let out = args.input;
+
+    Ok(out
+        .values
+        .map(
+            move |a| match bson_value_to_bytes(value_to_bson_value(&a), name_span) {
+                Ok(x) => ReturnSuccess::value(Value::Binary(x).simple_spanned(name_span)),
+                _ => Err(ShellError::labeled_error_with_secondary(
+                    "Expected an object with BSON-compatible structure from pipeline",
+                    "requires BSON-compatible input: Must be Array or Object",
+                    name_span,
+                    format!("{} originates from here", a.item.type_name()),
+                    a.span(),
+                )),
+            },
+        )
+        .to_output_stream())
+}

@@ -5,8 +5,6 @@ use crate::parser::registry::Signature;
 use crate::prelude::*;
 use indexmap::IndexMap;
 
-const VERSION: &'static str = env!("CARGO_PKG_VERSION");
-
 pub struct Version;
 
 impl WholeStreamCommand for Version {
@@ -34,7 +32,7 @@ pub fn date(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
     let mut indexmap = IndexMap::new();
     indexmap.insert(
         "version".to_string(),
-        Tagged::from_simple_spanned_item(Value::string(VERSION.to_string()), span),
+        Tagged::from_simple_spanned_item(Value::string(clap::crate_version!()), span),
     );
 
     let value = Tagged::from_simple_spanned_item(Value::Object(Dictionary::from(indexmap)), span);

@@ -121,7 +121,7 @@ impl Context {
         name_span: Span,
         source_map: SourceMap,
         args: hir::Call,
-        source: Text,
+        source: &Text,
         input: InputStream,
     ) -> OutputStream {
         let command_args = self.command_args(args, input, source, source_map, name_span);
@@ -131,13 +131,13 @@ impl Context {
     fn call_info(
         &self,
         args: hir::Call,
-        source: Text,
+        source: &Text,
         source_map: SourceMap,
         name_span: Span,
     ) -> UnevaluatedCallInfo {
         UnevaluatedCallInfo {
             args,
-            source,
+            source: source.clone(),
             source_map,
             name_span,
         }
@@ -147,7 +147,7 @@ impl Context {
         &self,
         args: hir::Call,
         input: InputStream,
-        source: Text,
+        source: &Text,
         source_map: SourceMap,
         name_span: Span,
     ) -> CommandArgs {

@@ -5,6 +5,7 @@ use ansi_term::Color;
 use derive_new::new;
 use language_reporting::{Diagnostic, Label, Severity};
 use serde::{Deserialize, Serialize};
+use std::fmt;
 
 #[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Serialize, Deserialize)]
 pub enum Description {
@@ -56,6 +57,12 @@ pub struct ShellError {
     cause: Option<Box<ProximateShellError>>,
 }
 
+impl ToDebug for ShellError {
+    fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
+        self.error.fmt_debug(f, source)
+    }
+}
+
 impl serde::de::Error for ShellError {
     fn custom<T>(msg: T) -> Self
     where
@@ -335,6 +342,7 @@ pub enum ProximateShellError {
         right: Tagged<String>,
     },
 }
 
 impl ProximateShellError {
     fn start(self) -> ShellError {
         ShellError {
@@ -344,6 +352,13 @@ impl ProximateShellError {
         }
     }
 
+impl ToDebug for ProximateShellError {
+    fn fmt_debug(&self, f: &mut fmt::Formatter, _source: &str) -> fmt::Result {
+        // TODO: Custom debug for inner spans
+        write!(f, "{:?}", self)
+    }
+}
+
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct ShellDiagnostic {
     crate diagnostic: Diagnostic<Span>,

@@ -39,6 +39,7 @@ crate fn evaluate_baseline_expr(
 ) -> Result<Tagged<Value>, ShellError> {
     match &expr.item {
         RawExpression::Literal(literal) => Ok(evaluate_literal(expr.copy_span(*literal), source)),
+        RawExpression::FilePath(path) => Ok(Value::path(path.clone()).tagged(expr.span())),
         RawExpression::Synthetic(hir::Synthetic::String(s)) => Ok(Value::string(s).tagged_unknown()),
         RawExpression::Variable(var) => evaluate_reference(var, scope, source),
         RawExpression::ExternalCommand(external) => evaluate_external(external, scope, source),

@@ -78,13 +78,16 @@ impl TableView {
         for head in 0..headers.len() {
             let mut current_col_max = 0;
             for row in 0..values.len() {
-                let value_length = entries[row][head].0.len();
+                let value_length = entries[row][head].0.chars().count();
                 if value_length > current_col_max {
                     current_col_max = value_length;
                 }
             }
 
-            max_per_column.push(std::cmp::max(current_col_max, headers[head].len()));
+            max_per_column.push(std::cmp::max(
+                current_col_max,
+                headers[head].chars().count(),
+            ));
         }
 
         // Different platforms want different amounts of buffer, not sure why

src/git.rs: 14 changes

@@ -7,15 +7,13 @@ pub fn current_branch() -> Option<String> {
         Ok(repo) => {
             let r = repo.head();
             match r {
-                Ok(r) => {
-                    match r.shorthand() {
-                        Some(s) => Some(s.to_string()),
-                        None => None,
-                    }
-                },
-                _ => None
+                Ok(r) => match r.shorthand() {
+                    Some(s) => Some(s.to_string()),
+                    None => None,
+                },
+                _ => None,
             }
-        },
-        _ => None
+        }
+        _ => None,
     }
 }

@@ -3,7 +3,6 @@
 #![feature(generators)]
 #![feature(try_trait)]
 #![feature(bind_by_move_pattern_guards)]
-#![feature(option_flattening)]
 #![feature(specialization)]
 #![feature(proc_macro_hygiene)]
 
@@ -30,6 +29,7 @@ pub use crate::commands::command::{CallInfo, ReturnSuccess, ReturnValue};
 pub use crate::context::{SourceMap, SpanSource};
 pub use crate::env::host::BasicHost;
 pub use crate::object::base::OF64;
+pub use crate::parser::hir::SyntaxType;
 pub use crate::plugin::{serve_plugin, Plugin};
 pub use crate::utils::{AbsolutePath, RelativePath};
 pub use cli::cli;

@@ -122,10 +122,8 @@ impl Primitive {
     pub fn style(&self) -> &'static str {
         match self {
             Primitive::Bytes(0) => "c", // centre 'missing' indicator
-            Primitive::Int(_) |
-            Primitive::Bytes(_) |
-            Primitive::Float(_) => "r",
-            _ => ""
+            Primitive::Int(_) | Primitive::Bytes(_) | Primitive::Float(_) => "r",
+            _ => "",
         }
     }
 }
@@ -245,6 +243,48 @@ impl std::convert::TryFrom<&'a Tagged<Value>> for i64 {
     }
 }
 
+impl std::convert::TryFrom<&'a Tagged<Value>> for String {
+    type Error = ShellError;
+
+    fn try_from(value: &'a Tagged<Value>) -> Result<String, ShellError> {
+        match value.item() {
+            Value::Primitive(Primitive::String(s)) => Ok(s.clone()),
+            v => Err(ShellError::type_error(
+                "String",
+                value.copy_span(v.type_name()),
+            )),
+        }
+    }
+}
+
+impl std::convert::TryFrom<&'a Tagged<Value>> for Vec<u8> {
+    type Error = ShellError;
+
+    fn try_from(value: &'a Tagged<Value>) -> Result<Vec<u8>, ShellError> {
+        match value.item() {
+            Value::Binary(b) => Ok(b.clone()),
+            v => Err(ShellError::type_error(
+                "Binary",
+                value.copy_span(v.type_name()),
+            )),
+        }
+    }
+}
+
+impl std::convert::TryFrom<&'a Tagged<Value>> for &'a crate::object::Dictionary {
+    type Error = ShellError;
+
+    fn try_from(value: &'a Tagged<Value>) -> Result<&'a crate::object::Dictionary, ShellError> {
+        match value.item() {
+            Value::Object(d) => Ok(d),
+            v => Err(ShellError::type_error(
+                "Dictionary",
+                value.copy_span(v.type_name()),
+            )),
+        }
+    }
+}
+
 #[derive(Serialize, Deserialize)]
 pub enum Switch {
     Present,
@@ -295,6 +335,7 @@ impl Value {
         }
     }
 
+    // TODO: This is basically a legacy construct, I think
     pub fn data_descriptors(&self) -> Vec<String> {
         match self {
             Value::Primitive(_) => vec![],
@@ -472,7 +513,7 @@ impl Value {
     crate fn style_leaf(&self) -> &'static str {
         match self {
             Value::Primitive(p) => p.style(),
-            _ => ""
+            _ => "",
         }
     }
 
@@ -559,6 +600,10 @@ impl Value {
         Value::Primitive(Primitive::String(s.into()))
     }
 
+    pub fn path(s: impl Into<PathBuf>) -> Value {
+        Value::Primitive(Primitive::Path(s.into()))
+    }
+
    pub fn bytes(s: impl Into<u64>) -> Value {
         Value::Primitive(Primitive::Bytes(s.into()))
     }
@@ -594,6 +639,18 @@ impl Value {
     }
 }
 
+impl Tagged<Value> {
+    crate fn as_path(&self) -> Result<PathBuf, ShellError> {
+        match self.item() {
+            Value::Primitive(Primitive::Path(path)) => Ok(path.clone()),
+            other => Err(ShellError::type_error(
+                "Path",
+                other.type_name().tagged(self.span()),
+            )),
+        }
+    }
+}
+
 crate fn select_fields(obj: &Value, fields: &[String], tag: impl Into<Tag>) -> Tagged<Value> {
     let mut out = TaggedDictBuilder::new(tag);
 

@@ -2,7 +2,6 @@ use crate::object::base as value;
 use crate::parser::hir;
 use crate::prelude::*;
 use log::trace;
-use std::path::PathBuf;
 
 pub trait ExtractType: Sized {
     fn extract(value: &Tagged<Value>) -> Result<Self, ShellError>;
@@ -196,9 +195,9 @@ impl ExtractType for std::path::PathBuf {
 
         match &value {
             Tagged {
-                item: Value::Primitive(Primitive::String(p)),
+                item: Value::Primitive(Primitive::Path(p)),
                 ..
-            } => Ok(PathBuf::from(p)),
+            } => Ok(p.clone()),
             other => Err(ShellError::type_error("Path", other.tagged_type_name())),
         }
     }
 }
@@ -11,16 +11,22 @@ use derive_new::new;
 use getset::Getters;
 use serde::{Deserialize, Serialize};
 use std::fmt;
+use std::path::PathBuf;
 
 use crate::evaluate::Scope;
 
-crate use self::baseline_parse::{baseline_parse_single_token, baseline_parse_token_as_string};
-crate use self::baseline_parse_tokens::{baseline_parse_next_expr, SyntaxType, TokensIterator};
+crate use self::baseline_parse::{
+    baseline_parse_single_token, baseline_parse_token_as_number, baseline_parse_token_as_path,
+    baseline_parse_token_as_string,
+};
+crate use self::baseline_parse_tokens::{baseline_parse_next_expr, TokensIterator};
 crate use self::binary::Binary;
 crate use self::external_command::ExternalCommand;
 crate use self::named::NamedArguments;
 crate use self::path::Path;
 
+pub use self::baseline_parse_tokens::SyntaxType;
+
 pub fn path(head: impl Into<Expression>, tail: Vec<Tagged<impl Into<String>>>) -> Path {
     Path::new(
         head.into(),
@@ -68,6 +74,8 @@ impl ToDebug for Call {
             write!(f, "{}", named.debug(source))?;
         }
 
+        write!(f, ")")?;
+
         Ok(())
     }
 }
@@ -81,6 +89,7 @@ pub enum RawExpression {
     Block(Vec<Expression>),
     List(Vec<Expression>),
     Path(Box<Path>),
+    FilePath(PathBuf),
     ExternalCommand(ExternalCommand),
 
     #[allow(unused)]
@@ -105,6 +114,7 @@ impl RawExpression {
         match self {
             RawExpression::Literal(literal) => literal.type_name(),
             RawExpression::Synthetic(synthetic) => synthetic.type_name(),
+            RawExpression::FilePath(..) => "filepath",
             RawExpression::Variable(..) => "variable",
             RawExpression::List(..) => "list",
             RawExpression::Binary(..) => "binary",
@@ -141,6 +151,10 @@ impl Expression {
         )
     }
 
+    crate fn file_path(path: impl Into<PathBuf>, outer: impl Into<Span>) -> Expression {
+        Tagged::from_simple_spanned_item(RawExpression::FilePath(path.into()), outer.into())
+    }
+
     crate fn bare(span: impl Into<Span>) -> Expression {
         Tagged::from_simple_spanned_item(RawExpression::Literal(Literal::Bare), span.into())
     }
@@ -170,7 +184,8 @@ impl Expression {
 impl ToDebug for Expression {
     fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
         match self.item() {
-            RawExpression::Literal(l) => write!(f, "{:?}", l),
+            RawExpression::Literal(l) => l.tagged(self.span()).fmt_debug(f, source),
+            RawExpression::FilePath(p) => write!(f, "{}", p.display()),
             RawExpression::Synthetic(Synthetic::String(s)) => write!(f, "{:?}", s),
             RawExpression::Variable(Variable::It(_)) => write!(f, "$it"),
             RawExpression::Variable(Variable::Other(s)) => write!(f, "${}", s.slice(source)),
@@ -1,5 +1,7 @@
+use crate::context::Context;
 use crate::parser::{hir, RawToken, Token};
 use crate::Text;
+use std::path::PathBuf;
 
 pub fn baseline_parse_single_token(token: &Token, source: &Text) -> hir::Expression {
     match *token.item() {
@@ -15,6 +17,20 @@ pub fn baseline_parse_single_token(token: &Token, source: &Text) -> hir::Expression {
     }
 }
 
+pub fn baseline_parse_token_as_number(token: &Token, source: &Text) -> hir::Expression {
+    match *token.item() {
+        RawToken::Variable(span) if span.slice(source) == "it" => {
+            hir::Expression::it_variable(span, token.span())
+        }
+        RawToken::External(span) => hir::Expression::external_command(span, token.span()),
+        RawToken::Variable(span) => hir::Expression::variable(span, token.span()),
+        RawToken::Integer(int) => hir::Expression::int(int, token.span()),
+        RawToken::Size(int, unit) => hir::Expression::size(int, unit, token.span()),
+        RawToken::Bare => hir::Expression::bare(token.span()),
+        RawToken::String(span) => hir::Expression::string(span, token.span()),
+    }
+}
+
 pub fn baseline_parse_token_as_string(token: &Token, source: &Text) -> hir::Expression {
     match *token.item() {
         RawToken::Variable(span) if span.slice(source) == "it" => {
@@ -28,3 +44,32 @@ pub fn baseline_parse_token_as_string(token: &Token, source: &Text) -> hir::Expression {
         RawToken::String(span) => hir::Expression::string(span, token.span()),
     }
 }
+
+pub fn baseline_parse_token_as_path(
+    token: &Token,
+    context: &Context,
+    source: &Text,
+) -> hir::Expression {
+    match *token.item() {
+        RawToken::Variable(span) if span.slice(source) == "it" => {
+            hir::Expression::it_variable(span, token.span())
+        }
+        RawToken::External(span) => hir::Expression::external_command(span, token.span()),
+        RawToken::Variable(span) => hir::Expression::variable(span, token.span()),
+        RawToken::Integer(_) => hir::Expression::bare(token.span()),
+        RawToken::Size(_, _) => hir::Expression::bare(token.span()),
+        RawToken::Bare => hir::Expression::file_path(
+            expand_path(token.span().slice(source), context),
+            token.span(),
+        ),
+        RawToken::String(span) => {
+            hir::Expression::file_path(expand_path(span.slice(source), context), token.span())
+        }
+    }
+}
+
+pub fn expand_path(string: &str, context: &Context) -> PathBuf {
+    let expanded = shellexpand::tilde_with_context(string, || context.shell_manager.homedir());
+
+    PathBuf::from(expanded.as_ref())
+}
@@ -1,8 +1,11 @@
+use crate::context::Context;
 use crate::errors::ShellError;
-use crate::parser::registry::CommandRegistry;
 use crate::parser::{
     hir,
-    hir::{baseline_parse_single_token, baseline_parse_token_as_string},
+    hir::{
+        baseline_parse_single_token, baseline_parse_token_as_number, baseline_parse_token_as_path,
+        baseline_parse_token_as_string,
+    },
     DelimitedNode, Delimiter, PathNode, RawToken, TokenNode,
 };
 use crate::{Span, Tag, Tagged, TaggedItem, Text};
@@ -12,8 +15,9 @@ use serde::{Deserialize, Serialize};
 
 pub fn baseline_parse_tokens(
     token_nodes: &mut TokensIterator<'_>,
-    registry: &CommandRegistry,
+    context: &Context,
     source: &Text,
+    syntax_type: SyntaxType,
 ) -> Result<Vec<hir::Expression>, ShellError> {
     let mut exprs: Vec<hir::Expression> = vec![];
 
@@ -22,7 +26,7 @@ pub fn baseline_parse_tokens(
             break;
         }
 
-        let expr = baseline_parse_next_expr(token_nodes, registry, source, SyntaxType::Any)?;
+        let expr = baseline_parse_next_expr(token_nodes, context, source, syntax_type)?;
         exprs.push(expr);
     }
 
@@ -35,7 +39,10 @@ pub enum SyntaxType {
     Any,
     List,
     Literal,
+    String,
+    Member,
     Variable,
+    Number,
     Path,
     Binary,
     Block,
@@ -44,7 +51,7 @@
 
 pub fn baseline_parse_next_expr(
     tokens: &mut TokensIterator,
-    registry: &CommandRegistry,
+    context: &Context,
     source: &Text,
     syntax_type: SyntaxType,
 ) -> Result<hir::Expression, ShellError> {
@@ -56,7 +63,7 @@ pub fn baseline_parse_next_expr(
 
     match (syntax_type, next) {
         (SyntaxType::Path, TokenNode::Token(token)) => {
-            return Ok(baseline_parse_token_as_string(token, source))
+            return Ok(baseline_parse_token_as_path(token, context, source))
         }
 
         (SyntaxType::Path, token) => {
@@ -66,10 +73,50 @@ pub fn baseline_parse_next_expr(
             ))
         }
 
-        _ => {}
+        (SyntaxType::String, TokenNode::Token(token)) => {
+            return Ok(baseline_parse_token_as_string(token, source));
+        }
+
+        (SyntaxType::String, token) => {
+            return Err(ShellError::type_error(
+                "String",
+                token.type_name().simple_spanned(token.span()),
+            ))
+        }
+
+        (SyntaxType::Number, TokenNode::Token(token)) => {
+            return Ok(baseline_parse_token_as_number(token, source));
+        }
+
+        (SyntaxType::Number, token) => {
+            return Err(ShellError::type_error(
+                "Numeric",
+                token.type_name().simple_spanned(token.span()),
+            ))
+        }
+
+        // TODO: More legit member processing
+        (SyntaxType::Member, TokenNode::Token(token)) => {
+            return Ok(baseline_parse_token_as_string(token, source));
+        }
+
+        (SyntaxType::Member, token) => {
+            return Err(ShellError::type_error(
+                "member",
+                token.type_name().simple_spanned(token.span()),
+            ))
+        }
+
+        (SyntaxType::Any, _) => {}
+        (SyntaxType::List, _) => {}
+        (SyntaxType::Literal, _) => {}
+        (SyntaxType::Variable, _) => {}
+        (SyntaxType::Binary, _) => {}
+        (SyntaxType::Block, _) => {}
+        (SyntaxType::Boolean, _) => {}
     };
 
-    let first = baseline_parse_semantic_token(next, registry, source)?;
+    let first = baseline_parse_semantic_token(next, context, source)?;
 
     let possible_op = tokens.peek();
 
@@ -88,7 +135,7 @@ pub fn baseline_parse_next_expr(
                 op.span(),
             ))
         }
-        Some(token) => baseline_parse_semantic_token(token, registry, source)?,
+        Some(token) => baseline_parse_semantic_token(token, context, source)?,
     };
 
     // We definitely have a binary expression here -- let's see if we should coerce it into a block
@@ -176,13 +223,13 @@ pub fn baseline_parse_next_expr(
 
 pub fn baseline_parse_semantic_token(
     token: &TokenNode,
-    registry: &CommandRegistry,
+    context: &Context,
     source: &Text,
 ) -> Result<hir::Expression, ShellError> {
     match token {
         TokenNode::Token(token) => Ok(baseline_parse_single_token(token, source)),
         TokenNode::Call(_call) => unimplemented!(),
-        TokenNode::Delimited(delimited) => baseline_parse_delimited(delimited, registry, source),
+        TokenNode::Delimited(delimited) => baseline_parse_delimited(delimited, context, source),
         TokenNode::Pipeline(_pipeline) => unimplemented!(),
         TokenNode::Operator(_op) => unreachable!(),
         TokenNode::Flag(_flag) => Err(ShellError::unimplemented(
@@ -191,20 +238,24 @@ pub fn baseline_parse_semantic_token(
         TokenNode::Member(_span) => unreachable!(),
         TokenNode::Whitespace(_span) => unreachable!(),
         TokenNode::Error(error) => Err(*error.item.clone()),
-        TokenNode::Path(path) => baseline_parse_path(path, registry, source),
+        TokenNode::Path(path) => baseline_parse_path(path, context, source),
     }
 }
 
 pub fn baseline_parse_delimited(
     token: &Tagged<DelimitedNode>,
-    registry: &CommandRegistry,
+    context: &Context,
     source: &Text,
 ) -> Result<hir::Expression, ShellError> {
     match token.delimiter() {
         Delimiter::Brace => {
             let children = token.children();
-            let exprs =
-                baseline_parse_tokens(&mut TokensIterator::new(children), registry, source)?;
+            let exprs = baseline_parse_tokens(
+                &mut TokensIterator::new(children),
+                context,
+                source,
+                SyntaxType::Any,
+            )?;
 
             let expr = hir::RawExpression::Block(exprs);
             Ok(Tagged::from_simple_spanned_item(expr, token.span()))
@@ -212,8 +263,12 @@ pub fn baseline_parse_delimited(
         Delimiter::Paren => unimplemented!(),
         Delimiter::Square => {
             let children = token.children();
-            let exprs =
-                baseline_parse_tokens(&mut TokensIterator::new(children), registry, source)?;
+            let exprs = baseline_parse_tokens(
+                &mut TokensIterator::new(children),
+                context,
+                source,
+                SyntaxType::Any,
+            )?;
 
             let expr = hir::RawExpression::List(exprs);
             Ok(expr.tagged(Tag::unknown_origin(token.span())))
@@ -223,10 +278,10 @@ pub fn baseline_parse_delimited(
 
 pub fn baseline_parse_path(
     token: &Tagged<PathNode>,
-    registry: &CommandRegistry,
+    context: &Context,
     source: &Text,
 ) -> Result<hir::Expression, ShellError> {
-    let head = baseline_parse_semantic_token(token.head(), registry, source)?;
+    let head = baseline_parse_semantic_token(token.head(), context, source)?;
 
     let mut tail = vec![];
 
@@ -27,7 +27,7 @@ impl ToDebug for NamedArguments {
         for (name, value) in &self.named {
             match value {
                 NamedValue::AbsentSwitch => continue,
-                NamedValue::PresentSwitch(span) => write!(f, " {}", span.slice(source))?,
+                NamedValue::PresentSwitch(span) => write!(f, " --{}", span.slice(source))?,
                 NamedValue::AbsentValue => continue,
                 NamedValue::Value(expr) => write!(f, " --{} {}", name, expr.debug(source))?,
             }
@@ -540,6 +540,8 @@ fn is_start_bare_char(c: char) -> bool {
         '@' => true,
         '*' => true,
         '?' => true,
+        '~' => true,
+        '+' => true,
         _ => false,
     }
 }
@@ -557,6 +559,8 @@ fn is_bare_char(c: char) -> bool {
         '*' => true,
         '?' => true,
         '=' => true,
+        '~' => true,
+        '+' => true,
         _ => false,
     }
 }
@@ -1,5 +1,6 @@
+use crate::context::Context;
 use crate::errors::{ArgumentError, ShellError};
-use crate::parser::registry::{CommandRegistry, NamedType, PositionalType, Signature};
+use crate::parser::registry::{NamedType, PositionalType, Signature};
 use crate::parser::{baseline_parse_tokens, CallNode};
 use crate::parser::{
     hir::{self, NamedArguments},
@@ -10,7 +11,7 @@ use log::trace;
 
 pub fn parse_command(
     config: &Signature,
-    registry: &CommandRegistry,
+    context: &Context,
     call: &Tagged<CallNode>,
     source: &Text,
 ) -> Result<hir::Call, ShellError> {
@@ -31,7 +32,7 @@ pub fn parse_command(
             .collect()
     });
 
-    match parse_command_tail(&config, registry, children, source, call.span())? {
+    match parse_command_tail(&config, context, children, source, call.span())? {
         None => Ok(hir::Call::new(Box::new(head), None, None)),
         Some((positional, named)) => Ok(hir::Call::new(Box::new(head), positional, named)),
     }
@@ -63,7 +64,7 @@ fn parse_command_head(head: &TokenNode) -> Result<hir::Expression, ShellError> {
 
 fn parse_command_tail(
     config: &Signature,
-    registry: &CommandRegistry,
+    context: &Context,
     tail: Option<Vec<TokenNode>>,
     source: &Text,
     command_span: Span,
@@ -101,7 +102,7 @@ fn parse_command_tail(
                     }
 
                     let expr =
-                        hir::baseline_parse_next_expr(tail, registry, source, *syntax_type)?;
+                        hir::baseline_parse_next_expr(tail, context, source, *syntax_type)?;
 
                     tail.restart();
                     named.insert_mandatory(name, expr);
@@ -121,7 +122,7 @@ fn parse_command_tail(
                         ));
                     }
 
-                    let expr = hir::baseline_parse_next_expr(tail, registry, source, *syntax_type)?;
+                    let expr = hir::baseline_parse_next_expr(tail, context, source, *syntax_type)?;
 
                     tail.restart();
                     named.insert_optional(name, Some(expr));
@@ -160,16 +161,17 @@ fn parse_command_tail(
             }
         }
 
-        let result = hir::baseline_parse_next_expr(tail, registry, source, arg.syntax_type())?;
+        let result = hir::baseline_parse_next_expr(tail, context, source, arg.syntax_type())?;
 
         positional.push(result);
    }
 
     trace_remaining("after positional", tail.clone(), source);
 
-    // TODO: Only do this if rest params are specified
-    let remainder = baseline_parse_tokens(tail, registry, source)?;
+    if let Some(syntax_type) = config.rest_positional {
+        let remainder = baseline_parse_tokens(tail, context, source, syntax_type)?;
         positional.extend(remainder);
+    }
 
     trace_remaining("after rest", tail.clone(), source);
 
@@ -180,6 +182,8 @@ fn parse_command_tail(
         positional => Some(positional),
     };
 
+    // TODO: Error if extra unconsumed positional arguments
+
     let named = match named {
         named if named.named.is_empty() => None,
         named => Some(named),
@@ -74,8 +74,8 @@ pub struct Signature {
     pub name: String,
     #[new(default)]
     pub positional: Vec<PositionalType>,
-    #[new(value = "false")]
-    pub rest_positional: bool,
+    #[new(value = "None")]
+    pub rest_positional: Option<SyntaxType>,
     #[new(default)]
     pub named: IndexMap<String, NamedType>,
     #[new(value = "false")]
@@ -130,8 +130,8 @@ impl Signature {
         self
     }
 
-    pub fn rest(mut self) -> Signature {
-        self.rest_positional = true;
+    pub fn rest(mut self, ty: SyntaxType) -> Signature {
+        self.rest_positional = Some(ty);
         self
     }
 }
@@ -279,10 +279,10 @@ impl Signature {
     crate fn parse_args(
         &self,
         call: &Tagged<CallNode>,
-        registry: &CommandRegistry,
+        context: &Context,
         source: &Text,
     ) -> Result<hir::Call, ShellError> {
-        let args = parse_command(self, registry, call, source)?;
+        let args = parse_command(self, context, call, source)?;
 
         trace!("parsed args: {:?}", args);
 
@@ -1,7 +1,6 @@
-use indexmap::IndexMap;
 use nu::{
-    serve_plugin, CallInfo, Plugin, PositionalType, Primitive, ReturnSuccess, ReturnValue,
-    ShellError, Signature, Tagged, Value,
+    serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature,
+    SyntaxType, Tagged, Value,
 };
 
 struct Add {
@@ -43,17 +42,12 @@ impl Add {
 
 impl Plugin for Add {
     fn config(&mut self) -> Result<Signature, ShellError> {
-        Ok(Signature {
-            name: "add".to_string(),
-            positional: vec![
-                PositionalType::mandatory_any("Field"),
-                PositionalType::mandatory_any("Value"),
-            ],
-            is_filter: true,
-            named: IndexMap::new(),
-            rest_positional: true,
-        })
+        Ok(Signature::build("add")
+            .required("Field", SyntaxType::String)
+            .required("Value", SyntaxType::String)
+            .rest(SyntaxType::String).filter())
     }
 
     fn begin_filter(&mut self, call_info: CallInfo) -> Result<Vec<ReturnValue>, ShellError> {
         if let Some(args) = call_info.args.positional {
             match &args[0] {
@@ -1,9 +1,5 @@
-#![feature(option_flattening)]
 use crossterm::{cursor, terminal, Attribute, RawScreen};
-use indexmap::IndexMap;
-use nu::{
-    serve_plugin, CallInfo, NamedType, Plugin, ShellError, Signature, SpanSource, Tagged, Value,
-};
+use nu::{serve_plugin, CallInfo, Plugin, ShellError, Signature, SpanSource, Tagged, Value};
 use pretty_hex::*;
 
 struct BinaryView;
@@ -16,15 +12,7 @@ impl BinaryView {
 
 impl Plugin for BinaryView {
     fn config(&mut self) -> Result<Signature, ShellError> {
-        let mut named = IndexMap::new();
-        named.insert("lores".to_string(), NamedType::Switch);
-        Ok(Signature {
-            name: "binaryview".to_string(),
-            positional: vec![],
-            is_filter: false,
-            named,
-            rest_positional: false,
-        })
+        Ok(Signature::build("binaryview").switch("lores"))
     }
 
     fn sink(&mut self, call_info: CallInfo, input: Vec<Tagged<Value>>) {
@@ -32,7 +20,7 @@ impl Plugin for BinaryView {
             let value_origin = v.origin();
             match v.item {
                 Value::Binary(b) => {
-                    let source = value_origin.map(|x| call_info.source_map.get(&x)).flatten();
+                    let source = value_origin.and_then(|x| call_info.source_map.get(&x));
                     let _ = view_binary(&b, source, call_info.args.has("lores"));
                 }
                 _ => {}
@@ -1,7 +1,6 @@
-use indexmap::IndexMap;
 use nu::{
-    serve_plugin, CallInfo, Plugin, PositionalType, Primitive, ReturnSuccess, ReturnValue,
-    ShellError, Signature, Tagged, Value,
+    serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature,
+    SyntaxType, Tagged, Value,
 };
 
 struct Edit {
@@ -42,17 +41,12 @@ impl Edit {
 
 impl Plugin for Edit {
     fn config(&mut self) -> Result<Signature, ShellError> {
-        Ok(Signature {
-            name: "edit".to_string(),
-            positional: vec![
-                PositionalType::mandatory_any("Field"),
-                PositionalType::mandatory_any("Value"),
-            ],
-            is_filter: true,
-            named: IndexMap::new(),
-            rest_positional: true,
-        })
+        Ok(Signature::build("edit")
+            .required("Field", SyntaxType::String)
+            .required("Value", SyntaxType::String)
+            .filter())
     }
 
     fn begin_filter(&mut self, call_info: CallInfo) -> Result<Vec<ReturnValue>, ShellError> {
         if let Some(args) = call_info.args.positional {
             match &args[0] {
|
@ -1,7 +1,6 @@
|
||||||
use indexmap::IndexMap;
|
|
||||||
use nu::{
|
use nu::{
|
||||||
serve_plugin, CallInfo, NamedType, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError,
|
serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature,
|
||||||
Signature, Tagged, TaggedItem, Value,
|
SyntaxType, Tagged, TaggedItem, Value,
|
||||||
};
|
};
|
||||||
|
|
||||||
enum Action {
|
enum Action {
|
||||||
|
@ -116,19 +115,14 @@ impl Inc {
|
||||||
|
|
||||||
impl Plugin for Inc {
|
impl Plugin for Inc {
|
||||||
fn config(&mut self) -> Result<Signature, ShellError> {
|
fn config(&mut self) -> Result<Signature, ShellError> {
|
||||||
let mut named = IndexMap::new();
|
Ok(Signature::build("inc")
|
||||||
named.insert("major".to_string(), NamedType::Switch);
|
.switch("major")
|
||||||
named.insert("minor".to_string(), NamedType::Switch);
|
.switch("minor")
|
||||||
named.insert("patch".to_string(), NamedType::Switch);
|
.switch("patch")
|
||||||
|
.rest(SyntaxType::String)
|
||||||
Ok(Signature {
|
.filter())
|
||||||
name: "inc".to_string(),
|
|
||||||
positional: vec![],
|
|
||||||
is_filter: true,
|
|
||||||
named,
|
|
||||||
rest_positional: true,
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn begin_filter(&mut self, call_info: CallInfo) -> Result<Vec<ReturnValue>, ShellError> {
|
fn begin_filter(&mut self, call_info: CallInfo) -> Result<Vec<ReturnValue>, ShellError> {
|
||||||
if call_info.args.has("major") {
|
if call_info.args.has("major") {
|
||||||
self.for_semver(SemVerAction::Major);
|
self.for_semver(SemVerAction::Major);
|
||||||
|
|
|
@@ -1,7 +1,7 @@
 use indexmap::IndexMap;
 use nu::{
     serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature,
-    Tagged, Value,
+    SyntaxType, Tagged, Value,
 };
 
 struct Skip {
@@ -20,7 +20,7 @@ impl Plugin for Skip {
             positional: vec![],
             is_filter: true,
             named: IndexMap::new(),
-            rest_positional: true,
+            rest_positional: Some(SyntaxType::Number),
         })
     }
     fn begin_filter(&mut self, call_info: CallInfo) -> Result<Vec<ReturnValue>, ShellError> {
@@ -1,7 +1,6 @@
-use indexmap::IndexMap;
 use nu::{
-    serve_plugin, CallInfo, NamedType, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError,
-    Signature, Tagged, Value,
+    serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature,
+    SyntaxType, Tagged, Value,
 };
 use regex::Regex;
 
@@ -40,9 +39,11 @@ impl Str {
         let applied = match self.action.as_ref() {
             Some(Action::Downcase) => Value::string(input.to_ascii_lowercase()),
             Some(Action::Upcase) => Value::string(input.to_ascii_uppercase()),
-            Some(Action::ToInteger) => match input.trim().parse::<i64>() {
-                Ok(v) => Value::int(v),
-                Err(_) => Value::string(input),
+            Some(Action::ToInteger) => match input.trim() {
+                other => match other.parse::<i64>() {
+                    Ok(v) => Value::int(v),
+                    Err(_) => Value::string(input),
+                },
             },
             Some(Action::Replace(ref mode)) => match mode {
                 ReplaceAction::Direct => Value::string(self.first_param()),
@@ -138,9 +139,7 @@ impl Str {
             Some(ref f) => {
                 let replacement = match value.item.get_data_by_path(value.tag(), f) {
                     Some(result) => self.strutils(result.map(|x| x.clone()))?,
-                    None => {
-                        return Err(ShellError::string("str could not find field to replace"))
-                    }
+                    None => return Ok(Tagged::from_item(Value::nothing(), value.tag)),
                 };
                 match value
                     .item
@@ -168,20 +167,14 @@ impl Str {
 
 impl Plugin for Str {
     fn config(&mut self) -> Result<Signature, ShellError> {
-        let mut named = IndexMap::new();
-        named.insert("downcase".to_string(), NamedType::Switch);
-        named.insert("upcase".to_string(), NamedType::Switch);
-        named.insert("to-int".to_string(), NamedType::Switch);
-        named.insert("replace".to_string(), NamedType::Switch);
-        named.insert("find-replace".to_string(), NamedType::Switch);
-
-        Ok(Signature {
-            name: "str".to_string(),
-            positional: vec![],
-            is_filter: true,
-            named,
-            rest_positional: true,
-        })
+        Ok(Signature::build("str")
+            .switch("downcase")
+            .switch("upcase")
+            .switch("to-int")
+            .switch("replace")
+            .switch("find-replace")
+            .rest(SyntaxType::Member)
+            .filter())
     }
 
     fn begin_filter(&mut self, call_info: CallInfo) -> Result<Vec<ReturnValue>, ShellError> {
|
@ -1,4 +1,3 @@
|
||||||
use indexmap::IndexMap;
|
|
||||||
use nu::{
|
use nu::{
|
||||||
serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature,
|
serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature,
|
||||||
Tag, Tagged, Value,
|
Tag, Tagged, Value,
|
||||||
|
@ -14,6 +13,7 @@ impl Sum {
|
||||||
|
|
||||||
fn sum(&mut self, value: Tagged<Value>) -> Result<(), ShellError> {
|
fn sum(&mut self, value: Tagged<Value>) -> Result<(), ShellError> {
|
||||||
match value.item {
|
match value.item {
|
||||||
|
Value::Primitive(Primitive::Nothing) => Ok(()),
|
||||||
Value::Primitive(Primitive::Int(i)) => {
|
Value::Primitive(Primitive::Int(i)) => {
|
||||||
match self.total {
|
match self.total {
|
||||||
Some(Tagged {
|
Some(Tagged {
|
||||||
|
@ -64,14 +64,9 @@ impl Sum {
|
||||||
|
|
||||||
impl Plugin for Sum {
|
impl Plugin for Sum {
|
||||||
fn config(&mut self) -> Result<Signature, ShellError> {
|
fn config(&mut self) -> Result<Signature, ShellError> {
|
||||||
Ok(Signature {
|
Ok(Signature::build("sum").filter())
|
||||||
name: "sum".to_string(),
|
|
||||||
positional: vec![],
|
|
||||||
is_filter: true,
|
|
||||||
named: IndexMap::new(),
|
|
||||||
rest_positional: true,
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn begin_filter(&mut self, _: CallInfo) -> Result<Vec<ReturnValue>, ShellError> {
|
fn begin_filter(&mut self, _: CallInfo) -> Result<Vec<ReturnValue>, ShellError> {
|
||||||
Ok(vec![])
|
Ok(vec![])
|
||||||
}
|
}
|
||||||
|
|
|
@@ -1,10 +1,9 @@
 use futures::executor::block_on;
 use futures::stream::StreamExt;
 use heim::{disk, memory, net};
-use indexmap::IndexMap;
 use nu::{
     serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature,
-    Tag, Tagged, TaggedDictBuilder, Value,
+    SyntaxType, Tag, Tagged, TaggedDictBuilder, Value,
 };
 use std::ffi::OsStr;
 
@@ -251,14 +250,9 @@ async fn sysinfo(tag: Tag) -> Vec<Tagged<Value>> {
 
 impl Plugin for Sys {
     fn config(&mut self) -> Result<Signature, ShellError> {
-        Ok(Signature {
-            name: "sys".to_string(),
-            positional: vec![],
-            is_filter: true,
-            named: IndexMap::new(),
-            rest_positional: true,
-        })
+        Ok(Signature::build("sys").rest(SyntaxType::Any))
     }
 
     fn begin_filter(&mut self, callinfo: CallInfo) -> Result<Vec<ReturnValue>, ShellError> {
         Ok(block_on(sysinfo(Tag::unknown_origin(callinfo.name_span)))
             .into_iter()
@@ -1,8 +1,5 @@
-#![feature(option_flattening)]
-
 use crossterm::{cursor, terminal, RawScreen};
 use crossterm::{InputEvent, KeyEvent};
-use indexmap::IndexMap;
 use nu::{
     serve_plugin, CallInfo, Plugin, Primitive, ShellError, Signature, SourceMap, SpanSource,
     Tagged, Value,
@@ -29,13 +26,7 @@ impl TextView {
 
 impl Plugin for TextView {
     fn config(&mut self) -> Result<Signature, ShellError> {
-        Ok(Signature {
-            name: "textview".to_string(),
-            positional: vec![],
-            is_filter: false,
-            named: IndexMap::new(),
-            rest_positional: false,
-        })
+        Ok(Signature::build("textview"))
     }
 
     fn sink(&mut self, call_info: CallInfo, input: Vec<Tagged<Value>>) {
@@ -217,7 +208,7 @@ fn view_text_value(value: &Tagged<Value>, source_map: &SourceMap) {
     let value_origin = value.origin();
     match value.item {
         Value::Primitive(Primitive::String(ref s)) => {
-            let source = value_origin.map(|x| source_map.get(&x)).flatten();
+            let source = value_origin.and_then(|x| source_map.get(&x));
 
             if let Some(source) = source {
                 let extension: Option<String> = match source {
@@ -1,5 +1,4 @@
 use derive_new::new;
-use indexmap::IndexMap;
 use nu::{serve_plugin, CallInfo, Plugin, ShellError, Signature, Tagged, Value};
 use ptree::item::StringItem;
 use ptree::output::print_tree_with;
@@ -81,13 +80,7 @@ struct TreeViewer;
 
 impl Plugin for TreeViewer {
     fn config(&mut self) -> Result<Signature, ShellError> {
-        Ok(Signature {
-            name: "tree".to_string(),
-            positional: vec![],
-            is_filter: false,
-            named: IndexMap::new(),
-            rest_positional: true,
-        })
+        Ok(Signature::build("tree"))
     }
 
     fn sink(&mut self, _call_info: CallInfo, input: Vec<Tagged<Value>>) {
@@ -32,6 +32,25 @@ macro_rules! trace_stream {
     }};
 }
 
+#[macro_export]
+macro_rules! trace_out_stream {
+    (target: $target:tt, source: $source:expr, $desc:tt = $expr:expr) => {{
+        if log::log_enabled!(target: $target, log::Level::Trace) {
+            use futures::stream::StreamExt;
+
+            let source = $source.clone();
+
+            let objects = $expr.values.inspect(move |o| {
+                trace!(target: $target, "{} = {}", $desc, o.debug(&source));
+            });
+
+            $crate::stream::OutputStream::new(objects)
+        } else {
+            $expr
+        }
+    }};
+}
+
 crate use crate::cli::MaybeOwned;
 crate use crate::commands::command::{
     CallInfo, CommandAction, CommandArgs, ReturnSuccess, ReturnValue, RunnableContext,
@@ -72,12 +72,16 @@ impl Shell for FilesystemShell {
         "filesystem".to_string()
     }
 
+    fn homedir(&self) -> Option<PathBuf> {
+        dirs::home_dir()
+    }
+
     fn ls(&self, args: EvaluatedWholeStreamCommandArgs) -> Result<OutputStream, ShellError> {
         let cwd = self.path();
         let mut full_path = PathBuf::from(self.path());
 
         match &args.nth(0) {
-            Some(Tagged { item: value, .. }) => full_path.push(Path::new(&value.as_string()?)),
+            Some(value) => full_path.push(Path::new(&value.as_path()?)),
             _ => {}
         }
 
@@ -176,7 +180,7 @@ impl Shell for FilesystemShell {
                 }
             },
             Some(v) => {
-                let target = v.as_string()?;
+                let target = v.as_path()?;
                 let path = PathBuf::from(self.path());
                 match dunce::canonicalize(path.join(target).as_path()) {
                     Ok(p) => p,
@@ -7,9 +7,11 @@ use crate::context::SourceMap;
 use crate::errors::ShellError;
 use crate::prelude::*;
 use crate::stream::OutputStream;
+use std::path::PathBuf;
 
 pub trait Shell: std::fmt::Debug {
     fn name(&self, source_map: &SourceMap) -> String;
+    fn homedir(&self) -> Option<PathBuf>;
     fn ls(&self, args: EvaluatedWholeStreamCommandArgs) -> Result<OutputStream, ShellError>;
     fn cd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result<OutputStream, ShellError>;
     fn cp(&self, args: CopyArgs, name: Span, path: &str) -> Result<OutputStream, ShellError>;
@@ -9,6 +9,7 @@ use crate::shell::filesystem_shell::FilesystemShell;
 use crate::shell::shell::Shell;
 use crate::stream::OutputStream;
 use std::error::Error;
+use std::path::PathBuf;
 use std::sync::{Arc, Mutex};
 
 #[derive(Clone, Debug)]
@@ -102,16 +103,24 @@ impl ShellManager {
         self.set_path(self.path());
     }
 
+    pub fn homedir(&self) -> Option<PathBuf> {
+        let env = self.shells.lock().unwrap();
+
+        env[self.current_shell].homedir()
+    }
+
     pub fn ls(&self, args: EvaluatedWholeStreamCommandArgs) -> Result<OutputStream, ShellError> {
         let env = self.shells.lock().unwrap();
 
         env[self.current_shell].ls(args)
     }
 
     pub fn cd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result<OutputStream, ShellError> {
         let env = self.shells.lock().unwrap();
 
         env[self.current_shell].cd(args)
     }
 
     pub fn cp(
         &self,
         args: CopyArgs,
@@ -69,6 +69,10 @@ impl Shell for ValueShell {
         )
     }
 
+    fn homedir(&self) -> Option<PathBuf> {
+        dirs::home_dir()
+    }
+
     fn ls(&self, _args: EvaluatedWholeStreamCommandArgs) -> Result<OutputStream, ShellError> {
         Ok(self
             .members()
@@ -80,19 +84,20 @@ impl Shell for ValueShell {
         let path = match args.nth(0) {
             None => "/".to_string(),
             Some(v) => {
-                let target = v.as_string()?;
+                let target = v.as_path()?;
 
                 let mut cwd = PathBuf::from(&self.path);
-                match target {
-                    x if x == ".." => {
-                        cwd.pop();
-                    }
-                    _ => match target.chars().nth(0) {
-                        Some(x) if x == '/' => cwd = PathBuf::from(target),
-                        _ => {
-                            cwd.push(target);
-                        }
-                    },
-                }
+
+                if target == PathBuf::from("..") {
+                    cwd.pop();
+                } else {
+                    match target.to_str() {
+                        Some(target) => match target.chars().nth(0) {
+                            Some(x) if x == '/' => cwd = PathBuf::from(target),
+                            _ => cwd.push(target),
+                        },
+                        None => cwd.push(target),
+                    }
+                }
                 cwd.to_string_lossy().to_string()
             }
@@ -105,7 +105,10 @@ impl FileStructure {
         self.root = path.to_path_buf();
     }
 
-    pub fn paths_applying_with<F>(&mut self, to: F) -> Result<Vec<(PathBuf, PathBuf)>, Box<dyn std::error::Error>>
+    pub fn paths_applying_with<F>(
+        &mut self,
+        to: F,
+    ) -> Result<Vec<(PathBuf, PathBuf)>, Box<dyn std::error::Error>>
     where
         F: Fn((PathBuf, usize)) -> Result<(PathBuf, PathBuf), Box<dyn std::error::Error>>,
     {
@@ -155,6 +158,7 @@ impl FileStructure {
 
 #[cfg(test)]
 mod tests {
+    use pretty_assertions::assert_eq;
 
     use super::{FileStructure, Res};
     use std::path::PathBuf;
@@ -175,7 +179,8 @@ mod tests {
     fn prepares_and_decorates_source_files_for_copying() {
         let mut res = FileStructure::new();
 
-        res.walk_decorate(fixtures().as_path()).expect("Can not decorate files traversal.");
+        res.walk_decorate(fixtures().as_path())
+            .expect("Can not decorate files traversal.");
 
         assert_eq!(
             res.resources,
@@ -18,7 +18,7 @@ fn ls_lists_regular_files() {
     nu!(
         output,
         cwd(&full_path),
-        "ls | get name | lines| split-column \".\" | get Column2 | str Column2 --to-int | sum | echo $it"
+        r#"ls | get name | lines | split-column "." | get Column2 | str Column2 --to-int | sum | echo $it"#
     );
 
     assert_eq!(output, "30");
@@ -25,7 +25,7 @@ fn accepts_and_creates_directories() {
     let full_path = format!("{}/{}", Playground::root(), sandbox);
 
     nu!(_output, cwd(&full_path), "mkdir dir_1 dir_2 dir_3");
 
     assert!(h::files_exist_at(
         vec![Path::new("dir_1"), Path::new("dir_2"), Path::new("dir_3")],
         PathBuf::from(&full_path)
@@ -8,15 +8,13 @@ use std::path::{Path, PathBuf};
 #[test]
 fn moves_a_file() {
     let sandbox = Playground::setup_for("mv_test_1")
-        .with_files(vec![
-            EmptyFile("andres.txt"),
-        ])
+        .with_files(vec![EmptyFile("andres.txt")])
         .mkdir("expected")
         .test_dir_name();
 
     let full_path = format!("{}/{}", Playground::root(), sandbox);
     let original = format!("{}/{}", full_path, "andres.txt");
     let expected = format!("{}/{}", full_path, "expected/yehuda.txt");
 
     nu!(
         _output,
@@ -31,21 +29,14 @@ fn moves_a_file() {
 #[test]
 fn overwrites_if_moving_to_existing_file() {
     let sandbox = Playground::setup_for("mv_test_2")
-        .with_files(vec![
-            EmptyFile("andres.txt"),
-            EmptyFile("jonathan.txt"),
-        ])
+        .with_files(vec![EmptyFile("andres.txt"), EmptyFile("jonathan.txt")])
         .test_dir_name();
 
     let full_path = format!("{}/{}", Playground::root(), sandbox);
     let original = format!("{}/{}", full_path, "andres.txt");
     let expected = format!("{}/{}", full_path, "jonathan.txt");
 
-    nu!(
-        _output,
-        cwd(&full_path),
-        "mv andres.txt jonathan.txt"
-    );
+    nu!(_output, cwd(&full_path), "mv andres.txt jonathan.txt");
 
     assert!(!h::file_exists_at(PathBuf::from(original)));
     assert!(h::file_exists_at(PathBuf::from(expected)));
@@ -58,14 +49,10 @@ fn moves_a_directory() {
         .test_dir_name();
 
     let full_path = format!("{}/{}", Playground::root(), sandbox);
     let original_dir = format!("{}/{}", full_path, "empty_dir");
     let expected = format!("{}/{}", full_path, "renamed_dir");
 
-    nu!(
-        _output,
-        cwd(&full_path),
-        "mv empty_dir renamed_dir"
-    );
+    nu!(_output, cwd(&full_path), "mv empty_dir renamed_dir");
 
     assert!(!h::dir_exists_at(PathBuf::from(original_dir)));
     assert!(h::dir_exists_at(PathBuf::from(expected)));
@@ -74,22 +61,15 @@ fn moves_a_directory() {
 #[test]
 fn moves_the_file_inside_directory_if_path_to_move_is_existing_directory() {
     let sandbox = Playground::setup_for("mv_test_4")
-        .with_files(vec![
-            EmptyFile("jonathan.txt"),
-        ])
+        .with_files(vec![EmptyFile("jonathan.txt")])
         .mkdir("expected")
         .test_dir_name();
 
     let full_path = format!("{}/{}", Playground::root(), sandbox);
     let original_dir = format!("{}/{}", full_path, "jonathan.txt");
     let expected = format!("{}/{}", full_path, "expected/jonathan.txt");
 
-    nu!(
-        _output,
-        cwd(&full_path),
-        "mv jonathan.txt expected"
-    );
-
+    nu!(_output, cwd(&full_path), "mv jonathan.txt expected");
 
     assert!(!h::file_exists_at(PathBuf::from(original_dir)));
     assert!(h::file_exists_at(PathBuf::from(expected)));
@@ -99,22 +79,15 @@ fn moves_the_file_inside_directory_if_path_to_move_is_existing_directory() {
 fn moves_the_directory_inside_directory_if_path_to_move_is_existing_directory() {
     let sandbox = Playground::setup_for("mv_test_5")
         .within("contributors")
-        .with_files(vec![
-            EmptyFile("jonathan.txt"),
-        ])
+        .with_files(vec![EmptyFile("jonathan.txt")])
         .mkdir("expected")
         .test_dir_name();
 
     let full_path = format!("{}/{}", Playground::root(), sandbox);
     let original_dir = format!("{}/{}", full_path, "contributors");
     let expected = format!("{}/{}", full_path, "expected/contributors");
 
-    nu!(
-        _output,
-        cwd(&full_path),
-        "mv contributors expected"
-    );
-
+    nu!(_output, cwd(&full_path), "mv contributors expected");
 
     assert!(!h::dir_exists_at(PathBuf::from(original_dir)));
     assert!(h::file_exists_at(PathBuf::from(expected)));
@@ -124,14 +97,12 @@ fn moves_the_directory_inside_directory_if_path_to_move_is_existing_directory() {
 fn moves_the_directory_inside_directory_if_path_to_move_is_nonexistent_directory() {
     let sandbox = Playground::setup_for("mv_test_6")
         .within("contributors")
-        .with_files(vec![
-            EmptyFile("jonathan.txt"),
-        ])
+        .with_files(vec![EmptyFile("jonathan.txt")])
         .mkdir("expected")
         .test_dir_name();
 
     let full_path = format!("{}/{}", Playground::root(), sandbox);
     let original_dir = format!("{}/{}", full_path, "contributors");
 
     nu!(
         _output,
@@ -139,7 +110,10 @@ fn moves_the_directory_inside_directory_if_path_to_move_is_nonexistent_directory() {
         "mv contributors expected/this_dir_exists_now/los_tres_amigos"
     );
 
-    let expected = format!("{}/{}", full_path, "expected/this_dir_exists_now/los_tres_amigos");
+    let expected = format!(
+        "{}/{}",
+        full_path, "expected/this_dir_exists_now/los_tres_amigos"
+    );
 
     assert!(!h::dir_exists_at(PathBuf::from(original_dir)));
     assert!(h::file_exists_at(PathBuf::from(expected)));
@@ -168,11 +142,7 @@ fn moves_using_path_with_wildcard() {
     let work_dir = format!("{}/{}", full_path, "work_dir");
     let expected_copies_path = format!("{}/{}", full_path, "expected");
|
||||||
|
|
||||||
nu!(
|
nu!(_output, cwd(&work_dir), "mv ../originals/*.ini ../expected");
|
||||||
_output,
|
|
||||||
cwd(&work_dir),
|
|
||||||
"mv ../originals/*.ini ../expected"
|
|
||||||
);
|
|
||||||
|
|
||||||
assert!(h::files_exist_at(
|
assert!(h::files_exist_at(
|
||||||
vec![
|
vec![
|
||||||
|
@ -185,7 +155,6 @@ fn moves_using_path_with_wildcard() {
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn moves_using_a_glob() {
|
fn moves_using_a_glob() {
|
||||||
let sandbox = Playground::setup_for("mv_test_8")
|
let sandbox = Playground::setup_for("mv_test_8")
|
||||||
|
@ -204,11 +173,7 @@ fn moves_using_a_glob() {
|
||||||
let work_dir = format!("{}/{}", full_path, "work_dir");
|
let work_dir = format!("{}/{}", full_path, "work_dir");
|
||||||
let expected_copies_path = format!("{}/{}", full_path, "expected");
|
let expected_copies_path = format!("{}/{}", full_path, "expected");
|
||||||
|
|
||||||
nu!(
|
nu!(_output, cwd(&work_dir), "mv ../meals/* ../expected");
|
||||||
_output,
|
|
||||||
cwd(&work_dir),
|
|
||||||
"mv ../meals/* ../expected"
|
|
||||||
);
|
|
||||||
|
|
||||||
assert!(h::dir_exists_at(PathBuf::from(meal_dir)));
|
assert!(h::dir_exists_at(PathBuf::from(meal_dir)));
|
||||||
assert!(h::files_exist_at(
|
assert!(h::files_exist_at(
|
||||||
|
@ -219,4 +184,4 @@ fn moves_using_a_glob() {
|
||||||
],
|
],
|
||||||
PathBuf::from(&expected_copies_path)
|
PathBuf::from(&expected_copies_path)
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
|
|
|
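The mv hunks above are formatting-only: multi-line nu!(...) invocations and .with_files(vec![...]) builders are collapsed onto a single line wherever they fit, and one over-long format! call is broken up. A minimal sketch of the resulting test shape, assuming the same helpers and imports as the tests above; the test name, sandbox name and file names below are hypothetical:

    // Illustrative sketch only; nu!, Playground, EmptyFile, cwd and h:: are the
    // repository test helpers exactly as used in the hunks above, while the test
    // name, sandbox name and file names here are invented.
    #[test]
    fn renames_a_file_sketch() {
        let sandbox = Playground::setup_for("mv_sketch")
            .with_files(vec![EmptyFile("old.txt")])
            .test_dir_name();

        let full_path = format!("{}/{}", Playground::root(), sandbox);
        let original = format!("{}/{}", full_path, "old.txt");
        let expected = format!("{}/{}", full_path, "new.txt");

        // After this change the whole invocation fits on a single line.
        nu!(_output, cwd(&full_path), "mv old.txt new.txt");

        assert!(!h::file_exists_at(PathBuf::from(original)));
        assert!(h::file_exists_at(PathBuf::from(expected)));
    }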
@@ -28,7 +28,7 @@ fn open_can_parse_bson_1() {
     nu!(
         output,
         cwd("tests/fixtures/formats"),
-        "open sample.bson | nth 0 | get b | echo $it"
+        "open sample.bson | get root | nth 0 | get b | echo $it"
     );

     assert_eq!(output, "hello");

@@ -39,7 +39,7 @@ fn open_can_parse_bson_2() {
     nu!(
         output,
         cwd("tests/fixtures/formats"),
-        "open sample.bson | nth 6 | get b | get '$binary_subtype' | echo $it "
+        "open sample.bson | get root | nth 6 | get b | get '$binary_subtype' | echo $it "
     );

     assert_eq!(output, "function");

@@ -111,4 +111,4 @@ fn errors_if_file_not_found() {
     );

     assert!(output.contains("File could not be opened"));
 }
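Both bson hunks insert get root ahead of nth, which indicates that open sample.bson now exposes the decoded documents under a root column instead of yielding them as top-level rows. A hedged sketch of the updated access pattern, reusing only values already present in the tests above:

    // Sketch: nothing here goes beyond what the hunks show. The decoded BSON rows
    // are reached through "get root", then indexed with "nth" exactly as before.
    nu!(
        output,
        cwd("tests/fixtures/formats"),
        "open sample.bson | get root | nth 0 | get b | echo $it"
    );

    assert_eq!(output, "hello");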
@@ -90,7 +90,7 @@ fn rm_removes_deeply_nested_directories_with_wildcard_and_recursive_flag() {
         .test_dir_name();

     let full_path = format!("{}/{}", Playground::root(), sandbox);

     nu!(
         _output,
         cwd("tests/fixtures/nuplayground/rm_wildcard_test_2"),

@@ -98,10 +98,7 @@ fn rm_removes_deeply_nested_directories_with_wildcard_and_recursive_flag() {
     );

     assert!(!h::files_exist_at(
-        vec![
-            Path::new("src/parser/parse"),
-            Path::new("src/parser/hir"),
-        ],
+        vec![Path::new("src/parser/parse"), Path::new("src/parser/hir"),],
         PathBuf::from(&full_path)
     ));
 }

@@ -150,7 +147,11 @@ fn rm_errors_if_attempting_to_delete_a_directory_with_content_without_recursive_

     let full_path = format!("{}/{}", Playground::root(), sandbox);

-    nu_error!(output, cwd(&Playground::root()), "rm rm_prevent_directory_removal_without_flag_test");
+    nu_error!(
+        output,
+        cwd(&Playground::root()),
+        "rm rm_prevent_directory_removal_without_flag_test"
+    );

     assert!(h::file_exists_at(PathBuf::from(full_path)));
     assert!(output.contains("is a directory"));

@@ -168,4 +169,4 @@ fn rm_errors_if_attempting_to_delete_two_dot_as_argument() {
     nu_error!(output, cwd(&Playground::root()), "rm ..");

     assert!(output.contains("may not be removed"));
 }
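The rm hunks are re-wraps as well; the behaviour they exercise is unchanged: nu_error! captures the error output of a failing command so the test can assert on its message. A small sketch of that pattern, using only calls that already appear above:

    // Sketch of the error-asserting pattern; nu_error!, cwd and Playground::root()
    // are the existing helpers, and the command and message come from the tests above.
    nu_error!(output, cwd(&Playground::root()), "rm ..");

    assert!(output.contains("may not be removed"));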
@@ -16,14 +16,15 @@ fn can_only_apply_one() {

 #[test]
 fn by_one_with_field_passed() {
-    Playground::setup_for("plugin_inc_by_one_with_field_passed_test")
-        .with_files(vec![FileWithContent(
+    Playground::setup_for("plugin_inc_by_one_with_field_passed_test").with_files(vec![
+        FileWithContent(
             "sample.toml",
             r#"
                 [package]
                 edition = "2018"
             "#,
-        )]);
+        ),
+    ]);

     nu!(
         output,

@@ -36,35 +37,34 @@ fn by_one_with_field_passed() {

 #[test]
 fn by_one_with_no_field_passed() {
-    Playground::setup_for("plugin_inc_by_one_with_no_field_passed_test")
-        .with_files(vec![FileWithContent(
+    Playground::setup_for("plugin_inc_by_one_with_no_field_passed_test").with_files(vec![
+        FileWithContent(
             "sample.toml",
             r#"
                 [package]
                 contributors = "2"
             "#,
-        )]);
+        ),
+    ]);

     nu!(
         output,
         cwd("tests/fixtures/nuplayground/plugin_inc_by_one_with_no_field_passed_test"),
         "open sample.toml | get package.contributors | inc | echo $it"
     );

     assert_eq!(output, "3");
 }


 #[test]
 fn semversion_major_inc() {
-    Playground::setup_for("plugin_inc_major_semversion_test")
-        .with_files(vec![FileWithContent(
-            "sample.toml",
-            r#"
+    Playground::setup_for("plugin_inc_major_semversion_test").with_files(vec![FileWithContent(
+        "sample.toml",
+        r#"
             [package]
             version = "0.1.3"
         "#,
     )]);

     nu!(
         output,

@@ -77,14 +77,13 @@ fn semversion_major_inc() {

 #[test]
 fn semversion_minor_inc() {
-    Playground::setup_for("plugin_inc_minor_semversion_test")
-        .with_files(vec![FileWithContent(
-            "sample.toml",
-            r#"
+    Playground::setup_for("plugin_inc_minor_semversion_test").with_files(vec![FileWithContent(
+        "sample.toml",
+        r#"
             [package]
             version = "0.1.3"
         "#,
     )]);

     nu!(
         output,

@@ -97,14 +96,13 @@ fn semversion_minor_inc() {

 #[test]
 fn semversion_patch_inc() {
-    Playground::setup_for("plugin_inc_patch_semversion_test")
-        .with_files(vec![FileWithContent(
-            "sample.toml",
-            r#"
+    Playground::setup_for("plugin_inc_patch_semversion_test").with_files(vec![FileWithContent(
+        "sample.toml",
+        r#"
             [package]
             version = "0.1.3"
         "#,
     )]);

     nu!(
         output,

@@ -117,14 +115,15 @@ fn semversion_patch_inc() {

 #[test]
 fn semversion_without_passing_field() {
-    Playground::setup_for("plugin_inc_semversion_without_passing_field_test")
-        .with_files(vec![FileWithContent(
+    Playground::setup_for("plugin_inc_semversion_without_passing_field_test").with_files(vec![
+        FileWithContent(
             "sample.toml",
             r#"
                 [package]
                 version = "0.1.3"
             "#,
-        )]);
+        ),
+    ]);

     nu!(
         output,

@@ -133,4 +132,4 @@ fn semversion_without_passing_field() {
     );

     assert_eq!(output, "0.1.4");
 }
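Every inc-plugin hunk above is a rustfmt-style re-wrap of the same fixture pattern: Playground::setup_for(...) writes a small sample.toml through with_files(vec![FileWithContent(...)]), and the test then pipes one of its fields through the inc plugin. A sketch of that pattern after the re-wrap, assuming the same helpers; only the sandbox name inc_sketch is invented:

    // Sketch mirroring by_one_with_no_field_passed above; the fixture content, the
    // pipeline and the expected value are copied from it, the sandbox name is hypothetical.
    Playground::setup_for("inc_sketch").with_files(vec![FileWithContent(
        "sample.toml",
        r#"
            [package]
            contributors = "2"
        "#,
    )]);

    nu!(
        output,
        cwd("tests/fixtures/nuplayground/inc_sketch"),
        "open sample.toml | get package.contributors | inc | echo $it"
    );

    assert_eq!(output, "3");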
@@ -106,6 +106,17 @@ fn can_convert_table_to_json_text_and_from_json_text_back_into_table() {
     assert_eq!(output, "markup");
 }

+#[test]
+fn can_convert_json_text_to_bson_and_back_into_table() {
+    nu!(
+        output,
+        cwd("tests/fixtures/formats"),
+        "open sample.bson | to-bson | from-bson | get root | nth 1 | get b | echo $it"
+    );
+
+    assert_eq!(output, "whel");
+}
+
 #[test]
 fn can_convert_table_to_toml_text_and_from_toml_text_back_into_table() {
     nu!(
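The new test added above checks a full round trip: the checked-in sample.bson fixture is opened, serialized back with to-bson, re-parsed with from-bson, and a field is read out through the same get root access introduced earlier. A condensed sketch of the round-trip assertion, using only the pipeline and value from the new test:

    // Sketch of the round-trip check introduced above; the pipeline and expected
    // value are taken verbatim from the new test, only the test name would differ.
    nu!(
        output,
        cwd("tests/fixtures/formats"),
        "open sample.bson | to-bson | from-bson | get root | nth 1 | get b | echo $it"
    );

    assert_eq!(output, "whel");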
BIN tests/fixtures/formats/sample.bson (vendored)
Binary file not shown.