mirror of https://github.com/nushell/nushell

Merge branch 'master' into expand-tilde

commit 3750a04cfc
22 changed files with 515 additions and 230 deletions

src/cli.rs | 35
@@ -96,7 +96,7 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel
fn load_plugins_in_dir(path: &std::path::PathBuf, context: &mut Context) -> Result<(), ShellError> {
let re_bin = Regex::new(r"^nu_plugin_[A-Za-z_]+$")?;
let re_exe = Regex::new(r"^nu_plugin_[A-Za-z_]+\.exe$")?;
let re_exe = Regex::new(r"^nu_plugin_[A-Za-z_]+\.(exe|bat)$")?;
trace!("Looking for plugins in {:?}", path);

@@ -130,19 +130,24 @@ fn load_plugins(context: &mut Context) -> Result<(), ShellError> {
None => println!("PATH is not defined in the environment."),
}
// Also use our debug output for now
let mut path = std::path::PathBuf::from(".");
path.push("target");
path.push("debug");
#[cfg(debug_assertions)]
{
// Use our debug plugins in debug mode
let mut path = std::path::PathBuf::from(".");
path.push("target");
path.push("debug");
let _ = load_plugins_in_dir(&path, context);
}
let _ = load_plugins_in_dir(&path, context);
#[cfg(not(debug_assertions))]
{
// Use our release plugins in release mode
let mut path = std::path::PathBuf::from(".");
path.push("target");
path.push("release");
// Also use our release output for now
let mut path = std::path::PathBuf::from(".");
path.push("target");
path.push("release");
let _ = load_plugins_in_dir(&path, context);
let _ = load_plugins_in_dir(&path, context);
}
Ok(())
}

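A minimal standalone sketch of the cfg-gated lookup the hunk above introduces: debug builds also scan target/debug for plugin binaries, release builds scan target/release. The helper name below is illustrative only (it is not nushell's API); it just isolates the #[cfg(debug_assertions)] / #[cfg(not(debug_assertions))] split used in the diff.

use std::path::PathBuf;

// Illustrative sketch: pick the extra plugin directory by build profile.
fn extra_plugin_dir() -> PathBuf {
    let mut path = PathBuf::from(".");
    path.push("target");

    #[cfg(debug_assertions)]
    path.push("debug");

    #[cfg(not(debug_assertions))]
    path.push("release");

    path
}

fn main() {
    println!("would also look for plugins in {:?}", extra_plugin_dir());
}
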
@@ -171,6 +176,7 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
whole_stream_command(Reverse),
whole_stream_command(Trim),
whole_stream_command(ToArray),
whole_stream_command(ToBSON),
whole_stream_command(ToCSV),
whole_stream_command(ToJSON),
whole_stream_command(ToTOML),

@@ -226,6 +232,7 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
let _ = ansi_term::enable_ansi_support();
}
// we are ok if history does not exist
let _ = rl.load_history("history.txt");
let ctrl_c = Arc::new(AtomicBool::new(false));

@@ -306,7 +313,9 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
}
ctrlcbreak = false;
}
rl.save_history("history.txt")?;
// we are ok if we can not save history
let _ = rl.save_history("history.txt");
Ok(())
}

@@ -35,8 +35,8 @@ crate mod pick;
crate mod plugin;
crate mod prev;
crate mod ps;
crate mod reverse;
crate mod reject;
crate mod reverse;
crate mod rm;
crate mod save;
crate mod shells;

@@ -48,6 +48,7 @@ crate mod split_row;
crate mod table;
crate mod tags;
crate mod to_array;
crate mod to_bson;
crate mod to_csv;
crate mod to_json;
crate mod to_toml;

@@ -104,6 +105,7 @@ crate use split_row::SplitRow;
crate use table::Table;
crate use tags::Tags;
crate use to_array::ToArray;
crate use to_bson::ToBSON;
crate use to_csv::ToCSV;
crate use to_json::ToJSON;
crate use to_toml::ToTOML;

@@ -2,7 +2,7 @@ use crate::commands::WholeStreamCommand;
use crate::object::base::OF64;
use crate::object::{Primitive, TaggedDictBuilder, Value};
use crate::prelude::*;
use bson::{decode_document, Bson, spec::BinarySubtype};
use bson::{decode_document, spec::BinarySubtype, Bson};
pub struct FromBSON;

@@ -47,71 +47,80 @@ fn convert_bson_value_to_nu_value(v: &Bson, tag: impl Into<Tag>) -> Tagged<Value
Bson::Boolean(b) => Value::Primitive(Primitive::Boolean(*b)).tagged(tag),
Bson::Null => Value::Primitive(Primitive::String(String::from(""))).tagged(tag),
Bson::RegExp(r, opts) => {
let mut collected = TaggedDictBuilder::new(tag);
collected.insert_tagged(
"$regex".to_string(),
Value::Primitive(Primitive::String(String::from(r))).tagged(tag),
);
collected.insert_tagged(
"$options".to_string(),
Value::Primitive(Primitive::String(String::from(opts))).tagged(tag),
);
collected.into_tagged_value()
let mut collected = TaggedDictBuilder::new(tag);
collected.insert_tagged(
"$regex".to_string(),
Value::Primitive(Primitive::String(String::from(r))).tagged(tag),
);
collected.insert_tagged(
"$options".to_string(),
Value::Primitive(Primitive::String(String::from(opts))).tagged(tag),
);
collected.into_tagged_value()
}
// TODO: Add Int32 to nushell?
Bson::I32(n) => Value::Primitive(Primitive::Int(*n as i64)).tagged(tag),
Bson::I64(n) => Value::Primitive(Primitive::Int(*n as i64)).tagged(tag),
Bson::JavaScriptCode(js) => {
let mut collected = TaggedDictBuilder::new(tag);
collected.insert_tagged(
"$javascript".to_string(),
Value::Primitive(Primitive::String(String::from(js))).tagged(tag),
);
collected.into_tagged_value()
let mut collected = TaggedDictBuilder::new(tag);
collected.insert_tagged(
"$javascript".to_string(),
Value::Primitive(Primitive::String(String::from(js))).tagged(tag),
);
collected.into_tagged_value()
}
Bson::JavaScriptCodeWithScope(js, doc) => {
let mut collected = TaggedDictBuilder::new(tag);
collected.insert_tagged(
"$javascript".to_string(),
Value::Primitive(Primitive::String(String::from(js))).tagged(tag),
);
collected.insert_tagged(
"$scope".to_string(),
convert_bson_value_to_nu_value(&Bson::Document(doc.to_owned()), tag),
);
collected.into_tagged_value()
let mut collected = TaggedDictBuilder::new(tag);
collected.insert_tagged(
"$javascript".to_string(),
Value::Primitive(Primitive::String(String::from(js))).tagged(tag),
);
collected.insert_tagged(
"$scope".to_string(),
convert_bson_value_to_nu_value(&Bson::Document(doc.to_owned()), tag),
);
collected.into_tagged_value()
}
Bson::TimeStamp(ts) => {
let mut collected = TaggedDictBuilder::new(tag);
collected.insert_tagged(
"$timestamp".to_string(),
Value::Primitive(Primitive::Int(*ts as i64)).tagged(tag),
);
collected.into_tagged_value()
let mut collected = TaggedDictBuilder::new(tag);
collected.insert_tagged(
"$timestamp".to_string(),
Value::Primitive(Primitive::Int(*ts as i64)).tagged(tag),
);
collected.into_tagged_value()
}
Bson::Binary(bst, bytes) => {
let mut collected = TaggedDictBuilder::new(tag);
collected.insert_tagged(
"$binary_subtype".to_string(),
match bst {
BinarySubtype::UserDefined(u) => Value::Primitive(Primitive::Int(*u as i64)),
_ => Value::Primitive(Primitive::String(binary_subtype_to_string(*bst))),
}.tagged(tag)
);
collected.insert_tagged(
"$binary".to_string(),
Value::Binary(bytes.to_owned()).tagged(tag),
);
collected.into_tagged_value()
let mut collected = TaggedDictBuilder::new(tag);
collected.insert_tagged(
"$binary_subtype".to_string(),
match bst {
BinarySubtype::UserDefined(u) => Value::Primitive(Primitive::Int(*u as i64)),
_ => Value::Primitive(Primitive::String(binary_subtype_to_string(*bst))),
}
.tagged(tag),
);
collected.insert_tagged(
"$binary".to_string(),
Value::Binary(bytes.to_owned()).tagged(tag),
);
collected.into_tagged_value()
}
Bson::ObjectId(obj_id) => {
let mut collected = TaggedDictBuilder::new(tag);
collected.insert_tagged(
"$object_id".to_string(),
Value::Primitive(Primitive::String(obj_id.to_hex())).tagged(tag),
);
collected.into_tagged_value()
}
Bson::ObjectId(obj_id) => Value::Primitive(Primitive::String(obj_id.to_hex())).tagged(tag),
Bson::UtcDatetime(dt) => Value::Primitive(Primitive::Date(*dt)).tagged(tag),
Bson::Symbol(s) => {
let mut collected = TaggedDictBuilder::new(tag);
collected.insert_tagged(
"$symbol".to_string(),
Value::Primitive(Primitive::String(String::from(s))).tagged(tag),
);
collected.into_tagged_value()
let mut collected = TaggedDictBuilder::new(tag);
collected.insert_tagged(
"$symbol".to_string(),
Value::Primitive(Primitive::String(String::from(s))).tagged(tag),
);
collected.into_tagged_value()
}
}
}

@@ -125,7 +134,8 @@ fn binary_subtype_to_string(bst: BinarySubtype) -> String {
BinarySubtype::Uuid => "uuid",
BinarySubtype::Md5 => "md5",
_ => unreachable!(),
}.to_string()
}
.to_string()
}
#[derive(Debug)]

@@ -44,7 +44,7 @@ fn last(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, S
"Value is too low",
"expected a positive integer",
args.expect_nth(0)?.span(),
))
));
}
let stream = async_stream_block! {

@@ -59,7 +59,7 @@ fn save(
// If there is no filename, check the metadata for the origin filename
if input.len() > 0 {
let origin = input[0].origin();
match origin.map(|x| source_map.get(&x)).flatten() {
match origin.and_then(|x| source_map.get(&x)) {
Some(path) => match path {
SpanSource::File(file) => {
full_path.push(Path::new(file));

@@ -38,7 +38,7 @@ fn tags(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream,
dict.insert("end", Value::int(span.end as i64));
tags.insert_tagged("span", dict.into_tagged_value());
match origin.map(|x| source_map.get(&x)).flatten() {
match origin.and_then(|x| source_map.get(&x)) {
Some(SpanSource::File(source)) => {
tags.insert("origin", Value::string(source));
}

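The save.rs and tags.rs hunks above (and the binaryview and textview plugins further down) replace origin.map(|x| source_map.get(&x)).flatten() with origin.and_then(|x| source_map.get(&x)). A minimal sketch of why the two forms are interchangeable for Option, using stand-in values rather than nushell's types; Option::flatten was still behind a feature gate at the time, which is why the #![feature(option_flattening)] attributes are dropped later in this commit.

use std::collections::HashMap;

fn main() {
    // Stand-ins for the origin id and the SourceMap lookup in the hunks above.
    let source_map: HashMap<u32, &str> = HashMap::from([(1, "sample.bson")]);
    let origin: Option<u32> = Some(1);

    // .map(..) wraps the lookup result in another Option and .flatten() undoes it;
    // .and_then(..) does the same in one step and needs no unstable feature.
    let via_map_flatten = origin.map(|x| source_map.get(&x)).flatten();
    let via_and_then = origin.and_then(|x| source_map.get(&x));

    assert_eq!(via_map_flatten, via_and_then);
}
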
src/commands/to_bson.rs | 231 (new file)
@@ -0,0 +1,231 @@
use crate::commands::WholeStreamCommand;
use crate::object::{Dictionary, Primitive, Value};
use crate::prelude::*;
use bson::{encode_document, oid::ObjectId, spec::BinarySubtype, Bson, Document};
use std::convert::TryInto;
pub struct ToBSON;
impl WholeStreamCommand for ToBSON {
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
to_bson(args, registry)
}
fn name(&self) -> &str {
"to-bson"
}
fn signature(&self) -> Signature {
Signature::build("to-bson")
}
}
pub fn value_to_bson_value(v: &Value) -> Bson {
match v {
Value::Primitive(Primitive::Boolean(b)) => Bson::Boolean(*b),
Value::Primitive(Primitive::Bytes(b)) => Bson::I64(*b as i64),
Value::Primitive(Primitive::Date(d)) => Bson::UtcDatetime(*d),
Value::Primitive(Primitive::EndOfStream) => Bson::Null,
Value::Primitive(Primitive::BeginningOfStream) => Bson::Null,
Value::Primitive(Primitive::Float(f)) => Bson::FloatingPoint(f.into_inner()),
Value::Primitive(Primitive::Int(i)) => Bson::I64(*i),
Value::Primitive(Primitive::Nothing) => Bson::Null,
Value::Primitive(Primitive::String(s)) => Bson::String(s.clone()),
Value::Primitive(Primitive::Path(s)) => Bson::String(s.display().to_string()),
Value::List(l) => Bson::Array(l.iter().map(|x| value_to_bson_value(x)).collect()),
Value::Block(_) => Bson::Null,
Value::Binary(b) => Bson::Binary(BinarySubtype::Generic, b.clone()),
Value::Object(o) => object_value_to_bson(o),
}
}
// object_value_to_bson handles all Objects, even those that correspond to special
// types (things like regex or javascript code).
fn object_value_to_bson(o: &Dictionary) -> Bson {
let mut it = o.entries.iter();
if it.len() > 2 {
return generic_object_value_to_bson(o);
}
match it.next() {
Some((regex, tagged_regex_value)) if regex == "$regex" => match it.next() {
Some((options, tagged_opts_value)) if options == "$options" => {
let r: Result<String, _> = tagged_regex_value.try_into();
let opts: Result<String, _> = tagged_opts_value.try_into();
if r.is_err() || opts.is_err() {
generic_object_value_to_bson(o)
} else {
Bson::RegExp(r.unwrap(), opts.unwrap())
}
}
_ => generic_object_value_to_bson(o),
},
Some((javascript, tagged_javascript_value)) if javascript == "$javascript" => {
match it.next() {
Some((scope, tagged_scope_value)) if scope == "$scope" => {
let js: Result<String, _> = tagged_javascript_value.try_into();
let s: Result<&Dictionary, _> = tagged_scope_value.try_into();
if js.is_err() || s.is_err() {
generic_object_value_to_bson(o)
} else {
if let Bson::Document(doc) = object_value_to_bson(s.unwrap()) {
Bson::JavaScriptCodeWithScope(js.unwrap(), doc)
} else {
generic_object_value_to_bson(o)
}
}
}
None => {
let js: Result<String, _> = tagged_javascript_value.try_into();
if js.is_err() {
generic_object_value_to_bson(o)
} else {
Bson::JavaScriptCode(js.unwrap())
}
}
_ => generic_object_value_to_bson(o),
}
}
Some((timestamp, tagged_timestamp_value)) if timestamp == "$timestamp" => {
let ts: Result<i64, _> = tagged_timestamp_value.try_into();
if ts.is_err() {
generic_object_value_to_bson(o)
} else {
Bson::TimeStamp(ts.unwrap())
}
}
Some((binary_subtype, tagged_binary_subtype_value))
if binary_subtype == "$binary_subtype" =>
{
match it.next() {
Some((binary, tagged_bin_value)) if binary == "$binary" => {
let bst = get_binary_subtype(tagged_binary_subtype_value);
let bin: Result<Vec<u8>, _> = tagged_bin_value.try_into();
if bst.is_none() || bin.is_err() {
generic_object_value_to_bson(o)
} else {
Bson::Binary(bst.unwrap(), bin.unwrap())
}
}
_ => generic_object_value_to_bson(o),
}
}
Some((object_id, tagged_object_id_value)) if object_id == "$object_id" => {
let obj_id: Result<String, _> = tagged_object_id_value.try_into();
if obj_id.is_err() {
generic_object_value_to_bson(o)
} else {
let obj_id = ObjectId::with_string(&obj_id.unwrap());
if obj_id.is_err() {
generic_object_value_to_bson(o)
} else {
Bson::ObjectId(obj_id.unwrap())
}
}
}
Some((symbol, tagged_symbol_value)) if symbol == "$symbol" => {
let sym: Result<String, _> = tagged_symbol_value.try_into();
if sym.is_err() {
generic_object_value_to_bson(o)
} else {
Bson::Symbol(sym.unwrap())
}
}
_ => generic_object_value_to_bson(o),
}
}
fn get_binary_subtype<'a>(tagged_value: &'a Tagged<Value>) -> Option<BinarySubtype> {
match tagged_value.item() {
Value::Primitive(Primitive::String(s)) => Some(match s.as_ref() {
"generic" => BinarySubtype::Generic,
"function" => BinarySubtype::Function,
"binary_old" => BinarySubtype::BinaryOld,
"uuid_old" => BinarySubtype::UuidOld,
"uuid" => BinarySubtype::Uuid,
"md5" => BinarySubtype::Md5,
_ => unreachable!(),
}),
Value::Primitive(Primitive::Int(i)) => Some(BinarySubtype::UserDefined(*i as u8)),
_ => None,
}
}
// generic_object_value_bson handles any Object that does not
// correspond to a special bson type (things like regex or javascript code).
fn generic_object_value_to_bson(o: &Dictionary) -> Bson {
let mut doc = Document::new();
for (k, v) in o.entries.iter() {
doc.insert(k.clone(), value_to_bson_value(v));
}
Bson::Document(doc)
}
fn shell_encode_document(
writer: &mut Vec<u8>,
doc: Document,
span: Span,
) -> Result<(), ShellError> {
match encode_document(writer, &doc) {
Err(e) => Err(ShellError::labeled_error(
format!("Failed to encode document due to: {:?}", e),
"requires BSON-compatible document",
span,
)),
_ => Ok(()),
}
}
fn bson_value_to_bytes(bson: Bson, span: Span) -> Result<Vec<u8>, ShellError> {
let mut out = Vec::new();
match bson {
Bson::Array(a) => {
for v in a.into_iter() {
match v {
Bson::Document(d) => shell_encode_document(&mut out, d, span)?,
_ => {
return Err(ShellError::labeled_error(
format!("All top level values must be Documents, got {:?}", v),
"requires BSON-compatible document",
span,
))
}
}
}
}
Bson::Document(d) => shell_encode_document(&mut out, d, span)?,
_ => {
return Err(ShellError::labeled_error(
format!("All top level values must be Documents, got {:?}", bson),
"requires BSON-compatible document",
span,
))
}
}
Ok(out)
}
fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let args = args.evaluate_once(registry)?;
let name_span = args.name_span();
let out = args.input;
Ok(out
.values
.map(
move |a| match bson_value_to_bytes(value_to_bson_value(&a), name_span) {
Ok(x) => ReturnSuccess::value(Value::Binary(x).simple_spanned(name_span)),
_ => Err(ShellError::labeled_error_with_secondary(
"Expected an object with BSON-compatible structure from pipeline",
"requires BSON-compatible input: Must be Array or Object",
name_span,
format!("{} originates from here", a.item.type_name()),
a.span(),
)),
},
)
.to_output_stream())
}

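Read together with the from_bson.rs changes above, the dollar-prefixed keys are the shared convention between the two commands: from-bson flattens non-document BSON values into objects such as { $regex, $options } or { $binary_subtype, $binary }, and object_value_to_bson looks for exactly those keys to rebuild the original BSON variant instead of a generic document. A small sketch of the round-trip idea using only the bson crate variants that appear in this diff (the nu-side Value and Dictionary plumbing is omitted, and the variant names assume the bson version this commit builds against).

use bson::{spec::BinarySubtype, Bson};

fn main() {
    // from-bson: Bson::RegExp("a+", "i") becomes { "$regex": "a+", "$options": "i" };
    // to-bson reads those two keys back and rebuilds the same variant.
    let regex = Bson::RegExp("a+".to_string(), "i".to_string());

    // Same idea for binaries: { "$binary_subtype": "generic", "$binary": [1, 2] }.
    let binary = Bson::Binary(BinarySubtype::Generic, vec![1, 2]);

    println!("{:?} {:?}", regex, binary);
}
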
@@ -5,8 +5,6 @@ use crate::parser::registry::Signature;
use crate::prelude::*;
use indexmap::IndexMap;
const VERSION: &'static str = env!("CARGO_PKG_VERSION");
pub struct Version;
impl WholeStreamCommand for Version {

@@ -34,7 +32,7 @@ pub fn date(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
let mut indexmap = IndexMap::new();
indexmap.insert(
"version".to_string(),
Tagged::from_simple_spanned_item(Value::string(VERSION.to_string()), span),
Tagged::from_simple_spanned_item(Value::string(clap::crate_version!()), span),
);
let value = Tagged::from_simple_spanned_item(Value::Object(Dictionary::from(indexmap)), span);

@@ -1,17 +1,19 @@
use crate::format::RenderView;
use crate::object::Value;
use crate::prelude::*;
use ansi_term::Color;
use derive_new::new;
use prettytable::format::{FormatBuilder, LinePosition, LineSeparator};
use textwrap::fill;
use prettytable::format::{FormatBuilder, LinePosition, LineSeparator};
use prettytable::{color, Attr, Cell, Row, Table};
#[derive(Debug, new)]
pub struct TableView {
// List of header cell values:
headers: Vec<String>,
entries: Vec<Vec<String>>,
// List of rows of cells, each containing value and prettytable style-string:
entries: Vec<Vec<(String, &'static str)>>,
}
impl TableView {

@@ -41,21 +43,29 @@ impl TableView {
let mut entries = vec![];
for (idx, value) in values.iter().enumerate() {
let mut row: Vec<String> = match value {
let mut row: Vec<(String, &'static str)> = match value {
Tagged {
item: Value::Object(..),
..
} => headers
.iter()
.enumerate()
.map(|(i, d)| value.get_data(d).borrow().format_leaf(Some(&headers[i])))
.map(|(i, d)| {
let data = value.get_data(d);
return (
data.borrow().format_leaf(Some(&headers[i])),
data.borrow().style_leaf(),
);
})
.collect(),
x => vec![x.format_leaf(None)],
x => vec![(x.format_leaf(None), x.style_leaf())],
};
if values.len() > 1 {
row.insert(0, format!("{}", idx.to_string()));
// Indices are black, bold, right-aligned:
row.insert(0, (format!("{}", idx.to_string()), "Fdbr"));
}
entries.push(row);
}

@@ -66,13 +76,15 @@ impl TableView {
}
for head in 0..headers.len() {
let mut current_row_max = 0;
let mut current_col_max = 0;
for row in 0..values.len() {
if head > entries[row].len() && entries[row][head].len() > current_row_max {
current_row_max = entries[row][head].len();
let value_length = entries[row][head].0.len();
if head > entries[row].len() && value_length > current_col_max {
current_col_max = value_length;
}
}
max_per_column.push(std::cmp::max(current_row_max, headers[head].len()));
max_per_column.push(std::cmp::max(current_col_max, headers[head].len()));
}
// Different platforms want different amounts of buffer, not sure why

@@ -90,7 +102,7 @@ impl TableView {
headers.push("...".to_string());
for row in 0..entries.len() {
entries[row].push("...".to_string());
entries[row].push(("...".to_string(), "c")); // ellipsis is centred
}
}

@@ -167,19 +179,11 @@ impl TableView {
if max_per_column[head] > max_naive_column_width {
headers[head] = fill(&headers[head], max_column_width);
for row in 0..entries.len() {
entries[row][head] = fill(&entries[row][head], max_column_width);
entries[row][head].0 = fill(&entries[row][head].0, max_column_width);
}
}
}
// Paint the number column, if it exists
if entries.len() > 1 {
for row in 0..entries.len() {
entries[row][0] =
format!("{}", Color::Black.bold().paint(entries[row][0].to_string()));
}
}
Some(TableView { headers, entries })
}
}

@@ -191,16 +195,15 @@ impl RenderView for TableView {
}
let mut table = Table::new();
let fb = FormatBuilder::new()
.separator(LinePosition::Top, LineSeparator::new('-', '+', ' ', ' '))
.separator(LinePosition::Bottom, LineSeparator::new('-', '+', ' ', ' '))
.separator(LinePosition::Title, LineSeparator::new('-', '+', '|', '|'))
.column_separator('|')
.padding(1, 1);
//table.set_format(*prettytable::format::consts::FORMAT_NO_LINESEP_WITH_TITLE);
table.set_format(fb.build());
table.set_format(
FormatBuilder::new()
.column_separator('│')
.separator(LinePosition::Top, LineSeparator::new('━', '┯', ' ', ' '))
.separator(LinePosition::Title, LineSeparator::new('─', '┼', ' ', ' '))
.separator(LinePosition::Bottom, LineSeparator::new('━', '┷', ' ', ' '))
.padding(1, 1)
.build(),
);
let header: Vec<Cell> = self
.headers

@@ -215,7 +218,11 @@ impl RenderView for TableView {
table.set_titles(Row::new(header));
for row in &self.entries {
table.add_row(Row::new(row.iter().map(|h| Cell::new(h)).collect()));
table.add_row(Row::new(
row.iter()
.map(|(v, s)| Cell::new(v).style_spec(s))
.collect(),
));
}
table.print_term(&mut *host.out_terminal()).unwrap();

@@ -2,8 +2,8 @@ use crate::format::RenderView;
use crate::object::Value;
use crate::prelude::*;
use derive_new::new;
use prettytable::format::{FormatBuilder, LinePosition, LineSeparator};
use prettytable::format::{FormatBuilder, LinePosition, LineSeparator};
use prettytable::{color, Attr, Cell, Row, Table};
#[derive(new)]

@@ -47,14 +47,15 @@ impl RenderView for VTableView {
}
let mut table = Table::new();
let fb = FormatBuilder::new()
.separator(LinePosition::Top, LineSeparator::new('-', '+', ' ', ' '))
.separator(LinePosition::Bottom, LineSeparator::new('-', '+', ' ', ' '))
.column_separator('|')
.padding(1, 1);
table.set_format(fb.build());
table.set_format(
FormatBuilder::new()
.column_separator('│')
.separator(LinePosition::Top, LineSeparator::new('━', '┯', ' ', ' '))
.separator(LinePosition::Title, LineSeparator::new('─', '┼', ' ', ' '))
.separator(LinePosition::Bottom, LineSeparator::new('━', '┷', ' ', ' '))
.padding(1, 1)
.build(),
);
for row in &self.entries {
table.add_row(Row::new(

src/git.rs | 14
@@ -7,15 +7,13 @@ pub fn current_branch() -> Option<String> {
Ok(repo) => {
let r = repo.head();
match r {
Ok(r) => {
match r.shorthand() {
Some(s) => Some(s.to_string()),
None => None,
}
Ok(r) => match r.shorthand() {
Some(s) => Some(s.to_string()),
None => None,
},
_ => None
_ => None,
}
},
_ => None
}
_ => None,
}
}

@@ -3,7 +3,6 @@
#![feature(generators)]
#![feature(try_trait)]
#![feature(bind_by_move_pattern_guards)]
#![feature(option_flattening)]
#![feature(specialization)]
#![feature(proc_macro_hygiene)]

@@ -94,13 +94,13 @@ impl Primitive {
let byte = byte_unit::Byte::from_bytes(*b as u128);
if byte.get_bytes() == 0u128 {
return "<empty>".to_string();
return "—".to_string();
}
let byte = byte.get_appropriate_unit(false);
match byte.get_unit() {
byte_unit::ByteUnit::B => format!("{}", byte.format(0)),
byte_unit::ByteUnit::B => format!("{} B ", byte.get_value()),
_ => format!("{}", byte.format(1)),
}
}

@@ -118,6 +118,14 @@ impl Primitive {
Primitive::Date(d) => format!("{}", d.humanize()),
}
}
pub fn style(&self) -> &'static str {
match self {
Primitive::Bytes(0) => "c", // centre 'missing' indicator
Primitive::Int(_) | Primitive::Bytes(_) | Primitive::Float(_) => "r",
_ => "",
}
}
}
#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Clone, new, Serialize)]

@@ -235,6 +243,48 @@ impl std::convert::TryFrom<&'a Tagged<Value>> for i64 {
}
}
impl std::convert::TryFrom<&'a Tagged<Value>> for String {
type Error = ShellError;
fn try_from(value: &'a Tagged<Value>) -> Result<String, ShellError> {
match value.item() {
Value::Primitive(Primitive::String(s)) => Ok(s.clone()),
v => Err(ShellError::type_error(
"String",
value.copy_span(v.type_name()),
)),
}
}
}
impl std::convert::TryFrom<&'a Tagged<Value>> for Vec<u8> {
type Error = ShellError;
fn try_from(value: &'a Tagged<Value>) -> Result<Vec<u8>, ShellError> {
match value.item() {
Value::Binary(b) => Ok(b.clone()),
v => Err(ShellError::type_error(
"Binary",
value.copy_span(v.type_name()),
)),
}
}
}
impl std::convert::TryFrom<&'a Tagged<Value>> for &'a crate::object::Dictionary {
type Error = ShellError;
fn try_from(value: &'a Tagged<Value>) -> Result<&'a crate::object::Dictionary, ShellError> {
match value.item() {
Value::Object(d) => Ok(d),
v => Err(ShellError::type_error(
"Dictionary",
value.copy_span(v.type_name()),
)),
}
}
}
#[derive(Serialize, Deserialize)]
pub enum Switch {
Present,

@@ -460,6 +510,13 @@ impl Value {
}
}
crate fn style_leaf(&self) -> &'static str {
match self {
Value::Primitive(p) => p.style(),
_ => "",
}
}
#[allow(unused)]
crate fn compare(&self, operator: &Operator, other: &Value) -> Result<bool, (String, String)> {
match operator {

@@ -1,4 +1,3 @@
#![feature(option_flattening)]
use crossterm::{cursor, terminal, Attribute, RawScreen};
use nu::{serve_plugin, CallInfo, Plugin, ShellError, Signature, SpanSource, Tagged, Value};
use pretty_hex::*;

@@ -21,7 +20,7 @@ impl Plugin for BinaryView {
let value_origin = v.origin();
match v.item {
Value::Binary(b) => {
let source = value_origin.map(|x| call_info.source_map.get(&x)).flatten();
let source = value_origin.and_then(|x| call_info.source_map.get(&x));
let _ = view_binary(&b, source, call_info.args.has("lores"));
}
_ => {}

@@ -1,5 +1,3 @@
#![feature(option_flattening)]
use crossterm::{cursor, terminal, RawScreen};
use crossterm::{InputEvent, KeyEvent};
use nu::{

@@ -210,7 +208,7 @@ fn view_text_value(value: &Tagged<Value>, source_map: &SourceMap) {
let value_origin = value.origin();
match value.item {
Value::Primitive(Primitive::String(ref s)) => {
let source = value_origin.map(|x| source_map.get(&x)).flatten();
let source = value_origin.and_then(|x| source_map.get(&x));
if let Some(source) = source {
let extension: Option<String> = match source {

@@ -25,7 +25,7 @@ fn accepts_and_creates_directories() {
let full_path = format!("{}/{}", Playground::root(), sandbox);
nu!(_output, cwd(&full_path), "mkdir dir_1 dir_2 dir_3");
assert!(h::files_exist_at(
vec![Path::new("dir_1"), Path::new("dir_2"), Path::new("dir_3")],
PathBuf::from(&full_path)

@@ -8,15 +8,13 @@ use std::path::{Path, PathBuf};
#[test]
fn moves_a_file() {
let sandbox = Playground::setup_for("mv_test_1")
.with_files(vec![
EmptyFile("andres.txt"),
])
.with_files(vec![EmptyFile("andres.txt")])
.mkdir("expected")
.test_dir_name();
let full_path = format!("{}/{}", Playground::root(), sandbox);
let original = format!("{}/{}", full_path, "andres.txt");
let expected = format!("{}/{}", full_path, "expected/yehuda.txt");
let expected = format!("{}/{}", full_path, "expected/yehuda.txt");
nu!(
_output,

@@ -31,21 +29,14 @@ fn moves_a_file() {
#[test]
fn overwrites_if_moving_to_existing_file() {
let sandbox = Playground::setup_for("mv_test_2")
.with_files(vec![
EmptyFile("andres.txt"),
EmptyFile("jonathan.txt"),
])
.with_files(vec![EmptyFile("andres.txt"), EmptyFile("jonathan.txt")])
.test_dir_name();
let full_path = format!("{}/{}", Playground::root(), sandbox);
let original = format!("{}/{}", full_path, "andres.txt");
let expected = format!("{}/{}", full_path, "jonathan.txt");
let expected = format!("{}/{}", full_path, "jonathan.txt");
nu!(
_output,
cwd(&full_path),
"mv andres.txt jonathan.txt"
);
nu!(_output, cwd(&full_path), "mv andres.txt jonathan.txt");
assert!(!h::file_exists_at(PathBuf::from(original)));
assert!(h::file_exists_at(PathBuf::from(expected)));

@@ -58,14 +49,10 @@ fn moves_a_directory() {
.test_dir_name();
let full_path = format!("{}/{}", Playground::root(), sandbox);
let original_dir = format!("{}/{}", full_path, "empty_dir");
let expected = format!("{}/{}", full_path, "renamed_dir");
let original_dir = format!("{}/{}", full_path, "empty_dir");
let expected = format!("{}/{}", full_path, "renamed_dir");
nu!(
_output,
cwd(&full_path),
"mv empty_dir renamed_dir"
);
nu!(_output, cwd(&full_path), "mv empty_dir renamed_dir");
assert!(!h::dir_exists_at(PathBuf::from(original_dir)));
assert!(h::dir_exists_at(PathBuf::from(expected)));

@@ -74,22 +61,15 @@ fn moves_a_directory() {
#[test]
fn moves_the_file_inside_directory_if_path_to_move_is_existing_directory() {
let sandbox = Playground::setup_for("mv_test_4")
.with_files(vec![
EmptyFile("jonathan.txt"),
])
.with_files(vec![EmptyFile("jonathan.txt")])
.mkdir("expected")
.test_dir_name();
let full_path = format!("{}/{}", Playground::root(), sandbox);
let original_dir = format!("{}/{}", full_path, "jonathan.txt");
let expected = format!("{}/{}", full_path, "expected/jonathan.txt");
nu!(
_output,
cwd(&full_path),
"mv jonathan.txt expected"
);
let original_dir = format!("{}/{}", full_path, "jonathan.txt");
let expected = format!("{}/{}", full_path, "expected/jonathan.txt");
nu!(_output, cwd(&full_path), "mv jonathan.txt expected");
assert!(!h::file_exists_at(PathBuf::from(original_dir)));
assert!(h::file_exists_at(PathBuf::from(expected)));

@@ -99,22 +79,15 @@ fn moves_the_file_inside_directory_if_path_to_move_is_existing_directory() {
fn moves_the_directory_inside_directory_if_path_to_move_is_existing_directory() {
let sandbox = Playground::setup_for("mv_test_5")
.within("contributors")
.with_files(vec![
EmptyFile("jonathan.txt"),
])
.with_files(vec![EmptyFile("jonathan.txt")])
.mkdir("expected")
.test_dir_name();
let full_path = format!("{}/{}", Playground::root(), sandbox);
let original_dir = format!("{}/{}", full_path, "contributors");
let expected = format!("{}/{}", full_path, "expected/contributors");
nu!(
_output,
cwd(&full_path),
"mv contributors expected"
);
let original_dir = format!("{}/{}", full_path, "contributors");
let expected = format!("{}/{}", full_path, "expected/contributors");
nu!(_output, cwd(&full_path), "mv contributors expected");
assert!(!h::dir_exists_at(PathBuf::from(original_dir)));
assert!(h::file_exists_at(PathBuf::from(expected)));

@@ -124,14 +97,12 @@ fn moves_the_directory_inside_directory_if_path_to_move_is_existing_directory()
fn moves_the_directory_inside_directory_if_path_to_move_is_nonexistent_directory() {
let sandbox = Playground::setup_for("mv_test_6")
.within("contributors")
.with_files(vec![
EmptyFile("jonathan.txt"),
])
.with_files(vec![EmptyFile("jonathan.txt")])
.mkdir("expected")
.test_dir_name();
let full_path = format!("{}/{}", Playground::root(), sandbox);
let original_dir = format!("{}/{}", full_path, "contributors");
let original_dir = format!("{}/{}", full_path, "contributors");
nu!(
_output,

@@ -139,7 +110,10 @@ fn moves_the_directory_inside_directory_if_path_to_move_is_nonexistent_directory
"mv contributors expected/this_dir_exists_now/los_tres_amigos"
);
let expected = format!("{}/{}", full_path, "expected/this_dir_exists_now/los_tres_amigos");
let expected = format!(
"{}/{}",
full_path, "expected/this_dir_exists_now/los_tres_amigos"
);
assert!(!h::dir_exists_at(PathBuf::from(original_dir)));
assert!(h::file_exists_at(PathBuf::from(expected)));

@@ -168,11 +142,7 @@ fn moves_using_path_with_wildcard() {
let work_dir = format!("{}/{}", full_path, "work_dir");
let expected_copies_path = format!("{}/{}", full_path, "expected");
nu!(
_output,
cwd(&work_dir),
"mv ../originals/*.ini ../expected"
);
nu!(_output, cwd(&work_dir), "mv ../originals/*.ini ../expected");
assert!(h::files_exist_at(
vec![

@@ -185,7 +155,6 @@ fn moves_using_path_with_wildcard() {
));
}
#[test]
fn moves_using_a_glob() {
let sandbox = Playground::setup_for("mv_test_8")

@@ -204,11 +173,7 @@ fn moves_using_a_glob() {
let work_dir = format!("{}/{}", full_path, "work_dir");
let expected_copies_path = format!("{}/{}", full_path, "expected");
nu!(
_output,
cwd(&work_dir),
"mv ../meals/* ../expected"
);
nu!(_output, cwd(&work_dir), "mv ../meals/* ../expected");
assert!(h::dir_exists_at(PathBuf::from(meal_dir)));
assert!(h::files_exist_at(

@@ -219,4 +184,4 @@ fn moves_using_a_glob() {
],
PathBuf::from(&expected_copies_path)
));
}
}

@@ -28,7 +28,7 @@ fn open_can_parse_bson_1() {
nu!(
output,
cwd("tests/fixtures/formats"),
"open sample.bson | nth 0 | get b | echo $it"
"open sample.bson | get root | nth 0 | get b | echo $it"
);
assert_eq!(output, "hello");

@@ -39,7 +39,7 @@ fn open_can_parse_bson_2() {
nu!(
output,
cwd("tests/fixtures/formats"),
"open sample.bson | nth 6 | get b | get '$binary_subtype' | echo $it "
"open sample.bson | get root | nth 6 | get b | get '$binary_subtype' | echo $it "
);
assert_eq!(output, "function");

@@ -111,4 +111,4 @@ fn errors_if_file_not_found() {
);
assert!(output.contains("File could not be opened"));
}
}

@@ -90,7 +90,7 @@ fn rm_removes_deeply_nested_directories_with_wildcard_and_recursive_flag() {
.test_dir_name();
let full_path = format!("{}/{}", Playground::root(), sandbox);
nu!(
_output,
cwd("tests/fixtures/nuplayground/rm_wildcard_test_2"),

@@ -98,10 +98,7 @@ fn rm_removes_deeply_nested_directories_with_wildcard_and_recursive_flag() {
);
assert!(!h::files_exist_at(
vec![
Path::new("src/parser/parse"),
Path::new("src/parser/hir"),
],
vec![Path::new("src/parser/parse"), Path::new("src/parser/hir"),],
PathBuf::from(&full_path)
));
}

@@ -150,7 +147,11 @@ fn rm_errors_if_attempting_to_delete_a_directory_with_content_without_recursive_
let full_path = format!("{}/{}", Playground::root(), sandbox);
nu_error!(output, cwd(&Playground::root()), "rm rm_prevent_directory_removal_without_flag_test");
nu_error!(
output,
cwd(&Playground::root()),
"rm rm_prevent_directory_removal_without_flag_test"
);
assert!(h::file_exists_at(PathBuf::from(full_path)));
assert!(output.contains("is a directory"));

@@ -168,4 +169,4 @@ fn rm_errors_if_attempting_to_delete_two_dot_as_argument() {
nu_error!(output, cwd(&Playground::root()), "rm ..");
assert!(output.contains("may not be removed"));
}
}

@@ -16,14 +16,15 @@ fn can_only_apply_one() {
#[test]
fn by_one_with_field_passed() {
Playground::setup_for("plugin_inc_by_one_with_field_passed_test")
.with_files(vec![FileWithContent(
Playground::setup_for("plugin_inc_by_one_with_field_passed_test").with_files(vec![
FileWithContent(
"sample.toml",
r#"
[package]
edition = "2018"
"#,
)]);
),
]);
nu!(
output,

@@ -36,35 +37,34 @@ fn by_one_with_field_passed() {
#[test]
fn by_one_with_no_field_passed() {
Playground::setup_for("plugin_inc_by_one_with_no_field_passed_test")
.with_files(vec![FileWithContent(
Playground::setup_for("plugin_inc_by_one_with_no_field_passed_test").with_files(vec![
FileWithContent(
"sample.toml",
r#"
[package]
contributors = "2"
"#,
)]);
),
]);
nu!(
output,
cwd("tests/fixtures/nuplayground/plugin_inc_by_one_with_no_field_passed_test"),
"open sample.toml | get package.contributors | inc | echo $it"
);
assert_eq!(output, "3");
}
#[test]
fn semversion_major_inc() {
Playground::setup_for("plugin_inc_major_semversion_test")
.with_files(vec![FileWithContent(
"sample.toml",
r#"
Playground::setup_for("plugin_inc_major_semversion_test").with_files(vec![FileWithContent(
"sample.toml",
r#"
[package]
version = "0.1.3"
"#,
)]);
)]);
nu!(
output,

@@ -77,14 +77,13 @@ fn semversion_major_inc() {
#[test]
fn semversion_minor_inc() {
Playground::setup_for("plugin_inc_minor_semversion_test")
.with_files(vec![FileWithContent(
"sample.toml",
r#"
Playground::setup_for("plugin_inc_minor_semversion_test").with_files(vec![FileWithContent(
"sample.toml",
r#"
[package]
version = "0.1.3"
"#,
)]);
)]);
nu!(
output,

@@ -97,14 +96,13 @@ fn semversion_minor_inc() {
#[test]
fn semversion_patch_inc() {
Playground::setup_for("plugin_inc_patch_semversion_test")
.with_files(vec![FileWithContent(
"sample.toml",
r#"
Playground::setup_for("plugin_inc_patch_semversion_test").with_files(vec![FileWithContent(
"sample.toml",
r#"
[package]
version = "0.1.3"
"#,
)]);
)]);
nu!(
output,

@@ -117,14 +115,15 @@ fn semversion_patch_inc() {
#[test]
fn semversion_without_passing_field() {
Playground::setup_for("plugin_inc_semversion_without_passing_field_test")
.with_files(vec![FileWithContent(
Playground::setup_for("plugin_inc_semversion_without_passing_field_test").with_files(vec![
FileWithContent(
"sample.toml",
r#"
[package]
version = "0.1.3"
"#,
)]);
),
]);
nu!(
output,

@@ -133,4 +132,4 @@ fn semversion_without_passing_field() {
);
assert_eq!(output, "0.1.4");
}
}

@@ -106,6 +106,17 @@ fn can_convert_table_to_json_text_and_from_json_text_back_into_table() {
assert_eq!(output, "markup");
}
#[test]
fn can_convert_json_text_to_bson_and_back_into_table() {
nu!(
output,
cwd("tests/fixtures/formats"),
"open sample.bson | to-bson | from-bson | get root | nth 1 | get b | echo $it"
);
assert_eq!(output, "whel");
}
#[test]
fn can_convert_table_to_toml_text_and_from_toml_text_back_into_table() {
nu!(

tests/fixtures/formats/sample.bson | BIN (vendored; binary file not shown)