Mirror of https://github.com/nushell/nushell (synced 2025-01-13 05:38:57 +00:00)
upgrade dependencies (#4116)
* remove unused dependencies
* upgrade dependency bytes 0.5.6 -> 1.1.0
* upgrade dependency heapless 0.6.1 -> 0.7.8
* upgrade dependency image 0.22.4 -> 0.23.14
* upgrade dependency mp4 0.8.2 -> 0.9.0
* upgrade dependency bson 0.14.1 -> 2.0.1
  Bson::Undefined, Bson::MaxKey, Bson::MinKey and Bson::DbPointer weren't present in the previous version.

Co-authored-by: ahkrr <alexhk@protonmail.com>
This commit is contained in:
parent 649b3804c1
commit 74b812228c
14 changed files with 91 additions and 67 deletions
@@ -613,7 +613,7 @@ mod serde_json_tests {
        let serialized = serde_json::to_string(&color).unwrap();
        let deserialized: Color = serde_json::from_str(&serialized).unwrap();

-        assert_eq!(color, &deserialized);
+        assert_eq!(color, deserialized);
    }
}
@@ -36,8 +36,5 @@ nu-test-support = { version = "0.39.0", path="../nu-test-support" }
nu-value-ext = { version = "0.39.0", path="../nu-value-ext" }
nu-ansi-term = { version = "0.39.0", path="../nu-ansi-term" }

[target.'cfg(unix)'.dependencies]
users = "0.11.0"

[features]
dataframe = ["nu-protocol/dataframe"]
@@ -22,9 +22,8 @@ nu-path = { version = "0.39.0", path="../nu-path" }
trash = { version="1.3.0", optional=true }
which = { version="4.0.2", optional=true }
codespan-reporting = "0.11.0"
ansi_term = "0.12.1"
bigdecimal = { package = "bigdecimal-rs", version = "0.2.1", features = ["serde"] }
-bytes = "0.5.6"
+bytes = "1.1.0"
chrono = { version="0.4.19", features=["serde"] }
derive-new = "0.5.8"
dirs-next = "2.0.0"
@@ -20,7 +20,7 @@ nu-ansi-term = { path="../nu-ansi-term", version = "0.39.0" }
rand = "0.8.3"

[dev-dependencies]
-heapless = "0.6.1"
+heapless = { version = "0.7.8", default-features = false }

# [features]
# default = ["alloc"]
@@ -166,7 +166,7 @@ fn test_hex_write_with_simple_config() {
        core::str::from_utf8(b"00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f").unwrap();
    // let expected =
    //     "\u{1b}[38;5;242m00\u{1b}[0m \u{1b}[1;35m01\u{1b}[0m \u{1b}[1;35m02\u{1b}[0m \u{1b}[1;";
-    let mut buffer = heapless::Vec::<u8, heapless::consts::U50>::new();
+    let mut buffer = heapless::Vec::<u8, 50>::new();

    hex_write(&mut buffer, &bytes, config, None).unwrap();
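Background, not part of this commit: heapless 0.7 replaced the typenum capacity markers from heapless::consts with const generics, so the capacity is written as a plain integer in the type. A minimal sketch of the new form, assuming heapless 0.7 as a dependency:

```rust
// Capacity is a const-generic parameter in heapless 0.7; push/extend return
// an error once the fixed capacity is exhausted instead of allocating.
fn main() {
    // 0.6 spelling: heapless::Vec::<u8, heapless::consts::U50>::new()
    let mut buffer: heapless::Vec<u8, 50> = heapless::Vec::new();

    buffer
        .extend_from_slice(b"00 01 02")
        .expect("fits within the 50-byte capacity");
    assert_eq!(buffer.len(), 8);
}
```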
@@ -11,7 +11,7 @@ doctest = false

[dependencies]
crossterm = "0.19"
-image = { version="0.22.4", default_features=false, features=["png_codec", "jpeg"] }
+image = { version = "0.23.14", default_features = false, features = ["png", "jpeg"] }
neso = "0.5.0"
nu-errors = { path="../nu-errors", version = "0.39.0" }
nu-plugin = { path="../nu-plugin", version = "0.39.0" }
@@ -167,7 +167,7 @@ impl RenderContext {

#[derive(Debug)]
struct RawImageBuffer {
-    dimensions: (u64, u64),
+    dimensions: (u32, u32),
    colortype: image::ColorType,
    buffer: Vec<u8>,
}
@@ -175,11 +175,12 @@ struct RawImageBuffer {
fn load_from_png_buffer(buffer: &[u8]) -> Result<RawImageBuffer, Box<dyn std::error::Error>> {
    use image::ImageDecoder;

-    let decoder = image::png::PNGDecoder::new(buffer)?;
+    let decoder = image::codecs::png::PngDecoder::new(buffer)?;

    let dimensions = decoder.dimensions();
-    let colortype = decoder.colortype();
-    let buffer = decoder.read_image()?;
+    let colortype = decoder.color_type();
+    let mut buffer: Vec<u8> = vec![0; decoder.total_bytes() as usize];
+    decoder.read_image(&mut buffer)?;

    Ok(RawImageBuffer {
        dimensions,
@@ -191,11 +192,12 @@ fn load_from_png_buffer(buffer: &[u8]) -> Result<RawImageBuffer, Box<dyn std::er
fn load_from_jpg_buffer(buffer: &[u8]) -> Result<RawImageBuffer, Box<dyn std::error::Error>> {
    use image::ImageDecoder;

-    let decoder = image::jpeg::JPEGDecoder::new(buffer)?;
+    let decoder = image::codecs::jpeg::JpegDecoder::new(buffer)?;

    let dimensions = decoder.dimensions();
-    let colortype = decoder.colortype();
-    let buffer = decoder.read_image()?;
+    let colortype = decoder.color_type();
+    let mut buffer: Vec<u8> = vec![0; decoder.total_bytes() as usize];
+    decoder.read_image(&mut buffer)?;

    Ok(RawImageBuffer {
        dimensions,
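As background, not part of the patch: in image 0.23 the per-format decoders live under image::codecs, color_type() replaces colortype(), dimensions() yields (u32, u32), and read_image fills a caller-allocated buffer sized from total_bytes(). A condensed sketch of that flow, assuming image 0.23 (decode_png is illustrative, not a function in the plugin):

```rust
use image::ImageDecoder;

// Decodes a PNG byte slice into (width, height, raw pixel bytes).
fn decode_png(bytes: &[u8]) -> Result<(u32, u32, Vec<u8>), Box<dyn std::error::Error>> {
    let decoder = image::codecs::png::PngDecoder::new(bytes)?;
    let (width, height) = decoder.dimensions(); // (u32, u32) in 0.23
    let mut pixels = vec![0u8; decoder.total_bytes() as usize];
    decoder.read_image(&mut pixels)?; // consumes the decoder, fills our buffer
    Ok((width, height, pixels))
}
```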
@@ -245,7 +247,7 @@ pub fn view_contents(
    render_context.clear();

    match raw_image_buffer.colortype {
-        image::ColorType::RGBA(8) => {
+        image::ColorType::Rgba8 => {
            let img = image::ImageBuffer::<image::Rgba<u8>, Vec<u8>>::from_vec(
                raw_image_buffer.dimensions.0 as u32,
                raw_image_buffer.dimensions.1 as u32,
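The parameterized color variants were flattened into plain enum names in image 0.23: ColorType::RGBA(8) becomes ColorType::Rgba8, and RGB(8) below becomes Rgb8. A small illustrative match, assuming image 0.23 (describe is a hypothetical helper):

```rust
// Maps a few image 0.23 ColorType names onto short labels; the wildcard arm
// covers the remaining variants.
fn describe(color: image::ColorType) -> &'static str {
    match color {
        image::ColorType::Rgba8 => "8-bit RGBA", // was ColorType::RGBA(8)
        image::ColorType::Rgb8 => "8-bit RGB",   // was ColorType::RGB(8)
        image::ColorType::L8 => "8-bit grayscale",
        _ => "other",
    }
}
```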
@@ -257,7 +259,7 @@ pub fn view_contents(
                &img,
                render_context.width as u32,
                render_context.height as u32,
-                image::FilterType::Lanczos3,
+                image::imageops::FilterType::Lanczos3,
            );

            for (count, pixel) in resized_img.pixels().enumerate() {
@@ -266,7 +268,7 @@ pub fn view_contents(
                render_context.frame_buffer[count] = (rgb[0], rgb[1], rgb[2]);
            }
        }
-        image::ColorType::RGB(8) => {
+        image::ColorType::Rgb8 => {
            let img = image::ImageBuffer::<image::Rgb<u8>, Vec<u8>>::from_vec(
                raw_image_buffer.dimensions.0 as u32,
                raw_image_buffer.dimensions.1 as u32,
@@ -278,7 +280,7 @@ pub fn view_contents(
                &img,
                render_context.width as u32,
                render_context.height as u32,
-                image::FilterType::Lanczos3,
+                image::imageops::FilterType::Lanczos3,
            );

            for (count, pixel) in resized_img.pixels().enumerate() {
@@ -359,7 +361,7 @@ pub fn view_contents_interactive(
            &img,
            render_context.width as u32,
            render_context.height as u32,
-            image::FilterType::Lanczos3,
+            image::imageops::FilterType::Lanczos3,
        );

        render_context.clear();
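FilterType itself is unchanged; it simply moved from the crate root to image::imageops. A hedged usage sketch of the resize call these hunks feed into, assuming image 0.23 (shrink is illustrative):

```rust
use image::imageops::FilterType;

// Downscales an RGB image to the terminal-sized dimensions used for rendering.
fn shrink(img: &image::RgbImage, width: u32, height: u32) -> image::RgbImage {
    image::imageops::resize(img, width, height, FilterType::Lanczos3)
}
```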
@@ -11,7 +11,7 @@ doctest = false

[dependencies]
bigdecimal = { package = "bigdecimal-rs", version = "0.2.1", features = ["serde"] }
-bson = { version="0.14.1", features=["decimal128"] }
+bson = { version = "2.0.1", features = [ "chrono-0_4" ] }
nu-errors = { path="../nu-errors", version = "0.39.0" }
nu-plugin = { path="../nu-plugin", version = "0.39.0" }
nu-protocol = { path="../nu-protocol", version = "0.39.0" }
@@ -1,5 +1,5 @@
use bigdecimal::BigDecimal;
-use bson::{decode_document, spec::BinarySubtype, Bson};
+use bson::{spec::BinarySubtype, Bson};
use nu_errors::{ExpectedRange, ShellError};
use nu_protocol::{Primitive, ReturnSuccess, ReturnValue, TaggedDictBuilder, UntaggedValue, Value};
use nu_source::{SpannedItem, Tag};
@@ -35,7 +35,7 @@ fn convert_bson_value_to_nu_value(v: &Bson, tag: impl Into<Tag>) -> Result<Value
    let span = tag.span;

    Ok(match v {
-        Bson::FloatingPoint(n) => UntaggedValue::Primitive(Primitive::from(*n)).into_value(&tag),
+        Bson::Double(n) => UntaggedValue::Primitive(Primitive::from(*n)).into_value(&tag),
        Bson::String(s) => {
            UntaggedValue::Primitive(Primitive::String(String::from(s))).into_value(&tag)
        }
@@ -50,20 +50,22 @@ fn convert_bson_value_to_nu_value(v: &Bson, tag: impl Into<Tag>) -> Result<Value
        }
        Bson::Boolean(b) => UntaggedValue::Primitive(Primitive::Boolean(*b)).into_value(&tag),
        Bson::Null => UntaggedValue::Primitive(Primitive::Nothing).into_value(&tag),
-        Bson::RegExp(r, opts) => {
+        Bson::RegularExpression(regx) => {
            let mut collected = TaggedDictBuilder::new(tag.clone());
            collected.insert_value(
                "$regex".to_string(),
-                UntaggedValue::Primitive(Primitive::String(String::from(r))).into_value(&tag),
+                UntaggedValue::Primitive(Primitive::String(String::from(&regx.pattern)))
+                    .into_value(&tag),
            );
            collected.insert_value(
                "$options".to_string(),
-                UntaggedValue::Primitive(Primitive::String(String::from(opts))).into_value(&tag),
+                UntaggedValue::Primitive(Primitive::String(String::from(&regx.options)))
+                    .into_value(&tag),
            );
            collected.into_value()
        }
-        Bson::I32(n) => UntaggedValue::int(*n).into_value(&tag),
-        Bson::I64(n) => UntaggedValue::int(*n).into_value(&tag),
+        Bson::Int32(n) => UntaggedValue::int(*n).into_value(&tag),
+        Bson::Int64(n) => UntaggedValue::int(*n).into_value(&tag),
        Bson::Decimal128(n) => {
            // TODO: this really isn't great, and we should update this to do a higher
            // fidelity translation
@@ -84,41 +86,43 @@ fn convert_bson_value_to_nu_value(v: &Bson, tag: impl Into<Tag>) -> Result<Value
            );
            collected.into_value()
        }
-        Bson::JavaScriptCodeWithScope(js, doc) => {
+        Bson::JavaScriptCodeWithScope(js) => {
            let mut collected = TaggedDictBuilder::new(tag.clone());
            collected.insert_value(
                "$javascript".to_string(),
-                UntaggedValue::Primitive(Primitive::String(String::from(js))).into_value(&tag),
+                UntaggedValue::Primitive(Primitive::String(String::from(&js.code)))
+                    .into_value(&tag),
            );
            collected.insert_value(
                "$scope".to_string(),
-                convert_bson_value_to_nu_value(&Bson::Document(doc.to_owned()), tag)?,
+                convert_bson_value_to_nu_value(&Bson::Document(js.scope.to_owned()), tag)?,
            );
            collected.into_value()
        }
-        Bson::TimeStamp(ts) => {
+        Bson::Timestamp(ts) => {
            let mut collected = TaggedDictBuilder::new(tag.clone());
            collected.insert_value(
                "$timestamp".to_string(),
-                UntaggedValue::int(*ts).into_value(&tag),
+                UntaggedValue::int(ts.time).into_value(&tag),
            );
            collected.into_value()
        }
-        Bson::Binary(bst, bytes) => {
+        Bson::Binary(binary) => {
            let mut collected = TaggedDictBuilder::new(tag.clone());
            collected.insert_value(
                "$binary_subtype".to_string(),
-                match bst {
-                    BinarySubtype::UserDefined(u) => UntaggedValue::int(*u),
-                    _ => {
-                        UntaggedValue::Primitive(Primitive::String(binary_subtype_to_string(*bst)))
-                    }
+                match binary.subtype {
+                    BinarySubtype::UserDefined(u) => UntaggedValue::int(u),
+                    _ => UntaggedValue::Primitive(Primitive::String(binary_subtype_to_string(
+                        binary.subtype,
+                    ))),
                }
                .into_value(&tag),
            );
            collected.insert_value(
                "$binary".to_string(),
-                UntaggedValue::Primitive(Primitive::Binary(bytes.to_owned())).into_value(&tag),
+                UntaggedValue::Primitive(Primitive::Binary(binary.bytes.to_owned()))
+                    .into_value(&tag),
            );
            collected.into_value()
        }
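For orientation, not part of the diff: bson 2.x carries the payload of these variants in dedicated structs (bson::Regex, bson::Timestamp, bson::Binary, bson::JavaScriptCodeWithScope) instead of loose tuple fields. A small illustrative match over the new shapes, assuming bson 2.x (summarize is a hypothetical helper):

```rust
use bson::Bson;

// Produces a one-line summary for the struct-style variants handled above.
fn summarize(value: &Bson) -> String {
    match value {
        Bson::RegularExpression(re) => format!("regex /{}/{}", re.pattern, re.options),
        Bson::Timestamp(ts) => format!("timestamp {}:{}", ts.time, ts.increment),
        Bson::Binary(bin) => format!("binary payload of {} bytes", bin.bytes.len()),
        Bson::JavaScriptCodeWithScope(js) => format!("js code with {} scope keys", js.scope.len()),
        _ => "other".to_string(),
    }
}
```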
@@ -130,8 +134,8 @@ fn convert_bson_value_to_nu_value(v: &Bson, tag: impl Into<Tag>) -> Result<Value
            );
            collected.into_value()
        }
-        Bson::UtcDatetime(dt) => {
-            UntaggedValue::Primitive(Primitive::Date((*dt).into())).into_value(&tag)
+        Bson::DateTime(dt) => {
+            UntaggedValue::Primitive(Primitive::Date(dt.to_chrono().into())).into_value(&tag)
        }
        Bson::Symbol(s) => {
            let mut collected = TaggedDictBuilder::new(tag.clone());
@@ -141,6 +145,11 @@ fn convert_bson_value_to_nu_value(v: &Bson, tag: impl Into<Tag>) -> Result<Value
            );
            collected.into_value()
        }
+        Bson::Undefined | Bson::MaxKey | Bson::MinKey | Bson::DbPointer(_) => {
+            // TODO Impelmenting Bson::Undefined, Bson::MaxKey, Bson::MinKey and Bson::DbPointer
+            // These Variants weren't present in the previous version.
+            TaggedDictBuilder::new(tag).into_value()
+        }
    })
}
@@ -184,7 +193,7 @@ impl std::io::Read for BytesReader {
pub fn from_bson_bytes_to_value(bytes: Vec<u8>, tag: impl Into<Tag>) -> Result<Value, ShellError> {
    let mut docs = Vec::new();
    let mut b_reader = BytesReader::new(bytes);
-    while let Ok(v) = decode_document(&mut b_reader) {
+    while let Ok(v) = bson::de::from_reader(&mut b_reader) {
        docs.push(Bson::Document(v));
    }
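bson 2.x dropped the decode_document free function in favor of serde-based readers. A self-contained sketch of the same loop against a plain Cursor, assuming bson 2.x (read_all_docs and the Cursor are illustrative, not from the patch):

```rust
use std::io::Cursor;

use bson::Document;

// Reads consecutive BSON documents from a byte buffer until the data runs out
// or a document fails to parse.
fn read_all_docs(bytes: Vec<u8>) -> Vec<Document> {
    let mut reader = Cursor::new(bytes);
    let mut docs: Vec<Document> = Vec::new();
    while let Ok(doc) = bson::de::from_reader(&mut reader) {
        docs.push(doc);
    }
    docs
}
```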
@@ -15,6 +15,6 @@ nu-plugin = { path="../nu-plugin", version = "0.39.0" }
nu-protocol = { path="../nu-protocol", version = "0.39.0" }
nu-source = { path="../nu-source", version = "0.39.0" }
tempfile = "3.2.0"
-mp4 = "0.8.2"
+mp4 = "0.9.0"

[build-dependencies]
@@ -27,7 +27,7 @@ pub fn convert_mp4_file_to_nu_value(path: &Path, tag: Tag) -> Result<Value, mp4:

    // Build tracks table
    let mut tracks = Vec::new();
-    for track in mp4.tracks() {
+    for track in mp4.tracks().values() {
        let mut curr_track_dict = TaggedDictBuilder::new(tag.clone());

        curr_track_dict.insert_untagged("track id", UntaggedValue::int(track.track_id()));
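In mp4 0.9, Mp4Reader::tracks() returns a map keyed by track id rather than a slice, hence the .values() iteration. A hedged end-to-end sketch, assuming the mp4 crate at 0.9 (list_track_ids and its error handling are illustrative, not from the plugin):

```rust
use std::fs::File;
use std::io::BufReader;

// Collects the track ids of an MP4 file using the mp4 0.9 API.
fn list_track_ids(path: &str) -> Result<Vec<u32>, Box<dyn std::error::Error>> {
    let file = File::open(path)?;
    let size = file.metadata()?.len();
    let mp4 = mp4::Mp4Reader::read_header(BufReader::new(file), size)?;
    // tracks() now yields a map of track id -> Mp4Track, so iterate values().
    Ok(mp4.tracks().values().map(|track| track.track_id()).collect())
}
```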
@@ -15,10 +15,12 @@ nu-errors = { path="../nu-errors", version = "0.39.0" }
nu-plugin = { path="../nu-plugin", version = "0.39.0" }
nu-protocol = { path="../nu-protocol", version = "0.39.0" }
nu-source = { path="../nu-source", version = "0.39.0" }
open = "1.4.0"
url = "2.2.0"
webbrowser = "0.5.5"

[target.'cfg(windows)'.dependencies]
open = "1.4.0"

[build-dependencies]
nu-errors = { version = "0.39.0", path="../nu-errors" }
nu-source = { version = "0.39.0", path="../nu-source" }
@@ -10,7 +10,7 @@ version = "0.39.0"
doctest = false

[dependencies]
-bson = "0.14.1"
+bson = { version = "2.0.1", features = [ "chrono-0_4" ] }
nu-errors = { path="../nu-errors", version = "0.39.0" }
nu-plugin = { path="../nu-plugin", version = "0.39.0" }
nu-protocol = { path="../nu-protocol", version = "0.39.0" }
@@ -1,4 +1,4 @@
-use bson::{encode_document, oid::ObjectId, spec::BinarySubtype, Bson, Document};
+use bson::{oid::ObjectId, spec::BinarySubtype, Bson, Document};
use nu_errors::{CoerceInto, ShellError};
use nu_protocol::{
    Dictionary, Primitive, ReturnSuccess, ReturnValue, SpannedTypeName, UnspannedPathMember,
@@ -23,17 +23,19 @@ pub fn value_to_bson_value(v: &Value) -> Result<Bson, ShellError> {
    Ok(match &v.value {
        UntaggedValue::Primitive(Primitive::Boolean(b)) => Bson::Boolean(*b),
        // FIXME: What about really big decimals?
-        UntaggedValue::Primitive(Primitive::Filesize(decimal)) => Bson::FloatingPoint(
+        UntaggedValue::Primitive(Primitive::Filesize(decimal)) => Bson::Double(
            (decimal)
                .to_f64()
                .expect("Unimplemented BUG: What about big decimals?"),
        ),
        UntaggedValue::Primitive(Primitive::Duration(i)) => Bson::String(i.to_string()),
-        UntaggedValue::Primitive(Primitive::Date(d)) => Bson::UtcDatetime((*d).into()),
+        UntaggedValue::Primitive(Primitive::Date(d)) => {
+            Bson::DateTime(bson::DateTime::from_chrono(*d))
+        }
        UntaggedValue::Primitive(Primitive::EndOfStream) => Bson::Null,
        UntaggedValue::Primitive(Primitive::BeginningOfStream) => Bson::Null,
        UntaggedValue::Primitive(Primitive::Decimal(d)) => {
-            Bson::FloatingPoint(d.to_f64().ok_or_else(|| {
+            Bson::Double(d.to_f64().ok_or_else(|| {
                ShellError::labeled_error(
                    "Could not convert value to decimal",
                    "could not convert to decimal",
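The Bson::DateTime variant now wraps bson's own DateTime type; with the "chrono-0_4" feature enabled in Cargo.toml it converts to and from chrono values. A minimal round-trip sketch, assuming bson 2.x and chrono 0.4 (roundtrip is illustrative):

```rust
use chrono::Utc;

// Converts a chrono timestamp into a BSON value and back. BSON datetimes carry
// millisecond precision, so sub-millisecond digits are lost on the way through.
fn roundtrip(now: chrono::DateTime<Utc>) -> chrono::DateTime<Utc> {
    let wrapped = bson::Bson::DateTime(bson::DateTime::from_chrono(now));
    match wrapped {
        bson::Bson::DateTime(dt) => dt.to_chrono(),
        _ => unreachable!("constructed as DateTime above"),
    }
}
```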
@@ -41,9 +43,9 @@ pub fn value_to_bson_value(v: &Value) -> Result<Bson, ShellError> {
                )
            })?)
        }
-        UntaggedValue::Primitive(Primitive::Int(i)) => Bson::I64(*i),
+        UntaggedValue::Primitive(Primitive::Int(i)) => Bson::Int64(*i),
        UntaggedValue::Primitive(Primitive::BigInt(i)) => {
-            Bson::I64(i.tagged(&v.tag).coerce_into("converting to BSON")?)
+            Bson::Int64(i.tagged(&v.tag).coerce_into("converting to BSON")?)
        }
        UntaggedValue::Primitive(Primitive::Nothing) => Bson::Null,
        UntaggedValue::Primitive(Primitive::String(s)) => Bson::String(s.clone()),
@@ -51,7 +53,7 @@ pub fn value_to_bson_value(v: &Value) -> Result<Bson, ShellError> {
            path.iter()
                .map(|x| match &x.unspanned {
                    UnspannedPathMember::String(string) => Ok(Bson::String(string.clone())),
-                    UnspannedPathMember::Int(int) => Ok(Bson::I64(*int)),
+                    UnspannedPathMember::Int(int) => Ok(Bson::Int64(*int)),
                })
                .collect::<Result<Vec<Bson>, ShellError>>()?,
        ),
@@ -66,10 +68,13 @@ pub fn value_to_bson_value(v: &Value) -> Result<Bson, ShellError> {
        #[cfg(feature = "dataframe")]
        UntaggedValue::DataFrame(_) | UntaggedValue::FrameStruct(_) => Bson::Null,
        UntaggedValue::Error(e) => return Err(e.clone()),
-        UntaggedValue::Primitive(Primitive::Binary(b)) => {
-            Bson::Binary(BinarySubtype::Generic, b.clone())
-        }
+        UntaggedValue::Primitive(Primitive::Binary(b)) => Bson::Binary(bson::Binary {
+            subtype: BinarySubtype::Generic,
+            bytes: b.clone(),
+        }),
        UntaggedValue::Row(o) => object_value_to_bson(o)?,
+        // TODO Impelmenting Bson::Undefined, Bson::MaxKey, Bson::MinKey and Bson::DbPointer
+        // These Variants weren't present in the previous version.
    })
}
@@ -86,7 +91,9 @@ fn object_value_to_bson(o: &Dictionary) -> Result<Bson, ShellError> {
            let r: Result<String, _> = tagged_regex_value.try_into();
            let opts: Result<String, _> = tagged_opts_value.try_into();
            match (r, opts) {
-                (Ok(r), Ok(opts)) => Ok(Bson::RegExp(r, opts)),
+                (Ok(pattern), Ok(options)) => {
+                    Ok(Bson::RegularExpression(bson::Regex { pattern, options }))
+                }
                _ => generic_object_value_to_bson(o),
            }
        }
@@ -99,9 +106,11 @@ fn object_value_to_bson(o: &Dictionary) -> Result<Bson, ShellError> {
            let s: Result<&Dictionary, _> = tagged_scope_value.try_into();

            match (js, s) {
-                (Ok(js), Ok(s)) => {
-                    if let Bson::Document(doc) = object_value_to_bson(s)? {
-                        Ok(Bson::JavaScriptCodeWithScope(js, doc))
+                (Ok(code), Ok(s)) => {
+                    if let Bson::Document(scope) = object_value_to_bson(s)? {
+                        Ok(Bson::JavaScriptCodeWithScope(
+                            bson::JavaScriptCodeWithScope { code, scope },
+                        ))
                    } else {
                        generic_object_value_to_bson(o)
                    }
@@ -122,8 +131,11 @@ fn object_value_to_bson(o: &Dictionary) -> Result<Bson, ShellError> {
            }
            Some((timestamp, tagged_timestamp_value)) if timestamp == "$timestamp" => {
                let ts: Result<i64, _> = tagged_timestamp_value.try_into();
-                if let Ok(ts) = ts {
-                    Ok(Bson::TimeStamp(ts))
+                if let Ok(time) = ts {
+                    Ok(Bson::Timestamp(bson::Timestamp {
+                        time: time as u32,
+                        increment: Default::default(),
+                    }))
                } else {
                    generic_object_value_to_bson(o)
                }
@@ -137,7 +149,10 @@ fn object_value_to_bson(o: &Dictionary) -> Result<Bson, ShellError> {
                let bin: Result<Vec<u8>, _> = tagged_bin_value.try_into();

                match (bin, bst) {
-                    (Ok(bin), Ok(v)) => Ok(Bson::Binary(v, bin)),
+                    (Ok(bin), Ok(subtype)) => Ok(Bson::Binary(bson::Binary {
+                        subtype,
+                        bytes: bin,
+                    })),
                    _ => generic_object_value_to_bson(o),
                }
            }
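All of these arms build the bson 2.x payload structs by hand, which works because their fields are public. An illustrative constructor, assuming bson 2.x (example_values and the literal values are placeholders, not from the plugin):

```rust
use bson::spec::BinarySubtype;

// Builds one example of each struct-backed Bson variant used in the arms above.
fn example_values(bytes: Vec<u8>) -> Vec<bson::Bson> {
    vec![
        bson::Bson::Binary(bson::Binary {
            subtype: BinarySubtype::Generic,
            bytes,
        }),
        bson::Bson::Timestamp(bson::Timestamp {
            time: 0,
            increment: 0,
        }),
        bson::Bson::RegularExpression(bson::Regex {
            pattern: "^nu".to_string(),
            options: "i".to_string(),
        }),
    ]
}
```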
@@ -148,7 +163,7 @@ fn object_value_to_bson(o: &Dictionary) -> Result<Bson, ShellError> {
                let obj_id: Result<String, _> = tagged_object_id_value.try_into();

                if let Ok(obj_id) = obj_id {
-                    let obj_id = ObjectId::with_string(&obj_id);
+                    let obj_id = ObjectId::parse_str(&obj_id);

                    if let Ok(obj_id) = obj_id {
                        Ok(Bson::ObjectId(obj_id))
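ObjectId::with_string was renamed to ObjectId::parse_str in bson 2.x; it still returns a Result for malformed hex input. A one-line sketch (parse_object_id is a hypothetical helper):

```rust
// Parses a 24-character hex string into a Bson::ObjectId, if it is valid.
fn parse_object_id(hex: &str) -> Option<bson::Bson> {
    bson::oid::ObjectId::parse_str(hex)
        .ok()
        .map(bson::Bson::ObjectId)
}
```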
@@ -204,7 +219,7 @@ fn generic_object_value_to_bson(o: &Dictionary) -> Result<Bson, ShellError> {
}

fn shell_encode_document(writer: &mut Vec<u8>, doc: Document, tag: Tag) -> Result<(), ShellError> {
-    match encode_document(writer, &doc) {
+    match doc.to_writer(writer) {
        Err(e) => Err(ShellError::labeled_error(
            format!("Failed to encode document due to: {:?}", e),
            "requires BSON-compatible document",
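Serialization moved onto the Document type itself: Document::to_writer replaces the encode_document free function. A minimal sketch of encoding into a byte buffer, assuming bson 2.x (encode_to_vec is illustrative):

```rust
// Serializes a BSON document into a freshly allocated byte vector.
fn encode_to_vec(doc: &bson::Document) -> Result<Vec<u8>, bson::ser::Error> {
    let mut out = Vec::new();
    doc.to_writer(&mut out)?;
    Ok(out)
}
```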