Revert "Getting closer to multiline scripts (#2738)" (#2745)

This reverts commit e66bf70589.
Jonathan Turner 2020-11-10 18:22:13 +13:00 committed by GitHub
parent e66bf70589
commit 5a75e11b0e
17 changed files with 160 additions and 262 deletions


@@ -290,8 +290,8 @@ pub fn create_default_context(interactive: bool) -> Result<EvaluationContext, Bo
     Ok(context)
 }
 
-pub async fn run_script_file(
-    file_contents: String,
+pub async fn run_vec_of_pipelines(
+    pipelines: Vec<String>,
     redirect_stdin: bool,
 ) -> Result<(), Box<dyn Error>> {
     let mut syncer = EnvironmentSyncer::new();
@@ -313,7 +313,9 @@ pub async fn run_script_file(
     let _ = run_startup_commands(&mut context, &config).await;
 
-    run_script_standalone(file_contents, redirect_stdin, &mut context, true).await?;
+    for pipeline in pipelines {
+        run_pipeline_standalone(pipeline, redirect_stdin, &mut context, true).await?;
+    }
 
     Ok(())
 }
@@ -473,7 +475,7 @@ pub async fn cli(mut context: EvaluationContext) -> Result<(), Box<dyn Error>> {
         }
 
         let line = match convert_rustyline_result_to_string(readline) {
-            LineResult::Success(s) => process_script(&s, &mut context, false, true).await,
+            LineResult::Success(s) => process_line(&s, &mut context, false, true).await,
             x => x,
         };
@@ -600,7 +602,8 @@ async fn run_startup_commands(
         } => {
             for pipeline in pipelines {
                 if let Ok(pipeline_string) = pipeline.as_string() {
-                    let _ = run_script_standalone(pipeline_string, false, context, false).await;
+                    let _ =
+                        run_pipeline_standalone(pipeline_string, false, context, false).await;
                 }
             }
         }
@@ -615,13 +618,13 @@ async fn run_startup_commands(
     Ok(())
 }
 
-pub async fn run_script_standalone(
-    script_text: String,
+pub async fn run_pipeline_standalone(
+    pipeline: String,
     redirect_stdin: bool,
     context: &mut EvaluationContext,
     exit_on_error: bool,
 ) -> Result<(), Box<dyn Error>> {
-    let line = process_script(&script_text, context, redirect_stdin, false).await;
+    let line = process_line(&pipeline, context, redirect_stdin, false).await;
 
     match line {
         LineResult::Success(line) => {
@@ -889,16 +892,16 @@ pub async fn parse_and_eval(line: &str, ctx: &mut EvaluationContext) -> Result<S
 }
 
 /// Process the line by parsing the text to turn it into commands, classify those commands so that we understand what is being called in the pipeline, and then run this pipeline
-pub async fn process_script(
-    script_text: &str,
+pub async fn process_line(
+    line: &str,
     ctx: &mut EvaluationContext,
     redirect_stdin: bool,
     cli_mode: bool,
 ) -> LineResult {
-    if script_text.trim() == "" {
-        LineResult::Success(script_text.to_string())
+    if line.trim() == "" {
+        LineResult::Success(line.to_string())
     } else {
-        let line = chomp_newline(script_text);
+        let line = chomp_newline(line);
         ctx.raw_input = line.to_string();
 
         let (result, err) = nu_parser::lite_parse(&line, 0);
@@ -927,12 +930,11 @@ pub async fn process_script(
             // ...then change to this directory
             if cli_mode
                 && classified_block.block.block.len() == 1
-                && classified_block.block.block[0].pipelines.len() == 1
-                && classified_block.block.block[0].pipelines[0].list.len() == 1
+                && classified_block.block.block[0].list.len() == 1
             {
                 if let ClassifiedCommand::Internal(InternalCommand {
                     ref name, ref args, ..
-                }) = classified_block.block.block[0].pipelines[0].list[0]
+                }) = classified_block.block.block[0].list[0]
                 {
                     let internal_name = name;
                     let name = args
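
The hunks above restore the pipeline-at-a-time entry points (run_vec_of_pipelines, run_pipeline_standalone, process_line) in place of the script-oriented ones. A minimal sketch of how a binary drives the restored entry point, mirroring the src/main.rs hunk at the end of this diff; it assumes nu_cli and futures as dependencies (as in the repository's own main.rs), the pipeline strings are placeholders, and error handling is simplified:

use std::error::Error;

fn main() -> Result<(), Box<dyn Error>> {
    // Each element is parsed and executed as one pipeline, in order:
    // run_vec_of_pipelines builds the context once, then calls
    // run_pipeline_standalone -> process_line for every entry.
    let pipelines = vec!["echo hello".to_string(), "ls".to_string()];

    futures::executor::block_on(nu_cli::run_vec_of_pipelines(
        pipelines,
        false, // redirect_stdin
    ))?;
    Ok(())
}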


@@ -5,7 +5,7 @@ use crate::prelude::*;
 use heim::cpu::time;
 use nu_errors::ShellError;
 use nu_protocol::{
-    hir::{Block, ClassifiedCommand, Group, InternalCommand, Pipeline},
+    hir::{Block, ClassifiedCommand, Commands, InternalCommand},
     Dictionary, Scope, Signature, SyntaxShape, UntaggedValue, Value,
 };
 use rand::{
@@ -175,19 +175,15 @@ where
 fn add_implicit_autoview(mut block: Block) -> Block {
     if block.block.is_empty() {
-        let group = Group::new(
-            vec![{
-                let mut commands = Pipeline::new(block.span);
+        block.push({
+            let mut commands = Commands::new(block.span);
             commands.push(ClassifiedCommand::Internal(InternalCommand::new(
                 "autoview".to_string(),
                 block.span,
                 block.span,
             )));
             commands
-            }],
-            block.span,
-        );
-        block.push(group);
+        });
     }
     block
 }


@@ -5,9 +5,7 @@ use crate::prelude::*;
 use crate::stream::InputStream;
 use futures::stream::TryStreamExt;
 use nu_errors::ShellError;
-use nu_protocol::hir::{
-    Block, Call, ClassifiedCommand, Expression, Pipeline, SpannedExpression, Synthetic,
-};
+use nu_protocol::hir::{Block, ClassifiedCommand, Commands};
 use nu_protocol::{ReturnSuccess, Scope, UntaggedValue, Value};
 use std::sync::atomic::Ordering;
@@ -18,61 +16,7 @@ pub(crate) async fn run_block(
     scope: Arc<Scope>,
 ) -> Result<InputStream, ShellError> {
     let mut output: Result<InputStream, ShellError> = Ok(InputStream::empty());
-    for group in &block.block {
-        match output {
-            Ok(inp) if inp.is_empty() => {}
-            Ok(inp) => {
-                // Run autoview on the values we've seen so far
-                // We may want to make this configurable for other kinds of hosting
-                if let Some(autoview) = ctx.get_command("autoview") {
-                    let mut output_stream = ctx
-                        .run_command(
-                            autoview,
-                            Tag::unknown(),
-                            Call::new(
-                                Box::new(SpannedExpression::new(
-                                    Expression::Synthetic(Synthetic::String("autoview".into())),
-                                    Span::unknown(),
-                                )),
-                                Span::unknown(),
-                            ),
-                            scope.clone(),
-                            inp,
-                        )
-                        .await?;
-                    loop {
-                        match output_stream.try_next().await {
-                            Ok(Some(ReturnSuccess::Value(Value {
-                                value: UntaggedValue::Error(e),
-                                ..
-                            }))) => return Err(e),
-                            Ok(Some(_item)) => {
-                                if let Some(err) = ctx.get_errors().get(0) {
-                                    ctx.clear_errors();
-                                    return Err(err.clone());
-                                }
-                                if ctx.ctrl_c.load(Ordering::SeqCst) {
-                                    break;
-                                }
-                            }
-                            Ok(None) => {
-                                if let Some(err) = ctx.get_errors().get(0) {
-                                    ctx.clear_errors();
-                                    return Err(err.clone());
-                                }
-                                break;
-                            }
-                            Err(e) => return Err(e),
-                        }
-                    }
-                }
-            }
-            Err(e) => {
-                return Err(e);
-            }
-        }
-        output = Ok(InputStream::empty());
-        for pipeline in &group.pipelines {
+    for pipeline in &block.block {
         match output {
             Ok(inp) if inp.is_empty() => {}
             Ok(inp) => {
@@ -112,13 +56,12 @@ pub(crate) async fn run_block(
             input = InputStream::empty();
         }
-        }
     }
 
     output
 }
 
 async fn run_pipeline(
-    commands: &Pipeline,
+    commands: &Commands,
     ctx: &mut EvaluationContext,
     mut input: InputStream,
     scope: Arc<Scope>,


@@ -94,9 +94,9 @@ async fn if_command(
             tag,
         ));
     }
-    match condition.block[0].pipelines.get(0) {
-        Some(item) => match item.list.get(0) {
-            Some(ClassifiedCommand::Expr(expr)) => expr.clone(),
+    match condition.block[0].list.get(0) {
+        Some(item) => match item {
+            ClassifiedCommand::Expr(expr) => expr.clone(),
             _ => {
                 return Err(ShellError::labeled_error(
                     "Expected a condition",


@@ -51,9 +51,9 @@ impl WholeStreamCommand for SubCommand {
                 tag,
             ));
         }
-        match block.block[0].pipelines.get(0) {
-            Some(item) => match item.list.get(0) {
-                Some(ClassifiedCommand::Expr(expr)) => expr.clone(),
+        match block.block[0].list.get(0) {
+            Some(item) => match item {
+                ClassifiedCommand::Expr(expr) => expr.clone(),
                 _ => {
                     return Err(ShellError::labeled_error(
                         "Expected a condition",


@@ -50,9 +50,9 @@ impl WholeStreamCommand for SubCommand {
                 tag,
             ));
         }
-        match block.block[0].pipelines.get(0) {
-            Some(item) => match item.list.get(0) {
-                Some(ClassifiedCommand::Expr(expr)) => expr.clone(),
+        match block.block[0].list.get(0) {
+            Some(item) => match item {
+                ClassifiedCommand::Expr(expr) => expr.clone(),
                 _ => {
                     return Err(ShellError::labeled_error(
                         "Expected a condition",


@@ -50,9 +50,9 @@ impl WholeStreamCommand for SubCommand {
                 tag,
             ));
         }
-        match block.block[0].pipelines.get(0) {
-            Some(item) => match item.list.get(0) {
-                Some(ClassifiedCommand::Expr(expr)) => expr.clone(),
+        match block.block[0].list.get(0) {
+            Some(item) => match item {
+                ClassifiedCommand::Expr(expr) => expr.clone(),
                 _ => {
                     return Err(ShellError::labeled_error(
                         "Expected a condition",


@@ -50,9 +50,9 @@ impl WholeStreamCommand for SubCommand {
                 tag,
             ));
         }
-        match block.block[0].pipelines.get(0) {
-            Some(item) => match item.list.get(0) {
-                Some(ClassifiedCommand::Expr(expr)) => expr.clone(),
+        match block.block[0].list.get(0) {
+            Some(item) => match item {
+                ClassifiedCommand::Expr(expr) => expr.clone(),
                 _ => {
                     return Err(ShellError::labeled_error(
                         "Expected a condition",


@@ -81,9 +81,9 @@ async fn where_command(
             tag,
         ));
     }
-    match block.block[0].pipelines.get(0) {
-        Some(item) => match item.list.get(0) {
-            Some(ClassifiedCommand::Expr(expr)) => expr.clone(),
+    match block.block[0].list.get(0) {
+        Some(item) => match item {
+            ClassifiedCommand::Expr(expr) => expr.clone(),
             _ => {
                 return Err(ShellError::labeled_error(
                     "Expected a condition",


@@ -138,7 +138,7 @@ impl<'s> Flatten<'s> {
         result
     }
 
-    fn pipeline(&self, pipeline: &Pipeline) -> Vec<CompletionLocation> {
+    fn pipeline(&self, pipeline: &Commands) -> Vec<CompletionLocation> {
         let mut result = Vec::new();
 
         for command in &pipeline.list {
@@ -158,11 +158,7 @@ impl<'s> Flatten<'s> {
     /// Flattens the block into a Vec of completion locations
     pub fn completion_locations(&self, block: &Block) -> Vec<CompletionLocation> {
-        block
-            .block
-            .iter()
-            .flat_map(|g| g.pipelines.iter().flat_map(|v| self.pipeline(v)))
-            .collect()
+        block.block.iter().flat_map(|v| self.pipeline(v)).collect()
     }
 
     pub fn new(line: &'s str) -> Flatten<'s> {


@@ -43,8 +43,8 @@ mod examples;
 
 pub use crate::cli::cli;
 pub use crate::cli::{
-    create_default_context, parse_and_eval, process_script, register_plugins, run_script_file,
-    run_script_standalone, LineResult,
+    create_default_context, parse_and_eval, process_line, register_plugins,
+    run_pipeline_standalone, run_vec_of_pipelines, LineResult,
 };
 pub use crate::command_registry::CommandRegistry;
 pub use crate::commands::command::{


@@ -5,8 +5,8 @@ use nu_errors::ShellError;
 use nu_parser::SignatureRegistry;
 use nu_protocol::{
     hir::{
-        Binary, Block, ClassifiedCommand, Expression, Literal, NamedArguments, NamedValue,
-        Operator, Pipeline, SpannedExpression,
+        Binary, Block, ClassifiedCommand, Commands, Expression, Literal, NamedArguments,
+        NamedValue, Operator, SpannedExpression,
     },
     NamedType, PositionalType, Signature, SyntaxShape,
 };
@@ -318,7 +318,7 @@ fn spanned_to_binary(bin_spanned: &SpannedExpression) -> &Binary {
 ///Returns result shape of this math expr otherwise
 fn get_result_shape_of_math_expr(
     bin: &Binary,
-    (pipeline_idx, pipeline): (usize, &Pipeline),
+    (pipeline_idx, pipeline): (usize, &Commands),
     registry: &CommandRegistry,
 ) -> Result<Option<SyntaxShape>, ShellError> {
     let mut shapes: Vec<Option<SyntaxShape>> = vec![];
@@ -388,17 +388,15 @@ impl VarSyntaxShapeDeductor {
     fn infer_shape(&mut self, block: &Block, registry: &CommandRegistry) -> Result<(), ShellError> {
         trace!("Infering vars in shape");
-        for group in &block.block {
-            for pipeline in &group.pipelines {
+        for pipeline in &block.block {
             self.infer_pipeline(pipeline, registry)?;
         }
-        }
         Ok(())
     }
 
     pub fn infer_pipeline(
         &mut self,
-        pipeline: &Pipeline,
+        pipeline: &Commands,
         registry: &CommandRegistry,
     ) -> Result<(), ShellError> {
         trace!("Infering vars in pipeline");
@@ -536,7 +534,7 @@ impl VarSyntaxShapeDeductor {
     fn infer_shapes_in_expr(
         &mut self,
-        (pipeline_idx, pipeline): (usize, &Pipeline),
+        (pipeline_idx, pipeline): (usize, &Commands),
         spanned_expr: &SpannedExpression,
         registry: &CommandRegistry,
     ) -> Result<(), ShellError> {
@@ -662,7 +660,7 @@ impl VarSyntaxShapeDeductor {
         (var, expr): (&VarUsage, &SpannedExpression),
         //source_bin is binary having var on one and expr on other side
         source_bin: &SpannedExpression,
-        (pipeline_idx, pipeline): (usize, &Pipeline),
+        (pipeline_idx, pipeline): (usize, &Commands),
         registry: &CommandRegistry,
     ) -> Result<Option<SyntaxShape>, ShellError> {
         get_result_shape_of_math_expr(spanned_to_binary(expr), (pipeline_idx, pipeline), registry)
@@ -684,7 +682,7 @@ impl VarSyntaxShapeDeductor {
         (var, expr): (&VarUsage, &SpannedExpression),
         //source_bin is binary having var on one and expr on other side
         source_bin: &SpannedExpression,
-        (pipeline_idx, pipeline): (usize, &Pipeline),
+        (pipeline_idx, pipeline): (usize, &Commands),
         registry: &CommandRegistry,
     ) -> Result<Option<SyntaxShape>, ShellError> {
         trace!("Getting shape of binary arg {:?} for var {:?}", expr, var);
@@ -716,7 +714,7 @@ impl VarSyntaxShapeDeductor {
         var: &VarUsage,
         bin_spanned: &SpannedExpression,
         list: &[SpannedExpression],
-        (_pipeline_idx, _pipeline): (usize, &Pipeline),
+        (_pipeline_idx, _pipeline): (usize, &Commands),
         _registry: &CommandRegistry,
     ) -> Option<Vec<SyntaxShape>> {
         let shapes_in_list = list
@@ -742,7 +740,7 @@ impl VarSyntaxShapeDeductor {
         var_side: BinarySide,
         //Binary having expr on one side and var on other
         bin_spanned: &SpannedExpression,
-        (pipeline_idx, pipeline): (usize, &Pipeline),
+        (pipeline_idx, pipeline): (usize, &Commands),
         registry: &CommandRegistry,
     ) -> Result<(), ShellError> {
         trace!("Infering shapes between var {:?} and expr {:?}", var, expr);
@@ -895,7 +893,7 @@ impl VarSyntaxShapeDeductor {
     fn infer_shapes_in_binary_expr(
         &mut self,
-        (pipeline_idx, pipeline): (usize, &Pipeline),
+        (pipeline_idx, pipeline): (usize, &Commands),
         bin_spanned: &SpannedExpression,
         registry: &CommandRegistry,
     ) -> Result<(), ShellError> {


@@ -117,17 +117,6 @@ impl LiteGroup {
     pub fn push(&mut self, item: LitePipeline) {
         self.pipelines.push(item)
     }
-    pub fn is_comment(&self) -> bool {
-        if !self.is_empty()
-            && !self.pipelines[0].is_empty()
-            && !self.pipelines[0].commands.is_empty()
-            && !self.pipelines[0].commands[0].parts.is_empty()
-        {
-            self.pipelines[0].commands[0].parts[0].item.starts_with('#')
-        } else {
-            false
-        }
-    }
     pub(crate) fn span(&self) -> Span {
         let start = if !self.pipelines.is_empty() {
             self.pipelines[0].span().start()
@@ -359,9 +348,7 @@ fn group(tokens: Vec<Token>) -> (LiteBlock, Option<ParseError>) {
                     pipeline = LitePipeline::new();
                 }
                 if !group.is_empty() {
-                    if !group.is_comment() {
                     groups.push(group);
-                    }
                     group = LiteGroup::new();
                 }
             }
@@ -402,7 +389,7 @@ fn group(tokens: Vec<Token>) -> (LiteBlock, Option<ParseError>) {
     if !pipeline.is_empty() {
         group.push(pipeline);
     }
-    if !group.is_empty() && !group.is_comment() {
+    if !group.is_empty() {
         groups.push(group);
     }
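
With LiteGroup::is_comment gone, the lite parser no longer drops groups whose first token starts with '#'; under the restored behaviour, whole-line comments are filtered out before parsing, which is what the src/main.rs hunk at the end of this diff does. A rough sketch of that pre-filter as a free-standing helper (the helper name is hypothetical, not part of the commit):

// Hypothetical helper: drop whole-line '#' comments before the remaining
// lines are parsed and run one pipeline at a time.
fn strip_comment_lines(script: &str) -> Vec<String> {
    script
        .lines()
        .filter(|line| !line.starts_with('#'))
        .map(str::to_string)
        .collect()
}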


@@ -3,9 +3,9 @@ use std::path::Path;
 use log::trace;
 use nu_errors::{ArgumentError, ParseError};
 use nu_protocol::hir::{
-    self, Binary, Block, ClassifiedBlock, ClassifiedCommand, ClassifiedPipeline, Expression,
-    ExternalRedirection, Flag, FlagKind, Group, InternalCommand, Member, NamedArguments, Operator,
-    Pipeline, RangeOperator, SpannedExpression, Unit,
+    self, Binary, Block, ClassifiedBlock, ClassifiedCommand, ClassifiedPipeline, Commands,
+    Expression, ExternalRedirection, Flag, FlagKind, InternalCommand, Member, NamedArguments,
+    Operator, RangeOperator, SpannedExpression, Unit,
 };
 use nu_protocol::{NamedType, PositionalType, Signature, SyntaxShape, UnspannedPathMember};
 use nu_source::{Span, Spanned, SpannedItem};
@@ -584,7 +584,7 @@ fn parse_interpolated_string(
         }
     }
 
-    let pipelines = vec![Pipeline {
+    let block = vec![Commands {
         span: lite_arg.span,
         list: vec![ClassifiedCommand::Internal(InternalCommand {
             name: "build-string".to_owned(),
@@ -602,10 +602,8 @@ fn parse_interpolated_string(
         })],
     }];
 
-    let group = Group::new(pipelines, lite_arg.span);
-
     let call = SpannedExpression {
-        expr: Expression::Invocation(Block::new(vec![], vec![group], lite_arg.span)),
+        expr: Expression::Invocation(Block::new(vec![], block, lite_arg.span)),
         span: lite_arg.span,
     };
@@ -1343,14 +1341,10 @@ fn parse_positional_argument(
             parse_math_expression(idx, &lite_cmd.parts[idx..end_idx], registry, true);
 
         let span = arg.span;
-        let mut commands = hir::Pipeline::new(span);
+        let mut commands = hir::Commands::new(span);
         commands.push(ClassifiedCommand::Expr(Box::new(arg)));
 
-        let block = hir::Block::new(
-            vec![],
-            vec![Group::new(vec![commands], lite_cmd.span())],
-            span,
-        );
+        let block = hir::Block::new(vec![], vec![commands], span);
 
         let arg = SpannedExpression::new(Expression::Block(block), span);
@@ -1544,7 +1538,7 @@ fn classify_pipeline(
     lite_pipeline: &LitePipeline,
     registry: &dyn SignatureRegistry,
 ) -> (ClassifiedPipeline, Option<ParseError>) {
-    let mut commands = Pipeline::new(lite_pipeline.span());
+    let mut commands = Commands::new(lite_pipeline.span());
     let mut error = None;
 
     let mut iter = lite_pipeline.commands.iter().peekable();
@@ -1751,10 +1745,10 @@ fn expand_shorthand_forms(
 }
 
 pub fn classify_block(lite_block: &LiteBlock, registry: &dyn SignatureRegistry) -> ClassifiedBlock {
-    let mut block = vec![];
+    let mut command_list = vec![];
     let mut error = None;
     for lite_group in &lite_block.block {
-        let mut command_list = vec![];
         for lite_pipeline in &lite_group.pipelines {
             let (lite_pipeline, vars, err) = expand_shorthand_forms(lite_pipeline);
             if error.is_none() {
@@ -1765,8 +1759,7 @@ pub fn classify_block(lite_block: &LiteBlock, registry: &dyn SignatureRegistry)
             let pipeline = if let Some(vars) = vars {
                 let span = pipeline.commands.span;
-                let group = Group::new(vec![pipeline.commands.clone()], span);
-                let block = hir::Block::new(vec![], vec![group], span);
+                let block = hir::Block::new(vec![], vec![pipeline.commands.clone()], span);
                 let mut call = hir::Call::new(
                     Box::new(SpannedExpression {
                         expr: Expression::string("with-env".to_string()),
@@ -1799,7 +1792,7 @@ pub fn classify_block(lite_block: &LiteBlock, registry: &dyn SignatureRegistry)
                     args: call,
                 });
                 ClassifiedPipeline {
-                    commands: Pipeline {
+                    commands: Commands {
                         list: vec![classified_with_env],
                         span,
                     },
@@ -1813,10 +1806,8 @@ pub fn classify_block(lite_block: &LiteBlock, registry: &dyn SignatureRegistry)
                 error = err;
             }
         }
-        let group = Group::new(command_list, lite_block.span());
-        block.push(group);
     }
-    let block = Block::new(vec![], block, lite_block.span());
+    let block = Block::new(vec![], command_list, lite_block.span());
 
     ClassifiedBlock::new(block, error)
 }


@@ -88,8 +88,7 @@ pub fn expression_to_flat_shape(e: &SpannedExpression) -> Vec<Spanned<FlatShape>
 pub fn shapes(commands: &Block) -> Vec<Spanned<FlatShape>> {
     let mut output = vec![];
 
-    for group in &commands.block {
-        for pipeline in &group.pipelines {
+    for pipeline in &commands.block {
         for command in &pipeline.list {
             match command {
                 ClassifiedCommand::Internal(internal) => {
@@ -116,14 +115,11 @@ pub fn shapes(commands: &Block) -> Vec<Spanned<FlatShape>> {
                        }
                    }
                }
-                ClassifiedCommand::Expr(expr) => {
-                    output.append(&mut expression_to_flat_shape(expr))
-                }
+                ClassifiedCommand::Expr(expr) => output.append(&mut expression_to_flat_shape(expr)),
                 _ => {}
             }
         }
     }
-    }
 
     output
 }


@@ -62,11 +62,11 @@ impl ClassifiedBlock {
 
 #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)]
 pub struct ClassifiedPipeline {
-    pub commands: Pipeline,
+    pub commands: Commands,
 }
 
 impl ClassifiedPipeline {
-    pub fn new(commands: Pipeline) -> ClassifiedPipeline {
+    pub fn new(commands: Commands) -> ClassifiedPipeline {
         ClassifiedPipeline { commands }
     }
 }
@@ -92,14 +92,14 @@ impl ClassifiedCommand {
 }
 
 #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)]
-pub struct Pipeline {
+pub struct Commands {
     pub list: Vec<ClassifiedCommand>,
     pub span: Span,
 }
 
-impl Pipeline {
-    pub fn new(span: Span) -> Pipeline {
-        Pipeline { list: vec![], span }
+impl Commands {
+    pub fn new(span: Span) -> Commands {
+        Commands { list: vec![], span }
     }
 
     pub fn push(&mut self, command: ClassifiedCommand) {
@@ -111,34 +111,15 @@ impl Pipeline {
     }
 }
 
-#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)]
-pub struct Group {
-    pub pipelines: Vec<Pipeline>,
-    pub span: Span,
-}
-
-impl Group {
-    pub fn new(pipelines: Vec<Pipeline>, span: Span) -> Group {
-        Group { pipelines, span }
-    }
-
-    pub fn push(&mut self, pipeline: Pipeline) {
-        self.pipelines.push(pipeline);
-    }
-
-    pub fn has_it_usage(&self) -> bool {
-        self.pipelines.iter().any(|cc| cc.has_it_usage())
-    }
-}
-
 #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)]
 pub struct Block {
     pub params: Vec<String>,
-    pub block: Vec<Group>,
+    pub block: Vec<Commands>,
     pub span: Span,
 }
 
 impl Block {
-    pub fn new(params: Vec<String>, block: Vec<Group>, span: Span) -> Block {
+    pub fn new(params: Vec<String>, block: Vec<Commands>, span: Span) -> Block {
         let mut output = Block {
             params,
             block,
@@ -149,14 +130,13 @@ impl Block {
         output
     }
 
-    pub fn push(&mut self, group: Group) {
-        self.block.push(group);
+    pub fn push(&mut self, commands: Commands) {
+        self.block.push(commands);
         self.infer_params();
     }
 
     pub fn set_redirect(&mut self, external_redirection: ExternalRedirection) {
-        if let Some(group) = self.block.last_mut() {
-            if let Some(pipeline) = group.pipelines.last_mut() {
+        if let Some(pipeline) = self.block.last_mut() {
             if let Some(command) = pipeline.list.last_mut() {
                 if let ClassifiedCommand::Internal(internal) = command {
                     internal.args.external_redirection = external_redirection;
@@ -164,7 +144,6 @@ impl Block {
                 }
             }
         }
-        }
     }
 
     pub fn has_it_usage(&self) -> bool {
         self.block.iter().any(|x| x.has_it_usage())
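
These hir changes are the structural core of the revert: the Group type is deleted and Block.block goes back to holding pipelines (Commands) directly. A toy illustration of the restored layout, using plain strings as stand-ins for classified commands rather than the real nu_protocol types:

// Stand-in types only; the real definitions are in the nu_protocol::hir hunks above.
struct Commands {
    list: Vec<String>, // one entry per command in the pipeline
}

struct Block {
    block: Vec<Commands>, // restored layout: pipelines, with no Group layer
}

fn main() {
    // With the Group layer gone, walking every command is a single nested
    // loop again, which is what run_block, shapes(), and infer_shape() in the
    // earlier hunks go back to.
    let block = Block {
        block: vec![
            Commands { list: vec!["ls".into(), "sort-by name".into()] },
            Commands { list: vec!["echo done".into()] },
        ],
    };
    for pipeline in &block.block {
        for command in &pipeline.list {
            println!("{}", command);
        }
    }
}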


@@ -4,7 +4,7 @@ use nu_cli::create_default_context;
 use nu_cli::utils::test_bins as binaries;
 use std::error::Error;
 use std::fs::File;
-use std::io::prelude::*;
+use std::io::{prelude::*, BufReader};
 
 fn main() -> Result<(), Box<dyn Error>> {
     let matches = App::new("nushell")
@@ -124,12 +124,9 @@ fn main() -> Result<(), Box<dyn Error>> {
     match matches.values_of("commands") {
         None => {}
         Some(values) => {
-            let script_text: String = values
-                .map(|x| x.to_string())
-                .collect::<Vec<String>>()
-                .join("\n");
-            futures::executor::block_on(nu_cli::run_script_file(
-                script_text,
+            let pipelines: Vec<String> = values.map(|x| x.to_string()).collect();
+            futures::executor::block_on(nu_cli::run_vec_of_pipelines(
+                pipelines,
                 matches.is_present("stdin"),
             ))?;
             return Ok(());
@@ -138,12 +135,25 @@ fn main() -> Result<(), Box<dyn Error>> {
     match matches.value_of("script") {
         Some(script) => {
-            let mut file = File::open(script)?;
-            let mut buffer = String::new();
-            file.read_to_string(&mut buffer)?;
+            let file = File::open(script)?;
+            let reader = BufReader::new(file);
+            let pipelines: Vec<String> = reader
+                .lines()
+                .filter_map(|x| {
+                    if let Ok(x) = x {
+                        if !x.starts_with('#') {
+                            Some(x)
+                        } else {
+                            None
+                        }
+                    } else {
+                        None
+                    }
+                })
+                .collect();
 
-            futures::executor::block_on(nu_cli::run_script_file(
-                buffer,
+            futures::executor::block_on(nu_cli::run_vec_of_pipelines(
+                pipelines,
                 matches.is_present("stdin"),
             ))?;
             return Ok(());
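
Taken together, the restored script path reads the file line by line, drops '#' lines, and hands the rest to run_vec_of_pipelines. A compact sketch of the same flow under the signatures shown in this diff, not the literal source; the nested filter_map above is collapsed with Result::ok, and the function name is hypothetical:

use std::error::Error;
use std::fs::File;
use std::io::{BufRead, BufReader};

fn run_script(path: &str, redirect_stdin: bool) -> Result<(), Box<dyn Error>> {
    let pipelines: Vec<String> = BufReader::new(File::open(path)?)
        .lines()
        .filter_map(Result::ok)                // skip unreadable lines
        .filter(|line| !line.starts_with('#')) // skip whole-line comments
        .collect();

    futures::executor::block_on(nu_cli::run_vec_of_pipelines(
        pipelines,
        redirect_stdin,
    ))?;
    Ok(())
}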