From 8bd035f51d9f754be00ce810bcf35151af7105a9 Mon Sep 17 00:00:00 2001 From: Fahmi Akbar Wildana Date: Mon, 7 Oct 2019 02:43:28 +0700 Subject: [PATCH 001/184] ci(github): renew trigger definition There is an update in workflow syntax docs https://help.github.com/en/articles/workflow-syntax-for-github-actions#filter-pattern-cheat-sheet --- .github/workflows/docker-publish.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml index e84cefd3ab..34c40d5ced 100644 --- a/.github/workflows/docker-publish.yml +++ b/.github/workflows/docker-publish.yml @@ -2,7 +2,7 @@ name: Publish consumable Docker images on: push: - tags: ['*.*.*'] + tags: ['v?[0-9]+.[0-9]+.[0-9]+*'] jobs: compile: From b1637751124eb970b849465303a1d6f080e4b563 Mon Sep 17 00:00:00 2001 From: Fahmi Akbar Wildana Date: Mon, 7 Oct 2019 02:53:23 +0700 Subject: [PATCH 002/184] =?UTF-8?q?ci(github):=20install=20cross=20from=20?= =?UTF-8?q?release=20page=20=E2=9A=A1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Instead of compiling `cross` via `cargo install`, downloading binary executable from release page will speedup the CI --- .github/workflows/docker-publish.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml index 34c40d5ced..a0a027ff53 100644 --- a/.github/workflows/docker-publish.yml +++ b/.github/workflows/docker-publish.yml @@ -14,7 +14,11 @@ jobs: - x86_64-unknown-linux-gnu steps: - uses: actions/checkout@v1 - - run: cargo install cross + - name: Install rust-embedded/cross + env: { VERSION: v0.1.16 } + run: >- + wget -nv https://github.com/rust-embedded/cross/releases/download/${VERSION}/cross-${VERSION}-x86_64-unknown-linux-gnu.tar.gz + -O- | sudo tar xz -C /usr/local/bin/ - name: compile for specific target env: { arch: '${{ matrix.arch }}' } run: | From 
b13439431981b308ddea588144cb595883c30cca Mon Sep 17 00:00:00 2001 From: Fahmi Akbar Wildana Date: Mon, 7 Oct 2019 03:08:10 +0700 Subject: [PATCH 003/184] ci(github): refactor docker related run scripts --- .github/workflows/docker-publish.yml | 46 ++++++++++++++++++---------- 1 file changed, 30 insertions(+), 16 deletions(-) diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml index a0a027ff53..424d833b3a 100644 --- a/.github/workflows/docker-publish.yml +++ b/.github/workflows/docker-publish.yml @@ -62,13 +62,13 @@ jobs: - uses: actions/download-artifact@master with: { name: '${{ matrix.arch }}', path: target/release } - name: Build and publish exact version - run: | + run: |- REGISTRY=${REGISTRY,,}; export TAG=${GITHUB_REF##*/}-${{ matrix.tag }}; export NU_BINS=target/release/$( [ ${{ matrix.plugin }} = true ] && echo nu* || echo nu ) export PATCH=$([ ${{ matrix.use-patch }} = true ] && echo .${{ matrix.tag }} || echo '') chmod +x $NU_BINS - echo ${{ secrets.DOCKER_REGISTRY }} | docker login docker.pkg.github.com -u ${{ github.actor }} --password-stdin + echo ${{ secrets.DOCKER_REGISTRY }} | docker login ${REGISTRY%%/*} -u ${{ github.actor }} --password-stdin docker-compose --file docker/docker-compose.package.yml build docker-compose --file docker/docker-compose.package.yml push # exact version env: @@ -77,26 +77,40 @@ jobs: #region semantics tagging - name: Retag and push without suffixing version - run: | + run: |- VERSION=${GITHUB_REF##*/} + + latest_version=${VERSION%%%.*}-${{ matrix.tag }} + latest_feature=${VERSION%%.*}-${{ matrix.tag }} + latest_patch=${VERSION%.*}-${{ matrix.tag }} + exact_version=${VERSION}-${{ matrix.tag }} + + tags=( latest_version latest_feature latest_patch exact_version ) + + for tag in ${tags[@]}; do + docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${tag} + docker push ${REGISTRY,,}/nu:${tag} + done + + # latest version docker tag ${REGISTRY,,}/nu:${VERSION}-${{ 
matrix.tag }} ${REGISTRY,,}/nu:${{ matrix.tag }} - docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${VERSION%%.*}-${{ matrix.tag }} - docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${VERSION%.*}-${{ matrix.tag }} - docker push ${REGISTRY,,}/nu:${VERSION%.*}-${{ matrix.tag }} # latest patch - docker push ${REGISTRY,,}/nu:${VERSION%%.*}-${{ matrix.tag }} # latest features - docker push ${REGISTRY,,}/nu:${{ matrix.tag }} # latest version + docker push ${REGISTRY,,}/nu:${{ matrix.tag }} env: { REGISTRY: 'docker.pkg.github.com/${{ github.repository }}' } - name: Retag and push debian as latest if: matrix.tag == 'debian' - run: | + run: |- VERSION=${GITHUB_REF##*/} + + # ${latest features} ${latest patch} ${exact version} + tags=( ${VERSION%%.*} ${VERSION%.*} ${VERSION} ) + + for tag in ${tags[@]}; do + docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${tag} + docker push ${REGISTRY,,}/nu:${tag} + done + + # latest version docker tag ${REGISTRY,,}/nu:${{ matrix.tag }} ${REGISTRY,,}/nu:latest - docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${VERSION%.*} - docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${VERSION%%.*} - docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${VERSION} - docker push ${REGISTRY,,}/nu:${VERSION} # exact version - docker push ${REGISTRY,,}/nu:${VERSION%%.*} # latest features - docker push ${REGISTRY,,}/nu:${VERSION%.*} # latest patch - docker push ${REGISTRY,,}/nu:latest # latest version + docker push ${REGISTRY,,}/nu:latest env: { REGISTRY: 'docker.pkg.github.com/${{ github.repository }}' } #endregion semantics tagging From 93ae5043cc996f632e3de8de7fb383698016752e Mon Sep 17 00:00:00 2001 From: Fahmi Akbar Wildana Date: Mon, 7 Oct 2019 03:53:07 +0700 Subject: [PATCH 004/184] ci(github): change REGISTRY to quay.io --- .github/workflows/docker-publish.yml | 13 +++++++++---- 1 file changed, 9 
insertions(+), 4 deletions(-) diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml index 424d833b3a..d54ccea3b1 100644 --- a/.github/workflows/docker-publish.yml +++ b/.github/workflows/docker-publish.yml @@ -68,12 +68,13 @@ jobs: export PATCH=$([ ${{ matrix.use-patch }} = true ] && echo .${{ matrix.tag }} || echo '') chmod +x $NU_BINS - echo ${{ secrets.DOCKER_REGISTRY }} | docker login ${REGISTRY%%/*} -u ${{ github.actor }} --password-stdin + echo ${{ secrets.DOCKER_REGISTRY }} | docker login ${REGISTRY%/*} -u ${{ github.actor }} --password-stdin docker-compose --file docker/docker-compose.package.yml build docker-compose --file docker/docker-compose.package.yml push # exact version env: BASE_IMAGE: ${{ matrix.base-image }} - REGISTRY: docker.pkg.github.com/${{ github.repository }} + # REGISTRY: docker.pkg.github.com/${{ github.repository }} #TODO: waiting support for GITHUB_TOKEN for docker.pkg.github.com + REGISTRY: quay.io/${{ github.actor }} #region semantics tagging - name: Retag and push without suffixing version @@ -95,7 +96,9 @@ jobs: # latest version docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${{ matrix.tag }} docker push ${REGISTRY,,}/nu:${{ matrix.tag }} - env: { REGISTRY: 'docker.pkg.github.com/${{ github.repository }}' } + env: + # REGISTRY: 'docker.pkg.github.com/${{ github.repository }}' #TODO: waiting support for GITHUB_TOKEN for docker.pkg.github.com + REGISTRY: quay.io/${{ github.actor }} - name: Retag and push debian as latest if: matrix.tag == 'debian' run: |- @@ -112,5 +115,7 @@ jobs: # latest version docker tag ${REGISTRY,,}/nu:${{ matrix.tag }} ${REGISTRY,,}/nu:latest docker push ${REGISTRY,,}/nu:latest - env: { REGISTRY: 'docker.pkg.github.com/${{ github.repository }}' } + env: + # REGISTRY: 'docker.pkg.github.com/${{ github.repository }}' #TODO: waiting support for GITHUB_TOKEN for docker.pkg.github.com + REGISTRY: quay.io/${{ github.actor }} #endregion semantics tagging 
From e72bc8ea8b2a17a7fbc2944c28e4bc37938c01b9 Mon Sep 17 00:00:00 2001 From: Odin Dutton Date: Tue, 8 Oct 2019 10:16:25 +1100 Subject: [PATCH 005/184] Remove unneeded - --- docs/commands/cd.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/commands/cd.md b/docs/commands/cd.md index 377733ba8f..b964be50c9 100644 --- a/docs/commands/cd.md +++ b/docs/commands/cd.md @@ -2,7 +2,7 @@ If you didn't already know, the `cd` command is very simple. It stands for 'change directory' and it does exactly that. It changes the current directory to the one specified. If no directory is specified, it takes you to the home directory. Additionally, using `cd ..` takes you to the parent directory. -## Examples - +## Examples ```shell /home/username> cd Desktop From 77c34acb0318dab540b7e9677211f75e2a6b34c0 Mon Sep 17 00:00:00 2001 From: Odin Dutton Date: Tue, 8 Oct 2019 10:17:46 +1100 Subject: [PATCH 006/184] Whitespace --- docs/commands/cd.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/commands/cd.md b/docs/commands/cd.md index b964be50c9..535f1d16eb 100644 --- a/docs/commands/cd.md +++ b/docs/commands/cd.md @@ -21,5 +21,5 @@ If you didn't already know, the `cd` command is very simple. It stands for 'chan /home/username/Desktop/super/duper/crazy/nested/folders> cd /home/username> cd ../../usr /usr> cd -/home/username> +/home/username> ``` From 4d7025569603e20c446dbdf88bc86af4a5c78542 Mon Sep 17 00:00:00 2001 From: Odin Dutton Date: Tue, 8 Oct 2019 10:17:51 +1100 Subject: [PATCH 007/184] Add documentation for `cd -` --- docs/commands/cd.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docs/commands/cd.md b/docs/commands/cd.md index 535f1d16eb..2e5d933f47 100644 --- a/docs/commands/cd.md +++ b/docs/commands/cd.md @@ -23,3 +23,11 @@ If you didn't already know, the `cd` command is very simple. 
It stands for 'chan /usr> cd /home/username> ``` + +Using `cd -` will take you to the previous directory: + +```shell +/home/username/Desktop/super/duper/crazy/nested/folders> cd +/home/username> cd - +/home/username/Desktop/super/duper/crazy/nested/folders> cd +``` From 3e14de158beea945dcfe6ea9aa2ac7e603be38c1 Mon Sep 17 00:00:00 2001 From: Fahmi Akbar Wildana Date: Wed, 9 Oct 2019 09:58:49 +0700 Subject: [PATCH 008/184] fix(ci): can't push to quay.io (#1) * ci(github): lowercase ${{ github.actor }} * ci(github): fix robot username * fix(ci): fix tag name on suffixed version --- .github/workflows/docker-publish.yml | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml index d54ccea3b1..0deca68a42 100644 --- a/.github/workflows/docker-publish.yml +++ b/.github/workflows/docker-publish.yml @@ -63,21 +63,22 @@ jobs: with: { name: '${{ matrix.arch }}', path: target/release } - name: Build and publish exact version run: |- - REGISTRY=${REGISTRY,,}; export TAG=${GITHUB_REF##*/}-${{ matrix.tag }}; + REGISTRY=${REGISTRY,,}; export TAG=${GITHUB_REF##*/}-${{ matrix.tag }} export NU_BINS=target/release/$( [ ${{ matrix.plugin }} = true ] && echo nu* || echo nu ) export PATCH=$([ ${{ matrix.use-patch }} = true ] && echo .${{ matrix.tag }} || echo '') chmod +x $NU_BINS - echo ${{ secrets.DOCKER_REGISTRY }} | docker login ${REGISTRY%/*} -u ${{ github.actor }} --password-stdin + echo ${{ secrets.DOCKER_REGISTRY }} | docker login ${REGISTRY%/*} -u ${USER,,} --password-stdin docker-compose --file docker/docker-compose.package.yml build docker-compose --file docker/docker-compose.package.yml push # exact version env: BASE_IMAGE: ${{ matrix.base-image }} # REGISTRY: docker.pkg.github.com/${{ github.repository }} #TODO: waiting support for GITHUB_TOKEN for docker.pkg.github.com + USER: ${{ github.actor }}+action REGISTRY: quay.io/${{ github.actor }} #region semantics tagging - - name: 
Retag and push without suffixing version + - name: Retag and push with suffixed version run: |- VERSION=${GITHUB_REF##*/} @@ -86,7 +87,7 @@ jobs: latest_patch=${VERSION%.*}-${{ matrix.tag }} exact_version=${VERSION}-${{ matrix.tag }} - tags=( latest_version latest_feature latest_patch exact_version ) + tags=( ${latest_version} ${latest_feature} ${latest_patch} ${exact_version} ) for tag in ${tags[@]}; do docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${tag} From 1ad9d6f199556c706812d992f22a7a91e8668259 Mon Sep 17 00:00:00 2001 From: Yehuda Katz Date: Tue, 17 Sep 2019 15:26:27 -0700 Subject: [PATCH 009/184] Overhaul the expansion system The main thrust of this (very large) commit is an overhaul of the expansion system. The parsing pipeline is: - Lightly parse the source file for atoms, basic delimiters and pipeline structure into a token tree - Expand the token tree into a HIR (high-level intermediate representation) based upon the baseline syntax rules for expressions and the syntactic shape of commands. Somewhat non-traditionally, nu doesn't have an AST at all. It goes directly from the token tree, which doesn't represent many important distinctions (like the difference between `hello` and `5KB`) directly into a high-level representation that doesn't have a direct correspondence to the source code. At a high level, nu commands work like macros, in the sense that the syntactic shape of the invocation of a command depends on the definition of a command. However, commands do not have the ability to perform unrestricted expansions of the token tree. Instead, they describe their arguments in terms of syntactic shapes, and the expander expands the token tree into HIR based upon that definition. For example, the `where` command says that it takes a block as its first required argument, and the description of the block syntactic shape expands the syntax `cpu > 10` into HIR that represents `{ $it.cpu > 10 }`. 
This commit overhauls that system so that the syntactic shapes are described in terms of a few new traits (`ExpandSyntax` and `ExpandExpression` are the primary ones) that are more composable than the previous system. The first big win of this new system is the addition of the `ColumnPath` shape, which looks like `cpu."max ghz"` or `package.version`. Previously, while a variable path could look like `$it.cpu."max ghz"`, the tail of a variable path could not be easily reused in other contexts. Now, that tail is its own syntactic shape, and it can be used as part of a command's signature. This cleans up commands like `inc`, `add` and `edit` as well as shorthand blocks, which can now look like `| where cpu."max ghz" > 10` --- Cargo.lock | 22 + Cargo.toml | 3 + src/cli.rs | 190 +- src/commands.rs | 1 + src/commands/autoview.rs | 33 +- src/commands/classified.rs | 35 +- src/commands/command.rs | 9 + src/commands/echo.rs | 9 +- src/commands/enter.rs | 10 +- src/commands/fetch.rs | 2 +- src/commands/first.rs | 2 +- src/commands/get.rs | 61 +- src/commands/open.rs | 2 +- src/commands/save.rs | 7 +- src/commands/skip_while.rs | 3 + src/commands/tags.rs | 4 +- src/context.rs | 24 +- src/data/base.rs | 133 +- src/data/meta.rs | 123 +- src/errors.rs | 31 +- src/evaluate/evaluator.rs | 30 +- src/lib.rs | 2 +- src/parser.rs | 6 +- src/parser/deserializer.rs | 9 +- src/parser/hir.rs | 138 +- src/parser/hir/baseline_parse.rs | 142 +- src/parser/hir/baseline_parse/tests.rs | 144 ++ src/parser/hir/baseline_parse_tokens.rs | 459 ----- src/parser/hir/binary.rs | 6 + src/parser/hir/expand_external_tokens.rs | 87 + src/parser/hir/external_command.rs | 2 +- src/parser/hir/path.rs | 34 +- src/parser/hir/syntax_shape.rs | 662 +++++++ src/parser/hir/syntax_shape/block.rs | 168 ++ src/parser/hir/syntax_shape/expression.rs | 188 ++ .../hir/syntax_shape/expression/delimited.rs | 38 + .../hir/syntax_shape/expression/file_path.rs | 59 + .../hir/syntax_shape/expression/list.rs | 43 + 
.../hir/syntax_shape/expression/number.rs | 97 + .../hir/syntax_shape/expression/pattern.rs | 86 + .../hir/syntax_shape/expression/string.rs | 60 + .../hir/syntax_shape/expression/unit.rs | 89 + .../syntax_shape/expression/variable_path.rs | 396 ++++ src/parser/hir/tokens_iterator.rs | 365 ++++ src/parser/hir/tokens_iterator/debug.rs | 30 + src/parser/parse/files.rs | 29 +- src/parser/parse/operator.rs | 3 + src/parser/parse/parser.rs | 1586 ++++++++--------- src/parser/parse/pipeline.rs | 27 +- src/parser/parse/token_tree.rs | 112 +- src/parser/parse/token_tree_builder.rs | 110 +- src/parser/parse/tokens.rs | 11 +- src/parser/parse_command.rs | 155 +- src/parser/registry.rs | 19 +- src/plugins/add.rs | 23 +- src/plugins/edit.rs | 18 +- src/plugins/inc.rs | 37 +- src/plugins/str.rs | 47 +- src/shell/helper.rs | 38 +- tests/command_open_tests.rs | 2 +- tests/helpers/mod.rs | 1 + 61 files changed, 4310 insertions(+), 1952 deletions(-) create mode 100644 src/parser/hir/baseline_parse/tests.rs delete mode 100644 src/parser/hir/baseline_parse_tokens.rs create mode 100644 src/parser/hir/expand_external_tokens.rs create mode 100644 src/parser/hir/syntax_shape.rs create mode 100644 src/parser/hir/syntax_shape/block.rs create mode 100644 src/parser/hir/syntax_shape/expression.rs create mode 100644 src/parser/hir/syntax_shape/expression/delimited.rs create mode 100644 src/parser/hir/syntax_shape/expression/file_path.rs create mode 100644 src/parser/hir/syntax_shape/expression/list.rs create mode 100644 src/parser/hir/syntax_shape/expression/number.rs create mode 100644 src/parser/hir/syntax_shape/expression/pattern.rs create mode 100644 src/parser/hir/syntax_shape/expression/string.rs create mode 100644 src/parser/hir/syntax_shape/expression/unit.rs create mode 100644 src/parser/hir/syntax_shape/expression/variable_path.rs create mode 100644 src/parser/hir/tokens_iterator.rs create mode 100644 src/parser/hir/tokens_iterator/debug.rs diff --git a/Cargo.lock b/Cargo.lock index 
852fbd6103..af1d46aa0e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1491,6 +1491,25 @@ dependencies = [ "version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "nom-tracable" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "nom 5.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "nom-tracable-macros 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "nom_locate 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "nom-tracable-macros" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "nom_locate" version = "1.0.0" @@ -1550,6 +1569,7 @@ dependencies = [ "natural 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "neso 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", "nom 5.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "nom-tracable 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "nom_locate 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "num-bigint 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -3140,6 +3160,8 @@ dependencies = [ "checksum nodrop 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "2f9667ddcc6cc8a43afc9b7917599d7216aa09c463919ea32c59ed6cac8bc945" "checksum nom 4.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2ad2a91a8e869eeb30b9cb3119ae87773a8f4ae617f41b1eb9c154b2905f7bd6" "checksum nom 5.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e9761d859320e381010a4f7f8ed425f2c924de33ad121ace447367c713ad561b" +"checksum nom-tracable 0.4.0 
(registry+https://github.com/rust-lang/crates.io-index)" = "edaa64ad2837d831d4a17966c9a83aa5101cc320730f5b724811c8f7442a2528" +"checksum nom-tracable-macros 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fd25f70877a9fe68bd406b3dd3ff99e94ce9de776cf2a96e0d99de90b53d4765" "checksum nom_locate 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f932834fd8e391fc7710e2ba17e8f9f8645d846b55aa63207e17e110a1e1ce35" "checksum ntapi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f26e041cd983acbc087e30fcba770380cfa352d0e392e175b2344ebaf7ea0602" "checksum num-bigint 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f9c3f34cdd24f334cb265d9bf8bfa8a241920d026916785747a92f0e55541a1a" diff --git a/Cargo.toml b/Cargo.toml index f51ea06d8a..66bd695c08 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -55,6 +55,7 @@ surf = "1.0.2" url = "2.1.0" roxmltree = "0.7.0" nom_locate = "1.0.0" +nom-tracable = "0.4.0" enum-utils = "0.1.1" unicode-xid = "0.2.0" serde_ini = "0.2.0" @@ -95,6 +96,8 @@ textview = ["syntect", "onig_sys", "crossterm"] binaryview = ["image", "crossterm"] sys = ["heim", "battery"] ps = ["heim"] +trace = ["nom-tracable/trace"] +all = ["raw-key", "textview", "binaryview", "sys", "ps", "clipboard", "ptree"] [dependencies.rusqlite] version = "0.20.0" diff --git a/src/cli.rs b/src/cli.rs index 38e2474faf..6a35608d91 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -1,4 +1,3 @@ -use crate::commands::autoview; use crate::commands::classified::{ ClassifiedCommand, ClassifiedInputStream, ClassifiedPipeline, ExternalCommand, InternalCommand, StreamNext, @@ -13,7 +12,12 @@ pub(crate) use crate::errors::ShellError; use crate::fuzzysearch::{interactive_fuzzy_search, SelectionResult}; use crate::git::current_branch; use crate::parser::registry::Signature; -use crate::parser::{hir, CallNode, Pipeline, PipelineElement, TokenNode}; +use crate::parser::{ + hir, + hir::syntax_shape::{CommandHeadShape, CommandSignature, ExpandSyntax}, 
+ hir::{expand_external_tokens::expand_external_tokens, tokens_iterator::TokensIterator}, + parse_command_tail, Pipeline, PipelineElement, TokenNode, +}; use crate::prelude::*; use log::{debug, trace}; @@ -25,6 +29,7 @@ use std::io::{BufRead, BufReader, Write}; use std::iter::Iterator; use std::path::PathBuf; use std::sync::atomic::{AtomicBool, Ordering}; +use std::sync::Arc; #[derive(Debug)] pub enum MaybeOwned<'a, T> { @@ -75,7 +80,7 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel let name = params.name.clone(); let fname = fname.to_string(); - if context.has_command(&name) { + if let Some(_) = context.get_command(&name) { trace!("plugin {:?} already loaded.", &name); } else { if params.is_filter { @@ -428,21 +433,11 @@ pub async fn cli() -> Result<(), Box> { } } - LineResult::Error(mut line, err) => { + LineResult::Error(line, err) => { rl.add_history_entry(line.clone()); - let diag = err.to_diagnostic(); + context.with_host(|host| { - let writer = host.err_termcolor(); - line.push_str(" "); - let files = crate::parser::Files::new(line); - let _ = std::panic::catch_unwind(move || { - let _ = language_reporting::emit( - &mut writer.lock(), - &files, - &diag, - &language_reporting::DefaultConfig, - ); - }); + print_err(err, host, &Text::from(line)); }) } @@ -459,6 +454,14 @@ pub async fn cli() -> Result<(), Box> { Ok(()) } +fn chomp_newline(s: &str) -> &str { + if s.ends_with('\n') { + &s[..s.len() - 1] + } else { + s + } +} + enum LineResult { Success(String), Error(String, ShellError), @@ -471,9 +474,11 @@ async fn process_line(readline: Result, ctx: &mut Context Ok(line) if line.trim() == "" => LineResult::Success(line.clone()), Ok(line) => { - let result = match crate::parser::parse(&line, uuid::Uuid::nil()) { + let line = chomp_newline(line); + + let result = match crate::parser::parse(&line, uuid::Uuid::new_v4()) { Err(err) => { - return LineResult::Error(line.clone(), err); + return LineResult::Error(line.to_string(), 
err); } Ok(val) => val, @@ -484,7 +489,7 @@ async fn process_line(readline: Result, ctx: &mut Context let mut pipeline = match classify_pipeline(&result, ctx, &Text::from(line)) { Ok(pipeline) => pipeline, - Err(err) => return LineResult::Error(line.clone(), err), + Err(err) => return LineResult::Error(line.to_string(), err), }; match pipeline.commands.last() { @@ -492,7 +497,7 @@ async fn process_line(readline: Result, ctx: &mut Context _ => pipeline .commands .push(ClassifiedCommand::Internal(InternalCommand { - command: whole_stream_command(autoview::Autoview), + name: "autoview".to_string(), name_tag: Tag::unknown(), args: hir::Call::new( Box::new(hir::Expression::synthetic_string("autoview")), @@ -514,16 +519,24 @@ async fn process_line(readline: Result, ctx: &mut Context input = match (item, next) { (None, _) => break, + (Some(ClassifiedCommand::Dynamic(_)), _) + | (_, Some(ClassifiedCommand::Dynamic(_))) => { + return LineResult::Error( + line.to_string(), + ShellError::unimplemented("Dynamic commands"), + ) + } + (Some(ClassifiedCommand::Expr(_)), _) => { return LineResult::Error( - line.clone(), + line.to_string(), ShellError::unimplemented("Expression-only commands"), ) } (_, Some(ClassifiedCommand::Expr(_))) => { return LineResult::Error( - line.clone(), + line.to_string(), ShellError::unimplemented("Expression-only commands"), ) } @@ -536,7 +549,7 @@ async fn process_line(readline: Result, ctx: &mut Context .await { Ok(val) => ClassifiedInputStream::from_input_stream(val), - Err(err) => return LineResult::Error(line.clone(), err), + Err(err) => return LineResult::Error(line.to_string(), err), }, (Some(ClassifiedCommand::Internal(left)), Some(_)) => { @@ -545,7 +558,7 @@ async fn process_line(readline: Result, ctx: &mut Context .await { Ok(val) => ClassifiedInputStream::from_input_stream(val), - Err(err) => return LineResult::Error(line.clone(), err), + Err(err) => return LineResult::Error(line.to_string(), err), } } @@ -555,7 +568,7 @@ async fn 
process_line(readline: Result, ctx: &mut Context .await { Ok(val) => ClassifiedInputStream::from_input_stream(val), - Err(err) => return LineResult::Error(line.clone(), err), + Err(err) => return LineResult::Error(line.to_string(), err), } } @@ -564,20 +577,20 @@ async fn process_line(readline: Result, ctx: &mut Context Some(ClassifiedCommand::External(_)), ) => match left.run(ctx, input, StreamNext::External).await { Ok(val) => val, - Err(err) => return LineResult::Error(line.clone(), err), + Err(err) => return LineResult::Error(line.to_string(), err), }, (Some(ClassifiedCommand::External(left)), Some(_)) => { match left.run(ctx, input, StreamNext::Internal).await { Ok(val) => val, - Err(err) => return LineResult::Error(line.clone(), err), + Err(err) => return LineResult::Error(line.to_string(), err), } } (Some(ClassifiedCommand::External(left)), None) => { match left.run(ctx, input, StreamNext::Last).await { Ok(val) => val, - Err(err) => return LineResult::Error(line.clone(), err), + Err(err) => return LineResult::Error(line.to_string(), err), } } }; @@ -585,7 +598,7 @@ async fn process_line(readline: Result, ctx: &mut Context is_first_command = false; } - LineResult::Success(line.clone()) + LineResult::Success(line.to_string()) } Err(ReadlineError::Interrupted) => LineResult::CtrlC, Err(ReadlineError::Eof) => LineResult::Break, @@ -616,80 +629,91 @@ fn classify_pipeline( } fn classify_command( - command: &PipelineElement, + command: &Tagged, context: &Context, source: &Text, ) -> Result { - let call = command.call(); + let mut iterator = TokensIterator::new(&command.tokens.item, command.tag, true); + + let head = CommandHeadShape + .expand_syntax(&mut iterator, &context.expand_context(source, command.tag))?; + + match &head { + CommandSignature::Expression(_) => Err(ShellError::syntax_error( + "Unexpected expression in command position".tagged(command.tag), + )), - match call { // If the command starts with `^`, treat it as an external command no matter what - 
call if call.head().is_external() => { - let name_tag = call.head().expect_external(); - let name = name_tag.slice(source); + CommandSignature::External(name) => { + let name_str = name.slice(source); - Ok(external_command(call, source, name.tagged(name_tag))) + external_command(&mut iterator, source, name_str.tagged(name)) } - // Otherwise, if the command is a bare word, we'll need to triage it - call if call.head().is_bare() => { - let head = call.head(); - let name = head.source(source); + CommandSignature::LiteralExternal { outer, inner } => { + let name_str = inner.slice(source); - match context.has_command(name) { - // if the command is in the registry, it's an internal command - true => { - let command = context.get_command(name); - let config = command.signature(); - - trace!(target: "nu::build_pipeline", "classifying {:?}", config); - - let args: hir::Call = config.parse_args(call, &context, source)?; - - trace!(target: "nu::build_pipeline", "args :: {}", args.debug(source)); - - Ok(ClassifiedCommand::Internal(InternalCommand { - command, - name_tag: head.tag(), - args, - })) - } - - // otherwise, it's an external command - false => Ok(external_command(call, source, name.tagged(head.tag()))), - } + external_command(&mut iterator, source, name_str.tagged(outer)) } - // If the command is something else (like a number or a variable), that is currently unsupported. - // We might support `$somevar` as a curried command in the future. 
- call => Err(ShellError::invalid_command(call.head().tag())), + CommandSignature::Internal(command) => { + let tail = parse_command_tail( + &command.signature(), + &context.expand_context(source, command.tag), + &mut iterator, + command.tag, + )?; + + let (positional, named) = match tail { + None => (None, None), + Some((positional, named)) => (positional, named), + }; + + let call = hir::Call { + head: Box::new(head.to_expression()), + positional, + named, + }; + + Ok(ClassifiedCommand::Internal(InternalCommand::new( + command.name().to_string(), + command.tag, + call, + ))) + } } } // Classify this command as an external command, which doesn't give special meaning // to nu syntactic constructs, and passes all arguments to the external command as // strings. -fn external_command( - call: &Tagged, +pub(crate) fn external_command( + tokens: &mut TokensIterator, source: &Text, name: Tagged<&str>, -) -> ClassifiedCommand { - let arg_list_strings: Vec> = match call.children() { - Some(args) => args - .iter() - .filter_map(|i| match i { - TokenNode::Whitespace(_) => None, - other => Some(other.as_external_arg(source).tagged(other.tag())), - }) - .collect(), - None => vec![], - }; +) -> Result { + let arg_list_strings = expand_external_tokens(tokens, source)?; - let (name, tag) = name.into_parts(); - - ClassifiedCommand::External(ExternalCommand { + Ok(ClassifiedCommand::External(ExternalCommand { name: name.to_string(), - name_tag: tag, + name_tag: name.tag(), args: arg_list_strings, - }) + })) +} + +pub fn print_err(err: ShellError, host: &dyn Host, source: &Text) { + let diag = err.to_diagnostic(); + + let writer = host.err_termcolor(); + let mut source = source.to_string(); + source.push_str(" "); + let files = crate::parser::Files::new(source); + let _ = std::panic::catch_unwind(move || { + let _ = language_reporting::emit( + &mut writer.lock(), + &files, + &diag, + &language_reporting::DefaultConfig, + ); + }); } diff --git a/src/commands.rs b/src/commands.rs 
index 72c07e38e6..4eb733edd4 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -75,6 +75,7 @@ pub(crate) use command::{ UnevaluatedCallInfo, WholeStreamCommand, }; +pub(crate) use classified::ClassifiedCommand; pub(crate) use config::Config; pub(crate) use cp::Cpy; pub(crate) use date::Date; diff --git a/src/commands/autoview.rs b/src/commands/autoview.rs index a0e7e9a8a5..57ab6269b3 100644 --- a/src/commands/autoview.rs +++ b/src/commands/autoview.rs @@ -58,21 +58,21 @@ pub fn autoview( } } }; - } else if is_single_anchored_text_value(&input) { - let text = context.get_command("textview"); - if let Some(text) = text { - let result = text.run(raw.with_input(input), &context.commands, false); - result.collect::>().await; - } else { - for i in input { - match i.item { - Value::Primitive(Primitive::String(s)) => { - println!("{}", s); - } - _ => {} - } - } - } + // } else if is_single_origined_text_value(&input) { + // let text = context.get_command("textview"); + // if let Some(text) = text { + // let result = text.run(raw.with_input(input), &context.commands); + // result.collect::>().await; + // } else { + // for i in input { + // match i.item { + // Value::Primitive(Primitive::String(s)) => { + // println!("{}", s); + // } + // _ => {} + // } + // } + // } } else if is_single_text_value(&input) { for i in input { match i.item { @@ -111,7 +111,8 @@ fn is_single_text_value(input: &Vec>) -> bool { } } -fn is_single_anchored_text_value(input: &Vec>) -> bool { +#[allow(unused)] +fn is_single_origined_text_value(input: &Vec>) -> bool { if input.len() != 1 { return false; } diff --git a/src/commands/classified.rs b/src/commands/classified.rs index 0e5cd95d8d..d30025b944 100644 --- a/src/commands/classified.rs +++ b/src/commands/classified.rs @@ -1,12 +1,11 @@ -use crate::commands::Command; use crate::parser::{hir, TokenNode}; use crate::prelude::*; use bytes::{BufMut, BytesMut}; +use derive_new::new; use futures::stream::StreamExt; use futures_codec::{Decoder, 
Encoder, Framed}; use log::{log_enabled, trace}; use std::io::{Error, ErrorKind}; -use std::sync::Arc; use subprocess::Exec; /// A simple `Codec` implementation that splits up data into lines. @@ -77,19 +76,28 @@ pub(crate) struct ClassifiedPipeline { pub(crate) commands: Vec, } +#[derive(Debug, Eq, PartialEq)] pub(crate) enum ClassifiedCommand { #[allow(unused)] Expr(TokenNode), Internal(InternalCommand), + #[allow(unused)] + Dynamic(hir::Call), External(ExternalCommand), } +#[derive(new, Debug, Eq, PartialEq)] pub(crate) struct InternalCommand { - pub(crate) command: Arc, + pub(crate) name: String, pub(crate) name_tag: Tag, pub(crate) args: hir::Call, } +#[derive(new, Debug, Eq, PartialEq)] +pub(crate) struct DynamicCommand { + pub(crate) args: hir::Call, +} + impl InternalCommand { pub(crate) async fn run( self, @@ -100,15 +108,17 @@ impl InternalCommand { ) -> Result { if log_enabled!(log::Level::Trace) { trace!(target: "nu::run::internal", "->"); - trace!(target: "nu::run::internal", "{}", self.command.name()); + trace!(target: "nu::run::internal", "{}", self.name); trace!(target: "nu::run::internal", "{}", self.args.debug(&source)); } let objects: InputStream = trace_stream!(target: "nu::trace_stream::internal", "input" = input.objects); + let command = context.expect_command(&self.name); + let result = context.run_command( - self.command, + command, self.name_tag.clone(), context.source_map.clone(), self.args, @@ -185,6 +195,7 @@ impl InternalCommand { } } +#[derive(Debug, Eq, PartialEq)] pub(crate) struct ExternalCommand { pub(crate) name: String, @@ -192,6 +203,7 @@ pub(crate) struct ExternalCommand { pub(crate) args: Vec>, } +#[derive(Debug)] pub(crate) enum StreamNext { Last, External, @@ -221,6 +233,8 @@ impl ExternalCommand { process = Exec::cmd(&self.name); + trace!(target: "nu::run::external", "command = {:?}", process); + if arg_string.contains("$it") { let mut first = true; @@ -275,6 +289,8 @@ impl ExternalCommand { process = 
process.cwd(context.shell_manager.path()); + trace!(target: "nu::run::external", "cwd = {:?}", context.shell_manager.path()); + let mut process = match stream_next { StreamNext::Last => process, StreamNext::External | StreamNext::Internal => { @@ -282,11 +298,18 @@ impl ExternalCommand { } }; + trace!(target: "nu::run::external", "set up stdout pipe"); + if let Some(stdin) = stdin { process = process.stdin(stdin); } - let mut popen = process.popen()?; + trace!(target: "nu::run::external", "set up stdin pipe"); + trace!(target: "nu::run::external", "built process {:?}", process); + + let mut popen = process.popen().unwrap(); + + trace!(target: "nu::run::external", "next = {:?}", stream_next); match stream_next { StreamNext::Last => { diff --git a/src/commands/command.rs b/src/commands/command.rs index 95732abac2..7fb08bcefa 100644 --- a/src/commands/command.rs +++ b/src/commands/command.rs @@ -507,6 +507,15 @@ pub enum Command { PerItem(Arc), } +impl std::fmt::Debug for Command { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Command::WholeStream(command) => write!(f, "WholeStream({})", command.name()), + Command::PerItem(command) => write!(f, "PerItem({})", command.name()), + } + } +} + impl Command { pub fn name(&self) -> &str { match self { diff --git a/src/commands/echo.rs b/src/commands/echo.rs index 21188f54f2..5bfc12efb7 100644 --- a/src/commands/echo.rs +++ b/src/commands/echo.rs @@ -54,11 +54,10 @@ fn run( output.push_str(&s); } _ => { - return Err(ShellError::labeled_error( - "Expect a string from pipeline", - "not a string-compatible value", - i.tag(), - )); + return Err(ShellError::type_error( + "a string-compatible value", + i.tagged_type_name(), + )) } } } diff --git a/src/commands/enter.rs b/src/commands/enter.rs index 2d96fe865c..94688acd56 100644 --- a/src/commands/enter.rs +++ b/src/commands/enter.rs @@ -15,7 +15,7 @@ impl PerItemCommand for Enter { } fn signature(&self) -> registry::Signature { - 
Signature::build("enter").required("location", SyntaxShape::Block) + Signature::build("enter").required("location", SyntaxShape::Path) } fn usage(&self) -> &str { @@ -33,14 +33,14 @@ impl PerItemCommand for Enter { let raw_args = raw_args.clone(); match call_info.args.expect_nth(0)? { Tagged { - item: Value::Primitive(Primitive::String(location)), + item: Value::Primitive(Primitive::Path(location)), .. } => { - let location = location.to_string(); - let location_clone = location.to_string(); + let location_string = location.display().to_string(); + let location_clone = location_string.clone(); if location.starts_with("help") { - let spec = location.split(":").collect::>(); + let spec = location_string.split(":").collect::>(); let (_, command) = (spec[0], spec[1]); diff --git a/src/commands/fetch.rs b/src/commands/fetch.rs index 652ec77eb5..21ef7fbfd9 100644 --- a/src/commands/fetch.rs +++ b/src/commands/fetch.rs @@ -53,7 +53,7 @@ fn run( }; let path_buf = path.as_path()?; let path_str = path_buf.display().to_string(); - let path_span = path.span(); + let path_span = path.tag.span; let has_raw = call_info.args.has("raw"); let registry = registry.clone(); let raw_args = raw_args.clone(); diff --git a/src/commands/first.rs b/src/commands/first.rs index e39b5155d0..71d05be7e1 100644 --- a/src/commands/first.rs +++ b/src/commands/first.rs @@ -16,7 +16,7 @@ impl WholeStreamCommand for First { } fn signature(&self) -> Signature { - Signature::build("first").required("amount", SyntaxShape::Literal) + Signature::build("first").required("amount", SyntaxShape::Int) } fn usage(&self) -> &str { diff --git a/src/commands/get.rs b/src/commands/get.rs index afa550c72c..4b0916c5d1 100644 --- a/src/commands/get.rs +++ b/src/commands/get.rs @@ -1,14 +1,16 @@ use crate::commands::WholeStreamCommand; +use crate::data::meta::tag_for_tagged_list; use crate::data::Value; use crate::errors::ShellError; use crate::prelude::*; +use log::trace; pub struct Get; #[derive(Deserialize)] pub 
struct GetArgs { - member: Tagged, - rest: Vec>, + member: ColumnPath, + rest: Vec, } impl WholeStreamCommand for Get { @@ -18,8 +20,8 @@ impl WholeStreamCommand for Get { fn signature(&self) -> Signature { Signature::build("get") - .required("member", SyntaxShape::Member) - .rest(SyntaxShape::Member) + .required("member", SyntaxShape::ColumnPath) + .rest(SyntaxShape::ColumnPath) } fn usage(&self) -> &str { @@ -35,39 +37,34 @@ impl WholeStreamCommand for Get { } } -fn get_member(path: &Tagged, obj: &Tagged) -> Result, ShellError> { +pub type ColumnPath = Vec>; + +pub fn get_column_path( + path: &ColumnPath, + obj: &Tagged, +) -> Result, ShellError> { let mut current = Some(obj); - for p in path.split(".") { + for p in path.iter() { if let Some(obj) = current { - current = match obj.get_data_by_key(p) { + current = match obj.get_data_by_key(&p) { Some(v) => Some(v), None => // Before we give up, see if they gave us a path that matches a field name by itself { - match obj.get_data_by_key(&path.item) { - Some(v) => return Ok(v.clone()), - None => { - let possibilities = obj.data_descriptors(); + let possibilities = obj.data_descriptors(); - let mut possible_matches: Vec<_> = possibilities - .iter() - .map(|x| { - (natural::distance::levenshtein_distance(x, &path.item), x) - }) - .collect(); + let mut possible_matches: Vec<_> = possibilities + .iter() + .map(|x| (natural::distance::levenshtein_distance(x, &p), x)) + .collect(); - possible_matches.sort(); + possible_matches.sort(); - if possible_matches.len() > 0 { - return Err(ShellError::labeled_error( - "Unknown column", - format!("did you mean '{}'?", possible_matches[0].1), - path.tag(), - )); - } - None - } - } + return Err(ShellError::labeled_error( + "Unknown column", + format!("did you mean '{}'?", possible_matches[0].1), + tag_for_tagged_list(path.iter().map(|p| p.tag())), + )); } } } @@ -97,6 +94,8 @@ pub fn get( }: GetArgs, RunnableContext { input, .. 
}: RunnableContext, ) -> Result { + trace!("get {:?} {:?}", member, fields); + let stream = input .values .map(move |item| { @@ -107,10 +106,10 @@ pub fn get( let fields = vec![&member, &fields] .into_iter() .flatten() - .collect::>>(); + .collect::>(); - for field in &fields { - match get_member(field, &item) { + for column_path in &fields { + match get_column_path(column_path, &item) { Ok(Tagged { item: Value::Table(l), .. diff --git a/src/commands/open.rs b/src/commands/open.rs index 254b0bd7b9..97b0df2744 100644 --- a/src/commands/open.rs +++ b/src/commands/open.rs @@ -54,7 +54,7 @@ fn run( }; let path_buf = path.as_path()?; let path_str = path_buf.display().to_string(); - let path_span = path.span(); + let path_span = path.tag.span; let has_raw = call_info.args.has("raw"); let registry = registry.clone(); let raw_args = raw_args.clone(); diff --git a/src/commands/save.rs b/src/commands/save.rs index 47f1a17e95..44e07da5ed 100644 --- a/src/commands/save.rs +++ b/src/commands/save.rs @@ -143,15 +143,16 @@ fn save( } _ => { yield Err(ShellError::labeled_error( - "Save requires a filepath", + "Save requires a filepath (1)", "needs path", name_tag, )); } }, None => { + eprintln!("{:?} {:?}", anchor, source_map); yield Err(ShellError::labeled_error( - "Save requires a filepath", + "Save requires a filepath (2)", "needs path", name_tag, )); @@ -159,7 +160,7 @@ fn save( } } else { yield Err(ShellError::labeled_error( - "Save requires a filepath", + "Save requires a filepath (3)", "needs path", name_tag, )); diff --git a/src/commands/skip_while.rs b/src/commands/skip_while.rs index 041caf300e..a768ae6133 100644 --- a/src/commands/skip_while.rs +++ b/src/commands/skip_while.rs @@ -1,6 +1,7 @@ use crate::commands::WholeStreamCommand; use crate::errors::ShellError; use crate::prelude::*; +use log::trace; pub struct SkipWhile; @@ -38,7 +39,9 @@ pub fn skip_while( RunnableContext { input, .. 
}: RunnableContext, ) -> Result { let objects = input.values.skip_while(move |item| { + trace!("ITEM = {:?}", item); let result = condition.invoke(&item); + trace!("RESULT = {:?}", result); let return_value = match result { Ok(ref v) if v.is_true() => true, diff --git a/src/commands/tags.rs b/src/commands/tags.rs index 0cef300b0c..2b710d1b61 100644 --- a/src/commands/tags.rs +++ b/src/commands/tags.rs @@ -38,8 +38,8 @@ fn tags(args: CommandArgs, _registry: &CommandRegistry) -> Result Arc { + self.get_command(name).unwrap() + } + pub(crate) fn has(&self, name: &str) -> bool { let registry = self.registry.lock().unwrap(); registry.contains_key(name) } - fn insert(&mut self, name: impl Into, command: Arc) { + pub(crate) fn insert(&mut self, name: impl Into, command: Arc) { let mut registry = self.registry.lock().unwrap(); registry.insert(name.into(), command); } @@ -83,6 +87,14 @@ impl Context { &self.registry } + pub(crate) fn expand_context<'context>( + &'context self, + source: &'context Text, + tag: Tag, + ) -> ExpandContext<'context> { + ExpandContext::new(&self.registry, tag, source, self.shell_manager.homedir()) + } + pub(crate) fn basic() -> Result> { let registry = CommandRegistry::new(); Ok(Context { @@ -109,12 +121,12 @@ impl Context { self.source_map.insert(uuid, anchor_location); } - pub(crate) fn has_command(&self, name: &str) -> bool { - self.registry.has(name) + pub(crate) fn get_command(&self, name: &str) -> Option> { + self.registry.get_command(name) } - pub(crate) fn get_command(&self, name: &str) -> Arc { - self.registry.get_command(name).unwrap() + pub(crate) fn expect_command(&self, name: &str) -> Arc { + self.registry.expect_command(name) } pub(crate) fn run_command<'a>( diff --git a/src/data/base.rs b/src/data/base.rs index 04465181a3..176560137f 100644 --- a/src/data/base.rs +++ b/src/data/base.rs @@ -8,6 +8,7 @@ use crate::Text; use chrono::{DateTime, Utc}; use chrono_humanize::Humanize; use derive_new::new; +use log::trace; use 
serde::{Deserialize, Serialize}; use std::fmt; use std::path::PathBuf; @@ -217,6 +218,14 @@ impl Block { let mut last = None; + trace!( + "EXPRS = {:?}", + self.expressions + .iter() + .map(|e| format!("{}", e)) + .collect::>() + ); + for expr in self.expressions.iter() { last = Some(evaluate_baseline_expr( &expr, @@ -394,13 +403,34 @@ impl Tagged { pub(crate) fn debug(&self) -> ValueDebug<'_> { ValueDebug { value: self } } + + pub fn as_column_path(&self) -> Result>>, ShellError> { + let mut out: Vec> = vec![]; + + match &self.item { + Value::Table(table) => { + for item in table { + out.push(item.as_string()?.tagged(item.tag)); + } + } + + other => { + return Err(ShellError::type_error( + "column name", + other.type_name().tagged(self.tag), + )) + } + } + + Ok(out.tagged(self.tag)) + } } impl Value { pub(crate) fn type_name(&self) -> String { match self { Value::Primitive(p) => p.type_name(), - Value::Row(_) => format!("object"), + Value::Row(_) => format!("row"), Value::Table(_) => format!("list"), Value::Block(_) => format!("block"), } @@ -443,6 +473,22 @@ impl Value { } } + pub fn get_data_by_column_path( + &self, + tag: Tag, + path: &Vec>, + ) -> Option> { + let mut current = self; + for p in path { + match current.get_data_by_key(p) { + Some(v) => current = v, + None => return None, + } + } + + Some(Tagged::from_item(current, tag)) + } + pub fn get_data_by_path(&self, tag: Tag, path: &str) -> Option> { let mut current = self; for p in path.split(".") { @@ -508,6 +554,58 @@ impl Value { None } + pub fn insert_data_at_column_path( + &self, + tag: Tag, + split_path: &Vec>, + new_value: Value, + ) -> Option> { + let mut new_obj = self.clone(); + + if let Value::Row(ref mut o) = new_obj { + let mut current = o; + + if split_path.len() == 1 { + // Special case for inserting at the top level + current.entries.insert( + split_path[0].item.clone(), + Tagged::from_item(new_value, tag), + ); + return Some(Tagged::from_item(new_obj, tag)); + } + + for idx in 
0..split_path.len() { + match current.entries.get_mut(&split_path[idx].item) { + Some(next) => { + if idx == (split_path.len() - 2) { + match &mut next.item { + Value::Row(o) => { + o.entries.insert( + split_path[idx + 1].to_string(), + Tagged::from_item(new_value, tag), + ); + } + _ => {} + } + + return Some(Tagged::from_item(new_obj, tag)); + } else { + match next.item { + Value::Row(ref mut o) => { + current = o; + } + _ => return None, + } + } + } + _ => return None, + } + } + } + + None + } + pub fn replace_data_at_path( &self, tag: Tag, @@ -543,6 +641,39 @@ impl Value { None } + pub fn replace_data_at_column_path( + &self, + tag: Tag, + split_path: &Vec>, + replaced_value: Value, + ) -> Option> { + let mut new_obj = self.clone(); + + if let Value::Row(ref mut o) = new_obj { + let mut current = o; + for idx in 0..split_path.len() { + match current.entries.get_mut(&split_path[idx].item) { + Some(next) => { + if idx == (split_path.len() - 1) { + *next = Tagged::from_item(replaced_value, tag); + return Some(Tagged::from_item(new_obj, tag)); + } else { + match next.item { + Value::Row(ref mut o) => { + current = o; + } + _ => return None, + } + } + } + _ => return None, + } + } + } + + None + } + pub fn get_data(&self, desc: &String) -> MaybeOwned<'_, Value> { match self { p @ Value::Primitive(_) => MaybeOwned::Borrowed(p), diff --git a/src/data/meta.rs b/src/data/meta.rs index 0a56198e6c..b66b009cc2 100644 --- a/src/data/meta.rs +++ b/src/data/meta.rs @@ -1,4 +1,5 @@ use crate::context::{AnchorLocation, SourceMap}; +use crate::parser::parse::parser::TracableContext; use crate::prelude::*; use crate::Text; use derive_new::new; @@ -119,10 +120,7 @@ impl From<&Tag> for Tag { impl From> for Span { fn from(input: nom_locate::LocatedSpanEx<&str, Uuid>) -> Span { - Span { - start: input.offset, - end: input.offset + input.fragment.len(), - } + Span::new(input.offset, input.offset + input.fragment.len()) } } @@ -147,10 +145,7 @@ impl impl From<(usize, usize)> for Span { 
fn from(input: (usize, usize)) -> Span { - Span { - start: input.0, - end: input.1, - } + Span::new(input.0, input.1) } } @@ -164,7 +159,7 @@ impl From<&std::ops::Range> for Span { } #[derive( - Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash, Getters, + Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash, Getters, new, )] pub struct Tag { pub anchor: Uuid, @@ -189,11 +184,20 @@ impl From<&Span> for Tag { } } +impl From<(usize, usize, TracableContext)> for Tag { + fn from((start, end, context): (usize, usize, TracableContext)) -> Self { + Tag { + anchor: context.origin, + span: Span::new(start, end), + } + } +} + impl From<(usize, usize, Uuid)> for Tag { fn from((start, end, anchor): (usize, usize, Uuid)) -> Self { Tag { anchor, - span: Span { start, end }, + span: Span::new(start, end), } } } @@ -201,24 +205,17 @@ impl From<(usize, usize, Uuid)> for Tag { impl From<(usize, usize, Option)> for Tag { fn from((start, end, anchor): (usize, usize, Option)) -> Self { Tag { - anchor: if let Some(uuid) = anchor { - uuid - } else { - uuid::Uuid::nil() - }, - span: Span { start, end }, + anchor: anchor.unwrap_or(uuid::Uuid::nil()), + span: Span::new(start, end), } } } -impl From> for Tag { - fn from(input: nom_locate::LocatedSpanEx<&str, Uuid>) -> Tag { +impl From> for Tag { + fn from(input: nom_locate::LocatedSpanEx<&str, TracableContext>) -> Tag { Tag { - anchor: input.extra, - span: Span { - start: input.offset, - end: input.offset + input.fragment.len(), - }, + anchor: input.extra.origin, + span: Span::new(input.offset, input.offset + input.fragment.len()), } } } @@ -265,10 +262,7 @@ impl Tag { ); Tag { - span: Span { - start: self.span.start, - end: other.span.end, - }, + span: Span::new(self.span.start, other.span.end), anchor: self.anchor, } } @@ -276,18 +270,46 @@ impl Tag { pub fn slice<'a>(&self, source: &'a str) -> &'a str { self.span.slice(source) } + + pub fn string<'a>(&self, source: &'a str) -> 
String { + self.span.slice(source).to_string() + } + + pub fn tagged_slice<'a>(&self, source: &'a str) -> Tagged<&'a str> { + self.span.slice(source).tagged(self) + } + + pub fn tagged_string<'a>(&self, source: &'a str) -> Tagged { + self.span.slice(source).to_string().tagged(self) + } +} + +pub fn tag_for_tagged_list(mut iter: impl Iterator) -> Tag { + let first = iter.next(); + + let first = match first { + None => return Tag::unknown(), + Some(first) => first, + }; + + let last = iter.last(); + + match last { + None => first, + Some(last) => first.until(last), + } } #[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash)] pub struct Span { - pub(crate) start: usize, - pub(crate) end: usize, + start: usize, + end: usize, } impl From> for Span { fn from(input: Option) -> Span { match input { - None => Span { start: 0, end: 0 }, + None => Span::new(0, 0), Some(span) => span, } } @@ -295,7 +317,18 @@ impl From> for Span { impl Span { pub fn unknown() -> Span { - Span { start: 0, end: 0 } + Span::new(0, 0) + } + + pub fn new(start: usize, end: usize) -> Span { + assert!( + end >= start, + "Can't create a Span whose end < start, start={}, end={}", + start, + end + ); + + Span { start, end } } /* @@ -308,6 +341,14 @@ impl Span { } */ + pub fn start(&self) -> usize { + self.start + } + + pub fn end(&self) -> usize { + self.end + } + pub fn is_unknown(&self) -> bool { self.start == 0 && self.end == 0 } @@ -319,17 +360,11 @@ impl Span { impl language_reporting::ReportingSpan for Span { fn with_start(&self, start: usize) -> Self { - Span { - start, - end: self.end, - } + Span::new(start, self.end) } fn with_end(&self, end: usize) -> Self { - Span { - start: self.start, - end, - } + Span::new(self.start, end) } fn start(&self) -> usize { @@ -344,20 +379,14 @@ impl language_reporting::ReportingSpan for Span { impl language_reporting::ReportingSpan for Tag { fn with_start(&self, start: usize) -> Self { Tag { - span: Span { - start, - end: 
self.span.end, - }, + span: Span::new(start, self.span.end), anchor: self.anchor, } } fn with_end(&self, end: usize) -> Self { Tag { - span: Span { - start: self.span.start, - end, - }, + span: Span::new(self.span.start, end), anchor: self.anchor, } } diff --git a/src/errors.rs b/src/errors.rs index 7e9c14b239..a070f6f54e 100644 --- a/src/errors.rs +++ b/src/errors.rs @@ -1,5 +1,6 @@ use crate::prelude::*; +use crate::parser::parse::parser::TracableContext; use ansi_term::Color; use derive_new::new; use language_reporting::{Diagnostic, Label, Severity}; @@ -62,6 +63,14 @@ impl ShellError { .start() } + pub(crate) fn unexpected_eof(expected: impl Into, tag: Tag) -> ShellError { + ProximateShellError::UnexpectedEof { + expected: expected.into(), + tag, + } + .start() + } + pub(crate) fn range_error( expected: impl Into, actual: &Tagged, @@ -82,6 +91,7 @@ impl ShellError { .start() } + #[allow(unused)] pub(crate) fn invalid_command(problem: impl Into) -> ShellError { ProximateShellError::InvalidCommand { command: problem.into(), @@ -133,7 +143,7 @@ impl ShellError { pub(crate) fn parse_error( error: nom::Err<( - nom_locate::LocatedSpanEx<&str, uuid::Uuid>, + nom_locate::LocatedSpanEx<&str, TracableContext>, nom::error::ErrorKind, )>, ) -> ShellError { @@ -235,7 +245,6 @@ impl ShellError { Label::new_primary(tag) .with_message(format!("Expected {}, found {}", expected, actual)), ), - ProximateShellError::TypeError { expected, actual: @@ -246,6 +255,11 @@ impl ShellError { } => Diagnostic::new(Severity::Error, "Type Error") .with_label(Label::new_primary(tag).with_message(expected)), + ProximateShellError::UnexpectedEof { + expected, tag + } => Diagnostic::new(Severity::Error, format!("Unexpected end of input")) + .with_label(Label::new_primary(tag).with_message(format!("Expected {}", expected))), + ProximateShellError::RangeError { kind, operation, @@ -267,10 +281,10 @@ impl ShellError { problem: Tagged { tag, - .. 
+ item }, } => Diagnostic::new(Severity::Error, "Syntax Error") - .with_label(Label::new_primary(tag).with_message("Unexpected external command")), + .with_label(Label::new_primary(tag).with_message(item)), ProximateShellError::MissingProperty { subpath, expr } => { let subpath = subpath.into_label(); @@ -340,6 +354,10 @@ impl ShellError { pub(crate) fn unexpected(title: impl Into) -> ShellError { ShellError::string(&format!("Unexpected: {}", title.into())) } + + pub(crate) fn unreachable(title: impl Into) -> ShellError { + ShellError::string(&format!("BUG: Unreachable: {}", title.into())) + } } #[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Serialize, Deserialize)] @@ -387,6 +405,10 @@ pub enum ProximateShellError { SyntaxError { problem: Tagged, }, + UnexpectedEof { + expected: String, + tag: Tag, + }, InvalidCommand { command: Tag, }, @@ -473,6 +495,7 @@ impl std::fmt::Display for ShellError { ProximateShellError::MissingValue { .. } => write!(f, "MissingValue"), ProximateShellError::InvalidCommand { .. } => write!(f, "InvalidCommand"), ProximateShellError::TypeError { .. } => write!(f, "TypeError"), + ProximateShellError::UnexpectedEof { .. } => write!(f, "UnexpectedEof"), ProximateShellError::RangeError { .. } => write!(f, "RangeError"), ProximateShellError::SyntaxError { .. } => write!(f, "SyntaxError"), ProximateShellError::MissingProperty { .. 
} => write!(f, "MissingProperty"), diff --git a/src/evaluate/evaluator.rs b/src/evaluate/evaluator.rs index a111d3964d..248d2a0816 100644 --- a/src/evaluate/evaluator.rs +++ b/src/evaluate/evaluator.rs @@ -7,6 +7,8 @@ use crate::parser::{ use crate::prelude::*; use derive_new::new; use indexmap::IndexMap; +use log::trace; +use std::fmt; #[derive(new)] pub struct Scope { @@ -15,6 +17,15 @@ pub struct Scope { vars: IndexMap>, } +impl fmt::Display for Scope { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_map() + .entry(&"$it", &format!("{:?}", self.it.item)) + .entries(self.vars.iter().map(|(k, v)| (k, &v.item))) + .finish() + } +} + impl Scope { pub(crate) fn empty() -> Scope { Scope { @@ -48,12 +59,15 @@ pub(crate) fn evaluate_baseline_expr( RawExpression::Synthetic(hir::Synthetic::String(s)) => { Ok(Value::string(s).tagged_unknown()) } - RawExpression::Variable(var) => evaluate_reference(var, scope, source), + RawExpression::Variable(var) => evaluate_reference(var, scope, source, expr.tag()), + RawExpression::Command(_) => evaluate_command(expr.tag(), scope, source), RawExpression::ExternalCommand(external) => evaluate_external(external, scope, source), RawExpression::Binary(binary) => { let left = evaluate_baseline_expr(binary.left(), registry, scope, source)?; let right = evaluate_baseline_expr(binary.right(), registry, scope, source)?; + trace!("left={:?} right={:?}", left.item, right.item); + match left.compare(binary.op(), &*right) { Ok(result) => Ok(Value::boolean(result).tagged(expr.tag())), Err((left_type, right_type)) => Err(ShellError::coerce_error( @@ -130,14 +144,16 @@ fn evaluate_reference( name: &hir::Variable, scope: &Scope, source: &Text, + tag: Tag, ) -> Result, ShellError> { + trace!("Evaluating {} with Scope {}", name, scope); match name { - hir::Variable::It(tag) => Ok(scope.it.item.clone().tagged(*tag)), - hir::Variable::Other(tag) => Ok(scope + hir::Variable::It(_) => Ok(scope.it.item.clone().tagged(tag)), + 
hir::Variable::Other(inner) => Ok(scope .vars - .get(tag.slice(source)) + .get(inner.slice(source)) .map(|v| v.clone()) - .unwrap_or_else(|| Value::nothing().tagged(*tag))), + .unwrap_or_else(|| Value::nothing().tagged(tag))), } } @@ -150,3 +166,7 @@ fn evaluate_external( "Unexpected external command".tagged(*external.name()), )) } + +fn evaluate_command(tag: Tag, _scope: &Scope, _source: &Text) -> Result, ShellError> { + Err(ShellError::syntax_error("Unexpected command".tagged(tag))) +} diff --git a/src/lib.rs b/src/lib.rs index e8e09aacdd..b955f426e9 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -31,7 +31,7 @@ pub use cli::cli; pub use data::base::{Primitive, Value}; pub use data::config::{config_path, APP_INFO}; pub use data::dict::{Dictionary, TaggedDictBuilder}; -pub use data::meta::{Tag, Tagged, TaggedItem}; +pub use data::meta::{Span, Tag, Tagged, TaggedItem}; pub use errors::{CoerceInto, ShellError}; pub use num_traits::cast::ToPrimitive; pub use parser::parse::text::Text; diff --git a/src/parser.rs b/src/parser.rs index 138125769b..5fcfaaa27e 100644 --- a/src/parser.rs +++ b/src/parser.rs @@ -7,7 +7,7 @@ pub(crate) mod registry; use crate::errors::ShellError; pub(crate) use deserializer::ConfigDeserializer; -pub(crate) use hir::baseline_parse_tokens::baseline_parse_tokens; +pub(crate) use hir::TokensIterator; pub(crate) use parse::call_node::CallNode; pub(crate) use parse::files::Files; pub(crate) use parse::flag::Flag; @@ -15,10 +15,10 @@ pub(crate) use parse::operator::Operator; pub(crate) use parse::parser::{nom_input, pipeline}; pub(crate) use parse::pipeline::{Pipeline, PipelineElement}; pub(crate) use parse::text::Text; -pub(crate) use parse::token_tree::{DelimitedNode, Delimiter, PathNode, TokenNode}; +pub(crate) use parse::token_tree::{DelimitedNode, Delimiter, TokenNode}; pub(crate) use parse::tokens::{RawToken, Token}; pub(crate) use parse::unit::Unit; -pub(crate) use parse_command::parse_command; +pub(crate) use parse_command::parse_command_tail; 
pub(crate) use registry::CommandRegistry; pub fn parse(input: &str, anchor: uuid::Uuid) -> Result { diff --git a/src/parser/deserializer.rs b/src/parser/deserializer.rs index f9b9146e50..43409fc4df 100644 --- a/src/parser/deserializer.rs +++ b/src/parser/deserializer.rs @@ -310,9 +310,10 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut ConfigDeserializer<'de> { return Ok(r); } trace!( - "deserializing struct {:?} {:?} (stack={:?})", + "deserializing struct {:?} {:?} (saw_root={} stack={:?})", name, fields, + self.saw_root, self.stack ); @@ -326,6 +327,12 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut ConfigDeserializer<'de> { let type_name = std::any::type_name::(); let tagged_val_name = std::any::type_name::>(); + trace!( + "type_name={} tagged_val_name={}", + type_name, + tagged_val_name + ); + if type_name == tagged_val_name { return visit::, _>(value.val, name, fields, visitor); } diff --git a/src/parser/hir.rs b/src/parser/hir.rs index 96eb7272a6..4fd0a71b3d 100644 --- a/src/parser/hir.rs +++ b/src/parser/hir.rs @@ -1,11 +1,13 @@ pub(crate) mod baseline_parse; -pub(crate) mod baseline_parse_tokens; pub(crate) mod binary; +pub(crate) mod expand_external_tokens; pub(crate) mod external_command; pub(crate) mod named; pub(crate) mod path; +pub(crate) mod syntax_shape; +pub(crate) mod tokens_iterator; -use crate::parser::{registry, Unit}; +use crate::parser::{registry, Operator, Unit}; use crate::prelude::*; use derive_new::new; use getset::Getters; @@ -14,27 +16,18 @@ use std::fmt; use std::path::PathBuf; use crate::evaluate::Scope; +use crate::parser::parse::tokens::RawNumber; +use crate::traits::ToDebug; -pub(crate) use self::baseline_parse::{ - baseline_parse_single_token, baseline_parse_token_as_number, baseline_parse_token_as_path, - baseline_parse_token_as_pattern, baseline_parse_token_as_string, -}; -pub(crate) use self::baseline_parse_tokens::{baseline_parse_next_expr, TokensIterator}; pub(crate) use self::binary::Binary; pub(crate) use 
self::external_command::ExternalCommand; pub(crate) use self::named::NamedArguments; pub(crate) use self::path::Path; +pub(crate) use self::syntax_shape::ExpandContext; +pub(crate) use self::tokens_iterator::debug::debug_tokens; +pub(crate) use self::tokens_iterator::TokensIterator; -pub use self::baseline_parse_tokens::SyntaxShape; - -pub fn path(head: impl Into, tail: Vec>>) -> Path { - Path::new( - head.into(), - tail.into_iter() - .map(|item| item.map(|string| string.into())) - .collect(), - ) -} +pub use self::syntax_shape::SyntaxShape; #[derive(Debug, Clone, Eq, PartialEq, Getters, Serialize, Deserialize, new)] pub struct Call { @@ -93,6 +86,7 @@ pub enum RawExpression { FilePath(PathBuf), ExternalCommand(ExternalCommand), + Command(Tag), Boolean(bool), } @@ -115,13 +109,14 @@ impl RawExpression { match self { RawExpression::Literal(literal) => literal.type_name(), RawExpression::Synthetic(synthetic) => synthetic.type_name(), - RawExpression::ExternalWord => "externalword", - RawExpression::FilePath(..) => "filepath", + RawExpression::Command(..) => "command", + RawExpression::ExternalWord => "external word", + RawExpression::FilePath(..) => "file path", RawExpression::Variable(..) => "variable", RawExpression::List(..) => "list", RawExpression::Binary(..) => "binary", RawExpression::Block(..) => "block", - RawExpression::Path(..) => "path", + RawExpression::Path(..) => "variable path", RawExpression::Boolean(..) => "boolean", RawExpression::ExternalCommand(..) 
=> "external", } @@ -130,6 +125,39 @@ impl RawExpression { pub type Expression = Tagged; +impl std::fmt::Display for Expression { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let span = self.tag.span; + + match &self.item { + RawExpression::Literal(literal) => write!(f, "{}", literal.tagged(self.tag)), + RawExpression::Synthetic(Synthetic::String(s)) => write!(f, "{}", s), + RawExpression::Command(_) => write!(f, "Command{{ {}..{} }}", span.start(), span.end()), + RawExpression::ExternalWord => { + write!(f, "ExternalWord{{ {}..{} }}", span.start(), span.end()) + } + RawExpression::FilePath(file) => write!(f, "Path{{ {} }}", file.display()), + RawExpression::Variable(variable) => write!(f, "{}", variable), + RawExpression::List(list) => f + .debug_list() + .entries(list.iter().map(|e| format!("{}", e))) + .finish(), + RawExpression::Binary(binary) => write!(f, "{}", binary), + RawExpression::Block(items) => { + write!(f, "Block")?; + f.debug_set() + .entries(items.iter().map(|i| format!("{}", i))) + .finish() + } + RawExpression::Path(path) => write!(f, "{}", path), + RawExpression::Boolean(b) => write!(f, "${}", b), + RawExpression::ExternalCommand(..) 
=> { + write!(f, "ExternalComment{{ {}..{} }}", span.start(), span.end()) + } + } + } +} + impl Expression { pub(crate) fn number(i: impl Into, tag: impl Into) -> Expression { RawExpression::Literal(Literal::Number(i.into())).tagged(tag.into()) @@ -151,10 +179,50 @@ impl Expression { RawExpression::Literal(Literal::String(inner.into())).tagged(outer.into()) } + pub(crate) fn path( + head: Expression, + tail: Vec>>, + tag: impl Into, + ) -> Expression { + let tail = tail.into_iter().map(|t| t.map(|s| s.into())).collect(); + RawExpression::Path(Box::new(Path::new(head, tail))).tagged(tag.into()) + } + + pub(crate) fn dot_member(head: Expression, next: Tagged>) -> Expression { + let Tagged { item, tag } = head; + let new_tag = head.tag.until(next.tag); + + match item { + RawExpression::Path(path) => { + let (head, mut tail) = path.parts(); + + tail.push(next.map(|i| i.into())); + Expression::path(head, tail, new_tag) + } + + other => Expression::path(other.tagged(tag), vec![next], new_tag), + } + } + + pub(crate) fn infix( + left: Expression, + op: Tagged>, + right: Expression, + ) -> Expression { + let new_tag = left.tag.until(right.tag); + + RawExpression::Binary(Box::new(Binary::new(left, op.map(|o| o.into()), right))) + .tagged(new_tag) + } + pub(crate) fn file_path(path: impl Into, outer: impl Into) -> Expression { RawExpression::FilePath(path.into()).tagged(outer) } + pub(crate) fn list(list: Vec, tag: impl Into) -> Expression { + RawExpression::List(list).tagged(tag) + } + pub(crate) fn bare(tag: impl Into) -> Expression { RawExpression::Literal(Literal::Bare).tagged(tag) } @@ -182,6 +250,7 @@ impl ToDebug for Expression { RawExpression::Literal(l) => l.tagged(self.tag()).fmt_debug(f, source), RawExpression::FilePath(p) => write!(f, "{}", p.display()), RawExpression::ExternalWord => write!(f, "{}", self.tag().slice(source)), + RawExpression::Command(tag) => write!(f, "{}", tag.slice(source)), RawExpression::Synthetic(Synthetic::String(s)) => write!(f, "{:?}", 
s), RawExpression::Variable(Variable::It(_)) => write!(f, "$it"), RawExpression::Variable(Variable::Other(s)) => write!(f, "${}", s.slice(source)), @@ -232,6 +301,26 @@ pub enum Literal { Bare, } +impl std::fmt::Display for Tagged { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", Tagged::new(self.tag, &self.item)) + } +} + +impl std::fmt::Display for Tagged<&Literal> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let span = self.tag.span; + + match &self.item { + Literal::Number(number) => write!(f, "{}", number), + Literal::Size(number, unit) => write!(f, "{}{}", number, unit.as_str()), + Literal::String(_) => write!(f, "String{{ {}..{} }}", span.start(), span.end()), + Literal::GlobPattern => write!(f, "Glob{{ {}..{} }}", span.start(), span.end()), + Literal::Bare => write!(f, "Bare{{ {}..{} }}", span.start(), span.end()), + } + } +} + impl ToDebug for Tagged<&Literal> { fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { match self.item() { @@ -261,3 +350,12 @@ pub enum Variable { It(Tag), Other(Tag), } + +impl std::fmt::Display for Variable { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Variable::It(_) => write!(f, "$it"), + Variable::Other(tag) => write!(f, "${{ {}..{} }}", tag.span.start(), tag.span.end()), + } + } +} diff --git a/src/parser/hir/baseline_parse.rs b/src/parser/hir/baseline_parse.rs index 267494f27c..87c2771955 100644 --- a/src/parser/hir/baseline_parse.rs +++ b/src/parser/hir/baseline_parse.rs @@ -1,140 +1,2 @@ -use crate::context::Context; -use crate::errors::ShellError; -use crate::parser::{hir, RawToken, Token}; -use crate::TaggedItem; -use crate::Text; -use std::path::PathBuf; - -pub fn baseline_parse_single_token( - token: &Token, - source: &Text, -) -> Result { - Ok(match *token.item() { - RawToken::Number(number) => hir::Expression::number(number.to_number(source), token.tag()), - RawToken::Size(int, 
unit) => { - hir::Expression::size(int.to_number(source), unit, token.tag()) - } - RawToken::String(tag) => hir::Expression::string(tag, token.tag()), - RawToken::Variable(tag) if tag.slice(source) == "it" => { - hir::Expression::it_variable(tag, token.tag()) - } - RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()), - RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()), - RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())), - RawToken::GlobPattern => hir::Expression::pattern(token.tag()), - RawToken::Bare => hir::Expression::bare(token.tag()), - }) -} - -pub fn baseline_parse_token_as_number( - token: &Token, - source: &Text, -) -> Result { - Ok(match *token.item() { - RawToken::Variable(tag) if tag.slice(source) == "it" => { - hir::Expression::it_variable(tag, token.tag()) - } - RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()), - RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())), - RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()), - RawToken::Number(number) => hir::Expression::number(number.to_number(source), token.tag()), - RawToken::Size(number, unit) => { - hir::Expression::size(number.to_number(source), unit, token.tag()) - } - RawToken::Bare => hir::Expression::bare(token.tag()), - RawToken::GlobPattern => { - return Err(ShellError::type_error( - "Number", - "glob pattern".to_string().tagged(token.tag()), - )) - } - RawToken::String(tag) => hir::Expression::string(tag, token.tag()), - }) -} - -pub fn baseline_parse_token_as_string( - token: &Token, - source: &Text, -) -> Result { - Ok(match *token.item() { - RawToken::Variable(tag) if tag.slice(source) == "it" => { - hir::Expression::it_variable(tag, token.tag()) - } - RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()), - RawToken::ExternalWord => return 
Err(ShellError::invalid_external_word(token.tag())), - RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()), - RawToken::Number(_) => hir::Expression::bare(token.tag()), - RawToken::Size(_, _) => hir::Expression::bare(token.tag()), - RawToken::Bare => hir::Expression::bare(token.tag()), - RawToken::GlobPattern => { - return Err(ShellError::type_error( - "String", - "glob pattern".tagged(token.tag()), - )) - } - RawToken::String(tag) => hir::Expression::string(tag, token.tag()), - }) -} - -pub fn baseline_parse_token_as_path( - token: &Token, - context: &Context, - source: &Text, -) -> Result { - Ok(match *token.item() { - RawToken::Variable(tag) if tag.slice(source) == "it" => { - hir::Expression::it_variable(tag, token.tag()) - } - RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()), - RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())), - RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()), - RawToken::Number(_) => hir::Expression::bare(token.tag()), - RawToken::Size(_, _) => hir::Expression::bare(token.tag()), - RawToken::Bare => { - hir::Expression::file_path(expand_path(token.tag().slice(source), context), token.tag()) - } - RawToken::GlobPattern => { - return Err(ShellError::type_error( - "Path", - "glob pattern".tagged(token.tag()), - )) - } - RawToken::String(tag) => { - hir::Expression::file_path(expand_path(tag.slice(source), context), token.tag()) - } - }) -} - -pub fn baseline_parse_token_as_pattern( - token: &Token, - context: &Context, - source: &Text, -) -> Result { - Ok(match *token.item() { - RawToken::Variable(tag) if tag.slice(source) == "it" => { - hir::Expression::it_variable(tag, token.tag()) - } - RawToken::ExternalCommand(_) => { - return Err(ShellError::syntax_error( - "Invalid external command".to_string().tagged(token.tag()), - )) - } - RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())), - 
RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()), - RawToken::Number(_) => hir::Expression::bare(token.tag()), - RawToken::Size(_, _) => hir::Expression::bare(token.tag()), - RawToken::GlobPattern => hir::Expression::pattern(token.tag()), - RawToken::Bare => { - hir::Expression::file_path(expand_path(token.tag().slice(source), context), token.tag()) - } - RawToken::String(tag) => { - hir::Expression::file_path(expand_path(tag.slice(source), context), token.tag()) - } - }) -} - -pub fn expand_path(string: &str, context: &Context) -> PathBuf { - let expanded = shellexpand::tilde_with_context(string, || context.shell_manager.homedir()); - - PathBuf::from(expanded.as_ref()) -} +#[cfg(test)] +mod tests; diff --git a/src/parser/hir/baseline_parse/tests.rs b/src/parser/hir/baseline_parse/tests.rs new file mode 100644 index 0000000000..badb177513 --- /dev/null +++ b/src/parser/hir/baseline_parse/tests.rs @@ -0,0 +1,144 @@ +use crate::commands::classified::InternalCommand; +use crate::commands::ClassifiedCommand; +use crate::env::host::BasicHost; +use crate::parser::hir; +use crate::parser::hir::syntax_shape::*; +use crate::parser::hir::TokensIterator; +use crate::parser::parse::token_tree_builder::{CurriedToken, TokenTreeBuilder as b}; +use crate::parser::TokenNode; +use crate::{Span, Tag, Tagged, TaggedItem, Text}; +use pretty_assertions::assert_eq; +use std::fmt::Debug; +use uuid::Uuid; + +#[test] +fn test_parse_string() { + parse_tokens(StringShape, vec![b::string("hello")], |tokens| { + hir::Expression::string(inner_string_tag(tokens[0].tag()), tokens[0].tag()) + }); +} + +#[test] +fn test_parse_path() { + parse_tokens( + VariablePathShape, + vec![b::var("it"), b::op("."), b::bare("cpu")], + |tokens| { + let (outer_var, inner_var) = tokens[0].expect_var(); + let bare = tokens[2].expect_bare(); + hir::Expression::path( + hir::Expression::it_variable(inner_var, outer_var), + vec!["cpu".tagged(bare)], + outer_var.until(bare), + ) + }, + ); + + 
parse_tokens( + VariablePathShape, + vec![ + b::var("cpu"), + b::op("."), + b::bare("amount"), + b::op("."), + b::string("max ghz"), + ], + |tokens| { + let (outer_var, inner_var) = tokens[0].expect_var(); + let amount = tokens[2].expect_bare(); + let (outer_max_ghz, _) = tokens[4].expect_string(); + + hir::Expression::path( + hir::Expression::variable(inner_var, outer_var), + vec!["amount".tagged(amount), "max ghz".tagged(outer_max_ghz)], + outer_var.until(outer_max_ghz), + ) + }, + ); +} + +#[test] +fn test_parse_command() { + parse_tokens( + ClassifiedCommandShape, + vec![b::bare("ls"), b::sp(), b::pattern("*.txt")], + |tokens| { + let bare = tokens[0].expect_bare(); + let pat = tokens[2].tag(); + + ClassifiedCommand::Internal(InternalCommand::new( + "ls".to_string(), + bare, + hir::Call { + head: Box::new(hir::RawExpression::Command(bare).tagged(bare)), + positional: Some(vec![hir::Expression::pattern(pat)]), + named: None, + }, + )) + // hir::Expression::path( + // hir::Expression::variable(inner_var, outer_var), + // vec!["cpu".tagged(bare)], + // outer_var.until(bare), + // ) + }, + ); + + parse_tokens( + VariablePathShape, + vec![ + b::var("cpu"), + b::op("."), + b::bare("amount"), + b::op("."), + b::string("max ghz"), + ], + |tokens| { + let (outer_var, inner_var) = tokens[0].expect_var(); + let amount = tokens[2].expect_bare(); + let (outer_max_ghz, _) = tokens[4].expect_string(); + + hir::Expression::path( + hir::Expression::variable(inner_var, outer_var), + vec!["amount".tagged(amount), "max ghz".tagged(outer_max_ghz)], + outer_var.until(outer_max_ghz), + ) + }, + ); +} + +fn parse_tokens( + shape: impl ExpandSyntax, + tokens: Vec, + expected: impl FnOnce(Tagged<&[TokenNode]>) -> T, +) { + let tokens = b::token_list(tokens); + let (tokens, source) = b::build(test_origin(), tokens); + + ExpandContext::with_empty(&Text::from(source), |context| { + let tokens = tokens.expect_list(); + let mut iterator = TokensIterator::all(tokens.item, *context.tag()); + + 
let expr = expand_syntax(&shape, &mut iterator, &context); + + let expr = match expr { + Ok(expr) => expr, + Err(err) => { + crate::cli::print_err(err, &BasicHost, context.source().clone()); + panic!("Parse failed"); + } + }; + + assert_eq!(expr, expected(tokens)); + }) +} + +fn test_origin() -> Uuid { + Uuid::nil() +} + +fn inner_string_tag(tag: Tag) -> Tag { + Tag { + span: Span::new(tag.span.start() + 1, tag.span.end() - 1), + anchor: tag.anchor, + } +} diff --git a/src/parser/hir/baseline_parse_tokens.rs b/src/parser/hir/baseline_parse_tokens.rs deleted file mode 100644 index 8413bd07e1..0000000000 --- a/src/parser/hir/baseline_parse_tokens.rs +++ /dev/null @@ -1,459 +0,0 @@ -use crate::context::Context; -use crate::errors::ShellError; -use crate::parser::{ - hir, - hir::{ - baseline_parse_single_token, baseline_parse_token_as_number, baseline_parse_token_as_path, - baseline_parse_token_as_pattern, baseline_parse_token_as_string, - }, - DelimitedNode, Delimiter, PathNode, RawToken, TokenNode, -}; -use crate::{Tag, Tagged, TaggedItem, Text}; -use derive_new::new; -use log::trace; -use serde::{Deserialize, Serialize}; - -pub fn baseline_parse_tokens( - token_nodes: &mut TokensIterator<'_>, - context: &Context, - source: &Text, - syntax_type: SyntaxShape, -) -> Result, ShellError> { - let mut exprs: Vec = vec![]; - - loop { - if token_nodes.at_end() { - break; - } - - let expr = baseline_parse_next_expr(token_nodes, context, source, syntax_type)?; - exprs.push(expr); - } - - Ok(exprs) -} - -#[derive(Debug, Copy, Clone, Serialize, Deserialize)] -pub enum SyntaxShape { - Any, - List, - Literal, - String, - Member, - Variable, - Number, - Path, - Pattern, - Binary, - Block, - Boolean, -} - -impl std::fmt::Display for SyntaxShape { - fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { - match self { - SyntaxShape::Any => write!(f, "Any"), - SyntaxShape::List => write!(f, "List"), - SyntaxShape::Literal => write!(f, "Literal"), - SyntaxShape::String => 
write!(f, "String"), - SyntaxShape::Member => write!(f, "Member"), - SyntaxShape::Variable => write!(f, "Variable"), - SyntaxShape::Number => write!(f, "Number"), - SyntaxShape::Path => write!(f, "Path"), - SyntaxShape::Pattern => write!(f, "Pattern"), - SyntaxShape::Binary => write!(f, "Binary"), - SyntaxShape::Block => write!(f, "Block"), - SyntaxShape::Boolean => write!(f, "Boolean"), - } - } -} - -pub fn baseline_parse_next_expr( - tokens: &mut TokensIterator, - context: &Context, - source: &Text, - syntax_type: SyntaxShape, -) -> Result { - let next = tokens - .next() - .ok_or_else(|| ShellError::string("Expected token, found none"))?; - - trace!(target: "nu::parser::parse_one_expr", "syntax_type={:?}, token={:?}", syntax_type, next); - - match (syntax_type, next) { - (SyntaxShape::Path, TokenNode::Token(token)) => { - return baseline_parse_token_as_path(token, context, source) - } - - (SyntaxShape::Path, token) => { - return Err(ShellError::type_error( - "Path", - token.type_name().tagged(token.tag()), - )) - } - - (SyntaxShape::Pattern, TokenNode::Token(token)) => { - return baseline_parse_token_as_pattern(token, context, source) - } - - (SyntaxShape::Pattern, token) => { - return Err(ShellError::type_error( - "Path", - token.type_name().tagged(token.tag()), - )) - } - - (SyntaxShape::String, TokenNode::Token(token)) => { - return baseline_parse_token_as_string(token, source); - } - - (SyntaxShape::String, token) => { - return Err(ShellError::type_error( - "String", - token.type_name().tagged(token.tag()), - )) - } - - (SyntaxShape::Number, TokenNode::Token(token)) => { - return Ok(baseline_parse_token_as_number(token, source)?); - } - - (SyntaxShape::Number, token) => { - return Err(ShellError::type_error( - "Numeric", - token.type_name().tagged(token.tag()), - )) - } - - // TODO: More legit member processing - (SyntaxShape::Member, TokenNode::Token(token)) => { - return baseline_parse_token_as_string(token, source); - } - - (SyntaxShape::Member, token) => 
{ - return Err(ShellError::type_error( - "member", - token.type_name().tagged(token.tag()), - )) - } - - (SyntaxShape::Any, _) => {} - (SyntaxShape::List, _) => {} - (SyntaxShape::Literal, _) => {} - (SyntaxShape::Variable, _) => {} - (SyntaxShape::Binary, _) => {} - (SyntaxShape::Block, _) => {} - (SyntaxShape::Boolean, _) => {} - }; - - let first = baseline_parse_semantic_token(next, context, source)?; - - let possible_op = tokens.peek(); - - let op = match possible_op { - Some(TokenNode::Operator(op)) => op.clone(), - _ => return Ok(first), - }; - - tokens.next(); - - let second = match tokens.next() { - None => { - return Err(ShellError::labeled_error( - "Expected something after an operator", - "operator", - op.tag(), - )) - } - Some(token) => baseline_parse_semantic_token(token, context, source)?, - }; - - // We definitely have a binary expression here -- let's see if we should coerce it into a block - - match syntax_type { - SyntaxShape::Any => { - let tag = first.tag().until(second.tag()); - let binary = hir::Binary::new(first, op, second); - let binary = hir::RawExpression::Binary(Box::new(binary)); - let binary = binary.tagged(tag); - - Ok(binary) - } - - SyntaxShape::Block => { - let tag = first.tag().until(second.tag()); - - let path: Tagged = match first { - Tagged { - item: hir::RawExpression::Literal(hir::Literal::Bare), - tag, - } => { - let string = tag.slice(source).to_string().tagged(tag); - let path = hir::Path::new( - // TODO: Deal with synthetic nodes that have no representation at all in source - hir::RawExpression::Variable(hir::Variable::It(Tag::unknown())) - .tagged(Tag::unknown()), - vec![string], - ); - let path = hir::RawExpression::Path(Box::new(path)); - path.tagged(first.tag()) - } - Tagged { - item: hir::RawExpression::Literal(hir::Literal::String(inner)), - tag, - } => { - let string = inner.slice(source).to_string().tagged(tag); - let path = hir::Path::new( - // TODO: Deal with synthetic nodes that have no representation at all in 
source - hir::RawExpression::Variable(hir::Variable::It(Tag::unknown())) - .tagged_unknown(), - vec![string], - ); - let path = hir::RawExpression::Path(Box::new(path)); - path.tagged(first.tag()) - } - Tagged { - item: hir::RawExpression::Variable(..), - .. - } => first, - Tagged { tag, item } => { - return Err(ShellError::labeled_error( - "The first part of an un-braced block must be a column name", - item.type_name(), - tag, - )) - } - }; - - let binary = hir::Binary::new(path, op, second); - let binary = hir::RawExpression::Binary(Box::new(binary)); - let binary = binary.tagged(tag); - - let block = hir::RawExpression::Block(vec![binary]); - let block = block.tagged(tag); - - Ok(block) - } - - other => Err(ShellError::unimplemented(format!( - "coerce hint {:?}", - other - ))), - } -} - -pub fn baseline_parse_semantic_token( - token: &TokenNode, - context: &Context, - source: &Text, -) -> Result { - match token { - TokenNode::Token(token) => baseline_parse_single_token(token, source), - TokenNode::Call(_call) => unimplemented!(), - TokenNode::Delimited(delimited) => baseline_parse_delimited(delimited, context, source), - TokenNode::Pipeline(_pipeline) => unimplemented!(), - TokenNode::Operator(op) => Err(ShellError::syntax_error( - "Unexpected operator".tagged(op.tag), - )), - TokenNode::Flag(flag) => Err(ShellError::syntax_error("Unexpected flag".tagged(flag.tag))), - TokenNode::Member(tag) => Err(ShellError::syntax_error( - "BUG: Top-level member".tagged(*tag), - )), - TokenNode::Whitespace(tag) => Err(ShellError::syntax_error( - "BUG: Whitespace found during parse".tagged(*tag), - )), - TokenNode::Error(error) => Err(*error.item.clone()), - TokenNode::Path(path) => baseline_parse_path(path, context, source), - } -} - -pub fn baseline_parse_delimited( - token: &Tagged, - context: &Context, - source: &Text, -) -> Result { - match token.delimiter() { - Delimiter::Brace => { - let children = token.children(); - let exprs = baseline_parse_tokens( - &mut 
TokensIterator::new(children), - context, - source, - SyntaxShape::Any, - )?; - - let expr = hir::RawExpression::Block(exprs); - Ok(expr.tagged(token.tag())) - } - Delimiter::Paren => unimplemented!(), - Delimiter::Square => { - let children = token.children(); - let exprs = baseline_parse_tokens( - &mut TokensIterator::new(children), - context, - source, - SyntaxShape::Any, - )?; - - let expr = hir::RawExpression::List(exprs); - Ok(expr.tagged(token.tag())) - } - } -} - -pub fn baseline_parse_path( - token: &Tagged, - context: &Context, - source: &Text, -) -> Result { - let head = baseline_parse_semantic_token(token.head(), context, source)?; - - let mut tail = vec![]; - - for part in token.tail() { - let string = match part { - TokenNode::Token(token) => match token.item() { - RawToken::Bare => token.tag().slice(source), - RawToken::String(tag) => tag.slice(source), - RawToken::Number(_) - | RawToken::Size(..) - | RawToken::Variable(_) - | RawToken::ExternalCommand(_) - | RawToken::GlobPattern - | RawToken::ExternalWord => { - return Err(ShellError::type_error( - "String", - token.type_name().tagged(part.tag()), - )) - } - }, - - TokenNode::Member(tag) => tag.slice(source), - - // TODO: Make this impossible - other => { - return Err(ShellError::syntax_error( - format!("{} in path", other.type_name()).tagged(other.tag()), - )) - } - } - .to_string(); - - tail.push(string.tagged(part.tag())); - } - - Ok(hir::path(head, tail).tagged(token.tag()).into()) -} - -#[derive(Debug, new)] -pub struct TokensIterator<'a> { - tokens: &'a [TokenNode], - #[new(default)] - index: usize, - #[new(default)] - seen: indexmap::IndexSet, -} - -impl TokensIterator<'_> { - pub fn remove(&mut self, position: usize) { - self.seen.insert(position); - } - - pub fn len(&self) -> usize { - self.tokens.len() - } - - pub fn at_end(&self) -> bool { - for index in self.index..self.tokens.len() { - if !self.seen.contains(&index) { - return false; - } - } - - true - } - - pub fn advance(&mut self) { 
- self.seen.insert(self.index); - self.index += 1; - } - - pub fn extract(&mut self, f: impl Fn(&TokenNode) -> Option) -> Option<(usize, T)> { - for (i, item) in self.tokens.iter().enumerate() { - if self.seen.contains(&i) { - continue; - } - - match f(item) { - None => { - continue; - } - Some(value) => { - self.seen.insert(i); - return Some((i, value)); - } - } - } - - None - } - - pub fn move_to(&mut self, pos: usize) { - self.index = pos; - } - - pub fn restart(&mut self) { - self.index = 0; - } - - pub fn clone(&self) -> TokensIterator { - TokensIterator { - tokens: self.tokens, - index: self.index, - seen: self.seen.clone(), - } - } - - pub fn peek(&self) -> Option<&TokenNode> { - let mut tokens = self.clone(); - - tokens.next() - } - - pub fn debug_remaining(&self) -> Vec { - let mut tokens = self.clone(); - tokens.restart(); - tokens.cloned().collect() - } -} - -impl<'a> Iterator for TokensIterator<'a> { - type Item = &'a TokenNode; - - fn next(&mut self) -> Option<&'a TokenNode> { - loop { - if self.index >= self.tokens.len() { - return None; - } - - if self.seen.contains(&self.index) { - self.advance(); - continue; - } - - if self.index >= self.tokens.len() { - return None; - } - - match &self.tokens[self.index] { - TokenNode::Whitespace(_) => { - self.advance(); - } - other => { - self.advance(); - return Some(other); - } - } - } - } -} diff --git a/src/parser/hir/binary.rs b/src/parser/hir/binary.rs index 02a4d416e4..a44c41d63a 100644 --- a/src/parser/hir/binary.rs +++ b/src/parser/hir/binary.rs @@ -16,6 +16,12 @@ pub struct Binary { right: Expression, } +impl fmt::Display for Binary { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "({} {} {})", self.op.as_str(), self.left, self.right) + } +} + impl ToDebug for Binary { fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { write!(f, "{}", self.left.debug(source))?; diff --git a/src/parser/hir/expand_external_tokens.rs 
b/src/parser/hir/expand_external_tokens.rs new file mode 100644 index 0000000000..30a2a90aaf --- /dev/null +++ b/src/parser/hir/expand_external_tokens.rs @@ -0,0 +1,87 @@ +use crate::errors::ShellError; +use crate::parser::{TokenNode, TokensIterator}; +use crate::{Tag, Tagged, Text}; + +pub fn expand_external_tokens( + token_nodes: &mut TokensIterator<'_>, + source: &Text, +) -> Result>, ShellError> { + let mut out: Vec> = vec![]; + + loop { + if let Some(tag) = expand_next_expression(token_nodes)? { + out.push(tag.tagged_string(source)); + } else { + break; + } + } + + Ok(out) +} + +pub fn expand_next_expression( + token_nodes: &mut TokensIterator<'_>, +) -> Result, ShellError> { + let first = token_nodes.next_non_ws(); + + let first = match first { + None => return Ok(None), + Some(v) => v, + }; + + let first = triage_external_head(first)?; + let mut last = first; + + loop { + let continuation = triage_continuation(token_nodes)?; + + if let Some(continuation) = continuation { + last = continuation; + } else { + break; + } + } + + Ok(Some(first.until(last))) +} + +fn triage_external_head(node: &TokenNode) -> Result { + Ok(match node { + TokenNode::Token(token) => token.tag(), + TokenNode::Call(_call) => unimplemented!(), + TokenNode::Nodes(_nodes) => unimplemented!(), + TokenNode::Delimited(_delimited) => unimplemented!(), + TokenNode::Pipeline(_pipeline) => unimplemented!(), + TokenNode::Flag(flag) => flag.tag(), + TokenNode::Member(member) => *member, + TokenNode::Whitespace(_whitespace) => { + unreachable!("This function should be called after next_non_ws()") + } + TokenNode::Error(_error) => unimplemented!(), + }) +} + +fn triage_continuation<'a, 'b>( + nodes: &'a mut TokensIterator<'b>, +) -> Result, ShellError> { + let mut peeked = nodes.peek_any(); + + let node = match peeked.node { + None => return Ok(None), + Some(node) => node, + }; + + match &node { + node if node.is_whitespace() => return Ok(None), + TokenNode::Token(..) | TokenNode::Flag(..) 
| TokenNode::Member(..) => {} + TokenNode::Call(..) => unimplemented!("call"), + TokenNode::Nodes(..) => unimplemented!("nodes"), + TokenNode::Delimited(..) => unimplemented!("delimited"), + TokenNode::Pipeline(..) => unimplemented!("pipeline"), + TokenNode::Whitespace(..) => unimplemented!("whitespace"), + TokenNode::Error(..) => unimplemented!("error"), + } + + peeked.commit(); + Ok(Some(node.tag())) +} diff --git a/src/parser/hir/external_command.rs b/src/parser/hir/external_command.rs index 28865330d5..2dd42c1312 100644 --- a/src/parser/hir/external_command.rs +++ b/src/parser/hir/external_command.rs @@ -9,7 +9,7 @@ use std::fmt; )] #[get = "pub(crate)"] pub struct ExternalCommand { - name: Tag, + pub(crate) name: Tag, } impl ToDebug for ExternalCommand { diff --git a/src/parser/hir/path.rs b/src/parser/hir/path.rs index f43edf1762..a1925102fb 100644 --- a/src/parser/hir/path.rs +++ b/src/parser/hir/path.rs @@ -2,19 +2,49 @@ use crate::parser::hir::Expression; use crate::prelude::*; use crate::Tagged; use derive_new::new; -use getset::Getters; +use getset::{Getters, MutGetters}; use serde::{Deserialize, Serialize}; use std::fmt; #[derive( - Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Getters, Serialize, Deserialize, new, + Debug, + Clone, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + Getters, + MutGetters, + Serialize, + Deserialize, + new, )] #[get = "pub(crate)"] pub struct Path { head: Expression, + #[get_mut = "pub(crate)"] tail: Vec>, } +impl fmt::Display for Path { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.head)?; + + for entry in &self.tail { + write!(f, ".{}", entry.item)?; + } + + Ok(()) + } +} + +impl Path { + pub(crate) fn parts(self) -> (Expression, Vec>) { + (self.head, self.tail) + } +} + impl ToDebug for Path { fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { write!(f, "{}", self.head.debug(source))?; diff --git a/src/parser/hir/syntax_shape.rs 
b/src/parser/hir/syntax_shape.rs new file mode 100644 index 0000000000..5dcbd0fb76 --- /dev/null +++ b/src/parser/hir/syntax_shape.rs @@ -0,0 +1,662 @@ +mod block; +mod expression; + +use crate::cli::external_command; +use crate::commands::{classified::InternalCommand, ClassifiedCommand, Command}; +use crate::parser::hir::syntax_shape::block::AnyBlockShape; +use crate::parser::hir::tokens_iterator::Peeked; +use crate::parser::parse_command::parse_command_tail; +use crate::parser::{ + hir, + hir::{debug_tokens, TokensIterator}, + Operator, RawToken, TokenNode, +}; +use crate::prelude::*; +use derive_new::new; +use getset::Getters; +use log::trace; +use serde::{Deserialize, Serialize}; +use std::path::{Path, PathBuf}; + +pub(crate) use self::expression::file_path::FilePathShape; +pub(crate) use self::expression::list::ExpressionListShape; +pub(crate) use self::expression::number::{IntShape, NumberShape}; +pub(crate) use self::expression::pattern::PatternShape; +pub(crate) use self::expression::string::StringShape; +pub(crate) use self::expression::unit::UnitShape; +pub(crate) use self::expression::variable_path::{ + ColumnPathShape, DotShape, ExpressionContinuation, ExpressionContinuationShape, MemberShape, + PathTailShape, VariablePathShape, +}; +pub(crate) use self::expression::{continue_expression, AnyExpressionShape}; + +#[derive(Debug, Copy, Clone, Serialize, Deserialize)] +pub enum SyntaxShape { + Any, + List, + String, + Member, + ColumnPath, + Number, + Int, + Path, + Pattern, + Binary, + Block, + Boolean, +} + +impl ExpandExpression for SyntaxShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + match self { + SyntaxShape::Any => expand_expr(&AnyExpressionShape, token_nodes, context), + SyntaxShape::List => Err(ShellError::unimplemented("SyntaxShape:List")), + SyntaxShape::Int => expand_expr(&IntShape, token_nodes, context), + SyntaxShape::String => expand_expr(&StringShape, 
token_nodes, context), + SyntaxShape::Member => { + let syntax = expand_syntax(&MemberShape, token_nodes, context)?; + Ok(syntax.to_expr()) + } + SyntaxShape::ColumnPath => { + let Tagged { item: members, tag } = + expand_syntax(&ColumnPathShape, token_nodes, context)?; + + Ok(hir::Expression::list( + members.into_iter().map(|s| s.to_expr()).collect(), + tag, + )) + } + SyntaxShape::Number => expand_expr(&NumberShape, token_nodes, context), + SyntaxShape::Path => expand_expr(&FilePathShape, token_nodes, context), + SyntaxShape::Pattern => expand_expr(&PatternShape, token_nodes, context), + SyntaxShape::Binary => Err(ShellError::unimplemented("SyntaxShape:Binary")), + SyntaxShape::Block => expand_expr(&AnyBlockShape, token_nodes, context), + SyntaxShape::Boolean => Err(ShellError::unimplemented("SyntaxShape:Boolean")), + } + } +} + +impl std::fmt::Display for SyntaxShape { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + match self { + SyntaxShape::Any => write!(f, "Any"), + SyntaxShape::List => write!(f, "List"), + SyntaxShape::String => write!(f, "String"), + SyntaxShape::Int => write!(f, "Integer"), + SyntaxShape::Member => write!(f, "Member"), + SyntaxShape::ColumnPath => write!(f, "ColumnPath"), + SyntaxShape::Number => write!(f, "Number"), + SyntaxShape::Path => write!(f, "Path"), + SyntaxShape::Pattern => write!(f, "Pattern"), + SyntaxShape::Binary => write!(f, "Binary"), + SyntaxShape::Block => write!(f, "Block"), + SyntaxShape::Boolean => write!(f, "Boolean"), + } + } +} + +#[derive(Getters, new)] +pub struct ExpandContext<'context> { + #[get = "pub(crate)"] + registry: &'context CommandRegistry, + #[get = "pub(crate)"] + tag: Tag, + #[get = "pub(crate)"] + source: &'context Text, + homedir: Option, +} + +impl<'context> ExpandContext<'context> { + pub(crate) fn homedir(&self) -> Option<&Path> { + self.homedir.as_ref().map(|h| h.as_path()) + } + + #[cfg(test)] + pub fn with_empty(source: &Text, callback: impl FnOnce(ExpandContext)) { + 
let mut registry = CommandRegistry::new(); + registry.insert( + "ls", + crate::commands::whole_stream_command(crate::commands::LS), + ); + + callback(ExpandContext { + registry: ®istry, + tag: Tag::unknown(), + source, + homedir: None, + }) + } +} + +pub trait TestSyntax: std::fmt::Debug + Copy { + fn test<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Option>; +} + +pub trait ExpandExpression: std::fmt::Debug + Copy { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result; +} + +pub(crate) trait ExpandSyntax: std::fmt::Debug + Copy { + type Output: std::fmt::Debug; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result; +} + +pub(crate) fn expand_syntax<'a, 'b, T: ExpandSyntax>( + shape: &T, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, +) -> Result { + trace!(target: "nu::expand_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes, context.source)); + + let result = shape.expand_syntax(token_nodes, context); + + match result { + Err(err) => { + trace!(target: "nu::expand_syntax", "error :: {} :: {:?}", err, debug_tokens(token_nodes, context.source)); + Err(err) + } + + Ok(result) => { + trace!(target: "nu::expand_syntax", "ok :: {:?} :: {:?}", result, debug_tokens(token_nodes, context.source)); + Ok(result) + } + } +} + +pub(crate) fn expand_expr<'a, 'b, T: ExpandExpression>( + shape: &T, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, +) -> Result { + trace!(target: "nu::expand_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes, context.source)); + + let result = shape.expand_syntax(token_nodes, context); + + match result { + Err(err) => { + trace!(target: "nu::expand_syntax", "error :: {} :: {:?}", err, debug_tokens(token_nodes, context.source)); + Err(err) + } + + Ok(result) => { + 
trace!(target: "nu::expand_syntax", "ok :: {:?} :: {:?}", result, debug_tokens(token_nodes, context.source)); + Ok(result) + } + } +} + +impl ExpandSyntax for T { + type Output = hir::Expression; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + ExpandExpression::expand_expr(self, token_nodes, context) + } +} + +pub trait SkipSyntax: std::fmt::Debug + Copy { + fn skip<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError>; +} + +enum BarePathState { + Initial, + Seen(Tag, Tag), + Error(ShellError), +} + +impl BarePathState { + pub fn seen(self, tag: Tag) -> BarePathState { + match self { + BarePathState::Initial => BarePathState::Seen(tag, tag), + BarePathState::Seen(start, _) => BarePathState::Seen(start, tag), + BarePathState::Error(err) => BarePathState::Error(err), + } + } + + pub fn end(self, peeked: Peeked, reason: impl Into) -> BarePathState { + match self { + BarePathState::Initial => BarePathState::Error(peeked.type_error(reason)), + BarePathState::Seen(start, end) => BarePathState::Seen(start, end), + BarePathState::Error(err) => BarePathState::Error(err), + } + } + + pub fn into_bare(self) -> Result { + match self { + BarePathState::Initial => unreachable!("into_bare in initial state"), + BarePathState::Seen(start, end) => Ok(start.until(end)), + BarePathState::Error(err) => Err(err), + } + } +} + +pub fn expand_bare<'a, 'b>( + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + predicate: impl Fn(&TokenNode) -> bool, +) -> Result { + let mut state = BarePathState::Initial; + + loop { + // Whitespace ends a word + let mut peeked = token_nodes.peek_any(); + + match peeked.node { + None => { + state = state.end(peeked, "word"); + break; + } + Some(node) => { + if predicate(node) { + state = state.seen(node.tag()); + peeked.commit(); + } else { + state = state.end(peeked, "word"); + break; + } + } + } 
+ } + + state.into_bare() +} + +#[derive(Debug, Copy, Clone)] +pub struct BarePathShape; + +impl ExpandSyntax for BarePathShape { + type Output = Tag; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + expand_bare(token_nodes, context, |token| match token { + TokenNode::Token(Tagged { + item: RawToken::Bare, + .. + }) + | TokenNode::Token(Tagged { + item: RawToken::Operator(Operator::Dot), + .. + }) => true, + + _ => false, + }) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct BareShape; + +impl ExpandSyntax for BareShape { + type Output = Tagged; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + let peeked = token_nodes.peek_any().not_eof("word")?; + + match peeked.node { + TokenNode::Token(Tagged { + item: RawToken::Bare, + tag, + }) => { + peeked.commit(); + Ok(tag.tagged_string(context.source)) + } + + other => Err(ShellError::type_error("word", other.tagged_type_name())), + } + } +} + +impl TestSyntax for BareShape { + fn test<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Option> { + let peeked = token_nodes.peek_any(); + + match peeked.node { + Some(TokenNode::Token(token)) => match token.item { + RawToken::Bare => Some(peeked), + _ => None, + }, + + _ => None, + } + } +} + +#[derive(Debug)] +pub enum CommandSignature { + Internal(Tagged>), + LiteralExternal { outer: Tag, inner: Tag }, + External(Tag), + Expression(hir::Expression), +} + +impl CommandSignature { + pub fn to_expression(&self) -> hir::Expression { + match self { + CommandSignature::Internal(command) => { + let tag = command.tag; + hir::RawExpression::Command(tag).tagged(tag) + } + CommandSignature::LiteralExternal { outer, inner } => { + hir::RawExpression::ExternalCommand(hir::ExternalCommand::new(*inner)).tagged(outer) + } + CommandSignature::External(tag) => { + 
hir::RawExpression::ExternalCommand(hir::ExternalCommand::new(*tag)).tagged(tag) + } + CommandSignature::Expression(expr) => expr.clone(), + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct CommandHeadShape; + +impl ExpandSyntax for CommandHeadShape { + type Output = CommandSignature; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + let node = + parse_single_node_skipping_ws(token_nodes, "command head1", |token, token_tag| { + Ok(match token { + RawToken::ExternalCommand(tag) => CommandSignature::LiteralExternal { + outer: token_tag, + inner: tag, + }, + RawToken::Bare => { + let name = token_tag.slice(context.source); + if context.registry.has(name) { + let command = context.registry.expect_command(name); + CommandSignature::Internal(command.tagged(token_tag)) + } else { + CommandSignature::External(token_tag) + } + } + _ => { + return Err(ShellError::type_error( + "command head2", + token.type_name().tagged(token_tag), + )) + } + }) + }); + + match node { + Ok(expr) => return Ok(expr), + Err(_) => match expand_expr(&AnyExpressionShape, token_nodes, context) { + Ok(expr) => return Ok(CommandSignature::Expression(expr)), + Err(_) => Err(token_nodes.peek_non_ws().type_error("command head3")), + }, + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct ClassifiedCommandShape; + +impl ExpandSyntax for ClassifiedCommandShape { + type Output = ClassifiedCommand; + + fn expand_syntax<'a, 'b>( + &self, + iterator: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + let head = expand_syntax(&CommandHeadShape, iterator, context)?; + + match &head { + CommandSignature::Expression(expr) => Err(ShellError::syntax_error( + "Unexpected expression in command position".tagged(expr.tag), + )), + + // If the command starts with `^`, treat it as an external command no matter what + CommandSignature::External(name) => { + let name_str = name.slice(&context.source); + + 
external_command(iterator, &context.source, name_str.tagged(name)) + } + + CommandSignature::LiteralExternal { outer, inner } => { + let name_str = inner.slice(&context.source); + + external_command(iterator, &context.source, name_str.tagged(outer)) + } + + CommandSignature::Internal(command) => { + let tail = + parse_command_tail(&command.signature(), &context, iterator, command.tag)?; + + let (positional, named) = match tail { + None => (None, None), + Some((positional, named)) => (positional, named), + }; + + let call = hir::Call { + head: Box::new(head.to_expression()), + positional, + named, + }; + + Ok(ClassifiedCommand::Internal(InternalCommand::new( + command.item.name().to_string(), + command.tag, + call, + ))) + } + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct InternalCommandHeadShape; + +impl ExpandExpression for InternalCommandHeadShape { + fn expand_expr( + &self, + token_nodes: &mut TokensIterator<'_>, + _context: &ExpandContext, + ) -> Result { + let peeked_head = token_nodes.peek_non_ws().not_eof("command head4")?; + + let expr = match peeked_head.node { + TokenNode::Token( + spanned @ Tagged { + item: RawToken::Bare, + .. 
+ }, + ) => spanned.map(|_| hir::RawExpression::Literal(hir::Literal::Bare)), + + TokenNode::Token(Tagged { + item: RawToken::String(inner_tag), + tag, + }) => hir::RawExpression::Literal(hir::Literal::String(*inner_tag)).tagged(*tag), + + node => { + return Err(ShellError::type_error( + "command head5", + node.tagged_type_name(), + )) + } + }; + + peeked_head.commit(); + + Ok(expr) + } +} + +fn parse_single_node<'a, 'b, T>( + token_nodes: &'b mut TokensIterator<'a>, + expected: &'static str, + callback: impl FnOnce(RawToken, Tag) -> Result, +) -> Result { + let peeked = token_nodes.peek_any().not_eof(expected)?; + + let expr = match peeked.node { + TokenNode::Token(token) => callback(token.item, token.tag())?, + + other => return Err(ShellError::type_error(expected, other.tagged_type_name())), + }; + + peeked.commit(); + + Ok(expr) +} + +fn parse_single_node_skipping_ws<'a, 'b, T>( + token_nodes: &'b mut TokensIterator<'a>, + expected: &'static str, + callback: impl FnOnce(RawToken, Tag) -> Result, +) -> Result { + let peeked = token_nodes.peek_non_ws().not_eof(expected)?; + + let expr = match peeked.node { + TokenNode::Token(token) => callback(token.item, token.tag())?, + + other => return Err(ShellError::type_error(expected, other.tagged_type_name())), + }; + + peeked.commit(); + + Ok(expr) +} + +#[derive(Debug, Copy, Clone)] +pub struct WhitespaceShape; + +impl ExpandSyntax for WhitespaceShape { + type Output = Tag; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Result { + let peeked = token_nodes.peek_any().not_eof("whitespace")?; + + let tag = match peeked.node { + TokenNode::Whitespace(tag) => *tag, + + other => { + return Err(ShellError::type_error( + "whitespace", + other.tagged_type_name(), + )) + } + }; + + peeked.commit(); + + Ok(tag) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct SpacedExpression { + inner: T, +} + +impl ExpandExpression for SpacedExpression { + fn 
expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + // TODO: Make the name part of the trait + let peeked = token_nodes.peek_any().not_eof("whitespace")?; + + match peeked.node { + TokenNode::Whitespace(_) => { + peeked.commit(); + expand_expr(&self.inner, token_nodes, context) + } + + other => Err(ShellError::type_error( + "whitespace", + other.tagged_type_name(), + )), + } + } +} + +pub fn maybe_spaced(inner: T) -> MaybeSpacedExpression { + MaybeSpacedExpression { inner } +} + +#[derive(Debug, Copy, Clone)] +pub struct MaybeSpacedExpression { + inner: T, +} + +impl ExpandExpression for MaybeSpacedExpression { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + // TODO: Make the name part of the trait + let peeked = token_nodes.peek_any().not_eof("whitespace")?; + + match peeked.node { + TokenNode::Whitespace(_) => { + peeked.commit(); + expand_expr(&self.inner, token_nodes, context) + } + + _ => { + peeked.rollback(); + expand_expr(&self.inner, token_nodes, context) + } + } + } +} + +pub fn spaced(inner: T) -> SpacedExpression { + SpacedExpression { inner } +} + +fn expand_variable(tag: Tag, token_tag: Tag, source: &Text) -> hir::Expression { + if tag.slice(source) == "it" { + hir::Expression::it_variable(tag, token_tag) + } else { + hir::Expression::variable(tag, token_tag) + } +} diff --git a/src/parser/hir/syntax_shape/block.rs b/src/parser/hir/syntax_shape/block.rs new file mode 100644 index 0000000000..a78292b34e --- /dev/null +++ b/src/parser/hir/syntax_shape/block.rs @@ -0,0 +1,168 @@ +use crate::errors::ShellError; +use crate::parser::{ + hir, + hir::syntax_shape::{ + continue_expression, expand_expr, expand_syntax, ExpandContext, ExpandExpression, + ExpressionListShape, PathTailShape, VariablePathShape, + }, + hir::tokens_iterator::TokensIterator, + RawToken, TokenNode, +}; +use crate::{Tag, Tagged, TaggedItem}; + 
+#[derive(Debug, Copy, Clone)] +pub struct AnyBlockShape; + +impl ExpandExpression for AnyBlockShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + let block = token_nodes.peek_non_ws().not_eof("block")?; + + // is it just a block? + let block = block.node.as_block(); + + match block { + Some(block) => { + let mut iterator = TokensIterator::new(&block.item, context.tag, false); + + let exprs = expand_syntax(&ExpressionListShape, &mut iterator, context)?; + + return Ok(hir::RawExpression::Block(exprs).tagged(block.tag)); + } + _ => {} + } + + expand_syntax(&ShorthandBlock, token_nodes, context) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct ShorthandBlock; + +impl ExpandExpression for ShorthandBlock { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + let path = expand_expr(&ShorthandPath, token_nodes, context)?; + let start = path.tag; + let expr = continue_expression(path, token_nodes, context)?; + let end = expr.tag; + let block = hir::RawExpression::Block(vec![expr]).tagged(start.until(end)); + + Ok(block) + } +} + +/// A shorthand for `$it.foo."bar"`, used inside of a shorthand block +#[derive(Debug, Copy, Clone)] +pub struct ShorthandPath; + +impl ExpandExpression for ShorthandPath { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + // if it's a variable path, that's the head part + let path = expand_expr(&VariablePathShape, token_nodes, context); + + match path { + Ok(path) => return Ok(path), + Err(_) => {} + } + + // Synthesize the head of the shorthand path (`` -> `$it.`) + let mut head = expand_expr(&ShorthandHeadShape, token_nodes, context)?; + + // Now that we've synthesized the head, of the path, proceed to expand the tail of the path + // like any other path. 
+ let tail = expand_syntax(&PathTailShape, token_nodes, context); + + match tail { + Err(_) => return Ok(head), + Ok((tail, _)) => { + // For each member that `PathTailShape` expanded, join it onto the existing expression + // to form a new path + for member in tail { + head = hir::Expression::dot_member(head, member); + } + + println!("{:?}", head); + + Ok(head) + } + } + } +} + +/// A shorthand for `$it.foo."bar"`, used inside of a shorthand block +#[derive(Debug, Copy, Clone)] +pub struct ShorthandHeadShape; + +impl ExpandExpression for ShorthandHeadShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + // A shorthand path must not be at EOF + let peeked = token_nodes.peek_non_ws().not_eof("shorthand path")?; + + match peeked.node { + // If the head of a shorthand path is a bare token, it expands to `$it.bare` + TokenNode::Token(Tagged { + item: RawToken::Bare, + tag, + }) => { + // Commit the peeked token + peeked.commit(); + + // Synthesize an `$it` expression + let it = synthetic_it(token_nodes.anchor()); + + // Make a path out of `$it` and the bare token as a member + Ok(hir::Expression::path( + it, + vec![tag.tagged_string(context.source)], + tag, + )) + } + + // If the head of a shorthand path is a string, it expands to `$it."some string"` + TokenNode::Token(Tagged { + item: RawToken::String(inner), + tag: outer, + }) => { + // Commit the peeked token + peeked.commit(); + + // Synthesize an `$it` expression + let it = synthetic_it(token_nodes.anchor()); + + // Make a path out of `$it` and the bare token as a member + Ok(hir::Expression::path( + it, + vec![inner.string(context.source).tagged(outer)], + outer, + )) + } + + // Any other token is not a valid bare head + other => { + return Err(ShellError::type_error( + "shorthand path", + other.tagged_type_name(), + )) + } + } + } +} + +fn synthetic_it(origin: uuid::Uuid) -> hir::Expression { + 
hir::Expression::it_variable(Tag::unknown_span(origin), Tag::unknown_span(origin)) +} diff --git a/src/parser/hir/syntax_shape/expression.rs b/src/parser/hir/syntax_shape/expression.rs new file mode 100644 index 0000000000..58cfa4a1a5 --- /dev/null +++ b/src/parser/hir/syntax_shape/expression.rs @@ -0,0 +1,188 @@ +pub(crate) mod delimited; +pub(crate) mod file_path; +pub(crate) mod list; +pub(crate) mod number; +pub(crate) mod pattern; +pub(crate) mod string; +pub(crate) mod unit; +pub(crate) mod variable_path; + +use crate::parser::hir::syntax_shape::{ + expand_expr, expand_syntax, expand_variable, expression::delimited::expand_delimited_expr, + BareShape, DotShape, ExpandContext, ExpandExpression, ExpandSyntax, ExpressionContinuation, + ExpressionContinuationShape, UnitShape, +}; +use crate::parser::{ + hir, + hir::{Expression, Operator, TokensIterator}, + RawToken, Token, TokenNode, +}; +use crate::prelude::*; +use std::path::PathBuf; + +#[derive(Debug, Copy, Clone)] +pub struct AnyExpressionShape; + +impl ExpandExpression for AnyExpressionShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + // Look for an expression at the cursor + let head = expand_expr(&AnyExpressionStartShape, token_nodes, context)?; + + continue_expression(head, token_nodes, context) + } +} + +pub(crate) fn continue_expression( + mut head: hir::Expression, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, +) -> Result { + loop { + // Check to see whether there's any continuation after the head expression + let continuation = expand_syntax(&ExpressionContinuationShape, token_nodes, context); + + match continuation { + // If there's no continuation, return the head + Err(_) => return Ok(head), + // Otherwise, form a new expression by combining the head with the continuation + Ok(continuation) => match continuation { + // If the continuation is a `.member`, form a path with the new member + 
ExpressionContinuation::DotSuffix(_dot, member) => { + head = Expression::dot_member(head, member); + } + + // Otherwise, if the continuation is an infix suffix, form an infix expression + ExpressionContinuation::InfixSuffix(op, expr) => { + head = Expression::infix(head, op, expr); + } + }, + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct AnyExpressionStartShape; + +impl ExpandExpression for AnyExpressionStartShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + let size = expand_expr(&UnitShape, token_nodes, context); + + match size { + Ok(expr) => return Ok(expr), + Err(_) => {} + } + + let peek_next = token_nodes.peek_any().not_eof("expression")?; + + let head = match peek_next.node { + TokenNode::Token(token) => match token.item { + RawToken::Bare | RawToken::Operator(Operator::Dot) => { + let start = token.tag; + peek_next.commit(); + + let end = expand_syntax(&BareTailShape, token_nodes, context)?; + + match end { + Some(end) => return Ok(hir::Expression::bare(start.until(end))), + None => return Ok(hir::Expression::bare(start)), + } + } + _ => { + peek_next.commit(); + expand_one_context_free_token(*token, context) + } + }, + node @ TokenNode::Call(_) + | node @ TokenNode::Nodes(_) + | node @ TokenNode::Pipeline(_) + | node @ TokenNode::Flag(_) + | node @ TokenNode::Member(_) + | node @ TokenNode::Whitespace(_) => { + return Err(ShellError::type_error( + "expression", + node.tagged_type_name(), + )) + } + TokenNode::Delimited(delimited) => { + peek_next.commit(); + expand_delimited_expr(delimited, context) + } + + TokenNode::Error(error) => return Err(*error.item.clone()), + }?; + + Ok(head) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct BareTailShape; + +impl ExpandSyntax for BareTailShape { + type Output = Option; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result, ShellError> { + let mut end: Option 
= None; + + loop { + match expand_syntax(&BareShape, token_nodes, context) { + Ok(bare) => { + end = Some(bare.tag); + continue; + } + + Err(_) => match expand_syntax(&DotShape, token_nodes, context) { + Ok(dot) => { + end = Some(dot); + continue; + } + + Err(_) => break, + }, + } + } + + Ok(end) + } +} + +fn expand_one_context_free_token<'a, 'b>( + token: Token, + context: &ExpandContext, +) -> Result { + Ok(match token.item { + RawToken::Number(number) => { + hir::Expression::number(number.to_number(context.source), token.tag) + } + RawToken::Operator(..) => { + return Err(ShellError::syntax_error( + "unexpected operator, expected an expression".tagged(token.tag), + )) + } + RawToken::Size(..) => unimplemented!("size"), + RawToken::String(tag) => hir::Expression::string(tag, token.tag), + RawToken::Variable(tag) => expand_variable(tag, token.tag, &context.source), + RawToken::ExternalCommand(_) => unimplemented!(), + RawToken::ExternalWord => unimplemented!(), + RawToken::GlobPattern => hir::Expression::pattern(token.tag), + RawToken::Bare => hir::Expression::string(token.tag, token.tag), + }) +} + +pub fn expand_file_path(string: &str, context: &ExpandContext) -> PathBuf { + let expanded = shellexpand::tilde_with_context(string, || context.homedir()); + + PathBuf::from(expanded.as_ref()) +} diff --git a/src/parser/hir/syntax_shape/expression/delimited.rs b/src/parser/hir/syntax_shape/expression/delimited.rs new file mode 100644 index 0000000000..0a01b0fc26 --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/delimited.rs @@ -0,0 +1,38 @@ +use crate::parser::hir::syntax_shape::{expand_syntax, ExpandContext, ExpressionListShape}; +use crate::parser::{hir, hir::TokensIterator}; +use crate::parser::{DelimitedNode, Delimiter}; +use crate::prelude::*; + +pub fn expand_delimited_expr( + delimited: &Tagged, + context: &ExpandContext, +) -> Result { + match &delimited.item { + DelimitedNode { + delimiter: Delimiter::Square, + children, + } => { + let mut tokens = 
TokensIterator::new(&children, delimited.tag, false); + + let list = expand_syntax(&ExpressionListShape, &mut tokens, context); + + Ok(hir::Expression::list(list?, delimited.tag)) + } + + DelimitedNode { + delimiter: Delimiter::Paren, + .. + } => Err(ShellError::type_error( + "expression", + "unimplemented call expression".tagged(delimited.tag), + )), + + DelimitedNode { + delimiter: Delimiter::Brace, + .. + } => Err(ShellError::type_error( + "expression", + "unimplemented block expression".tagged(delimited.tag), + )), + } +} diff --git a/src/parser/hir/syntax_shape/expression/file_path.rs b/src/parser/hir/syntax_shape/expression/file_path.rs new file mode 100644 index 0000000000..c0e5c7c2ab --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/file_path.rs @@ -0,0 +1,59 @@ +use crate::parser::hir::syntax_shape::{ + expand_syntax, expression::expand_file_path, parse_single_node, BarePathShape, ExpandContext, + ExpandExpression, +}; +use crate::parser::{hir, hir::TokensIterator, RawToken}; +use crate::prelude::*; + +#[derive(Debug, Copy, Clone)] +pub struct FilePathShape; + +impl ExpandExpression for FilePathShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + let bare = expand_syntax(&BarePathShape, token_nodes, context); + + match bare { + Ok(tag) => { + let string = tag.slice(context.source); + let path = expand_file_path(string, context); + return Ok(hir::Expression::file_path(path, tag)); + } + Err(_) => {} + } + + parse_single_node(token_nodes, "Path", |token, token_tag| { + Ok(match token { + RawToken::GlobPattern => { + return Err(ShellError::type_error( + "Path", + "glob pattern".tagged(token_tag), + )) + } + RawToken::Operator(..) 
=> { + return Err(ShellError::type_error("Path", "operator".tagged(token_tag))) + } + RawToken::Variable(tag) if tag.slice(context.source) == "it" => { + hir::Expression::it_variable(tag, token_tag) + } + RawToken::Variable(tag) => hir::Expression::variable(tag, token_tag), + RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag), + RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)), + RawToken::Number(_) => hir::Expression::bare(token_tag), + RawToken::Size(_, _) => hir::Expression::bare(token_tag), + RawToken::Bare => hir::Expression::file_path( + expand_file_path(token_tag.slice(context.source), context), + token_tag, + ), + + RawToken::String(tag) => hir::Expression::file_path( + expand_file_path(tag.slice(context.source), context), + token_tag, + ), + }) + }) + } +} diff --git a/src/parser/hir/syntax_shape/expression/list.rs b/src/parser/hir/syntax_shape/expression/list.rs new file mode 100644 index 0000000000..9d28f44141 --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/list.rs @@ -0,0 +1,43 @@ +use crate::errors::ShellError; +use crate::parser::{ + hir, + hir::syntax_shape::{ + expand_expr, maybe_spaced, spaced, AnyExpressionShape, ExpandContext, ExpandSyntax, + }, + hir::{debug_tokens, TokensIterator}, +}; + +#[derive(Debug, Copy, Clone)] +pub struct ExpressionListShape; + +impl ExpandSyntax for ExpressionListShape { + type Output = Vec; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result, ShellError> { + let mut exprs = vec![]; + + if token_nodes.at_end_possible_ws() { + return Ok(exprs); + } + + let expr = expand_expr(&maybe_spaced(AnyExpressionShape), token_nodes, context)?; + + exprs.push(expr); + + println!("{:?}", debug_tokens(token_nodes, context.source)); + + loop { + if token_nodes.at_end_possible_ws() { + return Ok(exprs); + } + + let expr = expand_expr(&spaced(AnyExpressionShape), token_nodes, context)?; + + 
exprs.push(expr); + } + } +} diff --git a/src/parser/hir/syntax_shape/expression/number.rs b/src/parser/hir/syntax_shape/expression/number.rs new file mode 100644 index 0000000000..5b77044a2d --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/number.rs @@ -0,0 +1,97 @@ +use crate::parser::hir::syntax_shape::{parse_single_node, ExpandContext, ExpandExpression}; +use crate::parser::{ + hir, + hir::{RawNumber, TokensIterator}, + RawToken, +}; +use crate::prelude::*; + +#[derive(Debug, Copy, Clone)] +pub struct NumberShape; + +impl ExpandExpression for NumberShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + parse_single_node(token_nodes, "Number", |token, token_tag| { + Ok(match token { + RawToken::GlobPattern => { + return Err(ShellError::type_error( + "Number", + "glob pattern".to_string().tagged(token_tag), + )) + } + RawToken::Operator(..) => { + return Err(ShellError::type_error( + "Number", + "operator".to_string().tagged(token_tag), + )) + } + RawToken::Variable(tag) if tag.slice(context.source) == "it" => { + hir::Expression::it_variable(tag, token_tag) + } + RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag), + RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)), + RawToken::Variable(tag) => hir::Expression::variable(tag, token_tag), + RawToken::Number(number) => { + hir::Expression::number(number.to_number(context.source), token_tag) + } + RawToken::Size(number, unit) => { + hir::Expression::size(number.to_number(context.source), unit, token_tag) + } + RawToken::Bare => hir::Expression::bare(token_tag), + RawToken::String(tag) => hir::Expression::string(tag, token_tag), + }) + }) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct IntShape; + +impl ExpandExpression for IntShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + 
parse_single_node(token_nodes, "Integer", |token, token_tag| { + Ok(match token { + RawToken::GlobPattern => { + return Err(ShellError::type_error( + "Integer", + "glob pattern".to_string().tagged(token_tag), + )) + } + RawToken::Operator(..) => { + return Err(ShellError::type_error( + "Integer", + "operator".to_string().tagged(token_tag), + )) + } + RawToken::Variable(tag) if tag.slice(context.source) == "it" => { + hir::Expression::it_variable(tag, token_tag) + } + RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag), + RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)), + RawToken::Variable(tag) => hir::Expression::variable(tag, token_tag), + RawToken::Number(number @ RawNumber::Int(_)) => { + hir::Expression::number(number.to_number(context.source), token_tag) + } + token @ RawToken::Number(_) => { + return Err(ShellError::type_error( + "Integer", + token.type_name().tagged(token_tag), + )); + } + RawToken::Size(number, unit) => { + hir::Expression::size(number.to_number(context.source), unit, token_tag) + } + RawToken::Bare => hir::Expression::bare(token_tag), + RawToken::String(tag) => hir::Expression::string(tag, token_tag), + }) + }) + } +} diff --git a/src/parser/hir/syntax_shape/expression/pattern.rs b/src/parser/hir/syntax_shape/expression/pattern.rs new file mode 100644 index 0000000000..4105b79b4f --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/pattern.rs @@ -0,0 +1,86 @@ +use crate::parser::hir::syntax_shape::{ + expand_bare, expand_syntax, expression::expand_file_path, parse_single_node, ExpandContext, + ExpandExpression, ExpandSyntax, +}; +use crate::parser::{hir, hir::TokensIterator, Operator, RawToken, TokenNode}; +use crate::prelude::*; + +#[derive(Debug, Copy, Clone)] +pub struct PatternShape; + +impl ExpandExpression for PatternShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + let pattern = 
expand_syntax(&BarePatternShape, token_nodes, context); + + match pattern { + Ok(tag) => { + return Ok(hir::Expression::pattern(tag)); + } + Err(_) => {} + } + + parse_single_node(token_nodes, "Pattern", |token, token_tag| { + Ok(match token { + RawToken::GlobPattern => { + return Err(ShellError::unreachable( + "glob pattern after glob already returned", + )) + } + RawToken::Operator(..) => { + return Err(ShellError::unreachable("dot after glob already returned")) + } + RawToken::Bare => { + return Err(ShellError::unreachable("bare after glob already returned")) + } + + RawToken::Variable(tag) if tag.slice(context.source) == "it" => { + hir::Expression::it_variable(tag, token_tag) + } + RawToken::Variable(tag) => hir::Expression::variable(tag, token_tag), + RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag), + RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)), + RawToken::Number(_) => hir::Expression::bare(token_tag), + RawToken::Size(_, _) => hir::Expression::bare(token_tag), + + RawToken::String(tag) => hir::Expression::file_path( + expand_file_path(tag.slice(context.source), context), + token_tag, + ), + }) + }) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct BarePatternShape; + +impl ExpandSyntax for BarePatternShape { + type Output = Tag; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + expand_bare(token_nodes, context, |token| match token { + TokenNode::Token(Tagged { + item: RawToken::Bare, + .. + }) + | TokenNode::Token(Tagged { + item: RawToken::Operator(Operator::Dot), + .. + }) + | TokenNode::Token(Tagged { + item: RawToken::GlobPattern, + .. 
+ }) => true, + + _ => false, + }) + } +} diff --git a/src/parser/hir/syntax_shape/expression/string.rs b/src/parser/hir/syntax_shape/expression/string.rs new file mode 100644 index 0000000000..6a4973febe --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/string.rs @@ -0,0 +1,60 @@ +use crate::parser::hir::syntax_shape::{ + expand_variable, parse_single_node, ExpandContext, ExpandExpression, TestSyntax, +}; +use crate::parser::hir::tokens_iterator::Peeked; +use crate::parser::{hir, hir::TokensIterator, RawToken, TokenNode}; +use crate::prelude::*; + +#[derive(Debug, Copy, Clone)] +pub struct StringShape; + +impl ExpandExpression for StringShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + parse_single_node(token_nodes, "String", |token, token_tag| { + Ok(match token { + RawToken::GlobPattern => { + return Err(ShellError::type_error( + "String", + "glob pattern".tagged(token_tag), + )) + } + RawToken::Operator(..) 
=> { + return Err(ShellError::type_error( + "String", + "operator".tagged(token_tag), + )) + } + RawToken::Variable(tag) => expand_variable(tag, token_tag, &context.source), + RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag), + RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)), + RawToken::Number(_) => hir::Expression::bare(token_tag), + RawToken::Size(_, _) => hir::Expression::bare(token_tag), + RawToken::Bare => hir::Expression::bare(token_tag), + RawToken::String(tag) => hir::Expression::string(tag, token_tag), + }) + }) + } +} + +impl TestSyntax for StringShape { + fn test<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Option> { + let peeked = token_nodes.peek_any(); + + match peeked.node { + Some(TokenNode::Token(token)) => match token.item { + RawToken::String(_) => Some(peeked), + _ => None, + }, + + _ => None, + } + } +} diff --git a/src/parser/hir/syntax_shape/expression/unit.rs b/src/parser/hir/syntax_shape/expression/unit.rs new file mode 100644 index 0000000000..cc3642bda5 --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/unit.rs @@ -0,0 +1,89 @@ +use crate::parser::hir::syntax_shape::{ExpandContext, ExpandExpression}; +use crate::parser::parse::tokens::RawNumber; +use crate::parser::parse::unit::Unit; +use crate::parser::{hir, hir::TokensIterator, RawToken, TokenNode}; +use crate::prelude::*; +use nom::branch::alt; +use nom::bytes::complete::tag; +use nom::character::complete::digit1; +use nom::combinator::{all_consuming, opt, value}; +use nom::IResult; + +#[derive(Debug, Copy, Clone)] +pub struct UnitShape; + +impl ExpandExpression for UnitShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + let peeked = token_nodes.peek_any().not_eof("unit")?; + + let tag = match peeked.node { + TokenNode::Token(Tagged { + item: RawToken::Bare, + tag, + }) => tag, + 
_ => return Err(peeked.type_error("unit")), + }; + + let unit = unit_size(tag.slice(context.source), *tag); + + let (_, (number, unit)) = match unit { + Err(_) => { + return Err(ShellError::type_error( + "unit", + "word".tagged(Tag::unknown()), + )) + } + Ok((number, unit)) => (number, unit), + }; + + Ok(hir::Expression::size( + number.to_number(context.source), + unit, + tag, + )) + } +} + +fn unit_size(input: &str, bare_tag: Tag) -> IResult<&str, (Tagged, Unit)> { + let (input, digits) = digit1(input)?; + + let (input, dot) = opt(tag("."))(input)?; + + let (input, number) = match dot { + Some(dot) => { + let (input, rest) = digit1(input)?; + ( + input, + RawNumber::decimal(( + bare_tag.span.start(), + bare_tag.span.start() + digits.len() + dot.len() + rest.len(), + bare_tag.anchor, + )), + ) + } + + None => ( + input, + RawNumber::int(( + bare_tag.span.start(), + bare_tag.span.start() + digits.len(), + bare_tag.anchor, + )), + ), + }; + + let (input, unit) = all_consuming(alt(( + value(Unit::B, alt((tag("B"), tag("b")))), + value(Unit::KB, alt((tag("KB"), tag("kb"), tag("Kb")))), + value(Unit::MB, alt((tag("MB"), tag("mb"), tag("Mb")))), + value(Unit::MB, alt((tag("GB"), tag("gb"), tag("Gb")))), + value(Unit::MB, alt((tag("TB"), tag("tb"), tag("Tb")))), + value(Unit::MB, alt((tag("PB"), tag("pb"), tag("Pb")))), + )))(input)?; + + Ok((input, (number, unit))) +} diff --git a/src/parser/hir/syntax_shape/expression/variable_path.rs b/src/parser/hir/syntax_shape/expression/variable_path.rs new file mode 100644 index 0000000000..afea1b1499 --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/variable_path.rs @@ -0,0 +1,396 @@ +use crate::parser::hir::syntax_shape::{ + expand_expr, expand_syntax, parse_single_node, AnyExpressionShape, BareShape, ExpandContext, + ExpandExpression, ExpandSyntax, Peeked, SkipSyntax, StringShape, TestSyntax, WhitespaceShape, +}; +use crate::parser::{hir, hir::Expression, hir::TokensIterator, Operator, RawToken}; +use 
crate::prelude::*; + +#[derive(Debug, Copy, Clone)] +pub struct VariablePathShape; + +impl ExpandExpression for VariablePathShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + // 1. let the head be the first token, expecting a variable + // 2. let the tail be an empty list of members + // 2. while the next token (excluding ws) is a dot: + // 1. consume the dot + // 2. consume the next token as a member and push it onto tail + + let head = expand_expr(&VariableShape, token_nodes, context)?; + let start = head.tag(); + let mut end = start; + let mut tail: Vec> = vec![]; + + loop { + match DotShape.skip(token_nodes, context) { + Err(_) => break, + Ok(_) => {} + } + + let syntax = expand_syntax(&MemberShape, token_nodes, context)?; + let member = syntax.to_tagged_string(context.source); + + end = member.tag(); + tail.push(member); + } + + Ok(hir::Expression::path(head, tail, start.until(end))) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct PathTailShape; + +impl ExpandSyntax for PathTailShape { + type Output = (Vec>, Tag); + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + let mut end: Option = None; + let mut tail = vec![]; + + loop { + match DotShape.skip(token_nodes, context) { + Err(_) => break, + Ok(_) => {} + } + + let syntax = expand_syntax(&MemberShape, token_nodes, context)?; + let member = syntax.to_tagged_string(context.source); + end = Some(member.tag()); + tail.push(member); + } + + match end { + None => { + return Err(ShellError::type_error( + "path tail", + token_nodes.typed_tag_at_cursor(), + )) + } + + Some(end) => Ok((tail, end)), + } + } +} + +#[derive(Debug)] +pub enum ExpressionContinuation { + DotSuffix(Tag, Tagged), + InfixSuffix(Tagged, Expression), +} + +/// An expression continuation +#[derive(Debug, Copy, Clone)] +pub struct ExpressionContinuationShape; + +impl ExpandSyntax for 
ExpressionContinuationShape { + type Output = ExpressionContinuation; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + // Try to expand a `.` + let dot = expand_syntax(&DotShape, token_nodes, context); + + match dot { + // If a `.` was matched, it's a `Path`, and we expect a `Member` next + Ok(dot) => { + let syntax = expand_syntax(&MemberShape, token_nodes, context)?; + let member = syntax.to_tagged_string(context.source); + + Ok(ExpressionContinuation::DotSuffix(dot, member)) + } + + // Otherwise, we expect an infix operator and an expression next + Err(_) => { + let (_, op, _) = expand_syntax(&InfixShape, token_nodes, context)?; + let next = expand_expr(&AnyExpressionShape, token_nodes, context)?; + + Ok(ExpressionContinuation::InfixSuffix(op, next)) + } + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct VariableShape; + +impl ExpandExpression for VariableShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + parse_single_node(token_nodes, "variable", |token, token_tag| { + Ok(match token { + RawToken::Variable(tag) => { + if tag.slice(context.source) == "it" { + hir::Expression::it_variable(tag, token_tag) + } else { + hir::Expression::variable(tag, token_tag) + } + } + _ => { + return Err(ShellError::type_error( + "variable", + token.type_name().tagged(token_tag), + )) + } + }) + }) + } +} + +#[derive(Debug, Clone, Copy)] +pub enum Member { + String(/* outer */ Tag, /* inner */ Tag), + Bare(Tag), +} + +impl Member { + pub(crate) fn to_expr(&self) -> hir::Expression { + match self { + Member::String(outer, inner) => hir::Expression::string(inner, outer), + Member::Bare(tag) => hir::Expression::string(tag, tag), + } + } + + pub(crate) fn tag(&self) -> Tag { + match self { + Member::String(outer, _inner) => *outer, + Member::Bare(tag) => *tag, + } + } + + pub(crate) fn to_tagged_string(&self, source: &str) -> 
Tagged { + match self { + Member::String(outer, inner) => inner.string(source).tagged(outer), + Member::Bare(tag) => tag.tagged_string(source), + } + } + + pub(crate) fn tagged_type_name(&self) -> Tagged<&'static str> { + match self { + Member::String(outer, _inner) => "string".tagged(outer), + Member::Bare(tag) => "word".tagged(tag), + } + } +} + +enum ColumnPathState { + Initial, + LeadingDot(Tag), + Dot(Tag, Vec, Tag), + Member(Tag, Vec), + Error(ShellError), +} + +impl ColumnPathState { + pub fn dot(self, dot: Tag) -> ColumnPathState { + match self { + ColumnPathState::Initial => ColumnPathState::LeadingDot(dot), + ColumnPathState::LeadingDot(_) => { + ColumnPathState::Error(ShellError::type_error("column", "dot".tagged(dot))) + } + ColumnPathState::Dot(..) => { + ColumnPathState::Error(ShellError::type_error("column", "dot".tagged(dot))) + } + ColumnPathState::Member(tag, members) => ColumnPathState::Dot(tag, members, dot), + ColumnPathState::Error(err) => ColumnPathState::Error(err), + } + } + + pub fn member(self, member: Member) -> ColumnPathState { + match self { + ColumnPathState::Initial => ColumnPathState::Member(member.tag(), vec![member]), + ColumnPathState::LeadingDot(tag) => { + ColumnPathState::Member(tag.until(member.tag()), vec![member]) + } + + ColumnPathState::Dot(tag, mut tags, _) => { + ColumnPathState::Member(tag.until(member.tag()), { + tags.push(member); + tags + }) + } + ColumnPathState::Member(..) 
=> { + ColumnPathState::Error(ShellError::type_error("column", member.tagged_type_name())) + } + ColumnPathState::Error(err) => ColumnPathState::Error(err), + } + } + + pub fn into_path(self, next: Peeked) -> Result>, ShellError> { + match self { + ColumnPathState::Initial => Err(next.type_error("column path")), + ColumnPathState::LeadingDot(dot) => { + Err(ShellError::type_error("column", "dot".tagged(dot))) + } + ColumnPathState::Dot(_tag, _members, dot) => { + Err(ShellError::type_error("column", "dot".tagged(dot))) + } + ColumnPathState::Member(tag, tags) => Ok(tags.tagged(tag)), + ColumnPathState::Error(err) => Err(err), + } + } +} + +pub fn expand_column_path<'a, 'b>( + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, +) -> Result>, ShellError> { + let mut state = ColumnPathState::Initial; + + loop { + let member = MemberShape.expand_syntax(token_nodes, context); + + match member { + Err(_) => break, + Ok(member) => state = state.member(member), + } + + let dot = DotShape.expand_syntax(token_nodes, context); + + match dot { + Err(_) => break, + Ok(dot) => state = state.dot(dot), + } + } + + state.into_path(token_nodes.peek_non_ws()) +} + +#[derive(Debug, Copy, Clone)] +pub struct ColumnPathShape; + +impl ExpandSyntax for ColumnPathShape { + type Output = Tagged>; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + expand_column_path(token_nodes, context) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct MemberShape; + +impl ExpandSyntax for MemberShape { + type Output = Member; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + let bare = BareShape.test(token_nodes, context); + if let Some(peeked) = bare { + let node = peeked.not_eof("column")?.commit(); + return Ok(Member::Bare(node.tag())); + } + + let string = StringShape.test(token_nodes, context); + + if let Some(peeked) = string { + let 
node = peeked.not_eof("column")?.commit(); + let (outer, inner) = node.expect_string(); + + return Ok(Member::String(outer, inner)); + } + + Err(token_nodes.peek_any().type_error("column")) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct DotShape; + +impl SkipSyntax for DotShape { + fn skip<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + expand_syntax(self, token_nodes, context)?; + + Ok(()) + } +} + +impl ExpandSyntax for DotShape { + type Output = Tag; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Result { + parse_single_node(token_nodes, "dot", |token, token_tag| { + Ok(match token { + RawToken::Operator(Operator::Dot) => token_tag, + _ => { + return Err(ShellError::type_error( + "dot", + token.type_name().tagged(token_tag), + )) + } + }) + }) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct InfixShape; + +impl ExpandSyntax for InfixShape { + type Output = (Tag, Tagged, Tag); + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + let checkpoint = token_nodes.checkpoint(); + + // An infix operator must be prefixed by whitespace + let start = expand_syntax(&WhitespaceShape, checkpoint.iterator, context)?; + + // Parse the next TokenNode after the whitespace + let operator = + parse_single_node(checkpoint.iterator, "infix operator", |token, token_tag| { + Ok(match token { + // If it's an operator (and not `.`), it's a match + RawToken::Operator(operator) if operator != Operator::Dot => { + operator.tagged(token_tag) + } + + // Otherwise, it's not a match + _ => { + return Err(ShellError::type_error( + "infix operator", + token.type_name().tagged(token_tag), + )) + } + }) + })?; + + // An infix operator must be followed by whitespace + let end = expand_syntax(&WhitespaceShape, checkpoint.iterator, context)?; + + checkpoint.commit(); + + Ok((start, 
operator, end)) + } +} diff --git a/src/parser/hir/tokens_iterator.rs b/src/parser/hir/tokens_iterator.rs new file mode 100644 index 0000000000..c0dd9c50fd --- /dev/null +++ b/src/parser/hir/tokens_iterator.rs @@ -0,0 +1,365 @@ +pub(crate) mod debug; + +use crate::errors::ShellError; +use crate::parser::TokenNode; +use crate::{Tag, Tagged, TaggedItem}; +use derive_new::new; + +#[derive(Debug, new)] +pub struct TokensIterator<'a> { + tokens: &'a [TokenNode], + tag: Tag, + skip_ws: bool, + #[new(default)] + index: usize, + #[new(default)] + seen: indexmap::IndexSet, +} + +#[derive(Debug)] +pub struct Checkpoint<'content, 'me> { + pub(crate) iterator: &'me mut TokensIterator<'content>, + index: usize, + seen: indexmap::IndexSet, + committed: bool, +} + +impl<'content, 'me> Checkpoint<'content, 'me> { + pub(crate) fn commit(mut self) { + self.committed = true; + } +} + +impl<'content, 'me> std::ops::Drop for Checkpoint<'content, 'me> { + fn drop(&mut self) { + if !self.committed { + self.iterator.index = self.index; + self.iterator.seen = self.seen.clone(); + } + } +} + +#[derive(Debug)] +pub struct Peeked<'content, 'me> { + pub(crate) node: Option<&'content TokenNode>, + iterator: &'me mut TokensIterator<'content>, + from: usize, + to: usize, +} + +impl<'content, 'me> Peeked<'content, 'me> { + pub fn commit(&mut self) -> Option<&'content TokenNode> { + let Peeked { + node, + iterator, + from, + to, + } = self; + + let node = (*node)?; + iterator.commit(*from, *to); + Some(node) + } + + pub fn not_eof( + self, + expected: impl Into, + ) -> Result, ShellError> { + match self.node { + None => Err(ShellError::unexpected_eof( + expected, + self.iterator.eof_tag(), + )), + Some(node) => Ok(PeekedNode { + node, + iterator: self.iterator, + from: self.from, + to: self.to, + }), + } + } + + pub fn type_error(&self, expected: impl Into) -> ShellError { + peek_error(&self.node, self.iterator.eof_tag(), expected) + } +} + +#[derive(Debug)] +pub struct PeekedNode<'content, 'me> { 
+ pub(crate) node: &'content TokenNode, + iterator: &'me mut TokensIterator<'content>, + from: usize, + to: usize, +} + +impl<'content, 'me> PeekedNode<'content, 'me> { + pub fn commit(self) -> &'content TokenNode { + let PeekedNode { + node, + iterator, + from, + to, + } = self; + + iterator.commit(from, to); + node + } + + pub fn rollback(self) {} + + pub fn type_error(&self, expected: impl Into) -> ShellError { + peek_error(&Some(self.node), self.iterator.eof_tag(), expected) + } +} + +pub fn peek_error( + node: &Option<&TokenNode>, + eof_tag: Tag, + expected: impl Into, +) -> ShellError { + match node { + None => ShellError::unexpected_eof(expected, eof_tag), + Some(node) => ShellError::type_error(expected, node.tagged_type_name()), + } +} + +impl<'content> TokensIterator<'content> { + #[cfg(test)] + pub fn all(tokens: &'content [TokenNode], tag: Tag) -> TokensIterator<'content> { + TokensIterator::new(tokens, tag, false) + } + + /// Use a checkpoint when you need to peek more than one token ahead, but can't be sure + /// that you'll succeed. 
+ pub fn checkpoint<'me>(&'me mut self) -> Checkpoint<'content, 'me> { + let index = self.index; + let seen = self.seen.clone(); + + Checkpoint { + iterator: self, + index, + seen, + committed: false, + } + } + + pub fn anchor(&self) -> uuid::Uuid { + self.tag.anchor + } + + fn eof_tag(&self) -> Tag { + Tag::from((self.tag.span.end(), self.tag.span.end(), self.tag.anchor)) + } + + pub fn typed_tag_at_cursor(&mut self) -> Tagged<&'static str> { + let next = self.peek_any(); + + match next.node { + None => "end".tagged(self.eof_tag()), + Some(node) => node.tagged_type_name(), + } + } + + pub fn remove(&mut self, position: usize) { + self.seen.insert(position); + } + + pub fn at_end(&self) -> bool { + peek(self, self.skip_ws).is_none() + } + + pub fn at_end_possible_ws(&self) -> bool { + peek(self, true).is_none() + } + + pub fn advance(&mut self) { + self.seen.insert(self.index); + self.index += 1; + } + + pub fn extract(&mut self, f: impl Fn(&TokenNode) -> Option) -> Option<(usize, T)> { + for (i, item) in self.tokens.iter().enumerate() { + if self.seen.contains(&i) { + continue; + } + + match f(item) { + None => { + continue; + } + Some(value) => { + self.seen.insert(i); + return Some((i, value)); + } + } + } + + None + } + + pub fn move_to(&mut self, pos: usize) { + self.index = pos; + } + + pub fn restart(&mut self) { + self.index = 0; + } + + pub fn clone(&self) -> TokensIterator<'content> { + TokensIterator { + tokens: self.tokens, + tag: self.tag, + index: self.index, + seen: self.seen.clone(), + skip_ws: self.skip_ws, + } + } + + // Get the next token, not including whitespace + pub fn next_non_ws(&mut self) -> Option<&TokenNode> { + let mut peeked = start_next(self, true); + peeked.commit() + } + + // Peek the next token, not including whitespace + pub fn peek_non_ws<'me>(&'me mut self) -> Peeked<'content, 'me> { + start_next(self, true) + } + + // Peek the next token, including whitespace + pub fn peek_any<'me>(&'me mut self) -> Peeked<'content, 'me> { + 
start_next(self, false) + } + + fn commit(&mut self, from: usize, to: usize) { + for index in from..to { + self.seen.insert(index); + } + + self.index = to; + } + + pub fn debug_remaining(&self) -> Vec { + let mut tokens = self.clone(); + tokens.restart(); + tokens.cloned().collect() + } +} + +impl<'a> Iterator for TokensIterator<'a> { + type Item = &'a TokenNode; + + fn next(&mut self) -> Option<&'a TokenNode> { + next(self, self.skip_ws) + } +} + +fn peek<'content, 'me>( + iterator: &TokensIterator<'content>, + skip_ws: bool, +) -> Option<&'content TokenNode> { + let mut to = iterator.index; + + loop { + if to >= iterator.tokens.len() { + return None; + } + + if iterator.seen.contains(&to) { + to += 1; + continue; + } + + if to >= iterator.tokens.len() { + return None; + } + + let node = &iterator.tokens[to]; + + match node { + TokenNode::Whitespace(_) if skip_ws => { + to += 1; + } + _ => { + return Some(node); + } + } + } +} + +fn start_next<'content, 'me>( + iterator: &'me mut TokensIterator<'content>, + skip_ws: bool, +) -> Peeked<'content, 'me> { + let from = iterator.index; + let mut to = iterator.index; + + loop { + if to >= iterator.tokens.len() { + return Peeked { + node: None, + iterator, + from, + to, + }; + } + + if iterator.seen.contains(&to) { + to += 1; + continue; + } + + if to >= iterator.tokens.len() { + return Peeked { + node: None, + iterator, + from, + to, + }; + } + + let node = &iterator.tokens[to]; + + match node { + TokenNode::Whitespace(_) if skip_ws => { + to += 1; + } + _ => { + to += 1; + return Peeked { + node: Some(node), + iterator, + from, + to, + }; + } + } + } +} + +fn next<'a>(iterator: &mut TokensIterator<'a>, skip_ws: bool) -> Option<&'a TokenNode> { + loop { + if iterator.index >= iterator.tokens.len() { + return None; + } + + if iterator.seen.contains(&iterator.index) { + iterator.advance(); + continue; + } + + if iterator.index >= iterator.tokens.len() { + return None; + } + + match &iterator.tokens[iterator.index] { + 
TokenNode::Whitespace(_) if skip_ws => { + iterator.advance(); + } + other => { + iterator.advance(); + return Some(other); + } + } + } +} diff --git a/src/parser/hir/tokens_iterator/debug.rs b/src/parser/hir/tokens_iterator/debug.rs new file mode 100644 index 0000000000..2e26720154 --- /dev/null +++ b/src/parser/hir/tokens_iterator/debug.rs @@ -0,0 +1,30 @@ +use crate::parser::hir::tokens_iterator::TokensIterator; +use crate::traits::ToDebug; + +#[derive(Debug)] +pub(crate) enum DebugIteratorToken { + Seen(String), + Unseen(String), + Cursor, +} + +pub(crate) fn debug_tokens(iterator: &TokensIterator, source: &str) -> Vec { + let mut out = vec![]; + + for (i, token) in iterator.tokens.iter().enumerate() { + if iterator.index == i { + out.push(DebugIteratorToken::Cursor); + } + + if iterator.seen.contains(&i) { + out.push(DebugIteratorToken::Seen(format!("{}", token.debug(source)))); + } else { + out.push(DebugIteratorToken::Unseen(format!( + "{}", + token.debug(source) + ))); + } + } + + out +} diff --git a/src/parser/parse/files.rs b/src/parser/parse/files.rs index afe75ddb27..3c28237f5d 100644 --- a/src/parser/parse/files.rs +++ b/src/parser/parse/files.rs @@ -1,6 +1,7 @@ use crate::Tag; use derive_new::new; use language_reporting::{FileName, Location}; +use log::trace; use uuid::Uuid; #[derive(new, Debug, Clone)] @@ -18,7 +19,7 @@ impl language_reporting::ReportingFiles for Files { from_index: usize, to_index: usize, ) -> Option { - Some(Tag::from((from_index, to_index, file))) + Some(Tag::new(file, (from_index, to_index).into())) } fn file_id(&self, tag: Self::Span) -> Self::FileId { @@ -38,8 +39,18 @@ impl language_reporting::ReportingFiles for Files { let mut seen_lines = 0; let mut seen_bytes = 0; - for (pos, _) in source.match_indices('\n') { - if pos > byte_index { + for (pos, slice) in source.match_indices('\n') { + trace!( + "SEARCH={} SEEN={} POS={} SLICE={:?} LEN={} ALL={:?}", + byte_index, + seen_bytes, + pos, + slice, + source.len(), + source + ); + 
+ if pos >= byte_index { return Some(language_reporting::Location::new( seen_lines, byte_index - seen_bytes, @@ -53,7 +64,7 @@ impl language_reporting::ReportingFiles for Files { if seen_lines == 0 { Some(language_reporting::Location::new(0, byte_index)) } else { - None + panic!("byte index {} wasn't valid", byte_index); } } @@ -64,7 +75,7 @@ impl language_reporting::ReportingFiles for Files { for (pos, _) in source.match_indices('\n') { if seen_lines == lineno { - return Some(Tag::from((seen_bytes, pos, file))); + return Some(Tag::new(file, (seen_bytes, pos + 1).into())); } else { seen_lines += 1; seen_bytes = pos + 1; @@ -72,16 +83,18 @@ impl language_reporting::ReportingFiles for Files { } if seen_lines == 0 { - Some(Tag::from((0, self.snippet.len() - 1, file))) + Some(Tag::new(file, (0, self.snippet.len() - 1).into())) } else { None } } fn source(&self, tag: Self::Span) -> Option { - if tag.span.start > tag.span.end { + trace!("source(tag={:?}) snippet={:?}", tag, self.snippet); + + if tag.span.start() > tag.span.end() { return None; - } else if tag.span.end >= self.snippet.len() { + } else if tag.span.end() > self.snippet.len() { return None; } Some(tag.slice(&self.snippet).to_string()) diff --git a/src/parser/parse/operator.rs b/src/parser/parse/operator.rs index 82a04ed796..7b5a5c77d8 100644 --- a/src/parser/parse/operator.rs +++ b/src/parser/parse/operator.rs @@ -11,6 +11,7 @@ pub enum Operator { GreaterThan, LessThanOrEqual, GreaterThanOrEqual, + Dot, } impl ToDebug for Operator { @@ -32,6 +33,7 @@ impl Operator { Operator::GreaterThan => ">", Operator::LessThanOrEqual => "<=", Operator::GreaterThanOrEqual => ">=", + Operator::Dot => ".", } } } @@ -52,6 +54,7 @@ impl FromStr for Operator { ">" => Ok(Operator::GreaterThan), "<=" => Ok(Operator::LessThanOrEqual), ">=" => Ok(Operator::GreaterThanOrEqual), + "." 
=> Ok(Operator::Dot), _ => Err(()), } } diff --git a/src/parser/parse/parser.rs b/src/parser/parse/parser.rs index 33903ba37c..93ba043ba1 100644 --- a/src/parser/parse/parser.rs +++ b/src/parser/parse/parser.rs @@ -14,24 +14,54 @@ use nom::combinator::*; use nom::multi::*; use nom::sequence::*; +use derive_new::new; use log::trace; use nom::dbg; use nom::*; use nom::{AsBytes, FindSubstring, IResult, InputLength, InputTake, Slice}; use nom_locate::{position, LocatedSpanEx}; +use nom_tracable::{tracable_parser, HasTracableInfo, TracableInfo}; use serde::{Deserialize, Serialize}; use std::fmt::Debug; use std::str::FromStr; use uuid::Uuid; -pub type NomSpan<'a> = LocatedSpanEx<&'a str, Uuid>; +pub type NomSpan<'a> = LocatedSpanEx<&'a str, TracableContext>; + +#[derive(Debug, Clone, Copy, PartialEq, new)] +pub struct TracableContext { + pub(crate) origin: Uuid, + pub(crate) info: TracableInfo, +} + +impl HasTracableInfo for TracableContext { + fn get_tracable_info(&self) -> TracableInfo { + self.info + } + + fn set_tracable_info(mut self, info: TracableInfo) -> Self { + TracableContext { + origin: self.origin, + info, + } + } +} + +impl std::ops::Deref for TracableContext { + type Target = TracableInfo; + + fn deref(&self) -> &TracableInfo { + &self.info + } +} pub fn nom_input(s: &str, anchor: Uuid) -> NomSpan<'_> { - LocatedSpanEx::new_extra(s, anchor) + LocatedSpanEx::new_extra(s, TracableContext::new(anchor, TracableInfo::new())) } macro_rules! operator { ($name:tt : $token:tt ) => { + #[tracable_parser] pub fn $name(input: NomSpan) -> IResult { let start = input.offset; let (input, tag) = tag(stringify!($token))(input)?; @@ -51,25 +81,7 @@ operator! { gte: >= } operator! { lte: <= } operator! { eq: == } operator! 
{ neq: != } - -fn trace_step<'a, T: Debug>( - input: NomSpan<'a>, - name: &str, - block: impl FnOnce(NomSpan<'a>) -> IResult, T>, -) -> IResult, T> { - trace!(target: "nu::lite_parse", "+ before {} @ {:?}", name, input); - match block(input) { - Ok((input, result)) => { - trace!(target: "nu::lite_parse", "after {} @ {:?} -> {:?}", name, input, result); - Ok((input, result)) - } - - Err(e) => { - trace!(target: "nu::lite_parse", "- failed {} :: {:?}", name, e); - Err(e) - } - } -} +operator! { dot: . } #[derive(Debug, Clone, Eq, PartialEq, Hash, Ord, PartialOrd, Serialize, Deserialize)] pub enum Number { @@ -77,6 +89,15 @@ pub enum Number { Decimal(BigDecimal), } +impl std::fmt::Display for Number { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Number::Int(int) => write!(f, "{}", int), + Number::Decimal(decimal) => write!(f, "{}", decimal), + } + } +} + macro_rules! primitive_int { ($($ty:ty)*) => { $( @@ -148,540 +169,479 @@ impl Into for BigDecimal { } } +#[tracable_parser] +pub fn number(input: NomSpan) -> IResult { + let (input, number) = raw_number(input)?; + + Ok(( + input, + TokenTreeBuilder::tagged_number(number.item, number.tag), + )) +} + +#[tracable_parser] pub fn raw_number(input: NomSpan) -> IResult> { let anchoral = input; let start = input.offset; - trace_step(input, "raw_decimal", move |input| { - let (input, neg) = opt(tag("-"))(input)?; - let (input, head) = digit1(input)?; - let dot: IResult = tag(".")(input); + let (input, neg) = opt(tag("-"))(input)?; + let (input, head) = digit1(input)?; - let input = match dot { - Ok((input, dot)) => input, + match input.fragment.chars().next() { + None => return Ok((input, RawNumber::int((start, input.offset, input.extra)))), + Some('.') => (), + Some(other) if other.is_whitespace() => { + return Ok((input, RawNumber::int((start, input.offset, input.extra)))) + } + _ => { + return Err(nom::Err::Error(nom::error::make_error( + input, + nom::error::ErrorKind::Tag, + ))) 
+ } + } - // it's just an integer - Err(_) => return Ok((input, RawNumber::int((start, input.offset, input.extra)))), - }; + let dot: IResult = tag(".")(input); - let (input, tail) = digit1(input)?; + let input = match dot { + Ok((input, dot)) => input, - let end = input.offset; + // it's just an integer + Err(_) => return Ok((input, RawNumber::int((start, input.offset, input.extra)))), + }; - Ok((input, RawNumber::decimal((start, end, input.extra)))) - }) + let (input, tail) = digit1(input)?; + + let end = input.offset; + + let next = input.fragment.chars().next(); + + if let Some(next) = next { + if !next.is_whitespace() { + return Err(nom::Err::Error(nom::error::make_error( + input, + nom::error::ErrorKind::Tag, + ))); + } + } + + Ok((input, RawNumber::decimal((start, end, input.extra)))) } +#[tracable_parser] pub fn operator(input: NomSpan) -> IResult { - trace_step(input, "operator", |input| { - let (input, operator) = alt((gte, lte, neq, gt, lt, eq))(input)?; + let (input, operator) = alt((gte, lte, neq, gt, lt, eq))(input)?; - Ok((input, operator)) - }) + Ok((input, operator)) } +#[tracable_parser] pub fn dq_string(input: NomSpan) -> IResult { - trace_step(input, "dq_string", |input| { - let start = input.offset; - let (input, _) = char('"')(input)?; - let start1 = input.offset; - let (input, _) = many0(none_of("\""))(input)?; - let end1 = input.offset; - let (input, _) = char('"')(input)?; - let end = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_string((start1, end1, input.extra), (start, end, input.extra)), - )) - }) + let start = input.offset; + let (input, _) = char('"')(input)?; + let start1 = input.offset; + let (input, _) = many0(none_of("\""))(input)?; + let end1 = input.offset; + let (input, _) = char('"')(input)?; + let end = input.offset; + Ok(( + input, + TokenTreeBuilder::tagged_string((start1, end1, input.extra), (start, end, input.extra)), + )) } +#[tracable_parser] pub fn sq_string(input: NomSpan) -> IResult { - trace_step(input, 
"sq_string", move |input| { - let start = input.offset; - let (input, _) = char('\'')(input)?; - let start1 = input.offset; - let (input, _) = many0(none_of("\'"))(input)?; - let end1 = input.offset; - let (input, _) = char('\'')(input)?; - let end = input.offset; + let start = input.offset; + let (input, _) = char('\'')(input)?; + let start1 = input.offset; + let (input, _) = many0(none_of("\'"))(input)?; + let end1 = input.offset; + let (input, _) = char('\'')(input)?; + let end = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_string((start1, end1, input.extra), (start, end, input.extra)), - )) - }) + Ok(( + input, + TokenTreeBuilder::tagged_string((start1, end1, input.extra), (start, end, input.extra)), + )) } +#[tracable_parser] pub fn string(input: NomSpan) -> IResult { - trace_step(input, "string", move |input| { - alt((sq_string, dq_string))(input) - }) + alt((sq_string, dq_string))(input) } +#[tracable_parser] pub fn external(input: NomSpan) -> IResult { - trace_step(input, "external", move |input| { - let start = input.offset; - let (input, _) = tag("^")(input)?; - let (input, bare) = take_while(is_bare_char)(input)?; - let end = input.offset; + let start = input.offset; + let (input, _) = tag("^")(input)?; + let (input, bare) = take_while(is_bare_char)(input)?; + let end = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_external(bare, (start, end, input.extra)), - )) - }) + Ok(( + input, + TokenTreeBuilder::tagged_external_command(bare, (start, end, input.extra)), + )) } +#[tracable_parser] pub fn pattern(input: NomSpan) -> IResult { - trace_step(input, "bare", move |input| { - let start = input.offset; - let (input, _) = take_while1(is_start_glob_char)(input)?; - let (input, _) = take_while(is_glob_char)(input)?; + let start = input.offset; + let (input, _) = take_while1(is_start_glob_char)(input)?; + let (input, _) = take_while(is_glob_char)(input)?; - let next_char = &input.fragment.chars().nth(0); + let next_char = 
&input.fragment.chars().nth(0); - if let Some(next_char) = next_char { - if is_external_word_char(*next_char) { - return Err(nom::Err::Error(nom::error::make_error( - input, - nom::error::ErrorKind::TakeWhile1, - ))); - } + if let Some(next_char) = next_char { + if is_external_word_char(*next_char) { + return Err(nom::Err::Error(nom::error::make_error( + input, + nom::error::ErrorKind::TakeWhile1, + ))); } + } - let end = input.offset; + let end = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_pattern((start, end, input.extra)), - )) - }) + Ok(( + input, + TokenTreeBuilder::tagged_pattern((start, end, input.extra)), + )) } +#[tracable_parser] pub fn bare(input: NomSpan) -> IResult { - trace_step(input, "bare", move |input| { - let start = input.offset; - let (input, _) = take_while1(is_start_bare_char)(input)?; - let (input, _) = take_while(is_bare_char)(input)?; + let start = input.offset; + let (input, _) = take_while1(is_start_bare_char)(input)?; + let (input, last) = take_while(is_bare_char)(input)?; - let next_char = &input.fragment.chars().nth(0); + let next_char = &input.fragment.chars().nth(0); + let prev_char = last.fragment.chars().nth(0); - if let Some(next_char) = next_char { - if is_external_word_char(*next_char) || is_glob_specific_char(*next_char) { - return Err(nom::Err::Error(nom::error::make_error( - input, - nom::error::ErrorKind::TakeWhile1, - ))); - } + // if let (Some(prev), Some(next)) = (prev_char, next_char) { + // if prev == '.' 
&& is_member_start(*next) { + // return Err(nom::Err::Error(nom::error::make_error( + // input, + // nom::error::ErrorKind::TakeWhile1, + // ))); + // } + // } + + if let Some(next_char) = next_char { + if is_external_word_char(*next_char) || is_glob_specific_char(*next_char) { + return Err(nom::Err::Error(nom::error::make_error( + input, + nom::error::ErrorKind::TakeWhile1, + ))); } + } - let end = input.offset; + let end = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_bare((start, end, input.extra)), - )) - }) + Ok(( + input, + TokenTreeBuilder::tagged_bare((start, end, input.extra)), + )) } +#[tracable_parser] pub fn external_word(input: NomSpan) -> IResult { - trace_step(input, "bare", move |input| { - let start = input.offset; - let (input, _) = take_while1(is_external_word_char)(input)?; - let end = input.offset; + let start = input.offset; + let (input, _) = take_while1(is_external_word_char)(input)?; + let end = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_external_word((start, end, input.extra)), - )) - }) + Ok(( + input, + TokenTreeBuilder::tagged_external_word((start, end, input.extra)), + )) } +#[tracable_parser] pub fn var(input: NomSpan) -> IResult { - trace_step(input, "var", move |input| { - let start = input.offset; - let (input, _) = tag("$")(input)?; - let (input, bare) = member(input)?; - let end = input.offset; + let start = input.offset; + let (input, _) = tag("$")(input)?; + let (input, bare) = ident(input)?; + let end = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_var(bare.tag(), (start, end, input.extra)), - )) - }) + Ok(( + input, + TokenTreeBuilder::tagged_var(bare, (start, end, input.extra)), + )) } -pub fn member(input: NomSpan) -> IResult { - trace_step(input, "identifier", move |input| { - let start = input.offset; - let (input, _) = take_while1(is_id_start)(input)?; - let (input, _) = take_while(is_id_continue)(input)?; +#[tracable_parser] +pub fn ident(input: NomSpan) -> IResult { + let start = 
input.offset; + let (input, _) = take_while1(is_start_bare_char)(input)?; + let (input, _) = take_while(is_bare_char)(input)?; + let end = input.offset; - let end = input.offset; - - Ok(( - input, - TokenTreeBuilder::tagged_member((start, end, input.extra)), - )) - }) + Ok((input, Tag::from((start, end, input.extra.origin)))) } +#[tracable_parser] pub fn flag(input: NomSpan) -> IResult { - trace_step(input, "flag", move |input| { - let start = input.offset; - let (input, _) = tag("--")(input)?; - let (input, bare) = bare(input)?; - let end = input.offset; + let start = input.offset; + let (input, _) = tag("--")(input)?; + let (input, bare) = bare(input)?; + let end = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_flag(bare.tag(), (start, end, input.extra)), - )) - }) + Ok(( + input, + TokenTreeBuilder::tagged_flag(bare.tag(), (start, end, input.extra)), + )) } +#[tracable_parser] pub fn shorthand(input: NomSpan) -> IResult { - trace_step(input, "shorthand", move |input| { - let start = input.offset; - let (input, _) = tag("-")(input)?; - let (input, bare) = bare(input)?; - let end = input.offset; + let start = input.offset; + let (input, _) = tag("-")(input)?; + let (input, bare) = bare(input)?; + let end = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_shorthand(bare.tag(), (start, end, input.extra)), - )) - }) -} - -pub fn raw_unit(input: NomSpan) -> IResult> { - trace_step(input, "raw_unit", move |input| { - let start = input.offset; - let (input, unit) = alt(( - tag("B"), - tag("b"), - tag("KB"), - tag("kb"), - tag("Kb"), - tag("K"), - tag("k"), - tag("MB"), - tag("mb"), - tag("Mb"), - tag("GB"), - tag("gb"), - tag("Gb"), - tag("TB"), - tag("tb"), - tag("Tb"), - tag("PB"), - tag("pb"), - tag("Pb"), - ))(input)?; - let end = input.offset; - - Ok(( - input, - Unit::from(unit.fragment).tagged((start, end, input.extra)), - )) - }) -} - -pub fn size(input: NomSpan) -> IResult { - trace_step(input, "size", move |input| { - let mut is_size = false; 
- let start = input.offset; - let (input, number) = raw_number(input)?; - if let Ok((input, Some(size))) = opt(raw_unit)(input) { - let end = input.offset; - - // Check to make sure there is no trailing parseable characters - if let Ok((input, Some(extra))) = opt(bare)(input) { - return Err(nom::Err::Error((input, nom::error::ErrorKind::Char))); - } - - Ok(( - input, - TokenTreeBuilder::tagged_size((number.item, *size), (start, end, input.extra)), - )) - } else { - let end = input.offset; - - // Check to make sure there is no trailing parseable characters - if let Ok((input, Some(extra))) = opt(bare)(input) { - return Err(nom::Err::Error((input, nom::error::ErrorKind::Char))); - } - - Ok(( - input, - TokenTreeBuilder::tagged_number(number.item, number.tag), - )) - } - }) + Ok(( + input, + TokenTreeBuilder::tagged_shorthand(bare.tag(), (start, end, input.extra)), + )) } +#[tracable_parser] pub fn leaf(input: NomSpan) -> IResult { - trace_step(input, "leaf", move |input| { - let (input, node) = alt(( - size, - string, - operator, - flag, - shorthand, - var, - external, - bare, - pattern, - external_word, - ))(input)?; + let (input, node) = alt((number, string, operator, flag, shorthand, var, external))(input)?; - Ok((input, node)) - }) + Ok((input, node)) } -pub fn token_list(input: NomSpan) -> IResult> { - trace_step(input, "token_list", move |input| { - let (input, first) = node(input)?; - let (input, list) = many0(pair(space1, node))(input)?; +#[tracable_parser] +pub fn token_list(input: NomSpan) -> IResult>> { + let start = input.offset; + let (input, first) = node(input)?; - Ok((input, make_token_list(None, first, list, None))) - }) + let (input, mut list) = many0(pair(alt((whitespace, dot)), node))(input)?; + + let end = input.offset; + + Ok(( + input, + make_token_list(first, list, None).tagged((start, end, input.extra.origin)), + )) } -pub fn spaced_token_list(input: NomSpan) -> IResult> { - trace_step(input, "spaced_token_list", move |input| { - let (input, 
sp_left) = opt(space1)(input)?; - let (input, first) = node(input)?; - let (input, list) = many0(pair(space1, node))(input)?; - let (input, sp_right) = opt(space1)(input)?; +#[tracable_parser] +pub fn spaced_token_list(input: NomSpan) -> IResult>> { + let start = input.offset; + let (input, pre_ws) = opt(whitespace)(input)?; + let (input, items) = token_list(input)?; + let (input, post_ws) = opt(whitespace)(input)?; + let end = input.offset; - Ok((input, make_token_list(sp_left, first, list, sp_right))) - }) + let mut out = vec![]; + + out.extend(pre_ws); + out.extend(items.item); + out.extend(post_ws); + + Ok((input, out.tagged((start, end, input.extra.origin)))) } fn make_token_list( - sp_left: Option, - first: TokenNode, - list: Vec<(NomSpan, TokenNode)>, - sp_right: Option, + first: Vec, + list: Vec<(TokenNode, Vec)>, + sp_right: Option, ) -> Vec { let mut nodes = vec![]; - if let Some(sp_left) = sp_left { - nodes.push(TokenNode::Whitespace(Tag::from(sp_left))); - } + nodes.extend(first); - nodes.push(first); - - for (ws, token) in list { - nodes.push(TokenNode::Whitespace(Tag::from(ws))); - nodes.push(token); + for (left, right) in list { + nodes.push(left); + nodes.extend(right); } if let Some(sp_right) = sp_right { - nodes.push(TokenNode::Whitespace(Tag::from(sp_right))); + nodes.push(sp_right); } nodes } +#[tracable_parser] pub fn whitespace(input: NomSpan) -> IResult { - trace_step(input, "whitespace", move |input| { - let left = input.offset; - let (input, ws1) = space1(input)?; - let right = input.offset; + let left = input.offset; + let (input, ws1) = space1(input)?; + let right = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_ws((left, right, input.extra)), - )) - }) -} - -pub fn delimited_paren(input: NomSpan) -> IResult { - trace_step(input, "delimited_paren", move |input| { - let left = input.offset; - let (input, _) = char('(')(input)?; - let (input, ws1) = opt(whitespace)(input)?; - let (input, inner_items) = opt(token_list)(input)?; - 
let (input, ws2) = opt(whitespace)(input)?; - let (input, _) = char(')')(input)?; - let right = input.offset; - - let mut items = vec![]; - - if let Some(space) = ws1 { - items.push(space); - } - - if let Some(inner_items) = inner_items { - items.extend(inner_items); - } - - if let Some(space) = ws2 { - items.push(space); - } - - Ok(( - input, - TokenTreeBuilder::tagged_parens(items, (left, right, input.extra)), - )) - }) -} - -pub fn delimited_square(input: NomSpan) -> IResult { - trace_step(input, "delimited_paren", move |input| { - let left = input.offset; - let (input, _) = char('[')(input)?; - let (input, ws1) = opt(whitespace)(input)?; - let (input, inner_items) = opt(token_list)(input)?; - let (input, ws2) = opt(whitespace)(input)?; - let (input, _) = char(']')(input)?; - let right = input.offset; - - let mut items = vec![]; - - if let Some(space) = ws1 { - items.push(space); - } - - if let Some(inner_items) = inner_items { - items.extend(inner_items); - } - - if let Some(space) = ws2 { - items.push(space); - } - - Ok(( - input, - TokenTreeBuilder::tagged_square(items, (left, right, input.extra)), - )) - }) -} - -pub fn delimited_brace(input: NomSpan) -> IResult { - trace_step(input, "delimited_brace", move |input| { - let left = input.offset; - let (input, _) = char('{')(input)?; - let (input, _) = opt(space1)(input)?; - let (input, items) = opt(token_list)(input)?; - let (input, _) = opt(space1)(input)?; - let (input, _) = char('}')(input)?; - let right = input.offset; - - Ok(( - input, - TokenTreeBuilder::tagged_brace( - items.unwrap_or_else(|| vec![]), - (left, right, input.extra), - ), - )) - }) -} - -pub fn raw_call(input: NomSpan) -> IResult> { - trace_step(input, "raw_call", move |input| { - let left = input.offset; - let (input, items) = token_list(input)?; - let right = input.offset; - - Ok(( - input, - TokenTreeBuilder::tagged_call(items, (left, right, input.extra)), - )) - }) -} - -pub fn path(input: NomSpan) -> IResult { - trace_step(input, 
"path", move |input| { - let left = input.offset; - let (input, head) = node1(input)?; - let (input, _) = tag(".")(input)?; - let (input, tail) = separated_list(tag("."), alt((member, string)))(input)?; - let right = input.offset; - - Ok(( - input, - TokenTreeBuilder::tagged_path((head, tail), (left, right, input.extra)), - )) - }) -} - -pub fn node1(input: NomSpan) -> IResult { - trace_step(input, "node1", alt((leaf, delimited_paren))) -} - -pub fn node(input: NomSpan) -> IResult { - trace_step( + Ok(( input, - "node", - alt(( - path, - leaf, - delimited_paren, - delimited_brace, - delimited_square, - )), - ) + TokenTreeBuilder::tagged_ws((left, right, input.extra)), + )) } +pub fn delimited(input: NomSpan, delimiter: Delimiter) -> IResult>> { + let left = input.offset; + let (input, _) = char(delimiter.open())(input)?; + let (input, inner_items) = opt(spaced_token_list)(input)?; + let (input, _) = char(delimiter.close())(input)?; + let right = input.offset; + + let mut items = vec![]; + + if let Some(inner_items) = inner_items { + items.extend(inner_items.item); + } + + Ok((input, items.tagged((left, right, input.extra.origin)))) +} + +#[tracable_parser] +pub fn delimited_paren(input: NomSpan) -> IResult { + let (input, tokens) = delimited(input, Delimiter::Paren)?; + + Ok(( + input, + TokenTreeBuilder::tagged_parens(tokens.item, tokens.tag), + )) +} + +#[tracable_parser] +pub fn delimited_square(input: NomSpan) -> IResult { + let (input, tokens) = delimited(input, Delimiter::Square)?; + + Ok(( + input, + TokenTreeBuilder::tagged_square(tokens.item, tokens.tag), + )) +} + +#[tracable_parser] +pub fn delimited_brace(input: NomSpan) -> IResult { + let (input, tokens) = delimited(input, Delimiter::Brace)?; + + Ok(( + input, + TokenTreeBuilder::tagged_brace(tokens.item, tokens.tag), + )) +} + +#[tracable_parser] +pub fn raw_call(input: NomSpan) -> IResult> { + let left = input.offset; + let (input, items) = token_list(input)?; + let right = input.offset; + + Ok(( + 
input, + TokenTreeBuilder::tagged_call(items.item, (left, right, input.extra)), + )) +} + +#[tracable_parser] +pub fn bare_path(input: NomSpan) -> IResult> { + let (input, head) = alt((bare, dot))(input)?; + + let (input, tail) = many0(alt((bare, dot, string)))(input)?; + + let next_char = &input.fragment.chars().nth(0); + + if is_boundary(*next_char) { + let mut result = vec![head]; + result.extend(tail); + + Ok((input, result)) + } else { + Err(nom::Err::Error(nom::error::make_error( + input, + nom::error::ErrorKind::Many0, + ))) + } +} + +#[tracable_parser] +pub fn pattern_path(input: NomSpan) -> IResult> { + let (input, head) = alt((pattern, dot))(input)?; + + let (input, tail) = many0(alt((pattern, dot, string)))(input)?; + + let next_char = &input.fragment.chars().nth(0); + + if is_boundary(*next_char) { + let mut result = vec![head]; + result.extend(tail); + + Ok((input, result)) + } else { + Err(nom::Err::Error(nom::error::make_error( + input, + nom::error::ErrorKind::Many0, + ))) + } +} + +#[tracable_parser] +pub fn node1(input: NomSpan) -> IResult { + alt((leaf, bare, pattern, external_word, delimited_paren))(input) +} + +#[tracable_parser] +pub fn node(input: NomSpan) -> IResult> { + alt(( + to_list(leaf), + bare_path, + pattern_path, + to_list(external_word), + to_list(delimited_paren), + to_list(delimited_brace), + to_list(delimited_square), + ))(input) +} + +fn to_list( + parser: impl Fn(NomSpan) -> IResult, +) -> impl Fn(NomSpan) -> IResult> { + move |input| { + let (input, next) = parser(input)?; + + Ok((input, vec![next])) + } +} + +#[tracable_parser] +pub fn nodes(input: NomSpan) -> IResult { + let (input, tokens) = token_list(input)?; + + Ok(( + input, + TokenTreeBuilder::tagged_token_list(tokens.item, tokens.tag), + )) +} + +#[tracable_parser] pub fn pipeline(input: NomSpan) -> IResult { - trace_step(input, "pipeline", |input| { - let start = input.offset; - let (input, head) = opt(tuple((opt(space1), raw_call, opt(space1))))(input)?; - let 
(input, items) = trace_step( + let start = input.offset; + let (input, head) = spaced_token_list(input)?; + let (input, items) = many0(tuple((tag("|"), spaced_token_list)))(input)?; + + if input.input_len() != 0 { + return Err(Err::Error(error_position!( input, - "many0", - many0(tuple((tag("|"), opt(space1), raw_call, opt(space1)))), - )?; - - let (input, tail) = opt(space1)(input)?; - let (input, newline) = opt(multispace1)(input)?; - - if input.input_len() != 0 { - return Err(Err::Error(error_position!( - input, - nom::error::ErrorKind::Eof - ))); - } - - let end = input.offset; - - Ok(( - input, - TokenTreeBuilder::tagged_pipeline( - (make_call_list(head, items), tail.map(Tag::from)), - (start, end, input.extra), - ), - )) - }) -} - -fn make_call_list( - head: Option<(Option, Tagged, Option)>, - items: Vec<(NomSpan, Option, Tagged, Option)>, -) -> Vec { - let mut out = vec![]; - - if let Some(head) = head { - let el = PipelineElement::new(None, head.0.map(Tag::from), head.1, head.2.map(Tag::from)); - - out.push(el); + nom::error::ErrorKind::Eof + ))); } - for (pipe, ws1, call, ws2) in items { - let el = PipelineElement::new( - Some(pipe).map(Tag::from), - ws1.map(Tag::from), - call, - ws2.map(Tag::from), - ); + let end = input.offset; - out.push(el); - } + let head_tag = head.tag(); + let mut all_items: Vec> = + vec![PipelineElement::new(None, head).tagged(head_tag)]; - out + all_items.extend(items.into_iter().map(|(pipe, items)| { + let items_tag = items.tag(); + PipelineElement::new(Some(Tag::from(pipe)), items).tagged(Tag::from(pipe).until(items_tag)) + })); + + Ok(( + input, + TokenTreeBuilder::tagged_pipeline(all_items, (start, end, input.extra)), + )) } fn int(frag: &str, neg: Option) -> i64 { @@ -693,9 +653,19 @@ fn int(frag: &str, neg: Option) -> i64 { } } +fn is_boundary(c: Option) -> bool { + match c { + None => true, + Some(')') | Some(']') | Some('}') => true, + Some(c) if c.is_whitespace() => true, + _ => false, + } +} + fn is_external_word_char(c: 
char) -> bool { match c { - ';' | '|' | '#' | '-' | '"' | '\'' | '$' | '(' | ')' | '[' | ']' | '{' | '}' | '`' => false, + ';' | '|' | '#' | '-' | '"' | '\'' | '$' | '(' | ')' | '[' | ']' | '{' | '}' | '`' + | '.' => false, other if other.is_whitespace() => false, _ => true, } @@ -717,8 +687,7 @@ fn is_glob_char(c: char) -> bool { fn is_start_bare_char(c: char) -> bool { match c { '+' => false, - _ if c.is_alphabetic() => true, - '.' => true, + _ if c.is_alphanumeric() => true, '\\' => true, '/' => true, '_' => true, @@ -732,7 +701,6 @@ fn is_bare_char(c: char) -> bool { match c { '+' => false, _ if c.is_alphanumeric() => true, - '.' => true, '\\' => true, '/' => true, '_' => true, @@ -759,6 +727,16 @@ fn is_id_continue(c: char) -> bool { } } +fn is_member_start(c: char) -> bool { + match c { + '"' | '\'' => true, + '1'..='9' => true, + + other if is_id_start(other) => true, + _ => false, + } +} + #[cfg(test)] mod tests { use super::*; @@ -768,41 +746,6 @@ mod tests { pub type CurriedNode = Box T + 'static>; - macro_rules! assert_leaf { - (parsers [ $($name:tt)* ] $input:tt -> $left:tt .. $right:tt { $kind:tt $parens:tt } ) => { - $( - assert_eq!( - apply($name, stringify!($name), $input), - token(RawToken::$kind $parens, $left, $right) - ); - )* - - assert_eq!( - apply(leaf, "leaf", $input), - token(RawToken::$kind $parens, $left, $right) - ); - - assert_eq!( - apply(leaf, "leaf", $input), - token(RawToken::$kind $parens, $left, $right) - ); - - assert_eq!( - apply(node, "node", $input), - token(RawToken::$kind $parens, $left, $right) - ); - }; - - (parsers [ $($name:tt)* ] $input:tt -> $left:tt .. $right:tt { $kind:tt } ) => { - $( - assert_eq!( - apply($name, stringify!($name), $input), - token(RawToken::$kind, $left, $right) - ); - )* - } - } - macro_rules! 
equal_tokens { ($source:tt -> $tokens:expr) => { let result = apply(pipeline, "pipeline", $source); @@ -823,53 +766,50 @@ mod tests { assert_eq!(debug_result, debug_expected) } } - - // apply(pipeline, "pipeline", r#"cargo +nightly run"#), - // build_token(b::pipeline(vec![( - // None, - // b::call( - // b::bare("cargo"), - // vec![ - // b::sp(), - // b::external_word("+nightly"), - // b::sp(), - // b::bare("run") - // ] - // ), - // None - // )])) }; + + (<$parser:tt> $source:tt -> $tokens:expr) => { + let result = apply($parser, stringify!($parser), $source); + let (expected_tree, expected_source) = TokenTreeBuilder::build(uuid::Uuid::nil(), $tokens); + + if result != expected_tree { + let debug_result = format!("{}", result.debug($source)); + let debug_expected = format!("{}", expected_tree.debug(&expected_source)); + + if debug_result == debug_expected { + assert_eq!( + result, expected_tree, + "NOTE: actual and expected had equivalent debug serializations, source={:?}, debug_expected={:?}", + $source, + debug_expected + ) + } else { + assert_eq!(debug_result, debug_expected) + } + } + }; + } #[test] fn test_integer() { - assert_leaf! { - parsers [ size ] - "123" -> 0..3 { Number(RawNumber::int((0, 3, test_uuid())).item) } + equal_tokens! { + + "123" -> b::token_list(vec![b::int(123)]) } - assert_leaf! { - parsers [ size ] - "-123" -> 0..4 { Number(RawNumber::int((0, 4, test_uuid())).item) } - } - } - - #[test] - fn test_size() { - assert_leaf! { - parsers [ size ] - "123MB" -> 0..5 { Size(RawNumber::int((0, 3, test_uuid())).item, Unit::MB) } - } - - assert_leaf! { - parsers [ size ] - "10GB" -> 0..4 { Size(RawNumber::int((0, 2, test_uuid())).item, Unit::GB) } + equal_tokens! { + + "-123" -> b::token_list(vec![b::int(-123)]) } } #[test] fn test_operator() { - assert_eq!(apply(node, "node", ">"), build_token(b::op(">"))); + equal_tokens! { + + ">" -> b::token_list(vec![b::op(">")]) + } // assert_leaf! 
{ // parsers [ operator ] @@ -899,37 +839,50 @@ mod tests { #[test] fn test_string() { - assert_leaf! { - parsers [ string dq_string ] - r#""hello world""# -> 0..13 { String(tag(1, 12)) } + equal_tokens! { + + r#""hello world""# -> b::token_list(vec![b::string("hello world")]) } - assert_leaf! { - parsers [ string sq_string ] - r"'hello world'" -> 0..13 { String(tag(1, 12)) } + equal_tokens! { + + r#"'hello world'"# -> b::token_list(vec![b::string("hello world")]) } } #[test] fn test_bare() { - assert_leaf! { - parsers [ bare ] - "hello" -> 0..5 { Bare } + equal_tokens! { + + "hello" -> b::token_list(vec![b::bare("hello")]) + } + } + + #[test] + fn test_simple_path() { + equal_tokens! { + + "450MB" -> b::token_list(vec![b::bare("450MB")]) } - assert_leaf! { - parsers [ bare ] - "chrome.exe" -> 0..10 { Bare } + equal_tokens! { + + "chrome.exe" -> b::token_list(vec![b::bare("chrome"), b::op(Operator::Dot), b::bare("exe")]) } - assert_leaf! { - parsers [ bare ] - r"C:\windows\system.dll" -> 0..21 { Bare } + equal_tokens! { + + ".azure" -> b::token_list(vec![b::op(Operator::Dot), b::bare("azure")]) } - assert_leaf! { - parsers [ bare ] - r"C:\Code\-testing\my_tests.js" -> 0..28 { Bare } + equal_tokens! { + + r"C:\windows\system.dll" -> b::token_list(vec![b::bare(r"C:\windows\system"), b::op(Operator::Dot), b::bare("dll")]) + } + + equal_tokens! { + + r"C:\Code\-testing\my_tests.js" -> b::token_list(vec![b::bare(r"C:\Code\-testing\my_tests"), b::op(Operator::Dot), b::bare("js")]) } } @@ -956,223 +909,170 @@ mod tests { #[test] fn test_variable() { - assert_leaf! { - parsers [ var ] - "$it" -> 0..3 { Variable(tag(1, 3)) } + equal_tokens! { + + "$it" -> b::token_list(vec![b::var("it")]) } - assert_leaf! { - parsers [ var ] - "$name" -> 0..5 { Variable(tag(1, 5)) } + equal_tokens! { + + "$name" -> b::token_list(vec![b::var("name")]) } } #[test] fn test_external() { - assert_leaf! { - parsers [ external ] - "^ls" -> 0..3 { ExternalCommand(tag(1, 3)) } + equal_tokens! 
{ + + "^ls" -> b::token_list(vec![b::external_command("ls")]) + } + } + + #[test] + fn test_dot_prefixed_name() { + equal_tokens! { + + ".azure" -> b::token_list(vec![b::op("."), b::bare("azure")]) } } #[test] fn test_delimited_paren() { - assert_eq!( - apply(node, "node", "(abc)"), - build_token(b::parens(vec![b::bare("abc")])) - ); + equal_tokens! { + + "(abc)" -> b::token_list(vec![b::parens(vec![b::bare("abc")])]) + } - assert_eq!( - apply(node, "node", "( abc )"), - build_token(b::parens(vec![b::ws(" "), b::bare("abc"), b::ws(" ")])) - ); + equal_tokens! { + + "( abc )" -> b::token_list(vec![b::parens(vec![b::ws(" "), b::bare("abc"), b::ws(" ")])]) + } - assert_eq!( - apply(node, "node", "( abc def )"), - build_token(b::parens(vec![ - b::ws(" "), - b::bare("abc"), - b::sp(), - b::bare("def"), - b::sp() - ])) - ); + equal_tokens! { + + "( abc def )" -> b::token_list(vec![b::parens(vec![b::ws(" "), b::bare("abc"), b::ws(" "), b::bare("def"), b::ws(" ")])]) + } - assert_eq!( - apply(node, "node", "( abc def 123 456GB )"), - build_token(b::parens(vec![ - b::ws(" "), - b::bare("abc"), - b::sp(), - b::bare("def"), - b::sp(), - b::int(123), - b::sp(), - b::size(456, "GB"), - b::sp() - ])) - ); + equal_tokens! { + + "( abc def 123 456GB )" -> b::token_list(vec![b::parens(vec![ + b::ws(" "), b::bare("abc"), b::ws(" "), b::bare("def"), b::ws(" "), b::int(123), b::ws(" "), b::bare("456GB"), b::ws(" ") + ])]) + } } #[test] fn test_delimited_square() { - assert_eq!( - apply(node, "node", "[abc]"), - build_token(b::square(vec![b::bare("abc")])) - ); + equal_tokens! { + + "[abc]" -> b::token_list(vec![b::square(vec![b::bare("abc")])]) + } - assert_eq!( - apply(node, "node", "[ abc ]"), - build_token(b::square(vec![b::ws(" "), b::bare("abc"), b::ws(" ")])) - ); + equal_tokens! 
{ + + "[ abc ]" -> b::token_list(vec![b::square(vec![b::ws(" "), b::bare("abc"), b::ws(" ")])]) + } - assert_eq!( - apply(node, "node", "[ abc def ]"), - build_token(b::square(vec![ - b::ws(" "), - b::bare("abc"), - b::sp(), - b::bare("def"), - b::sp() - ])) - ); + equal_tokens! { + + "[ abc def ]" -> b::token_list(vec![b::square(vec![b::ws(" "), b::bare("abc"), b::ws(" "), b::bare("def"), b::ws(" ")])]) + } - assert_eq!( - apply(node, "node", "[ abc def 123 456GB ]"), - build_token(b::square(vec![ - b::ws(" "), - b::bare("abc"), - b::sp(), - b::bare("def"), - b::sp(), - b::int(123), - b::sp(), - b::size(456, "GB"), - b::sp() - ])) - ); + equal_tokens! { + + "[ abc def 123 456GB ]" -> b::token_list(vec![b::square(vec![ + b::ws(" "), b::bare("abc"), b::ws(" "), b::bare("def"), b::ws(" "), b::int(123), b::ws(" "), b::bare("456GB"), b::ws(" ") + ])]) + } } #[test] fn test_path() { let _ = pretty_env_logger::try_init(); - assert_eq!( - apply(node, "node", "$it.print"), - build_token(b::path(b::var("it"), vec![b::member("print")])) - ); - assert_eq!( - apply(node, "node", "$head.part1.part2"), - build_token(b::path( - b::var("head"), - vec![b::member("part1"), b::member("part2")] - )) - ); + equal_tokens! { + + "$it.print" -> b::token_list(vec![b::var("it"), b::op("."), b::bare("print")]) + } - assert_eq!( - apply(node, "node", "( hello ).world"), - build_token(b::path( - b::parens(vec![b::sp(), b::bare("hello"), b::sp()]), - vec![b::member("world")] - )) - ); + equal_tokens! { + + "$head.part1.part2" -> b::token_list(vec![b::var("head"), b::op("."), b::bare("part1"), b::op("."), b::bare("part2")]) + } - assert_eq!( - apply(node, "node", "( hello ).\"world\""), - build_token(b::path( - b::parens(vec![b::sp(), b::bare("hello"), b::sp()],), - vec![b::string("world")] - )) - ); + equal_tokens! { + + "( hello ).world" -> b::token_list(vec![b::parens(vec![b::sp(), b::bare("hello"), b::sp()]), b::op("."), b::bare("world")]) + } + + equal_tokens! 
{ + + r#"( hello )."world""# -> b::token_list(vec![b::parens(vec![b::sp(), b::bare("hello"), b::sp()]), b::op("."), b::string("world")]) + } } #[test] fn test_nested_path() { - assert_eq!( - apply( - node, - "node", - "( $it.is.\"great news\".right yep $yep ).\"world\"" - ), - build_token(b::path( - b::parens(vec![ - b::sp(), - b::path( + equal_tokens! { + + r#"( $it.is."great news".right yep $yep )."world""# -> b::token_list( + vec![ + b::parens(vec![ + b::sp(), b::var("it"), - vec![b::member("is"), b::string("great news"), b::member("right")] - ), - b::sp(), - b::bare("yep"), - b::sp(), - b::var("yep"), - b::sp() - ]), - vec![b::string("world")] - )) - ) + b::op("."), + b::bare("is"), + b::op("."), + b::string("great news"), + b::op("."), + b::bare("right"), + b::sp(), + b::bare("yep"), + b::sp(), + b::var("yep"), + b::sp() + ]), + b::op("."), b::string("world")] + ) + } } #[test] fn test_smoke_single_command() { - assert_eq!( - apply(raw_call, "raw_call", "git add ."), - build(b::call( - b::bare("git"), - vec![b::sp(), b::bare("add"), b::sp(), b::bare(".")] - )) - ); + equal_tokens! { + + "git add ." -> b::token_list(vec![b::bare("git"), b::sp(), b::bare("add"), b::sp(), b::op(".")]) + } - assert_eq!( - apply(raw_call, "raw_call", "open Cargo.toml"), - build(b::call( - b::bare("open"), - vec![b::sp(), b::bare("Cargo.toml")] - )) - ); + equal_tokens! { + + "open Cargo.toml" -> b::token_list(vec![b::bare("open"), b::sp(), b::bare("Cargo"), b::op("."), b::bare("toml")]) + } - assert_eq!( - apply(raw_call, "raw_call", "select package.version"), - build(b::call( - b::bare("select"), - vec![b::sp(), b::bare("package.version")] - )) - ); + equal_tokens! { + + "select package.version" -> b::token_list(vec![b::bare("select"), b::sp(), b::bare("package"), b::op("."), b::bare("version")]) + } - assert_eq!( - apply(raw_call, "raw_call", "echo $it"), - build(b::call(b::bare("echo"), vec![b::sp(), b::var("it")])) - ); + equal_tokens! 
{ + + "echo $it" -> b::token_list(vec![b::bare("echo"), b::sp(), b::var("it")]) + } - assert_eq!( - apply(raw_call, "raw_call", "open Cargo.toml --raw"), - build(b::call( - b::bare("open"), - vec![b::sp(), b::bare("Cargo.toml"), b::sp(), b::flag("raw")] - )) - ); + equal_tokens! { + + "open Cargo.toml --raw" -> b::token_list(vec![b::bare("open"), b::sp(), b::bare("Cargo"), b::op("."), b::bare("toml"), b::sp(), b::flag("raw")]) + } - assert_eq!( - apply(raw_call, "raw_call", "open Cargo.toml -r"), - build(b::call( - b::bare("open"), - vec![b::sp(), b::bare("Cargo.toml"), b::sp(), b::shorthand("r")] - )) - ); + equal_tokens! { + + "open Cargo.toml -r" -> b::token_list(vec![b::bare("open"), b::sp(), b::bare("Cargo"), b::op("."), b::bare("toml"), b::sp(), b::shorthand("r")]) + } - assert_eq!( - apply(raw_call, "raw_call", "config --set tabs 2"), - build(b::call( - b::bare("config"), - vec![ - b::sp(), - b::flag("set"), - b::sp(), - b::bare("tabs"), - b::sp(), - b::int(2) - ] - )) - ); + equal_tokens! 
{ + + "config --set tabs 2" -> b::token_list(vec![b::bare("config"), b::sp(), b::flag("set"), b::sp(), b::bare("tabs"), b::sp(), b::int(2)]) + } } #[test] @@ -1181,120 +1081,159 @@ mod tests { equal_tokens!( "cargo +nightly run" -> - b::pipeline(vec![( - None, - b::call( - b::bare("cargo"), - vec![ - b::sp(), - b::external_word("+nightly"), - b::sp(), - b::bare("run") - ] - ), - None - )]) + b::pipeline(vec![vec![ + b::bare("cargo"), + b::sp(), + b::external_word("+nightly"), + b::sp(), + b::bare("run") + ]]) ); equal_tokens!( "rm foo%bar" -> - b::pipeline(vec![( - None, - b::call(b::bare("rm"), vec![b::sp(), b::external_word("foo%bar"),]), - None - )]) + b::pipeline(vec![vec![ + b::bare("rm"), b::sp(), b::external_word("foo%bar") + ]]) ); equal_tokens!( "rm foo%bar" -> - b::pipeline(vec![( - None, - b::call(b::bare("rm"), vec![b::sp(), b::external_word("foo%bar"),]), - None - )]) + b::pipeline(vec![vec![ + b::bare("rm"), b::sp(), b::external_word("foo%bar"), + ]]) ); } #[test] - fn test_smoke_pipeline() { + fn test_pipeline() { let _ = pretty_env_logger::try_init(); - assert_eq!( - apply( - pipeline, - "pipeline", - r#"git branch --merged | split-row "`n" | where $it != "* master""# - ), - build_token(b::pipeline(vec![ - ( - None, - b::call( - b::bare("git"), - vec![b::sp(), b::bare("branch"), b::sp(), b::flag("merged")] - ), - Some(" ") - ), - ( - Some(" "), - b::call(b::bare("split-row"), vec![b::sp(), b::string("`n")]), - Some(" ") - ), - ( - Some(" "), - b::call( - b::bare("where"), - vec![ - b::sp(), - b::var("it"), - b::sp(), - b::op("!="), - b::sp(), - b::string("* master") - ] - ), - None - ) - ])) - ); - - assert_eq!( - apply(pipeline, "pipeline", "ls | where { $it.size > 100 }"), - build_token(b::pipeline(vec![ - (None, b::call(b::bare("ls"), vec![]), Some(" ")), - ( - Some(" "), - b::call( - b::bare("where"), - vec![ - b::sp(), - b::braced(vec![ - b::path(b::var("it"), vec![b::member("size")]), - b::sp(), - b::op(">"), - b::sp(), - b::int(100) - ]) - ] 
- ), - None - ) - ])) - ) + equal_tokens! { + "sys | echo" -> b::pipeline(vec![ + vec![ + b::bare("sys"), b::sp() + ], + vec![ + b::sp(), b::bare("echo") + ] + ]) + } } - fn apply( - f: impl Fn(NomSpan) -> Result<(NomSpan, T), nom::Err<(NomSpan, nom::error::ErrorKind)>>, + #[test] + fn test_patterns() { + equal_tokens! { + + "cp ../formats/*" -> b::pipeline(vec![vec![b::bare("cp"), b::ws(" "), b::op("."), b::op("."), b::pattern("/formats/*")]]) + } + + equal_tokens! { + + "cp * /dev/null" -> b::pipeline(vec![vec![b::bare("cp"), b::ws(" "), b::pattern("*"), b::ws(" "), b::bare("/dev/null")]]) + } + } + + // #[test] + // fn test_pseudo_paths() { + // let _ = pretty_env_logger::try_init(); + + // equal_tokens!( + // r#"sys | where cpu."max ghz" > 1"# -> + // b::pipeline(vec![ + // (None, b::call(b::bare("sys"), vec![]), Some(" ")), + // ( + // Some(" "), + // b::call( + // b::bare("where"), + // vec![ + // b::sp(), + // b::path(b::bare("cpu"), vec![b::string("max ghz")]), + // b::sp(), + // b::op(">"), + // b::sp(), + // b::int(1) + // ] + // ), + // None + // ) + // ]) + // ); + // } + + // #[test] + // fn test_smoke_pipeline() { + // let _ = pretty_env_logger::try_init(); + + // assert_eq!( + // apply( + // pipeline, + // "pipeline", + // r#"git branch --merged | split-row "`n" | where $it != "* master""# + // ), + // build_token(b::pipeline(vec![ + // ( + // None, + // b::call( + // b::bare("git"), + // vec![b::sp(), b::bare("branch"), b::sp(), b::flag("merged")] + // ), + // Some(" ") + // ), + // ( + // Some(" "), + // b::call(b::bare("split-row"), vec![b::sp(), b::string("`n")]), + // Some(" ") + // ), + // ( + // Some(" "), + // b::call( + // b::bare("where"), + // vec![ + // b::sp(), + // b::var("it"), + // b::sp(), + // b::op("!="), + // b::sp(), + // b::string("* master") + // ] + // ), + // None + // ) + // ])) + // ); + + // assert_eq!( + // apply(pipeline, "pipeline", "ls | where { $it.size > 100 }"), + // build_token(b::pipeline(vec![ + // (None, 
b::call(b::bare("ls"), vec![]), Some(" ")), + // ( + // Some(" "), + // b::call( + // b::bare("where"), + // vec![ + // b::sp(), + // b::braced(vec![ + // b::path(b::var("it"), vec![b::member("size")]), + // b::sp(), + // b::op(">"), + // b::sp(), + // b::int(100) + // ]) + // ] + // ), + // None + // ) + // ])) + // ) + // } + + fn apply( + f: impl Fn(NomSpan) -> Result<(NomSpan, TokenNode), nom::Err<(NomSpan, nom::error::ErrorKind)>>, desc: &str, string: &str, - ) -> T { - match f(NomSpan::new_extra(string, uuid::Uuid::nil())) { - Ok(v) => v.1, - Err(other) => { - println!("{:?}", other); - println!("for {} @ {}", string, desc); - panic!("No dice"); - } - } + ) -> TokenNode { + f(nom_input(string, uuid::Uuid::nil())).unwrap().1 } fn tag(left: usize, right: usize) -> Tag { @@ -1312,17 +1251,6 @@ mod tests { TokenNode::Delimited(spanned) } - fn path(head: TokenNode, tail: Vec, left: usize, right: usize) -> TokenNode { - let tag = head.tag(); - - let node = PathNode::new( - Box::new(head), - tail.into_iter().map(TokenNode::Token).collect(), - ); - let spanned = node.tagged((left, right, tag.anchor)); - TokenNode::Path(spanned) - } - fn token(token: RawToken, left: usize, right: usize) -> TokenNode { TokenNode::Token(token.tagged((left, right, uuid::Uuid::nil()))) } diff --git a/src/parser/parse/pipeline.rs b/src/parser/parse/pipeline.rs index 42bbe23a18..36813e39c4 100644 --- a/src/parser/parse/pipeline.rs +++ b/src/parser/parse/pipeline.rs @@ -1,4 +1,4 @@ -use crate::parser::CallNode; +use crate::parser::TokenNode; use crate::traits::ToDebug; use crate::{Tag, Tagged}; use derive_new::new; @@ -7,20 +7,16 @@ use std::fmt; #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, new)] pub struct Pipeline { - pub(crate) parts: Vec, - pub(crate) post_ws: Option, + pub(crate) parts: Vec>, + // pub(crate) post_ws: Option, } impl ToDebug for Pipeline { fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { - for part in &self.parts { + for part in 
self.parts.iter() { write!(f, "{}", part.debug(source))?; } - if let Some(post_ws) = self.post_ws { - write!(f, "{}", post_ws.slice(source))? - } - Ok(()) } } @@ -28,10 +24,7 @@ impl ToDebug for Pipeline { #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)] pub struct PipelineElement { pub pipe: Option, - pub pre_ws: Option, - #[get = "pub(crate)"] - call: Tagged, - pub post_ws: Option, + pub tokens: Tagged>, } impl ToDebug for PipelineElement { @@ -40,14 +33,8 @@ impl ToDebug for PipelineElement { write!(f, "{}", pipe.slice(source))?; } - if let Some(pre_ws) = self.pre_ws { - write!(f, "{}", pre_ws.slice(source))?; - } - - write!(f, "{}", self.call.debug(source))?; - - if let Some(post_ws) = self.post_ws { - write!(f, "{}", post_ws.slice(source))?; + for token in &self.tokens.item { + write!(f, "{}", token.debug(source))?; } Ok(()) diff --git a/src/parser/parse/token_tree.rs b/src/parser/parse/token_tree.rs index e0072360e8..8cbb28264b 100644 --- a/src/parser/parse/token_tree.rs +++ b/src/parser/parse/token_tree.rs @@ -1,5 +1,6 @@ use crate::errors::ShellError; -use crate::parser::parse::{call_node::*, flag::*, operator::*, pipeline::*, tokens::*}; +use crate::parser::parse::{call_node::*, flag::*, pipeline::*, tokens::*}; +use crate::prelude::*; use crate::traits::ToDebug; use crate::{Tag, Tagged, Text}; use derive_new::new; @@ -12,15 +13,14 @@ pub enum TokenNode { Token(Token), Call(Tagged), + Nodes(Tagged>), Delimited(Tagged), Pipeline(Tagged), - Operator(Tagged), Flag(Tagged), Member(Tag), Whitespace(Tag), Error(Tagged>), - Path(Tagged), } impl ToDebug for TokenNode { @@ -94,32 +94,33 @@ impl TokenNode { pub fn tag(&self) -> Tag { match self { TokenNode::Token(t) => t.tag(), + TokenNode::Nodes(t) => t.tag(), TokenNode::Call(s) => s.tag(), TokenNode::Delimited(s) => s.tag(), TokenNode::Pipeline(s) => s.tag(), - TokenNode::Operator(s) => s.tag(), TokenNode::Flag(s) => s.tag(), TokenNode::Member(s) => *s, TokenNode::Whitespace(s) => *s, 
TokenNode::Error(s) => s.tag(), - TokenNode::Path(s) => s.tag(), } } - pub fn type_name(&self) -> String { + pub fn type_name(&self) -> &'static str { match self { TokenNode::Token(t) => t.type_name(), + TokenNode::Nodes(_) => "nodes", TokenNode::Call(_) => "command", TokenNode::Delimited(d) => d.type_name(), TokenNode::Pipeline(_) => "pipeline", - TokenNode::Operator(_) => "operator", TokenNode::Flag(_) => "flag", TokenNode::Member(_) => "member", TokenNode::Whitespace(_) => "whitespace", TokenNode::Error(_) => "error", - TokenNode::Path(_) => "path", } - .to_string() + } + + pub fn tagged_type_name(&self) -> Tagged<&'static str> { + self.type_name().tagged(self.tag()) } pub fn old_debug<'a>(&'a self, source: &'a Text) -> DebugTokenNode<'a> { @@ -134,6 +135,16 @@ impl TokenNode { self.tag().slice(source) } + pub fn get_variable(&self) -> Result<(Tag, Tag), ShellError> { + match self { + TokenNode::Token(Tagged { + item: RawToken::Variable(inner_tag), + tag: outer_tag, + }) => Ok((*outer_tag, *inner_tag)), + _ => Err(ShellError::type_error("variable", self.tagged_type_name())), + } + } + pub fn is_bare(&self) -> bool { match self { TokenNode::Token(Tagged { @@ -144,6 +155,20 @@ impl TokenNode { } } + pub fn as_block(&self) -> Option> { + match self { + TokenNode::Delimited(Tagged { + item: + DelimitedNode { + delimiter, + children, + }, + tag, + }) if *delimiter == Delimiter::Brace => Some((&children[..]).tagged(tag)), + _ => None, + } + } + pub fn is_external(&self) -> bool { match self { TokenNode::Token(Tagged { @@ -181,13 +206,60 @@ impl TokenNode { _ => Err(ShellError::string("unimplemented")), } } + + pub fn is_whitespace(&self) -> bool { + match self { + TokenNode::Whitespace(_) => true, + _ => false, + } + } + + pub fn expect_string(&self) -> (Tag, Tag) { + match self { + TokenNode::Token(Tagged { + item: RawToken::String(inner_tag), + tag: outer_tag, + }) => (*outer_tag, *inner_tag), + other => panic!("Expected string, found {:?}", other), + } + } +} + 
+#[cfg(test)] +impl TokenNode { + pub fn expect_list(&self) -> Tagged<&[TokenNode]> { + match self { + TokenNode::Nodes(Tagged { item, tag }) => (&item[..]).tagged(tag), + other => panic!("Expected list, found {:?}", other), + } + } + + pub fn expect_var(&self) -> (Tag, Tag) { + match self { + TokenNode::Token(Tagged { + item: RawToken::Variable(inner_tag), + tag: outer_tag, + }) => (*outer_tag, *inner_tag), + other => panic!("Expected var, found {:?}", other), + } + } + + pub fn expect_bare(&self) -> Tag { + match self { + TokenNode::Token(Tagged { + item: RawToken::Bare, + tag, + }) => *tag, + other => panic!("Expected var, found {:?}", other), + } + } } #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)] #[get = "pub(crate)"] pub struct DelimitedNode { - delimiter: Delimiter, - children: Vec, + pub(crate) delimiter: Delimiter, + pub(crate) children: Vec, } impl DelimitedNode { @@ -207,6 +279,24 @@ pub enum Delimiter { Square, } +impl Delimiter { + pub(crate) fn open(&self) -> char { + match self { + Delimiter::Paren => '(', + Delimiter::Brace => '{', + Delimiter::Square => '[', + } + } + + pub(crate) fn close(&self) -> char { + match self { + Delimiter::Paren => ')', + Delimiter::Brace => '}', + Delimiter::Square => ']', + } + } +} + #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)] #[get = "pub(crate)"] pub struct PathNode { diff --git a/src/parser/parse/token_tree_builder.rs b/src/parser/parse/token_tree_builder.rs index 9a2e6ab721..67298987a4 100644 --- a/src/parser/parse/token_tree_builder.rs +++ b/src/parser/parse/token_tree_builder.rs @@ -3,7 +3,7 @@ use crate::prelude::*; use crate::parser::parse::flag::{Flag, FlagKind}; use crate::parser::parse::operator::Operator; use crate::parser::parse::pipeline::{Pipeline, PipelineElement}; -use crate::parser::parse::token_tree::{DelimitedNode, Delimiter, PathNode, TokenNode}; +use crate::parser::parse::token_tree::{DelimitedNode, Delimiter, TokenNode}; use 
crate::parser::parse::tokens::{RawNumber, RawToken}; use crate::parser::parse::unit::Unit; use crate::parser::CallNode; @@ -31,60 +31,68 @@ impl TokenTreeBuilder { (node, builder.output) } - pub fn pipeline(input: Vec<(Option<&str>, CurriedCall, Option<&str>)>) -> CurriedToken { - let input: Vec<(Option, CurriedCall, Option)> = input - .into_iter() - .map(|(pre, call, post)| { - ( - pre.map(|s| s.to_string()), - call, - post.map(|s| s.to_string()), - ) - }) - .collect(); + fn build_tagged(&mut self, callback: impl FnOnce(&mut TokenTreeBuilder) -> T) -> Tagged { + let start = self.pos; + let ret = callback(self); + let end = self.pos; + ret.tagged((start, end, self.anchor)) + } + + pub fn pipeline(input: Vec>) -> CurriedToken { Box::new(move |b| { let start = b.pos; - let mut out: Vec = vec![]; + let mut out: Vec> = vec![]; let mut input = input.into_iter().peekable(); - let (pre, call, post) = input + let head = input .next() .expect("A pipeline must contain at least one element"); let pipe = None; - let pre_tag = pre.map(|pre| b.consume_tag(&pre)); - let call = call(b); - let post_tag = post.map(|post| b.consume_tag(&post)); + let head = b.build_tagged(|b| head.into_iter().map(|node| node(b)).collect()); - out.push(PipelineElement::new(pipe, pre_tag, call, post_tag)); + let head_tag: Tag = head.tag; + out.push(PipelineElement::new(pipe, head).tagged(head_tag)); loop { match input.next() { None => break, - Some((pre, call, post)) => { + Some(node) => { + let start = b.pos; let pipe = Some(b.consume_tag("|")); - let pre_span = pre.map(|pre| b.consume_tag(&pre)); - let call = call(b); - let post_span = post.map(|post| b.consume_tag(&post)); + let node = + b.build_tagged(|b| node.into_iter().map(|node| node(b)).collect()); + let end = b.pos; - out.push(PipelineElement::new(pipe, pre_span, call, post_span)); + out.push(PipelineElement::new(pipe, node).tagged((start, end, b.anchor))); } } } let end = b.pos; - TokenTreeBuilder::tagged_pipeline((out, None), (start, end, 
b.anchor)) + TokenTreeBuilder::tagged_pipeline(out, (start, end, b.anchor)) }) } - pub fn tagged_pipeline( - input: (Vec, Option), - tag: impl Into, - ) -> TokenNode { - TokenNode::Pipeline(Pipeline::new(input.0, input.1.into()).tagged(tag.into())) + pub fn tagged_pipeline(input: Vec>, tag: impl Into) -> TokenNode { + TokenNode::Pipeline(Pipeline::new(input).tagged(tag.into())) + } + + pub fn token_list(input: Vec) -> CurriedToken { + Box::new(move |b| { + let start = b.pos; + let tokens = input.into_iter().map(|i| i(b)).collect(); + let end = b.pos; + + TokenTreeBuilder::tagged_token_list(tokens, (start, end, b.anchor)) + }) + } + + pub fn tagged_token_list(input: Vec, tag: impl Into) -> TokenNode { + TokenNode::Nodes(input.tagged(tag)) } pub fn op(input: impl Into) -> CurriedToken { @@ -100,7 +108,7 @@ impl TokenTreeBuilder { } pub fn tagged_op(input: impl Into, tag: impl Into) -> TokenNode { - TokenNode::Operator(input.into().tagged(tag.into())) + TokenNode::Token(RawToken::Operator(input.into()).tagged(tag.into())) } pub fn string(input: impl Into) -> CurriedToken { @@ -168,8 +176,23 @@ impl TokenTreeBuilder { TokenNode::Token(RawToken::ExternalWord.tagged(input.into())) } - pub fn tagged_external(input: impl Into, tag: impl Into) -> TokenNode { - TokenNode::Token(RawToken::ExternalCommand(input.into()).tagged(tag.into())) + pub fn external_command(input: impl Into) -> CurriedToken { + let input = input.into(); + + Box::new(move |b| { + let (outer_start, _) = b.consume("^"); + let (inner_start, end) = b.consume(&input); + b.pos = end; + + TokenTreeBuilder::tagged_external_command( + (inner_start, end, b.anchor), + (outer_start, end, b.anchor), + ) + }) + } + + pub fn tagged_external_command(inner: impl Into, outer: impl Into) -> TokenNode { + TokenNode::Token(RawToken::ExternalCommand(inner.into()).tagged(outer.into())) } pub fn int(input: impl Into) -> CurriedToken { @@ -229,29 +252,6 @@ impl TokenTreeBuilder { TokenNode::Token(RawToken::Size(int, 
unit).tagged(tag.into())) } - pub fn path(head: CurriedToken, tail: Vec) -> CurriedToken { - Box::new(move |b| { - let start = b.pos; - let head = head(b); - - let mut output = vec![]; - - for item in tail { - b.consume("."); - - output.push(item(b)); - } - - let end = b.pos; - - TokenTreeBuilder::tagged_path((head, output), (start, end, b.anchor)) - }) - } - - pub fn tagged_path(input: (TokenNode, Vec), tag: impl Into) -> TokenNode { - TokenNode::Path(PathNode::new(Box::new(input.0), input.1).tagged(tag.into())) - } - pub fn var(input: impl Into) -> CurriedToken { let input = input.into(); diff --git a/src/parser/parse/tokens.rs b/src/parser/parse/tokens.rs index d796a8fcb7..77a856af3f 100644 --- a/src/parser/parse/tokens.rs +++ b/src/parser/parse/tokens.rs @@ -1,4 +1,5 @@ use crate::parser::parse::unit::*; +use crate::parser::Operator; use crate::prelude::*; use crate::{Tagged, Text}; use std::fmt; @@ -7,6 +8,7 @@ use std::str::FromStr; #[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] pub enum RawToken { Number(RawNumber), + Operator(Operator), Size(RawNumber, Unit), String(Tag), Variable(Tag), @@ -49,12 +51,13 @@ impl RawToken { pub fn type_name(&self) -> &'static str { match self { RawToken::Number(_) => "Number", + RawToken::Operator(..) => "operator", RawToken::Size(..) 
=> "Size", RawToken::String(_) => "String", - RawToken::Variable(_) => "Variable", - RawToken::ExternalCommand(_) => "ExternalCommand", - RawToken::ExternalWord => "ExternalWord", - RawToken::GlobPattern => "GlobPattern", + RawToken::Variable(_) => "variable", + RawToken::ExternalCommand(_) => "external command", + RawToken::ExternalWord => "external word", + RawToken::GlobPattern => "glob pattern", RawToken::Bare => "String", } } diff --git a/src/parser/parse_command.rs b/src/parser/parse_command.rs index 36ba82f8e5..d383689fd9 100644 --- a/src/parser/parse_command.rs +++ b/src/parser/parse_command.rs @@ -1,92 +1,35 @@ -use crate::context::Context; use crate::errors::{ArgumentError, ShellError}; +use crate::parser::hir::syntax_shape::{expand_expr, spaced}; use crate::parser::registry::{NamedType, PositionalType, Signature}; -use crate::parser::{baseline_parse_tokens, CallNode}; +use crate::parser::TokensIterator; use crate::parser::{ - hir::{self, NamedArguments}, - Flag, RawToken, TokenNode, + hir::{self, ExpandContext, NamedArguments}, + Flag, }; use crate::traits::ToDebug; -use crate::{Tag, Tagged, TaggedItem, Text}; +use crate::{Tag, Tagged, Text}; use log::trace; -pub fn parse_command( +pub fn parse_command_tail( config: &Signature, - context: &Context, - call: &Tagged, - source: &Text, -) -> Result { - let Tagged { item: raw_call, .. } = call; - - trace!("Processing {:?}", config); - - let head = parse_command_head(call.head())?; - - let children: Option> = raw_call.children().as_ref().map(|nodes| { - nodes - .iter() - .cloned() - .filter(|node| match node { - TokenNode::Whitespace(_) => false, - _ => true, - }) - .collect() - }); - - match parse_command_tail(&config, context, children, source, call.tag())? 
{ - None => Ok(hir::Call::new(Box::new(head), None, None)), - Some((positional, named)) => Ok(hir::Call::new(Box::new(head), positional, named)), - } -} - -fn parse_command_head(head: &TokenNode) -> Result { - match head { - TokenNode::Token( - spanned @ Tagged { - item: RawToken::Bare, - .. - }, - ) => Ok(spanned.map(|_| hir::RawExpression::Literal(hir::Literal::Bare))), - - TokenNode::Token(Tagged { - item: RawToken::String(inner_tag), - tag, - }) => Ok(hir::RawExpression::Literal(hir::Literal::String(*inner_tag)).tagged(*tag)), - - other => Err(ShellError::unexpected(&format!( - "command head -> {:?}", - other - ))), - } -} - -fn parse_command_tail( - config: &Signature, - context: &Context, - tail: Option>, - source: &Text, + context: &ExpandContext, + tail: &mut TokensIterator, command_tag: Tag, ) -> Result>, Option)>, ShellError> { - let tail = &mut match &tail { - None => hir::TokensIterator::new(&[]), - Some(tail) => hir::TokensIterator::new(tail), - }; - let mut named = NamedArguments::new(); - - trace_remaining("nodes", tail.clone(), source); + trace_remaining("nodes", tail.clone(), context.source()); for (name, kind) in &config.named { trace!(target: "nu::parse", "looking for {} : {:?}", name, kind); match kind { NamedType::Switch => { - let flag = extract_switch(name, tail, source); + let flag = extract_switch(name, tail, context.source()); named.insert_switch(name, flag); } NamedType::Mandatory(syntax_type) => { - match extract_mandatory(config, name, tail, source, command_tag) { + match extract_mandatory(config, name, tail, context.source(), command_tag) { Err(err) => return Err(err), // produce a correct diagnostic Ok((pos, flag)) => { tail.move_to(pos); @@ -99,42 +42,47 @@ fn parse_command_tail( )); } - let expr = - hir::baseline_parse_next_expr(tail, context, source, *syntax_type)?; + let expr = expand_expr(&spaced(*syntax_type), tail, context)?; tail.restart(); named.insert_mandatory(name, expr); } } } - NamedType::Optional(syntax_type) => match 
extract_optional(name, tail, source) { - Err(err) => return Err(err), // produce a correct diagnostic - Ok(Some((pos, flag))) => { - tail.move_to(pos); + NamedType::Optional(syntax_type) => { + match extract_optional(name, tail, context.source()) { + Err(err) => return Err(err), // produce a correct diagnostic + Ok(Some((pos, flag))) => { + tail.move_to(pos); - if tail.at_end() { - return Err(ShellError::argument_error( - config.name.clone(), - ArgumentError::MissingValueForName(name.to_string()), - flag.tag(), - )); + if tail.at_end() { + return Err(ShellError::argument_error( + config.name.clone(), + ArgumentError::MissingValueForName(name.to_string()), + flag.tag(), + )); + } + + let expr = expand_expr(&spaced(*syntax_type), tail, context); + + match expr { + Err(_) => named.insert_optional(name, None), + Ok(expr) => named.insert_optional(name, Some(expr)), + } + + tail.restart(); } - let expr = hir::baseline_parse_next_expr(tail, context, source, *syntax_type)?; - - tail.restart(); - named.insert_optional(name, Some(expr)); + Ok(None) => { + tail.restart(); + named.insert_optional(name, None); + } } - - Ok(None) => { - tail.restart(); - named.insert_optional(name, None); - } - }, + } }; } - trace_remaining("after named", tail.clone(), source); + trace_remaining("after named", tail.clone(), context.source()); let mut positional = vec![]; @@ -143,7 +91,7 @@ fn parse_command_tail( match arg { PositionalType::Mandatory(..) => { - if tail.len() == 0 { + if tail.at_end() { return Err(ShellError::argument_error( config.name.clone(), ArgumentError::MissingMandatoryPositional(arg.name().to_string()), @@ -153,25 +101,36 @@ fn parse_command_tail( } PositionalType::Optional(..) 
=> { - if tail.len() == 0 { + if tail.at_end() { break; } } } - let result = hir::baseline_parse_next_expr(tail, context, source, arg.syntax_type())?; + let result = expand_expr(&spaced(arg.syntax_type()), tail, context)?; positional.push(result); } - trace_remaining("after positional", tail.clone(), source); + trace_remaining("after positional", tail.clone(), context.source()); if let Some(syntax_type) = config.rest_positional { - let remainder = baseline_parse_tokens(tail, context, source, syntax_type)?; - positional.extend(remainder); + let mut out = vec![]; + + loop { + if tail.at_end_possible_ws() { + break; + } + + let next = expand_expr(&spaced(syntax_type), tail, context)?; + + out.push(next); + } + + positional.extend(out); } - trace_remaining("after rest", tail.clone(), source); + trace_remaining("after rest", tail.clone(), context.source()); trace!("Constructed positional={:?} named={:?}", positional, named); diff --git a/src/parser/registry.rs b/src/parser/registry.rs index 955a1a04c9..888e5ae1e9 100644 --- a/src/parser/registry.rs +++ b/src/parser/registry.rs @@ -1,11 +1,11 @@ // TODO: Temporary redirect pub(crate) use crate::context::CommandRegistry; use crate::evaluate::{evaluate_baseline_expr, Scope}; -use crate::parser::{hir, hir::SyntaxShape, parse_command, CallNode}; +use crate::parser::{hir, hir::SyntaxShape}; use crate::prelude::*; use derive_new::new; use indexmap::IndexMap; -use log::trace; + use serde::{Deserialize, Serialize}; use std::fmt; @@ -271,21 +271,6 @@ impl<'a> Iterator for PositionalIter<'a> { } } -impl Signature { - pub(crate) fn parse_args( - &self, - call: &Tagged, - context: &Context, - source: &Text, - ) -> Result { - let args = parse_command(self, context, call, source)?; - - trace!("parsed args: {:?}", args); - - Ok(args) - } -} - pub(crate) fn evaluate_args( call: &hir::Call, registry: &CommandRegistry, diff --git a/src/plugins/add.rs b/src/plugins/add.rs index 03e1d42828..997400d67f 100644 --- a/src/plugins/add.rs +++ 
b/src/plugins/add.rs @@ -1,10 +1,13 @@ +use itertools::Itertools; use nu::{ - serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature, - SyntaxShape, Tagged, Value, + serve_plugin, CallInfo, Plugin, ReturnSuccess, ReturnValue, ShellError, Signature, SyntaxShape, + Tagged, Value, }; +pub type ColumnPath = Vec>; + struct Add { - field: Option, + field: Option, value: Option, } impl Add { @@ -19,12 +22,13 @@ impl Add { let value_tag = value.tag(); match (value.item, self.value.clone()) { (obj @ Value::Row(_), Some(v)) => match &self.field { - Some(f) => match obj.insert_data_at_path(value_tag, &f, v) { + Some(f) => match obj.insert_data_at_column_path(value_tag, &f, v) { Some(v) => return Ok(v), None => { return Err(ShellError::string(format!( "add could not find place to insert field {:?} {}", - obj, f + obj, + f.iter().map(|i| &i.item).join(".") ))) } }, @@ -44,7 +48,7 @@ impl Plugin for Add { fn config(&mut self) -> Result { Ok(Signature::build("add") .desc("Add a new field to the table.") - .required("Field", SyntaxShape::String) + .required("Field", SyntaxShape::ColumnPath) .required("Value", SyntaxShape::String) .rest(SyntaxShape::String) .filter()) @@ -53,12 +57,13 @@ impl Plugin for Add { fn begin_filter(&mut self, call_info: CallInfo) -> Result, ShellError> { if let Some(args) = call_info.args.positional { match &args[0] { - Tagged { - item: Value::Primitive(Primitive::String(s)), + table @ Tagged { + item: Value::Table(_), .. 
} => { - self.field = Some(s.clone()); + self.field = Some(table.as_column_path()?.item); } + _ => { return Err(ShellError::string(format!( "Unrecognized type in params: {:?}", diff --git a/src/plugins/edit.rs b/src/plugins/edit.rs index db116fedf5..6d35530ef5 100644 --- a/src/plugins/edit.rs +++ b/src/plugins/edit.rs @@ -1,10 +1,12 @@ use nu::{ - serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature, - SyntaxShape, Tagged, Value, + serve_plugin, CallInfo, Plugin, ReturnSuccess, ReturnValue, ShellError, Signature, SyntaxShape, + Tagged, Value, }; +pub type ColumnPath = Vec>; + struct Edit { - field: Option, + field: Option, value: Option, } impl Edit { @@ -19,7 +21,7 @@ impl Edit { let value_tag = value.tag(); match (value.item, self.value.clone()) { (obj @ Value::Row(_), Some(v)) => match &self.field { - Some(f) => match obj.replace_data_at_path(value_tag, &f, v) { + Some(f) => match obj.replace_data_at_column_path(value_tag, &f, v) { Some(v) => return Ok(v), None => { return Err(ShellError::string( @@ -43,7 +45,7 @@ impl Plugin for Edit { fn config(&mut self) -> Result { Ok(Signature::build("edit") .desc("Edit an existing column to have a new value.") - .required("Field", SyntaxShape::String) + .required("Field", SyntaxShape::ColumnPath) .required("Value", SyntaxShape::String) .filter()) } @@ -51,11 +53,11 @@ impl Plugin for Edit { fn begin_filter(&mut self, call_info: CallInfo) -> Result, ShellError> { if let Some(args) = call_info.args.positional { match &args[0] { - Tagged { - item: Value::Primitive(Primitive::String(s)), + table @ Tagged { + item: Value::Table(_), .. 
} => { - self.field = Some(s.clone()); + self.field = Some(table.as_column_path()?.item); } _ => { return Err(ShellError::string(format!( diff --git a/src/plugins/inc.rs b/src/plugins/inc.rs index ecab03dc97..4e6f6f0f64 100644 --- a/src/plugins/inc.rs +++ b/src/plugins/inc.rs @@ -14,8 +14,10 @@ pub enum SemVerAction { Patch, } +pub type ColumnPath = Vec>; + struct Inc { - field: Option, + field: Option, error: Option, action: Option, } @@ -85,16 +87,17 @@ impl Inc { } Value::Row(_) => match self.field { Some(ref f) => { - let replacement = match value.item.get_data_by_path(value.tag(), f) { + let replacement = match value.item.get_data_by_column_path(value.tag(), f) { Some(result) => self.inc(result.map(|x| x.clone()))?, None => { return Err(ShellError::string("inc could not find field to replace")) } }; - match value - .item - .replace_data_at_path(value.tag(), f, replacement.item.clone()) - { + match value.item.replace_data_at_column_path( + value.tag(), + f, + replacement.item.clone(), + ) { Some(v) => return Ok(v), None => { return Err(ShellError::string("inc could not find field to replace")) @@ -120,7 +123,7 @@ impl Plugin for Inc { .switch("major") .switch("minor") .switch("patch") - .rest(SyntaxShape::String) + .rest(SyntaxShape::ColumnPath) .filter()) } @@ -138,11 +141,11 @@ impl Plugin for Inc { if let Some(args) = call_info.args.positional { for arg in args { match arg { - Tagged { - item: Value::Primitive(Primitive::String(s)), + table @ Tagged { + item: Value::Table(_), .. 
} => { - self.field = Some(s); + self.field = Some(table.as_column_path()?.item); } _ => { return Err(ShellError::string(format!( @@ -209,8 +212,13 @@ mod tests { } fn with_parameter(&mut self, name: &str) -> &mut Self { + let fields: Vec> = name + .split(".") + .map(|s| Value::string(s.to_string()).tagged(Tag::unknown_span(self.anchor))) + .collect(); + self.positionals - .push(Value::string(name.to_string()).tagged(Tag::unknown_span(self.anchor))); + .push(Value::Table(fields).tagged(Tag::unknown_span(self.anchor))); self } @@ -297,7 +305,12 @@ mod tests { ) .is_ok()); - assert_eq!(plugin.field, Some("package.version".to_string())); + assert_eq!( + plugin + .field + .map(|f| f.into_iter().map(|f| f.item).collect()), + Some(vec!["package".to_string(), "version".to_string()]) + ); } #[test] diff --git a/src/plugins/str.rs b/src/plugins/str.rs index 4b74914f09..7bd35733da 100644 --- a/src/plugins/str.rs +++ b/src/plugins/str.rs @@ -1,6 +1,6 @@ use nu::{ serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature, - SyntaxShape, Tagged, Value, + SyntaxShape, Tagged, TaggedItem, Value, }; #[derive(Debug, Eq, PartialEq)] @@ -10,8 +10,10 @@ enum Action { ToInteger, } +pub type ColumnPath = Vec>; + struct Str { - field: Option, + field: Option, params: Option>, error: Option, action: Option, @@ -43,8 +45,8 @@ impl Str { Ok(applied) } - fn for_field(&mut self, field: &str) { - self.field = Some(String::from(field)); + fn for_field(&mut self, column_path: ColumnPath) { + self.field = Some(column_path); } fn permit(&mut self) -> bool { @@ -92,14 +94,15 @@ impl Str { } Value::Row(_) => match self.field { Some(ref f) => { - let replacement = match value.item.get_data_by_path(value.tag(), f) { + let replacement = match value.item.get_data_by_column_path(value.tag(), f) { Some(result) => self.strutils(result.map(|x| x.clone()))?, None => return Ok(Tagged::from_item(Value::nothing(), value.tag)), }; - match value - .item - 
.replace_data_at_path(value.tag(), f, replacement.item.clone()) - { + match value.item.replace_data_at_column_path( + value.tag(), + f, + replacement.item.clone(), + ) { Some(v) => return Ok(v), None => { return Err(ShellError::string("str could not find field to replace")) @@ -127,7 +130,7 @@ impl Plugin for Str { .switch("downcase") .switch("upcase") .switch("to-int") - .rest(SyntaxShape::Member) + .rest(SyntaxShape::ColumnPath) .filter()) } @@ -148,15 +151,21 @@ impl Plugin for Str { match possible_field { Tagged { item: Value::Primitive(Primitive::String(s)), - .. + tag, } => match self.action { Some(Action::Downcase) | Some(Action::Upcase) | Some(Action::ToInteger) | None => { - self.for_field(&s); + self.for_field(vec![s.clone().tagged(tag)]); } }, + table @ Tagged { + item: Value::Table(_), + .. + } => { + self.field = Some(table.as_column_path()?.item); + } _ => { return Err(ShellError::string(format!( "Unrecognized type in params: {:?}", @@ -227,8 +236,13 @@ mod tests { } fn with_parameter(&mut self, name: &str) -> &mut Self { + let fields: Vec> = name + .split(".") + .map(|s| Value::string(s.to_string()).tagged(Tag::unknown_span(self.anchor))) + .collect(); + self.positionals - .push(Value::string(name.to_string()).tagged(Tag::unknown())); + .push(Value::Table(fields).tagged(Tag::unknown_span(self.anchor))); self } @@ -303,7 +317,12 @@ mod tests { ) .is_ok()); - assert_eq!(plugin.field, Some("package.description".to_string())); + assert_eq!( + plugin + .field + .map(|f| f.into_iter().map(|f| f.item).collect()), + Some(vec!["package".to_string(), "description".to_string()]) + ) } #[test] diff --git a/src/shell/helper.rs b/src/shell/helper.rs index 6fb4544352..85591cf047 100644 --- a/src/shell/helper.rs +++ b/src/shell/helper.rs @@ -1,3 +1,4 @@ +use crate::parser::hir::TokensIterator; use crate::parser::nom_input; use crate::parser::parse::token_tree::TokenNode; use crate::parser::parse::tokens::RawToken; @@ -77,16 +78,12 @@ impl Highlighter for Helper { 
Ok(v) => v, }; - let Pipeline { parts, post_ws } = pipeline; + let Pipeline { parts } = pipeline; let mut iter = parts.into_iter(); loop { match iter.next() { None => { - if let Some(ws) = post_ws { - out.push_str(ws.slice(line)); - } - return Cow::Owned(out); } Some(token) => { @@ -107,13 +104,12 @@ impl Highlighter for Helper { fn paint_token_node(token_node: &TokenNode, line: &str) -> String { let styled = match token_node { TokenNode::Call(..) => Color::Cyan.bold().paint(token_node.tag().slice(line)), + TokenNode::Nodes(..) => Color::Green.bold().paint(token_node.tag().slice(line)), TokenNode::Whitespace(..) => Color::White.normal().paint(token_node.tag().slice(line)), TokenNode::Flag(..) => Color::Black.bold().paint(token_node.tag().slice(line)), TokenNode::Member(..) => Color::Yellow.bold().paint(token_node.tag().slice(line)), - TokenNode::Path(..) => Color::Green.bold().paint(token_node.tag().slice(line)), TokenNode::Error(..) => Color::Red.bold().paint(token_node.tag().slice(line)), TokenNode::Delimited(..) => Color::White.paint(token_node.tag().slice(line)), - TokenNode::Operator(..) => Color::White.normal().paint(token_node.tag().slice(line)), TokenNode::Pipeline(..) => Color::Blue.normal().paint(token_node.tag().slice(line)), TokenNode::Token(Tagged { item: RawToken::Number(..), @@ -147,6 +143,10 @@ fn paint_token_node(token_node: &TokenNode, line: &str) -> String { item: RawToken::ExternalWord, .. }) => Color::Black.bold().paint(token_node.tag().slice(line)), + TokenNode::Token(Tagged { + item: RawToken::Operator(..), + .. 
+ }) => Color::Black.bold().paint(token_node.tag().slice(line)), }; styled.to_string() @@ -159,25 +159,19 @@ fn paint_pipeline_element(pipeline_element: &PipelineElement, line: &str) -> Str styled.push_str(&Color::Purple.paint("|")); } - if let Some(ws) = pipeline_element.pre_ws { - styled.push_str(&Color::White.normal().paint(ws.slice(line))); - } + let mut tokens = + TokensIterator::new(&pipeline_element.tokens, pipeline_element.tokens.tag, false); + let head = tokens.next(); - styled.push_str( - &Color::Cyan - .bold() - .paint(pipeline_element.call().head().tag().slice(line)) - .to_string(), - ); - - if let Some(children) = pipeline_element.call().children() { - for child in children { - styled.push_str(&paint_token_node(child, line)); + match head { + None => return styled, + Some(head) => { + styled.push_str(&Color::Cyan.bold().paint(head.tag().slice(line)).to_string()) } } - if let Some(ws) = pipeline_element.post_ws { - styled.push_str(&Color::White.normal().paint(ws.slice(line))); + for token in tokens { + styled.push_str(&paint_token_node(token, line)); } styled.to_string() diff --git a/tests/command_open_tests.rs b/tests/command_open_tests.rs index 54dc7ad54d..e9047883cf 100644 --- a/tests/command_open_tests.rs +++ b/tests/command_open_tests.rs @@ -212,7 +212,7 @@ fn open_can_parse_ini() { fn open_can_parse_utf16_ini() { let actual = nu!( cwd: "tests/fixtures/formats", - "open utf16.ini | get .ShellClassInfo | get IconIndex | echo $it" + "open utf16.ini | get '.ShellClassInfo' | get IconIndex | echo $it" ); assert_eq!(actual, "-236") diff --git a/tests/helpers/mod.rs b/tests/helpers/mod.rs index 04fd889925..199038b531 100644 --- a/tests/helpers/mod.rs +++ b/tests/helpers/mod.rs @@ -93,6 +93,7 @@ macro_rules! 
nu { .write_all(commands.as_bytes()) .expect("couldn't write to stdin"); + let output = process .wait_with_output() .expect("couldn't read from stdout"); From d2eb6f6646195b6c95b09c9ed141a811709b5502 Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Thu, 10 Oct 2019 21:23:12 +0200 Subject: [PATCH 010/184] Adds .envrc and shell.nix --- .envrc | 1 + shell.nix | 10 ++++++++++ 2 files changed, 11 insertions(+) create mode 100644 .envrc create mode 100644 shell.nix diff --git a/.envrc b/.envrc new file mode 100644 index 0000000000..65326bb6dd --- /dev/null +++ b/.envrc @@ -0,0 +1 @@ +use nix \ No newline at end of file diff --git a/shell.nix b/shell.nix new file mode 100644 index 0000000000..e6062b2cb0 --- /dev/null +++ b/shell.nix @@ -0,0 +1,10 @@ +let + moz_overlay = import (builtins.fetchTarball https://github.com/mozilla/nixpkgs-mozilla/archive/master.tar.gz); + nixpkgs = import { overlays = [ moz_overlay ]; }; + nightly = ((nixpkgs.rustChannelOf { date = "2019-09-01"; channel = "nightly"; }).rust.override { extensions = [ "rust-src" "rls-preview" "clippy-preview" "rust-analysis" "rustfmt-preview" ];}); +in +with nixpkgs; +stdenv.mkDerivation { + name = "nushell-rust"; + buildInputs = [ nightly openssl_1_1 pkg-config ]; +} From c2c10e2bc0254497c25968728641c4f1b9b7f135 Mon Sep 17 00:00:00 2001 From: Yehuda Katz Date: Sun, 6 Oct 2019 13:22:50 -0700 Subject: [PATCH 011/184] Overhaul the coloring system This commit replaces the previous naive coloring system with a coloring system that is more aligned with the parser. The main benefit of this change is that it allows us to use parsing rules to decide how to color tokens. For example, consider the following syntax: ``` $ ps | where cpu > 10 ``` Ideally, we could color `cpu` like a column name and not a string, because `cpu > 10` is a shorthand block syntax that expands to `{ $it.cpu > 10 }`. 
The way that we know that it's a shorthand block is that the `where` command declares that its first parameter is a `SyntaxShape::Block`, which allows the shorthand block form. In order to accomplish this, we need to color the tokens in a way that corresponds to their expanded semantics, which means that high-fidelity coloring requires expansion. This commit adds a `ColorSyntax` trait that corresponds to the `ExpandExpression` trait. The semantics are fairly similar, with a few differences. First `ExpandExpression` consumes N tokens and returns a single `hir::Expression`. `ColorSyntax` consumes N tokens and writes M `FlatShape` tokens to the output. Concretely, for syntax like `[1 2 3]` - `ExpandExpression` takes a single token node and produces a single `hir::Expression` - `ColorSyntax` takes the same token node and emits 7 `FlatShape`s (open delimiter, int, whitespace, int, whitespace, int, close delimiter) Second, `ColorSyntax` is more willing to plow through failures than `ExpandExpression`. In particular, consider syntax like ``` $ ps | where cpu > ``` In this case - `ExpandExpression` will see that the `where` command is expecting a block, see that it's not a literal block and try to parse it as a shorthand block. It will successfully find a member followed by an infix operator, but not a following expression. That means that the entire pipeline part fails to parse and is a syntax error. - `ColorSyntax` will also try to parse it as a shorthand block and ultimately fail, but it will fall back to "backoff coloring mode", which parses any unidentified tokens in an infallible, simple way. In this case, `cpu` will color as a string and `>` will color as an operator. Finally, it's very important that coloring a pipeline infallibly colors the entire string, doesn't fail, and doesn't get stuck in an infinite loop. In order to accomplish this, this PR separates `ColorSyntax`, which is infallible, from `FallibleColorSyntax`, which might fail. 
This allows the type system to let us know if our coloring rules bottom out at an infallible rule. It's not perfect: it's still possible for the coloring process to get stuck or consume tokens non-atomically. I intend to reduce the opportunity for those problems in a future commit. In the meantime, the current system catches a number of mistakes (like trying to use a fallible coloring rule in a loop without thinking about the possibility that it will never terminate). --- Cargo.toml | 2 +- src/cli.rs | 95 +-- src/commands/autoview.rs | 32 +- src/commands/classified.rs | 29 +- src/commands/config.rs | 13 +- src/commands/fetch.rs | 12 +- src/commands/open.rs | 12 +- src/commands/plugin.rs | 12 +- src/commands/post.rs | 16 +- src/commands/save.rs | 5 +- src/commands/to_csv.rs | 57 +- src/commands/to_tsv.rs | 70 +- src/context.rs | 10 +- src/data/base.rs | 48 +- src/data/config.rs | 22 +- src/data/meta.rs | 29 + src/errors.rs | 108 +-- src/parser.rs | 6 +- src/parser/hir/expand_external_tokens.rs | 88 ++- src/parser/hir/syntax_shape.rs | 655 +++++++++++++++++- src/parser/hir/syntax_shape/block.rs | 172 ++++- src/parser/hir/syntax_shape/expression.rs | 255 +++++-- .../hir/syntax_shape/expression/atom.rs | 541 +++++++++++++++ .../hir/syntax_shape/expression/delimited.rs | 67 +- .../hir/syntax_shape/expression/file_path.rs | 94 +-- .../hir/syntax_shape/expression/list.rs | 141 +++- .../hir/syntax_shape/expression/number.rs | 108 +-- .../hir/syntax_shape/expression/pattern.rs | 34 +- .../hir/syntax_shape/expression/string.rs | 36 +- .../hir/syntax_shape/expression/unit.rs | 31 +- .../syntax_shape/expression/variable_path.rs | 346 ++++++++- src/parser/hir/syntax_shape/flat_shape.rs | 95 +++ src/parser/hir/tokens_iterator.rs | 142 +++- src/parser/parse/flag.rs | 16 +- src/parser/parse/parser.rs | 53 +- src/parser/parse/token_tree.rs | 55 +- src/parser/parse/token_tree_builder.rs | 120 ++-- src/parser/parse/tokens.rs | 103 ++- src/parser/parse_command.rs | 232 ++++++- 
src/plugin.rs | 8 +- src/plugins/add.rs | 35 +- src/plugins/edit.rs | 26 +- src/plugins/embed.rs | 11 +- src/plugins/inc.rs | 41 +- src/plugins/match.rs | 44 +- src/plugins/str.rs | 31 +- src/plugins/sum.rs | 25 +- src/prelude.rs | 10 + src/shell/filesystem_shell.rs | 2 +- src/shell/helper.rs | 177 +++-- 50 files changed, 3527 insertions(+), 845 deletions(-) create mode 100644 src/parser/hir/syntax_shape/expression/atom.rs create mode 100644 src/parser/hir/syntax_shape/flat_shape.rs diff --git a/Cargo.toml b/Cargo.toml index 66bd695c08..80a077dd88 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -96,7 +96,7 @@ textview = ["syntect", "onig_sys", "crossterm"] binaryview = ["image", "crossterm"] sys = ["heim", "battery"] ps = ["heim"] -trace = ["nom-tracable/trace"] +# trace = ["nom-tracable/trace"] all = ["raw-key", "textview", "binaryview", "sys", "ps", "clipboard", "ptree"] [dependencies.rusqlite] diff --git a/src/cli.rs b/src/cli.rs index 6a35608d91..6c1ba5ef93 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -14,9 +14,9 @@ use crate::git::current_branch; use crate::parser::registry::Signature; use crate::parser::{ hir, - hir::syntax_shape::{CommandHeadShape, CommandSignature, ExpandSyntax}, + hir::syntax_shape::{expand_syntax, PipelineShape}, hir::{expand_external_tokens::expand_external_tokens, tokens_iterator::TokensIterator}, - parse_command_tail, Pipeline, PipelineElement, TokenNode, + TokenNode, }; use crate::prelude::*; @@ -99,11 +99,17 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel }, Err(e) => { trace!("incompatible plugin {:?}", input); - Err(ShellError::string(format!("Error: {:?}", e))) + Err(ShellError::untagged_runtime_error(format!( + "Error: {:?}", + e + ))) } } } - Err(e) => Err(ShellError::string(format!("Error: {:?}", e))), + Err(e) => Err(ShellError::untagged_runtime_error(format!( + "Error: {:?}", + e + ))), }; let _ = child.wait(); @@ -319,6 +325,7 @@ pub async fn cli() -> Result<(), Box> { )]); } } + let _ = 
load_plugins(&mut context); let config = Config::builder().color_mode(ColorMode::Forced).build(); @@ -347,9 +354,7 @@ pub async fn cli() -> Result<(), Box> { let cwd = context.shell_manager.path(); - rl.set_helper(Some(crate::shell::Helper::new( - context.shell_manager.clone(), - ))); + rl.set_helper(Some(crate::shell::Helper::new(context.clone()))); let edit_mode = config::config(Tag::unknown())? .get("edit_mode") @@ -476,7 +481,7 @@ async fn process_line(readline: Result, ctx: &mut Context Ok(line) => { let line = chomp_newline(line); - let result = match crate::parser::parse(&line, uuid::Uuid::new_v4()) { + let result = match crate::parser::parse(&line, uuid::Uuid::nil()) { Err(err) => { return LineResult::Error(line.to_string(), err); } @@ -614,74 +619,14 @@ fn classify_pipeline( context: &Context, source: &Text, ) -> Result { - let pipeline = pipeline.as_pipeline()?; + let mut pipeline_list = vec![pipeline.clone()]; + let mut iterator = TokensIterator::all(&mut pipeline_list, pipeline.tag()); - let Pipeline { parts, .. 
} = pipeline; - - let commands: Result, ShellError> = parts - .iter() - .map(|item| classify_command(&item, context, &source)) - .collect(); - - Ok(ClassifiedPipeline { - commands: commands?, - }) -} - -fn classify_command( - command: &Tagged, - context: &Context, - source: &Text, -) -> Result { - let mut iterator = TokensIterator::new(&command.tokens.item, command.tag, true); - - let head = CommandHeadShape - .expand_syntax(&mut iterator, &context.expand_context(source, command.tag))?; - - match &head { - CommandSignature::Expression(_) => Err(ShellError::syntax_error( - "Unexpected expression in command position".tagged(command.tag), - )), - - // If the command starts with `^`, treat it as an external command no matter what - CommandSignature::External(name) => { - let name_str = name.slice(source); - - external_command(&mut iterator, source, name_str.tagged(name)) - } - - CommandSignature::LiteralExternal { outer, inner } => { - let name_str = inner.slice(source); - - external_command(&mut iterator, source, name_str.tagged(outer)) - } - - CommandSignature::Internal(command) => { - let tail = parse_command_tail( - &command.signature(), - &context.expand_context(source, command.tag), - &mut iterator, - command.tag, - )?; - - let (positional, named) = match tail { - None => (None, None), - Some((positional, named)) => (positional, named), - }; - - let call = hir::Call { - head: Box::new(head.to_expression()), - positional, - named, - }; - - Ok(ClassifiedCommand::Internal(InternalCommand::new( - command.name().to_string(), - command.tag, - call, - ))) - } - } + expand_syntax( + &PipelineShape, + &mut iterator, + &context.expand_context(source, pipeline.tag()), + ) } // Classify this command as an external command, which doesn't give special meaning diff --git a/src/commands/autoview.rs b/src/commands/autoview.rs index 57ab6269b3..29e7d18121 100644 --- a/src/commands/autoview.rs +++ b/src/commands/autoview.rs @@ -58,21 +58,21 @@ pub fn autoview( } } }; - // } else if 
is_single_origined_text_value(&input) { - // let text = context.get_command("textview"); - // if let Some(text) = text { - // let result = text.run(raw.with_input(input), &context.commands); - // result.collect::>().await; - // } else { - // for i in input { - // match i.item { - // Value::Primitive(Primitive::String(s)) => { - // println!("{}", s); - // } - // _ => {} - // } - // } - // } + } else if is_single_anchored_text_value(&input) { + let text = context.get_command("textview"); + if let Some(text) = text { + let result = text.run(raw.with_input(input), &context.commands, false); + result.collect::>().await; + } else { + for i in input { + match i.item { + Value::Primitive(Primitive::String(s)) => { + println!("{}", s); + } + _ => {} + } + } + } } else if is_single_text_value(&input) { for i in input { match i.item { @@ -112,7 +112,7 @@ fn is_single_text_value(input: &Vec>) -> bool { } #[allow(unused)] -fn is_single_origined_text_value(input: &Vec>) -> bool { +fn is_single_anchored_text_value(input: &Vec>) -> bool { if input.len() != 1 { return false; } diff --git a/src/commands/classified.rs b/src/commands/classified.rs index d30025b944..c73a56fee4 100644 --- a/src/commands/classified.rs +++ b/src/commands/classified.rs @@ -72,6 +72,7 @@ impl ClassifiedInputStream { } } +#[derive(Debug)] pub(crate) struct ClassifiedPipeline { pub(crate) commands: Vec, } @@ -117,15 +118,19 @@ impl InternalCommand { let command = context.expect_command(&self.name); - let result = context.run_command( - command, - self.name_tag.clone(), - context.source_map.clone(), - self.args, - &source, - objects, - is_first_command, - ); + let result = { + let source_map = context.source_map.lock().unwrap().clone(); + + context.run_command( + command, + self.name_tag.clone(), + source_map, + self.args, + &source, + objects, + is_first_command, + ) + }; let result = trace_out_stream!(target: "nu::trace_stream::internal", source: &source, "output" = result); let mut result = result.values; 
@@ -253,7 +258,11 @@ impl ExternalCommand { tag, )); } else { - return Err(ShellError::string("Error: $it needs string data")); + return Err(ShellError::labeled_error( + "Error: $it needs string data", + "given something else", + name_tag, + )); } } if !first { diff --git a/src/commands/config.rs b/src/commands/config.rs index 3b36c88fad..337e3437f9 100644 --- a/src/commands/config.rs +++ b/src/commands/config.rs @@ -70,9 +70,9 @@ pub fn config( if let Some(v) = get { let key = v.to_string(); - let value = result - .get(&key) - .ok_or_else(|| ShellError::string(&format!("Missing key {} in config", key)))?; + let value = result.get(&key).ok_or_else(|| { + ShellError::labeled_error(&format!("Missing key in config"), "key", v.tag()) + })?; let mut results = VecDeque::new(); @@ -120,10 +120,11 @@ pub fn config( result.swap_remove(&key); config::write(&result, &configuration)?; } else { - return Err(ShellError::string(&format!( + return Err(ShellError::labeled_error( "{} does not exist in config", - key - ))); + "key", + v.tag(), + )); } let obj = VecDeque::from_iter(vec![Value::Row(result.into()).tagged(v.tag())]); diff --git a/src/commands/fetch.rs b/src/commands/fetch.rs index 21ef7fbfd9..e7966a61bf 100644 --- a/src/commands/fetch.rs +++ b/src/commands/fetch.rs @@ -44,11 +44,13 @@ fn run( registry: &CommandRegistry, raw_args: &RawCommandArgs, ) -> Result { - let path = match call_info - .args - .nth(0) - .ok_or_else(|| ShellError::string(&format!("No file or directory specified")))? - { + let path = match call_info.args.nth(0).ok_or_else(|| { + ShellError::labeled_error( + "No file or directory specified", + "for command", + call_info.name_tag, + ) + })? 
{ file => file, }; let path_buf = path.as_path()?; diff --git a/src/commands/open.rs b/src/commands/open.rs index 97b0df2744..6ea752e9da 100644 --- a/src/commands/open.rs +++ b/src/commands/open.rs @@ -45,11 +45,13 @@ fn run( let cwd = PathBuf::from(shell_manager.path()); let full_path = PathBuf::from(cwd); - let path = match call_info - .args - .nth(0) - .ok_or_else(|| ShellError::string(&format!("No file or directory specified")))? - { + let path = match call_info.args.nth(0).ok_or_else(|| { + ShellError::labeled_error( + "No file or directory specified", + "for command", + call_info.name_tag, + ) + })? { file => file, }; let path_buf = path.as_path()?; diff --git a/src/commands/plugin.rs b/src/commands/plugin.rs index e769a7b5c7..5dfbe6be5b 100644 --- a/src/commands/plugin.rs +++ b/src/commands/plugin.rs @@ -128,7 +128,7 @@ pub fn filter_plugin( }, Err(e) => { let mut result = VecDeque::new(); - result.push_back(Err(ShellError::string(format!( + result.push_back(Err(ShellError::untagged_runtime_error(format!( "Error while processing begin_filter response: {:?} {}", e, input )))); @@ -138,7 +138,7 @@ pub fn filter_plugin( } Err(e) => { let mut result = VecDeque::new(); - result.push_back(Err(ShellError::string(format!( + result.push_back(Err(ShellError::untagged_runtime_error(format!( "Error while reading begin_filter response: {:?}", e )))); @@ -189,7 +189,7 @@ pub fn filter_plugin( }, Err(e) => { let mut result = VecDeque::new(); - result.push_back(Err(ShellError::string(format!( + result.push_back(Err(ShellError::untagged_runtime_error(format!( "Error while processing end_filter response: {:?} {}", e, input )))); @@ -199,7 +199,7 @@ pub fn filter_plugin( } Err(e) => { let mut result = VecDeque::new(); - result.push_back(Err(ShellError::string(format!( + result.push_back(Err(ShellError::untagged_runtime_error(format!( "Error while reading end_filter: {:?}", e )))); @@ -236,7 +236,7 @@ pub fn filter_plugin( }, Err(e) => { let mut result = VecDeque::new(); - 
result.push_back(Err(ShellError::string(format!( + result.push_back(Err(ShellError::untagged_runtime_error(format!( "Error while processing filter response: {:?} {}", e, input )))); @@ -246,7 +246,7 @@ pub fn filter_plugin( } Err(e) => { let mut result = VecDeque::new(); - result.push_back(Err(ShellError::string(format!( + result.push_back(Err(ShellError::untagged_runtime_error(format!( "Error while reading filter response: {:?}", e )))); diff --git a/src/commands/post.rs b/src/commands/post.rs index 5a77afd14b..a82f1b42b1 100644 --- a/src/commands/post.rs +++ b/src/commands/post.rs @@ -55,18 +55,14 @@ fn run( raw_args: &RawCommandArgs, ) -> Result { let call_info = call_info.clone(); - let path = match call_info - .args - .nth(0) - .ok_or_else(|| ShellError::string(&format!("No url specified")))? - { + let path = match call_info.args.nth(0).ok_or_else(|| { + ShellError::labeled_error("No url specified", "for command", call_info.name_tag) + })? { file => file.clone(), }; - let body = match call_info - .args - .nth(1) - .ok_or_else(|| ShellError::string(&format!("No body specified")))? - { + let body = match call_info.args.nth(1).ok_or_else(|| { + ShellError::labeled_error("No body specified", "for command", call_info.name_tag) + })? 
{ file => file.clone(), }; let path_str = path.as_string()?; diff --git a/src/commands/save.rs b/src/commands/save.rs index 44e07da5ed..0156fc3557 100644 --- a/src/commands/save.rs +++ b/src/commands/save.rs @@ -150,7 +150,6 @@ fn save( } }, None => { - eprintln!("{:?} {:?}", anchor, source_map); yield Err(ShellError::labeled_error( "Save requires a filepath (2)", "needs path", @@ -213,9 +212,9 @@ fn save( match content { Ok(save_data) => match std::fs::write(full_path, save_data) { Ok(o) => o, - Err(e) => yield Err(ShellError::string(e.to_string())), + Err(e) => yield Err(ShellError::labeled_error(e.to_string(), "for command", name)), }, - Err(e) => yield Err(ShellError::string(e.to_string())), + Err(e) => yield Err(ShellError::labeled_error(e.to_string(), "for command", name)), } }; diff --git a/src/commands/to_csv.rs b/src/commands/to_csv.rs index 1897fb86b7..66121df53e 100644 --- a/src/commands/to_csv.rs +++ b/src/commands/to_csv.rs @@ -32,8 +32,8 @@ impl WholeStreamCommand for ToCSV { } } -pub fn value_to_csv_value(v: &Value) -> Value { - match v { +pub fn value_to_csv_value(v: &Tagged) -> Tagged { + match &v.item { Value::Primitive(Primitive::String(s)) => Value::Primitive(Primitive::String(s.clone())), Value::Primitive(Primitive::Nothing) => Value::Primitive(Primitive::Nothing), Value::Primitive(Primitive::Boolean(b)) => Value::Primitive(Primitive::Boolean(b.clone())), @@ -47,10 +47,11 @@ pub fn value_to_csv_value(v: &Value) -> Value { Value::Block(_) => Value::Primitive(Primitive::Nothing), _ => Value::Primitive(Primitive::Nothing), } + .tagged(v.tag) } -fn to_string_helper(v: &Value) -> Result { - match v { +fn to_string_helper(v: &Tagged) -> Result { + match &v.item { Value::Primitive(Primitive::Date(d)) => Ok(d.to_string()), Value::Primitive(Primitive::Bytes(b)) => Ok(format!("{}", b)), Value::Primitive(Primitive::Boolean(_)) => Ok(v.as_string()?), @@ -60,7 +61,7 @@ fn to_string_helper(v: &Value) -> Result { Value::Table(_) => return 
Ok(String::from("[Table]")), Value::Row(_) => return Ok(String::from("[Row]")), Value::Primitive(Primitive::String(s)) => return Ok(s.to_string()), - _ => return Err(ShellError::string("Unexpected value")), + _ => return Err(ShellError::labeled_error("Unexpected value", "", v.tag)), } } @@ -76,7 +77,9 @@ fn merge_descriptors(values: &[Tagged]) -> Vec { ret } -pub fn to_string(v: &Value) -> Result { +pub fn to_string(tagged_value: &Tagged) -> Result { + let v = &tagged_value.item; + match v { Value::Row(o) => { let mut wtr = WriterBuilder::new().from_writer(vec![]); @@ -92,11 +95,20 @@ pub fn to_string(v: &Value) -> Result { wtr.write_record(fields).expect("can not write."); wtr.write_record(values).expect("can not write."); - return Ok(String::from_utf8( - wtr.into_inner() - .map_err(|_| ShellError::string("Could not convert record"))?, - ) - .map_err(|_| ShellError::string("Could not convert record"))?); + return Ok(String::from_utf8(wtr.into_inner().map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + tagged_value.tag, + ) + })?) + .map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + tagged_value.tag, + ) + })?); } Value::Table(list) => { let mut wtr = WriterBuilder::new().from_writer(vec![]); @@ -120,13 +132,22 @@ pub fn to_string(v: &Value) -> Result { wtr.write_record(&row).expect("can not write"); } - return Ok(String::from_utf8( - wtr.into_inner() - .map_err(|_| ShellError::string("Could not convert record"))?, - ) - .map_err(|_| ShellError::string("Could not convert record"))?); + return Ok(String::from_utf8(wtr.into_inner().map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + tagged_value.tag, + ) + })?) 
+ .map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + tagged_value.tag, + ) + })?); } - _ => return to_string_helper(&v), + _ => return to_string_helper(tagged_value), } } @@ -148,7 +169,7 @@ fn to_csv( }; for value in to_process_input { - match to_string(&value_to_csv_value(&value.item)) { + match to_string(&value_to_csv_value(&value)) { Ok(x) => { let converted = if headerless { x.lines().skip(1).collect() diff --git a/src/commands/to_tsv.rs b/src/commands/to_tsv.rs index 4edc26face..7127a3195b 100644 --- a/src/commands/to_tsv.rs +++ b/src/commands/to_tsv.rs @@ -32,7 +32,9 @@ impl WholeStreamCommand for ToTSV { } } -pub fn value_to_tsv_value(v: &Value) -> Value { +pub fn value_to_tsv_value(tagged_value: &Tagged) -> Tagged { + let v = &tagged_value.item; + match v { Value::Primitive(Primitive::String(s)) => Value::Primitive(Primitive::String(s.clone())), Value::Primitive(Primitive::Nothing) => Value::Primitive(Primitive::Nothing), @@ -47,20 +49,28 @@ pub fn value_to_tsv_value(v: &Value) -> Value { Value::Block(_) => Value::Primitive(Primitive::Nothing), _ => Value::Primitive(Primitive::Nothing), } + .tagged(tagged_value.tag) } -fn to_string_helper(v: &Value) -> Result { +fn to_string_helper(tagged_value: &Tagged) -> Result { + let v = &tagged_value.item; match v { Value::Primitive(Primitive::Date(d)) => Ok(d.to_string()), Value::Primitive(Primitive::Bytes(b)) => Ok(format!("{}", b)), - Value::Primitive(Primitive::Boolean(_)) => Ok(v.as_string()?), - Value::Primitive(Primitive::Decimal(_)) => Ok(v.as_string()?), - Value::Primitive(Primitive::Int(_)) => Ok(v.as_string()?), - Value::Primitive(Primitive::Path(_)) => Ok(v.as_string()?), + Value::Primitive(Primitive::Boolean(_)) => Ok(tagged_value.as_string()?), + Value::Primitive(Primitive::Decimal(_)) => Ok(tagged_value.as_string()?), + Value::Primitive(Primitive::Int(_)) => Ok(tagged_value.as_string()?), + Value::Primitive(Primitive::Path(_)) => 
Ok(tagged_value.as_string()?), Value::Table(_) => return Ok(String::from("[table]")), Value::Row(_) => return Ok(String::from("[row]")), Value::Primitive(Primitive::String(s)) => return Ok(s.to_string()), - _ => return Err(ShellError::string("Unexpected value")), + _ => { + return Err(ShellError::labeled_error( + "Unexpected value", + "original value", + tagged_value.tag, + )) + } } } @@ -76,7 +86,9 @@ fn merge_descriptors(values: &[Tagged]) -> Vec { ret } -pub fn to_string(v: &Value) -> Result { +pub fn to_string(tagged_value: &Tagged) -> Result { + let v = &tagged_value.item; + match v { Value::Row(o) => { let mut wtr = WriterBuilder::new().delimiter(b'\t').from_writer(vec![]); @@ -91,11 +103,20 @@ pub fn to_string(v: &Value) -> Result { wtr.write_record(fields).expect("can not write."); wtr.write_record(values).expect("can not write."); - return Ok(String::from_utf8( - wtr.into_inner() - .map_err(|_| ShellError::string("Could not convert record"))?, - ) - .map_err(|_| ShellError::string("Could not convert record"))?); + return Ok(String::from_utf8(wtr.into_inner().map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + tagged_value.tag, + ) + })?) + .map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + tagged_value.tag, + ) + })?); } Value::Table(list) => { let mut wtr = WriterBuilder::new().delimiter(b'\t').from_writer(vec![]); @@ -119,13 +140,22 @@ pub fn to_string(v: &Value) -> Result { wtr.write_record(&row).expect("can not write"); } - return Ok(String::from_utf8( - wtr.into_inner() - .map_err(|_| ShellError::string("Could not convert record"))?, - ) - .map_err(|_| ShellError::string("Could not convert record"))?); + return Ok(String::from_utf8(wtr.into_inner().map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + tagged_value.tag, + ) + })?) 
+ .map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + tagged_value.tag, + ) + })?); } - _ => return to_string_helper(&v), + _ => return to_string_helper(tagged_value), } } @@ -147,7 +177,7 @@ fn to_tsv( }; for value in to_process_input { - match to_string(&value_to_tsv_value(&value.item)) { + match to_string(&value_to_tsv_value(&value)) { Ok(x) => { let converted = if headerless { x.lines().skip(1).collect() diff --git a/src/context.rs b/src/context.rs index 6c55aff5c4..a090898328 100644 --- a/src/context.rs +++ b/src/context.rs @@ -7,7 +7,7 @@ use indexmap::IndexMap; use serde::{Deserialize, Serialize}; use std::collections::HashMap; use std::error::Error; -use std::sync::Arc; +use std::sync::{Arc, Mutex}; use uuid::Uuid; #[derive(Clone, Debug, Serialize, Deserialize)] @@ -77,7 +77,7 @@ impl CommandRegistry { #[derive(Clone)] pub struct Context { registry: CommandRegistry, - pub(crate) source_map: SourceMap, + pub(crate) source_map: Arc>, host: Arc>, pub(crate) shell_manager: ShellManager, } @@ -99,7 +99,7 @@ impl Context { let registry = CommandRegistry::new(); Ok(Context { registry: registry.clone(), - source_map: SourceMap::new(), + source_map: Arc::new(Mutex::new(SourceMap::new())), host: Arc::new(Mutex::new(crate::env::host::BasicHost)), shell_manager: ShellManager::basic(registry)?, }) @@ -118,7 +118,9 @@ impl Context { } pub fn add_anchor_location(&mut self, uuid: Uuid, anchor_location: AnchorLocation) { - self.source_map.insert(uuid, anchor_location); + let mut source_map = self.source_map.lock().unwrap(); + + source_map.insert(uuid, anchor_location); } pub(crate) fn get_command(&self, name: &str) -> Option> { diff --git a/src/data/base.rs b/src/data/base.rs index 176560137f..735196c97f 100644 --- a/src/data/base.rs +++ b/src/data/base.rs @@ -298,7 +298,7 @@ impl fmt::Debug for ValueDebug<'_> { } impl Tagged { - pub(crate) fn tagged_type_name(&self) -> Tagged { + pub fn tagged_type_name(&self) -> Tagged { let 
name = self.type_name(); Tagged::from_item(name, self.tag()) } @@ -424,10 +424,27 @@ impl Tagged { Ok(out.tagged(self.tag)) } + + pub(crate) fn as_string(&self) -> Result { + match &self.item { + Value::Primitive(Primitive::String(s)) => Ok(s.clone()), + Value::Primitive(Primitive::Boolean(x)) => Ok(format!("{}", x)), + Value::Primitive(Primitive::Decimal(x)) => Ok(format!("{}", x)), + Value::Primitive(Primitive::Int(x)) => Ok(format!("{}", x)), + Value::Primitive(Primitive::Bytes(x)) => Ok(format!("{}", x)), + Value::Primitive(Primitive::Path(x)) => Ok(format!("{}", x.display())), + // TODO: this should definitely be more general with better errors + other => Err(ShellError::labeled_error( + "Expected string", + other.type_name(), + self.tag, + )), + } + } } impl Value { - pub(crate) fn type_name(&self) -> String { + pub fn type_name(&self) -> String { match self { Value::Primitive(p) => p.type_name(), Value::Row(_) => format!("row"), @@ -738,22 +755,6 @@ impl Value { } } - pub(crate) fn as_string(&self) -> Result { - match self { - Value::Primitive(Primitive::String(s)) => Ok(s.clone()), - Value::Primitive(Primitive::Boolean(x)) => Ok(format!("{}", x)), - Value::Primitive(Primitive::Decimal(x)) => Ok(format!("{}", x)), - Value::Primitive(Primitive::Int(x)) => Ok(format!("{}", x)), - Value::Primitive(Primitive::Bytes(x)) => Ok(format!("{}", x)), - Value::Primitive(Primitive::Path(x)) => Ok(format!("{}", x.display())), - // TODO: this should definitely be more general with better errors - other => Err(ShellError::string(format!( - "Expected string, got {:?}", - other - ))), - } - } - pub(crate) fn is_true(&self) -> bool { match self { Value::Primitive(Primitive::Boolean(true)) => true, @@ -806,9 +807,14 @@ impl Value { Value::Primitive(Primitive::Date(s.into())) } - pub fn date_from_str(s: &str) -> Result { - let date = DateTime::parse_from_rfc3339(s) - .map_err(|err| ShellError::string(&format!("Date parse error: {}", err)))?; + pub fn date_from_str(s: 
Tagged<&str>) -> Result { + let date = DateTime::parse_from_rfc3339(s.item).map_err(|err| { + ShellError::labeled_error( + &format!("Date parse error: {}", err), + "original value", + s.tag, + ) + })?; let date = date.with_timezone(&chrono::offset::Utc); diff --git a/src/data/config.rs b/src/data/config.rs index 1cb4533d8e..657287d2f2 100644 --- a/src/data/config.rs +++ b/src/data/config.rs @@ -51,8 +51,9 @@ pub fn user_data() -> Result { } pub fn app_path(app_data_type: AppDataType, display: &str) -> Result { - let path = app_root(app_data_type, &APP_INFO) - .map_err(|err| ShellError::string(&format!("Couldn't open {} path:\n{}", display, err)))?; + let path = app_root(app_data_type, &APP_INFO).map_err(|err| { + ShellError::untagged_runtime_error(&format!("Couldn't open {} path:\n{}", display, err)) + })?; Ok(path) } @@ -75,10 +76,21 @@ pub fn read( let tag = tag.into(); let contents = fs::read_to_string(filename) .map(|v| v.tagged(tag)) - .map_err(|err| ShellError::string(&format!("Couldn't read config file:\n{}", err)))?; + .map_err(|err| { + ShellError::labeled_error( + &format!("Couldn't read config file:\n{}", err), + "file name", + tag, + ) + })?; - let parsed: toml::Value = toml::from_str(&contents) - .map_err(|err| ShellError::string(&format!("Couldn't parse config file:\n{}", err)))?; + let parsed: toml::Value = toml::from_str(&contents).map_err(|err| { + ShellError::labeled_error( + &format!("Couldn't parse config file:\n{}", err), + "file name", + tag, + ) + })?; let value = convert_toml_value_to_nu_value(&parsed, tag); let tag = value.tag(); diff --git a/src/data/meta.rs b/src/data/meta.rs index b66b009cc2..08125359e4 100644 --- a/src/data/meta.rs +++ b/src/data/meta.rs @@ -240,6 +240,16 @@ impl Tag { } } + pub fn for_char(pos: usize, anchor: Uuid) -> Tag { + Tag { + anchor, + span: Span { + start: pos, + end: pos + 1, + }, + } + } + pub fn unknown_span(anchor: Uuid) -> Tag { Tag { anchor, @@ -267,6 +277,24 @@ impl Tag { } } + pub fn 
until_option(&self, other: Option>) -> Tag { + match other { + Some(other) => { + let other = other.into(); + debug_assert!( + self.anchor == other.anchor, + "Can only merge two tags with the same anchor" + ); + + Tag { + span: Span::new(self.span.start, other.span.end), + anchor: self.anchor, + } + } + None => *self, + } + } + pub fn slice<'a>(&self, source: &'a str) -> &'a str { self.span.slice(source) } @@ -284,6 +312,7 @@ impl Tag { } } +#[allow(unused)] pub fn tag_for_tagged_list(mut iter: impl Iterator) -> Tag { let first = iter.next(); diff --git a/src/errors.rs b/src/errors.rs index a070f6f54e..2d42552250 100644 --- a/src/errors.rs +++ b/src/errors.rs @@ -20,6 +20,14 @@ impl Description { Description::Synthetic(s) => Err(s), } } + + #[allow(unused)] + fn tag(&self) -> Tag { + match self { + Description::Source(tagged) => tagged.tag, + Description::Synthetic(_) => Tag::unknown(), + } + } } #[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Serialize, Deserialize)] @@ -36,6 +44,13 @@ pub struct ShellError { cause: Option>, } +impl ShellError { + #[allow(unused)] + pub(crate) fn tag(&self) -> Option { + self.error.tag() + } +} + impl ToDebug for ShellError { fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { self.error.fmt_debug(f, source) @@ -47,12 +62,12 @@ impl serde::de::Error for ShellError { where T: std::fmt::Display, { - ShellError::string(msg.to_string()) + ShellError::untagged_runtime_error(msg.to_string()) } } impl ShellError { - pub(crate) fn type_error( + pub fn type_error( expected: impl Into, actual: Tagged>, ) -> ShellError { @@ -63,6 +78,13 @@ impl ShellError { .start() } + pub fn untagged_runtime_error(error: impl Into) -> ShellError { + ProximateShellError::UntaggedRuntimeError { + reason: error.into(), + } + .start() + } + pub(crate) fn unexpected_eof(expected: impl Into, tag: Tag) -> ShellError { ProximateShellError::UnexpectedEof { expected: expected.into(), @@ -174,9 +196,6 @@ impl ShellError { pub(crate) fn 
to_diagnostic(self) -> Diagnostic { match self.error { - ProximateShellError::String(StringError { title, .. }) => { - Diagnostic::new(Severity::Error, title) - } ProximateShellError::InvalidCommand { command } => { Diagnostic::new(Severity::Error, "Invalid command") .with_label(Label::new_primary(command)) @@ -286,7 +305,7 @@ impl ShellError { } => Diagnostic::new(Severity::Error, "Syntax Error") .with_label(Label::new_primary(tag).with_message(item)), - ProximateShellError::MissingProperty { subpath, expr } => { + ProximateShellError::MissingProperty { subpath, expr, .. } => { let subpath = subpath.into_label(); let expr = expr.into_label(); @@ -310,6 +329,8 @@ impl ShellError { .with_label(Label::new_primary(left.tag()).with_message(left.item)) .with_label(Label::new_secondary(right.tag()).with_message(right.item)) } + + ProximateShellError::UntaggedRuntimeError { reason } => Diagnostic::new(Severity::Error, format!("Error: {}", reason)) } } @@ -343,20 +364,16 @@ impl ShellError { ) } - pub fn string(title: impl Into) -> ShellError { - ProximateShellError::String(StringError::new(title.into(), Value::nothing())).start() - } - pub(crate) fn unimplemented(title: impl Into) -> ShellError { - ShellError::string(&format!("Unimplemented: {}", title.into())) + ShellError::untagged_runtime_error(&format!("Unimplemented: {}", title.into())) } pub(crate) fn unexpected(title: impl Into) -> ShellError { - ShellError::string(&format!("Unexpected: {}", title.into())) + ShellError::untagged_runtime_error(&format!("Unexpected: {}", title.into())) } pub(crate) fn unreachable(title: impl Into) -> ShellError { - ShellError::string(&format!("BUG: Unreachable: {}", title.into())) + ShellError::untagged_runtime_error(&format!("BUG: Unreachable: {}", title.into())) } } @@ -401,7 +418,6 @@ impl ExpectedRange { #[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Serialize, Deserialize)] pub enum ProximateShellError { - String(StringError), SyntaxError { problem: Tagged, }, @@ -419,6 
+435,7 @@ pub enum ProximateShellError { MissingProperty { subpath: Description, expr: Description, + tag: Tag, }, MissingValue { tag: Option, @@ -439,6 +456,9 @@ pub enum ProximateShellError { left: Tagged, right: Tagged, }, + UntaggedRuntimeError { + reason: String, + }, } impl ProximateShellError { @@ -448,6 +468,22 @@ impl ProximateShellError { error: self, } } + + pub(crate) fn tag(&self) -> Option { + Some(match self { + ProximateShellError::SyntaxError { problem } => problem.tag(), + ProximateShellError::UnexpectedEof { tag, .. } => *tag, + ProximateShellError::InvalidCommand { command } => *command, + ProximateShellError::TypeError { actual, .. } => actual.tag, + ProximateShellError::MissingProperty { tag, .. } => *tag, + ProximateShellError::MissingValue { tag, .. } => return *tag, + ProximateShellError::ArgumentError { tag, .. } => *tag, + ProximateShellError::RangeError { actual_kind, .. } => actual_kind.tag, + ProximateShellError::Diagnostic(..) => return None, + ProximateShellError::UntaggedRuntimeError { .. } => return None, + ProximateShellError::CoerceError { left, right } => left.tag.until(right.tag), + }) + } } impl ToDebug for ProximateShellError { @@ -491,7 +527,6 @@ pub struct StringError { impl std::fmt::Display for ShellError { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match &self.error { - ProximateShellError::String(s) => write!(f, "{}", &s.title), ProximateShellError::MissingValue { .. } => write!(f, "MissingValue"), ProximateShellError::InvalidCommand { .. } => write!(f, "InvalidCommand"), ProximateShellError::TypeError { .. } => write!(f, "TypeError"), @@ -502,6 +537,7 @@ impl std::fmt::Display for ShellError { ProximateShellError::ArgumentError { .. } => write!(f, "ArgumentError"), ProximateShellError::Diagnostic(_) => write!(f, ""), ProximateShellError::CoerceError { .. } => write!(f, "CoerceError"), + ProximateShellError::UntaggedRuntimeError { .. 
} => write!(f, "UntaggedRuntimeError"), } } } @@ -510,71 +546,43 @@ impl std::error::Error for ShellError {} impl std::convert::From> for ShellError { fn from(input: Box) -> ShellError { - ProximateShellError::String(StringError { - title: format!("{}", input), - error: Value::nothing(), - }) - .start() + ShellError::untagged_runtime_error(format!("{}", input)) } } impl std::convert::From for ShellError { fn from(input: std::io::Error) -> ShellError { - ProximateShellError::String(StringError { - title: format!("{}", input), - error: Value::nothing(), - }) - .start() + ShellError::untagged_runtime_error(format!("{}", input)) } } impl std::convert::From for ShellError { fn from(input: subprocess::PopenError) -> ShellError { - ProximateShellError::String(StringError { - title: format!("{}", input), - error: Value::nothing(), - }) - .start() + ShellError::untagged_runtime_error(format!("{}", input)) } } impl std::convert::From for ShellError { fn from(input: serde_yaml::Error) -> ShellError { - ProximateShellError::String(StringError { - title: format!("{:?}", input), - error: Value::nothing(), - }) - .start() + ShellError::untagged_runtime_error(format!("{:?}", input)) } } impl std::convert::From for ShellError { fn from(input: toml::ser::Error) -> ShellError { - ProximateShellError::String(StringError { - title: format!("{:?}", input), - error: Value::nothing(), - }) - .start() + ShellError::untagged_runtime_error(format!("{:?}", input)) } } impl std::convert::From for ShellError { fn from(input: serde_json::Error) -> ShellError { - ProximateShellError::String(StringError { - title: format!("{:?}", input), - error: Value::nothing(), - }) - .start() + ShellError::untagged_runtime_error(format!("{:?}", input)) } } impl std::convert::From> for ShellError { fn from(input: Box) -> ShellError { - ProximateShellError::String(StringError { - title: format!("{:?}", input), - error: Value::nothing(), - }) - .start() + ShellError::untagged_runtime_error(format!("{:?}", input)) 
} } diff --git a/src/parser.rs b/src/parser.rs index 5fcfaaa27e..3fd853c85c 100644 --- a/src/parser.rs +++ b/src/parser.rs @@ -7,18 +7,18 @@ pub(crate) mod registry; use crate::errors::ShellError; pub(crate) use deserializer::ConfigDeserializer; +pub(crate) use hir::syntax_shape::flat_shape::FlatShape; pub(crate) use hir::TokensIterator; pub(crate) use parse::call_node::CallNode; pub(crate) use parse::files::Files; -pub(crate) use parse::flag::Flag; +pub(crate) use parse::flag::{Flag, FlagKind}; pub(crate) use parse::operator::Operator; pub(crate) use parse::parser::{nom_input, pipeline}; pub(crate) use parse::pipeline::{Pipeline, PipelineElement}; pub(crate) use parse::text::Text; pub(crate) use parse::token_tree::{DelimitedNode, Delimiter, TokenNode}; -pub(crate) use parse::tokens::{RawToken, Token}; +pub(crate) use parse::tokens::{RawNumber, RawToken}; pub(crate) use parse::unit::Unit; -pub(crate) use parse_command::parse_command_tail; pub(crate) use registry::CommandRegistry; pub fn parse(input: &str, anchor: uuid::Uuid) -> Result { diff --git a/src/parser/hir/expand_external_tokens.rs b/src/parser/hir/expand_external_tokens.rs index 30a2a90aaf..238cb4b01b 100644 --- a/src/parser/hir/expand_external_tokens.rs +++ b/src/parser/hir/expand_external_tokens.rs @@ -1,5 +1,11 @@ use crate::errors::ShellError; -use crate::parser::{TokenNode, TokensIterator}; +use crate::parser::{ + hir::syntax_shape::{ + color_syntax, expand_atom, AtomicToken, ColorSyntax, ExpandContext, ExpansionRule, + MaybeSpaceShape, + }, + FlatShape, TokenNode, TokensIterator, +}; use crate::{Tag, Tagged, Text}; pub fn expand_external_tokens( @@ -19,6 +25,34 @@ pub fn expand_external_tokens( Ok(out) } +#[derive(Debug, Copy, Clone)] +pub struct ExternalTokensShape; + +impl ColorSyntax for ExternalTokensShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> 
Self::Info { + loop { + // Allow a space + color_syntax(&MaybeSpaceShape, token_nodes, context, shapes); + + // Process an external expression. External expressions are mostly words, with a + // few exceptions (like $variables and path expansion rules) + match color_syntax(&ExternalExpression, token_nodes, context, shapes).1 { + ExternalExpressionResult::Eof => break, + ExternalExpressionResult::Processed => continue, + } + } + } +} + pub fn expand_next_expression( token_nodes: &mut TokensIterator<'_>, ) -> Result, ShellError> { @@ -48,16 +82,15 @@ pub fn expand_next_expression( fn triage_external_head(node: &TokenNode) -> Result { Ok(match node { TokenNode::Token(token) => token.tag(), - TokenNode::Call(_call) => unimplemented!(), - TokenNode::Nodes(_nodes) => unimplemented!(), - TokenNode::Delimited(_delimited) => unimplemented!(), - TokenNode::Pipeline(_pipeline) => unimplemented!(), + TokenNode::Call(_call) => unimplemented!("TODO: OMG"), + TokenNode::Nodes(_nodes) => unimplemented!("TODO: OMG"), + TokenNode::Delimited(_delimited) => unimplemented!("TODO: OMG"), + TokenNode::Pipeline(_pipeline) => unimplemented!("TODO: OMG"), TokenNode::Flag(flag) => flag.tag(), - TokenNode::Member(member) => *member, TokenNode::Whitespace(_whitespace) => { unreachable!("This function should be called after next_non_ws()") } - TokenNode::Error(_error) => unimplemented!(), + TokenNode::Error(_error) => unimplemented!("TODO: OMG"), }) } @@ -73,7 +106,7 @@ fn triage_continuation<'a, 'b>( match &node { node if node.is_whitespace() => return Ok(None), - TokenNode::Token(..) | TokenNode::Flag(..) | TokenNode::Member(..) => {} + TokenNode::Token(..) | TokenNode::Flag(..) => {} TokenNode::Call(..) => unimplemented!("call"), TokenNode::Nodes(..) => unimplemented!("nodes"), TokenNode::Delimited(..) 
=> unimplemented!("delimited"), @@ -85,3 +118,42 @@ fn triage_continuation<'a, 'b>( peeked.commit(); Ok(Some(node.tag())) } + +#[must_use] +enum ExternalExpressionResult { + Eof, + Processed, +} + +#[derive(Debug, Copy, Clone)] +struct ExternalExpression; + +impl ColorSyntax for ExternalExpression { + type Info = ExternalExpressionResult; + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> ExternalExpressionResult { + let atom = match expand_atom( + token_nodes, + "external word", + context, + ExpansionRule::permissive(), + ) { + Err(_) => unreachable!("TODO: separate infallible expand_atom"), + Ok(Tagged { + item: AtomicToken::Eof { .. }, + .. + }) => return ExternalExpressionResult::Eof, + Ok(atom) => atom, + }; + + atom.color_tokens(shapes); + return ExternalExpressionResult::Processed; + } +} diff --git a/src/parser/hir/syntax_shape.rs b/src/parser/hir/syntax_shape.rs index 5dcbd0fb76..1a140d86bd 100644 --- a/src/parser/hir/syntax_shape.rs +++ b/src/parser/hir/syntax_shape.rs @@ -1,34 +1,45 @@ mod block; mod expression; +pub(crate) mod flat_shape; use crate::cli::external_command; -use crate::commands::{classified::InternalCommand, ClassifiedCommand, Command}; +use crate::commands::{ + classified::{ClassifiedPipeline, InternalCommand}, + ClassifiedCommand, Command, +}; +use crate::parser::hir::expand_external_tokens::ExternalTokensShape; use crate::parser::hir::syntax_shape::block::AnyBlockShape; use crate::parser::hir::tokens_iterator::Peeked; -use crate::parser::parse_command::parse_command_tail; +use crate::parser::parse_command::{parse_command_tail, CommandTailShape}; +use crate::parser::PipelineElement; use crate::parser::{ hir, hir::{debug_tokens, TokensIterator}, - Operator, RawToken, TokenNode, + Operator, Pipeline, RawToken, TokenNode, }; use crate::prelude::*; use derive_new::new; use getset::Getters; -use log::trace; +use log::{self, 
log_enabled, trace}; use serde::{Deserialize, Serialize}; use std::path::{Path, PathBuf}; +pub(crate) use self::expression::atom::{expand_atom, AtomicToken, ExpansionRule}; +pub(crate) use self::expression::delimited::{ + color_delimited_square, expand_delimited_square, DelimitedShape, +}; pub(crate) use self::expression::file_path::FilePathShape; -pub(crate) use self::expression::list::ExpressionListShape; +pub(crate) use self::expression::list::{BackoffColoringMode, ExpressionListShape}; pub(crate) use self::expression::number::{IntShape, NumberShape}; -pub(crate) use self::expression::pattern::PatternShape; +pub(crate) use self::expression::pattern::{BarePatternShape, PatternShape}; pub(crate) use self::expression::string::StringShape; pub(crate) use self::expression::unit::UnitShape; pub(crate) use self::expression::variable_path::{ - ColumnPathShape, DotShape, ExpressionContinuation, ExpressionContinuationShape, MemberShape, - PathTailShape, VariablePathShape, + ColorableDotShape, ColumnPathShape, DotShape, ExpressionContinuation, + ExpressionContinuationShape, MemberShape, PathTailShape, VariablePathShape, }; pub(crate) use self::expression::{continue_expression, AnyExpressionShape}; +pub(crate) use self::flat_shape::FlatShape; #[derive(Debug, Copy, Clone, Serialize, Deserialize)] pub enum SyntaxShape { @@ -41,9 +52,56 @@ pub enum SyntaxShape { Int, Path, Pattern, - Binary, Block, - Boolean, +} + +impl FallibleColorSyntax for SyntaxShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + match self { + SyntaxShape::Any => { + color_fallible_syntax(&AnyExpressionShape, token_nodes, context, shapes) + } + SyntaxShape::List => { + color_syntax(&ExpressionListShape, token_nodes, context, shapes); + Ok(()) + } + SyntaxShape::Int => color_fallible_syntax(&IntShape, token_nodes, context, shapes), + 
SyntaxShape::String => color_fallible_syntax_with( + &StringShape, + &FlatShape::String, + token_nodes, + context, + shapes, + ), + SyntaxShape::Member => { + color_fallible_syntax(&MemberShape, token_nodes, context, shapes) + } + SyntaxShape::ColumnPath => { + color_fallible_syntax(&ColumnPathShape, token_nodes, context, shapes) + } + SyntaxShape::Number => { + color_fallible_syntax(&NumberShape, token_nodes, context, shapes) + } + SyntaxShape::Path => { + color_fallible_syntax(&FilePathShape, token_nodes, context, shapes) + } + SyntaxShape::Pattern => { + color_fallible_syntax(&PatternShape, token_nodes, context, shapes) + } + SyntaxShape::Block => { + color_fallible_syntax(&AnyBlockShape, token_nodes, context, shapes) + } + } + } } impl ExpandExpression for SyntaxShape { @@ -73,9 +131,7 @@ impl ExpandExpression for SyntaxShape { SyntaxShape::Number => expand_expr(&NumberShape, token_nodes, context), SyntaxShape::Path => expand_expr(&FilePathShape, token_nodes, context), SyntaxShape::Pattern => expand_expr(&PatternShape, token_nodes, context), - SyntaxShape::Binary => Err(ShellError::unimplemented("SyntaxShape:Binary")), SyntaxShape::Block => expand_expr(&AnyBlockShape, token_nodes, context), - SyntaxShape::Boolean => Err(ShellError::unimplemented("SyntaxShape:Boolean")), } } } @@ -92,9 +148,7 @@ impl std::fmt::Display for SyntaxShape { SyntaxShape::Number => write!(f, "Number"), SyntaxShape::Path => write!(f, "Path"), SyntaxShape::Pattern => write!(f, "Pattern"), - SyntaxShape::Binary => write!(f, "Binary"), SyntaxShape::Block => write!(f, "Block"), - SyntaxShape::Boolean => write!(f, "Boolean"), } } } @@ -148,6 +202,50 @@ pub trait ExpandExpression: std::fmt::Debug + Copy { ) -> Result; } +pub trait FallibleColorSyntax: std::fmt::Debug + Copy { + type Info; + type Input; + + fn color_syntax<'a, 'b>( + &self, + input: &Self::Input, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result; +} + +pub trait 
ColorSyntax: std::fmt::Debug + Copy { + type Info; + type Input; + + fn color_syntax<'a, 'b>( + &self, + input: &Self::Input, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Self::Info; +} + +// impl ColorSyntax for T +// where +// T: FallibleColorSyntax, +// { +// type Info = Result; +// type Input = T::Input; + +// fn color_syntax<'a, 'b>( +// &self, +// input: &Self::Input, +// token_nodes: &'b mut TokensIterator<'a>, +// context: &ExpandContext, +// shapes: &mut Vec>, +// ) -> Result { +// FallibleColorSyntax::color_syntax(self, input, token_nodes, context, shapes) +// } +// } + pub(crate) trait ExpandSyntax: std::fmt::Debug + Copy { type Output: std::fmt::Debug; @@ -180,6 +278,130 @@ pub(crate) fn expand_syntax<'a, 'b, T: ExpandSyntax>( } } +pub fn color_syntax<'a, 'b, T: ColorSyntax, U>( + shape: &T, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, +) -> ((), U) { + trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes, context.source)); + + let len = shapes.len(); + let result = shape.color_syntax(&(), token_nodes, context, shapes); + + trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source)); + + if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { + trace!(target: "nu::color_syntax", "after {}", std::any::type_name::()); + + if len < shapes.len() { + for i in len..(shapes.len()) { + trace!(target: "nu::color_syntax", "new shape :: {:?}", shapes[i]); + } + } else { + trace!(target: "nu::color_syntax", "no new shapes"); + } + } + + ((), result) +} + +pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax, U>( + shape: &T, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, +) -> Result { + trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes, context.source)); + + if 
token_nodes.at_end() { + trace!(target: "nu::color_syntax", "at eof"); + return Err(ShellError::unexpected_eof("coloring", Tag::unknown())); + } + + let len = shapes.len(); + let result = shape.color_syntax(&(), token_nodes, context, shapes); + + trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source)); + + if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { + trace!(target: "nu::color_syntax", "after {}", std::any::type_name::()); + + if len < shapes.len() { + for i in len..(shapes.len()) { + trace!(target: "nu::color_syntax", "new shape :: {:?}", shapes[i]); + } + } else { + trace!(target: "nu::color_syntax", "no new shapes"); + } + } + + result +} + +pub fn color_syntax_with<'a, 'b, T: ColorSyntax, U, I>( + shape: &T, + input: &I, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, +) -> ((), U) { + trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes, context.source)); + + let len = shapes.len(); + let result = shape.color_syntax(input, token_nodes, context, shapes); + + trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source)); + + if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { + trace!(target: "nu::color_syntax", "after {}", std::any::type_name::()); + + if len < shapes.len() { + for i in len..(shapes.len()) { + trace!(target: "nu::color_syntax", "new shape :: {:?}", shapes[i]); + } + } else { + trace!(target: "nu::color_syntax", "no new shapes"); + } + } + + ((), result) +} + +pub fn color_fallible_syntax_with<'a, 'b, T: FallibleColorSyntax, U, I>( + shape: &T, + input: &I, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, +) -> Result { + trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes, context.source)); + + if token_nodes.at_end() { + trace!(target: "nu::color_syntax", 
"at eof"); + return Err(ShellError::unexpected_eof("coloring", Tag::unknown())); + } + + let len = shapes.len(); + let result = shape.color_syntax(input, token_nodes, context, shapes); + + trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source)); + + if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { + trace!(target: "nu::color_syntax", "after {}", std::any::type_name::()); + + if len < shapes.len() { + for i in len..(shapes.len()) { + trace!(target: "nu::color_syntax", "new shape :: {:?}", shapes[i]); + } + } else { + trace!(target: "nu::color_syntax", "no new shapes"); + } + } + + result +} + pub(crate) fn expand_expr<'a, 'b, T: ExpandExpression>( shape: &T, token_nodes: &'b mut TokensIterator<'a>, @@ -314,6 +536,33 @@ impl ExpandSyntax for BarePathShape { #[derive(Debug, Copy, Clone)] pub struct BareShape; +impl FallibleColorSyntax for BareShape { + type Info = (); + type Input = FlatShape; + + fn color_syntax<'a, 'b>( + &self, + input: &FlatShape, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + token_nodes.peek_any_token(|token| match token { + // If it's a bare token, color it + TokenNode::Token(Tagged { + item: RawToken::Bare, + tag, + }) => { + shapes.push((*input).tagged(tag)); + Ok(()) + } + + // otherwise, fail + other => Err(ShellError::type_error("word", other.tagged_type_name())), + }) + } +} + impl ExpandSyntax for BareShape { type Output = Tagged; @@ -383,9 +632,129 @@ impl CommandSignature { } } +#[derive(Debug, Copy, Clone)] +pub struct PipelineShape; + +// The failure mode is if the head of the token stream is not a pipeline +impl FallibleColorSyntax for PipelineShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + // Make sure we're looking at a pipeline + 
let Pipeline { parts, .. } = token_nodes.peek_any_token(|node| node.as_pipeline())?; + + // Enumerate the pipeline parts + for part in parts { + // If the pipeline part has a prefix `|`, emit a pipe to color + if let Some(pipe) = part.pipe { + shapes.push(FlatShape::Pipe.tagged(pipe)); + } + + // Create a new iterator containing the tokens in the pipeline part to color + let mut token_nodes = TokensIterator::new(&part.tokens.item, part.tag, false); + + color_syntax(&MaybeSpaceShape, &mut token_nodes, context, shapes); + color_syntax(&CommandShape, &mut token_nodes, context, shapes); + } + + Ok(()) + } +} + +impl ExpandSyntax for PipelineShape { + type Output = ClassifiedPipeline; + fn expand_syntax<'a, 'b>( + &self, + iterator: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + let source = context.source; + + let peeked = iterator.peek_any().not_eof("pipeline")?; + let pipeline = peeked.node.as_pipeline()?; + peeked.commit(); + + let Pipeline { parts, .. } = pipeline; + + let commands: Result, ShellError> = parts + .iter() + .map(|item| classify_command(&item, context, &source)) + .collect(); + + Ok(ClassifiedPipeline { + commands: commands?, + }) + } +} + +pub enum CommandHeadKind { + External, + Internal(Signature), +} + #[derive(Debug, Copy, Clone)] pub struct CommandHeadShape; +impl FallibleColorSyntax for CommandHeadShape { + type Info = CommandHeadKind; + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result { + // If we don't ultimately find a token, roll back + token_nodes.atomic(|token_nodes| { + // First, take a look at the next token + let atom = expand_atom( + token_nodes, + "command head", + context, + ExpansionRule::permissive(), + )?; + + match atom.item { + // If the head is an explicit external command (^cmd), color it as an external command + AtomicToken::ExternalCommand { command } => { + 
shapes.push(FlatShape::ExternalCommand.tagged(command)); + Ok(CommandHeadKind::External) + } + + // If the head is a word, it depends on whether it matches a registered internal command + AtomicToken::Word { text } => { + let name = text.slice(context.source); + + if context.registry.has(name) { + // If the registry has the command, color it as an internal command + shapes.push(FlatShape::InternalCommand.tagged(text)); + let command = context.registry.expect_command(name); + Ok(CommandHeadKind::Internal(command.signature())) + } else { + // Otherwise, color it as an external command + shapes.push(FlatShape::ExternalCommand.tagged(text)); + Ok(CommandHeadKind::External) + } + } + + // Otherwise, we're not actually looking at a command + _ => Err(ShellError::syntax_error( + "No command at the head".tagged(atom.tag), + )), + } + }) + } +} + impl ExpandSyntax for CommandHeadShape { type Output = CommandSignature; @@ -395,7 +764,7 @@ impl ExpandSyntax for CommandHeadShape { context: &ExpandContext, ) -> Result { let node = - parse_single_node_skipping_ws(token_nodes, "command head1", |token, token_tag| { + parse_single_node_skipping_ws(token_nodes, "command head1", |token, token_tag, _| { Ok(match token { RawToken::ExternalCommand(tag) => CommandSignature::LiteralExternal { outer: token_tag, @@ -488,6 +857,44 @@ impl ExpandSyntax for ClassifiedCommandShape { #[derive(Debug, Copy, Clone)] pub struct InternalCommandHeadShape; +impl FallibleColorSyntax for InternalCommandHeadShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let peeked_head = token_nodes.peek_non_ws().not_eof("command head4"); + + let peeked_head = match peeked_head { + Err(_) => return Ok(()), + Ok(peeked_head) => peeked_head, + }; + + let _expr = match peeked_head.node { + TokenNode::Token(Tagged { + item: RawToken::Bare, + tag, + }) 
=> shapes.push(FlatShape::Word.tagged(tag)), + + TokenNode::Token(Tagged { + item: RawToken::String(_inner_tag), + tag, + }) => shapes.push(FlatShape::String.tagged(tag)), + + _node => shapes.push(FlatShape::Error.tagged(peeked_head.node.tag())), + }; + + peeked_head.commit(); + + Ok(()) + } +} + impl ExpandExpression for InternalCommandHeadShape { fn expand_expr( &self, @@ -523,33 +930,52 @@ impl ExpandExpression for InternalCommandHeadShape { } } +pub(crate) struct SingleError<'token> { + expected: &'static str, + node: &'token Tagged, +} + +impl<'token> SingleError<'token> { + pub(crate) fn error(&self) -> ShellError { + ShellError::type_error(self.expected, self.node.type_name().tagged(self.node.tag)) + } +} + fn parse_single_node<'a, 'b, T>( token_nodes: &'b mut TokensIterator<'a>, expected: &'static str, - callback: impl FnOnce(RawToken, Tag) -> Result, + callback: impl FnOnce(RawToken, Tag, SingleError) -> Result, ) -> Result { - let peeked = token_nodes.peek_any().not_eof(expected)?; + token_nodes.peek_any_token(|node| match node { + TokenNode::Token(token) => callback( + token.item, + token.tag(), + SingleError { + expected, + node: token, + }, + ), - let expr = match peeked.node { - TokenNode::Token(token) => callback(token.item, token.tag())?, - - other => return Err(ShellError::type_error(expected, other.tagged_type_name())), - }; - - peeked.commit(); - - Ok(expr) + other => Err(ShellError::type_error(expected, other.tagged_type_name())), + }) } fn parse_single_node_skipping_ws<'a, 'b, T>( token_nodes: &'b mut TokensIterator<'a>, expected: &'static str, - callback: impl FnOnce(RawToken, Tag) -> Result, + callback: impl FnOnce(RawToken, Tag, SingleError) -> Result, ) -> Result { let peeked = token_nodes.peek_non_ws().not_eof(expected)?; let expr = match peeked.node { - TokenNode::Token(token) => callback(token.item, token.tag())?, + TokenNode::Token(token) => callback( + token.item, + token.tag(), + SingleError { + expected, + node: token, + }, + )?, 
other => return Err(ShellError::type_error(expected, other.tagged_type_name())), }; @@ -562,6 +988,36 @@ fn parse_single_node_skipping_ws<'a, 'b, T>( #[derive(Debug, Copy, Clone)] pub struct WhitespaceShape; +impl FallibleColorSyntax for WhitespaceShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let peeked = token_nodes.peek_any().not_eof("whitespace"); + + let peeked = match peeked { + Err(_) => return Ok(()), + Ok(peeked) => peeked, + }; + + let _tag = match peeked.node { + TokenNode::Whitespace(tag) => shapes.push(FlatShape::Whitespace.tagged(tag)), + + _other => return Ok(()), + }; + + peeked.commit(); + + Ok(()) + } +} + impl ExpandSyntax for WhitespaceShape { type Output = Tag; @@ -626,6 +1082,65 @@ pub struct MaybeSpacedExpression { inner: T, } +#[derive(Debug, Copy, Clone)] +pub struct MaybeSpaceShape; + +impl ColorSyntax for MaybeSpaceShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Self::Info { + let peeked = token_nodes.peek_any().not_eof("whitespace"); + + let peeked = match peeked { + Err(_) => return, + Ok(peeked) => peeked, + }; + + if let TokenNode::Whitespace(tag) = peeked.node { + peeked.commit(); + shapes.push(FlatShape::Whitespace.tagged(tag)); + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct SpaceShape; + +impl FallibleColorSyntax for SpaceShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let peeked = token_nodes.peek_any().not_eof("whitespace")?; + + match peeked.node { + TokenNode::Whitespace(tag) => { + peeked.commit(); + 
shapes.push(FlatShape::Whitespace.tagged(tag)); + Ok(()) + } + + other => Err(ShellError::type_error( + "whitespace", + other.tagged_type_name(), + )), + } + } +} + impl ExpandExpression for MaybeSpacedExpression { fn expand_expr<'a, 'b>( &self, @@ -660,3 +1175,87 @@ fn expand_variable(tag: Tag, token_tag: Tag, source: &Text) -> hir::Expression { hir::Expression::variable(tag, token_tag) } } + +fn classify_command( + command: &Tagged, + context: &ExpandContext, + source: &Text, +) -> Result { + let mut iterator = TokensIterator::new(&command.tokens.item, command.tag, true); + + let head = CommandHeadShape.expand_syntax(&mut iterator, &context)?; + + match &head { + CommandSignature::Expression(_) => Err(ShellError::syntax_error( + "Unexpected expression in command position".tagged(command.tag), + )), + + // If the command starts with `^`, treat it as an external command no matter what + CommandSignature::External(name) => { + let name_str = name.slice(source); + + external_command(&mut iterator, source, name_str.tagged(name)) + } + + CommandSignature::LiteralExternal { outer, inner } => { + let name_str = inner.slice(source); + + external_command(&mut iterator, source, name_str.tagged(outer)) + } + + CommandSignature::Internal(command) => { + let tail = + parse_command_tail(&command.signature(), &context, &mut iterator, command.tag)?; + + let (positional, named) = match tail { + None => (None, None), + Some((positional, named)) => (positional, named), + }; + + let call = hir::Call { + head: Box::new(head.to_expression()), + positional, + named, + }; + + Ok(ClassifiedCommand::Internal(InternalCommand::new( + command.name().to_string(), + command.tag, + call, + ))) + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct CommandShape; + +impl ColorSyntax for CommandShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) { + let kind 
= color_fallible_syntax(&CommandHeadShape, token_nodes, context, shapes); + + match kind { + Err(_) => { + // We didn't find a command, so we'll have to fall back to parsing this pipeline part + // as a blob of undifferentiated expressions + color_syntax(&ExpressionListShape, token_nodes, context, shapes); + } + + Ok(CommandHeadKind::External) => { + color_syntax(&ExternalTokensShape, token_nodes, context, shapes); + } + Ok(CommandHeadKind::Internal(signature)) => { + color_syntax_with(&CommandTailShape, &signature, token_nodes, context, shapes); + } + }; + } +} diff --git a/src/parser/hir/syntax_shape/block.rs b/src/parser/hir/syntax_shape/block.rs index a78292b34e..806681691e 100644 --- a/src/parser/hir/syntax_shape/block.rs +++ b/src/parser/hir/syntax_shape/block.rs @@ -2,10 +2,13 @@ use crate::errors::ShellError; use crate::parser::{ hir, hir::syntax_shape::{ - continue_expression, expand_expr, expand_syntax, ExpandContext, ExpandExpression, - ExpressionListShape, PathTailShape, VariablePathShape, + color_fallible_syntax, color_syntax_with, continue_expression, expand_expr, expand_syntax, + DelimitedShape, ExpandContext, ExpandExpression, ExpressionContinuationShape, + ExpressionListShape, FallibleColorSyntax, FlatShape, MemberShape, PathTailShape, + VariablePathShape, }, hir::tokens_iterator::TokensIterator, + parse::token_tree::Delimiter, RawToken, TokenNode, }; use crate::{Tag, Tagged, TaggedItem}; @@ -13,6 +16,49 @@ use crate::{Tag, Tagged, TaggedItem}; #[derive(Debug, Copy, Clone)] pub struct AnyBlockShape; +impl FallibleColorSyntax for AnyBlockShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let block = token_nodes.peek_non_ws().not_eof("block"); + + let block = match block { + Err(_) => return Ok(()), + Ok(block) => block, + }; + + // is it just a block? 
+ let block = block.node.as_block(); + + match block { + // If so, color it as a block + Some((children, tags)) => { + let mut token_nodes = TokensIterator::new(children.item, context.tag, false); + color_syntax_with( + &DelimitedShape, + &(Delimiter::Brace, tags.0, tags.1), + &mut token_nodes, + context, + shapes, + ); + + return Ok(()); + } + _ => {} + } + + // Otherwise, look for a shorthand block. If none found, fail + color_fallible_syntax(&ShorthandBlock, token_nodes, context, shapes) + } +} + impl ExpandExpression for AnyBlockShape { fn expand_expr<'a, 'b>( &self, @@ -25,7 +71,7 @@ impl ExpandExpression for AnyBlockShape { let block = block.node.as_block(); match block { - Some(block) => { + Some((block, _tags)) => { let mut iterator = TokensIterator::new(&block.item, context.tag, false); let exprs = expand_syntax(&ExpressionListShape, &mut iterator, context)?; @@ -42,6 +88,37 @@ impl ExpandExpression for AnyBlockShape { #[derive(Debug, Copy, Clone)] pub struct ShorthandBlock; +impl FallibleColorSyntax for ShorthandBlock { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + // Try to find a shorthand head. 
If none found, fail + color_fallible_syntax(&ShorthandPath, token_nodes, context, shapes)?; + + loop { + // Check to see whether there's any continuation after the head expression + let result = + color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes); + + match result { + // if no continuation was found, we're done + Err(_) => break, + // if a continuation was found, look for another one + Ok(_) => continue, + } + } + + Ok(()) + } +} + impl ExpandExpression for ShorthandBlock { fn expand_expr<'a, 'b>( &self, @@ -62,6 +139,50 @@ impl ExpandExpression for ShorthandBlock { #[derive(Debug, Copy, Clone)] pub struct ShorthandPath; +impl FallibleColorSyntax for ShorthandPath { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + token_nodes.atomic(|token_nodes| { + let variable = color_fallible_syntax(&VariablePathShape, token_nodes, context, shapes); + + match variable { + Ok(_) => { + // if it's a variable path, that's the head part + return Ok(()); + } + + Err(_) => { + // otherwise, we'll try to find a member path + } + } + + // look for a member (`` -> `$it.`) + color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?; + + // Now that we've synthesized the head, of the path, proceed to expand the tail of the path + // like any other path. 
+ let tail = color_fallible_syntax(&PathTailShape, token_nodes, context, shapes); + + match tail { + Ok(_) => {} + Err(_) => { + // It's ok if there's no path tail; a single member is sufficient + } + } + + Ok(()) + }) + } +} + impl ExpandExpression for ShorthandPath { fn expand_expr<'a, 'b>( &self, @@ -92,8 +213,6 @@ impl ExpandExpression for ShorthandPath { head = hir::Expression::dot_member(head, member); } - println!("{:?}", head); - Ok(head) } } @@ -104,6 +223,49 @@ impl ExpandExpression for ShorthandPath { #[derive(Debug, Copy, Clone)] pub struct ShorthandHeadShape; +impl FallibleColorSyntax for ShorthandHeadShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + // A shorthand path must not be at EOF + let peeked = token_nodes.peek_non_ws().not_eof("shorthand path")?; + + match peeked.node { + // If the head of a shorthand path is a bare token, it expands to `$it.bare` + TokenNode::Token(Tagged { + item: RawToken::Bare, + tag, + }) => { + peeked.commit(); + shapes.push(FlatShape::BareMember.tagged(tag)); + Ok(()) + } + + // If the head of a shorthand path is a string, it expands to `$it."some string"` + TokenNode::Token(Tagged { + item: RawToken::String(_), + tag: outer, + }) => { + peeked.commit(); + shapes.push(FlatShape::StringMember.tagged(outer)); + Ok(()) + } + + other => Err(ShellError::type_error( + "shorthand head", + other.tagged_type_name(), + )), + } + } +} + impl ExpandExpression for ShorthandHeadShape { fn expand_expr<'a, 'b>( &self, diff --git a/src/parser/hir/syntax_shape/expression.rs b/src/parser/hir/syntax_shape/expression.rs index 58cfa4a1a5..fc99c38dc3 100644 --- a/src/parser/hir/syntax_shape/expression.rs +++ b/src/parser/hir/syntax_shape/expression.rs @@ -1,3 +1,4 @@ +pub(crate) mod atom; pub(crate) mod delimited; pub(crate) mod file_path; pub(crate) mod list; @@ -8,14 
+9,14 @@ pub(crate) mod unit; pub(crate) mod variable_path; use crate::parser::hir::syntax_shape::{ - expand_expr, expand_syntax, expand_variable, expression::delimited::expand_delimited_expr, - BareShape, DotShape, ExpandContext, ExpandExpression, ExpandSyntax, ExpressionContinuation, - ExpressionContinuationShape, UnitShape, + color_delimited_square, color_fallible_syntax, color_fallible_syntax_with, expand_atom, + expand_delimited_square, expand_expr, expand_syntax, AtomicToken, BareShape, ColorableDotShape, + DotShape, ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, ExpressionContinuation, + ExpressionContinuationShape, FallibleColorSyntax, FlatShape, }; use crate::parser::{ hir, - hir::{Expression, Operator, TokensIterator}, - RawToken, Token, TokenNode, + hir::{Expression, TokensIterator}, }; use crate::prelude::*; use std::path::PathBuf; @@ -36,6 +37,32 @@ impl ExpandExpression for AnyExpressionShape { } } +impl FallibleColorSyntax for AnyExpressionShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + // Look for an expression at the cursor + color_fallible_syntax(&AnyExpressionStartShape, token_nodes, context, shapes)?; + + match continue_coloring_expression(token_nodes, context, shapes) { + Err(_) => { + // it's fine for there to be no continuation + } + + Ok(()) => {} + } + + Ok(()) + } +} + pub(crate) fn continue_expression( mut head: hir::Expression, token_nodes: &mut TokensIterator<'_>, @@ -64,6 +91,30 @@ pub(crate) fn continue_expression( } } +pub(crate) fn continue_coloring_expression( + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + shapes: &mut Vec>, +) -> Result<(), ShellError> { + // if there's not even one expression continuation, fail + color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes)?; + + loop { + // Check to 
see whether there's any continuation after the head expression + let result = + color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes); + + match result { + Err(_) => { + // We already saw one continuation, so just return + return Ok(()); + } + + Ok(_) => {} + } + } +} + #[derive(Debug, Copy, Clone)] pub struct AnyExpressionStartShape; @@ -73,59 +124,148 @@ impl ExpandExpression for AnyExpressionStartShape { token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, ) -> Result { - let size = expand_expr(&UnitShape, token_nodes, context); + let atom = expand_atom(token_nodes, "expression", context, ExpansionRule::new())?; - match size { - Ok(expr) => return Ok(expr), - Err(_) => {} - } - - let peek_next = token_nodes.peek_any().not_eof("expression")?; - - let head = match peek_next.node { - TokenNode::Token(token) => match token.item { - RawToken::Bare | RawToken::Operator(Operator::Dot) => { - let start = token.tag; - peek_next.commit(); - - let end = expand_syntax(&BareTailShape, token_nodes, context)?; - - match end { - Some(end) => return Ok(hir::Expression::bare(start.until(end))), - None => return Ok(hir::Expression::bare(start)), - } - } - _ => { - peek_next.commit(); - expand_one_context_free_token(*token, context) - } - }, - node @ TokenNode::Call(_) - | node @ TokenNode::Nodes(_) - | node @ TokenNode::Pipeline(_) - | node @ TokenNode::Flag(_) - | node @ TokenNode::Member(_) - | node @ TokenNode::Whitespace(_) => { - return Err(ShellError::type_error( - "expression", - node.tagged_type_name(), + match atom.item { + AtomicToken::Size { number, unit } => { + return Ok(hir::Expression::size( + number.to_number(context.source), + unit.item, + atom.tag, )) } - TokenNode::Delimited(delimited) => { - peek_next.commit(); - expand_delimited_expr(delimited, context) + + AtomicToken::SquareDelimited { nodes, .. 
} => { + expand_delimited_square(&nodes, atom.tag, context) } - TokenNode::Error(error) => return Err(*error.item.clone()), - }?; + AtomicToken::Word { .. } | AtomicToken::Dot { .. } => { + let end = expand_syntax(&BareTailShape, token_nodes, context)?; + Ok(hir::Expression::bare(atom.tag.until_option(end))) + } - Ok(head) + other => return other.tagged(atom.tag).into_hir(context, "expression"), + } + } +} + +impl FallibleColorSyntax for AnyExpressionStartShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let atom = token_nodes.spanned(|token_nodes| { + expand_atom( + token_nodes, + "expression", + context, + ExpansionRule::permissive(), + ) + }); + + let atom = match atom { + Tagged { + item: Err(_err), + tag, + } => { + shapes.push(FlatShape::Error.tagged(tag)); + return Ok(()); + } + + Tagged { + item: Ok(value), .. + } => value, + }; + + match atom.item { + AtomicToken::Size { number, unit } => shapes.push( + FlatShape::Size { + number: number.tag, + unit: unit.tag, + } + .tagged(atom.tag), + ), + + AtomicToken::SquareDelimited { nodes, tags } => { + color_delimited_square(tags, &nodes, atom.tag, context, shapes) + } + + AtomicToken::Word { .. } | AtomicToken::Dot { .. 
} => { + shapes.push(FlatShape::Word.tagged(atom.tag)); + } + + _ => atom.color_tokens(shapes), + } + + Ok(()) } } #[derive(Debug, Copy, Clone)] pub struct BareTailShape; +impl FallibleColorSyntax for BareTailShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let len = shapes.len(); + + loop { + let word = color_fallible_syntax_with( + &BareShape, + &FlatShape::Word, + token_nodes, + context, + shapes, + ); + + match word { + // if a word was found, continue + Ok(_) => continue, + // if a word wasn't found, try to find a dot + Err(_) => {} + } + + // try to find a dot + let dot = color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Word, + token_nodes, + context, + shapes, + ); + + match dot { + // if a dot was found, try to find another word + Ok(_) => continue, + // otherwise, we're done + Err(_) => break, + } + } + + if shapes.len() > len { + Ok(()) + } else { + Err(ShellError::syntax_error( + "No tokens matched BareTailShape".tagged_unknown(), + )) + } + } +} + impl ExpandSyntax for BareTailShape { type Output = Option; @@ -158,29 +298,6 @@ impl ExpandSyntax for BareTailShape { } } -fn expand_one_context_free_token<'a, 'b>( - token: Token, - context: &ExpandContext, -) -> Result { - Ok(match token.item { - RawToken::Number(number) => { - hir::Expression::number(number.to_number(context.source), token.tag) - } - RawToken::Operator(..) => { - return Err(ShellError::syntax_error( - "unexpected operator, expected an expression".tagged(token.tag), - )) - } - RawToken::Size(..) 
=> unimplemented!("size"), - RawToken::String(tag) => hir::Expression::string(tag, token.tag), - RawToken::Variable(tag) => expand_variable(tag, token.tag, &context.source), - RawToken::ExternalCommand(_) => unimplemented!(), - RawToken::ExternalWord => unimplemented!(), - RawToken::GlobPattern => hir::Expression::pattern(token.tag), - RawToken::Bare => hir::Expression::string(token.tag, token.tag), - }) -} - pub fn expand_file_path(string: &str, context: &ExpandContext) -> PathBuf { let expanded = shellexpand::tilde_with_context(string, || context.homedir()); diff --git a/src/parser/hir/syntax_shape/expression/atom.rs b/src/parser/hir/syntax_shape/expression/atom.rs new file mode 100644 index 0000000000..83306da741 --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/atom.rs @@ -0,0 +1,541 @@ +use crate::parser::hir::syntax_shape::{ + expand_syntax, expression::expand_file_path, parse_single_node, BarePathShape, + BarePatternShape, ExpandContext, UnitShape, +}; +use crate::parser::{ + hir, + hir::{Expression, RawNumber, TokensIterator}, + parse::flag::{Flag, FlagKind}, + DelimitedNode, Delimiter, FlatShape, RawToken, TokenNode, Unit, +}; +use crate::prelude::*; + +#[derive(Debug)] +pub enum AtomicToken<'tokens> { + Eof { + tag: Tag, + }, + Error { + error: Tagged, + }, + Number { + number: RawNumber, + }, + Size { + number: Tagged, + unit: Tagged, + }, + String { + body: Tag, + }, + ItVariable { + name: Tag, + }, + Variable { + name: Tag, + }, + ExternalCommand { + command: Tag, + }, + ExternalWord { + text: Tag, + }, + GlobPattern { + pattern: Tag, + }, + FilePath { + path: Tag, + }, + Word { + text: Tag, + }, + SquareDelimited { + tags: (Tag, Tag), + nodes: &'tokens Vec, + }, + ParenDelimited { + tags: (Tag, Tag), + nodes: &'tokens Vec, + }, + BraceDelimited { + tags: (Tag, Tag), + nodes: &'tokens Vec, + }, + Pipeline { + pipe: Option, + elements: Tagged<&'tokens Vec>, + }, + ShorthandFlag { + name: Tag, + }, + LonghandFlag { + name: Tag, + }, + Dot { + 
text: Tag, + }, + Operator { + text: Tag, + }, + Whitespace { + text: Tag, + }, +} + +pub type TaggedAtomicToken<'tokens> = Tagged>; + +impl<'tokens> TaggedAtomicToken<'tokens> { + pub fn into_hir( + &self, + context: &ExpandContext, + expected: &'static str, + ) -> Result { + Ok(match &self.item { + AtomicToken::Eof { .. } => { + return Err(ShellError::type_error( + expected, + "eof atomic token".tagged(self.tag), + )) + } + AtomicToken::Error { .. } => { + return Err(ShellError::type_error( + expected, + "eof atomic token".tagged(self.tag), + )) + } + AtomicToken::Operator { .. } => { + return Err(ShellError::type_error( + expected, + "operator".tagged(self.tag), + )) + } + AtomicToken::ShorthandFlag { .. } => { + return Err(ShellError::type_error( + expected, + "shorthand flag".tagged(self.tag), + )) + } + AtomicToken::LonghandFlag { .. } => { + return Err(ShellError::type_error(expected, "flag".tagged(self.tag))) + } + AtomicToken::Whitespace { .. } => { + return Err(ShellError::unimplemented("whitespace in AtomicToken")) + } + AtomicToken::Dot { .. 
} => { + return Err(ShellError::type_error(expected, "dot".tagged(self.tag))) + } + AtomicToken::Number { number } => { + Expression::number(number.to_number(context.source), self.tag) + } + AtomicToken::FilePath { path } => Expression::file_path( + expand_file_path(path.slice(context.source), context), + self.tag, + ), + AtomicToken::Size { number, unit } => { + Expression::size(number.to_number(context.source), **unit, self.tag) + } + AtomicToken::String { body } => Expression::string(body, self.tag), + AtomicToken::ItVariable { name } => Expression::it_variable(name, self.tag), + AtomicToken::Variable { name } => Expression::variable(name, self.tag), + AtomicToken::ExternalCommand { command } => { + Expression::external_command(command, self.tag) + } + AtomicToken::ExternalWord { text } => Expression::string(text, self.tag), + AtomicToken::GlobPattern { pattern } => Expression::pattern(pattern), + AtomicToken::Word { text } => Expression::string(text, text), + AtomicToken::SquareDelimited { .. } => unimplemented!("into_hir"), + AtomicToken::ParenDelimited { .. } => unimplemented!("into_hir"), + AtomicToken::BraceDelimited { .. } => unimplemented!("into_hir"), + AtomicToken::Pipeline { .. } => unimplemented!("into_hir"), + }) + } + + pub fn tagged_type_name(&self) -> Tagged<&'static str> { + match &self.item { + AtomicToken::Eof { .. } => "eof", + AtomicToken::Error { .. } => "error", + AtomicToken::Operator { .. } => "operator", + AtomicToken::ShorthandFlag { .. } => "shorthand flag", + AtomicToken::LonghandFlag { .. } => "flag", + AtomicToken::Whitespace { .. } => "whitespace", + AtomicToken::Dot { .. } => "dot", + AtomicToken::Number { .. } => "number", + AtomicToken::FilePath { .. } => "file path", + AtomicToken::Size { .. } => "size", + AtomicToken::String { .. } => "string", + AtomicToken::ItVariable { .. } => "$it", + AtomicToken::Variable { .. } => "variable", + AtomicToken::ExternalCommand { .. } => "external command", + AtomicToken::ExternalWord { .. 
} => "external word", + AtomicToken::GlobPattern { .. } => "file pattern", + AtomicToken::Word { .. } => "word", + AtomicToken::SquareDelimited { .. } => "array literal", + AtomicToken::ParenDelimited { .. } => "parenthesized expression", + AtomicToken::BraceDelimited { .. } => "block", + AtomicToken::Pipeline { .. } => "pipeline", + } + .tagged(self.tag) + } + + pub(crate) fn color_tokens(&self, shapes: &mut Vec>) { + match &self.item { + AtomicToken::Eof { .. } => {} + AtomicToken::Error { .. } => return shapes.push(FlatShape::Error.tagged(self.tag)), + AtomicToken::Operator { .. } => { + return shapes.push(FlatShape::Operator.tagged(self.tag)); + } + AtomicToken::ShorthandFlag { .. } => { + return shapes.push(FlatShape::ShorthandFlag.tagged(self.tag)); + } + AtomicToken::LonghandFlag { .. } => { + return shapes.push(FlatShape::Flag.tagged(self.tag)); + } + AtomicToken::Whitespace { .. } => { + return shapes.push(FlatShape::Whitespace.tagged(self.tag)); + } + AtomicToken::FilePath { .. } => return shapes.push(FlatShape::Path.tagged(self.tag)), + AtomicToken::Dot { .. } => return shapes.push(FlatShape::Dot.tagged(self.tag)), + AtomicToken::Number { + number: RawNumber::Decimal(_), + } => { + return shapes.push(FlatShape::Decimal.tagged(self.tag)); + } + AtomicToken::Number { + number: RawNumber::Int(_), + } => { + return shapes.push(FlatShape::Int.tagged(self.tag)); + } + AtomicToken::Size { number, unit } => { + return shapes.push( + FlatShape::Size { + number: number.tag, + unit: unit.tag, + } + .tagged(self.tag), + ); + } + AtomicToken::String { .. } => return shapes.push(FlatShape::String.tagged(self.tag)), + AtomicToken::ItVariable { .. } => { + return shapes.push(FlatShape::ItVariable.tagged(self.tag)) + } + AtomicToken::Variable { .. } => { + return shapes.push(FlatShape::Variable.tagged(self.tag)) + } + AtomicToken::ExternalCommand { .. } => { + return shapes.push(FlatShape::ExternalCommand.tagged(self.tag)); + } + AtomicToken::ExternalWord { .. 
} => { + return shapes.push(FlatShape::ExternalWord.tagged(self.tag)) + } + AtomicToken::GlobPattern { .. } => { + return shapes.push(FlatShape::GlobPattern.tagged(self.tag)) + } + AtomicToken::Word { .. } => return shapes.push(FlatShape::Word.tagged(self.tag)), + _ => return shapes.push(FlatShape::Error.tagged(self.tag)), + } + } +} + +#[derive(Debug)] +pub enum WhitespaceHandling { + #[allow(unused)] + AllowWhitespace, + RejectWhitespace, +} + +#[derive(Debug)] +pub struct ExpansionRule { + pub(crate) allow_external_command: bool, + pub(crate) allow_external_word: bool, + pub(crate) allow_operator: bool, + pub(crate) allow_eof: bool, + pub(crate) treat_size_as_word: bool, + pub(crate) commit_errors: bool, + pub(crate) whitespace: WhitespaceHandling, +} + +impl ExpansionRule { + pub fn new() -> ExpansionRule { + ExpansionRule { + allow_external_command: false, + allow_external_word: false, + allow_operator: false, + allow_eof: false, + treat_size_as_word: false, + commit_errors: false, + whitespace: WhitespaceHandling::RejectWhitespace, + } + } + + /// The intent of permissive mode is to return an atomic token for every possible + /// input token. This is important for error-correcting parsing, such as the + /// syntax highlighter. 
+ pub fn permissive() -> ExpansionRule { + ExpansionRule { + allow_external_command: true, + allow_external_word: true, + allow_operator: true, + allow_eof: true, + treat_size_as_word: false, + commit_errors: true, + whitespace: WhitespaceHandling::AllowWhitespace, + } + } + + #[allow(unused)] + pub fn allow_external_command(mut self) -> ExpansionRule { + self.allow_external_command = true; + self + } + + #[allow(unused)] + pub fn allow_operator(mut self) -> ExpansionRule { + self.allow_operator = true; + self + } + + #[allow(unused)] + pub fn no_operator(mut self) -> ExpansionRule { + self.allow_operator = false; + self + } + + #[allow(unused)] + pub fn no_external_command(mut self) -> ExpansionRule { + self.allow_external_command = false; + self + } + + #[allow(unused)] + pub fn allow_external_word(mut self) -> ExpansionRule { + self.allow_external_word = true; + self + } + + #[allow(unused)] + pub fn no_external_word(mut self) -> ExpansionRule { + self.allow_external_word = false; + self + } + + #[allow(unused)] + pub fn treat_size_as_word(mut self) -> ExpansionRule { + self.treat_size_as_word = true; + self + } + + #[allow(unused)] + pub fn commit_errors(mut self) -> ExpansionRule { + self.commit_errors = true; + self + } + + #[allow(unused)] + pub fn allow_whitespace(mut self) -> ExpansionRule { + self.whitespace = WhitespaceHandling::AllowWhitespace; + self + } + + #[allow(unused)] + pub fn reject_whitespace(mut self) -> ExpansionRule { + self.whitespace = WhitespaceHandling::RejectWhitespace; + self + } +} + +/// If the caller of expand_atom throws away the returned atomic token returned, it +/// must use a checkpoint to roll it back. 
+pub fn expand_atom<'me, 'content>( + token_nodes: &'me mut TokensIterator<'content>, + expected: &'static str, + context: &ExpandContext, + rule: ExpansionRule, +) -> Result, ShellError> { + if token_nodes.at_end() { + match rule.allow_eof { + true => { + return Ok(AtomicToken::Eof { + tag: Tag::unknown(), + } + .tagged_unknown()) + } + false => return Err(ShellError::unexpected_eof("anything", Tag::unknown())), + } + } + + // First, we'll need to handle the situation where more than one token corresponds + // to a single atomic token + + // If treat_size_as_word, don't try to parse the head of the token stream + // as a size. + match rule.treat_size_as_word { + true => {} + false => match expand_syntax(&UnitShape, token_nodes, context) { + // If the head of the stream isn't a valid unit, we'll try to parse + // it again next as a word + Err(_) => {} + + // But if it was a valid unit, we're done here + Ok(Tagged { + item: (number, unit), + tag, + }) => return Ok(AtomicToken::Size { number, unit }.tagged(tag)), + }, + } + + // Try to parse the head of the stream as a bare path. A bare path includes + // words as well as `.`s, connected together without whitespace. + match expand_syntax(&BarePathShape, token_nodes, context) { + // If we didn't find a bare path + Err(_) => {} + Ok(tag) => { + let next = token_nodes.peek_any(); + + match next.node { + Some(token) if token.is_pattern() => { + // if the very next token is a pattern, we're looking at a glob, not a + // word, and we should try to parse it as a glob next + } + + _ => return Ok(AtomicToken::Word { text: tag }.tagged(tag)), + } + } + } + + // Try to parse the head of the stream as a pattern. A pattern includes + // words, words with `*` as well as `.`s, connected together without whitespace. 
+ match expand_syntax(&BarePatternShape, token_nodes, context) { + // If we didn't find a bare path + Err(_) => {} + Ok(tag) => return Ok(AtomicToken::GlobPattern { pattern: tag }.tagged(tag)), + } + + // The next token corresponds to at most one atomic token + + // We need to `peek` because `parse_single_node` doesn't cover all of the + // cases that `expand_atom` covers. We should probably collapse the two + // if possible. + let peeked = token_nodes.peek_any().not_eof(expected)?; + + match peeked.node { + TokenNode::Token(_) => { + // handle this next + } + + TokenNode::Error(error) => { + peeked.commit(); + return Ok(AtomicToken::Error { + error: error.clone(), + } + .tagged(error.tag)); + } + + // [ ... ] + TokenNode::Delimited(Tagged { + item: + DelimitedNode { + delimiter: Delimiter::Square, + tags, + children, + }, + tag, + }) => { + peeked.commit(); + return Ok(AtomicToken::SquareDelimited { + nodes: children, + tags: *tags, + } + .tagged(tag)); + } + + TokenNode::Flag(Tagged { + item: + Flag { + kind: FlagKind::Shorthand, + name, + }, + tag, + }) => { + peeked.commit(); + return Ok(AtomicToken::ShorthandFlag { name: *name }.tagged(tag)); + } + + TokenNode::Flag(Tagged { + item: + Flag { + kind: FlagKind::Longhand, + name, + }, + tag, + }) => { + peeked.commit(); + return Ok(AtomicToken::ShorthandFlag { name: *name }.tagged(tag)); + } + + // If we see whitespace, process the whitespace according to the whitespace + // handling rules + TokenNode::Whitespace(tag) => match rule.whitespace { + // if whitespace is allowed, return a whitespace token + WhitespaceHandling::AllowWhitespace => { + peeked.commit(); + return Ok(AtomicToken::Whitespace { text: *tag }.tagged(tag)); + } + + // if whitespace is disallowed, return an error + WhitespaceHandling::RejectWhitespace => { + return Err(ShellError::syntax_error( + "Unexpected whitespace".tagged(tag), + )) + } + }, + + other => { + let tag = peeked.node.tag(); + + peeked.commit(); + return Ok(AtomicToken::Error { + 
error: ShellError::type_error("token", other.tagged_type_name()).tagged(tag), + } + .tagged(tag)); + } + } + + parse_single_node(token_nodes, expected, |token, token_tag, err| { + Ok(match token { + // First, the error cases. Each error case corresponds to a expansion rule + // flag that can be used to allow the case + + // rule.allow_operator + RawToken::Operator(_) if !rule.allow_operator => return Err(err.error()), + // rule.allow_external_command + RawToken::ExternalCommand(_) if !rule.allow_external_command => { + return Err(ShellError::type_error( + expected, + token.type_name().tagged(token_tag), + )) + } + // rule.allow_external_word + RawToken::ExternalWord if !rule.allow_external_word => { + return Err(ShellError::invalid_external_word(token_tag)) + } + + RawToken::Number(number) => AtomicToken::Number { number }.tagged(token_tag), + RawToken::Operator(_) => AtomicToken::Operator { text: token_tag }.tagged(token_tag), + RawToken::String(body) => AtomicToken::String { body }.tagged(token_tag), + RawToken::Variable(name) if name.slice(context.source) == "it" => { + AtomicToken::ItVariable { name }.tagged(token_tag) + } + RawToken::Variable(name) => AtomicToken::Variable { name }.tagged(token_tag), + RawToken::ExternalCommand(command) => { + AtomicToken::ExternalCommand { command }.tagged(token_tag) + } + RawToken::ExternalWord => { + AtomicToken::ExternalWord { text: token_tag }.tagged(token_tag) + } + RawToken::GlobPattern => { + AtomicToken::GlobPattern { pattern: token_tag }.tagged(token_tag) + } + RawToken::Bare => AtomicToken::Word { text: token_tag }.tagged(token_tag), + }) + }) +} diff --git a/src/parser/hir/syntax_shape/expression/delimited.rs b/src/parser/hir/syntax_shape/expression/delimited.rs index 0a01b0fc26..001e3812f4 100644 --- a/src/parser/hir/syntax_shape/expression/delimited.rs +++ b/src/parser/hir/syntax_shape/expression/delimited.rs @@ -1,38 +1,49 @@ -use crate::parser::hir::syntax_shape::{expand_syntax, ExpandContext, 
ExpressionListShape}; -use crate::parser::{hir, hir::TokensIterator}; -use crate::parser::{DelimitedNode, Delimiter}; +use crate::parser::hir::syntax_shape::{ + color_syntax, expand_syntax, ColorSyntax, ExpandContext, ExpressionListShape, TokenNode, +}; +use crate::parser::{hir, hir::TokensIterator, Delimiter, FlatShape}; use crate::prelude::*; -pub fn expand_delimited_expr( - delimited: &Tagged, +pub fn expand_delimited_square( + children: &Vec, + tag: Tag, context: &ExpandContext, ) -> Result { - match &delimited.item { - DelimitedNode { - delimiter: Delimiter::Square, - children, - } => { - let mut tokens = TokensIterator::new(&children, delimited.tag, false); + let mut tokens = TokensIterator::new(&children, tag, false); - let list = expand_syntax(&ExpressionListShape, &mut tokens, context); + let list = expand_syntax(&ExpressionListShape, &mut tokens, context); - Ok(hir::Expression::list(list?, delimited.tag)) - } + Ok(hir::Expression::list(list?, tag)) +} - DelimitedNode { - delimiter: Delimiter::Paren, - .. - } => Err(ShellError::type_error( - "expression", - "unimplemented call expression".tagged(delimited.tag), - )), +pub fn color_delimited_square( + (open, close): (Tag, Tag), + children: &Vec, + tag: Tag, + context: &ExpandContext, + shapes: &mut Vec>, +) { + shapes.push(FlatShape::OpenDelimiter(Delimiter::Square).tagged(open)); + let mut tokens = TokensIterator::new(&children, tag, false); + let _list = color_syntax(&ExpressionListShape, &mut tokens, context, shapes); + shapes.push(FlatShape::CloseDelimiter(Delimiter::Square).tagged(close)); +} - DelimitedNode { - delimiter: Delimiter::Brace, - .. 
- } => Err(ShellError::type_error( - "expression", - "unimplemented block expression".tagged(delimited.tag), - )), +#[derive(Debug, Copy, Clone)] +pub struct DelimitedShape; + +impl ColorSyntax for DelimitedShape { + type Info = (); + type Input = (Delimiter, Tag, Tag); + fn color_syntax<'a, 'b>( + &self, + (delimiter, open, close): &(Delimiter, Tag, Tag), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Self::Info { + shapes.push(FlatShape::OpenDelimiter(*delimiter).tagged(open)); + color_syntax(&ExpressionListShape, token_nodes, context, shapes); + shapes.push(FlatShape::CloseDelimiter(*delimiter).tagged(close)); } } diff --git a/src/parser/hir/syntax_shape/expression/file_path.rs b/src/parser/hir/syntax_shape/expression/file_path.rs index c0e5c7c2ab..e73dc8d647 100644 --- a/src/parser/hir/syntax_shape/expression/file_path.rs +++ b/src/parser/hir/syntax_shape/expression/file_path.rs @@ -1,59 +1,71 @@ +use crate::parser::hir::syntax_shape::expression::atom::{expand_atom, AtomicToken, ExpansionRule}; use crate::parser::hir::syntax_shape::{ - expand_syntax, expression::expand_file_path, parse_single_node, BarePathShape, ExpandContext, - ExpandExpression, + expression::expand_file_path, ExpandContext, ExpandExpression, FallibleColorSyntax, FlatShape, }; -use crate::parser::{hir, hir::TokensIterator, RawToken}; +use crate::parser::{hir, hir::TokensIterator}; use crate::prelude::*; #[derive(Debug, Copy, Clone)] pub struct FilePathShape; +impl FallibleColorSyntax for FilePathShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let atom = expand_atom( + token_nodes, + "file path", + context, + ExpansionRule::permissive(), + ); + + let atom = match atom { + Err(_) => return Ok(()), + Ok(atom) => atom, + }; + + match atom.item { + AtomicToken::Word { .. 
} + | AtomicToken::String { .. } + | AtomicToken::Number { .. } + | AtomicToken::Size { .. } => { + shapes.push(FlatShape::Path.tagged(atom.tag)); + } + + _ => atom.color_tokens(shapes), + } + + Ok(()) + } +} + impl ExpandExpression for FilePathShape { fn expand_expr<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, ) -> Result { - let bare = expand_syntax(&BarePathShape, token_nodes, context); + let atom = expand_atom(token_nodes, "file path", context, ExpansionRule::new())?; - match bare { - Ok(tag) => { - let string = tag.slice(context.source); - let path = expand_file_path(string, context); - return Ok(hir::Expression::file_path(path, tag)); + match atom.item { + AtomicToken::Word { text: body } | AtomicToken::String { body } => { + let path = expand_file_path(body.slice(context.source), context); + return Ok(hir::Expression::file_path(path, atom.tag)); } - Err(_) => {} + + AtomicToken::Number { .. } | AtomicToken::Size { .. } => { + let path = atom.tag.slice(context.source); + return Ok(hir::Expression::file_path(path, atom.tag)); + } + + _ => return atom.into_hir(context, "file path"), } - - parse_single_node(token_nodes, "Path", |token, token_tag| { - Ok(match token { - RawToken::GlobPattern => { - return Err(ShellError::type_error( - "Path", - "glob pattern".tagged(token_tag), - )) - } - RawToken::Operator(..) 
=> { - return Err(ShellError::type_error("Path", "operator".tagged(token_tag))) - } - RawToken::Variable(tag) if tag.slice(context.source) == "it" => { - hir::Expression::it_variable(tag, token_tag) - } - RawToken::Variable(tag) => hir::Expression::variable(tag, token_tag), - RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag), - RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)), - RawToken::Number(_) => hir::Expression::bare(token_tag), - RawToken::Size(_, _) => hir::Expression::bare(token_tag), - RawToken::Bare => hir::Expression::file_path( - expand_file_path(token_tag.slice(context.source), context), - token_tag, - ), - - RawToken::String(tag) => hir::Expression::file_path( - expand_file_path(tag.slice(context.source), context), - token_tag, - ), - }) - }) } } diff --git a/src/parser/hir/syntax_shape/expression/list.rs b/src/parser/hir/syntax_shape/expression/list.rs index 9d28f44141..4109108a37 100644 --- a/src/parser/hir/syntax_shape/expression/list.rs +++ b/src/parser/hir/syntax_shape/expression/list.rs @@ -2,10 +2,14 @@ use crate::errors::ShellError; use crate::parser::{ hir, hir::syntax_shape::{ - expand_expr, maybe_spaced, spaced, AnyExpressionShape, ExpandContext, ExpandSyntax, + color_fallible_syntax, color_syntax, expand_atom, expand_expr, maybe_spaced, spaced, + AnyExpressionShape, ColorSyntax, ExpandContext, ExpandSyntax, ExpansionRule, + MaybeSpaceShape, SpaceShape, }, - hir::{debug_tokens, TokensIterator}, + hir::TokensIterator, + FlatShape, }; +use crate::Tagged; #[derive(Debug, Copy, Clone)] pub struct ExpressionListShape; @@ -28,8 +32,6 @@ impl ExpandSyntax for ExpressionListShape { exprs.push(expr); - println!("{:?}", debug_tokens(token_nodes, context.source)); - loop { if token_nodes.at_end_possible_ws() { return Ok(exprs); @@ -41,3 +43,134 @@ impl ExpandSyntax for ExpressionListShape { } } } + +impl ColorSyntax for ExpressionListShape { + type Info = (); + type Input = (); + + 
/// The intent of this method is to fully color an expression list shape infallibly. + /// This means that if we can't expand a token into an expression, we fall back to + /// a simpler coloring strategy. + /// + /// This would apply to something like `where x >`, which includes an incomplete + /// binary operator. Since we will fail to process it as a binary operator, we'll + /// fall back to a simpler coloring and move on. + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) { + // We encountered a parsing error and will continue with simpler coloring ("backoff + // coloring mode") + let mut backoff = false; + + // Consume any leading whitespace + color_syntax(&MaybeSpaceShape, token_nodes, context, shapes); + + loop { + // If we reached the very end of the token stream, we're done + if token_nodes.at_end() { + return; + } + + if backoff { + let len = shapes.len(); + + // If we previously encountered a parsing error, use backoff coloring mode + color_syntax(&SimplestExpression, token_nodes, context, shapes); + + if len == shapes.len() && !token_nodes.at_end() { + // This should never happen, but if it does, a panic is better than an infinite loop + panic!("Unexpected tokens left that couldn't be colored even with SimplestExpression") + } + } else { + // Try to color the head of the stream as an expression + match color_fallible_syntax(&AnyExpressionShape, token_nodes, context, shapes) { + // If no expression was found, switch to backoff coloring mode + Err(_) => { + backoff = true; + continue; + } + Ok(_) => {} + } + + // If an expression was found, consume a space + match color_fallible_syntax(&SpaceShape, token_nodes, context, shapes) { + Err(_) => { + // If no space was found, we're either at the end or there's an error. + // Either way, switch to backoff coloring mode. If we're at the end + // it won't have any consequences. 
+ backoff = true; + } + Ok(_) => { + // Otherwise, move on to the next expression + } + } + } + } + } +} + +/// BackoffColoringMode consumes all of the remaining tokens in an infallible way +#[derive(Debug, Copy, Clone)] +pub struct BackoffColoringMode; + +impl ColorSyntax for BackoffColoringMode { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &Self::Input, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Self::Info { + loop { + if token_nodes.at_end() { + break; + } + + let len = shapes.len(); + color_syntax(&SimplestExpression, token_nodes, context, shapes); + + if len == shapes.len() && !token_nodes.at_end() { + // This shouldn't happen, but if it does, a panic is better than an infinite loop + panic!("SimplestExpression failed to consume any tokens, but it's not at the end. This is unexpected\n== token nodes==\n{:#?}\n\n== shapes ==\n{:#?}", token_nodes, shapes); + } + } + } +} + +/// The point of `SimplestExpression` is to serve as an infallible base case for coloring. +/// As a last ditch effort, if we can't find any way to parse the head of the stream as an +/// expression, fall back to simple coloring. 
+#[derive(Debug, Copy, Clone)] +pub struct SimplestExpression; + +impl ColorSyntax for SimplestExpression { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) { + let atom = expand_atom( + token_nodes, + "any token", + context, + ExpansionRule::permissive(), + ); + + match atom { + Err(_) => {} + Ok(atom) => atom.color_tokens(shapes), + } + } +} diff --git a/src/parser/hir/syntax_shape/expression/number.rs b/src/parser/hir/syntax_shape/expression/number.rs index 5b77044a2d..8d3cb048c6 100644 --- a/src/parser/hir/syntax_shape/expression/number.rs +++ b/src/parser/hir/syntax_shape/expression/number.rs @@ -1,4 +1,7 @@ -use crate::parser::hir::syntax_shape::{parse_single_node, ExpandContext, ExpandExpression}; +use crate::parser::hir::syntax_shape::{ + expand_atom, parse_single_node, ExpandContext, ExpandExpression, ExpansionRule, + FallibleColorSyntax, FlatShape, +}; use crate::parser::{ hir, hir::{RawNumber, TokensIterator}, @@ -15,20 +18,9 @@ impl ExpandExpression for NumberShape { token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, ) -> Result { - parse_single_node(token_nodes, "Number", |token, token_tag| { + parse_single_node(token_nodes, "Number", |token, token_tag, err| { Ok(match token { - RawToken::GlobPattern => { - return Err(ShellError::type_error( - "Number", - "glob pattern".to_string().tagged(token_tag), - )) - } - RawToken::Operator(..) => { - return Err(ShellError::type_error( - "Number", - "operator".to_string().tagged(token_tag), - )) - } + RawToken::GlobPattern | RawToken::Operator(..) 
=> return Err(err.error()), RawToken::Variable(tag) if tag.slice(context.source) == "it" => { hir::Expression::it_variable(tag, token_tag) } @@ -38,9 +30,6 @@ impl ExpandExpression for NumberShape { RawToken::Number(number) => { hir::Expression::number(number.to_number(context.source), token_tag) } - RawToken::Size(number, unit) => { - hir::Expression::size(number.to_number(context.source), unit, token_tag) - } RawToken::Bare => hir::Expression::bare(token_tag), RawToken::String(tag) => hir::Expression::string(tag, token_tag), }) @@ -48,6 +37,35 @@ impl ExpandExpression for NumberShape { } } +impl FallibleColorSyntax for NumberShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let atom = token_nodes.spanned(|token_nodes| { + expand_atom(token_nodes, "number", context, ExpansionRule::permissive()) + }); + + let atom = match atom { + Tagged { item: Err(_), tag } => { + shapes.push(FlatShape::Error.tagged(tag)); + return Ok(()); + } + Tagged { item: Ok(atom), .. } => atom, + }; + + atom.color_tokens(shapes); + + Ok(()) + } +} + #[derive(Debug, Copy, Clone)] pub struct IntShape; @@ -57,41 +75,51 @@ impl ExpandExpression for IntShape { token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, ) -> Result { - parse_single_node(token_nodes, "Integer", |token, token_tag| { + parse_single_node(token_nodes, "Integer", |token, token_tag, err| { Ok(match token { - RawToken::GlobPattern => { - return Err(ShellError::type_error( - "Integer", - "glob pattern".to_string().tagged(token_tag), - )) - } - RawToken::Operator(..) => { - return Err(ShellError::type_error( - "Integer", - "operator".to_string().tagged(token_tag), - )) - } + RawToken::GlobPattern | RawToken::Operator(..) 
=> return Err(err.error()), + RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)), RawToken::Variable(tag) if tag.slice(context.source) == "it" => { hir::Expression::it_variable(tag, token_tag) } RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag), - RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)), RawToken::Variable(tag) => hir::Expression::variable(tag, token_tag), RawToken::Number(number @ RawNumber::Int(_)) => { hir::Expression::number(number.to_number(context.source), token_tag) } - token @ RawToken::Number(_) => { - return Err(ShellError::type_error( - "Integer", - token.type_name().tagged(token_tag), - )); - } - RawToken::Size(number, unit) => { - hir::Expression::size(number.to_number(context.source), unit, token_tag) - } + RawToken::Number(_) => return Err(err.error()), RawToken::Bare => hir::Expression::bare(token_tag), RawToken::String(tag) => hir::Expression::string(tag, token_tag), }) }) } } + +impl FallibleColorSyntax for IntShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let atom = token_nodes.spanned(|token_nodes| { + expand_atom(token_nodes, "integer", context, ExpansionRule::permissive()) + }); + + let atom = match atom { + Tagged { item: Err(_), tag } => { + shapes.push(FlatShape::Error.tagged(tag)); + return Ok(()); + } + Tagged { item: Ok(atom), .. 
} => atom, + }; + + atom.color_tokens(shapes); + + Ok(()) + } +} diff --git a/src/parser/hir/syntax_shape/expression/pattern.rs b/src/parser/hir/syntax_shape/expression/pattern.rs index 4105b79b4f..5c863de728 100644 --- a/src/parser/hir/syntax_shape/expression/pattern.rs +++ b/src/parser/hir/syntax_shape/expression/pattern.rs @@ -1,6 +1,7 @@ use crate::parser::hir::syntax_shape::{ - expand_bare, expand_syntax, expression::expand_file_path, parse_single_node, ExpandContext, - ExpandExpression, ExpandSyntax, + expand_atom, expand_bare, expand_syntax, expression::expand_file_path, parse_single_node, + AtomicToken, ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, FallibleColorSyntax, + FlatShape, }; use crate::parser::{hir, hir::TokensIterator, Operator, RawToken, TokenNode}; use crate::prelude::*; @@ -8,6 +9,32 @@ use crate::prelude::*; #[derive(Debug, Copy, Clone)] pub struct PatternShape; +impl FallibleColorSyntax for PatternShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + token_nodes.atomic(|token_nodes| { + let atom = expand_atom(token_nodes, "pattern", context, ExpansionRule::permissive())?; + + match &atom.item { + AtomicToken::GlobPattern { .. } | AtomicToken::Word { .. 
} => { + shapes.push(FlatShape::GlobPattern.tagged(atom.tag)); + Ok(()) + } + + _ => Err(ShellError::type_error("pattern", atom.tagged_type_name())), + } + }) + } +} + impl ExpandExpression for PatternShape { fn expand_expr<'a, 'b>( &self, @@ -23,7 +50,7 @@ impl ExpandExpression for PatternShape { Err(_) => {} } - parse_single_node(token_nodes, "Pattern", |token, token_tag| { + parse_single_node(token_nodes, "Pattern", |token, token_tag, _| { Ok(match token { RawToken::GlobPattern => { return Err(ShellError::unreachable( @@ -44,7 +71,6 @@ impl ExpandExpression for PatternShape { RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag), RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)), RawToken::Number(_) => hir::Expression::bare(token_tag), - RawToken::Size(_, _) => hir::Expression::bare(token_tag), RawToken::String(tag) => hir::Expression::file_path( expand_file_path(tag.slice(context.source), context), diff --git a/src/parser/hir/syntax_shape/expression/string.rs b/src/parser/hir/syntax_shape/expression/string.rs index 6a4973febe..6f33ae5eb1 100644 --- a/src/parser/hir/syntax_shape/expression/string.rs +++ b/src/parser/hir/syntax_shape/expression/string.rs @@ -1,5 +1,6 @@ use crate::parser::hir::syntax_shape::{ - expand_variable, parse_single_node, ExpandContext, ExpandExpression, TestSyntax, + expand_atom, expand_variable, parse_single_node, AtomicToken, ExpandContext, ExpandExpression, + ExpansionRule, FallibleColorSyntax, FlatShape, TestSyntax, }; use crate::parser::hir::tokens_iterator::Peeked; use crate::parser::{hir, hir::TokensIterator, RawToken, TokenNode}; @@ -8,13 +9,43 @@ use crate::prelude::*; #[derive(Debug, Copy, Clone)] pub struct StringShape; +impl FallibleColorSyntax for StringShape { + type Info = (); + type Input = FlatShape; + + fn color_syntax<'a, 'b>( + &self, + input: &FlatShape, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> 
Result<(), ShellError> { + let atom = expand_atom(token_nodes, "string", context, ExpansionRule::permissive()); + + let atom = match atom { + Err(_) => return Ok(()), + Ok(atom) => atom, + }; + + match atom { + Tagged { + item: AtomicToken::String { .. }, + tag, + } => shapes.push((*input).tagged(tag)), + other => other.color_tokens(shapes), + } + + Ok(()) + } +} + impl ExpandExpression for StringShape { fn expand_expr<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, ) -> Result { - parse_single_node(token_nodes, "String", |token, token_tag| { + parse_single_node(token_nodes, "String", |token, token_tag, _| { Ok(match token { RawToken::GlobPattern => { return Err(ShellError::type_error( @@ -32,7 +63,6 @@ impl ExpandExpression for StringShape { RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag), RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)), RawToken::Number(_) => hir::Expression::bare(token_tag), - RawToken::Size(_, _) => hir::Expression::bare(token_tag), RawToken::Bare => hir::Expression::bare(token_tag), RawToken::String(tag) => hir::Expression::string(tag, token_tag), }) diff --git a/src/parser/hir/syntax_shape/expression/unit.rs b/src/parser/hir/syntax_shape/expression/unit.rs index cc3642bda5..65fca1a468 100644 --- a/src/parser/hir/syntax_shape/expression/unit.rs +++ b/src/parser/hir/syntax_shape/expression/unit.rs @@ -1,7 +1,8 @@ -use crate::parser::hir::syntax_shape::{ExpandContext, ExpandExpression}; +use crate::data::meta::Span; +use crate::parser::hir::syntax_shape::{ExpandContext, ExpandSyntax}; use crate::parser::parse::tokens::RawNumber; use crate::parser::parse::unit::Unit; -use crate::parser::{hir, hir::TokensIterator, RawToken, TokenNode}; +use crate::parser::{hir::TokensIterator, RawToken, TokenNode}; use crate::prelude::*; use nom::branch::alt; use nom::bytes::complete::tag; @@ -12,12 +13,14 @@ use nom::IResult; #[derive(Debug, Copy, Clone)] pub 
struct UnitShape; -impl ExpandExpression for UnitShape { - fn expand_expr<'a, 'b>( +impl ExpandSyntax for UnitShape { + type Output = Tagged<(Tagged, Tagged)>; + + fn expand_syntax<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - ) -> Result { + ) -> Result, Tagged)>, ShellError> { let peeked = token_nodes.peek_any().not_eof("unit")?; let tag = match peeked.node { @@ -40,15 +43,12 @@ impl ExpandExpression for UnitShape { Ok((number, unit)) => (number, unit), }; - Ok(hir::Expression::size( - number.to_number(context.source), - unit, - tag, - )) + peeked.commit(); + Ok((number, unit).tagged(tag)) } } -fn unit_size(input: &str, bare_tag: Tag) -> IResult<&str, (Tagged, Unit)> { +fn unit_size(input: &str, bare_tag: Tag) -> IResult<&str, (Tagged, Tagged)> { let (input, digits) = digit1(input)?; let (input, dot) = opt(tag("."))(input)?; @@ -85,5 +85,12 @@ fn unit_size(input: &str, bare_tag: Tag) -> IResult<&str, (Tagged, Un value(Unit::MB, alt((tag("PB"), tag("pb"), tag("Pb")))), )))(input)?; - Ok((input, (number, unit))) + let start_span = number.tag.span.end(); + + let unit_tag = Tag::new( + bare_tag.anchor, + Span::from((start_span, bare_tag.span.end())), + ); + + Ok((input, (number, unit.tagged(unit_tag)))) } diff --git a/src/parser/hir/syntax_shape/expression/variable_path.rs b/src/parser/hir/syntax_shape/expression/variable_path.rs index afea1b1499..a7f17a5971 100644 --- a/src/parser/hir/syntax_shape/expression/variable_path.rs +++ b/src/parser/hir/syntax_shape/expression/variable_path.rs @@ -1,6 +1,8 @@ use crate::parser::hir::syntax_shape::{ - expand_expr, expand_syntax, parse_single_node, AnyExpressionShape, BareShape, ExpandContext, - ExpandExpression, ExpandSyntax, Peeked, SkipSyntax, StringShape, TestSyntax, WhitespaceShape, + color_fallible_syntax, color_fallible_syntax_with, expand_atom, expand_expr, expand_syntax, + parse_single_node, AnyExpressionShape, AtomicToken, BareShape, ExpandContext, ExpandExpression, + 
ExpandSyntax, ExpansionRule, FallibleColorSyntax, FlatShape, Peeked, SkipSyntax, StringShape, + TestSyntax, WhitespaceShape, }; use crate::parser::{hir, hir::Expression, hir::TokensIterator, Operator, RawToken}; use crate::prelude::*; @@ -42,9 +44,81 @@ impl ExpandExpression for VariablePathShape { } } +impl FallibleColorSyntax for VariablePathShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + token_nodes.atomic(|token_nodes| { + // If the head of the token stream is not a variable, fail + color_fallible_syntax(&VariableShape, token_nodes, context, shapes)?; + + loop { + // look for a dot at the head of a stream + let dot = color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Dot, + token_nodes, + context, + shapes, + ); + + // if there's no dot, we're done + match dot { + Err(_) => break, + Ok(_) => {} + } + + // otherwise, look for a member, and if you don't find one, fail + color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?; + } + + Ok(()) + }) + } +} + #[derive(Debug, Copy, Clone)] pub struct PathTailShape; +/// The failure mode of `PathTailShape` is a dot followed by a non-member +impl FallibleColorSyntax for PathTailShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + token_nodes.atomic(|token_nodes| loop { + let result = color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Dot, + token_nodes, + context, + shapes, + ); + + match result { + Err(_) => return Ok(()), + Ok(_) => {} + } + + // If we've seen a dot but not a member, fail + color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?; + }) + } +} + impl ExpandSyntax for PathTailShape { type Output = (Vec>, Tag); fn 
expand_syntax<'a, 'b>( @@ -121,6 +195,63 @@ impl ExpandSyntax for ExpressionContinuationShape { } } +pub enum ContinuationInfo { + Dot, + Infix, +} + +impl FallibleColorSyntax for ExpressionContinuationShape { + type Info = ContinuationInfo; + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result { + token_nodes.atomic(|token_nodes| { + // Try to expand a `.` + let dot = color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Dot, + token_nodes, + context, + shapes, + ); + + match dot { + Ok(_) => { + // we found a dot, so let's keep looking for a member; if no member was found, fail + color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?; + + Ok(ContinuationInfo::Dot) + } + Err(_) => { + let mut new_shapes = vec![]; + let result = token_nodes.atomic(|token_nodes| { + // we didn't find a dot, so let's see if we're looking at an infix. If not found, fail + color_fallible_syntax(&InfixShape, token_nodes, context, &mut new_shapes)?; + + // now that we've seen an infix shape, look for any expression. 
If not found, fail + color_fallible_syntax( + &AnyExpressionShape, + token_nodes, + context, + &mut new_shapes, + )?; + + Ok(ContinuationInfo::Infix) + })?; + shapes.extend(new_shapes); + Ok(result) + } + } + }) + } +} + #[derive(Debug, Copy, Clone)] pub struct VariableShape; @@ -130,7 +261,7 @@ impl ExpandExpression for VariableShape { token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, ) -> Result { - parse_single_node(token_nodes, "variable", |token, token_tag| { + parse_single_node(token_nodes, "variable", |token, token_tag, _| { Ok(match token { RawToken::Variable(tag) => { if tag.slice(context.source) == "it" { @@ -150,6 +281,43 @@ impl ExpandExpression for VariableShape { } } +impl FallibleColorSyntax for VariableShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let atom = expand_atom( + token_nodes, + "variable", + context, + ExpansionRule::permissive(), + ); + + let atom = match atom { + Err(err) => return Err(err), + Ok(atom) => atom, + }; + + match &atom.item { + AtomicToken::Variable { .. } => { + shapes.push(FlatShape::Variable.tagged(atom.tag)); + Ok(()) + } + AtomicToken::ItVariable { .. 
} => { + shapes.push(FlatShape::ItVariable.tagged(atom.tag)); + Ok(()) + } + _ => Err(ShellError::type_error("variable", atom.tagged_type_name())), + } + } +} + #[derive(Debug, Clone, Copy)] pub enum Member { String(/* outer */ Tag, /* inner */ Tag), @@ -272,6 +440,55 @@ pub fn expand_column_path<'a, 'b>( #[derive(Debug, Copy, Clone)] pub struct ColumnPathShape; +impl FallibleColorSyntax for ColumnPathShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + // If there's not even one member shape, fail + color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?; + + loop { + let checkpoint = token_nodes.checkpoint(); + + match color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Dot, + checkpoint.iterator, + context, + shapes, + ) { + Err(_) => { + // we already saw at least one member shape, so return successfully + return Ok(()); + } + + Ok(_) => { + match color_fallible_syntax(&MemberShape, checkpoint.iterator, context, shapes) + { + Err(_) => { + // we saw a dot but not a member (but we saw at least one member), + // so don't commit the dot but return successfully + return Ok(()); + } + + Ok(_) => { + // we saw a dot and a member, so commit it and continue on + checkpoint.commit(); + } + } + } + } + } + } +} + impl ExpandSyntax for ColumnPathShape { type Output = Tagged>; @@ -287,6 +504,43 @@ impl ExpandSyntax for ColumnPathShape { #[derive(Debug, Copy, Clone)] pub struct MemberShape; +impl FallibleColorSyntax for MemberShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let bare = color_fallible_syntax_with( + &BareShape, + &FlatShape::BareMember, + token_nodes, + context, + shapes, + ); + + match bare { + 
Ok(_) => return Ok(()), + Err(_) => { + // If we don't have a bare word, we'll look for a string + } + } + + // Look for a string token. If we don't find one, fail + color_fallible_syntax_with( + &StringShape, + &FlatShape::StringMember, + token_nodes, + context, + shapes, + ) + } +} + impl ExpandSyntax for MemberShape { type Output = Member; @@ -317,6 +571,34 @@ impl ExpandSyntax for MemberShape { #[derive(Debug, Copy, Clone)] pub struct DotShape; +#[derive(Debug, Copy, Clone)] +pub struct ColorableDotShape; + +impl FallibleColorSyntax for ColorableDotShape { + type Info = (); + type Input = FlatShape; + + fn color_syntax<'a, 'b>( + &self, + input: &FlatShape, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let peeked = token_nodes.peek_any().not_eof("dot")?; + + match peeked.node { + node if node.is_dot() => { + peeked.commit(); + shapes.push((*input).tagged(node.tag())); + Ok(()) + } + + other => Err(ShellError::type_error("dot", other.tagged_type_name())), + } + } +} + impl SkipSyntax for DotShape { fn skip<'a, 'b>( &self, @@ -337,7 +619,7 @@ impl ExpandSyntax for DotShape { token_nodes: &'b mut TokensIterator<'a>, _context: &ExpandContext, ) -> Result { - parse_single_node(token_nodes, "dot", |token, token_tag| { + parse_single_node(token_nodes, "dot", |token, token_tag, _| { Ok(match token { RawToken::Operator(Operator::Dot) => token_tag, _ => { @@ -354,6 +636,53 @@ impl ExpandSyntax for DotShape { #[derive(Debug, Copy, Clone)] pub struct InfixShape; +impl FallibleColorSyntax for InfixShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + outer_shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let checkpoint = token_nodes.checkpoint(); + let mut shapes = vec![]; + + // An infix operator must be prefixed by whitespace. 
If no whitespace was found, fail + color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context, &mut shapes)?; + + // Parse the next TokenNode after the whitespace + parse_single_node( + checkpoint.iterator, + "infix operator", + |token, token_tag, _| { + match token { + // If it's an operator (and not `.`), it's a match + RawToken::Operator(operator) if operator != Operator::Dot => { + shapes.push(FlatShape::Operator.tagged(token_tag)); + Ok(()) + } + + // Otherwise, it's not a match + _ => Err(ShellError::type_error( + "infix operator", + token.type_name().tagged(token_tag), + )), + } + }, + )?; + + // An infix operator must be followed by whitespace. If no whitespace was found, fail + color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context, &mut shapes)?; + + outer_shapes.extend(shapes); + checkpoint.commit(); + Ok(()) + } +} + impl ExpandSyntax for InfixShape { type Output = (Tag, Tagged, Tag); @@ -368,8 +697,10 @@ impl ExpandSyntax for InfixShape { let start = expand_syntax(&WhitespaceShape, checkpoint.iterator, context)?; // Parse the next TokenNode after the whitespace - let operator = - parse_single_node(checkpoint.iterator, "infix operator", |token, token_tag| { + let operator = parse_single_node( + checkpoint.iterator, + "infix operator", + |token, token_tag, _| { Ok(match token { // If it's an operator (and not `.`), it's a match RawToken::Operator(operator) if operator != Operator::Dot => { @@ -384,7 +715,8 @@ impl ExpandSyntax for InfixShape { )) } }) - })?; + }, + )?; // An infix operator must be followed by whitespace let end = expand_syntax(&WhitespaceShape, checkpoint.iterator, context)?; diff --git a/src/parser/hir/syntax_shape/flat_shape.rs b/src/parser/hir/syntax_shape/flat_shape.rs new file mode 100644 index 0000000000..48e867199e --- /dev/null +++ b/src/parser/hir/syntax_shape/flat_shape.rs @@ -0,0 +1,95 @@ +use crate::parser::{Delimiter, Flag, FlagKind, Operator, RawNumber, RawToken, TokenNode}; +use crate::{Tag, Tagged, 
TaggedItem, Text}; + +#[derive(Debug, Copy, Clone)] +pub enum FlatShape { + OpenDelimiter(Delimiter), + CloseDelimiter(Delimiter), + ItVariable, + Variable, + Operator, + Dot, + InternalCommand, + ExternalCommand, + ExternalWord, + BareMember, + StringMember, + String, + Path, + Word, + Pipe, + GlobPattern, + Flag, + ShorthandFlag, + Int, + Decimal, + Whitespace, + Error, + Size { number: Tag, unit: Tag }, +} + +impl FlatShape { + pub fn from(token: &TokenNode, source: &Text, shapes: &mut Vec>) -> () { + match token { + TokenNode::Token(token) => match token.item { + RawToken::Number(RawNumber::Int(_)) => { + shapes.push(FlatShape::Int.tagged(token.tag)) + } + RawToken::Number(RawNumber::Decimal(_)) => { + shapes.push(FlatShape::Decimal.tagged(token.tag)) + } + RawToken::Operator(Operator::Dot) => shapes.push(FlatShape::Dot.tagged(token.tag)), + RawToken::Operator(_) => shapes.push(FlatShape::Operator.tagged(token.tag)), + RawToken::String(_) => shapes.push(FlatShape::String.tagged(token.tag)), + RawToken::Variable(v) if v.slice(source) == "it" => { + shapes.push(FlatShape::ItVariable.tagged(token.tag)) + } + RawToken::Variable(_) => shapes.push(FlatShape::Variable.tagged(token.tag)), + RawToken::ExternalCommand(_) => { + shapes.push(FlatShape::ExternalCommand.tagged(token.tag)) + } + RawToken::ExternalWord => shapes.push(FlatShape::ExternalWord.tagged(token.tag)), + RawToken::GlobPattern => shapes.push(FlatShape::GlobPattern.tagged(token.tag)), + RawToken::Bare => shapes.push(FlatShape::Word.tagged(token.tag)), + }, + TokenNode::Call(_) => unimplemented!(), + TokenNode::Nodes(nodes) => { + for node in &nodes.item { + FlatShape::from(node, source, shapes); + } + } + TokenNode::Delimited(v) => { + shapes.push(FlatShape::OpenDelimiter(v.item.delimiter).tagged(v.item.tags.0)); + for token in &v.item.children { + FlatShape::from(token, source, shapes); + } + shapes.push(FlatShape::CloseDelimiter(v.item.delimiter).tagged(v.item.tags.1)); + } + 
TokenNode::Pipeline(pipeline) => { + for part in &pipeline.parts { + if let Some(_) = part.pipe { + shapes.push(FlatShape::Pipe.tagged(part.tag)); + } + } + } + TokenNode::Flag(Tagged { + item: + Flag { + kind: FlagKind::Longhand, + .. + }, + tag, + }) => shapes.push(FlatShape::Flag.tagged(tag)), + TokenNode::Flag(Tagged { + item: + Flag { + kind: FlagKind::Shorthand, + .. + }, + tag, + }) => shapes.push(FlatShape::ShorthandFlag.tagged(tag)), + TokenNode::Whitespace(_) => shapes.push(FlatShape::Whitespace.tagged(token.tag())), + TokenNode::Error(v) => shapes.push(FlatShape::Error.tagged(v.tag)), + } + } +} diff --git a/src/parser/hir/tokens_iterator.rs b/src/parser/hir/tokens_iterator.rs index c0dd9c50fd..f597c850bd 100644 --- a/src/parser/hir/tokens_iterator.rs +++ b/src/parser/hir/tokens_iterator.rs @@ -3,16 +3,13 @@ pub(crate) mod debug; use crate::errors::ShellError; use crate::parser::TokenNode; use crate::{Tag, Tagged, TaggedItem}; -use derive_new::new; -#[derive(Debug, new)] -pub struct TokensIterator<'a> { - tokens: &'a [TokenNode], +#[derive(Debug)] +pub struct TokensIterator<'content> { + tokens: &'content [TokenNode], tag: Tag, skip_ws: bool, - #[new(default)] index: usize, - #[new(default)] seen: indexmap::IndexSet, } @@ -124,11 +121,41 @@ pub fn peek_error( } impl<'content> TokensIterator<'content> { - #[cfg(test)] + pub fn new(items: &'content [TokenNode], tag: Tag, skip_ws: bool) -> TokensIterator<'content> { + TokensIterator { + tokens: items, + tag, + skip_ws, + index: 0, + seen: indexmap::IndexSet::new(), + } + } + + pub fn anchor(&self) -> uuid::Uuid { + self.tag.anchor + } + pub fn all(tokens: &'content [TokenNode], tag: Tag) -> TokensIterator<'content> { TokensIterator::new(tokens, tag, false) } + pub fn len(&self) -> usize { + self.tokens.len() + } + + pub fn spanned( + &mut self, + block: impl FnOnce(&mut TokensIterator<'content>) -> T, + ) -> Tagged { + let start = self.tag_at_cursor(); + + let result = block(self); + + let end = 
self.tag_at_cursor(); + + result.tagged(start.until(end)) + } + /// Use a checkpoint when you need to peek more than one token ahead, but can't be sure /// that you'll succeed. pub fn checkpoint<'me>(&'me mut self) -> Checkpoint<'content, 'me> { @@ -143,8 +170,26 @@ impl<'content> TokensIterator<'content> { } } - pub fn anchor(&self) -> uuid::Uuid { - self.tag.anchor + /// Use a checkpoint when you need to peek more than one token ahead, but can't be sure + /// that you'll succeed. + pub fn atomic<'me, T>( + &'me mut self, + block: impl FnOnce(&mut TokensIterator<'content>) -> Result, + ) -> Result { + let index = self.index; + let seen = self.seen.clone(); + + let checkpoint = Checkpoint { + iterator: self, + index, + seen, + committed: false, + }; + + let value = block(checkpoint.iterator)?; + + checkpoint.commit(); + return Ok(value); } fn eof_tag(&self) -> Tag { @@ -160,6 +205,15 @@ impl<'content> TokensIterator<'content> { } } + pub fn tag_at_cursor(&mut self) -> Tag { + let next = self.peek_any(); + + match next.node { + None => self.eof_tag(), + Some(node) => node.tag(), + } + } + pub fn remove(&mut self, position: usize) { self.seen.insert(position); } @@ -231,6 +285,26 @@ impl<'content> TokensIterator<'content> { start_next(self, false) } + // Peek the next token, including whitespace, but not EOF + pub fn peek_any_token<'me, T>( + &'me mut self, + block: impl FnOnce(&'content TokenNode) -> Result, + ) -> Result { + let peeked = start_next(self, false); + let peeked = peeked.not_eof("invariant"); + + match peeked { + Err(err) => return Err(err), + Ok(peeked) => match block(peeked.node) { + Err(err) => return Err(err), + Ok(val) => { + peeked.commit(); + return Ok(val); + } + }, + } + } + fn commit(&mut self, from: usize, to: usize) { for index in from..to { self.seen.insert(index); @@ -239,6 +313,10 @@ impl<'content> TokensIterator<'content> { self.index = to; } + pub fn pos(&self, skip_ws: bool) -> Option { + peek_pos(self, skip_ws) + } + pub fn 
debug_remaining(&self) -> Vec { let mut tokens = self.clone(); tokens.restart(); @@ -246,18 +324,18 @@ impl<'content> TokensIterator<'content> { } } -impl<'a> Iterator for TokensIterator<'a> { - type Item = &'a TokenNode; +impl<'content> Iterator for TokensIterator<'content> { + type Item = &'content TokenNode; - fn next(&mut self) -> Option<&'a TokenNode> { + fn next(&mut self) -> Option<&'content TokenNode> { next(self, self.skip_ws) } } fn peek<'content, 'me>( - iterator: &TokensIterator<'content>, + iterator: &'me TokensIterator<'content>, skip_ws: bool, -) -> Option<&'content TokenNode> { +) -> Option<&'me TokenNode> { let mut to = iterator.index; loop { @@ -287,6 +365,37 @@ fn peek<'content, 'me>( } } +fn peek_pos<'content, 'me>( + iterator: &'me TokensIterator<'content>, + skip_ws: bool, +) -> Option { + let mut to = iterator.index; + + loop { + if to >= iterator.tokens.len() { + return None; + } + + if iterator.seen.contains(&to) { + to += 1; + continue; + } + + if to >= iterator.tokens.len() { + return None; + } + + let node = &iterator.tokens[to]; + + match node { + TokenNode::Whitespace(_) if skip_ws => { + to += 1; + } + _ => return Some(to), + } + } +} + fn start_next<'content, 'me>( iterator: &'me mut TokensIterator<'content>, skip_ws: bool, @@ -337,7 +446,10 @@ fn start_next<'content, 'me>( } } -fn next<'a>(iterator: &mut TokensIterator<'a>, skip_ws: bool) -> Option<&'a TokenNode> { +fn next<'me, 'content>( + iterator: &'me mut TokensIterator<'content>, + skip_ws: bool, +) -> Option<&'content TokenNode> { loop { if iterator.index >= iterator.tokens.len() { return None; diff --git a/src/parser/parse/flag.rs b/src/parser/parse/flag.rs index 09d1e86337..b8995305d2 100644 --- a/src/parser/parse/flag.rs +++ b/src/parser/parse/flag.rs @@ -1,4 +1,5 @@ -use crate::Tag; +use crate::parser::hir::syntax_shape::flat_shape::FlatShape; +use crate::{Tag, Tagged, TaggedItem}; use derive_new::new; use getset::Getters; use serde::{Deserialize, Serialize}; @@ -12,6 
+13,15 @@ pub enum FlagKind { #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Getters, new)] #[get = "pub(crate)"] pub struct Flag { - kind: FlagKind, - name: Tag, + pub(crate) kind: FlagKind, + pub(crate) name: Tag, +} + +impl Tagged { + pub fn color(&self) -> Tagged { + match self.item.kind { + FlagKind::Longhand => FlatShape::Flag.tagged(self.tag), + FlagKind::Shorthand => FlatShape::ShorthandFlag.tagged(self.tag), + } + } } diff --git a/src/parser/parse/parser.rs b/src/parser/parse/parser.rs index 93ba043ba1..73833f7be5 100644 --- a/src/parser/parse/parser.rs +++ b/src/parser/parse/parser.rs @@ -189,7 +189,7 @@ pub fn raw_number(input: NomSpan) -> IResult> { match input.fragment.chars().next() { None => return Ok((input, RawNumber::int((start, input.offset, input.extra)))), Some('.') => (), - Some(other) if other.is_whitespace() => { + other if is_boundary(other) => { return Ok((input, RawNumber::int((start, input.offset, input.extra)))) } _ => { @@ -215,16 +215,14 @@ pub fn raw_number(input: NomSpan) -> IResult> { let next = input.fragment.chars().next(); - if let Some(next) = next { - if !next.is_whitespace() { - return Err(nom::Err::Error(nom::error::make_error( - input, - nom::error::ErrorKind::Tag, - ))); - } + if is_boundary(next) { + Ok((input, RawNumber::decimal((start, end, input.extra)))) + } else { + Err(nom::Err::Error(nom::error::make_error( + input, + nom::error::ErrorKind::Tag, + ))) } - - Ok((input, RawNumber::decimal((start, end, input.extra)))) } #[tracable_parser] @@ -476,11 +474,14 @@ pub fn whitespace(input: NomSpan) -> IResult { )) } -pub fn delimited(input: NomSpan, delimiter: Delimiter) -> IResult>> { +pub fn delimited( + input: NomSpan, + delimiter: Delimiter, +) -> IResult>)> { let left = input.offset; - let (input, _) = char(delimiter.open())(input)?; + let (input, open_tag) = tag(delimiter.open())(input)?; let (input, inner_items) = opt(spaced_token_list)(input)?; - let (input, _) = char(delimiter.close())(input)?; 
+ let (input, close_tag) = tag(delimiter.close())(input)?; let right = input.offset; let mut items = vec![]; @@ -489,36 +490,43 @@ pub fn delimited(input: NomSpan, delimiter: Delimiter) -> IResult IResult { - let (input, tokens) = delimited(input, Delimiter::Paren)?; + let (input, (left, right, tokens)) = delimited(input, Delimiter::Paren)?; Ok(( input, - TokenTreeBuilder::tagged_parens(tokens.item, tokens.tag), + TokenTreeBuilder::tagged_parens(tokens.item, (left, right), tokens.tag), )) } #[tracable_parser] pub fn delimited_square(input: NomSpan) -> IResult { - let (input, tokens) = delimited(input, Delimiter::Square)?; + let (input, (left, right, tokens)) = delimited(input, Delimiter::Square)?; Ok(( input, - TokenTreeBuilder::tagged_square(tokens.item, tokens.tag), + TokenTreeBuilder::tagged_square(tokens.item, (left, right), tokens.tag), )) } #[tracable_parser] pub fn delimited_brace(input: NomSpan) -> IResult { - let (input, tokens) = delimited(input, Delimiter::Brace)?; + let (input, (left, right, tokens)) = delimited(input, Delimiter::Brace)?; Ok(( input, - TokenTreeBuilder::tagged_brace(tokens.item, tokens.tag), + TokenTreeBuilder::tagged_square(tokens.item, (left, right), tokens.tag), )) } @@ -1246,7 +1254,10 @@ mod tests { left: usize, right: usize, ) -> TokenNode { - let node = DelimitedNode::new(*delimiter, children); + let start = Tag::for_char(left, delimiter.tag.anchor); + let end = Tag::for_char(right, delimiter.tag.anchor); + + let node = DelimitedNode::new(delimiter.item, (start, end), children); let spanned = node.tagged((left, right, delimiter.tag.anchor)); TokenNode::Delimited(spanned) } diff --git a/src/parser/parse/token_tree.rs b/src/parser/parse/token_tree.rs index 8cbb28264b..85961d1dab 100644 --- a/src/parser/parse/token_tree.rs +++ b/src/parser/parse/token_tree.rs @@ -1,5 +1,5 @@ use crate::errors::ShellError; -use crate::parser::parse::{call_node::*, flag::*, pipeline::*, tokens::*}; +use crate::parser::parse::{call_node::*, flag::*, 
operator::*, pipeline::*, tokens::*}; use crate::prelude::*; use crate::traits::ToDebug; use crate::{Tag, Tagged, Text}; @@ -17,10 +17,9 @@ pub enum TokenNode { Delimited(Tagged), Pipeline(Tagged), Flag(Tagged), - Member(Tag), Whitespace(Tag), - Error(Tagged>), + Error(Tagged), } impl ToDebug for TokenNode { @@ -78,7 +77,7 @@ impl fmt::Debug for DebugTokenNode<'_> { ) } TokenNode::Pipeline(pipeline) => write!(f, "{}", pipeline.debug(self.source)), - TokenNode::Error(s) => write!(f, " for {:?}", s.tag().slice(self.source)), + TokenNode::Error(_) => write!(f, ""), rest => write!(f, "{}", rest.tag().slice(self.source)), } } @@ -99,9 +98,8 @@ impl TokenNode { TokenNode::Delimited(s) => s.tag(), TokenNode::Pipeline(s) => s.tag(), TokenNode::Flag(s) => s.tag(), - TokenNode::Member(s) => *s, TokenNode::Whitespace(s) => *s, - TokenNode::Error(s) => s.tag(), + TokenNode::Error(s) => return s.tag, } } @@ -113,7 +111,6 @@ impl TokenNode { TokenNode::Delimited(d) => d.type_name(), TokenNode::Pipeline(_) => "pipeline", TokenNode::Flag(_) => "flag", - TokenNode::Member(_) => "member", TokenNode::Whitespace(_) => "whitespace", TokenNode::Error(_) => "error", } @@ -155,16 +152,37 @@ impl TokenNode { } } - pub fn as_block(&self) -> Option> { + pub fn is_pattern(&self) -> bool { + match self { + TokenNode::Token(Tagged { + item: RawToken::GlobPattern, + .. + }) => true, + _ => false, + } + } + + pub fn is_dot(&self) -> bool { + match self { + TokenNode::Token(Tagged { + item: RawToken::Operator(Operator::Dot), + .. 
+ }) => true, + _ => false, + } + } + + pub fn as_block(&self) -> Option<(Tagged<&[TokenNode]>, (Tag, Tag))> { match self { TokenNode::Delimited(Tagged { item: DelimitedNode { delimiter, children, + tags, }, tag, - }) if *delimiter == Delimiter::Brace => Some((&children[..]).tagged(tag)), + }) if *delimiter == Delimiter::Brace => Some(((&children[..]).tagged(tag), *tags)), _ => None, } } @@ -203,7 +221,7 @@ impl TokenNode { pub fn as_pipeline(&self) -> Result { match self { TokenNode::Pipeline(Tagged { item, .. }) => Ok(item.clone()), - _ => Err(ShellError::string("unimplemented")), + _ => Err(ShellError::unimplemented("unimplemented")), } } @@ -259,6 +277,7 @@ impl TokenNode { #[get = "pub(crate)"] pub struct DelimitedNode { pub(crate) delimiter: Delimiter, + pub(crate) tags: (Tag, Tag), pub(crate) children: Vec, } @@ -280,19 +299,19 @@ pub enum Delimiter { } impl Delimiter { - pub(crate) fn open(&self) -> char { + pub(crate) fn open(&self) -> &'static str { match self { - Delimiter::Paren => '(', - Delimiter::Brace => '{', - Delimiter::Square => '[', + Delimiter::Paren => "(", + Delimiter::Brace => "{", + Delimiter::Square => "[", } } - pub(crate) fn close(&self) -> char { + pub(crate) fn close(&self) -> &'static str { match self { - Delimiter::Paren => ')', - Delimiter::Brace => '}', - Delimiter::Square => ']', + Delimiter::Paren => ")", + Delimiter::Brace => "}", + Delimiter::Square => "]", } } } diff --git a/src/parser/parse/token_tree_builder.rs b/src/parser/parse/token_tree_builder.rs index 67298987a4..549462a979 100644 --- a/src/parser/parse/token_tree_builder.rs +++ b/src/parser/parse/token_tree_builder.rs @@ -5,7 +5,6 @@ use crate::parser::parse::operator::Operator; use crate::parser::parse::pipeline::{Pipeline, PipelineElement}; use crate::parser::parse::token_tree::{DelimitedNode, Delimiter, TokenNode}; use crate::parser::parse::tokens::{RawNumber, RawToken}; -use crate::parser::parse::unit::Unit; use crate::parser::CallNode; use derive_new::new; use 
uuid::Uuid; @@ -227,31 +226,6 @@ impl TokenTreeBuilder { TokenNode::Token(RawToken::Number(input.into()).tagged(tag.into())) } - pub fn size(int: impl Into, unit: impl Into) -> CurriedToken { - let int = int.into(); - let unit = unit.into(); - - Box::new(move |b| { - let (start_int, end_int) = b.consume(&int.to_string()); - let (_, end_unit) = b.consume(unit.as_str()); - b.pos = end_unit; - - TokenTreeBuilder::tagged_size( - (RawNumber::Int((start_int, end_int, b.anchor).into()), unit), - (start_int, end_unit, b.anchor), - ) - }) - } - - pub fn tagged_size( - input: (impl Into, impl Into), - tag: impl Into, - ) -> TokenNode { - let (int, unit) = (input.0.into(), input.1.into()); - - TokenNode::Token(RawToken::Size(int, unit).tagged(tag.into())) - } - pub fn var(input: impl Into) -> CurriedToken { let input = input.into(); @@ -297,19 +271,6 @@ impl TokenTreeBuilder { TokenNode::Flag(Flag::new(FlagKind::Shorthand, input.into()).tagged(tag.into())) } - pub fn member(input: impl Into) -> CurriedToken { - let input = input.into(); - - Box::new(move |b| { - let (start, end) = b.consume(&input); - TokenTreeBuilder::tagged_member((start, end, b.anchor)) - }) - } - - pub fn tagged_member(tag: impl Into) -> TokenNode { - TokenNode::Member(tag.into()) - } - pub fn call(head: CurriedToken, input: Vec) -> CurriedCall { Box::new(move |b| { let start = b.pos; @@ -340,58 +301,79 @@ impl TokenTreeBuilder { CallNode::new(Box::new(head), tail).tagged(tag.into()) } + fn consume_delimiter( + &mut self, + input: Vec, + _open: &str, + _close: &str, + ) -> (Tag, Tag, Tag, Vec) { + let (start_open_paren, end_open_paren) = self.consume("("); + let mut output = vec![]; + for item in input { + output.push(item(self)); + } + + let (start_close_paren, end_close_paren) = self.consume(")"); + + let open = Tag::from((start_open_paren, end_open_paren, self.anchor)); + let close = Tag::from((start_close_paren, end_close_paren, self.anchor)); + let whole = Tag::from((start_open_paren, 
end_close_paren, self.anchor)); + + (open, close, whole, output) + } + pub fn parens(input: Vec) -> CurriedToken { Box::new(move |b| { - let (start, _) = b.consume("("); - let mut output = vec![]; - for item in input { - output.push(item(b)); - } + let (open, close, whole, output) = b.consume_delimiter(input, "(", ")"); - let (_, end) = b.consume(")"); - - TokenTreeBuilder::tagged_parens(output, (start, end, b.anchor)) + TokenTreeBuilder::tagged_parens(output, (open, close), whole) }) } - pub fn tagged_parens(input: impl Into>, tag: impl Into) -> TokenNode { - TokenNode::Delimited(DelimitedNode::new(Delimiter::Paren, input.into()).tagged(tag.into())) + pub fn tagged_parens( + input: impl Into>, + tags: (Tag, Tag), + tag: impl Into, + ) -> TokenNode { + TokenNode::Delimited( + DelimitedNode::new(Delimiter::Paren, tags, input.into()).tagged(tag.into()), + ) } pub fn square(input: Vec) -> CurriedToken { Box::new(move |b| { - let (start, _) = b.consume("["); - let mut output = vec![]; - for item in input { - output.push(item(b)); - } + let (open, close, whole, tokens) = b.consume_delimiter(input, "[", "]"); - let (_, end) = b.consume("]"); - - TokenTreeBuilder::tagged_square(output, (start, end, b.anchor)) + TokenTreeBuilder::tagged_square(tokens, (open, close), whole) }) } - pub fn tagged_square(input: impl Into>, tag: impl Into) -> TokenNode { - TokenNode::Delimited(DelimitedNode::new(Delimiter::Square, input.into()).tagged(tag.into())) + pub fn tagged_square( + input: impl Into>, + tags: (Tag, Tag), + tag: impl Into, + ) -> TokenNode { + TokenNode::Delimited( + DelimitedNode::new(Delimiter::Square, tags, input.into()).tagged(tag.into()), + ) } pub fn braced(input: Vec) -> CurriedToken { Box::new(move |b| { - let (start, _) = b.consume("{ "); - let mut output = vec![]; - for item in input { - output.push(item(b)); - } + let (open, close, whole, tokens) = b.consume_delimiter(input, "{", "}"); - let (_, end) = b.consume(" }"); - - TokenTreeBuilder::tagged_brace(output, 
(start, end, b.anchor)) + TokenTreeBuilder::tagged_brace(tokens, (open, close), whole) }) } - pub fn tagged_brace(input: impl Into>, tag: impl Into) -> TokenNode { - TokenNode::Delimited(DelimitedNode::new(Delimiter::Brace, input.into()).tagged(tag.into())) + pub fn tagged_brace( + input: impl Into>, + tags: (Tag, Tag), + tag: impl Into, + ) -> TokenNode { + TokenNode::Delimited( + DelimitedNode::new(Delimiter::Brace, tags, input.into()).tagged(tag.into()), + ) } pub fn sp() -> CurriedToken { diff --git a/src/parser/parse/tokens.rs b/src/parser/parse/tokens.rs index 77a856af3f..41bdfcebd6 100644 --- a/src/parser/parse/tokens.rs +++ b/src/parser/parse/tokens.rs @@ -1,4 +1,3 @@ -use crate::parser::parse::unit::*; use crate::parser::Operator; use crate::prelude::*; use crate::{Tagged, Text}; @@ -9,7 +8,6 @@ use std::str::FromStr; pub enum RawToken { Number(RawNumber), Operator(Operator), - Size(RawNumber, Unit), String(Tag), Variable(Tag), ExternalCommand(Tag), @@ -18,6 +16,21 @@ pub enum RawToken { Bare, } +impl RawToken { + pub fn type_name(&self) -> &'static str { + match self { + RawToken::Number(_) => "Number", + RawToken::Operator(..) => "operator", + RawToken::String(_) => "String", + RawToken::Variable(_) => "variable", + RawToken::ExternalCommand(_) => "external command", + RawToken::ExternalWord => "external word", + RawToken::GlobPattern => "glob pattern", + RawToken::Bare => "String", + } + } +} + #[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] pub enum RawNumber { Int(Tag), @@ -47,22 +60,6 @@ impl RawNumber { } } -impl RawToken { - pub fn type_name(&self) -> &'static str { - match self { - RawToken::Number(_) => "Number", - RawToken::Operator(..) => "operator", - RawToken::Size(..) 
=> "Size", - RawToken::String(_) => "String", - RawToken::Variable(_) => "variable", - RawToken::ExternalCommand(_) => "external command", - RawToken::ExternalWord => "external word", - RawToken::GlobPattern => "glob pattern", - RawToken::Bare => "String", - } - } -} - pub type Token = Tagged; impl Token { @@ -72,6 +69,76 @@ impl Token { source, } } + + pub fn extract_number(&self) -> Option> { + match self.item { + RawToken::Number(number) => Some((number).tagged(self.tag)), + _ => None, + } + } + + pub fn extract_int(&self) -> Option<(Tag, Tag)> { + match self.item { + RawToken::Number(RawNumber::Int(int)) => Some((int, self.tag)), + _ => None, + } + } + + pub fn extract_decimal(&self) -> Option<(Tag, Tag)> { + match self.item { + RawToken::Number(RawNumber::Decimal(decimal)) => Some((decimal, self.tag)), + _ => None, + } + } + + pub fn extract_operator(&self) -> Option> { + match self.item { + RawToken::Operator(operator) => Some(operator.tagged(self.tag)), + _ => None, + } + } + + pub fn extract_string(&self) -> Option<(Tag, Tag)> { + match self.item { + RawToken::String(tag) => Some((tag, self.tag)), + _ => None, + } + } + + pub fn extract_variable(&self) -> Option<(Tag, Tag)> { + match self.item { + RawToken::Variable(tag) => Some((tag, self.tag)), + _ => None, + } + } + + pub fn extract_external_command(&self) -> Option<(Tag, Tag)> { + match self.item { + RawToken::ExternalCommand(tag) => Some((tag, self.tag)), + _ => None, + } + } + + pub fn extract_external_word(&self) -> Option { + match self.item { + RawToken::ExternalWord => Some(self.tag), + _ => None, + } + } + + pub fn extract_glob_pattern(&self) -> Option { + match self.item { + RawToken::GlobPattern => Some(self.tag), + _ => None, + } + } + + pub fn extract_bare(&self) -> Option { + match self.item { + RawToken::Bare => Some(self.tag), + _ => None, + } + } } pub struct DebugToken<'a> { diff --git a/src/parser/parse_command.rs b/src/parser/parse_command.rs index d383689fd9..603ff2956d 100644 --- 
a/src/parser/parse_command.rs +++ b/src/parser/parse_command.rs @@ -1,5 +1,8 @@ use crate::errors::{ArgumentError, ShellError}; -use crate::parser::hir::syntax_shape::{expand_expr, spaced}; +use crate::parser::hir::syntax_shape::{ + color_fallible_syntax, color_syntax, expand_expr, flat_shape::FlatShape, spaced, + BackoffColoringMode, ColorSyntax, MaybeSpaceShape, +}; use crate::parser::registry::{NamedType, PositionalType, Signature}; use crate::parser::TokensIterator; use crate::parser::{ @@ -153,6 +156,232 @@ pub fn parse_command_tail( Ok(Some((positional, named))) } +#[derive(Debug)] +struct ColoringArgs { + vec: Vec>>>, +} + +impl ColoringArgs { + fn new(len: usize) -> ColoringArgs { + let vec = vec![None; len]; + ColoringArgs { vec } + } + + fn insert(&mut self, pos: usize, shapes: Vec>) { + self.vec[pos] = Some(shapes); + } + + fn spread_shapes(self, shapes: &mut Vec>) { + for item in self.vec { + match item { + None => {} + Some(vec) => { + shapes.extend(vec); + } + } + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct CommandTailShape; + +impl ColorSyntax for CommandTailShape { + type Info = (); + type Input = Signature; + + fn color_syntax<'a, 'b>( + &self, + signature: &Signature, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Self::Info { + let mut args = ColoringArgs::new(token_nodes.len()); + trace_remaining("nodes", token_nodes.clone(), context.source()); + + for (name, kind) in &signature.named { + trace!(target: "nu::color_syntax", "looking for {} : {:?}", name, kind); + + match kind { + NamedType::Switch => { + match token_nodes.extract(|t| t.as_flag(name, context.source())) { + Some((pos, flag)) => args.insert(pos, vec![flag.color()]), + None => {} + } + } + NamedType::Mandatory(syntax_type) => { + match extract_mandatory( + signature, + name, + token_nodes, + context.source(), + Tag::unknown(), + ) { + Err(_) => { + // The mandatory flag didn't exist at all, so there's nothing to color + } + 
Ok((pos, flag)) => { + let mut shapes = vec![flag.color()]; + token_nodes.move_to(pos); + + if token_nodes.at_end() { + args.insert(pos, shapes); + token_nodes.restart(); + continue; + } + + // We can live with unmatched syntax after a mandatory flag + let _ = token_nodes.atomic(|token_nodes| { + color_syntax(&MaybeSpaceShape, token_nodes, context, &mut shapes); + + // If the part after a mandatory flag isn't present, that's ok, but we + // should roll back any whitespace we chomped + color_fallible_syntax( + syntax_type, + token_nodes, + context, + &mut shapes, + ) + }); + + args.insert(pos, shapes); + token_nodes.restart(); + } + } + } + NamedType::Optional(syntax_type) => { + match extract_optional(name, token_nodes, context.source()) { + Err(_) => { + // The optional flag didn't exist at all, so there's nothing to color + } + Ok(Some((pos, flag))) => { + let mut shapes = vec![flag.color()]; + token_nodes.move_to(pos); + + if token_nodes.at_end() { + args.insert(pos, shapes); + token_nodes.restart(); + continue; + } + + // We can live with unmatched syntax after an optional flag + let _ = token_nodes.atomic(|token_nodes| { + color_syntax(&MaybeSpaceShape, token_nodes, context, &mut shapes); + + // If the part after a mandatory flag isn't present, that's ok, but we + // should roll back any whitespace we chomped + color_fallible_syntax( + syntax_type, + token_nodes, + context, + &mut shapes, + ) + }); + + args.insert(pos, shapes); + token_nodes.restart(); + } + + Ok(None) => { + token_nodes.restart(); + } + } + } + }; + } + + trace_remaining("after named", token_nodes.clone(), context.source()); + + for arg in &signature.positional { + trace!("Processing positional {:?}", arg); + + match arg { + PositionalType::Mandatory(..) => { + if token_nodes.at_end() { + break; + } + } + + PositionalType::Optional(..) 
=> { + if token_nodes.at_end() { + break; + } + } + } + + let mut shapes = vec![]; + let pos = token_nodes.pos(false); + + match pos { + None => break, + Some(pos) => { + // We can live with an unmatched positional argument. Hopefully it will be + // matched by a future token + let _ = token_nodes.atomic(|token_nodes| { + color_syntax(&MaybeSpaceShape, token_nodes, context, &mut shapes); + + // If no match, we should roll back any whitespace we chomped + color_fallible_syntax( + &arg.syntax_type(), + token_nodes, + context, + &mut shapes, + )?; + + args.insert(pos, shapes); + + Ok(()) + }); + } + } + } + + trace_remaining("after positional", token_nodes.clone(), context.source()); + + if let Some(syntax_type) = signature.rest_positional { + loop { + if token_nodes.at_end_possible_ws() { + break; + } + + let pos = token_nodes.pos(false); + + match pos { + None => break, + Some(pos) => { + let mut shapes = vec![]; + + // If any arguments don't match, we'll fall back to backoff coloring mode + let result = token_nodes.atomic(|token_nodes| { + color_syntax(&MaybeSpaceShape, token_nodes, context, &mut shapes); + + // If no match, we should roll back any whitespace we chomped + color_fallible_syntax(&syntax_type, token_nodes, context, &mut shapes)?; + + args.insert(pos, shapes); + + Ok(()) + }); + + match result { + Err(_) => break, + Ok(_) => continue, + } + } + } + } + } + + args.spread_shapes(shapes); + + // Consume any remaining tokens with backoff coloring mode + color_syntax(&BackoffColoringMode, token_nodes, context, shapes); + + shapes.sort_by(|a, b| a.tag.span.start().cmp(&b.tag.span.start())); + } +} + fn extract_switch(name: &str, tokens: &mut hir::TokensIterator<'_>, source: &Text) -> Option { tokens .extract(|t| t.as_flag(name, source)) @@ -200,6 +429,7 @@ fn extract_optional( pub fn trace_remaining(desc: &'static str, tail: hir::TokensIterator<'_>, source: &Text) { trace!( + target: "nu::expand_args", "{} = {:?}", desc, itertools::join( diff --git 
a/src/plugin.rs b/src/plugin.rs index afd9871108..004e937fe8 100644 --- a/src/plugin.rs +++ b/src/plugin.rs @@ -32,7 +32,7 @@ pub fn serve_plugin(plugin: &mut dyn Plugin) { let input = match input { Some(arg) => std::fs::read_to_string(arg), None => { - send_response(ShellError::string(format!("No input given."))); + send_response(ShellError::untagged_runtime_error("No input given.")); return; } }; @@ -64,7 +64,7 @@ pub fn serve_plugin(plugin: &mut dyn Plugin) { return; } e => { - send_response(ShellError::string(format!( + send_response(ShellError::untagged_runtime_error(format!( "Could not handle plugin message: {} {:?}", input, e ))); @@ -102,7 +102,7 @@ pub fn serve_plugin(plugin: &mut dyn Plugin) { break; } e => { - send_response(ShellError::string(format!( + send_response(ShellError::untagged_runtime_error(format!( "Could not handle plugin message: {} {:?}", input, e ))); @@ -111,7 +111,7 @@ pub fn serve_plugin(plugin: &mut dyn Plugin) { } } e => { - send_response(ShellError::string(format!( + send_response(ShellError::untagged_runtime_error(format!( "Could not handle plugin message: {:?}", e, ))); diff --git a/src/plugins/add.rs b/src/plugins/add.rs index 997400d67f..6fc034226c 100644 --- a/src/plugins/add.rs +++ b/src/plugins/add.rs @@ -1,7 +1,7 @@ use itertools::Itertools; use nu::{ serve_plugin, CallInfo, Plugin, ReturnSuccess, ReturnValue, ShellError, Signature, SyntaxShape, - Tagged, Value, + Tagged, TaggedItem, Value, }; pub type ColumnPath = Vec>; @@ -25,21 +25,27 @@ impl Add { Some(f) => match obj.insert_data_at_column_path(value_tag, &f, v) { Some(v) => return Ok(v), None => { - return Err(ShellError::string(format!( - "add could not find place to insert field {:?} {}", - obj, - f.iter().map(|i| &i.item).join(".") - ))) + return Err(ShellError::labeled_error( + format!( + "add could not find place to insert field {:?} {}", + obj, + f.iter().map(|i| &i.item).join(".") + ), + "column name", + value_tag, + )) } }, - None => Err(ShellError::string( + 
None => Err(ShellError::labeled_error( "add needs a column name when adding a value to a table", + "column name", + value_tag, )), }, - x => Err(ShellError::string(format!( - "Unrecognized type in stream: {:?}", - x - ))), + (value, _) => Err(ShellError::type_error( + "row", + value.type_name().tagged(value_tag), + )), } } } @@ -64,12 +70,7 @@ impl Plugin for Add { self.field = Some(table.as_column_path()?.item); } - _ => { - return Err(ShellError::string(format!( - "Unrecognized type in params: {:?}", - args[0] - ))) - } + value => return Err(ShellError::type_error("table", value.tagged_type_name())), } match &args[1] { Tagged { item: v, .. } => { diff --git a/src/plugins/edit.rs b/src/plugins/edit.rs index 6d35530ef5..c0f6dfbedd 100644 --- a/src/plugins/edit.rs +++ b/src/plugins/edit.rs @@ -3,7 +3,7 @@ use nu::{ Tagged, Value, }; -pub type ColumnPath = Vec>; +pub type ColumnPath = Tagged>>; struct Edit { field: Option, @@ -24,19 +24,22 @@ impl Edit { Some(f) => match obj.replace_data_at_column_path(value_tag, &f, v) { Some(v) => return Ok(v), None => { - return Err(ShellError::string( + return Err(ShellError::labeled_error( "edit could not find place to insert column", + "column name", + f.tag, )) } }, - None => Err(ShellError::string( + None => Err(ShellError::untagged_runtime_error( "edit needs a column when changing a value in a table", )), }, - x => Err(ShellError::string(format!( - "Unrecognized type in stream: {:?}", - x - ))), + _ => Err(ShellError::labeled_error( + "Unrecognized type in stream", + "original value", + value_tag, + )), } } } @@ -57,14 +60,9 @@ impl Plugin for Edit { item: Value::Table(_), .. } => { - self.field = Some(table.as_column_path()?.item); - } - _ => { - return Err(ShellError::string(format!( - "Unrecognized type in params: {:?}", - args[0] - ))) + self.field = Some(table.as_column_path()?); } + value => return Err(ShellError::type_error("table", value.tagged_type_name())), } match &args[1] { Tagged { item: v, .. 
} => { diff --git a/src/plugins/embed.rs b/src/plugins/embed.rs index 646db80918..4e3545d055 100644 --- a/src/plugins/embed.rs +++ b/src/plugins/embed.rs @@ -25,8 +25,10 @@ impl Embed { }); Ok(()) } - None => Err(ShellError::string( + None => Err(ShellError::labeled_error( "embed needs a field when embedding a value", + "original value", + value.tag, )), }, } @@ -52,12 +54,7 @@ impl Plugin for Embed { self.field = Some(s.clone()); self.values = Vec::new(); } - _ => { - return Err(ShellError::string(format!( - "Unrecognized type in params: {:?}", - args[0] - ))) - } + value => return Err(ShellError::type_error("string", value.tagged_type_name())), } } diff --git a/src/plugins/inc.rs b/src/plugins/inc.rs index 4e6f6f0f64..c58ca89369 100644 --- a/src/plugins/inc.rs +++ b/src/plugins/inc.rs @@ -14,7 +14,7 @@ pub enum SemVerAction { Patch, } -pub type ColumnPath = Vec>; +pub type ColumnPath = Tagged>>; struct Inc { field: Option, @@ -90,7 +90,11 @@ impl Inc { let replacement = match value.item.get_data_by_column_path(value.tag(), f) { Some(result) => self.inc(result.map(|x| x.clone()))?, None => { - return Err(ShellError::string("inc could not find field to replace")) + return Err(ShellError::labeled_error( + "inc could not find field to replace", + "column name", + f.tag, + )) } }; match value.item.replace_data_at_column_path( @@ -100,18 +104,22 @@ impl Inc { ) { Some(v) => return Ok(v), None => { - return Err(ShellError::string("inc could not find field to replace")) + return Err(ShellError::labeled_error( + "inc could not find field to replace", + "column name", + f.tag, + )) } } } - None => Err(ShellError::string( + None => Err(ShellError::untagged_runtime_error( "inc needs a field when incrementing a column in a table", )), }, - x => Err(ShellError::string(format!( - "Unrecognized type in stream: {:?}", - x - ))), + _ => Err(ShellError::type_error( + "incrementable value", + value.tagged_type_name(), + )), } } } @@ -145,14 +153,9 @@ impl Plugin for Inc { item: 
Value::Table(_), .. } => { - self.field = Some(table.as_column_path()?.item); - } - _ => { - return Err(ShellError::string(format!( - "Unrecognized type in params: {:?}", - arg - ))) + self.field = Some(table.as_column_path()?); } + value => return Err(ShellError::type_error("table", value.tagged_type_name())), } } } @@ -163,7 +166,11 @@ impl Plugin for Inc { match &self.error { Some(reason) => { - return Err(ShellError::string(format!("{}: {}", reason, Inc::usage()))) + return Err(ShellError::untagged_runtime_error(format!( + "{}: {}", + reason, + Inc::usage() + ))) } None => Ok(vec![]), } @@ -308,7 +315,7 @@ mod tests { assert_eq!( plugin .field - .map(|f| f.into_iter().map(|f| f.item).collect()), + .map(|f| f.iter().map(|f| f.item.clone()).collect()), Some(vec!["package".to_string(), "version".to_string()]) ); } diff --git a/src/plugins/match.rs b/src/plugins/match.rs index 1f2aad83fc..7133524050 100644 --- a/src/plugins/match.rs +++ b/src/plugins/match.rs @@ -35,11 +35,12 @@ impl Plugin for Match { } => { self.column = s.clone(); } - _ => { - return Err(ShellError::string(format!( - "Unrecognized type in params: {:?}", - args[0] - ))); + Tagged { tag, .. } => { + return Err(ShellError::labeled_error( + "Unrecognized type in params", + "value", + tag, + )); } } match &args[1] { @@ -49,11 +50,12 @@ impl Plugin for Match { } => { self.regex = Regex::new(s).unwrap(); } - _ => { - return Err(ShellError::string(format!( - "Unrecognized type in params: {:?}", - args[1] - ))); + Tagged { tag, .. } => { + return Err(ShellError::labeled_error( + "Unrecognized type in params", + "value", + tag, + )); } } } @@ -65,7 +67,7 @@ impl Plugin for Match { match &input { Tagged { item: Value::Row(dict), - .. + tag, } => { if let Some(val) = dict.entries.get(&self.column) { match val { @@ -75,22 +77,20 @@ impl Plugin for Match { } => { flag = self.regex.is_match(s); } - _ => { - return Err(ShellError::string(format!( - "value is not a string! {:?}", - &val - ))); + Tagged { tag, .. 
} => { + return Err(ShellError::labeled_error("expected string", "value", tag)); } } } else { - return Err(ShellError::string(format!( - "column not in row! {:?} {:?}", - &self.column, dict - ))); + return Err(ShellError::labeled_error( + format!("column not in row! {:?} {:?}", &self.column, dict), + "row", + tag, + )); } } - _ => { - return Err(ShellError::string(format!("Not a row! {:?}", &input))); + Tagged { tag, .. } => { + return Err(ShellError::labeled_error("Expected row", "value", tag)); } } if flag { diff --git a/src/plugins/str.rs b/src/plugins/str.rs index 7bd35733da..4635d60c35 100644 --- a/src/plugins/str.rs +++ b/src/plugins/str.rs @@ -105,20 +105,24 @@ impl Str { ) { Some(v) => return Ok(v), None => { - return Err(ShellError::string("str could not find field to replace")) + return Err(ShellError::type_error( + "column name", + value.tagged_type_name(), + )) } } } - None => Err(ShellError::string(format!( + None => Err(ShellError::untagged_runtime_error(format!( "{}: {}", "str needs a column when applied to a value in a row", Str::usage() ))), }, - x => Err(ShellError::string(format!( - "Unrecognized type in stream: {:?}", - x - ))), + _ => Err(ShellError::labeled_error( + "Unrecognized type in stream", + value.type_name(), + value.tag, + )), } } } @@ -167,10 +171,11 @@ impl Plugin for Str { self.field = Some(table.as_column_path()?.item); } _ => { - return Err(ShellError::string(format!( - "Unrecognized type in params: {:?}", - possible_field - ))) + return Err(ShellError::labeled_error( + "Unrecognized type in params", + possible_field.type_name(), + possible_field.tag, + )) } } } @@ -187,7 +192,11 @@ impl Plugin for Str { match &self.error { Some(reason) => { - return Err(ShellError::string(format!("{}: {}", reason, Str::usage()))) + return Err(ShellError::untagged_runtime_error(format!( + "{}: {}", + reason, + Str::usage() + ))) } None => Ok(vec![]), } diff --git a/src/plugins/sum.rs b/src/plugins/sum.rs index ffb39cb90b..2bb89b74e1 100644 --- 
a/src/plugins/sum.rs +++ b/src/plugins/sum.rs @@ -28,9 +28,11 @@ impl Sum { self.total = Some(value.clone()); Ok(()) } - _ => Err(ShellError::string(format!( - "Could not sum non-integer or unrelated types" - ))), + _ => Err(ShellError::labeled_error( + "Could not sum non-integer or unrelated types", + "source", + value.tag, + )), } } Value::Primitive(Primitive::Bytes(b)) => { @@ -47,15 +49,18 @@ impl Sum { self.total = Some(value); Ok(()) } - _ => Err(ShellError::string(format!( - "Could not sum non-integer or unrelated types" - ))), + _ => Err(ShellError::labeled_error( + "Could not sum non-integer or unrelated types", + "source", + value.tag, + )), } } - x => Err(ShellError::string(format!( - "Unrecognized type in stream: {:?}", - x - ))), + x => Err(ShellError::labeled_error( + format!("Unrecognized type in stream: {:?}", x), + "source", + value.tag, + )), } } } diff --git a/src/prelude.rs b/src/prelude.rs index eabd778717..1f80126a4f 100644 --- a/src/prelude.rs +++ b/src/prelude.rs @@ -1,3 +1,13 @@ +#[macro_export] +macro_rules! return_err { + ($expr:expr) => { + match $expr { + Err(_) => return, + Ok(expr) => expr, + }; + }; +} + #[macro_export] macro_rules! 
stream { ($($expr:expr),*) => {{ diff --git a/src/shell/filesystem_shell.rs b/src/shell/filesystem_shell.rs index 3c1ae79ea3..aec736ec0f 100644 --- a/src/shell/filesystem_shell.rs +++ b/src/shell/filesystem_shell.rs @@ -145,7 +145,7 @@ impl Shell for FilesystemShell { source.tag(), )); } else { - return Err(ShellError::string("Invalid pattern.")); + return Err(ShellError::untagged_runtime_error("Invalid pattern.")); } } }; diff --git a/src/shell/helper.rs b/src/shell/helper.rs index 85591cf047..b590d82826 100644 --- a/src/shell/helper.rs +++ b/src/shell/helper.rs @@ -1,11 +1,11 @@ +use crate::context::Context; +use crate::parser::hir::syntax_shape::{color_fallible_syntax, FlatShape, PipelineShape}; use crate::parser::hir::TokensIterator; use crate::parser::nom_input; use crate::parser::parse::token_tree::TokenNode; -use crate::parser::parse::tokens::RawToken; -use crate::parser::{Pipeline, PipelineElement}; -use crate::shell::shell_manager::ShellManager; -use crate::Tagged; +use crate::{Tag, Tagged, TaggedItem, Text}; use ansi_term::Color; +use log::trace; use rustyline::completion::Completer; use rustyline::error::ReadlineError; use rustyline::highlight::Highlighter; @@ -13,12 +13,12 @@ use rustyline::hint::Hinter; use std::borrow::Cow::{self, Owned}; pub(crate) struct Helper { - helper: ShellManager, + context: Context, } impl Helper { - pub(crate) fn new(helper: ShellManager) -> Helper { - Helper { helper } + pub(crate) fn new(context: Context) -> Helper { + Helper { context } } } @@ -30,7 +30,7 @@ impl Completer for Helper { pos: usize, ctx: &rustyline::Context<'_>, ) -> Result<(usize, Vec), ReadlineError> { - self.helper.complete(line, pos, ctx) + self.context.shell_manager.complete(line, pos, ctx) } } @@ -53,7 +53,7 @@ impl Completer for Helper { impl Hinter for Helper { fn hint(&self, line: &str, pos: usize, ctx: &rustyline::Context<'_>) -> Option { - self.helper.hint(line, pos, ctx) + self.context.shell_manager.hint(line, pos, ctx) } } @@ -78,20 +78,42 @@ 
impl Highlighter for Helper { Ok(v) => v, }; - let Pipeline { parts } = pipeline; - let mut iter = parts.into_iter(); + let tokens = vec![TokenNode::Pipeline(pipeline.clone().tagged(v.tag()))]; + let mut tokens = TokensIterator::all(&tokens[..], v.tag()); - loop { - match iter.next() { - None => { - return Cow::Owned(out); - } - Some(token) => { - let styled = paint_pipeline_element(&token, line); - out.push_str(&styled.to_string()); - } - } + let text = Text::from(line); + let expand_context = self + .context + .expand_context(&text, Tag::from((0, line.len() - 1, uuid::Uuid::nil()))); + let mut shapes = vec![]; + + // We just constructed a token list that only contains a pipeline, so it can't fail + color_fallible_syntax(&PipelineShape, &mut tokens, &expand_context, &mut shapes) + .unwrap(); + + trace!(target: "nu::shapes", + "SHAPES :: {:?}", + shapes.iter().map(|shape| shape.item).collect::>() + ); + + for shape in shapes { + let styled = paint_flat_shape(shape, line); + out.push_str(&styled); } + + Cow::Owned(out) + + // loop { + // match iter.next() { + // None => { + // return Cow::Owned(out); + // } + // Some(token) => { + // let styled = paint_pipeline_element(&token, line); + // out.push_str(&styled.to_string()); + // } + // } + // } } } } @@ -101,80 +123,55 @@ impl Highlighter for Helper { } } -fn paint_token_node(token_node: &TokenNode, line: &str) -> String { - let styled = match token_node { - TokenNode::Call(..) => Color::Cyan.bold().paint(token_node.tag().slice(line)), - TokenNode::Nodes(..) => Color::Green.bold().paint(token_node.tag().slice(line)), - TokenNode::Whitespace(..) => Color::White.normal().paint(token_node.tag().slice(line)), - TokenNode::Flag(..) => Color::Black.bold().paint(token_node.tag().slice(line)), - TokenNode::Member(..) => Color::Yellow.bold().paint(token_node.tag().slice(line)), - TokenNode::Error(..) => Color::Red.bold().paint(token_node.tag().slice(line)), - TokenNode::Delimited(..) 
=> Color::White.paint(token_node.tag().slice(line)), - TokenNode::Pipeline(..) => Color::Blue.normal().paint(token_node.tag().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::Number(..), - .. - }) => Color::Purple.bold().paint(token_node.tag().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::Size(..), - .. - }) => Color::Purple.bold().paint(token_node.tag().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::GlobPattern, - .. - }) => Color::Cyan.normal().paint(token_node.tag().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::String(..), - .. - }) => Color::Green.normal().paint(token_node.tag().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::Variable(..), - .. - }) => Color::Yellow.bold().paint(token_node.tag().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::Bare, - .. - }) => Color::Green.normal().paint(token_node.tag().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::ExternalCommand(..), - .. - }) => Color::Cyan.bold().paint(token_node.tag().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::ExternalWord, - .. - }) => Color::Black.bold().paint(token_node.tag().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::Operator(..), - .. 
- }) => Color::Black.bold().paint(token_node.tag().slice(line)), - }; +#[allow(unused)] +fn vec_tag(input: Vec>) -> Option { + let mut iter = input.iter(); + let first = iter.next()?.tag; + let last = iter.last(); - styled.to_string() + Some(match last { + None => first, + Some(last) => first.until(last.tag), + }) } -fn paint_pipeline_element(pipeline_element: &PipelineElement, line: &str) -> String { - let mut styled = String::new(); - - if let Some(_) = pipeline_element.pipe { - styled.push_str(&Color::Purple.paint("|")); - } - - let mut tokens = - TokensIterator::new(&pipeline_element.tokens, pipeline_element.tokens.tag, false); - let head = tokens.next(); - - match head { - None => return styled, - Some(head) => { - styled.push_str(&Color::Cyan.bold().paint(head.tag().slice(line)).to_string()) +fn paint_flat_shape(flat_shape: Tagged, line: &str) -> String { + let style = match &flat_shape.item { + FlatShape::OpenDelimiter(_) => Color::White.normal(), + FlatShape::CloseDelimiter(_) => Color::White.normal(), + FlatShape::ItVariable => Color::Purple.bold(), + FlatShape::Variable => Color::Purple.normal(), + FlatShape::Operator => Color::Yellow.normal(), + FlatShape::Dot => Color::White.normal(), + FlatShape::InternalCommand => Color::Cyan.bold(), + FlatShape::ExternalCommand => Color::Cyan.normal(), + FlatShape::ExternalWord => Color::Black.bold(), + FlatShape::BareMember => Color::Yellow.bold(), + FlatShape::StringMember => Color::Yellow.bold(), + FlatShape::String => Color::Green.normal(), + FlatShape::Path => Color::Cyan.normal(), + FlatShape::GlobPattern => Color::Cyan.bold(), + FlatShape::Word => Color::Green.normal(), + FlatShape::Pipe => Color::Purple.bold(), + FlatShape::Flag => Color::Black.bold(), + FlatShape::ShorthandFlag => Color::Black.bold(), + FlatShape::Int => Color::Purple.bold(), + FlatShape::Decimal => Color::Purple.bold(), + FlatShape::Whitespace => Color::White.normal(), + FlatShape::Error => Color::Red.bold(), + FlatShape::Size { number, 
unit } => { + let number = number.slice(line); + let unit = unit.slice(line); + return format!( + "{}{}", + Color::Purple.bold().paint(number), + Color::Cyan.bold().paint(unit) + ); } - } + }; - for token in tokens { - styled.push_str(&paint_token_node(token, line)); - } - - styled.to_string() + let body = flat_shape.tag.slice(line); + style.paint(body).to_string() } impl rustyline::Helper for Helper {} From f0ca0312f3ef0061033c888b56de2a64fa530365 Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Fri, 11 Oct 2019 19:06:24 +0200 Subject: [PATCH 012/184] Adds racer, formats shell.nix --- shell.nix | 25 ++++++++++++++++++++----- 1 file changed, 20 insertions(+), 5 deletions(-) diff --git a/shell.nix b/shell.nix index e6062b2cb0..2b2bbca078 100644 --- a/shell.nix +++ b/shell.nix @@ -1,10 +1,25 @@ let - moz_overlay = import (builtins.fetchTarball https://github.com/mozilla/nixpkgs-mozilla/archive/master.tar.gz); + moz_overlay = import (builtins.fetchTarball + "https://github.com/mozilla/nixpkgs-mozilla/archive/master.tar.gz"); + nixpkgs = import { overlays = [ moz_overlay ]; }; - nightly = ((nixpkgs.rustChannelOf { date = "2019-09-01"; channel = "nightly"; }).rust.override { extensions = [ "rust-src" "rls-preview" "clippy-preview" "rust-analysis" "rustfmt-preview" ];}); -in -with nixpkgs; + + nightly = ((nixpkgs.rustChannelOf { + date = "2019-09-01"; + channel = "nightly"; + }).rust.override { + extensions = [ + "rust-src" + "rls-preview" + "clippy-preview" + "rust-analysis" + "rustfmt-preview" + ]; + }); + +in with nixpkgs; stdenv.mkDerivation { name = "nushell-rust"; - buildInputs = [ nightly openssl_1_1 pkg-config ]; + buildInputs = [ nightly openssl_1_1 pkg-config rustracer ]; + RUST_SRC_PATH = "${nightly}/lib/rustlib/src/rust/src"; } From af2ec609804d4890bd2c507941c7d273b92474dc Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Fri, 11 Oct 2019 21:13:00 +0200 Subject: [PATCH 013/184] Shell.nix cleanup. 
--- shell.nix | 27 +++++++++++++++++---------- 1 file changed, 17 insertions(+), 10 deletions(-) diff --git a/shell.nix b/shell.nix index 2b2bbca078..9e7cbe9721 100644 --- a/shell.nix +++ b/shell.nix @@ -1,25 +1,32 @@ +{ pkgs ? import { + overlays = [ + (import (builtins.fetchTarball + "https://github.com/mozilla/nixpkgs-mozilla/archive/master.tar.gz")) + ]; +} }: +with pkgs; let - moz_overlay = import (builtins.fetchTarball - "https://github.com/mozilla/nixpkgs-mozilla/archive/master.tar.gz"); - nixpkgs = import { overlays = [ moz_overlay ]; }; - - nightly = ((nixpkgs.rustChannelOf { + nightly = ((pkgs.rustChannelOf { date = "2019-09-01"; channel = "nightly"; }).rust.override { extensions = [ - "rust-src" - "rls-preview" "clippy-preview" + "rls-preview" "rust-analysis" + "rust-src" "rustfmt-preview" ]; }); -in with nixpkgs; -stdenv.mkDerivation { + nu-deps = [ openssl_1_1 pkg-config x11 python3 ]; + + rust = [ nightly rustracer cargo-watch ]; + +in stdenv.mkDerivation { name = "nushell-rust"; - buildInputs = [ nightly openssl_1_1 pkg-config rustracer ]; + buildInputs = nu-deps ++ rust; RUST_SRC_PATH = "${nightly}/lib/rustlib/src/rust/src"; + SSL_CERT_FILE = "/etc/ssl/certs/ca-certificates.crt"; } From 5ec6bac7d99e7e2e3c4aa503f17efbc9e95608cf Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Fri, 11 Oct 2019 21:39:11 +0200 Subject: [PATCH 014/184] Removes redundant parens. 
--- shell.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/shell.nix b/shell.nix index 9e7cbe9721..f5c61ac0a8 100644 --- a/shell.nix +++ b/shell.nix @@ -7,7 +7,7 @@ with pkgs; let - nightly = ((pkgs.rustChannelOf { + nightly = (pkgs.rustChannelOf { date = "2019-09-01"; channel = "nightly"; }).rust.override { @@ -18,7 +18,7 @@ let "rust-src" "rustfmt-preview" ]; - }); + }; nu-deps = [ openssl_1_1 pkg-config x11 python3 ]; From 439889dcefb25d92b517f0ceb050de8276759f7a Mon Sep 17 00:00:00 2001 From: Yehuda Katz Date: Fri, 11 Oct 2019 11:39:05 -0700 Subject: [PATCH 015/184] Feature flagging infrastructure This commit adds the ability to work on features behind a feature flag that won't be included in normal builds of nu. These features are not exposed as Cargo features, as they reflect incomplete features that are not yet stable. To create a feature, add it to `features.toml`: ```toml [hintsv1] description = "Adding hints based on error states in the highlighter" enabled = false ``` Each feature in `features.toml` becomes a feature flag accessible to `cfg`: ```rs println!("hintsv1 is enabled"); ``` By default, features are enabled based on the value of the `enabled` field. You can also enable a feature from the command line via the `NUSHELL_ENABLE_FLAGS` environment variable: ```sh $ NUSHELL_ENABLE_FLAGS=hintsv1 cargo run ``` You can enable all flags via `NUSHELL_ENABLE_ALL_FLAGS`. This commit also updates the CI setup to run the build with all flags off and with all flags on. It also extracts the linting test into its own parallelizable test, which means it doesn't need to run together with every other test anymore. When working on a feature, you should also add tests behind the same flag. A commit is mergable if all tests pass with and without the flag, allowing incomplete commits to land on master as long as the incomplete code builds and passes tests. 
--- .azure/azure-pipelines.yml | 20 ++++++++++++++ Cargo.lock | 56 +++++++++++++++++++------------------- Cargo.toml | 4 +++ build.rs | 39 ++++++++++++++++++++++++++ features.toml | 4 +++ src/main.rs | 3 ++ 6 files changed, 98 insertions(+), 28 deletions(-) create mode 100644 build.rs create mode 100644 features.toml diff --git a/.azure/azure-pipelines.yml b/.azure/azure-pipelines.yml index e1f9b93681..2ab7e05c46 100644 --- a/.azure/azure-pipelines.yml +++ b/.azure/azure-pipelines.yml @@ -5,10 +5,25 @@ strategy: matrix: linux-nightly: image: ubuntu-16.04 + style: 'unflagged' macos-nightly: image: macos-10.14 + style: 'unflagged' windows-nightly: image: vs2017-win2016 + style: 'unflagged' + linux-nightly-canary: + image: ubuntu-16.04 + style: 'canary' + macos-nightly-canary: + image: macos-10.14 + style: 'canary' + windows-nightly-canary: + image: vs2017-win2016 + style: 'canary' + fmt: + image: ubuntu-16.04 + style: 'fmt' pool: vmImage: $(image) @@ -27,6 +42,11 @@ steps: rustup component add rustfmt --toolchain `cat rust-toolchain` displayName: Install Rust - bash: RUSTFLAGS="-D warnings" cargo test --all-features + condition: eq(variables['style'], 'unflagged') + displayName: Run tests + - bash: NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo test --all-features + condition: eq(variables['style'], 'canary') displayName: Run tests - bash: cargo fmt --all -- --check + condition: eq(variables['style'], 'fmt') displayName: Lint diff --git a/Cargo.lock b/Cargo.lock index af1d46aa0e..1457db7c51 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -138,7 +138,7 @@ dependencies = [ "num-bigint 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ 
-148,7 +148,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -184,7 +184,7 @@ dependencies = [ "linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", "md5 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -197,7 +197,7 @@ dependencies = [ "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "regex-automata 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -260,7 +260,7 @@ dependencies = [ "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", "num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -322,7 +322,7 @@ dependencies = [ "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", 
"nom 4.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "rust-ini 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde-hjson 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)", "toml 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)", @@ -471,7 +471,7 @@ dependencies = [ "csv-core 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", "ryu 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -558,7 +558,7 @@ dependencies = [ "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", "ord_subset 3.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1105,7 +1105,7 @@ name = "indexmap" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1220,7 +1220,7 @@ dependencies = [ "itertools 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "render-tree 0.1.1 (git+https://github.com/wycats/language-reporting)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + 
"serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde_derive 1.0.98 (registry+https://github.com/rust-lang/crates.io-index)", "termcolor 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1438,7 +1438,7 @@ dependencies = [ "bincode 1.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde_derive 1.0.98 (registry+https://github.com/rust-lang/crates.io-index)", "wasm-bindgen 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1586,7 +1586,7 @@ dependencies = [ "rusqlite 0.20.0 (registry+https://github.com/rust-lang/crates.io-index)", "rustyline 5.0.3 (registry+https://github.com/rust-lang/crates.io-index)", "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde-hjson 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde_bytes 0.11.2 (registry+https://github.com/rust-lang/crates.io-index)", "serde_ini 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1616,7 +1616,7 @@ dependencies = [ "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", "num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1808,7 +1808,7 @@ dependencies = [ "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "humantime 1.2.0 
(registry+https://github.com/rust-lang/crates.io-index)", "line-wrap 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "xml-rs 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1893,7 +1893,7 @@ dependencies = [ "directories 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "isatty 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", "petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde-value 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", "serde_derive 1.0.98 (registry+https://github.com/rust-lang/crates.io-index)", "tint 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2268,7 +2268,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "serde" -version = "1.0.100" +version = "1.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "serde_derive 1.0.98 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2304,7 +2304,7 @@ version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "ordered-float 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2312,7 +2312,7 @@ name = "serde_bytes" version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2340,7 
+2340,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "result 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2352,7 +2352,7 @@ dependencies = [ "indexmap 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", "ryu 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2370,7 +2370,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "dtoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", "itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2381,7 +2381,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "dtoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", "linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "yaml-rust 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2481,7 +2481,7 @@ dependencies = [ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "mime 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "mime_guess 2.0.1 
(registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)", "serde_urlencoded 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", "url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2535,7 +2535,7 @@ dependencies = [ "onig 4.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "plist 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "regex-syntax 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde_derive 1.0.98 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)", "walkdir 2.2.9 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2641,7 +2641,7 @@ name = "toml" version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2649,7 +2649,7 @@ name = "toml" version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2758,7 +2758,7 @@ version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 
(registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -3242,7 +3242,7 @@ dependencies = [ "checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" "checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" "checksum serde 0.8.23 (registry+https://github.com/rust-lang/crates.io-index)" = "9dad3f759919b92c3068c696c15c3d17238234498bbdcc80f2c469606f948ac8" -"checksum serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)" = "f4473e8506b213730ff2061073b48fa51dcc66349219e2e7c5608f0296a1d95a" +"checksum serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)" = "9796c9b7ba2ffe7a9ce53c2287dfc48080f4b2b362fcc245a259b3a7201119dd" "checksum serde-hjson 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0b833c5ad67d52ced5f5938b2980f32a9c1c5ef047f0b4fb3127e7a423c76153" "checksum serde-hjson 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6a3a4e0ea8a88553209f6cc6cfe8724ecad22e1acf372793c27d995290fe74f8" "checksum serde-value 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7a663f873dedc4eac1a559d4c6bc0d0b2c34dc5ac4702e105014b8281489e44f" diff --git a/Cargo.toml b/Cargo.toml index 80a077dd88..5a4ed6a027 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -106,6 +106,10 @@ features = ["bundled", "blob"] [dev-dependencies] pretty_assertions = "0.6.1" +[build-dependencies] +toml = "0.5.3" +serde = { version = "1.0.101", features = ["derive"] } + [lib] name = "nu" path = "src/lib.rs" diff --git a/build.rs b/build.rs new file mode 100644 index 0000000000..44a55f9573 --- /dev/null +++ b/build.rs @@ -0,0 +1,39 @@ +use serde::Deserialize; +use std::collections::HashMap; +use std::collections::HashSet; +use std::env; +use std::path::Path; + +#[derive(Deserialize)] +struct Feature { + #[allow(unused)] + 
description: String, + enabled: bool, +} + +fn main() -> Result<(), Box> { + let input = env::var("CARGO_MANIFEST_DIR").unwrap(); + let all_on = env::var("NUSHELL_ENABLE_ALL_FLAGS").is_ok(); + let flags: HashSet = env::var("NUSHELL_ENABLE_FLAGS") + .map(|s| s.split(",").map(|s| s.to_string()).collect()) + .unwrap_or_else(|_| HashSet::new()); + + if all_on && !flags.is_empty() { + println!( + "cargo:warning={}", + "Both NUSHELL_ENABLE_ALL_FLAGS and NUSHELL_ENABLE_FLAGS were set. You don't need both." + ); + } + + let path = Path::new(&input).join("features.toml"); + + let toml: HashMap = toml::from_str(&std::fs::read_to_string(path)?)?; + + for (key, value) in toml.iter() { + if value.enabled == true || all_on || flags.contains(key) { + println!("cargo:rustc-cfg={}", key); + } + } + + Ok(()) +} diff --git a/features.toml b/features.toml new file mode 100644 index 0000000000..290f673d26 --- /dev/null +++ b/features.toml @@ -0,0 +1,4 @@ +[hintsv1] + +description = "Adding hints based upon error states in the syntax highlighter" +enabled = false diff --git a/src/main.rs b/src/main.rs index 7f82808e74..4b10944a2b 100644 --- a/src/main.rs +++ b/src/main.rs @@ -3,6 +3,9 @@ use log::LevelFilter; use std::error::Error; fn main() -> Result<(), Box> { + #[cfg(feature1)] + println!("feature1 is enabled"); + let matches = App::new("nushell") .version(clap::crate_version!()) .arg( From 193b00764b0cf5ad0335446c55f6c8d3aeb388a9 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Sun, 13 Oct 2019 17:12:43 +1300 Subject: [PATCH 016/184] Stream support (#812) * Moves off of draining between filters. Instead, the sink will pull on the stream, and will drain element-wise. This moves the whole stream to being lazy. * Adds ctrl-c support and connects it into some of the key points where we pull on the stream. If a ctrl-c is detect, we immediately halt pulling on the stream and return to the prompt. * Moves away from having a SourceMap where anchor locations are stored. 
Now AnchorLocation is kept directly in the Tag. * To make this possible, split tag and span. Span is largely used in the parser and is copyable. Tag is now no longer copyable. --- Cargo.lock | 88 ------- Cargo.toml | 1 - src/cli.rs | 65 +++-- src/commands/autoview.rs | 198 ++++++++------ src/commands/classified.rs | 121 ++++----- src/commands/command.rs | 58 ++-- src/commands/config.rs | 2 +- src/commands/date.rs | 18 +- src/commands/echo.rs | 2 +- src/commands/enter.rs | 17 +- src/commands/env.rs | 16 +- src/commands/fetch.rs | 51 ++-- src/commands/from_bson.rs | 66 ++--- src/commands/from_csv.rs | 18 +- src/commands/from_ini.rs | 17 +- src/commands/from_json.rs | 32 +-- src/commands/from_sqlite.rs | 6 +- src/commands/from_toml.rs | 16 +- src/commands/from_tsv.rs | 18 +- src/commands/from_url.rs | 6 +- src/commands/from_xml.rs | 14 +- src/commands/from_yaml.rs | 18 +- src/commands/get.rs | 2 +- src/commands/help.rs | 2 +- src/commands/lines.rs | 2 +- src/commands/ls.rs | 2 +- src/commands/open.rs | 59 ++--- src/commands/pivot.rs | 2 +- src/commands/post.rs | 107 ++++---- src/commands/save.rs | 17 +- src/commands/shells.rs | 7 +- src/commands/size.rs | 2 +- src/commands/split_column.rs | 2 +- src/commands/split_row.rs | 2 +- src/commands/table.rs | 28 +- src/commands/tags.rs | 3 +- src/commands/to_bson.rs | 19 +- src/commands/to_csv.rs | 24 +- src/commands/to_json.rs | 11 +- src/commands/to_toml.rs | 13 +- src/commands/to_tsv.rs | 18 +- src/commands/to_url.rs | 8 +- src/commands/to_yaml.rs | 11 +- src/commands/version.rs | 6 +- src/commands/where_.rs | 2 +- src/commands/which_.rs | 4 +- src/context.rs | 53 +--- src/data/base.rs | 69 ++--- src/data/command.rs | 14 +- src/data/config.rs | 8 +- src/data/dict.rs | 4 +- src/data/meta.rs | 235 ++++++++++------- src/data/types.rs | 4 +- src/errors.rs | 83 +++--- src/evaluate/evaluator.rs | 39 +-- src/format/generic.rs | 4 +- src/format/table.rs | 4 +- src/lib.rs | 6 +- src/parser.rs | 4 +- src/parser/deserializer.rs | 2 
+- src/parser/hir.rs | 102 ++++---- src/parser/hir/baseline_parse/tests.rs | 35 ++- src/parser/hir/binary.rs | 4 +- src/parser/hir/expand_external_tokens.rs | 28 +- src/parser/hir/external_command.rs | 2 +- src/parser/hir/named.rs | 10 +- src/parser/hir/path.rs | 7 +- src/parser/hir/syntax_shape.rs | 201 +++++++------- src/parser/hir/syntax_shape/block.rs | 62 ++--- src/parser/hir/syntax_shape/expression.rs | 47 ++-- .../hir/syntax_shape/expression/atom.rs | 247 ++++++++++-------- .../hir/syntax_shape/expression/delimited.rs | 28 +- .../hir/syntax_shape/expression/file_path.rs | 10 +- .../hir/syntax_shape/expression/list.rs | 8 +- .../hir/syntax_shape/expression/number.rs | 61 +++-- .../hir/syntax_shape/expression/pattern.rs | 14 +- .../hir/syntax_shape/expression/string.rs | 30 ++- .../hir/syntax_shape/expression/unit.rs | 44 ++-- .../syntax_shape/expression/variable_path.rs | 117 +++++---- src/parser/hir/syntax_shape/flat_shape.rs | 52 ++-- src/parser/hir/tokens_iterator.rs | 56 ++-- src/parser/parse/files.rs | 31 ++- src/parser/parse/flag.rs | 12 +- src/parser/parse/parser.rs | 118 ++++----- src/parser/parse/pipeline.rs | 8 +- src/parser/parse/token_tree.rs | 127 ++++----- src/parser/parse/token_tree_builder.rs | 171 ++++++------ src/parser/parse/tokens.rs | 68 ++--- src/parser/parse_command.rs | 35 +-- src/parser/registry.rs | 2 +- src/plugins/add.rs | 4 +- src/plugins/binaryview.rs | 3 +- src/plugins/edit.rs | 2 +- src/plugins/embed.rs | 2 +- src/plugins/inc.rs | 47 ++-- src/plugins/ps.rs | 2 +- src/plugins/str.rs | 23 +- src/plugins/sum.rs | 4 +- src/plugins/sys.rs | 31 ++- src/plugins/textview.rs | 17 +- src/prelude.rs | 18 +- src/shell/filesystem_shell.rs | 78 +++--- src/shell/help_shell.rs | 7 +- src/shell/helper.rs | 18 +- src/shell/shell.rs | 5 +- src/shell/shell_manager.rs | 74 +++--- src/shell/value_shell.rs | 19 +- src/stream.rs | 11 + tests/command_config_test.rs | 46 ++-- tests/command_open_tests.rs | 2 +- 110 files changed, 1988 insertions(+), 1892 
deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1457db7c51..765f42d637 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1604,7 +1604,6 @@ dependencies = [ "toml 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", "which 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1920,24 +1919,6 @@ dependencies = [ "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "rand" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "rand" version = "0.7.0" @@ -1950,15 +1931,6 @@ dependencies = [ "rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "rand_chacha" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - 
"rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "rand_chacha" version = "0.2.1" @@ -1989,14 +1961,6 @@ dependencies = [ "getrandom 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "rand_hc" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "rand_hc" version = "0.2.0" @@ -2005,24 +1969,6 @@ dependencies = [ "rand_core 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "rand_isaac" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand_jitter" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "rand_os" version = "0.1.3" @@ -2036,23 +1982,6 @@ dependencies = [ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "rand_pcg" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand_xorshift" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "raw-cpuid" version = "7.0.3" @@ -2752,15 +2681,6 @@ name = "utf8parse" version = "0.1.1" 
source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "uuid" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "vcpkg" version = "0.2.7" @@ -3201,20 +3121,13 @@ dependencies = [ "checksum quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9274b940887ce9addde99c4eee6b5c44cc494b182b97e73dc8ffdcb3397fd3f0" "checksum quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1" "checksum quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe" -"checksum rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca" "checksum rand 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d47eab0e83d9693d40f825f86948aa16eff6750ead4bdffc4ab95b8b3a7f052c" -"checksum rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef" "checksum rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "03a2a90da8c7523f554344f921aa97283eadf6ac484a6d2a7d0212fa7f8d6853" "checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" "checksum rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" "checksum rand_core 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "615e683324e75af5d43d8f7a39ffe3ee4a9dc42c5c701167a71dc59c3a493aca" -"checksum rand_hc 0.1.0 
(registry+https://github.com/rust-lang/crates.io-index)" = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4" "checksum rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" -"checksum rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08" -"checksum rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b" "checksum rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071" -"checksum rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44" -"checksum rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c" "checksum raw-cpuid 7.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b4a349ca83373cfa5d6dbb66fd76e58b2cca08da71a5f6400de0a0a6a9bceeaf" "checksum rawkey 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "33ec17a493dcb820725c002bc253f6f3ba4e4dc635e72c238540691b05e43897" "checksum rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" @@ -3297,7 +3210,6 @@ dependencies = [ "checksum url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "75b414f6c464c879d7f9babf951f23bc3743fb7313c081b2e6ca719067ea9d61" "checksum user32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4ef4711d107b21b410a3a974b1204d9accc8b10dad75d8324b5d755de1617d47" "checksum utf8parse 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8772a4ccbb4e89959023bc5b7cb8623a795caa7092d99f3aa9501b9484d4557d" -"checksum uuid 
0.7.4 (registry+https://github.com/rust-lang/crates.io-index)" = "90dbc611eb48397705a6b0f6e917da23ae517e4d127123d2cf7674206627d32a" "checksum vcpkg 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "33dd455d0f96e90a75803cfeb7f948768c08d70a6de9a8d2362461935698bf95" "checksum vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "05c78687fb1a80548ae3250346c3db86a80a7cdd77bda190189f2d0a0987c81a" "checksum version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd" diff --git a/Cargo.toml b/Cargo.toml index 5a4ed6a027..9ae1ada021 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -66,7 +66,6 @@ hex = "0.3.2" tempfile = "3.1.0" semver = "0.9.0" which = "2.0.1" -uuid = {version = "0.7.4", features = [ "v4", "serde" ]} textwrap = {version = "0.11.0", features = ["term_size"]} shellexpand = "1.0.0" futures-timer = "0.4.0" diff --git a/src/cli.rs b/src/cli.rs index 6c1ba5ef93..16dc983540 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -28,8 +28,7 @@ use std::error::Error; use std::io::{BufRead, BufReader, Write}; use std::iter::Iterator; use std::path::PathBuf; -use std::sync::atomic::{AtomicBool, Ordering}; -use std::sync::Arc; +use std::sync::atomic::Ordering; #[derive(Debug)] pub enum MaybeOwned<'a, T> { @@ -339,16 +338,15 @@ pub async fn cli() -> Result<(), Box> { // we are ok if history does not exist let _ = rl.load_history(&History::path()); - let ctrl_c = Arc::new(AtomicBool::new(false)); - let cc = ctrl_c.clone(); + let cc = context.ctrl_c.clone(); ctrlc::set_handler(move || { cc.store(true, Ordering::SeqCst); }) .expect("Error setting Ctrl-C handler"); let mut ctrlcbreak = false; loop { - if ctrl_c.load(Ordering::SeqCst) { - ctrl_c.store(false, Ordering::SeqCst); + if context.ctrl_c.load(Ordering::SeqCst) { + context.ctrl_c.store(false, Ordering::SeqCst); continue; } @@ -481,7 +479,7 @@ async fn process_line(readline: Result, ctx: &mut Context Ok(line) => { 
let line = chomp_newline(line); - let result = match crate::parser::parse(&line, uuid::Uuid::nil()) { + let result = match crate::parser::parse(&line) { Err(err) => { return LineResult::Error(line.to_string(), err); } @@ -549,30 +547,45 @@ async fn process_line(readline: Result, ctx: &mut Context ( Some(ClassifiedCommand::Internal(left)), Some(ClassifiedCommand::External(_)), - ) => match left - .run(ctx, input, Text::from(line), is_first_command) - .await - { + ) => match left.run(ctx, input, Text::from(line), is_first_command) { Ok(val) => ClassifiedInputStream::from_input_stream(val), Err(err) => return LineResult::Error(line.to_string(), err), }, (Some(ClassifiedCommand::Internal(left)), Some(_)) => { - match left - .run(ctx, input, Text::from(line), is_first_command) - .await - { + match left.run(ctx, input, Text::from(line), is_first_command) { Ok(val) => ClassifiedInputStream::from_input_stream(val), Err(err) => return LineResult::Error(line.to_string(), err), } } (Some(ClassifiedCommand::Internal(left)), None) => { - match left - .run(ctx, input, Text::from(line), is_first_command) - .await - { - Ok(val) => ClassifiedInputStream::from_input_stream(val), + match left.run(ctx, input, Text::from(line), is_first_command) { + Ok(val) => { + use futures::stream::TryStreamExt; + + let mut output_stream: OutputStream = val.into(); + loop { + match output_stream.try_next().await { + Ok(Some(ReturnSuccess::Value(Tagged { + item: Value::Error(e), + .. 
+ }))) => { + return LineResult::Error(line.to_string(), e); + } + Ok(Some(_item)) => { + if ctx.ctrl_c.load(Ordering::SeqCst) { + break; + } + } + _ => { + break; + } + } + } + + return LineResult::Success(line.to_string()); + } Err(err) => return LineResult::Error(line.to_string(), err), } } @@ -620,12 +633,12 @@ fn classify_pipeline( source: &Text, ) -> Result { let mut pipeline_list = vec![pipeline.clone()]; - let mut iterator = TokensIterator::all(&mut pipeline_list, pipeline.tag()); + let mut iterator = TokensIterator::all(&mut pipeline_list, pipeline.span()); expand_syntax( &PipelineShape, &mut iterator, - &context.expand_context(source, pipeline.tag()), + &context.expand_context(source, pipeline.span()), ) } @@ -642,7 +655,13 @@ pub(crate) fn external_command( Ok(ClassifiedCommand::External(ExternalCommand { name: name.to_string(), name_tag: name.tag(), - args: arg_list_strings, + args: arg_list_strings + .iter() + .map(|x| Tagged { + tag: x.span.into(), + item: x.item.clone(), + }) + .collect(), })) } diff --git a/src/commands/autoview.rs b/src/commands/autoview.rs index 29e7d18121..4f7d7172a2 100644 --- a/src/commands/autoview.rs +++ b/src/commands/autoview.rs @@ -1,9 +1,14 @@ use crate::commands::{RawCommandArgs, WholeStreamCommand}; use crate::errors::ShellError; +use crate::parser::hir::{Expression, NamedArguments}; use crate::prelude::*; +use futures::stream::TryStreamExt; +use std::sync::atomic::Ordering; pub struct Autoview; +const STREAM_PAGE_SIZE: u64 = 50; + #[derive(Deserialize)] pub struct AutoviewArgs {} @@ -31,61 +36,132 @@ impl WholeStreamCommand for Autoview { pub fn autoview( AutoviewArgs {}: AutoviewArgs, - mut context: RunnableContext, + context: RunnableContext, raw: RawCommandArgs, ) -> Result { - Ok(OutputStream::new(async_stream! 
{ - let input = context.input.drain_vec().await; + let binary = context.get_command("binaryview"); + let text = context.get_command("textview"); + let table = context.get_command("table"); - if input.len() > 0 { - if let Tagged { - item: Value::Primitive(Primitive::Binary(_)), - .. - } = input[0usize] - { - let binary = context.get_command("binaryview"); - if let Some(binary) = binary { - let result = binary.run(raw.with_input(input), &context.commands, false); - result.collect::>().await; - } else { - for i in input { - match i.item { - Value::Primitive(Primitive::Binary(b)) => { - use pretty_hex::*; - println!("{:?}", b.hex_dump()); + Ok(OutputStream::new(async_stream! { + let mut output_stream: OutputStream = context.input.into(); + + match output_stream.try_next().await { + Ok(Some(x)) => { + match output_stream.try_next().await { + Ok(Some(y)) => { + let ctrl_c = context.ctrl_c.clone(); + let stream = async_stream! { + yield Ok(x); + yield Ok(y); + + loop { + match output_stream.try_next().await { + Ok(Some(z)) => { + if ctrl_c.load(Ordering::SeqCst) { + break; + } + yield Ok(z); + } + _ => break, + } + } + }; + if let Some(table) = table { + let mut new_output_stream: OutputStream = stream.to_output_stream(); + let mut finished = false; + let mut current_idx = 0; + loop { + let mut new_input = VecDeque::new(); + + for _ in 0..STREAM_PAGE_SIZE { + match new_output_stream.try_next().await { + + Ok(Some(a)) => { + if let ReturnSuccess::Value(v) = a { + new_input.push_back(v); + } + } + _ => { + finished = true; + break; + } + } + } + + let raw = raw.clone(); + + let mut command_args = raw.with_input(new_input.into()); + let mut named_args = NamedArguments::new(); + named_args.insert_optional("start_number", Some(Expression::number(current_idx, Tag::unknown()))); + command_args.call_info.args.named = Some(named_args); + + let result = table.run(command_args, &context.commands, false); + result.collect::>().await; + + if finished { + break; + } else { + 
current_idx += STREAM_PAGE_SIZE; + } } - _ => {} } } - }; - } else if is_single_anchored_text_value(&input) { - let text = context.get_command("textview"); - if let Some(text) = text { - let result = text.run(raw.with_input(input), &context.commands, false); - result.collect::>().await; - } else { - for i in input { - match i.item { - Value::Primitive(Primitive::String(s)) => { - println!("{}", s); + _ => { + if let ReturnSuccess::Value(x) = x { + match x { + Tagged { + item: Value::Primitive(Primitive::String(ref s)), + tag: Tag { anchor, span }, + } if anchor.is_some() => { + if let Some(text) = text { + let mut stream = VecDeque::new(); + stream.push_back(Value::string(s).tagged(Tag { anchor, span })); + let result = text.run(raw.with_input(stream.into()), &context.commands, false); + result.collect::>().await; + } else { + println!("{}", s); + } + } + Tagged { + item: Value::Primitive(Primitive::String(s)), + .. + } => { + println!("{}", s); + } + + Tagged { item: Value::Primitive(Primitive::Binary(ref b)), .. } => { + if let Some(binary) = binary { + let mut stream = VecDeque::new(); + stream.push_back(x.clone()); + let result = binary.run(raw.with_input(stream.into()), &context.commands, false); + result.collect::>().await; + } else { + use pretty_hex::*; + println!("{:?}", b.hex_dump()); + } + } + + Tagged { item: Value::Error(e), .. } => { + yield Err(e); + } + Tagged { item: ref item, .. 
} => { + if let Some(table) = table { + let mut stream = VecDeque::new(); + stream.push_back(x.clone()); + let result = table.run(raw.with_input(stream.into()), &context.commands, false); + result.collect::>().await; + } else { + println!("{:?}", item); + } + } } - _ => {} } } } - } else if is_single_text_value(&input) { - for i in input { - match i.item { - Value::Primitive(Primitive::String(s)) => { - println!("{}", s); - } - _ => {} - } - } - } else { - let table = context.expect_command("table"); - let result = table.run(raw.with_input(input), &context.commands, false); - result.collect::>().await; + } + _ => { + //println!(""); } } @@ -95,35 +171,3 @@ pub fn autoview( } })) } - -fn is_single_text_value(input: &Vec>) -> bool { - if input.len() != 1 { - return false; - } - if let Tagged { - item: Value::Primitive(Primitive::String(_)), - .. - } = input[0] - { - true - } else { - false - } -} - -#[allow(unused)] -fn is_single_anchored_text_value(input: &Vec>) -> bool { - if input.len() != 1 { - return false; - } - - if let Tagged { - item: Value::Primitive(Primitive::String(_)), - tag: Tag { anchor, .. 
}, - } = input[0] - { - anchor != uuid::Uuid::nil() - } else { - false - } -} diff --git a/src/commands/classified.rs b/src/commands/classified.rs index c73a56fee4..105daff771 100644 --- a/src/commands/classified.rs +++ b/src/commands/classified.rs @@ -100,7 +100,7 @@ pub(crate) struct DynamicCommand { } impl InternalCommand { - pub(crate) async fn run( + pub(crate) fn run( self, context: &mut Context, input: ClassifiedInputStream, @@ -119,12 +119,9 @@ impl InternalCommand { let command = context.expect_command(&self.name); let result = { - let source_map = context.source_map.lock().unwrap().clone(); - context.run_command( command, self.name_tag.clone(), - source_map, self.args, &source, objects, @@ -134,69 +131,73 @@ impl InternalCommand { let result = trace_out_stream!(target: "nu::trace_stream::internal", source: &source, "output" = result); let mut result = result.values; + let mut context = context.clone(); - let mut stream = VecDeque::new(); - while let Some(item) = result.next().await { - match item? { - ReturnSuccess::Action(action) => match action { - CommandAction::ChangePath(path) => { - context.shell_manager.set_path(path); - } - CommandAction::AddAnchorLocation(uuid, anchor_location) => { - context.add_anchor_location(uuid, anchor_location); - } - CommandAction::Exit => std::process::exit(0), // TODO: save history.txt - CommandAction::EnterHelpShell(value) => { - match value { - Tagged { - item: Value::Primitive(Primitive::String(cmd)), - tag, - } => { - context.shell_manager.insert_at_current(Box::new( - HelpShell::for_command( - Value::string(cmd).tagged(tag), - &context.registry(), - )?, - )); - } - _ => { - context.shell_manager.insert_at_current(Box::new( - HelpShell::index(&context.registry())?, - )); + let stream = async_stream! 
{ + while let Some(item) = result.next().await { + match item { + Ok(ReturnSuccess::Action(action)) => match action { + CommandAction::ChangePath(path) => { + context.shell_manager.set_path(path); + } + CommandAction::Exit => std::process::exit(0), // TODO: save history.txt + CommandAction::EnterHelpShell(value) => { + match value { + Tagged { + item: Value::Primitive(Primitive::String(cmd)), + tag, + } => { + context.shell_manager.insert_at_current(Box::new( + HelpShell::for_command( + Value::string(cmd).tagged(tag), + &context.registry(), + ).unwrap(), + )); + } + _ => { + context.shell_manager.insert_at_current(Box::new( + HelpShell::index(&context.registry()).unwrap(), + )); + } } } - } - CommandAction::EnterValueShell(value) => { - context - .shell_manager - .insert_at_current(Box::new(ValueShell::new(value))); - } - CommandAction::EnterShell(location) => { - context.shell_manager.insert_at_current(Box::new( - FilesystemShell::with_location(location, context.registry().clone())?, - )); - } - CommandAction::PreviousShell => { - context.shell_manager.prev(); - } - CommandAction::NextShell => { - context.shell_manager.next(); - } - CommandAction::LeaveShell => { - context.shell_manager.remove_at_current(); - if context.shell_manager.is_empty() { - std::process::exit(0); // TODO: save history.txt + CommandAction::EnterValueShell(value) => { + context + .shell_manager + .insert_at_current(Box::new(ValueShell::new(value))); } - } - }, + CommandAction::EnterShell(location) => { + context.shell_manager.insert_at_current(Box::new( + FilesystemShell::with_location(location, context.registry().clone()).unwrap(), + )); + } + CommandAction::PreviousShell => { + context.shell_manager.prev(); + } + CommandAction::NextShell => { + context.shell_manager.next(); + } + CommandAction::LeaveShell => { + context.shell_manager.remove_at_current(); + if context.shell_manager.is_empty() { + std::process::exit(0); // TODO: save history.txt + } + } + }, - ReturnSuccess::Value(v) => { - 
stream.push_back(v); + Ok(ReturnSuccess::Value(v)) => { + yield Ok(v); + } + + Err(x) => { + yield Ok(Value::Error(x).tagged_unknown()); + break; + } } } - } + }; - Ok(stream.into()) + Ok(stream.to_input_stream()) } } @@ -346,7 +347,7 @@ impl ExternalCommand { let stdout = popen.stdout.take().unwrap(); let file = futures::io::AllowStdIo::new(stdout); let stream = Framed::new(file, LinesCodec {}); - let stream = stream.map(move |line| Value::string(line.unwrap()).tagged(name_tag)); + let stream = stream.map(move |line| Value::string(line.unwrap()).tagged(&name_tag)); Ok(ClassifiedInputStream::from_input_stream( stream.boxed() as BoxStream<'static, Tagged> )) diff --git a/src/commands/command.rs b/src/commands/command.rs index 7fb08bcefa..5f3f4809bd 100644 --- a/src/commands/command.rs +++ b/src/commands/command.rs @@ -1,4 +1,3 @@ -use crate::context::{AnchorLocation, SourceMap}; use crate::data::Value; use crate::errors::ShellError; use crate::evaluate::Scope; @@ -11,13 +10,12 @@ use serde::{Deserialize, Serialize}; use std::fmt; use std::ops::Deref; use std::path::PathBuf; -use uuid::Uuid; +use std::sync::atomic::AtomicBool; #[derive(Deserialize, Serialize, Debug, Clone)] pub struct UnevaluatedCallInfo { pub args: hir::Call, pub source: Text, - pub source_map: SourceMap, pub name_tag: Tag, } @@ -37,7 +35,6 @@ impl UnevaluatedCallInfo { Ok(CallInfo { args, - source_map: self.source_map, name_tag: self.name_tag, }) } @@ -46,7 +43,6 @@ impl UnevaluatedCallInfo { #[derive(Deserialize, Serialize, Debug, Clone)] pub struct CallInfo { pub args: registry::EvaluatedArgs, - pub source_map: SourceMap, pub name_tag: Tag, } @@ -62,7 +58,7 @@ impl CallInfo { args: T::deserialize(&mut deserializer)?, context: RunnablePerItemContext { shell_manager: shell_manager.clone(), - name: self.name_tag, + name: self.name_tag.clone(), }, callback, }) @@ -73,6 +69,7 @@ impl CallInfo { #[get = "pub(crate)"] pub struct CommandArgs { pub host: Arc>, + pub ctrl_c: Arc, pub shell_manager: 
ShellManager, pub call_info: UnevaluatedCallInfo, pub input: InputStream, @@ -82,6 +79,7 @@ pub struct CommandArgs { #[get = "pub(crate)"] pub struct RawCommandArgs { pub host: Arc>, + pub ctrl_c: Arc, pub shell_manager: ShellManager, pub call_info: UnevaluatedCallInfo, } @@ -90,6 +88,7 @@ impl RawCommandArgs { pub fn with_input(self, input: Vec>) -> CommandArgs { CommandArgs { host: self.host, + ctrl_c: self.ctrl_c, shell_manager: self.shell_manager, call_info: self.call_info, input: input.into(), @@ -109,12 +108,14 @@ impl CommandArgs { registry: ®istry::CommandRegistry, ) -> Result { let host = self.host.clone(); + let ctrl_c = self.ctrl_c.clone(); let shell_manager = self.shell_manager.clone(); let input = self.input; let call_info = self.call_info.evaluate(registry, &Scope::empty())?; Ok(EvaluatedWholeStreamCommandArgs::new( host, + ctrl_c, shell_manager, call_info, input, @@ -127,12 +128,13 @@ impl CommandArgs { callback: fn(T, RunnableContext) -> Result, ) -> Result, ShellError> { let shell_manager = self.shell_manager.clone(); - let source_map = self.call_info.source_map.clone(); let host = self.host.clone(); + let ctrl_c = self.ctrl_c.clone(); let args = self.evaluate_once(registry)?; + let call_info = args.call_info.clone(); let (input, args) = args.split(); let name_tag = args.call_info.name_tag; - let mut deserializer = ConfigDeserializer::from_call_info(args.call_info); + let mut deserializer = ConfigDeserializer::from_call_info(call_info); Ok(RunnableArgs { args: T::deserialize(&mut deserializer)?, @@ -141,8 +143,8 @@ impl CommandArgs { commands: registry.clone(), shell_manager, name: name_tag, - source_map, host, + ctrl_c, }, callback, }) @@ -155,17 +157,20 @@ impl CommandArgs { ) -> Result, ShellError> { let raw_args = RawCommandArgs { host: self.host.clone(), + ctrl_c: self.ctrl_c.clone(), shell_manager: self.shell_manager.clone(), call_info: self.call_info.clone(), }; let shell_manager = self.shell_manager.clone(); - let source_map = 
self.call_info.source_map.clone(); let host = self.host.clone(); + let ctrl_c = self.ctrl_c.clone(); let args = self.evaluate_once(registry)?; + let call_info = args.call_info.clone(); + let (input, args) = args.split(); let name_tag = args.call_info.name_tag; - let mut deserializer = ConfigDeserializer::from_call_info(args.call_info); + let mut deserializer = ConfigDeserializer::from_call_info(call_info.clone()); Ok(RunnableRawArgs { args: T::deserialize(&mut deserializer)?, @@ -174,8 +179,8 @@ impl CommandArgs { commands: registry.clone(), shell_manager, name: name_tag, - source_map, host, + ctrl_c, }, raw_args, callback, @@ -198,18 +203,12 @@ pub struct RunnableContext { pub input: InputStream, pub shell_manager: ShellManager, pub host: Arc>, + pub ctrl_c: Arc, pub commands: CommandRegistry, - pub source_map: SourceMap, pub name: Tag, } impl RunnableContext { - pub fn expect_command(&self, name: &str) -> Arc { - self.commands - .get_command(name) - .expect(&format!("Expected command {}", name)) - } - pub fn get_command(&self, name: &str) -> Option> { self.commands.get_command(name) } @@ -270,6 +269,7 @@ impl Deref for EvaluatedWholeStreamCommandArgs { impl EvaluatedWholeStreamCommandArgs { pub fn new( host: Arc>, + ctrl_c: Arc, shell_manager: ShellManager, call_info: CallInfo, input: impl Into, @@ -277,6 +277,7 @@ impl EvaluatedWholeStreamCommandArgs { EvaluatedWholeStreamCommandArgs { args: EvaluatedCommandArgs { host, + ctrl_c, shell_manager, call_info, }, @@ -285,7 +286,7 @@ impl EvaluatedWholeStreamCommandArgs { } pub fn name_tag(&self) -> Tag { - self.args.call_info.name_tag + self.args.call_info.name_tag.clone() } pub fn parts(self) -> (InputStream, registry::EvaluatedArgs) { @@ -317,12 +318,14 @@ impl Deref for EvaluatedFilterCommandArgs { impl EvaluatedFilterCommandArgs { pub fn new( host: Arc>, + ctrl_c: Arc, shell_manager: ShellManager, call_info: CallInfo, ) -> EvaluatedFilterCommandArgs { EvaluatedFilterCommandArgs { args: EvaluatedCommandArgs { 
host, + ctrl_c, shell_manager, call_info, }, @@ -334,6 +337,7 @@ impl EvaluatedFilterCommandArgs { #[get = "pub(crate)"] pub struct EvaluatedCommandArgs { pub host: Arc>, + pub ctrl_c: Arc, pub shell_manager: ShellManager, pub call_info: CallInfo, } @@ -376,7 +380,6 @@ impl EvaluatedCommandArgs { #[derive(Debug, Serialize, Deserialize)] pub enum CommandAction { ChangePath(String), - AddAnchorLocation(Uuid, AnchorLocation), Exit, EnterShell(String), EnterValueShell(Tagged), @@ -390,9 +393,6 @@ impl ToDebug for CommandAction { fn fmt_debug(&self, f: &mut fmt::Formatter, _source: &str) -> fmt::Result { match self { CommandAction::ChangePath(s) => write!(f, "action:change-path={}", s), - CommandAction::AddAnchorLocation(u, source) => { - write!(f, "action:add-span-source={}@{:?}", u, source) - } CommandAction::Exit => write!(f, "action:exit"), CommandAction::EnterShell(s) => write!(f, "action:enter-shell={}", s), CommandAction::EnterValueShell(t) => { @@ -564,6 +564,7 @@ impl Command { ) -> OutputStream { let raw_args = RawCommandArgs { host: args.host, + ctrl_c: args.ctrl_c, shell_manager: args.shell_manager, call_info: args.call_info, }; @@ -633,6 +634,7 @@ impl WholeStreamCommand for FnFilterCommand { ) -> Result { let CommandArgs { host, + ctrl_c, shell_manager, call_info, input, @@ -650,8 +652,12 @@ impl WholeStreamCommand for FnFilterCommand { Ok(args) => args, }; - let args = - EvaluatedFilterCommandArgs::new(host.clone(), shell_manager.clone(), call_info); + let args = EvaluatedFilterCommandArgs::new( + host.clone(), + ctrl_c.clone(), + shell_manager.clone(), + call_info, + ); match func(args) { Err(err) => return OutputStream::from(vec![Err(err)]).values, diff --git a/src/commands/config.rs b/src/commands/config.rs index 337e3437f9..82fbbf1db6 100644 --- a/src/commands/config.rs +++ b/src/commands/config.rs @@ -58,7 +58,7 @@ pub fn config( }: ConfigArgs, RunnableContext { name, .. 
}: RunnableContext, ) -> Result { - let name_span = name; + let name_span = name.clone(); let configuration = if let Some(supplied) = load { Some(supplied.item().clone()) diff --git a/src/commands/date.rs b/src/commands/date.rs index 6df9e27209..bff6b550f7 100644 --- a/src/commands/date.rs +++ b/src/commands/date.rs @@ -39,27 +39,27 @@ where { let mut indexmap = IndexMap::new(); - indexmap.insert("year".to_string(), Value::int(dt.year()).tagged(tag)); - indexmap.insert("month".to_string(), Value::int(dt.month()).tagged(tag)); - indexmap.insert("day".to_string(), Value::int(dt.day()).tagged(tag)); - indexmap.insert("hour".to_string(), Value::int(dt.hour()).tagged(tag)); - indexmap.insert("minute".to_string(), Value::int(dt.minute()).tagged(tag)); - indexmap.insert("second".to_string(), Value::int(dt.second()).tagged(tag)); + indexmap.insert("year".to_string(), Value::int(dt.year()).tagged(&tag)); + indexmap.insert("month".to_string(), Value::int(dt.month()).tagged(&tag)); + indexmap.insert("day".to_string(), Value::int(dt.day()).tagged(&tag)); + indexmap.insert("hour".to_string(), Value::int(dt.hour()).tagged(&tag)); + indexmap.insert("minute".to_string(), Value::int(dt.minute()).tagged(&tag)); + indexmap.insert("second".to_string(), Value::int(dt.second()).tagged(&tag)); let tz = dt.offset(); indexmap.insert( "timezone".to_string(), - Value::string(format!("{}", tz)).tagged(tag), + Value::string(format!("{}", tz)).tagged(&tag), ); - Value::Row(Dictionary::from(indexmap)).tagged(tag) + Value::Row(Dictionary::from(indexmap)).tagged(&tag) } pub fn date(args: CommandArgs, registry: &CommandRegistry) -> Result { let args = args.evaluate_once(registry)?; let mut date_out = VecDeque::new(); - let tag = args.call_info.name_tag; + let tag = args.call_info.name_tag.clone(); let value = if args.has("utc") { let utc: DateTime = Utc::now(); diff --git a/src/commands/echo.rs b/src/commands/echo.rs index 5bfc12efb7..4483f91371 100644 --- a/src/commands/echo.rs +++ 
b/src/commands/echo.rs @@ -35,7 +35,7 @@ fn run( _registry: &CommandRegistry, _raw_args: &RawCommandArgs, ) -> Result { - let name = call_info.name_tag; + let name = call_info.name_tag.clone(); let mut output = String::new(); diff --git a/src/commands/enter.rs b/src/commands/enter.rs index 94688acd56..4a400241e8 100644 --- a/src/commands/enter.rs +++ b/src/commands/enter.rs @@ -67,7 +67,7 @@ impl PerItemCommand for Enter { let full_path = std::path::PathBuf::from(cwd); - let (file_extension, contents, contents_tag, anchor_location) = + let (file_extension, contents, contents_tag) = crate::commands::open::fetch( &full_path, &location_clone, @@ -75,18 +75,9 @@ impl PerItemCommand for Enter { ) .await.unwrap(); - if contents_tag.anchor != uuid::Uuid::nil() { - // If we have loaded something, track its source - yield ReturnSuccess::action(CommandAction::AddAnchorLocation( - contents_tag.anchor, - anchor_location, - )); - } - - match contents { Value::Primitive(Primitive::String(_)) => { - let tagged_contents = contents.tagged(contents_tag); + let tagged_contents = contents.tagged(&contents_tag); if let Some(extension) = file_extension { let command_name = format!("from-{}", extension); @@ -95,6 +86,7 @@ impl PerItemCommand for Enter { { let new_args = RawCommandArgs { host: raw_args.host, + ctrl_c: raw_args.ctrl_c, shell_manager: raw_args.shell_manager, call_info: UnevaluatedCallInfo { args: crate::parser::hir::Call { @@ -103,7 +95,6 @@ impl PerItemCommand for Enter { named: None, }, source: raw_args.call_info.source, - source_map: raw_args.call_info.source_map, name_tag: raw_args.call_info.name_tag, }, }; @@ -123,7 +114,7 @@ impl PerItemCommand for Enter { yield Ok(ReturnSuccess::Action(CommandAction::EnterValueShell( Tagged { item, - tag: contents_tag, + tag: contents_tag.clone(), }))); } x => yield x, diff --git a/src/commands/env.rs b/src/commands/env.rs index c0af785557..0572b499c1 100644 --- a/src/commands/env.rs +++ b/src/commands/env.rs @@ -37,22 +37,22 @@ pub 
fn get_environment(tag: Tag) -> Result, Box Result, Box Result { let args = args.evaluate_once(registry)?; let mut env_out = VecDeque::new(); - let tag = args.call_info.name_tag; + let tag = args.call_info.name_tag.clone(); let value = get_environment(tag)?; env_out.push_back(value); diff --git a/src/commands/fetch.rs b/src/commands/fetch.rs index e7966a61bf..e66536729f 100644 --- a/src/commands/fetch.rs +++ b/src/commands/fetch.rs @@ -10,7 +10,6 @@ use mime::Mime; use std::path::PathBuf; use std::str::FromStr; use surf::mime; -use uuid::Uuid; pub struct Fetch; impl PerItemCommand for Fetch { @@ -48,7 +47,7 @@ fn run( ShellError::labeled_error( "No file or directory specified", "for command", - call_info.name_tag, + &call_info.name_tag, ) })? { file => file, @@ -68,7 +67,7 @@ fn run( yield Err(e); return; } - let (file_extension, contents, contents_tag, anchor_location) = result.unwrap(); + let (file_extension, contents, contents_tag) = result.unwrap(); let file_extension = if has_raw { None @@ -78,21 +77,14 @@ fn run( file_extension.or(path_str.split('.').last().map(String::from)) }; - if contents_tag.anchor != uuid::Uuid::nil() { - // If we have loaded something, track its source - yield ReturnSuccess::action(CommandAction::AddAnchorLocation( - contents_tag.anchor, - anchor_location, - )); - } - - let tagged_contents = contents.tagged(contents_tag); + let tagged_contents = contents.tagged(&contents_tag); if let Some(extension) = file_extension { let command_name = format!("from-{}", extension); if let Some(converter) = registry.get_command(&command_name) { let new_args = RawCommandArgs { host: raw_args.host, + ctrl_c: raw_args.ctrl_c, shell_manager: raw_args.shell_manager, call_info: UnevaluatedCallInfo { args: crate::parser::hir::Call { @@ -101,7 +93,6 @@ fn run( named: None }, source: raw_args.call_info.source, - source_map: raw_args.call_info.source_map, name_tag: raw_args.call_info.name_tag, } }; @@ -115,7 +106,7 @@ fn run( } } Ok(ReturnSuccess::Value(Tagged 
{ item, .. })) => { - yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag })); + yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() })); } x => yield x, } @@ -131,10 +122,7 @@ fn run( Ok(stream.to_output_stream()) } -pub async fn fetch( - location: &str, - span: Span, -) -> Result<(Option, Value, Tag, AnchorLocation), ShellError> { +pub async fn fetch(location: &str, span: Span) -> Result<(Option, Value, Tag), ShellError> { if let Err(_) = url::Url::parse(location) { return Err(ShellError::labeled_error( "Incomplete or incorrect url", @@ -160,9 +148,8 @@ pub async fn fetch( })?), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::Url(location.to_string())), }, - AnchorLocation::Url(location.to_string()), )), (mime::APPLICATION, mime::JSON) => Ok(( Some("json".to_string()), @@ -175,9 +162,8 @@ pub async fn fetch( })?), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::Url(location.to_string())), }, - AnchorLocation::Url(location.to_string()), )), (mime::APPLICATION, mime::OCTET_STREAM) => { let buf: Vec = r.body_bytes().await.map_err(|_| { @@ -192,9 +178,8 @@ pub async fn fetch( Value::binary(buf), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::Url(location.to_string())), }, - AnchorLocation::Url(location.to_string()), )) } (mime::IMAGE, mime::SVG) => Ok(( @@ -208,9 +193,8 @@ pub async fn fetch( })?), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::Url(location.to_string())), }, - AnchorLocation::Url(location.to_string()), )), (mime::IMAGE, image_ty) => { let buf: Vec = r.body_bytes().await.map_err(|_| { @@ -225,9 +209,8 @@ pub async fn fetch( Value::binary(buf), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::Url(location.to_string())), }, - AnchorLocation::Url(location.to_string()), )) } (mime::TEXT, mime::HTML) => Ok(( @@ -241,9 +224,8 @@ pub async fn fetch( })?), Tag { span, - anchor: Uuid::new_v4(), + anchor: 
Some(AnchorLocation::Url(location.to_string())), }, - AnchorLocation::Url(location.to_string()), )), (mime::TEXT, mime::PLAIN) => { let path_extension = url::Url::parse(location) @@ -268,9 +250,8 @@ pub async fn fetch( })?), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::Url(location.to_string())), }, - AnchorLocation::Url(location.to_string()), )) } (ty, sub_ty) => Ok(( @@ -278,9 +259,8 @@ pub async fn fetch( Value::string(format!("Not yet supported MIME type: {} {}", ty, sub_ty)), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::Url(location.to_string())), }, - AnchorLocation::Url(location.to_string()), )), } } @@ -289,9 +269,8 @@ pub async fn fetch( Value::string(format!("No content type found")), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::Url(location.to_string())), }, - AnchorLocation::Url(location.to_string()), )), }, Err(_) => { diff --git a/src/commands/from_bson.rs b/src/commands/from_bson.rs index 7dd00983fc..469e15f35e 100644 --- a/src/commands/from_bson.rs +++ b/src/commands/from_bson.rs @@ -33,7 +33,7 @@ fn bson_array(input: &Vec, tag: Tag) -> Result>, ShellEr let mut out = vec![]; for value in input { - out.push(convert_bson_value_to_nu_value(value, tag)?); + out.push(convert_bson_value_to_nu_value(value, &tag)?); } Ok(out) @@ -46,100 +46,100 @@ fn convert_bson_value_to_nu_value( let tag = tag.into(); Ok(match v { - Bson::FloatingPoint(n) => Value::Primitive(Primitive::from(*n)).tagged(tag), - Bson::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(tag), - Bson::Array(a) => Value::Table(bson_array(a, tag)?).tagged(tag), + Bson::FloatingPoint(n) => Value::Primitive(Primitive::from(*n)).tagged(&tag), + Bson::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(&tag), + Bson::Array(a) => Value::Table(bson_array(a, tag.clone())?).tagged(&tag), Bson::Document(doc) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = 
TaggedDictBuilder::new(tag.clone()); for (k, v) in doc.iter() { - collected.insert_tagged(k.clone(), convert_bson_value_to_nu_value(v, tag)?); + collected.insert_tagged(k.clone(), convert_bson_value_to_nu_value(v, &tag)?); } collected.into_tagged_value() } - Bson::Boolean(b) => Value::Primitive(Primitive::Boolean(*b)).tagged(tag), - Bson::Null => Value::Primitive(Primitive::Nothing).tagged(tag), + Bson::Boolean(b) => Value::Primitive(Primitive::Boolean(*b)).tagged(&tag), + Bson::Null => Value::Primitive(Primitive::Nothing).tagged(&tag), Bson::RegExp(r, opts) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(tag.clone()); collected.insert_tagged( "$regex".to_string(), - Value::Primitive(Primitive::String(String::from(r))).tagged(tag), + Value::Primitive(Primitive::String(String::from(r))).tagged(&tag), ); collected.insert_tagged( "$options".to_string(), - Value::Primitive(Primitive::String(String::from(opts))).tagged(tag), + Value::Primitive(Primitive::String(String::from(opts))).tagged(&tag), ); collected.into_tagged_value() } - Bson::I32(n) => Value::number(n).tagged(tag), - Bson::I64(n) => Value::number(n).tagged(tag), + Bson::I32(n) => Value::number(n).tagged(&tag), + Bson::I64(n) => Value::number(n).tagged(&tag), Bson::Decimal128(n) => { // TODO: this really isn't great, and we should update this to do a higher // fidelity translation let decimal = BigDecimal::from_str(&format!("{}", n)).map_err(|_| { ShellError::range_error( ExpectedRange::BigDecimal, - &n.tagged(tag), + &n.tagged(&tag), format!("converting BSON Decimal128 to BigDecimal"), ) })?; - Value::Primitive(Primitive::Decimal(decimal)).tagged(tag) + Value::Primitive(Primitive::Decimal(decimal)).tagged(&tag) } Bson::JavaScriptCode(js) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(tag.clone()); collected.insert_tagged( "$javascript".to_string(), - 
Value::Primitive(Primitive::String(String::from(js))).tagged(tag), + Value::Primitive(Primitive::String(String::from(js))).tagged(&tag), ); collected.into_tagged_value() } Bson::JavaScriptCodeWithScope(js, doc) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(tag.clone()); collected.insert_tagged( "$javascript".to_string(), - Value::Primitive(Primitive::String(String::from(js))).tagged(tag), + Value::Primitive(Primitive::String(String::from(js))).tagged(&tag), ); collected.insert_tagged( "$scope".to_string(), - convert_bson_value_to_nu_value(&Bson::Document(doc.to_owned()), tag)?, + convert_bson_value_to_nu_value(&Bson::Document(doc.to_owned()), tag.clone())?, ); collected.into_tagged_value() } Bson::TimeStamp(ts) => { - let mut collected = TaggedDictBuilder::new(tag); - collected.insert_tagged("$timestamp".to_string(), Value::number(ts).tagged(tag)); + let mut collected = TaggedDictBuilder::new(tag.clone()); + collected.insert_tagged("$timestamp".to_string(), Value::number(ts).tagged(&tag)); collected.into_tagged_value() } Bson::Binary(bst, bytes) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(tag.clone()); collected.insert_tagged( "$binary_subtype".to_string(), match bst { BinarySubtype::UserDefined(u) => Value::number(u), _ => Value::Primitive(Primitive::String(binary_subtype_to_string(*bst))), } - .tagged(tag), + .tagged(&tag), ); collected.insert_tagged( "$binary".to_string(), - Value::Primitive(Primitive::Binary(bytes.to_owned())).tagged(tag), + Value::Primitive(Primitive::Binary(bytes.to_owned())).tagged(&tag), ); collected.into_tagged_value() } Bson::ObjectId(obj_id) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(tag.clone()); collected.insert_tagged( "$object_id".to_string(), - Value::Primitive(Primitive::String(obj_id.to_hex())).tagged(tag), + 
Value::Primitive(Primitive::String(obj_id.to_hex())).tagged(&tag), ); collected.into_tagged_value() } - Bson::UtcDatetime(dt) => Value::Primitive(Primitive::Date(*dt)).tagged(tag), + Bson::UtcDatetime(dt) => Value::Primitive(Primitive::Date(*dt)).tagged(&tag), Bson::Symbol(s) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(tag.clone()); collected.insert_tagged( "$symbol".to_string(), - Value::Primitive(Primitive::String(String::from(s))).tagged(tag), + Value::Primitive(Primitive::String(String::from(s))).tagged(&tag), ); collected.into_tagged_value() } @@ -208,13 +208,13 @@ fn from_bson(args: CommandArgs, registry: &CommandRegistry) -> Result - match from_bson_bytes_to_value(vb, tag) { + match from_bson_bytes_to_value(vb, tag.clone()) { Ok(x) => yield ReturnSuccess::value(x), Err(_) => { yield Err(ShellError::labeled_error_with_secondary( "Could not parse as BSON", "input cannot be parsed as BSON", - tag, + tag.clone(), "value originates from here", value_tag, )) @@ -223,7 +223,7 @@ fn from_bson(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - tag, + tag.clone(), "value originates from here", value_tag, )), diff --git a/src/commands/from_csv.rs b/src/commands/from_csv.rs index ea90ab3de1..877c8dc166 100644 --- a/src/commands/from_csv.rs +++ b/src/commands/from_csv.rs @@ -62,12 +62,12 @@ pub fn from_csv_string_to_value( if let Some(row_values) = iter.next() { let row_values = row_values?; - let mut row = TaggedDictBuilder::new(tag); + let mut row = TaggedDictBuilder::new(tag.clone()); for (idx, entry) in row_values.iter().enumerate() { row.insert_tagged( fields.get(idx).unwrap(), - Value::Primitive(Primitive::String(String::from(entry))).tagged(tag), + Value::Primitive(Primitive::String(String::from(entry))).tagged(&tag), ); } @@ -77,7 +77,7 @@ pub fn from_csv_string_to_value( } } - 
Ok(Tagged::from_item(Value::Table(rows), tag)) + Ok(Value::Table(rows).tagged(&tag)) } fn from_csv( @@ -96,7 +96,7 @@ fn from_csv( for value in values { let value_tag = value.tag(); - latest_tag = Some(value_tag); + latest_tag = Some(value_tag.clone()); match value.item { Value::Primitive(Primitive::String(s)) => { concat_string.push_str(&s); @@ -105,15 +105,15 @@ fn from_csv( _ => yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - name_tag, + name_tag.clone(), "value originates from here", - value_tag, + value_tag.clone(), )), } } - match from_csv_string_to_value(concat_string, skip_headers, name_tag) { + match from_csv_string_to_value(concat_string, skip_headers, name_tag.clone()) { Ok(x) => match x { Tagged { item: Value::Table(list), .. } => { for l in list { @@ -126,9 +126,9 @@ fn from_csv( yield Err(ShellError::labeled_error_with_secondary( "Could not parse as CSV", "input cannot be parsed as CSV", - name_tag, + name_tag.clone(), "value originates from here", - last_tag, + last_tag.clone(), )) } , } diff --git a/src/commands/from_ini.rs b/src/commands/from_ini.rs index d53ad67773..e55bbd45c4 100644 --- a/src/commands/from_ini.rs +++ b/src/commands/from_ini.rs @@ -45,10 +45,13 @@ fn convert_ini_top_to_nu_value( tag: impl Into, ) -> Tagged { let tag = tag.into(); - let mut top_level = TaggedDictBuilder::new(tag); + let mut top_level = TaggedDictBuilder::new(tag.clone()); for (key, value) in v.iter() { - top_level.insert_tagged(key.clone(), convert_ini_second_to_nu_value(value, tag)); + top_level.insert_tagged( + key.clone(), + convert_ini_second_to_nu_value(value, tag.clone()), + ); } top_level.into_tagged_value() @@ -75,7 +78,7 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result { concat_string.push_str(&s); @@ -84,15 +87,15 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error_with_secondary( "Expected a string from 
pipeline", "requires string input", - tag, + &tag, "value originates from here", - value_tag, + &value_tag, )), } } - match from_ini_string_to_value(concat_string, tag) { + match from_ini_string_to_value(concat_string, tag.clone()) { Ok(x) => match x { Tagged { item: Value::Table(list), .. } => { for l in list { @@ -105,7 +108,7 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result) - let tag = tag.into(); match v { - serde_hjson::Value::Null => Value::Primitive(Primitive::Nothing).tagged(tag), - serde_hjson::Value::Bool(b) => Value::boolean(*b).tagged(tag), - serde_hjson::Value::F64(n) => Value::number(n).tagged(tag), - serde_hjson::Value::U64(n) => Value::number(n).tagged(tag), - serde_hjson::Value::I64(n) => Value::number(n).tagged(tag), + serde_hjson::Value::Null => Value::Primitive(Primitive::Nothing).tagged(&tag), + serde_hjson::Value::Bool(b) => Value::boolean(*b).tagged(&tag), + serde_hjson::Value::F64(n) => Value::number(n).tagged(&tag), + serde_hjson::Value::U64(n) => Value::number(n).tagged(&tag), + serde_hjson::Value::I64(n) => Value::number(n).tagged(&tag), serde_hjson::Value::String(s) => { - Value::Primitive(Primitive::String(String::from(s))).tagged(tag) + Value::Primitive(Primitive::String(String::from(s))).tagged(&tag) } serde_hjson::Value::Array(a) => Value::Table( a.iter() - .map(|x| convert_json_value_to_nu_value(x, tag)) + .map(|x| convert_json_value_to_nu_value(x, &tag)) .collect(), ) .tagged(tag), serde_hjson::Value::Object(o) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(&tag); for (k, v) in o.iter() { - collected.insert_tagged(k.clone(), convert_json_value_to_nu_value(v, tag)); + collected.insert_tagged(k.clone(), convert_json_value_to_nu_value(v, &tag)); } collected.into_tagged_value() @@ -82,7 +82,7 @@ fn from_json( for value in values { let value_tag = value.tag(); - latest_tag = Some(value_tag); + latest_tag = Some(value_tag.clone()); match value.item { 
Value::Primitive(Primitive::String(s)) => { concat_string.push_str(&s); @@ -91,9 +91,9 @@ fn from_json( _ => yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - name_tag, + &name_tag, "value originates from here", - value_tag, + &value_tag, )), } @@ -106,15 +106,15 @@ fn from_json( continue; } - match from_json_string_to_value(json_str.to_string(), name_tag) { + match from_json_string_to_value(json_str.to_string(), &name_tag) { Ok(x) => yield ReturnSuccess::value(x), Err(_) => { - if let Some(last_tag) = latest_tag { + if let Some(ref last_tag) = latest_tag { yield Err(ShellError::labeled_error_with_secondary( "Could nnot parse as JSON", "input cannot be parsed as JSON", - name_tag, + &name_tag, "value originates from here", last_tag)) } @@ -122,7 +122,7 @@ fn from_json( } } } else { - match from_json_string_to_value(concat_string, name_tag) { + match from_json_string_to_value(concat_string, name_tag.clone()) { Ok(x) => match x { Tagged { item: Value::Table(list), .. } => { diff --git a/src/commands/from_sqlite.rs b/src/commands/from_sqlite.rs index 20d087bd5c..7b93dc1633 100644 --- a/src/commands/from_sqlite.rs +++ b/src/commands/from_sqlite.rs @@ -138,7 +138,7 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result - match from_sqlite_bytes_to_value(vb, tag) { + match from_sqlite_bytes_to_value(vb, tag.clone()) { Ok(x) => match x { Tagged { item: Value::Table(list), .. 
} => { for l in list { @@ -151,7 +151,7 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result Result yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - tag, + &tag, "value originates from here", value_tag, )), diff --git a/src/commands/from_toml.rs b/src/commands/from_toml.rs index c0098d9267..2cfd059165 100644 --- a/src/commands/from_toml.rs +++ b/src/commands/from_toml.rs @@ -36,7 +36,7 @@ pub fn convert_toml_value_to_nu_value(v: &toml::Value, tag: impl Into) -> T toml::Value::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(tag), toml::Value::Array(a) => Value::Table( a.iter() - .map(|x| convert_toml_value_to_nu_value(x, tag)) + .map(|x| convert_toml_value_to_nu_value(x, &tag)) .collect(), ) .tagged(tag), @@ -44,10 +44,10 @@ pub fn convert_toml_value_to_nu_value(v: &toml::Value, tag: impl Into) -> T Value::Primitive(Primitive::String(dt.to_string())).tagged(tag) } toml::Value::Table(t) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(&tag); for (k, v) in t.iter() { - collected.insert_tagged(k.clone(), convert_toml_value_to_nu_value(v, tag)); + collected.insert_tagged(k.clone(), convert_toml_value_to_nu_value(v, &tag)); } collected.into_tagged_value() @@ -79,7 +79,7 @@ pub fn from_toml( for value in values { let value_tag = value.tag(); - latest_tag = Some(value_tag); + latest_tag = Some(value_tag.clone()); match value.item { Value::Primitive(Primitive::String(s)) => { concat_string.push_str(&s); @@ -88,15 +88,15 @@ pub fn from_toml( _ => yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - tag, + &tag, "value originates from here", - value_tag, + &value_tag, )), } } - match from_toml_string_to_value(concat_string, tag) { + match from_toml_string_to_value(concat_string, tag.clone()) { Ok(x) => match x { Tagged { item: Value::Table(list), .. 
} => { for l in list { @@ -109,7 +109,7 @@ pub fn from_toml( yield Err(ShellError::labeled_error_with_secondary( "Could not parse as TOML", "input cannot be parsed as TOML", - tag, + &tag, "value originates from here", last_tag, )) diff --git a/src/commands/from_tsv.rs b/src/commands/from_tsv.rs index bba532d17b..80951b71aa 100644 --- a/src/commands/from_tsv.rs +++ b/src/commands/from_tsv.rs @@ -63,12 +63,12 @@ pub fn from_tsv_string_to_value( if let Some(row_values) = iter.next() { let row_values = row_values?; - let mut row = TaggedDictBuilder::new(tag); + let mut row = TaggedDictBuilder::new(&tag); for (idx, entry) in row_values.iter().enumerate() { row.insert_tagged( fields.get(idx).unwrap(), - Value::Primitive(Primitive::String(String::from(entry))).tagged(tag), + Value::Primitive(Primitive::String(String::from(entry))).tagged(&tag), ); } @@ -78,7 +78,7 @@ pub fn from_tsv_string_to_value( } } - Ok(Tagged::from_item(Value::Table(rows), tag)) + Ok(Value::Table(rows).tagged(&tag)) } fn from_tsv( @@ -97,7 +97,7 @@ fn from_tsv( for value in values { let value_tag = value.tag(); - latest_tag = Some(value_tag); + latest_tag = Some(value_tag.clone()); match value.item { Value::Primitive(Primitive::String(s)) => { concat_string.push_str(&s); @@ -106,15 +106,15 @@ fn from_tsv( _ => yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - name_tag, + &name_tag, "value originates from here", - value_tag, + &value_tag, )), } } - match from_tsv_string_to_value(concat_string, skip_headers, name_tag) { + match from_tsv_string_to_value(concat_string, skip_headers, name_tag.clone()) { Ok(x) => match x { Tagged { item: Value::Table(list), .. 
} => { for l in list { @@ -127,9 +127,9 @@ fn from_tsv( yield Err(ShellError::labeled_error_with_secondary( "Could not parse as TSV", "input cannot be parsed as TSV", - name_tag, + &name_tag, "value originates from here", - last_tag, + &last_tag, )) } , } diff --git a/src/commands/from_url.rs b/src/commands/from_url.rs index 662508deb6..ad23ea5b53 100644 --- a/src/commands/from_url.rs +++ b/src/commands/from_url.rs @@ -39,7 +39,7 @@ fn from_url(args: CommandArgs, registry: &CommandRegistry) -> Result { concat_string.push_str(&s); @@ -47,9 +47,9 @@ fn from_url(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - tag, + &tag, "value originates from here", - value_tag, + &value_tag, )), } diff --git a/src/commands/from_xml.rs b/src/commands/from_xml.rs index 5bba67b42a..0425eb408b 100644 --- a/src/commands/from_xml.rs +++ b/src/commands/from_xml.rs @@ -34,7 +34,7 @@ fn from_node_to_value<'a, 'd>(n: &roxmltree::Node<'a, 'd>, tag: impl Into) let mut children_values = vec![]; for c in n.children() { - children_values.push(from_node_to_value(&c, tag)); + children_values.push(from_node_to_value(&c, &tag)); } let children_values: Vec> = children_values @@ -94,7 +94,7 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result { concat_string.push_str(&s); @@ -103,15 +103,15 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - tag, + &tag, "value originates from here", - value_tag, + &value_tag, )), } } - match from_xml_string_to_value(concat_string, tag) { + match from_xml_string_to_value(concat_string, tag.clone()) { Ok(x) => match x { Tagged { item: Value::Table(list), .. 
} => { for l in list { @@ -124,9 +124,9 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result) -> serde_yaml::Value::String(s) => Value::string(s).tagged(tag), serde_yaml::Value::Sequence(a) => Value::Table( a.iter() - .map(|x| convert_yaml_value_to_nu_value(x, tag)) + .map(|x| convert_yaml_value_to_nu_value(x, &tag)) .collect(), ) .tagged(tag), serde_yaml::Value::Mapping(t) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(&tag); for (k, v) in t.iter() { match k { serde_yaml::Value::String(k) => { - collected.insert_tagged(k.clone(), convert_yaml_value_to_nu_value(v, tag)); + collected.insert_tagged(k.clone(), convert_yaml_value_to_nu_value(v, &tag)); } _ => unimplemented!("Unknown key type"), } @@ -108,7 +108,7 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result { concat_string.push_str(&s); @@ -117,15 +117,15 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - tag, + &tag, "value originates from here", - value_tag, + &value_tag, )), } } - match from_yaml_string_to_value(concat_string, tag) { + match from_yaml_string_to_value(concat_string, tag.clone()) { Ok(x) => match x { Tagged { item: Value::Table(list), .. 
} => { for l in list { @@ -138,9 +138,9 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result Ok(obj.clone()), - _ => Ok(Value::nothing().tagged(obj.tag)), + _ => Ok(Value::nothing().tagged(&obj.tag)), }, } } diff --git a/src/commands/help.rs b/src/commands/help.rs index d780f13459..04e03fb10d 100644 --- a/src/commands/help.rs +++ b/src/commands/help.rs @@ -26,7 +26,7 @@ impl PerItemCommand for Help { _raw_args: &RawCommandArgs, _input: Tagged, ) -> Result { - let tag = call_info.name_tag; + let tag = &call_info.name_tag; match call_info.args.nth(0) { Some(Tagged { diff --git a/src/commands/lines.rs b/src/commands/lines.rs index d2a9cdffd1..8375098b70 100644 --- a/src/commands/lines.rs +++ b/src/commands/lines.rs @@ -58,7 +58,7 @@ fn lines(args: CommandArgs, registry: &CommandRegistry) -> Result Result { - context.shell_manager.ls(path, context.name) + context.shell_manager.ls(path, &context) } diff --git a/src/commands/open.rs b/src/commands/open.rs index 6ea752e9da..2972144bcd 100644 --- a/src/commands/open.rs +++ b/src/commands/open.rs @@ -7,7 +7,6 @@ use crate::parser::hir::SyntaxShape; use crate::parser::registry::Signature; use crate::prelude::*; use std::path::{Path, PathBuf}; -use uuid::Uuid; pub struct Open; impl PerItemCommand for Open { @@ -49,7 +48,7 @@ fn run( ShellError::labeled_error( "No file or directory specified", "for command", - call_info.name_tag, + &call_info.name_tag, ) })? 
{ file => file, @@ -69,7 +68,7 @@ fn run( yield Err(e); return; } - let (file_extension, contents, contents_tag, anchor_location) = result.unwrap(); + let (file_extension, contents, contents_tag) = result.unwrap(); let file_extension = if has_raw { None @@ -79,21 +78,14 @@ fn run( file_extension.or(path_str.split('.').last().map(String::from)) }; - if contents_tag.anchor != uuid::Uuid::nil() { - // If we have loaded something, track its source - yield ReturnSuccess::action(CommandAction::AddAnchorLocation( - contents_tag.anchor, - anchor_location, - )); - } - - let tagged_contents = contents.tagged(contents_tag); + let tagged_contents = contents.tagged(&contents_tag); if let Some(extension) = file_extension { let command_name = format!("from-{}", extension); if let Some(converter) = registry.get_command(&command_name) { let new_args = RawCommandArgs { host: raw_args.host, + ctrl_c: raw_args.ctrl_c, shell_manager: raw_args.shell_manager, call_info: UnevaluatedCallInfo { args: crate::parser::hir::Call { @@ -102,7 +94,6 @@ fn run( named: None }, source: raw_args.call_info.source, - source_map: raw_args.call_info.source_map, name_tag: raw_args.call_info.name_tag, } }; @@ -116,7 +107,7 @@ fn run( } } Ok(ReturnSuccess::Value(Tagged { item, .. })) => { - yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag })); + yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() })); } x => yield x, } @@ -136,7 +127,7 @@ pub async fn fetch( cwd: &PathBuf, location: &str, span: Span, -) -> Result<(Option, Value, Tag, AnchorLocation), ShellError> { +) -> Result<(Option, Value, Tag), ShellError> { let mut cwd = cwd.clone(); cwd.push(Path::new(location)); @@ -149,9 +140,8 @@ pub async fn fetch( Value::string(s), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::File(cwd.to_string_lossy().to_string())), }, - AnchorLocation::File(cwd.to_string_lossy().to_string()), )), Err(_) => { //Non utf8 data. 
@@ -168,18 +158,20 @@ pub async fn fetch( Value::string(s), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::File( + cwd.to_string_lossy().to_string(), + )), }, - AnchorLocation::File(cwd.to_string_lossy().to_string()), )), Err(_) => Ok(( None, Value::binary(bytes), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::File( + cwd.to_string_lossy().to_string(), + )), }, - AnchorLocation::File(cwd.to_string_lossy().to_string()), )), } } else { @@ -188,9 +180,10 @@ pub async fn fetch( Value::binary(bytes), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::File( + cwd.to_string_lossy().to_string(), + )), }, - AnchorLocation::File(cwd.to_string_lossy().to_string()), )) } } @@ -206,18 +199,20 @@ pub async fn fetch( Value::string(s), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::File( + cwd.to_string_lossy().to_string(), + )), }, - AnchorLocation::File(cwd.to_string_lossy().to_string()), )), Err(_) => Ok(( None, Value::binary(bytes), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::File( + cwd.to_string_lossy().to_string(), + )), }, - AnchorLocation::File(cwd.to_string_lossy().to_string()), )), } } else { @@ -226,9 +221,10 @@ pub async fn fetch( Value::binary(bytes), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::File( + cwd.to_string_lossy().to_string(), + )), }, - AnchorLocation::File(cwd.to_string_lossy().to_string()), )) } } @@ -237,9 +233,10 @@ pub async fn fetch( Value::binary(bytes), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::File( + cwd.to_string_lossy().to_string(), + )), }, - AnchorLocation::File(cwd.to_string_lossy().to_string()), )), } } diff --git a/src/commands/pivot.rs b/src/commands/pivot.rs index 1a6bb901fb..e52ab90924 100644 --- a/src/commands/pivot.rs +++ b/src/commands/pivot.rs @@ -104,7 +104,7 @@ pub fn pivot(args: PivotArgs, context: RunnableContext) -> Result Result { + let name_tag = 
call_info.name_tag.clone(); let call_info = call_info.clone(); - let path = match call_info.args.nth(0).ok_or_else(|| { - ShellError::labeled_error("No url specified", "for command", call_info.name_tag) - })? { - file => file.clone(), - }; - let body = match call_info.args.nth(1).ok_or_else(|| { - ShellError::labeled_error("No body specified", "for command", call_info.name_tag) - })? { - file => file.clone(), - }; + let path = + match call_info.args.nth(0).ok_or_else(|| { + ShellError::labeled_error("No url specified", "for command", &name_tag) + })? { + file => file.clone(), + }; + let body = + match call_info.args.nth(1).ok_or_else(|| { + ShellError::labeled_error("No body specified", "for command", &name_tag) + })? { + file => file.clone(), + }; let path_str = path.as_string()?; let path_span = path.tag(); let has_raw = call_info.args.has("raw"); @@ -79,7 +82,7 @@ fn run( let headers = get_headers(&call_info)?; let stream = async_stream! { - let (file_extension, contents, contents_tag, anchor_location) = + let (file_extension, contents, contents_tag) = post(&path_str, &body, user, password, &headers, path_span, ®istry, &raw_args).await.unwrap(); let file_extension = if has_raw { @@ -90,21 +93,14 @@ fn run( file_extension.or(path_str.split('.').last().map(String::from)) }; - if contents_tag.anchor != uuid::Uuid::nil() { - // If we have loaded something, track its source - yield ReturnSuccess::action(CommandAction::AddAnchorLocation( - contents_tag.anchor, - anchor_location, - )); - } - - let tagged_contents = contents.tagged(contents_tag); + let tagged_contents = contents.tagged(&contents_tag); if let Some(extension) = file_extension { let command_name = format!("from-{}", extension); if let Some(converter) = registry.get_command(&command_name) { let new_args = RawCommandArgs { host: raw_args.host, + ctrl_c: raw_args.ctrl_c, shell_manager: raw_args.shell_manager, call_info: UnevaluatedCallInfo { args: crate::parser::hir::Call { @@ -113,7 +109,6 @@ fn run( named: 
None }, source: raw_args.call_info.source, - source_map: raw_args.call_info.source_map, name_tag: raw_args.call_info.name_tag, } }; @@ -127,7 +122,7 @@ fn run( } } Ok(ReturnSuccess::Value(Tagged { item, .. })) => { - yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag })); + yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() })); } x => yield x, } @@ -207,7 +202,7 @@ pub async fn post( tag: Tag, registry: &CommandRegistry, raw_args: &RawCommandArgs, -) -> Result<(Option, Value, Tag, AnchorLocation), ShellError> { +) -> Result<(Option, Value, Tag), ShellError> { let registry = registry.clone(); let raw_args = raw_args.clone(); if location.starts_with("http:") || location.starts_with("https:") { @@ -248,6 +243,7 @@ pub async fn post( if let Some(converter) = registry.get_command("to-json") { let new_args = RawCommandArgs { host: raw_args.host, + ctrl_c: raw_args.ctrl_c, shell_manager: raw_args.shell_manager, call_info: UnevaluatedCallInfo { args: crate::parser::hir::Call { @@ -256,7 +252,6 @@ pub async fn post( named: None, }, source: raw_args.call_info.source, - source_map: raw_args.call_info.source_map, name_tag: raw_args.call_info.name_tag, }, }; @@ -280,7 +275,7 @@ pub async fn post( return Err(ShellError::labeled_error( "Save could not successfully save", "unexpected data during save", - *tag, + tag, )); } } @@ -296,7 +291,7 @@ pub async fn post( return Err(ShellError::labeled_error( "Could not automatically convert table", "needs manual conversion", - *tag, + tag, )); } } @@ -312,11 +307,13 @@ pub async fn post( ShellError::labeled_error( "Could not load text from remote url", "could not load", - tag, + &tag, ) })?), - tag, - AnchorLocation::Url(location.to_string()), + Tag { + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, + }, )), (mime::APPLICATION, mime::JSON) => Ok(( Some("json".to_string()), @@ -324,25 +321,29 @@ pub async fn post( ShellError::labeled_error( "Could not load text from remote url", 
"could not load", - tag, + &tag, ) })?), - tag, - AnchorLocation::Url(location.to_string()), + Tag { + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, + }, )), (mime::APPLICATION, mime::OCTET_STREAM) => { let buf: Vec = r.body_bytes().await.map_err(|_| { ShellError::labeled_error( "Could not load binary file", "could not load", - tag, + &tag, ) })?; Ok(( None, Value::binary(buf), - tag, - AnchorLocation::Url(location.to_string()), + Tag { + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, + }, )) } (mime::IMAGE, image_ty) => { @@ -350,14 +351,16 @@ pub async fn post( ShellError::labeled_error( "Could not load image file", "could not load", - tag, + &tag, ) })?; Ok(( Some(image_ty.to_string()), Value::binary(buf), - tag, - AnchorLocation::Url(location.to_string()), + Tag { + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, + }, )) } (mime::TEXT, mime::HTML) => Ok(( @@ -366,11 +369,13 @@ pub async fn post( ShellError::labeled_error( "Could not load text from remote url", "could not load", - tag, + &tag, ) })?), - tag, - AnchorLocation::Url(location.to_string()), + Tag { + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, + }, )), (mime::TEXT, mime::PLAIN) => { let path_extension = url::Url::parse(location) @@ -390,11 +395,13 @@ pub async fn post( ShellError::labeled_error( "Could not load text from remote url", "could not load", - tag, + &tag, ) })?), - tag, - AnchorLocation::Url(location.to_string()), + Tag { + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, + }, )) } (ty, sub_ty) => Ok(( @@ -403,16 +410,20 @@ pub async fn post( "Not yet supported MIME type: {} {}", ty, sub_ty )), - tag, - AnchorLocation::Url(location.to_string()), + Tag { + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, + }, )), } } None => Ok(( None, Value::string(format!("No content type found")), - tag, - 
AnchorLocation::Url(location.to_string()), + Tag { + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, + }, )), }, Err(_) => { diff --git a/src/commands/save.rs b/src/commands/save.rs index 0156fc3557..ac48fe280f 100644 --- a/src/commands/save.rs +++ b/src/commands/save.rs @@ -119,33 +119,32 @@ fn save( input, name, shell_manager, - source_map, host, + ctrl_c, commands: registry, .. }: RunnableContext, raw_args: RawCommandArgs, ) -> Result { let mut full_path = PathBuf::from(shell_manager.path()); - let name_tag = name; + let name_tag = name.clone(); - let source_map = source_map.clone(); let stream = async_stream! { let input: Vec> = input.values.collect().await; if path.is_none() { // If there is no filename, check the metadata for the anchor filename if input.len() > 0 { let anchor = input[0].anchor(); - match source_map.get(&anchor) { + match anchor { Some(path) => match path { AnchorLocation::File(file) => { - full_path.push(Path::new(file)); + full_path.push(Path::new(&file)); } _ => { yield Err(ShellError::labeled_error( "Save requires a filepath (1)", "needs path", - name_tag, + name_tag.clone(), )); } }, @@ -153,7 +152,7 @@ fn save( yield Err(ShellError::labeled_error( "Save requires a filepath (2)", "needs path", - name_tag, + name_tag.clone(), )); } } @@ -161,7 +160,7 @@ fn save( yield Err(ShellError::labeled_error( "Save requires a filepath (3)", "needs path", - name_tag, + name_tag.clone(), )); } } else { @@ -179,6 +178,7 @@ fn save( if let Some(converter) = registry.get_command(&command_name) { let new_args = RawCommandArgs { host, + ctrl_c, shell_manager, call_info: UnevaluatedCallInfo { args: crate::parser::hir::Call { @@ -187,7 +187,6 @@ fn save( named: None }, source: raw_args.call_info.source, - source_map: raw_args.call_info.source_map, name_tag: raw_args.call_info.name_tag, } }; diff --git a/src/commands/shells.rs b/src/commands/shells.rs index 2aee2c8564..6058a42032 100644 --- a/src/commands/shells.rs +++ 
b/src/commands/shells.rs @@ -2,6 +2,7 @@ use crate::commands::WholeStreamCommand; use crate::data::TaggedDictBuilder; use crate::errors::ShellError; use crate::prelude::*; +use std::sync::atomic::Ordering; pub struct Shells; @@ -32,14 +33,14 @@ fn shells(args: CommandArgs, _registry: &CommandRegistry) -> Result Result Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - tag, + &tag, "value originates from here", v.tag(), )), diff --git a/src/commands/split_column.rs b/src/commands/split_column.rs index 00e2609f26..d174283023 100644 --- a/src/commands/split_column.rs +++ b/src/commands/split_column.rs @@ -94,7 +94,7 @@ fn split_column( _ => Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - name, + &name, "value originates from here", v.tag(), )), diff --git a/src/commands/split_row.rs b/src/commands/split_row.rs index e70e5cfa84..94f7564b40 100644 --- a/src/commands/split_row.rs +++ b/src/commands/split_row.rs @@ -60,7 +60,7 @@ fn split_row( result.push_back(Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - name, + &name, "value originates from here", v.tag(), ))); diff --git a/src/commands/table.rs b/src/commands/table.rs index e9fbe35f2e..8ad2c246db 100644 --- a/src/commands/table.rs +++ b/src/commands/table.rs @@ -5,16 +5,13 @@ use crate::prelude::*; pub struct Table; -#[derive(Deserialize)] -pub struct TableArgs {} - impl WholeStreamCommand for Table { fn name(&self) -> &str { "table" } fn signature(&self) -> Signature { - Signature::build("table") + Signature::build("table").named("start_number", SyntaxShape::Number) } fn usage(&self) -> &str { @@ -26,16 +23,29 @@ impl WholeStreamCommand for Table { args: CommandArgs, registry: &CommandRegistry, ) -> Result { - args.process(registry, table)?.run() + table(args, registry) } } -pub fn table(_args: TableArgs, context: RunnableContext) -> Result { 
+fn table(args: CommandArgs, registry: &CommandRegistry) -> Result { + let args = args.evaluate_once(registry)?; + let stream = async_stream! { - let input: Vec> = context.input.into_vec().await; + let host = args.host.clone(); + let start_number = match args.get("start_number") { + Some(Tagged { item: Value::Primitive(Primitive::Int(i)), .. }) => { + i.to_usize().unwrap() + } + _ => { + 0 + } + }; + + let input: Vec> = args.input.into_vec().await; if input.len() > 0 { - let mut host = context.host.lock().unwrap(); - let view = TableView::from_list(&input); + let mut host = host.lock().unwrap(); + let view = TableView::from_list(&input, start_number); + if let Some(view) = view { handle_unexpected(&mut *host, |host| crate::format::print_view(&view, host)); } diff --git a/src/commands/tags.rs b/src/commands/tags.rs index 2b710d1b61..221e8cc303 100644 --- a/src/commands/tags.rs +++ b/src/commands/tags.rs @@ -28,7 +28,6 @@ impl WholeStreamCommand for Tags { } fn tags(args: CommandArgs, _registry: &CommandRegistry) -> Result { - let source_map = args.call_info.source_map.clone(); Ok(args .input .values @@ -42,7 +41,7 @@ fn tags(args: CommandArgs, _registry: &CommandRegistry) -> Result { tags.insert("anchor", Value::string(source)); } diff --git a/src/commands/to_bson.rs b/src/commands/to_bson.rs index a36d99c077..eabf8381ec 100644 --- a/src/commands/to_bson.rs +++ b/src/commands/to_bson.rs @@ -46,7 +46,7 @@ pub fn value_to_bson_value(v: &Tagged) -> Result { Value::Primitive(Primitive::BeginningOfStream) => Bson::Null, Value::Primitive(Primitive::Decimal(d)) => Bson::FloatingPoint(d.to_f64().unwrap()), Value::Primitive(Primitive::Int(i)) => { - Bson::I64(i.tagged(v.tag).coerce_into("converting to BSON")?) + Bson::I64(i.tagged(&v.tag).coerce_into("converting to BSON")?) 
} Value::Primitive(Primitive::Nothing) => Bson::Null, Value::Primitive(Primitive::String(s)) => Bson::String(s.clone()), @@ -58,6 +58,7 @@ pub fn value_to_bson_value(v: &Tagged) -> Result { .collect::>()?, ), Value::Block(_) => Bson::Null, + Value::Error(e) => return Err(e.clone()), Value::Primitive(Primitive::Binary(b)) => Bson::Binary(BinarySubtype::Generic, b.clone()), Value::Row(o) => object_value_to_bson(o)?, }) @@ -170,7 +171,7 @@ fn get_binary_subtype<'a>(tagged_value: &'a Tagged) -> Result unreachable!(), }), Value::Primitive(Primitive::Int(i)) => Ok(BinarySubtype::UserDefined( - i.tagged(tagged_value.tag) + i.tagged(&tagged_value.tag) .coerce_into("converting to BSON binary subtype")?, )), _ => Err(ShellError::type_error( @@ -207,12 +208,12 @@ fn bson_value_to_bytes(bson: Bson, tag: Tag) -> Result, ShellError> { Bson::Array(a) => { for v in a.into_iter() { match v { - Bson::Document(d) => shell_encode_document(&mut out, d, tag)?, + Bson::Document(d) => shell_encode_document(&mut out, d, tag.clone())?, _ => { return Err(ShellError::labeled_error( format!("All top level values must be Documents, got {:?}", v), "requires BSON-compatible document", - tag, + &tag, )) } } @@ -237,7 +238,7 @@ fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result> = args.input.values.collect().await; let to_process_input = if input.len() > 1 { - let tag = input[0].tag; + let tag = input[0].tag.clone(); vec![Tagged { item: Value::Table(input), tag } ] } else if input.len() == 1 { input @@ -248,14 +249,14 @@ fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result { - match bson_value_to_bytes(bson_value, name_tag) { + match bson_value_to_bytes(bson_value, name_tag.clone()) { Ok(x) => yield ReturnSuccess::value( - Value::binary(x).tagged(name_tag), + Value::binary(x).tagged(&name_tag), ), _ => yield Err(ShellError::labeled_error_with_secondary( "Expected a table with BSON-compatible structure.tag() from pipeline", "requires BSON-compatible input", - 
name_tag, + &name_tag, "originates from here".to_string(), value.tag(), )), @@ -264,7 +265,7 @@ fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error( "Expected a table with BSON-compatible structure from pipeline", "requires BSON-compatible input", - name_tag)) + &name_tag)) } } }; diff --git a/src/commands/to_csv.rs b/src/commands/to_csv.rs index 66121df53e..90f4837453 100644 --- a/src/commands/to_csv.rs +++ b/src/commands/to_csv.rs @@ -47,7 +47,7 @@ pub fn value_to_csv_value(v: &Tagged) -> Tagged { Value::Block(_) => Value::Primitive(Primitive::Nothing), _ => Value::Primitive(Primitive::Nothing), } - .tagged(v.tag) + .tagged(v.tag.clone()) } fn to_string_helper(v: &Tagged) -> Result { @@ -61,7 +61,13 @@ fn to_string_helper(v: &Tagged) -> Result { Value::Table(_) => return Ok(String::from("[Table]")), Value::Row(_) => return Ok(String::from("[Row]")), Value::Primitive(Primitive::String(s)) => return Ok(s.to_string()), - _ => return Err(ShellError::labeled_error("Unexpected value", "", v.tag)), + _ => { + return Err(ShellError::labeled_error( + "Unexpected value", + "", + v.tag.clone(), + )) + } } } @@ -99,14 +105,14 @@ pub fn to_string(tagged_value: &Tagged) -> Result { ShellError::labeled_error( "Could not convert record", "original value", - tagged_value.tag, + &tagged_value.tag, ) })?) .map_err(|_| { ShellError::labeled_error( "Could not convert record", "original value", - tagged_value.tag, + &tagged_value.tag, ) })?); } @@ -136,14 +142,14 @@ pub fn to_string(tagged_value: &Tagged) -> Result { ShellError::labeled_error( "Could not convert record", "original value", - tagged_value.tag, + &tagged_value.tag, ) })?) 
.map_err(|_| { ShellError::labeled_error( "Could not convert record", "original value", - tagged_value.tag, + &tagged_value.tag, ) })?); } @@ -160,7 +166,7 @@ fn to_csv( let input: Vec> = input.values.collect().await; let to_process_input = if input.len() > 1 { - let tag = input[0].tag; + let tag = input[0].tag.clone(); vec![Tagged { item: Value::Table(input), tag } ] } else if input.len() == 1 { input @@ -176,13 +182,13 @@ fn to_csv( } else { x }; - yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(name_tag)) + yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(&name_tag)) } _ => { yield Err(ShellError::labeled_error_with_secondary( "Expected a table with CSV-compatible structure.tag() from pipeline", "requires CSV-compatible input", - name_tag, + &name_tag, "originates from here".to_string(), value.tag(), )) diff --git a/src/commands/to_json.rs b/src/commands/to_json.rs index 9c06299aad..40edc5aeb8 100644 --- a/src/commands/to_json.rs +++ b/src/commands/to_json.rs @@ -42,7 +42,7 @@ pub fn value_to_json_value(v: &Tagged) -> Result serde_json::Value::Number(serde_json::Number::from( - CoerceInto::::coerce_into(i.tagged(v.tag), "converting to JSON number")?, + CoerceInto::::coerce_into(i.tagged(&v.tag), "converting to JSON number")?, )), Value::Primitive(Primitive::Nothing) => serde_json::Value::Null, Value::Primitive(Primitive::Pattern(s)) => serde_json::Value::String(s.clone()), @@ -50,6 +50,7 @@ pub fn value_to_json_value(v: &Tagged) -> Result serde_json::Value::String(s.display().to_string()), Value::Table(l) => serde_json::Value::Array(json_list(l)?), + Value::Error(e) => return Err(e.clone()), Value::Block(_) => serde_json::Value::Null, Value::Primitive(Primitive::Binary(b)) => serde_json::Value::Array( b.iter() @@ -85,7 +86,7 @@ fn to_json(args: CommandArgs, registry: &CommandRegistry) -> Result> = args.input.values.collect().await; let to_process_input = if input.len() > 1 { - let tag = 
input[0].tag; + let tag = input[0].tag.clone(); vec![Tagged { item: Value::Table(input), tag } ] } else if input.len() == 1 { input @@ -98,12 +99,12 @@ fn to_json(args: CommandArgs, registry: &CommandRegistry) -> Result { match serde_json::to_string(&json_value) { Ok(x) => yield ReturnSuccess::value( - Value::Primitive(Primitive::String(x)).tagged(name_tag), + Value::Primitive(Primitive::String(x)).tagged(&name_tag), ), _ => yield Err(ShellError::labeled_error_with_secondary( "Expected a table with JSON-compatible structure.tag() from pipeline", "requires JSON-compatible input", - name_tag, + &name_tag, "originates from here".to_string(), value.tag(), )), @@ -112,7 +113,7 @@ fn to_json(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error( "Expected a table with JSON-compatible structure from pipeline", "requires JSON-compatible input", - name_tag)) + &name_tag)) } } }; diff --git a/src/commands/to_toml.rs b/src/commands/to_toml.rs index 6c8904e0c2..778fdd2561 100644 --- a/src/commands/to_toml.rs +++ b/src/commands/to_toml.rs @@ -38,10 +38,10 @@ pub fn value_to_toml_value(v: &Tagged) -> Result toml::Value::String("".to_string()) } Value::Primitive(Primitive::Decimal(f)) => { - toml::Value::Float(f.tagged(v.tag).coerce_into("converting to TOML float")?) + toml::Value::Float(f.tagged(&v.tag).coerce_into("converting to TOML float")?) } Value::Primitive(Primitive::Int(i)) => { - toml::Value::Integer(i.tagged(v.tag).coerce_into("converting to TOML integer")?) + toml::Value::Integer(i.tagged(&v.tag).coerce_into("converting to TOML integer")?) 
} Value::Primitive(Primitive::Nothing) => toml::Value::String("".to_string()), Value::Primitive(Primitive::Pattern(s)) => toml::Value::String(s.clone()), @@ -49,6 +49,7 @@ pub fn value_to_toml_value(v: &Tagged) -> Result Value::Primitive(Primitive::Path(s)) => toml::Value::String(s.display().to_string()), Value::Table(l) => toml::Value::Array(collect_values(l)?), + Value::Error(e) => return Err(e.clone()), Value::Block(_) => toml::Value::String("".to_string()), Value::Primitive(Primitive::Binary(b)) => { toml::Value::Array(b.iter().map(|x| toml::Value::Integer(*x as i64)).collect()) @@ -80,7 +81,7 @@ fn to_toml(args: CommandArgs, registry: &CommandRegistry) -> Result> = args.input.values.collect().await; let to_process_input = if input.len() > 1 { - let tag = input[0].tag; + let tag = input[0].tag.clone(); vec![Tagged { item: Value::Table(input), tag } ] } else if input.len() == 1 { input @@ -93,12 +94,12 @@ fn to_toml(args: CommandArgs, registry: &CommandRegistry) -> Result { match toml::to_string(&toml_value) { Ok(x) => yield ReturnSuccess::value( - Value::Primitive(Primitive::String(x)).tagged(name_tag), + Value::Primitive(Primitive::String(x)).tagged(&name_tag), ), _ => yield Err(ShellError::labeled_error_with_secondary( "Expected a table with TOML-compatible structure.tag() from pipeline", "requires TOML-compatible input", - name_tag, + &name_tag, "originates from here".to_string(), value.tag(), )), @@ -107,7 +108,7 @@ fn to_toml(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error( "Expected a table with TOML-compatible structure from pipeline", "requires TOML-compatible input", - name_tag)) + &name_tag)) } } }; diff --git a/src/commands/to_tsv.rs b/src/commands/to_tsv.rs index 7127a3195b..83cb4a07f1 100644 --- a/src/commands/to_tsv.rs +++ b/src/commands/to_tsv.rs @@ -49,7 +49,7 @@ pub fn value_to_tsv_value(tagged_value: &Tagged) -> Tagged { Value::Block(_) => Value::Primitive(Primitive::Nothing), _ => 
Value::Primitive(Primitive::Nothing), } - .tagged(tagged_value.tag) + .tagged(&tagged_value.tag) } fn to_string_helper(tagged_value: &Tagged) -> Result { @@ -68,7 +68,7 @@ fn to_string_helper(tagged_value: &Tagged) -> Result return Err(ShellError::labeled_error( "Unexpected value", "original value", - tagged_value.tag, + &tagged_value.tag, )) } } @@ -107,14 +107,14 @@ pub fn to_string(tagged_value: &Tagged) -> Result { ShellError::labeled_error( "Could not convert record", "original value", - tagged_value.tag, + &tagged_value.tag, ) })?) .map_err(|_| { ShellError::labeled_error( "Could not convert record", "original value", - tagged_value.tag, + &tagged_value.tag, ) })?); } @@ -144,14 +144,14 @@ pub fn to_string(tagged_value: &Tagged) -> Result { ShellError::labeled_error( "Could not convert record", "original value", - tagged_value.tag, + &tagged_value.tag, ) })?) .map_err(|_| { ShellError::labeled_error( "Could not convert record", "original value", - tagged_value.tag, + &tagged_value.tag, ) })?); } @@ -168,7 +168,7 @@ fn to_tsv( let input: Vec> = input.values.collect().await; let to_process_input = if input.len() > 1 { - let tag = input[0].tag; + let tag = input[0].tag.clone(); vec![Tagged { item: Value::Table(input), tag } ] } else if input.len() == 1 { input @@ -184,13 +184,13 @@ fn to_tsv( } else { x }; - yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(name_tag)) + yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(&name_tag)) } _ => { yield Err(ShellError::labeled_error_with_secondary( "Expected a table with TSV-compatible structure.tag() from pipeline", "requires TSV-compatible input", - name_tag, + &name_tag, "originates from here".to_string(), value.tag(), )) diff --git a/src/commands/to_url.rs b/src/commands/to_url.rs index dfba5faf4d..8dee0a87d5 100644 --- a/src/commands/to_url.rs +++ b/src/commands/to_url.rs @@ -47,7 +47,7 @@ fn to_url(args: CommandArgs, registry: &CommandRegistry) -> 
Result Result { - yield ReturnSuccess::value(Value::string(s).tagged(tag)); + yield ReturnSuccess::value(Value::string(s).tagged(&tag)); } _ => { yield Err(ShellError::labeled_error( "Failed to convert to url-encoded", "cannot url-encode", - tag, + &tag, )) } } @@ -72,7 +72,7 @@ fn to_url(args: CommandArgs, registry: &CommandRegistry) -> Result) -> Result serde_yaml::Value::Number(serde_yaml::Number::from( - CoerceInto::::coerce_into(i.tagged(v.tag), "converting to YAML number")?, + CoerceInto::::coerce_into(i.tagged(&v.tag), "converting to YAML number")?, )), Value::Primitive(Primitive::Nothing) => serde_yaml::Value::Null, Value::Primitive(Primitive::Pattern(s)) => serde_yaml::Value::String(s.clone()), @@ -55,6 +55,7 @@ pub fn value_to_yaml_value(v: &Tagged) -> Result return Err(e.clone()), Value::Block(_) => serde_yaml::Value::Null, Value::Primitive(Primitive::Binary(b)) => serde_yaml::Value::Sequence( b.iter() @@ -81,7 +82,7 @@ fn to_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result> = args.input.values.collect().await; let to_process_input = if input.len() > 1 { - let tag = input[0].tag; + let tag = input[0].tag.clone(); vec![Tagged { item: Value::Table(input), tag } ] } else if input.len() == 1 { input @@ -94,12 +95,12 @@ fn to_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result { match serde_yaml::to_string(&yaml_value) { Ok(x) => yield ReturnSuccess::value( - Value::Primitive(Primitive::String(x)).tagged(name_tag), + Value::Primitive(Primitive::String(x)).tagged(&name_tag), ), _ => yield Err(ShellError::labeled_error_with_secondary( "Expected a table with YAML-compatible structure.tag() from pipeline", "requires YAML-compatible input", - name_tag, + &name_tag, "originates from here".to_string(), value.tag(), )), @@ -108,7 +109,7 @@ fn to_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error( "Expected a table with YAML-compatible structure from pipeline", "requires YAML-compatible input", - 
name_tag)) + &name_tag)) } } }; diff --git a/src/commands/version.rs b/src/commands/version.rs index 01a134929e..11b243f08b 100644 --- a/src/commands/version.rs +++ b/src/commands/version.rs @@ -31,14 +31,14 @@ impl WholeStreamCommand for Version { pub fn date(args: CommandArgs, registry: &CommandRegistry) -> Result { let args = args.evaluate_once(registry)?; - let tag = args.call_info.name_tag; + let tag = args.call_info.name_tag.clone(); let mut indexmap = IndexMap::new(); indexmap.insert( "version".to_string(), - Value::string(clap::crate_version!()).tagged(tag), + Value::string(clap::crate_version!()).tagged(&tag), ); - let value = Value::Row(Dictionary::from(indexmap)).tagged(tag); + let value = Value::Row(Dictionary::from(indexmap)).tagged(&tag); Ok(OutputStream::one(value)) } diff --git a/src/commands/where_.rs b/src/commands/where_.rs index 673c6dda84..9e3c4d2c07 100644 --- a/src/commands/where_.rs +++ b/src/commands/where_.rs @@ -49,7 +49,7 @@ impl PerItemCommand for Where { return Err(ShellError::labeled_error( "Expected a condition", "where needs a condition", - *tag, + tag, )) } }; diff --git a/src/commands/which_.rs b/src/commands/which_.rs index 905515848c..e3b6d1c96c 100644 --- a/src/commands/which_.rs +++ b/src/commands/which_.rs @@ -33,7 +33,7 @@ pub fn which(args: CommandArgs, registry: &CommandRegistry) -> Result 0 { @@ -52,7 +52,7 @@ pub fn which(args: CommandArgs, registry: &CommandRegistry) -> Result); - -impl SourceMap { - pub fn insert(&mut self, uuid: Uuid, anchor_location: AnchorLocation) { - self.0.insert(uuid, anchor_location); - } - - pub fn get(&self, uuid: &Uuid) -> Option<&AnchorLocation> { - self.0.get(uuid) - } - - pub fn new() -> SourceMap { - SourceMap(HashMap::new()) - } -} - #[derive(Clone, new)] pub struct CommandRegistry { #[new(value = "Arc::new(Mutex::new(IndexMap::default()))")] @@ -77,8 +58,8 @@ impl CommandRegistry { #[derive(Clone)] pub struct Context { registry: CommandRegistry, - pub(crate) source_map: Arc>, host: 
Arc>, + pub ctrl_c: Arc, pub(crate) shell_manager: ShellManager, } @@ -90,17 +71,17 @@ impl Context { pub(crate) fn expand_context<'context>( &'context self, source: &'context Text, - tag: Tag, + span: Span, ) -> ExpandContext<'context> { - ExpandContext::new(&self.registry, tag, source, self.shell_manager.homedir()) + ExpandContext::new(&self.registry, span, source, self.shell_manager.homedir()) } pub(crate) fn basic() -> Result> { let registry = CommandRegistry::new(); Ok(Context { registry: registry.clone(), - source_map: Arc::new(Mutex::new(SourceMap::new())), host: Arc::new(Mutex::new(crate::env::host::BasicHost)), + ctrl_c: Arc::new(AtomicBool::new(false)), shell_manager: ShellManager::basic(registry)?, }) } @@ -117,12 +98,6 @@ impl Context { } } - pub fn add_anchor_location(&mut self, uuid: Uuid, anchor_location: AnchorLocation) { - let mut source_map = self.source_map.lock().unwrap(); - - source_map.insert(uuid, anchor_location); - } - pub(crate) fn get_command(&self, name: &str) -> Option> { self.registry.get_command(name) } @@ -135,27 +110,19 @@ impl Context { &mut self, command: Arc, name_tag: Tag, - source_map: SourceMap, args: hir::Call, source: &Text, input: InputStream, is_first_command: bool, ) -> OutputStream { - let command_args = self.command_args(args, input, source, source_map, name_tag); + let command_args = self.command_args(args, input, source, name_tag); command.run(command_args, self.registry(), is_first_command) } - fn call_info( - &self, - args: hir::Call, - source: &Text, - source_map: SourceMap, - name_tag: Tag, - ) -> UnevaluatedCallInfo { + fn call_info(&self, args: hir::Call, source: &Text, name_tag: Tag) -> UnevaluatedCallInfo { UnevaluatedCallInfo { args, source: source.clone(), - source_map, name_tag, } } @@ -165,13 +132,13 @@ impl Context { args: hir::Call, input: InputStream, source: &Text, - source_map: SourceMap, name_tag: Tag, ) -> CommandArgs { CommandArgs { host: self.host.clone(), + ctrl_c: self.ctrl_c.clone(), 
shell_manager: self.shell_manager.clone(), - call_info: self.call_info(args, source, source_map, name_tag), + call_info: self.call_info(args, source, name_tag), input, } } diff --git a/src/data/base.rs b/src/data/base.rs index 735196c97f..f7b875ef53 100644 --- a/src/data/base.rs +++ b/src/data/base.rs @@ -213,7 +213,7 @@ impl Block { let scope = Scope::new(value.clone()); if self.expressions.len() == 0 { - return Ok(Value::nothing().tagged(self.tag)); + return Ok(Value::nothing().tagged(&self.tag)); } let mut last = None; @@ -245,6 +245,9 @@ pub enum Value { Row(crate::data::Dictionary), Table(Vec>), + // Errors are a type of value too + Error(ShellError), + Block(Block), } @@ -293,6 +296,7 @@ impl fmt::Debug for ValueDebug<'_> { Value::Row(o) => o.debug(f), Value::Table(l) => debug_list(l).fmt(f), Value::Block(_) => write!(f, "[[block]]"), + Value::Error(_) => write!(f, "[[error]]"), } } } @@ -300,7 +304,7 @@ impl fmt::Debug for ValueDebug<'_> { impl Tagged { pub fn tagged_type_name(&self) -> Tagged { let name = self.type_name(); - Tagged::from_item(name, self.tag()) + name.tagged(self.tag()) } } @@ -312,7 +316,7 @@ impl std::convert::TryFrom<&Tagged> for Block { Value::Block(block) => Ok(block.clone()), v => Err(ShellError::type_error( "Block", - value.copy_tag(v.type_name()), + v.type_name().tagged(value.tag()), )), } } @@ -324,11 +328,11 @@ impl std::convert::TryFrom<&Tagged> for i64 { fn try_from(value: &Tagged) -> Result { match value.item() { Value::Primitive(Primitive::Int(int)) => { - int.tagged(value.tag).coerce_into("converting to i64") + int.tagged(&value.tag).coerce_into("converting to i64") } v => Err(ShellError::type_error( "Integer", - value.copy_tag(v.type_name()), + v.type_name().tagged(value.tag()), )), } } @@ -342,7 +346,7 @@ impl std::convert::TryFrom<&Tagged> for String { Value::Primitive(Primitive::String(s)) => Ok(s.clone()), v => Err(ShellError::type_error( "String", - value.copy_tag(v.type_name()), + v.type_name().tagged(value.tag()), )), 
} } @@ -356,7 +360,7 @@ impl std::convert::TryFrom<&Tagged> for Vec { Value::Primitive(Primitive::Binary(b)) => Ok(b.clone()), v => Err(ShellError::type_error( "Binary", - value.copy_tag(v.type_name()), + v.type_name().tagged(value.tag()), )), } } @@ -370,7 +374,7 @@ impl<'a> std::convert::TryFrom<&'a Tagged> for &'a crate::data::Dictionar Value::Row(d) => Ok(d), v => Err(ShellError::type_error( "Dictionary", - value.copy_tag(v.type_name()), + v.type_name().tagged(value.tag()), )), } } @@ -392,7 +396,7 @@ impl std::convert::TryFrom>> for Switch { Value::Primitive(Primitive::Boolean(true)) => Ok(Switch::Present), v => Err(ShellError::type_error( "Boolean", - value.copy_tag(v.type_name()), + v.type_name().tagged(value.tag()), )), }, } @@ -410,19 +414,19 @@ impl Tagged { match &self.item { Value::Table(table) => { for item in table { - out.push(item.as_string()?.tagged(item.tag)); + out.push(item.as_string()?.tagged(&item.tag)); } } other => { return Err(ShellError::type_error( "column name", - other.type_name().tagged(self.tag), + other.type_name().tagged(&self.tag), )) } } - Ok(out.tagged(self.tag)) + Ok(out.tagged(&self.tag)) } pub(crate) fn as_string(&self) -> Result { @@ -437,7 +441,7 @@ impl Tagged { other => Err(ShellError::labeled_error( "Expected string", other.type_name(), - self.tag, + &self.tag, )), } } @@ -450,6 +454,7 @@ impl Value { Value::Row(_) => format!("row"), Value::Table(_) => format!("list"), Value::Block(_) => format!("block"), + Value::Error(_) => format!("error"), } } @@ -465,6 +470,7 @@ impl Value { .collect(), Value::Block(_) => vec![], Value::Table(_) => vec![], + Value::Error(_) => vec![], } } @@ -503,7 +509,7 @@ impl Value { } } - Some(Tagged::from_item(current, tag)) + Some(current.tagged(tag)) } pub fn get_data_by_path(&self, tag: Tag, path: &str) -> Option> { @@ -515,7 +521,7 @@ impl Value { } } - Some(Tagged::from_item(current, tag)) + Some(current.tagged(tag)) } pub fn insert_data_at_path( @@ -535,8 +541,8 @@ impl Value { // Special 
case for inserting at the top level current .entries - .insert(path.to_string(), Tagged::from_item(new_value, tag)); - return Some(Tagged::from_item(new_obj, tag)); + .insert(path.to_string(), new_value.tagged(&tag)); + return Some(new_obj.tagged(&tag)); } for idx in 0..split_path.len() { @@ -547,13 +553,13 @@ impl Value { Value::Row(o) => { o.entries.insert( split_path[idx + 1].to_string(), - Tagged::from_item(new_value, tag), + new_value.tagged(&tag), ); } _ => {} } - return Some(Tagged::from_item(new_obj, tag)); + return Some(new_obj.tagged(&tag)); } else { match next.item { Value::Row(ref mut o) => { @@ -584,11 +590,10 @@ impl Value { if split_path.len() == 1 { // Special case for inserting at the top level - current.entries.insert( - split_path[0].item.clone(), - Tagged::from_item(new_value, tag), - ); - return Some(Tagged::from_item(new_obj, tag)); + current + .entries + .insert(split_path[0].item.clone(), new_value.tagged(&tag)); + return Some(new_obj.tagged(&tag)); } for idx in 0..split_path.len() { @@ -599,13 +604,13 @@ impl Value { Value::Row(o) => { o.entries.insert( split_path[idx + 1].to_string(), - Tagged::from_item(new_value, tag), + new_value.tagged(&tag), ); } _ => {} } - return Some(Tagged::from_item(new_obj, tag)); + return Some(new_obj.tagged(&tag)); } else { match next.item { Value::Row(ref mut o) => { @@ -639,8 +644,8 @@ impl Value { match current.entries.get_mut(split_path[idx]) { Some(next) => { if idx == (split_path.len() - 1) { - *next = Tagged::from_item(replaced_value, tag); - return Some(Tagged::from_item(new_obj, tag)); + *next = replaced_value.tagged(&tag); + return Some(new_obj.tagged(&tag)); } else { match next.item { Value::Row(ref mut o) => { @@ -672,8 +677,8 @@ impl Value { match current.entries.get_mut(&split_path[idx].item) { Some(next) => { if idx == (split_path.len() - 1) { - *next = Tagged::from_item(replaced_value, tag); - return Some(Tagged::from_item(new_obj, tag)); + *next = replaced_value.tagged(&tag); + return 
Some(new_obj.tagged(&tag)); } else { match next.item { Value::Row(ref mut o) => { @@ -697,6 +702,7 @@ impl Value { Value::Row(o) => o.get_data(desc), Value::Block(_) => MaybeOwned::Owned(Value::nothing()), Value::Table(_) => MaybeOwned::Owned(Value::nothing()), + Value::Error(_) => MaybeOwned::Owned(Value::nothing()), } } @@ -706,7 +712,7 @@ impl Value { Value::Block(b) => itertools::join( b.expressions .iter() - .map(|e| e.source(&b.source).to_string()), + .map(|e| e.span.slice(&b.source).to_string()), "; ", ), Value::Row(_) => format!("[table: 1 row]"), @@ -715,6 +721,7 @@ impl Value { l.len(), if l.len() == 1 { "row" } else { "rows" } ), + Value::Error(_) => format!("[error]"), } } diff --git a/src/data/command.rs b/src/data/command.rs index a2046aa7aa..25301e6fa1 100644 --- a/src/data/command.rs +++ b/src/data/command.rs @@ -7,7 +7,7 @@ use std::ops::Deref; pub(crate) fn command_dict(command: Arc, tag: impl Into) -> Tagged { let tag = tag.into(); - let mut cmd_dict = TaggedDictBuilder::new(tag); + let mut cmd_dict = TaggedDictBuilder::new(&tag); cmd_dict.insert("name", Value::string(command.name())); @@ -42,7 +42,7 @@ fn for_spec(name: &str, ty: &str, required: bool, tag: impl Into) -> Tagged fn signature_dict(signature: Signature, tag: impl Into) -> Tagged { let tag = tag.into(); - let mut sig = TaggedListBuilder::new(tag); + let mut sig = TaggedListBuilder::new(&tag); for arg in signature.positional.iter() { let is_required = match arg { @@ -50,19 +50,19 @@ fn signature_dict(signature: Signature, tag: impl Into) -> Tagged { PositionalType::Optional(_, _) => false, }; - sig.insert_tagged(for_spec(arg.name(), "argument", is_required, tag)); + sig.insert_tagged(for_spec(arg.name(), "argument", is_required, &tag)); } if let Some(_) = signature.rest_positional { let is_required = false; - sig.insert_tagged(for_spec("rest", "argument", is_required, tag)); + sig.insert_tagged(for_spec("rest", "argument", is_required, &tag)); } for (name, ty) in 
signature.named.iter() { match ty { - NamedType::Mandatory(_) => sig.insert_tagged(for_spec(name, "flag", true, tag)), - NamedType::Optional(_) => sig.insert_tagged(for_spec(name, "flag", false, tag)), - NamedType::Switch => sig.insert_tagged(for_spec(name, "switch", false, tag)), + NamedType::Mandatory(_) => sig.insert_tagged(for_spec(name, "flag", true, &tag)), + NamedType::Optional(_) => sig.insert_tagged(for_spec(name, "flag", false, &tag)), + NamedType::Switch => sig.insert_tagged(for_spec(name, "switch", false, &tag)), } } diff --git a/src/data/config.rs b/src/data/config.rs index 657287d2f2..26e3e3c7d5 100644 --- a/src/data/config.rs +++ b/src/data/config.rs @@ -75,12 +75,12 @@ pub fn read( let tag = tag.into(); let contents = fs::read_to_string(filename) - .map(|v| v.tagged(tag)) + .map(|v| v.tagged(&tag)) .map_err(|err| { ShellError::labeled_error( &format!("Couldn't read config file:\n{}", err), "file name", - tag, + &tag, ) })?; @@ -88,7 +88,7 @@ pub fn read( ShellError::labeled_error( &format!("Couldn't parse config file:\n{}", err), "file name", - tag, + &tag, ) })?; @@ -98,7 +98,7 @@ pub fn read( Value::Row(Dictionary { entries }) => Ok(entries), other => Err(ShellError::type_error( "Dictionary", - other.type_name().tagged(tag), + other.type_name().tagged(&tag), )), } } diff --git a/src/data/dict.rs b/src/data/dict.rs index c14c86dd90..8f9bb556ba 100644 --- a/src/data/dict.rs +++ b/src/data/dict.rs @@ -115,7 +115,7 @@ impl TaggedListBuilder { } pub fn push(&mut self, value: impl Into) { - self.list.push(value.into().tagged(self.tag)); + self.list.push(value.into().tagged(&self.tag)); } pub fn insert_tagged(&mut self, value: impl Into>) { @@ -155,7 +155,7 @@ impl TaggedDictBuilder { } pub fn insert(&mut self, key: impl Into, value: impl Into) { - self.dict.insert(key.into(), value.into().tagged(self.tag)); + self.dict.insert(key.into(), value.into().tagged(&self.tag)); } pub fn insert_tagged(&mut self, key: impl Into, value: impl Into>) { diff --git 
a/src/data/meta.rs b/src/data/meta.rs index 08125359e4..2f3f0cc4c1 100644 --- a/src/data/meta.rs +++ b/src/data/meta.rs @@ -1,15 +1,52 @@ -use crate::context::{AnchorLocation, SourceMap}; +use crate::context::AnchorLocation; use crate::parser::parse::parser::TracableContext; use crate::prelude::*; -use crate::Text; use derive_new::new; use getset::Getters; use serde::Deserialize; use serde::Serialize; use std::path::{Path, PathBuf}; -use uuid::Uuid; #[derive(new, Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)] +pub struct Spanned { + pub span: Span, + pub item: T, +} + +impl Spanned { + pub fn map(self, input: impl FnOnce(T) -> U) -> Spanned { + let span = self.span; + + let mapped = input(self.item); + mapped.spanned(span) + } +} + +pub trait SpannedItem: Sized { + fn spanned(self, span: impl Into) -> Spanned { + Spanned { + item: self, + span: span.into(), + } + } + + fn spanned_unknown(self) -> Spanned { + Spanned { + item: self, + span: Span::unknown(), + } + } +} +impl SpannedItem for T {} + +impl std::ops::Deref for Spanned { + type Target = T; + + fn deref(&self) -> &T { + &self.item + } +} +#[derive(new, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)] pub struct Tagged { pub tag: Tag, pub item: T, @@ -17,7 +54,7 @@ pub struct Tagged { impl HasTag for Tagged { fn tag(&self) -> Tag { - self.tag + self.tag.clone() } } @@ -29,20 +66,23 @@ impl AsRef for Tagged { pub trait TaggedItem: Sized { fn tagged(self, tag: impl Into) -> Tagged { - Tagged::from_item(self, tag.into()) + Tagged { + item: self, + tag: tag.into(), + } } // For now, this is a temporary facility. In many cases, there are other useful spans that we // could be using, such as the original source spans of JSON or Toml files, but we don't yet // have the infrastructure to make that work. 
fn tagged_unknown(self) -> Tagged { - Tagged::from_item( - self, - Tag { + Tagged { + item: self, + tag: Tag { span: Span::unknown(), - anchor: uuid::Uuid::nil(), + anchor: None, }, - ) + } } } @@ -57,48 +97,29 @@ impl std::ops::Deref for Tagged { } impl Tagged { - pub fn with_tag(self, tag: impl Into) -> Tagged { - Tagged::from_item(self.item, tag) - } - - pub fn from_item(item: T, tag: impl Into) -> Tagged { - Tagged { - item, - tag: tag.into(), - } - } - pub fn map(self, input: impl FnOnce(T) -> U) -> Tagged { let tag = self.tag(); let mapped = input(self.item); - Tagged::from_item(mapped, tag) - } - - pub(crate) fn copy_tag(&self, output: U) -> Tagged { - Tagged::from_item(output, self.tag()) - } - - pub fn source(&self, source: &Text) -> Text { - Text::from(self.tag().slice(source)) + mapped.tagged(tag) } pub fn tag(&self) -> Tag { - self.tag + self.tag.clone() } pub fn span(&self) -> Span { self.tag.span } - pub fn anchor(&self) -> uuid::Uuid { - self.tag.anchor + pub fn anchor(&self) -> Option { + self.tag.anchor.clone() } - pub fn anchor_name(&self, source_map: &SourceMap) -> Option { - match source_map.get(&self.tag.anchor) { - Some(AnchorLocation::File(file)) => Some(file.clone()), - Some(AnchorLocation::Url(url)) => Some(url.clone()), + pub fn anchor_name(&self) -> Option { + match self.tag.anchor { + Some(AnchorLocation::File(ref file)) => Some(file.clone()), + Some(AnchorLocation::Url(ref url)) => Some(url.clone()), _ => None, } } @@ -114,26 +135,32 @@ impl Tagged { impl From<&Tag> for Tag { fn from(input: &Tag) -> Tag { - *input + input.clone() } } -impl From> for Span { - fn from(input: nom_locate::LocatedSpanEx<&str, Uuid>) -> Span { +impl From> for Span { + fn from(input: nom_locate::LocatedSpanEx<&str, TracableContext>) -> Span { + Span::new(input.offset, input.offset + input.fragment.len()) + } +} + +impl From> for Span { + fn from(input: nom_locate::LocatedSpanEx<&str, u64>) -> Span { Span::new(input.offset, input.offset + input.fragment.len()) 
} } impl From<( - nom_locate::LocatedSpanEx, - nom_locate::LocatedSpanEx, + nom_locate::LocatedSpanEx, + nom_locate::LocatedSpanEx, )> for Span { fn from( input: ( - nom_locate::LocatedSpanEx, - nom_locate::LocatedSpanEx, + nom_locate::LocatedSpanEx, + nom_locate::LocatedSpanEx, ), ) -> Span { Span { @@ -159,42 +186,48 @@ impl From<&std::ops::Range> for Span { } #[derive( - Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash, Getters, new, + Debug, Clone, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash, Getters, new, )] pub struct Tag { - pub anchor: Uuid, + pub anchor: Option, pub span: Span, } impl From for Tag { fn from(span: Span) -> Self { - Tag { - anchor: uuid::Uuid::nil(), - span, - } + Tag { anchor: None, span } } } impl From<&Span> for Tag { fn from(span: &Span) -> Self { Tag { - anchor: uuid::Uuid::nil(), + anchor: None, span: *span, } } } impl From<(usize, usize, TracableContext)> for Tag { - fn from((start, end, context): (usize, usize, TracableContext)) -> Self { + fn from((start, end, _context): (usize, usize, TracableContext)) -> Self { Tag { - anchor: context.origin, + anchor: None, span: Span::new(start, end), } } } -impl From<(usize, usize, Uuid)> for Tag { - fn from((start, end, anchor): (usize, usize, Uuid)) -> Self { +impl From<(usize, usize, AnchorLocation)> for Tag { + fn from((start, end, anchor): (usize, usize, AnchorLocation)) -> Self { + Tag { + anchor: Some(anchor), + span: Span::new(start, end), + } + } +} + +impl From<(usize, usize, Option)> for Tag { + fn from((start, end, anchor): (usize, usize, Option)) -> Self { Tag { anchor, span: Span::new(start, end), @@ -202,19 +235,10 @@ impl From<(usize, usize, Uuid)> for Tag { } } -impl From<(usize, usize, Option)> for Tag { - fn from((start, end, anchor): (usize, usize, Option)) -> Self { - Tag { - anchor: anchor.unwrap_or(uuid::Uuid::nil()), - span: Span::new(start, end), - } - } -} - impl From> for Tag { fn from(input: 
nom_locate::LocatedSpanEx<&str, TracableContext>) -> Tag { Tag { - anchor: input.extra.origin, + anchor: None, span: Span::new(input.offset, input.offset + input.fragment.len()), } } @@ -234,15 +258,12 @@ impl From<&Tag> for Span { impl Tag { pub fn unknown_anchor(span: Span) -> Tag { - Tag { - anchor: uuid::Uuid::nil(), - span, - } + Tag { anchor: None, span } } - pub fn for_char(pos: usize, anchor: Uuid) -> Tag { + pub fn for_char(pos: usize, anchor: AnchorLocation) -> Tag { Tag { - anchor, + anchor: Some(anchor), span: Span { start: pos, end: pos + 1, @@ -250,16 +271,16 @@ impl Tag { } } - pub fn unknown_span(anchor: Uuid) -> Tag { + pub fn unknown_span(anchor: AnchorLocation) -> Tag { Tag { - anchor, + anchor: Some(anchor), span: Span::unknown(), } } pub fn unknown() -> Tag { Tag { - anchor: uuid::Uuid::nil(), + anchor: None, span: Span::unknown(), } } @@ -273,7 +294,7 @@ impl Tag { Tag { span: Span::new(self.span.start, other.span.end), - anchor: self.anchor, + anchor: self.anchor.clone(), } } @@ -288,10 +309,10 @@ impl Tag { Tag { span: Span::new(self.span.start, other.span.end), - anchor: self.anchor, + anchor: self.anchor.clone(), } } - None => *self, + None => self.clone(), } } @@ -360,6 +381,42 @@ impl Span { Span { start, end } } + pub fn for_char(pos: usize) -> Span { + Span { + start: pos, + end: pos + 1, + } + } + + pub fn until(&self, other: impl Into) -> Span { + let other = other.into(); + + Span::new(self.start, other.end) + } + + pub fn until_option(&self, other: Option>) -> Span { + match other { + Some(other) => { + let other = other.into(); + + Span::new(self.start, other.end) + } + None => *self, + } + } + + pub fn string<'a>(&self, source: &'a str) -> String { + self.slice(source).to_string() + } + + pub fn spanned_slice<'a>(&self, source: &'a str) -> Spanned<&'a str> { + self.slice(source).spanned(*self) + } + + pub fn spanned_string<'a>(&self, source: &'a str) -> Spanned { + self.slice(source).to_string().spanned(*self) + } + /* pub fn 
unknown_with_uuid(uuid: Uuid) -> Span { Span { @@ -404,27 +461,3 @@ impl language_reporting::ReportingSpan for Span { self.end } } - -impl language_reporting::ReportingSpan for Tag { - fn with_start(&self, start: usize) -> Self { - Tag { - span: Span::new(start, self.span.end), - anchor: self.anchor, - } - } - - fn with_end(&self, end: usize) -> Self { - Tag { - span: Span::new(self.span.start, end), - anchor: self.anchor, - } - } - - fn start(&self) -> usize { - self.span.start - } - - fn end(&self) -> usize { - self.span.end - } -} diff --git a/src/data/types.rs b/src/data/types.rs index 8dca43d878..b4ff545deb 100644 --- a/src/data/types.rs +++ b/src/data/types.rs @@ -54,7 +54,7 @@ impl ExtractType for i64 { &Tagged { item: Value::Primitive(Primitive::Int(int)), .. - } => Ok(int.tagged(value.tag).coerce_into("converting to i64")?), + } => Ok(int.tagged(&value.tag).coerce_into("converting to i64")?), other => Err(ShellError::type_error("Integer", other.tagged_type_name())), } } @@ -68,7 +68,7 @@ impl ExtractType for u64 { &Tagged { item: Value::Primitive(Primitive::Int(int)), .. 
- } => Ok(int.tagged(value.tag).coerce_into("converting to u64")?), + } => Ok(int.tagged(&value.tag).coerce_into("converting to u64")?), other => Err(ShellError::type_error("Integer", other.tagged_type_name())), } } diff --git a/src/errors.rs b/src/errors.rs index 2d42552250..11628dde4b 100644 --- a/src/errors.rs +++ b/src/errors.rs @@ -14,9 +14,9 @@ pub enum Description { } impl Description { - fn into_label(self) -> Result, String> { + fn into_label(self) -> Result, String> { match self { - Description::Source(s) => Ok(Label::new_primary(s.tag()).with_message(s.item)), + Description::Source(s) => Ok(Label::new_primary(s.span()).with_message(s.item)), Description::Synthetic(s) => Err(s), } } @@ -24,7 +24,7 @@ impl Description { #[allow(unused)] fn tag(&self) -> Tag { match self { - Description::Source(tagged) => tagged.tag, + Description::Source(tagged) => tagged.tag.clone(), Description::Synthetic(_) => Tag::unknown(), } } @@ -85,10 +85,10 @@ impl ShellError { .start() } - pub(crate) fn unexpected_eof(expected: impl Into, tag: Tag) -> ShellError { + pub(crate) fn unexpected_eof(expected: impl Into, tag: impl Into) -> ShellError { ProximateShellError::UnexpectedEof { expected: expected.into(), - tag, + tag: tag.into(), } .start() } @@ -100,7 +100,7 @@ impl ShellError { ) -> ShellError { ProximateShellError::RangeError { kind: expected.into(), - actual_kind: actual.copy_tag(format!("{:?}", actual.item)), + actual_kind: format!("{:?}", actual.item).tagged(actual.tag()), operation, } .start() @@ -143,22 +143,22 @@ impl ShellError { pub(crate) fn argument_error( command: impl Into, kind: ArgumentError, - tag: Tag, + tag: impl Into, ) -> ShellError { ProximateShellError::ArgumentError { command: command.into(), error: kind, - tag, + tag: tag.into(), } .start() } - pub(crate) fn invalid_external_word(tag: Tag) -> ShellError { + pub(crate) fn invalid_external_word(tag: impl Into) -> ShellError { ProximateShellError::ArgumentError { command: "Invalid argument to Nu 
command (did you mean to call an external command?)" .into(), error: ArgumentError::InvalidExternalWord, - tag, + tag: tag.into(), } .start() } @@ -183,22 +183,22 @@ impl ShellError { } nom::Err::Failure(span) | nom::Err::Error(span) => { let diagnostic = Diagnostic::new(Severity::Error, format!("Parse Error")) - .with_label(Label::new_primary(Tag::from(span.0))); + .with_label(Label::new_primary(Span::from(span.0))); ShellError::diagnostic(diagnostic) } } } - pub(crate) fn diagnostic(diagnostic: Diagnostic) -> ShellError { + pub(crate) fn diagnostic(diagnostic: Diagnostic) -> ShellError { ProximateShellError::Diagnostic(ShellDiagnostic { diagnostic }).start() } - pub(crate) fn to_diagnostic(self) -> Diagnostic { + pub(crate) fn to_diagnostic(self) -> Diagnostic { match self.error { ProximateShellError::InvalidCommand { command } => { Diagnostic::new(Severity::Error, "Invalid command") - .with_label(Label::new_primary(command)) + .with_label(Label::new_primary(command.span)) } ProximateShellError::MissingValue { tag, reason } => { let mut d = Diagnostic::new( @@ -207,7 +207,7 @@ impl ShellError { ); if let Some(tag) = tag { - d = d.with_label(Label::new_primary(tag)); + d = d.with_label(Label::new_primary(tag.span)); } d @@ -220,7 +220,7 @@ impl ShellError { ArgumentError::InvalidExternalWord => Diagnostic::new( Severity::Error, format!("Invalid bare word for Nu command (did you intend to invoke an external command?)")) - .with_label(Label::new_primary(tag)), + .with_label(Label::new_primary(tag.span)), ArgumentError::MissingMandatoryFlag(name) => Diagnostic::new( Severity::Error, format!( @@ -230,7 +230,7 @@ impl ShellError { Color::Black.bold().paint(name) ), ) - .with_label(Label::new_primary(tag)), + .with_label(Label::new_primary(tag.span)), ArgumentError::MissingMandatoryPositional(name) => Diagnostic::new( Severity::Error, format!( @@ -240,7 +240,7 @@ impl ShellError { ), ) .with_label( - Label::new_primary(tag).with_message(format!("requires {} parameter", 
name)), + Label::new_primary(tag.span).with_message(format!("requires {} parameter", name)), ), ArgumentError::MissingValueForName(name) => Diagnostic::new( Severity::Error, @@ -251,7 +251,7 @@ impl ShellError { Color::Black.bold().paint(name) ), ) - .with_label(Label::new_primary(tag)), + .with_label(Label::new_primary(tag.span)), }, ProximateShellError::TypeError { expected, @@ -261,7 +261,7 @@ impl ShellError { tag, }, } => Diagnostic::new(Severity::Error, "Type Error").with_label( - Label::new_primary(tag) + Label::new_primary(tag.span) .with_message(format!("Expected {}, found {}", expected, actual)), ), ProximateShellError::TypeError { @@ -272,12 +272,12 @@ impl ShellError { tag }, } => Diagnostic::new(Severity::Error, "Type Error") - .with_label(Label::new_primary(tag).with_message(expected)), + .with_label(Label::new_primary(tag.span).with_message(expected)), ProximateShellError::UnexpectedEof { expected, tag } => Diagnostic::new(Severity::Error, format!("Unexpected end of input")) - .with_label(Label::new_primary(tag).with_message(format!("Expected {}", expected))), + .with_label(Label::new_primary(tag.span).with_message(format!("Expected {}", expected))), ProximateShellError::RangeError { kind, @@ -288,7 +288,7 @@ impl ShellError { tag }, } => Diagnostic::new(Severity::Error, "Range Error").with_label( - Label::new_primary(tag).with_message(format!( + Label::new_primary(tag.span).with_message(format!( "Expected to convert {} to {} while {}, but it was out of range", item, kind.desc(), @@ -303,7 +303,7 @@ impl ShellError { item }, } => Diagnostic::new(Severity::Error, "Syntax Error") - .with_label(Label::new_primary(tag).with_message(item)), + .with_label(Label::new_primary(tag.span).with_message(item)), ProximateShellError::MissingProperty { subpath, expr, .. 
} => { let subpath = subpath.into_label(); @@ -326,8 +326,8 @@ impl ShellError { ProximateShellError::Diagnostic(diag) => diag.diagnostic, ProximateShellError::CoerceError { left, right } => { Diagnostic::new(Severity::Error, "Coercion error") - .with_label(Label::new_primary(left.tag()).with_message(left.item)) - .with_label(Label::new_secondary(right.tag()).with_message(right.item)) + .with_label(Label::new_primary(left.tag().span).with_message(left.item)) + .with_label(Label::new_secondary(right.tag().span).with_message(right.item)) } ProximateShellError::UntaggedRuntimeError { reason } => Diagnostic::new(Severity::Error, format!("Error: {}", reason)) @@ -341,7 +341,7 @@ impl ShellError { ) -> ShellError { ShellError::diagnostic( Diagnostic::new(Severity::Error, msg.into()) - .with_label(Label::new_primary(tag.into()).with_message(label.into())), + .with_label(Label::new_primary(tag.into().span).with_message(label.into())), ) } @@ -355,15 +355,19 @@ impl ShellError { ShellError::diagnostic( Diagnostic::new_error(msg.into()) .with_label( - Label::new_primary(primary_span.into()).with_message(primary_label.into()), + Label::new_primary(primary_span.into().span).with_message(primary_label.into()), ) .with_label( - Label::new_secondary(secondary_span.into()) + Label::new_secondary(secondary_span.into().span) .with_message(secondary_label.into()), ), ) } + // pub fn string(title: impl Into) -> ShellError { + // ProximateShellError::String(StringError::new(title.into(), String::new())).start() + // } + pub(crate) fn unimplemented(title: impl Into) -> ShellError { ShellError::untagged_runtime_error(&format!("Unimplemented: {}", title.into())) } @@ -472,16 +476,16 @@ impl ProximateShellError { pub(crate) fn tag(&self) -> Option { Some(match self { ProximateShellError::SyntaxError { problem } => problem.tag(), - ProximateShellError::UnexpectedEof { tag, .. 
} => *tag, - ProximateShellError::InvalidCommand { command } => *command, - ProximateShellError::TypeError { actual, .. } => actual.tag, - ProximateShellError::MissingProperty { tag, .. } => *tag, - ProximateShellError::MissingValue { tag, .. } => return *tag, - ProximateShellError::ArgumentError { tag, .. } => *tag, - ProximateShellError::RangeError { actual_kind, .. } => actual_kind.tag, + ProximateShellError::UnexpectedEof { tag, .. } => tag.clone(), + ProximateShellError::InvalidCommand { command } => command.clone(), + ProximateShellError::TypeError { actual, .. } => actual.tag.clone(), + ProximateShellError::MissingProperty { tag, .. } => tag.clone(), + ProximateShellError::MissingValue { tag, .. } => return tag.clone(), + ProximateShellError::ArgumentError { tag, .. } => tag.clone(), + ProximateShellError::RangeError { actual_kind, .. } => actual_kind.tag.clone(), ProximateShellError::Diagnostic(..) => return None, ProximateShellError::UntaggedRuntimeError { .. } => return None, - ProximateShellError::CoerceError { left, right } => left.tag.until(right.tag), + ProximateShellError::CoerceError { left, right } => left.tag.until(&right.tag), }) } } @@ -495,7 +499,7 @@ impl ToDebug for ProximateShellError { #[derive(Debug, Clone, Serialize, Deserialize)] pub struct ShellDiagnostic { - pub(crate) diagnostic: Diagnostic, + pub(crate) diagnostic: Diagnostic, } impl PartialEq for ShellDiagnostic { @@ -521,7 +525,7 @@ impl std::cmp::Ord for ShellDiagnostic { #[derive(Debug, Ord, PartialOrd, Eq, PartialEq, new, Clone, Serialize, Deserialize)] pub struct StringError { title: String, - error: Value, + error: String, } impl std::fmt::Display for ShellError { @@ -598,7 +602,6 @@ impl ShellErrorUtils> for Option> { } } } - pub trait CoerceInto { fn coerce_into(self, operation: impl Into) -> Result; } diff --git a/src/evaluate/evaluator.rs b/src/evaluate/evaluator.rs index 248d2a0816..1e19c31e78 100644 --- a/src/evaluate/evaluator.rs +++ b/src/evaluate/evaluator.rs @@ 
-48,19 +48,23 @@ pub(crate) fn evaluate_baseline_expr( scope: &Scope, source: &Text, ) -> Result, ShellError> { + let tag = Tag { + span: expr.span, + anchor: None, + }; match &expr.item { - RawExpression::Literal(literal) => Ok(evaluate_literal(expr.copy_tag(literal), source)), + RawExpression::Literal(literal) => Ok(evaluate_literal(literal.tagged(tag), source)), RawExpression::ExternalWord => Err(ShellError::argument_error( "Invalid external word", ArgumentError::InvalidExternalWord, - expr.tag(), + tag, )), - RawExpression::FilePath(path) => Ok(Value::path(path.clone()).tagged(expr.tag())), + RawExpression::FilePath(path) => Ok(Value::path(path.clone()).tagged(tag)), RawExpression::Synthetic(hir::Synthetic::String(s)) => { Ok(Value::string(s).tagged_unknown()) } - RawExpression::Variable(var) => evaluate_reference(var, scope, source, expr.tag()), - RawExpression::Command(_) => evaluate_command(expr.tag(), scope, source), + RawExpression::Variable(var) => evaluate_reference(var, scope, source, tag), + RawExpression::Command(_) => evaluate_command(tag, scope, source), RawExpression::ExternalCommand(external) => evaluate_external(external, scope, source), RawExpression::Binary(binary) => { let left = evaluate_baseline_expr(binary.left(), registry, scope, source)?; @@ -69,10 +73,16 @@ pub(crate) fn evaluate_baseline_expr( trace!("left={:?} right={:?}", left.item, right.item); match left.compare(binary.op(), &*right) { - Ok(result) => Ok(Value::boolean(result).tagged(expr.tag())), + Ok(result) => Ok(Value::boolean(result).tagged(tag)), Err((left_type, right_type)) => Err(ShellError::coerce_error( - binary.left().copy_tag(left_type), - binary.right().copy_tag(right_type), + left_type.tagged(Tag { + span: binary.left().span, + anchor: None, + }), + right_type.tagged(Tag { + span: binary.right().span, + anchor: None, + }), )), } } @@ -84,13 +94,10 @@ pub(crate) fn evaluate_baseline_expr( exprs.push(expr); } - Ok(Value::Table(exprs).tagged(expr.tag())) + 
Ok(Value::Table(exprs).tagged(tag)) } RawExpression::Block(block) => { - Ok( - Value::Block(Block::new(block.clone(), source.clone(), expr.tag())) - .tagged(expr.tag()), - ) + Ok(Value::Block(Block::new(block.clone(), source.clone(), tag.clone())).tagged(&tag)) } RawExpression::Path(path) => { let value = evaluate_baseline_expr(path.head(), registry, scope, source)?; @@ -113,16 +120,16 @@ pub(crate) fn evaluate_baseline_expr( return Err(ShellError::labeled_error( "Unknown column", format!("did you mean '{}'?", possible_matches[0].1), - expr.tag(), + &tag, )); } Some(next) => { - item = next.clone().item.tagged(expr.tag()); + item = next.clone().item.tagged(&tag); } }; } - Ok(item.item().clone().tagged(expr.tag())) + Ok(item.item().clone().tagged(tag)) } RawExpression::Boolean(_boolean) => unimplemented!(), } diff --git a/src/format/generic.rs b/src/format/generic.rs index b6f9e29f26..fd058f31fc 100644 --- a/src/format/generic.rs +++ b/src/format/generic.rs @@ -14,7 +14,7 @@ impl RenderView for GenericView<'_> { match self.value { Value::Primitive(p) => Ok(host.stdout(&p.format(None))), Value::Table(l) => { - let view = TableView::from_list(l); + let view = TableView::from_list(l, 0); if let Some(view) = view { view.render_view(host)?; @@ -35,6 +35,8 @@ impl RenderView for GenericView<'_> { view.render_view(host)?; Ok(()) } + + Value::Error(e) => Err(e.clone()), } } } diff --git a/src/format/table.rs b/src/format/table.rs index 286be222c3..b2680a6c96 100644 --- a/src/format/table.rs +++ b/src/format/table.rs @@ -34,7 +34,7 @@ impl TableView { ret } - pub fn from_list(values: &[Tagged]) -> Option { + pub fn from_list(values: &[Tagged], starting_idx: usize) -> Option { if values.len() == 0 { return None; } @@ -68,7 +68,7 @@ impl TableView { if values.len() > 1 { // Indices are black, bold, right-aligned: - row.insert(0, (format!("{}", idx.to_string()), "Fdbr")); + row.insert(0, (format!("{}", (starting_idx + idx).to_string()), "Fdbr")); } entries.push(row); diff --git 
a/src/lib.rs b/src/lib.rs index b955f426e9..bfcaa4510f 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,4 +1,4 @@ -#![recursion_limit = "512"] +#![recursion_limit = "1024"] #[macro_use] mod prelude; @@ -21,7 +21,7 @@ mod traits; mod utils; pub use crate::commands::command::{CallInfo, ReturnSuccess, ReturnValue}; -pub use crate::context::{AnchorLocation, SourceMap}; +pub use crate::context::AnchorLocation; pub use crate::env::host::BasicHost; pub use crate::parser::hir::SyntaxShape; pub use crate::parser::parse::token_tree_builder::TokenTreeBuilder; @@ -31,7 +31,7 @@ pub use cli::cli; pub use data::base::{Primitive, Value}; pub use data::config::{config_path, APP_INFO}; pub use data::dict::{Dictionary, TaggedDictBuilder}; -pub use data::meta::{Span, Tag, Tagged, TaggedItem}; +pub use data::meta::{Span, Spanned, SpannedItem, Tag, Tagged, TaggedItem}; pub use errors::{CoerceInto, ShellError}; pub use num_traits::cast::ToPrimitive; pub use parser::parse::text::Text; diff --git a/src/parser.rs b/src/parser.rs index 3fd853c85c..37c8c09c30 100644 --- a/src/parser.rs +++ b/src/parser.rs @@ -21,10 +21,10 @@ pub(crate) use parse::tokens::{RawNumber, RawToken}; pub(crate) use parse::unit::Unit; pub(crate) use registry::CommandRegistry; -pub fn parse(input: &str, anchor: uuid::Uuid) -> Result { +pub fn parse(input: &str) -> Result { let _ = pretty_env_logger::try_init(); - match pipeline(nom_input(input, anchor)) { + match pipeline(nom_input(input)) { Ok((_rest, val)) => Ok(val), Err(err) => Err(ShellError::parse_error(err)), } diff --git a/src/parser/deserializer.rs b/src/parser/deserializer.rs index 43409fc4df..4b8bf913d5 100644 --- a/src/parser/deserializer.rs +++ b/src/parser/deserializer.rs @@ -52,7 +52,7 @@ impl<'de> ConfigDeserializer<'de> { self.stack.push(DeserializerItem { key_struct_field: Some((name.to_string(), name)), - val: value.unwrap_or_else(|| Value::nothing().tagged(self.call.name_tag)), + val: value.unwrap_or_else(|| 
Value::nothing().tagged(&self.call.name_tag)), }); Ok(()) diff --git a/src/parser/hir.rs b/src/parser/hir.rs index 4fd0a71b3d..ac6423943d 100644 --- a/src/parser/hir.rs +++ b/src/parser/hir.rs @@ -86,7 +86,7 @@ pub enum RawExpression { FilePath(PathBuf), ExternalCommand(ExternalCommand), - Command(Tag), + Command(Span), Boolean(bool), } @@ -123,14 +123,14 @@ impl RawExpression { } } -pub type Expression = Tagged; +pub type Expression = Spanned; impl std::fmt::Display for Expression { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let span = self.tag.span; + let span = self.span; match &self.item { - RawExpression::Literal(literal) => write!(f, "{}", literal.tagged(self.tag)), + RawExpression::Literal(literal) => write!(f, "{}", literal.tagged(self.span)), RawExpression::Synthetic(Synthetic::String(s)) => write!(f, "{}", s), RawExpression::Command(_) => write!(f, "Command{{ {}..{} }}", span.start(), span.end()), RawExpression::ExternalWord => { @@ -159,97 +159,97 @@ impl std::fmt::Display for Expression { } impl Expression { - pub(crate) fn number(i: impl Into, tag: impl Into) -> Expression { - RawExpression::Literal(Literal::Number(i.into())).tagged(tag.into()) + pub(crate) fn number(i: impl Into, span: impl Into) -> Expression { + RawExpression::Literal(Literal::Number(i.into())).spanned(span.into()) } pub(crate) fn size( i: impl Into, unit: impl Into, - tag: impl Into, + span: impl Into, ) -> Expression { - RawExpression::Literal(Literal::Size(i.into(), unit.into())).tagged(tag.into()) + RawExpression::Literal(Literal::Size(i.into(), unit.into())).spanned(span.into()) } pub(crate) fn synthetic_string(s: impl Into) -> Expression { - RawExpression::Synthetic(Synthetic::String(s.into())).tagged_unknown() + RawExpression::Synthetic(Synthetic::String(s.into())).spanned_unknown() } - pub(crate) fn string(inner: impl Into, outer: impl Into) -> Expression { - RawExpression::Literal(Literal::String(inner.into())).tagged(outer.into()) + pub(crate) 
fn string(inner: impl Into, outer: impl Into) -> Expression { + RawExpression::Literal(Literal::String(inner.into())).spanned(outer.into()) } pub(crate) fn path( head: Expression, - tail: Vec>>, - tag: impl Into, + tail: Vec>>, + span: impl Into, ) -> Expression { let tail = tail.into_iter().map(|t| t.map(|s| s.into())).collect(); - RawExpression::Path(Box::new(Path::new(head, tail))).tagged(tag.into()) + RawExpression::Path(Box::new(Path::new(head, tail))).spanned(span.into()) } - pub(crate) fn dot_member(head: Expression, next: Tagged>) -> Expression { - let Tagged { item, tag } = head; - let new_tag = head.tag.until(next.tag); + pub(crate) fn dot_member(head: Expression, next: Spanned>) -> Expression { + let Spanned { item, span } = head; + let new_span = head.span.until(next.span); match item { RawExpression::Path(path) => { let (head, mut tail) = path.parts(); tail.push(next.map(|i| i.into())); - Expression::path(head, tail, new_tag) + Expression::path(head, tail, new_span) } - other => Expression::path(other.tagged(tag), vec![next], new_tag), + other => Expression::path(other.spanned(span), vec![next], new_span), } } pub(crate) fn infix( left: Expression, - op: Tagged>, + op: Spanned>, right: Expression, ) -> Expression { - let new_tag = left.tag.until(right.tag); + let new_span = left.span.until(right.span); RawExpression::Binary(Box::new(Binary::new(left, op.map(|o| o.into()), right))) - .tagged(new_tag) + .spanned(new_span) } - pub(crate) fn file_path(path: impl Into, outer: impl Into) -> Expression { - RawExpression::FilePath(path.into()).tagged(outer) + pub(crate) fn file_path(path: impl Into, outer: impl Into) -> Expression { + RawExpression::FilePath(path.into()).spanned(outer) } - pub(crate) fn list(list: Vec, tag: impl Into) -> Expression { - RawExpression::List(list).tagged(tag) + pub(crate) fn list(list: Vec, span: impl Into) -> Expression { + RawExpression::List(list).spanned(span) } - pub(crate) fn bare(tag: impl Into) -> Expression { - 
RawExpression::Literal(Literal::Bare).tagged(tag) + pub(crate) fn bare(span: impl Into) -> Expression { + RawExpression::Literal(Literal::Bare).spanned(span) } - pub(crate) fn pattern(tag: impl Into) -> Expression { - RawExpression::Literal(Literal::GlobPattern).tagged(tag.into()) + pub(crate) fn pattern(span: impl Into) -> Expression { + RawExpression::Literal(Literal::GlobPattern).spanned(span.into()) } - pub(crate) fn variable(inner: impl Into, outer: impl Into) -> Expression { - RawExpression::Variable(Variable::Other(inner.into())).tagged(outer) + pub(crate) fn variable(inner: impl Into, outer: impl Into) -> Expression { + RawExpression::Variable(Variable::Other(inner.into())).spanned(outer) } - pub(crate) fn external_command(inner: impl Into, outer: impl Into) -> Expression { - RawExpression::ExternalCommand(ExternalCommand::new(inner.into())).tagged(outer) + pub(crate) fn external_command(inner: impl Into, outer: impl Into) -> Expression { + RawExpression::ExternalCommand(ExternalCommand::new(inner.into())).spanned(outer) } - pub(crate) fn it_variable(inner: impl Into, outer: impl Into) -> Expression { - RawExpression::Variable(Variable::It(inner.into())).tagged(outer) + pub(crate) fn it_variable(inner: impl Into, outer: impl Into) -> Expression { + RawExpression::Variable(Variable::It(inner.into())).spanned(outer) } } impl ToDebug for Expression { fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { - match self.item() { - RawExpression::Literal(l) => l.tagged(self.tag()).fmt_debug(f, source), + match &self.item { + RawExpression::Literal(l) => l.spanned(self.span).fmt_debug(f, source), RawExpression::FilePath(p) => write!(f, "{}", p.display()), - RawExpression::ExternalWord => write!(f, "{}", self.tag().slice(source)), + RawExpression::ExternalWord => write!(f, "{}", self.span.slice(source)), RawExpression::Command(tag) => write!(f, "{}", tag.slice(source)), RawExpression::Synthetic(Synthetic::String(s)) => write!(f, "{:?}", s), 
RawExpression::Variable(Variable::It(_)) => write!(f, "$it"), @@ -281,8 +281,8 @@ impl ToDebug for Expression { } } -impl From> for Expression { - fn from(path: Tagged) -> Expression { +impl From> for Expression { + fn from(path: Spanned) -> Expression { path.map(|p| RawExpression::Path(Box::new(p))) } } @@ -296,14 +296,14 @@ impl From> for Expression { pub enum Literal { Number(Number), Size(Number, Unit), - String(Tag), + String(Span), GlobPattern, Bare, } impl std::fmt::Display for Tagged { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", Tagged::new(self.tag, &self.item)) + write!(f, "{}", Tagged::new(self.tag.clone(), &self.item)) } } @@ -321,14 +321,14 @@ impl std::fmt::Display for Tagged<&Literal> { } } -impl ToDebug for Tagged<&Literal> { +impl ToDebug for Spanned<&Literal> { fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { - match self.item() { - Literal::Number(number) => write!(f, "{:?}", *number), + match self.item { + Literal::Number(number) => write!(f, "{:?}", number), Literal::Size(number, unit) => write!(f, "{:?}{:?}", *number, unit), Literal::String(tag) => write!(f, "{}", tag.slice(source)), - Literal::GlobPattern => write!(f, "{}", self.tag().slice(source)), - Literal::Bare => write!(f, "{}", self.tag().slice(source)), + Literal::GlobPattern => write!(f, "{}", self.span.slice(source)), + Literal::Bare => write!(f, "{}", self.span.slice(source)), } } } @@ -347,15 +347,15 @@ impl Literal { #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum Variable { - It(Tag), - Other(Tag), + It(Span), + Other(Span), } impl std::fmt::Display for Variable { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Variable::It(_) => write!(f, "$it"), - Variable::Other(tag) => write!(f, "${{ {}..{} }}", tag.span.start(), tag.span.end()), + Variable::Other(span) => write!(f, "${{ {}..{} }}", span.start(), span.end()), } } } diff --git 
a/src/parser/hir/baseline_parse/tests.rs b/src/parser/hir/baseline_parse/tests.rs index badb177513..d3b9248496 100644 --- a/src/parser/hir/baseline_parse/tests.rs +++ b/src/parser/hir/baseline_parse/tests.rs @@ -6,15 +6,14 @@ use crate::parser::hir::syntax_shape::*; use crate::parser::hir::TokensIterator; use crate::parser::parse::token_tree_builder::{CurriedToken, TokenTreeBuilder as b}; use crate::parser::TokenNode; -use crate::{Span, Tag, Tagged, TaggedItem, Text}; +use crate::{Span, SpannedItem, Tag, Tagged, Text}; use pretty_assertions::assert_eq; use std::fmt::Debug; -use uuid::Uuid; #[test] fn test_parse_string() { parse_tokens(StringShape, vec![b::string("hello")], |tokens| { - hir::Expression::string(inner_string_tag(tokens[0].tag()), tokens[0].tag()) + hir::Expression::string(inner_string_span(tokens[0].span()), tokens[0].span()) }); } @@ -28,7 +27,7 @@ fn test_parse_path() { let bare = tokens[2].expect_bare(); hir::Expression::path( hir::Expression::it_variable(inner_var, outer_var), - vec!["cpu".tagged(bare)], + vec!["cpu".spanned(bare)], outer_var.until(bare), ) }, @@ -50,7 +49,7 @@ fn test_parse_path() { hir::Expression::path( hir::Expression::variable(inner_var, outer_var), - vec!["amount".tagged(amount), "max ghz".tagged(outer_max_ghz)], + vec!["amount".spanned(amount), "max ghz".spanned(outer_max_ghz)], outer_var.until(outer_max_ghz), ) }, @@ -64,13 +63,16 @@ fn test_parse_command() { vec![b::bare("ls"), b::sp(), b::pattern("*.txt")], |tokens| { let bare = tokens[0].expect_bare(); - let pat = tokens[2].tag(); + let pat = tokens[2].span(); ClassifiedCommand::Internal(InternalCommand::new( "ls".to_string(), - bare, + Tag { + span: bare, + anchor: None, + }, hir::Call { - head: Box::new(hir::RawExpression::Command(bare).tagged(bare)), + head: Box::new(hir::RawExpression::Command(bare).spanned(bare)), positional: Some(vec![hir::Expression::pattern(pat)]), named: None, }, @@ -99,7 +101,7 @@ fn test_parse_command() { hir::Expression::path( 
hir::Expression::variable(inner_var, outer_var), - vec!["amount".tagged(amount), "max ghz".tagged(outer_max_ghz)], + vec!["amount".spanned(amount), "max ghz".spanned(outer_max_ghz)], outer_var.until(outer_max_ghz), ) }, @@ -112,11 +114,11 @@ fn parse_tokens( expected: impl FnOnce(Tagged<&[TokenNode]>) -> T, ) { let tokens = b::token_list(tokens); - let (tokens, source) = b::build(test_origin(), tokens); + let (tokens, source) = b::build(tokens); ExpandContext::with_empty(&Text::from(source), |context| { let tokens = tokens.expect_list(); - let mut iterator = TokensIterator::all(tokens.item, *context.tag()); + let mut iterator = TokensIterator::all(tokens.item, *context.span()); let expr = expand_syntax(&shape, &mut iterator, &context); @@ -132,13 +134,6 @@ fn parse_tokens( }) } -fn test_origin() -> Uuid { - Uuid::nil() -} - -fn inner_string_tag(tag: Tag) -> Tag { - Tag { - span: Span::new(tag.span.start() + 1, tag.span.end() - 1), - anchor: tag.anchor, - } +fn inner_string_span(span: Span) -> Span { + Span::new(span.start() + 1, span.end() - 1) } diff --git a/src/parser/hir/binary.rs b/src/parser/hir/binary.rs index a44c41d63a..67c597cb86 100644 --- a/src/parser/hir/binary.rs +++ b/src/parser/hir/binary.rs @@ -1,6 +1,6 @@ use crate::parser::{hir::Expression, Operator}; use crate::prelude::*; -use crate::Tagged; + use derive_new::new; use getset::Getters; use serde::{Deserialize, Serialize}; @@ -12,7 +12,7 @@ use std::fmt; #[get = "pub(crate)"] pub struct Binary { left: Expression, - op: Tagged, + op: Spanned, right: Expression, } diff --git a/src/parser/hir/expand_external_tokens.rs b/src/parser/hir/expand_external_tokens.rs index 238cb4b01b..af966945bd 100644 --- a/src/parser/hir/expand_external_tokens.rs +++ b/src/parser/hir/expand_external_tokens.rs @@ -6,17 +6,17 @@ use crate::parser::{ }, FlatShape, TokenNode, TokensIterator, }; -use crate::{Tag, Tagged, Text}; +use crate::{Span, Spanned, Text}; pub fn expand_external_tokens( token_nodes: &mut 
TokensIterator<'_>, source: &Text, -) -> Result>, ShellError> { - let mut out: Vec> = vec![]; +) -> Result>, ShellError> { + let mut out: Vec> = vec![]; loop { - if let Some(tag) = expand_next_expression(token_nodes)? { - out.push(tag.tagged_string(source)); + if let Some(span) = expand_next_expression(token_nodes)? { + out.push(span.spanned_string(source)); } else { break; } @@ -37,7 +37,7 @@ impl ColorSyntax for ExternalTokensShape { _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Self::Info { loop { // Allow a space @@ -55,7 +55,7 @@ impl ColorSyntax for ExternalTokensShape { pub fn expand_next_expression( token_nodes: &mut TokensIterator<'_>, -) -> Result, ShellError> { +) -> Result, ShellError> { let first = token_nodes.next_non_ws(); let first = match first { @@ -79,14 +79,14 @@ pub fn expand_next_expression( Ok(Some(first.until(last))) } -fn triage_external_head(node: &TokenNode) -> Result { +fn triage_external_head(node: &TokenNode) -> Result { Ok(match node { - TokenNode::Token(token) => token.tag(), + TokenNode::Token(token) => token.span, TokenNode::Call(_call) => unimplemented!("TODO: OMG"), TokenNode::Nodes(_nodes) => unimplemented!("TODO: OMG"), TokenNode::Delimited(_delimited) => unimplemented!("TODO: OMG"), TokenNode::Pipeline(_pipeline) => unimplemented!("TODO: OMG"), - TokenNode::Flag(flag) => flag.tag(), + TokenNode::Flag(flag) => flag.span, TokenNode::Whitespace(_whitespace) => { unreachable!("This function should be called after next_non_ws()") } @@ -96,7 +96,7 @@ fn triage_external_head(node: &TokenNode) -> Result { fn triage_continuation<'a, 'b>( nodes: &'a mut TokensIterator<'b>, -) -> Result, ShellError> { +) -> Result, ShellError> { let mut peeked = nodes.peek_any(); let node = match peeked.node { @@ -116,7 +116,7 @@ fn triage_continuation<'a, 'b>( } peeked.commit(); - Ok(Some(node.tag())) + Ok(Some(node.span())) } #[must_use] @@ -137,7 +137,7 @@ impl ColorSyntax 
for ExternalExpression { _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> ExternalExpressionResult { let atom = match expand_atom( token_nodes, @@ -146,7 +146,7 @@ impl ColorSyntax for ExternalExpression { ExpansionRule::permissive(), ) { Err(_) => unreachable!("TODO: separate infallible expand_atom"), - Ok(Tagged { + Ok(Spanned { item: AtomicToken::Eof { .. }, .. }) => return ExternalExpressionResult::Eof, diff --git a/src/parser/hir/external_command.rs b/src/parser/hir/external_command.rs index 2dd42c1312..df71328cab 100644 --- a/src/parser/hir/external_command.rs +++ b/src/parser/hir/external_command.rs @@ -9,7 +9,7 @@ use std::fmt; )] #[get = "pub(crate)"] pub struct ExternalCommand { - pub(crate) name: Tag, + pub(crate) name: Span, } impl ToDebug for ExternalCommand { diff --git a/src/parser/hir/named.rs b/src/parser/hir/named.rs index 838f643be5..f7387e4fd4 100644 --- a/src/parser/hir/named.rs +++ b/src/parser/hir/named.rs @@ -43,9 +43,13 @@ impl NamedArguments { match switch { None => self.named.insert(name.into(), NamedValue::AbsentSwitch), - Some(flag) => self - .named - .insert(name, NamedValue::PresentSwitch(*flag.name())), + Some(flag) => self.named.insert( + name, + NamedValue::PresentSwitch(Tag { + span: *flag.name(), + anchor: None, + }), + ), }; } diff --git a/src/parser/hir/path.rs b/src/parser/hir/path.rs index a1925102fb..5867132986 100644 --- a/src/parser/hir/path.rs +++ b/src/parser/hir/path.rs @@ -1,6 +1,5 @@ use crate::parser::hir::Expression; use crate::prelude::*; -use crate::Tagged; use derive_new::new; use getset::{Getters, MutGetters}; use serde::{Deserialize, Serialize}; @@ -24,7 +23,7 @@ use std::fmt; pub struct Path { head: Expression, #[get_mut = "pub(crate)"] - tail: Vec>, + tail: Vec>, } impl fmt::Display for Path { @@ -40,7 +39,7 @@ impl fmt::Display for Path { } impl Path { - pub(crate) fn parts(self) -> (Expression, Vec>) { + pub(crate) fn parts(self) 
-> (Expression, Vec>) { (self.head, self.tail) } } @@ -50,7 +49,7 @@ impl ToDebug for Path { write!(f, "{}", self.head.debug(source))?; for part in &self.tail { - write!(f, ".{}", part.item())?; + write!(f, ".{}", part.item)?; } Ok(()) diff --git a/src/parser/hir/syntax_shape.rs b/src/parser/hir/syntax_shape.rs index 1a140d86bd..8accfbde2b 100644 --- a/src/parser/hir/syntax_shape.rs +++ b/src/parser/hir/syntax_shape.rs @@ -64,7 +64,7 @@ impl FallibleColorSyntax for SyntaxShape { _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result<(), ShellError> { match self { SyntaxShape::Any => { @@ -158,7 +158,7 @@ pub struct ExpandContext<'context> { #[get = "pub(crate)"] registry: &'context CommandRegistry, #[get = "pub(crate)"] - tag: Tag, + span: Span, #[get = "pub(crate)"] source: &'context Text, homedir: Option, @@ -179,7 +179,7 @@ impl<'context> ExpandContext<'context> { callback(ExpandContext { registry: ®istry, - tag: Tag::unknown(), + span: Span::unknown(), source, homedir: None, }) @@ -211,7 +211,7 @@ pub trait FallibleColorSyntax: std::fmt::Debug + Copy { input: &Self::Input, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result; } @@ -224,7 +224,7 @@ pub trait ColorSyntax: std::fmt::Debug + Copy { input: &Self::Input, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Self::Info; } @@ -240,7 +240,7 @@ pub trait ColorSyntax: std::fmt::Debug + Copy { // input: &Self::Input, // token_nodes: &'b mut TokensIterator<'a>, // context: &ExpandContext, -// shapes: &mut Vec>, +// shapes: &mut Vec>, // ) -> Result { // FallibleColorSyntax::color_syntax(self, input, token_nodes, context, shapes) // } @@ -282,7 +282,7 @@ pub fn color_syntax<'a, 'b, T: ColorSyntax, U>( shape: &T, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: 
&mut Vec>, ) -> ((), U) { trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes, context.source)); @@ -310,7 +310,7 @@ pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result { trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes, context.source)); @@ -344,7 +344,7 @@ pub fn color_syntax_with<'a, 'b, T: ColorSyntax, U, I>( input: &I, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> ((), U) { trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes, context.source)); @@ -373,7 +373,7 @@ pub fn color_fallible_syntax_with<'a, 'b, T: FallibleColorSyntax, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result { trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes, context.source)); @@ -446,15 +446,15 @@ pub trait SkipSyntax: std::fmt::Debug + Copy { enum BarePathState { Initial, - Seen(Tag, Tag), + Seen(Span, Span), Error(ShellError), } impl BarePathState { - pub fn seen(self, tag: Tag) -> BarePathState { + pub fn seen(self, span: Span) -> BarePathState { match self { - BarePathState::Initial => BarePathState::Seen(tag, tag), - BarePathState::Seen(start, _) => BarePathState::Seen(start, tag), + BarePathState::Initial => BarePathState::Seen(span, span), + BarePathState::Seen(start, _) => BarePathState::Seen(start, span), BarePathState::Error(err) => BarePathState::Error(err), } } @@ -467,7 +467,7 @@ impl BarePathState { } } - pub fn into_bare(self) -> Result { + pub fn into_bare(self) -> Result { match self { BarePathState::Initial => unreachable!("into_bare in initial state"), BarePathState::Seen(start, end) => Ok(start.until(end)), @@ -480,7 +480,7 @@ pub fn expand_bare<'a, 'b>( token_nodes: &'b 
mut TokensIterator<'a>, _context: &ExpandContext, predicate: impl Fn(&TokenNode) -> bool, -) -> Result { +) -> Result { let mut state = BarePathState::Initial; loop { @@ -494,7 +494,7 @@ pub fn expand_bare<'a, 'b>( } Some(node) => { if predicate(node) { - state = state.seen(node.tag()); + state = state.seen(node.span()); peeked.commit(); } else { state = state.end(peeked, "word"); @@ -511,19 +511,19 @@ pub fn expand_bare<'a, 'b>( pub struct BarePathShape; impl ExpandSyntax for BarePathShape { - type Output = Tag; + type Output = Span; fn expand_syntax<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - ) -> Result { + ) -> Result { expand_bare(token_nodes, context, |token| match token { - TokenNode::Token(Tagged { + TokenNode::Token(Spanned { item: RawToken::Bare, .. }) - | TokenNode::Token(Tagged { + | TokenNode::Token(Spanned { item: RawToken::Operator(Operator::Dot), .. }) => true, @@ -545,15 +545,15 @@ impl FallibleColorSyntax for BareShape { input: &FlatShape, token_nodes: &'b mut TokensIterator<'a>, _context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result<(), ShellError> { token_nodes.peek_any_token(|token| match token { // If it's a bare token, color it - TokenNode::Token(Tagged { + TokenNode::Token(Spanned { item: RawToken::Bare, - tag, + span, }) => { - shapes.push((*input).tagged(tag)); + shapes.push((*input).spanned(*span)); Ok(()) } @@ -564,7 +564,7 @@ impl FallibleColorSyntax for BareShape { } impl ExpandSyntax for BareShape { - type Output = Tagged; + type Output = Spanned; fn expand_syntax<'a, 'b>( &self, @@ -574,12 +574,12 @@ impl ExpandSyntax for BareShape { let peeked = token_nodes.peek_any().not_eof("word")?; match peeked.node { - TokenNode::Token(Tagged { + TokenNode::Token(Spanned { item: RawToken::Bare, - tag, + span, }) => { peeked.commit(); - Ok(tag.tagged_string(context.source)) + Ok(span.spanned_string(context.source)) } other => Err(ShellError::type_error("word", 
other.tagged_type_name())), @@ -608,9 +608,9 @@ impl TestSyntax for BareShape { #[derive(Debug)] pub enum CommandSignature { - Internal(Tagged>), - LiteralExternal { outer: Tag, inner: Tag }, - External(Tag), + Internal(Spanned>), + LiteralExternal { outer: Span, inner: Span }, + External(Span), Expression(hir::Expression), } @@ -618,14 +618,15 @@ impl CommandSignature { pub fn to_expression(&self) -> hir::Expression { match self { CommandSignature::Internal(command) => { - let tag = command.tag; - hir::RawExpression::Command(tag).tagged(tag) + let span = command.span; + hir::RawExpression::Command(span).spanned(span) } CommandSignature::LiteralExternal { outer, inner } => { - hir::RawExpression::ExternalCommand(hir::ExternalCommand::new(*inner)).tagged(outer) + hir::RawExpression::ExternalCommand(hir::ExternalCommand::new(*inner)) + .spanned(*outer) } - CommandSignature::External(tag) => { - hir::RawExpression::ExternalCommand(hir::ExternalCommand::new(*tag)).tagged(tag) + CommandSignature::External(span) => { + hir::RawExpression::ExternalCommand(hir::ExternalCommand::new(*span)).spanned(*span) } CommandSignature::Expression(expr) => expr.clone(), } @@ -645,7 +646,7 @@ impl FallibleColorSyntax for PipelineShape { _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result<(), ShellError> { // Make sure we're looking at a pipeline let Pipeline { parts, .. 
} = token_nodes.peek_any_token(|node| node.as_pipeline())?; @@ -654,11 +655,11 @@ impl FallibleColorSyntax for PipelineShape { for part in parts { // If the pipeline part has a prefix `|`, emit a pipe to color if let Some(pipe) = part.pipe { - shapes.push(FlatShape::Pipe.tagged(pipe)); + shapes.push(FlatShape::Pipe.spanned(pipe)); } // Create a new iterator containing the tokens in the pipeline part to color - let mut token_nodes = TokensIterator::new(&part.tokens.item, part.tag, false); + let mut token_nodes = TokensIterator::new(&part.tokens.item, part.span, false); color_syntax(&MaybeSpaceShape, &mut token_nodes, context, shapes); color_syntax(&CommandShape, &mut token_nodes, context, shapes); @@ -685,7 +686,7 @@ impl ExpandSyntax for PipelineShape { let commands: Result, ShellError> = parts .iter() - .map(|item| classify_command(&item, context, &source)) + .map(|item| classify_command(item, context, &source)) .collect(); Ok(ClassifiedPipeline { @@ -711,7 +712,7 @@ impl FallibleColorSyntax for CommandHeadShape { _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result { // If we don't ultimately find a token, roll back token_nodes.atomic(|token_nodes| { @@ -726,7 +727,7 @@ impl FallibleColorSyntax for CommandHeadShape { match atom.item { // If the head is an explicit external command (^cmd), color it as an external command AtomicToken::ExternalCommand { command } => { - shapes.push(FlatShape::ExternalCommand.tagged(command)); + shapes.push(FlatShape::ExternalCommand.spanned(command)); Ok(CommandHeadKind::External) } @@ -736,19 +737,19 @@ impl FallibleColorSyntax for CommandHeadShape { if context.registry.has(name) { // If the registry has the command, color it as an internal command - shapes.push(FlatShape::InternalCommand.tagged(text)); + shapes.push(FlatShape::InternalCommand.spanned(text)); let command = context.registry.expect_command(name); 
Ok(CommandHeadKind::Internal(command.signature())) } else { // Otherwise, color it as an external command - shapes.push(FlatShape::ExternalCommand.tagged(text)); + shapes.push(FlatShape::ExternalCommand.spanned(text)); Ok(CommandHeadKind::External) } } // Otherwise, we're not actually looking at a command _ => Err(ShellError::syntax_error( - "No command at the head".tagged(atom.tag), + "No command at the head".tagged(atom.span), )), } }) @@ -764,25 +765,25 @@ impl ExpandSyntax for CommandHeadShape { context: &ExpandContext, ) -> Result { let node = - parse_single_node_skipping_ws(token_nodes, "command head1", |token, token_tag, _| { + parse_single_node_skipping_ws(token_nodes, "command head1", |token, token_span, _| { Ok(match token { - RawToken::ExternalCommand(tag) => CommandSignature::LiteralExternal { - outer: token_tag, - inner: tag, + RawToken::ExternalCommand(span) => CommandSignature::LiteralExternal { + outer: token_span, + inner: span, }, RawToken::Bare => { - let name = token_tag.slice(context.source); + let name = token_span.slice(context.source); if context.registry.has(name) { let command = context.registry.expect_command(name); - CommandSignature::Internal(command.tagged(token_tag)) + CommandSignature::Internal(command.spanned(token_span)) } else { - CommandSignature::External(token_tag) + CommandSignature::External(token_span) } } _ => { return Err(ShellError::type_error( "command head2", - token.type_name().tagged(token_tag), + token.type_name().tagged(token_span), )) } }) @@ -813,7 +814,7 @@ impl ExpandSyntax for ClassifiedCommandShape { match &head { CommandSignature::Expression(expr) => Err(ShellError::syntax_error( - "Unexpected expression in command position".tagged(expr.tag), + "Unexpected expression in command position".tagged(expr.span), )), // If the command starts with `^`, treat it as an external command no matter what @@ -831,7 +832,7 @@ impl ExpandSyntax for ClassifiedCommandShape { CommandSignature::Internal(command) => { let tail = - 
parse_command_tail(&command.signature(), &context, iterator, command.tag)?; + parse_command_tail(&command.signature(), &context, iterator, command.span)?; let (positional, named) = match tail { None => (None, None), @@ -846,7 +847,10 @@ impl ExpandSyntax for ClassifiedCommandShape { Ok(ClassifiedCommand::Internal(InternalCommand::new( command.item.name().to_string(), - command.tag, + Tag { + span: command.span, + anchor: None, + }, call, ))) } @@ -866,7 +870,7 @@ impl FallibleColorSyntax for InternalCommandHeadShape { _input: &(), token_nodes: &'b mut TokensIterator<'a>, _context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result<(), ShellError> { let peeked_head = token_nodes.peek_non_ws().not_eof("command head4"); @@ -876,17 +880,17 @@ impl FallibleColorSyntax for InternalCommandHeadShape { }; let _expr = match peeked_head.node { - TokenNode::Token(Tagged { + TokenNode::Token(Spanned { item: RawToken::Bare, - tag, - }) => shapes.push(FlatShape::Word.tagged(tag)), + span, + }) => shapes.push(FlatShape::Word.spanned(*span)), - TokenNode::Token(Tagged { + TokenNode::Token(Spanned { item: RawToken::String(_inner_tag), - tag, - }) => shapes.push(FlatShape::String.tagged(tag)), + span, + }) => shapes.push(FlatShape::String.spanned(*span)), - _node => shapes.push(FlatShape::Error.tagged(peeked_head.node.tag())), + _node => shapes.push(FlatShape::Error.spanned(peeked_head.node.span())), }; peeked_head.commit(); @@ -905,16 +909,16 @@ impl ExpandExpression for InternalCommandHeadShape { let expr = match peeked_head.node { TokenNode::Token( - spanned @ Tagged { + spanned @ Spanned { item: RawToken::Bare, .. 
}, ) => spanned.map(|_| hir::RawExpression::Literal(hir::Literal::Bare)), - TokenNode::Token(Tagged { - item: RawToken::String(inner_tag), - tag, - }) => hir::RawExpression::Literal(hir::Literal::String(*inner_tag)).tagged(*tag), + TokenNode::Token(Spanned { + item: RawToken::String(inner_span), + span, + }) => hir::RawExpression::Literal(hir::Literal::String(*inner_span)).spanned(*span), node => { return Err(ShellError::type_error( @@ -932,24 +936,24 @@ impl ExpandExpression for InternalCommandHeadShape { pub(crate) struct SingleError<'token> { expected: &'static str, - node: &'token Tagged, + node: &'token Spanned, } impl<'token> SingleError<'token> { pub(crate) fn error(&self) -> ShellError { - ShellError::type_error(self.expected, self.node.type_name().tagged(self.node.tag)) + ShellError::type_error(self.expected, self.node.type_name().tagged(self.node.span)) } } fn parse_single_node<'a, 'b, T>( token_nodes: &'b mut TokensIterator<'a>, expected: &'static str, - callback: impl FnOnce(RawToken, Tag, SingleError) -> Result, + callback: impl FnOnce(RawToken, Span, SingleError) -> Result, ) -> Result { token_nodes.peek_any_token(|node| match node { TokenNode::Token(token) => callback( token.item, - token.tag(), + token.span, SingleError { expected, node: token, @@ -963,14 +967,14 @@ fn parse_single_node<'a, 'b, T>( fn parse_single_node_skipping_ws<'a, 'b, T>( token_nodes: &'b mut TokensIterator<'a>, expected: &'static str, - callback: impl FnOnce(RawToken, Tag, SingleError) -> Result, + callback: impl FnOnce(RawToken, Span, SingleError) -> Result, ) -> Result { let peeked = token_nodes.peek_non_ws().not_eof(expected)?; let expr = match peeked.node { TokenNode::Token(token) => callback( token.item, - token.tag(), + token.span, SingleError { expected, node: token, @@ -997,7 +1001,7 @@ impl FallibleColorSyntax for WhitespaceShape { _input: &(), token_nodes: &'b mut TokensIterator<'a>, _context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result<(), 
ShellError> { let peeked = token_nodes.peek_any().not_eof("whitespace"); @@ -1007,7 +1011,7 @@ impl FallibleColorSyntax for WhitespaceShape { }; let _tag = match peeked.node { - TokenNode::Whitespace(tag) => shapes.push(FlatShape::Whitespace.tagged(tag)), + TokenNode::Whitespace(span) => shapes.push(FlatShape::Whitespace.spanned(*span)), _other => return Ok(()), }; @@ -1019,7 +1023,7 @@ impl FallibleColorSyntax for WhitespaceShape { } impl ExpandSyntax for WhitespaceShape { - type Output = Tag; + type Output = Span; fn expand_syntax<'a, 'b>( &self, @@ -1028,7 +1032,7 @@ impl ExpandSyntax for WhitespaceShape { ) -> Result { let peeked = token_nodes.peek_any().not_eof("whitespace")?; - let tag = match peeked.node { + let span = match peeked.node { TokenNode::Whitespace(tag) => *tag, other => { @@ -1041,7 +1045,7 @@ impl ExpandSyntax for WhitespaceShape { peeked.commit(); - Ok(tag) + Ok(span) } } @@ -1094,7 +1098,7 @@ impl ColorSyntax for MaybeSpaceShape { _input: &(), token_nodes: &'b mut TokensIterator<'a>, _context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Self::Info { let peeked = token_nodes.peek_any().not_eof("whitespace"); @@ -1103,9 +1107,9 @@ impl ColorSyntax for MaybeSpaceShape { Ok(peeked) => peeked, }; - if let TokenNode::Whitespace(tag) = peeked.node { + if let TokenNode::Whitespace(span) = peeked.node { peeked.commit(); - shapes.push(FlatShape::Whitespace.tagged(tag)); + shapes.push(FlatShape::Whitespace.spanned(*span)); } } } @@ -1122,14 +1126,14 @@ impl FallibleColorSyntax for SpaceShape { _input: &(), token_nodes: &'b mut TokensIterator<'a>, _context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result<(), ShellError> { let peeked = token_nodes.peek_any().not_eof("whitespace")?; match peeked.node { - TokenNode::Whitespace(tag) => { + TokenNode::Whitespace(span) => { peeked.commit(); - shapes.push(FlatShape::Whitespace.tagged(tag)); + shapes.push(FlatShape::Whitespace.spanned(*span)); Ok(()) } @@ -1168,26 
+1172,26 @@ pub fn spaced(inner: T) -> SpacedExpression { SpacedExpression { inner } } -fn expand_variable(tag: Tag, token_tag: Tag, source: &Text) -> hir::Expression { - if tag.slice(source) == "it" { - hir::Expression::it_variable(tag, token_tag) +fn expand_variable(span: Span, token_span: Span, source: &Text) -> hir::Expression { + if span.slice(source) == "it" { + hir::Expression::it_variable(span, token_span) } else { - hir::Expression::variable(tag, token_tag) + hir::Expression::variable(span, token_span) } } fn classify_command( - command: &Tagged, + command: &Spanned, context: &ExpandContext, source: &Text, ) -> Result { - let mut iterator = TokensIterator::new(&command.tokens.item, command.tag, true); + let mut iterator = TokensIterator::new(&command.tokens.item, command.span, true); let head = CommandHeadShape.expand_syntax(&mut iterator, &context)?; match &head { CommandSignature::Expression(_) => Err(ShellError::syntax_error( - "Unexpected expression in command position".tagged(command.tag), + "Unexpected expression in command position".tagged(command.span), )), // If the command starts with `^`, treat it as an external command no matter what @@ -1205,7 +1209,7 @@ fn classify_command( CommandSignature::Internal(command) => { let tail = - parse_command_tail(&command.signature(), &context, &mut iterator, command.tag)?; + parse_command_tail(&command.signature(), &context, &mut iterator, command.span)?; let (positional, named) = match tail { None => (None, None), @@ -1220,7 +1224,10 @@ fn classify_command( Ok(ClassifiedCommand::Internal(InternalCommand::new( command.name().to_string(), - command.tag, + Tag { + span: command.span, + anchor: None, + }, call, ))) } @@ -1239,7 +1246,7 @@ impl ColorSyntax for CommandShape { _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) { let kind = color_fallible_syntax(&CommandHeadShape, token_nodes, context, shapes); diff --git 
a/src/parser/hir/syntax_shape/block.rs b/src/parser/hir/syntax_shape/block.rs index 806681691e..7518d8f946 100644 --- a/src/parser/hir/syntax_shape/block.rs +++ b/src/parser/hir/syntax_shape/block.rs @@ -11,7 +11,7 @@ use crate::parser::{ parse::token_tree::Delimiter, RawToken, TokenNode, }; -use crate::{Tag, Tagged, TaggedItem}; +use crate::{Span, Spanned, SpannedItem}; #[derive(Debug, Copy, Clone)] pub struct AnyBlockShape; @@ -25,7 +25,7 @@ impl FallibleColorSyntax for AnyBlockShape { _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result<(), ShellError> { let block = token_nodes.peek_non_ws().not_eof("block"); @@ -39,11 +39,11 @@ impl FallibleColorSyntax for AnyBlockShape { match block { // If so, color it as a block - Some((children, tags)) => { - let mut token_nodes = TokensIterator::new(children.item, context.tag, false); + Some((children, spans)) => { + let mut token_nodes = TokensIterator::new(children.item, context.span, false); color_syntax_with( &DelimitedShape, - &(Delimiter::Brace, tags.0, tags.1), + &(Delimiter::Brace, spans.0, spans.1), &mut token_nodes, context, shapes, @@ -72,11 +72,11 @@ impl ExpandExpression for AnyBlockShape { match block { Some((block, _tags)) => { - let mut iterator = TokensIterator::new(&block.item, context.tag, false); + let mut iterator = TokensIterator::new(&block.item, context.span, false); let exprs = expand_syntax(&ExpressionListShape, &mut iterator, context)?; - return Ok(hir::RawExpression::Block(exprs).tagged(block.tag)); + return Ok(hir::RawExpression::Block(exprs).spanned(block.span)); } _ => {} } @@ -97,7 +97,7 @@ impl FallibleColorSyntax for ShorthandBlock { _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result<(), ShellError> { // Try to find a shorthand head. 
If none found, fail color_fallible_syntax(&ShorthandPath, token_nodes, context, shapes)?; @@ -126,10 +126,10 @@ impl ExpandExpression for ShorthandBlock { context: &ExpandContext, ) -> Result { let path = expand_expr(&ShorthandPath, token_nodes, context)?; - let start = path.tag; + let start = path.span; let expr = continue_expression(path, token_nodes, context)?; - let end = expr.tag; - let block = hir::RawExpression::Block(vec![expr]).tagged(start.until(end)); + let end = expr.span; + let block = hir::RawExpression::Block(vec![expr]).spanned(start.until(end)); Ok(block) } @@ -148,7 +148,7 @@ impl FallibleColorSyntax for ShorthandPath { _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result<(), ShellError> { token_nodes.atomic(|token_nodes| { let variable = color_fallible_syntax(&VariablePathShape, token_nodes, context, shapes); @@ -232,29 +232,29 @@ impl FallibleColorSyntax for ShorthandHeadShape { _input: &(), token_nodes: &'b mut TokensIterator<'a>, _context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result<(), ShellError> { // A shorthand path must not be at EOF let peeked = token_nodes.peek_non_ws().not_eof("shorthand path")?; match peeked.node { // If the head of a shorthand path is a bare token, it expands to `$it.bare` - TokenNode::Token(Tagged { + TokenNode::Token(Spanned { item: RawToken::Bare, - tag, + span, }) => { peeked.commit(); - shapes.push(FlatShape::BareMember.tagged(tag)); + shapes.push(FlatShape::BareMember.spanned(*span)); Ok(()) } // If the head of a shorthand path is a string, it expands to `$it."some string"` - TokenNode::Token(Tagged { + TokenNode::Token(Spanned { item: RawToken::String(_), - tag: outer, + span: outer, }) => { peeked.commit(); - shapes.push(FlatShape::StringMember.tagged(outer)); + shapes.push(FlatShape::StringMember.spanned(*outer)); Ok(()) } @@ -277,40 +277,40 @@ impl ExpandExpression for ShorthandHeadShape { match 
peeked.node { // If the head of a shorthand path is a bare token, it expands to `$it.bare` - TokenNode::Token(Tagged { + TokenNode::Token(Spanned { item: RawToken::Bare, - tag, + span, }) => { // Commit the peeked token peeked.commit(); // Synthesize an `$it` expression - let it = synthetic_it(token_nodes.anchor()); + let it = synthetic_it(); // Make a path out of `$it` and the bare token as a member Ok(hir::Expression::path( it, - vec![tag.tagged_string(context.source)], - tag, + vec![span.spanned_string(context.source)], + *span, )) } // If the head of a shorthand path is a string, it expands to `$it."some string"` - TokenNode::Token(Tagged { + TokenNode::Token(Spanned { item: RawToken::String(inner), - tag: outer, + span: outer, }) => { // Commit the peeked token peeked.commit(); // Synthesize an `$it` expression - let it = synthetic_it(token_nodes.anchor()); + let it = synthetic_it(); // Make a path out of `$it` and the bare token as a member Ok(hir::Expression::path( it, - vec![inner.string(context.source).tagged(outer)], - outer, + vec![inner.string(context.source).spanned(*outer)], + *outer, )) } @@ -325,6 +325,6 @@ impl ExpandExpression for ShorthandHeadShape { } } -fn synthetic_it(origin: uuid::Uuid) -> hir::Expression { - hir::Expression::it_variable(Tag::unknown_span(origin), Tag::unknown_span(origin)) +fn synthetic_it() -> hir::Expression { + hir::Expression::it_variable(Span::unknown(), Span::unknown()) } diff --git a/src/parser/hir/syntax_shape/expression.rs b/src/parser/hir/syntax_shape/expression.rs index fc99c38dc3..0be63eaeb6 100644 --- a/src/parser/hir/syntax_shape/expression.rs +++ b/src/parser/hir/syntax_shape/expression.rs @@ -46,7 +46,7 @@ impl FallibleColorSyntax for AnyExpressionShape { _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result<(), ShellError> { // Look for an expression at the cursor color_fallible_syntax(&AnyExpressionStartShape, token_nodes, 
context, shapes)?; @@ -94,7 +94,7 @@ pub(crate) fn continue_expression( pub(crate) fn continue_coloring_expression( token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result<(), ShellError> { // if there's not even one expression continuation, fail color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes)?; @@ -131,20 +131,23 @@ impl ExpandExpression for AnyExpressionStartShape { return Ok(hir::Expression::size( number.to_number(context.source), unit.item, - atom.tag, + Tag { + span: atom.span, + anchor: None, + }, )) } AtomicToken::SquareDelimited { nodes, .. } => { - expand_delimited_square(&nodes, atom.tag, context) + expand_delimited_square(&nodes, atom.span.into(), context) } AtomicToken::Word { .. } | AtomicToken::Dot { .. } => { let end = expand_syntax(&BareTailShape, token_nodes, context)?; - Ok(hir::Expression::bare(atom.tag.until_option(end))) + Ok(hir::Expression::bare(atom.span.until_option(end))) } - other => return other.tagged(atom.tag).into_hir(context, "expression"), + other => return other.spanned(atom.span).into_hir(context, "expression"), } } } @@ -158,7 +161,7 @@ impl FallibleColorSyntax for AnyExpressionStartShape { _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result<(), ShellError> { let atom = token_nodes.spanned(|token_nodes| { expand_atom( @@ -170,15 +173,15 @@ impl FallibleColorSyntax for AnyExpressionStartShape { }); let atom = match atom { - Tagged { + Spanned { item: Err(_err), - tag, + span, } => { - shapes.push(FlatShape::Error.tagged(tag)); + shapes.push(FlatShape::Error.spanned(span)); return Ok(()); } - Tagged { + Spanned { item: Ok(value), .. 
} => value, }; @@ -186,18 +189,18 @@ impl FallibleColorSyntax for AnyExpressionStartShape { match atom.item { AtomicToken::Size { number, unit } => shapes.push( FlatShape::Size { - number: number.tag, - unit: unit.tag, + number: number.span.into(), + unit: unit.span.into(), } - .tagged(atom.tag), + .spanned(atom.span), ), - AtomicToken::SquareDelimited { nodes, tags } => { - color_delimited_square(tags, &nodes, atom.tag, context, shapes) + AtomicToken::SquareDelimited { nodes, spans } => { + color_delimited_square(spans, &nodes, atom.span.into(), context, shapes) } AtomicToken::Word { .. } | AtomicToken::Dot { .. } => { - shapes.push(FlatShape::Word.tagged(atom.tag)); + shapes.push(FlatShape::Word.spanned(atom.span)); } _ => atom.color_tokens(shapes), @@ -219,7 +222,7 @@ impl FallibleColorSyntax for BareTailShape { _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result<(), ShellError> { let len = shapes.len(); @@ -267,19 +270,19 @@ impl FallibleColorSyntax for BareTailShape { } impl ExpandSyntax for BareTailShape { - type Output = Option; + type Output = Option; fn expand_syntax<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - ) -> Result, ShellError> { - let mut end: Option = None; + ) -> Result, ShellError> { + let mut end: Option = None; loop { match expand_syntax(&BareShape, token_nodes, context) { Ok(bare) => { - end = Some(bare.tag); + end = Some(bare.span); continue; } diff --git a/src/parser/hir/syntax_shape/expression/atom.rs b/src/parser/hir/syntax_shape/expression/atom.rs index 83306da741..bb1b8065ec 100644 --- a/src/parser/hir/syntax_shape/expression/atom.rs +++ b/src/parser/hir/syntax_shape/expression/atom.rs @@ -9,82 +9,83 @@ use crate::parser::{ DelimitedNode, Delimiter, FlatShape, RawToken, TokenNode, Unit, }; use crate::prelude::*; +use crate::{Span, Spanned}; #[derive(Debug)] pub enum AtomicToken<'tokens> { Eof { - tag: Tag, + span: 
Span, }, Error { - error: Tagged, + error: Spanned, }, Number { number: RawNumber, }, Size { - number: Tagged, - unit: Tagged, + number: Spanned, + unit: Spanned, }, String { - body: Tag, + body: Span, }, ItVariable { - name: Tag, + name: Span, }, Variable { - name: Tag, + name: Span, }, ExternalCommand { - command: Tag, + command: Span, }, ExternalWord { - text: Tag, + text: Span, }, GlobPattern { - pattern: Tag, + pattern: Span, }, FilePath { - path: Tag, + path: Span, }, Word { - text: Tag, + text: Span, }, SquareDelimited { - tags: (Tag, Tag), + spans: (Span, Span), nodes: &'tokens Vec, }, ParenDelimited { - tags: (Tag, Tag), + span: (Span, Span), nodes: &'tokens Vec, }, BraceDelimited { - tags: (Tag, Tag), + spans: (Span, Span), nodes: &'tokens Vec, }, Pipeline { - pipe: Option, - elements: Tagged<&'tokens Vec>, + pipe: Option, + elements: Spanned<&'tokens Vec>, }, ShorthandFlag { - name: Tag, + name: Span, }, LonghandFlag { - name: Tag, + name: Span, }, Dot { - text: Tag, + text: Span, }, Operator { - text: Tag, + text: Span, }, Whitespace { - text: Tag, + text: Span, }, } -pub type TaggedAtomicToken<'tokens> = Tagged>; +pub type SpannedAtomicToken<'tokens> = Spanned>; -impl<'tokens> TaggedAtomicToken<'tokens> { +impl<'tokens> SpannedAtomicToken<'tokens> { pub fn into_hir( &self, context: &ExpandContext, @@ -94,55 +95,55 @@ impl<'tokens> TaggedAtomicToken<'tokens> { AtomicToken::Eof { .. } => { return Err(ShellError::type_error( expected, - "eof atomic token".tagged(self.tag), + "eof atomic token".tagged(self.span), )) } AtomicToken::Error { .. } => { return Err(ShellError::type_error( expected, - "eof atomic token".tagged(self.tag), + "eof atomic token".tagged(self.span), )) } AtomicToken::Operator { .. } => { return Err(ShellError::type_error( expected, - "operator".tagged(self.tag), + "operator".tagged(self.span), )) } AtomicToken::ShorthandFlag { .. 
} => { return Err(ShellError::type_error( expected, - "shorthand flag".tagged(self.tag), + "shorthand flag".tagged(self.span), )) } AtomicToken::LonghandFlag { .. } => { - return Err(ShellError::type_error(expected, "flag".tagged(self.tag))) + return Err(ShellError::type_error(expected, "flag".tagged(self.span))) } AtomicToken::Whitespace { .. } => { return Err(ShellError::unimplemented("whitespace in AtomicToken")) } AtomicToken::Dot { .. } => { - return Err(ShellError::type_error(expected, "dot".tagged(self.tag))) + return Err(ShellError::type_error(expected, "dot".tagged(self.span))) } AtomicToken::Number { number } => { - Expression::number(number.to_number(context.source), self.tag) + Expression::number(number.to_number(context.source), self.span) } AtomicToken::FilePath { path } => Expression::file_path( expand_file_path(path.slice(context.source), context), - self.tag, + self.span, ), AtomicToken::Size { number, unit } => { - Expression::size(number.to_number(context.source), **unit, self.tag) + Expression::size(number.to_number(context.source), **unit, self.span) } - AtomicToken::String { body } => Expression::string(body, self.tag), - AtomicToken::ItVariable { name } => Expression::it_variable(name, self.tag), - AtomicToken::Variable { name } => Expression::variable(name, self.tag), + AtomicToken::String { body } => Expression::string(*body, self.span), + AtomicToken::ItVariable { name } => Expression::it_variable(*name, self.span), + AtomicToken::Variable { name } => Expression::variable(*name, self.span), AtomicToken::ExternalCommand { command } => { - Expression::external_command(command, self.tag) + Expression::external_command(*command, self.span) } - AtomicToken::ExternalWord { text } => Expression::string(text, self.tag), - AtomicToken::GlobPattern { pattern } => Expression::pattern(pattern), - AtomicToken::Word { text } => Expression::string(text, text), + AtomicToken::ExternalWord { text } => Expression::string(*text, self.span), + 
AtomicToken::GlobPattern { pattern } => Expression::pattern(*pattern), + AtomicToken::Word { text } => Expression::string(*text, *text), AtomicToken::SquareDelimited { .. } => unimplemented!("into_hir"), AtomicToken::ParenDelimited { .. } => unimplemented!("into_hir"), AtomicToken::BraceDelimited { .. } => unimplemented!("into_hir"), @@ -150,6 +151,33 @@ impl<'tokens> TaggedAtomicToken<'tokens> { }) } + pub fn spanned_type_name(&self) -> Spanned<&'static str> { + match &self.item { + AtomicToken::Eof { .. } => "eof", + AtomicToken::Error { .. } => "error", + AtomicToken::Operator { .. } => "operator", + AtomicToken::ShorthandFlag { .. } => "shorthand flag", + AtomicToken::LonghandFlag { .. } => "flag", + AtomicToken::Whitespace { .. } => "whitespace", + AtomicToken::Dot { .. } => "dot", + AtomicToken::Number { .. } => "number", + AtomicToken::FilePath { .. } => "file path", + AtomicToken::Size { .. } => "size", + AtomicToken::String { .. } => "string", + AtomicToken::ItVariable { .. } => "$it", + AtomicToken::Variable { .. } => "variable", + AtomicToken::ExternalCommand { .. } => "external command", + AtomicToken::ExternalWord { .. } => "external word", + AtomicToken::GlobPattern { .. } => "file pattern", + AtomicToken::Word { .. } => "word", + AtomicToken::SquareDelimited { .. } => "array literal", + AtomicToken::ParenDelimited { .. } => "parenthesized expression", + AtomicToken::BraceDelimited { .. } => "block", + AtomicToken::Pipeline { .. } => "pipeline", + } + .spanned(self.span) + } + pub fn tagged_type_name(&self) -> Tagged<&'static str> { match &self.item { AtomicToken::Eof { .. } => "eof", @@ -174,64 +202,64 @@ impl<'tokens> TaggedAtomicToken<'tokens> { AtomicToken::BraceDelimited { .. } => "block", AtomicToken::Pipeline { .. } => "pipeline", } - .tagged(self.tag) + .tagged(self.span) } - pub(crate) fn color_tokens(&self, shapes: &mut Vec>) { + pub(crate) fn color_tokens(&self, shapes: &mut Vec>) { match &self.item { AtomicToken::Eof { .. 
} => {} - AtomicToken::Error { .. } => return shapes.push(FlatShape::Error.tagged(self.tag)), + AtomicToken::Error { .. } => return shapes.push(FlatShape::Error.spanned(self.span)), AtomicToken::Operator { .. } => { - return shapes.push(FlatShape::Operator.tagged(self.tag)); + return shapes.push(FlatShape::Operator.spanned(self.span)); } AtomicToken::ShorthandFlag { .. } => { - return shapes.push(FlatShape::ShorthandFlag.tagged(self.tag)); + return shapes.push(FlatShape::ShorthandFlag.spanned(self.span)); } AtomicToken::LonghandFlag { .. } => { - return shapes.push(FlatShape::Flag.tagged(self.tag)); + return shapes.push(FlatShape::Flag.spanned(self.span)); } AtomicToken::Whitespace { .. } => { - return shapes.push(FlatShape::Whitespace.tagged(self.tag)); + return shapes.push(FlatShape::Whitespace.spanned(self.span)); } - AtomicToken::FilePath { .. } => return shapes.push(FlatShape::Path.tagged(self.tag)), - AtomicToken::Dot { .. } => return shapes.push(FlatShape::Dot.tagged(self.tag)), + AtomicToken::FilePath { .. } => return shapes.push(FlatShape::Path.spanned(self.span)), + AtomicToken::Dot { .. } => return shapes.push(FlatShape::Dot.spanned(self.span)), AtomicToken::Number { number: RawNumber::Decimal(_), } => { - return shapes.push(FlatShape::Decimal.tagged(self.tag)); + return shapes.push(FlatShape::Decimal.spanned(self.span)); } AtomicToken::Number { number: RawNumber::Int(_), } => { - return shapes.push(FlatShape::Int.tagged(self.tag)); + return shapes.push(FlatShape::Int.spanned(self.span)); } AtomicToken::Size { number, unit } => { return shapes.push( FlatShape::Size { - number: number.tag, - unit: unit.tag, + number: number.span, + unit: unit.span, } - .tagged(self.tag), + .spanned(self.span), ); } - AtomicToken::String { .. } => return shapes.push(FlatShape::String.tagged(self.tag)), + AtomicToken::String { .. } => return shapes.push(FlatShape::String.spanned(self.span)), AtomicToken::ItVariable { .. 
} => { - return shapes.push(FlatShape::ItVariable.tagged(self.tag)) + return shapes.push(FlatShape::ItVariable.spanned(self.span)) } AtomicToken::Variable { .. } => { - return shapes.push(FlatShape::Variable.tagged(self.tag)) + return shapes.push(FlatShape::Variable.spanned(self.span)) } AtomicToken::ExternalCommand { .. } => { - return shapes.push(FlatShape::ExternalCommand.tagged(self.tag)); + return shapes.push(FlatShape::ExternalCommand.spanned(self.span)); } AtomicToken::ExternalWord { .. } => { - return shapes.push(FlatShape::ExternalWord.tagged(self.tag)) + return shapes.push(FlatShape::ExternalWord.spanned(self.span)) } AtomicToken::GlobPattern { .. } => { - return shapes.push(FlatShape::GlobPattern.tagged(self.tag)) + return shapes.push(FlatShape::GlobPattern.spanned(self.span)) } - AtomicToken::Word { .. } => return shapes.push(FlatShape::Word.tagged(self.tag)), - _ => return shapes.push(FlatShape::Error.tagged(self.tag)), + AtomicToken::Word { .. } => return shapes.push(FlatShape::Word.spanned(self.span)), + _ => return shapes.push(FlatShape::Error.spanned(self.span)), } } } @@ -350,14 +378,14 @@ pub fn expand_atom<'me, 'content>( expected: &'static str, context: &ExpandContext, rule: ExpansionRule, -) -> Result, ShellError> { +) -> Result, ShellError> { if token_nodes.at_end() { match rule.allow_eof { true => { return Ok(AtomicToken::Eof { - tag: Tag::unknown(), + span: Span::unknown(), } - .tagged_unknown()) + .spanned(Span::unknown())) } false => return Err(ShellError::unexpected_eof("anything", Tag::unknown())), } @@ -376,10 +404,10 @@ pub fn expand_atom<'me, 'content>( Err(_) => {} // But if it was a valid unit, we're done here - Ok(Tagged { + Ok(Spanned { item: (number, unit), - tag, - }) => return Ok(AtomicToken::Size { number, unit }.tagged(tag)), + span, + }) => return Ok(AtomicToken::Size { number, unit }.spanned(span)), }, } @@ -388,7 +416,7 @@ pub fn expand_atom<'me, 'content>( match expand_syntax(&BarePathShape, token_nodes, context) { // If 
we didn't find a bare path Err(_) => {} - Ok(tag) => { + Ok(span) => { let next = token_nodes.peek_any(); match next.node { @@ -397,7 +425,7 @@ pub fn expand_atom<'me, 'content>( // word, and we should try to parse it as a glob next } - _ => return Ok(AtomicToken::Word { text: tag }.tagged(tag)), + _ => return Ok(AtomicToken::Word { text: span }.spanned(span)), } } } @@ -407,7 +435,7 @@ pub fn expand_atom<'me, 'content>( match expand_syntax(&BarePatternShape, token_nodes, context) { // If we didn't find a bare path Err(_) => {} - Ok(tag) => return Ok(AtomicToken::GlobPattern { pattern: tag }.tagged(tag)), + Ok(span) => return Ok(AtomicToken::GlobPattern { pattern: span }.spanned(span)), } // The next token corresponds to at most one atomic token @@ -427,80 +455,84 @@ pub fn expand_atom<'me, 'content>( return Ok(AtomicToken::Error { error: error.clone(), } - .tagged(error.tag)); + .spanned(error.span)); } // [ ... ] - TokenNode::Delimited(Tagged { + TokenNode::Delimited(Spanned { item: DelimitedNode { delimiter: Delimiter::Square, - tags, + spans, children, }, - tag, + span, }) => { peeked.commit(); + let span = *span; return Ok(AtomicToken::SquareDelimited { nodes: children, - tags: *tags, + spans: *spans, } - .tagged(tag)); + .spanned(span)); } - TokenNode::Flag(Tagged { + TokenNode::Flag(Spanned { item: Flag { kind: FlagKind::Shorthand, name, }, - tag, + span, }) => { peeked.commit(); - return Ok(AtomicToken::ShorthandFlag { name: *name }.tagged(tag)); + return Ok(AtomicToken::ShorthandFlag { name: *name }.spanned(*span)); } - TokenNode::Flag(Tagged { + TokenNode::Flag(Spanned { item: Flag { kind: FlagKind::Longhand, name, }, - tag, + span, }) => { peeked.commit(); - return Ok(AtomicToken::ShorthandFlag { name: *name }.tagged(tag)); + return Ok(AtomicToken::ShorthandFlag { name: *name }.spanned(*span)); } // If we see whitespace, process the whitespace according to the whitespace // handling rules - TokenNode::Whitespace(tag) => match rule.whitespace { + 
TokenNode::Whitespace(span) => match rule.whitespace { // if whitespace is allowed, return a whitespace token WhitespaceHandling::AllowWhitespace => { peeked.commit(); - return Ok(AtomicToken::Whitespace { text: *tag }.tagged(tag)); + return Ok(AtomicToken::Whitespace { text: *span }.spanned(*span)); } // if whitespace is disallowed, return an error WhitespaceHandling::RejectWhitespace => { - return Err(ShellError::syntax_error( - "Unexpected whitespace".tagged(tag), - )) + return Err(ShellError::syntax_error("Unexpected whitespace".tagged( + Tag { + span: *span, + anchor: None, + }, + ))) } }, other => { - let tag = peeked.node.tag(); + let span = peeked.node.span(); peeked.commit(); return Ok(AtomicToken::Error { - error: ShellError::type_error("token", other.tagged_type_name()).tagged(tag), + error: ShellError::type_error("token", other.tagged_type_name()).spanned(span), } - .tagged(tag)); + .spanned(span)); } } - parse_single_node(token_nodes, expected, |token, token_tag, err| { + parse_single_node(token_nodes, expected, |token, token_span, err| { Ok(match token { // First, the error cases. 
Each error case corresponds to a expansion rule // flag that can be used to allow the case @@ -511,31 +543,38 @@ pub fn expand_atom<'me, 'content>( RawToken::ExternalCommand(_) if !rule.allow_external_command => { return Err(ShellError::type_error( expected, - token.type_name().tagged(token_tag), + token.type_name().tagged(Tag { + span: token_span, + anchor: None, + }), )) } // rule.allow_external_word RawToken::ExternalWord if !rule.allow_external_word => { - return Err(ShellError::invalid_external_word(token_tag)) + return Err(ShellError::invalid_external_word(Tag { + span: token_span, + anchor: None, + })) } - RawToken::Number(number) => AtomicToken::Number { number }.tagged(token_tag), - RawToken::Operator(_) => AtomicToken::Operator { text: token_tag }.tagged(token_tag), - RawToken::String(body) => AtomicToken::String { body }.tagged(token_tag), + RawToken::Number(number) => AtomicToken::Number { number }.spanned(token_span), + RawToken::Operator(_) => AtomicToken::Operator { text: token_span }.spanned(token_span), + RawToken::String(body) => AtomicToken::String { body }.spanned(token_span), RawToken::Variable(name) if name.slice(context.source) == "it" => { - AtomicToken::ItVariable { name }.tagged(token_tag) + AtomicToken::ItVariable { name }.spanned(token_span) } - RawToken::Variable(name) => AtomicToken::Variable { name }.tagged(token_tag), + RawToken::Variable(name) => AtomicToken::Variable { name }.spanned(token_span), RawToken::ExternalCommand(command) => { - AtomicToken::ExternalCommand { command }.tagged(token_tag) + AtomicToken::ExternalCommand { command }.spanned(token_span) } RawToken::ExternalWord => { - AtomicToken::ExternalWord { text: token_tag }.tagged(token_tag) + AtomicToken::ExternalWord { text: token_span }.spanned(token_span) } - RawToken::GlobPattern => { - AtomicToken::GlobPattern { pattern: token_tag }.tagged(token_tag) + RawToken::GlobPattern => AtomicToken::GlobPattern { + pattern: token_span, } - RawToken::Bare => AtomicToken::Word 
{ text: token_tag }.tagged(token_tag), + .spanned(token_span), + RawToken::Bare => AtomicToken::Word { text: token_span }.spanned(token_span), }) }) } diff --git a/src/parser/hir/syntax_shape/expression/delimited.rs b/src/parser/hir/syntax_shape/expression/delimited.rs index 001e3812f4..b52340ab8f 100644 --- a/src/parser/hir/syntax_shape/expression/delimited.rs +++ b/src/parser/hir/syntax_shape/expression/delimited.rs @@ -6,27 +6,27 @@ use crate::prelude::*; pub fn expand_delimited_square( children: &Vec, - tag: Tag, + span: Span, context: &ExpandContext, ) -> Result { - let mut tokens = TokensIterator::new(&children, tag, false); + let mut tokens = TokensIterator::new(&children, span, false); let list = expand_syntax(&ExpressionListShape, &mut tokens, context); - Ok(hir::Expression::list(list?, tag)) + Ok(hir::Expression::list(list?, Tag { span, anchor: None })) } pub fn color_delimited_square( - (open, close): (Tag, Tag), + (open, close): (Span, Span), children: &Vec, - tag: Tag, + span: Span, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) { - shapes.push(FlatShape::OpenDelimiter(Delimiter::Square).tagged(open)); - let mut tokens = TokensIterator::new(&children, tag, false); + shapes.push(FlatShape::OpenDelimiter(Delimiter::Square).spanned(open)); + let mut tokens = TokensIterator::new(&children, span, false); let _list = color_syntax(&ExpressionListShape, &mut tokens, context, shapes); - shapes.push(FlatShape::CloseDelimiter(Delimiter::Square).tagged(close)); + shapes.push(FlatShape::CloseDelimiter(Delimiter::Square).spanned(close)); } #[derive(Debug, Copy, Clone)] @@ -34,16 +34,16 @@ pub struct DelimitedShape; impl ColorSyntax for DelimitedShape { type Info = (); - type Input = (Delimiter, Tag, Tag); + type Input = (Delimiter, Span, Span); fn color_syntax<'a, 'b>( &self, - (delimiter, open, close): &(Delimiter, Tag, Tag), + (delimiter, open, close): &(Delimiter, Span, Span), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, 
- shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Self::Info { - shapes.push(FlatShape::OpenDelimiter(*delimiter).tagged(open)); + shapes.push(FlatShape::OpenDelimiter(*delimiter).spanned(*open)); color_syntax(&ExpressionListShape, token_nodes, context, shapes); - shapes.push(FlatShape::CloseDelimiter(*delimiter).tagged(close)); + shapes.push(FlatShape::CloseDelimiter(*delimiter).spanned(*close)); } } diff --git a/src/parser/hir/syntax_shape/expression/file_path.rs b/src/parser/hir/syntax_shape/expression/file_path.rs index e73dc8d647..ccb2f8f54b 100644 --- a/src/parser/hir/syntax_shape/expression/file_path.rs +++ b/src/parser/hir/syntax_shape/expression/file_path.rs @@ -17,7 +17,7 @@ impl FallibleColorSyntax for FilePathShape { _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result<(), ShellError> { let atom = expand_atom( token_nodes, @@ -36,7 +36,7 @@ impl FallibleColorSyntax for FilePathShape { | AtomicToken::String { .. } | AtomicToken::Number { .. } | AtomicToken::Size { .. } => { - shapes.push(FlatShape::Path.tagged(atom.tag)); + shapes.push(FlatShape::Path.spanned(atom.span)); } _ => atom.color_tokens(shapes), @@ -57,12 +57,12 @@ impl ExpandExpression for FilePathShape { match atom.item { AtomicToken::Word { text: body } | AtomicToken::String { body } => { let path = expand_file_path(body.slice(context.source), context); - return Ok(hir::Expression::file_path(path, atom.tag)); + return Ok(hir::Expression::file_path(path, atom.span)); } AtomicToken::Number { .. } | AtomicToken::Size { .. 
} => { - let path = atom.tag.slice(context.source); - return Ok(hir::Expression::file_path(path, atom.tag)); + let path = atom.span.slice(context.source); + return Ok(hir::Expression::file_path(path, atom.span)); } _ => return atom.into_hir(context, "file path"), diff --git a/src/parser/hir/syntax_shape/expression/list.rs b/src/parser/hir/syntax_shape/expression/list.rs index 4109108a37..575ae9fcdd 100644 --- a/src/parser/hir/syntax_shape/expression/list.rs +++ b/src/parser/hir/syntax_shape/expression/list.rs @@ -9,7 +9,7 @@ use crate::parser::{ hir::TokensIterator, FlatShape, }; -use crate::Tagged; +use crate::Spanned; #[derive(Debug, Copy, Clone)] pub struct ExpressionListShape; @@ -60,7 +60,7 @@ impl ColorSyntax for ExpressionListShape { _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) { // We encountered a parsing error and will continue with simpler coloring ("backoff // coloring mode") @@ -126,7 +126,7 @@ impl ColorSyntax for BackoffColoringMode { _input: &Self::Input, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Self::Info { loop { if token_nodes.at_end() { @@ -159,7 +159,7 @@ impl ColorSyntax for SimplestExpression { _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) { let atom = expand_atom( token_nodes, diff --git a/src/parser/hir/syntax_shape/expression/number.rs b/src/parser/hir/syntax_shape/expression/number.rs index 8d3cb048c6..a4e2a93234 100644 --- a/src/parser/hir/syntax_shape/expression/number.rs +++ b/src/parser/hir/syntax_shape/expression/number.rs @@ -18,20 +18,27 @@ impl ExpandExpression for NumberShape { token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, ) -> Result { - parse_single_node(token_nodes, "Number", |token, token_tag, err| { + parse_single_node(token_nodes, "Number", |token, token_span, err| { Ok(match token { 
RawToken::GlobPattern | RawToken::Operator(..) => return Err(err.error()), RawToken::Variable(tag) if tag.slice(context.source) == "it" => { - hir::Expression::it_variable(tag, token_tag) + hir::Expression::it_variable(tag, token_span) } - RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag), - RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)), - RawToken::Variable(tag) => hir::Expression::variable(tag, token_tag), + RawToken::ExternalCommand(tag) => { + hir::Expression::external_command(tag, token_span) + } + RawToken::ExternalWord => { + return Err(ShellError::invalid_external_word(Tag { + span: token_span, + anchor: None, + })) + } + RawToken::Variable(tag) => hir::Expression::variable(tag, token_span), RawToken::Number(number) => { - hir::Expression::number(number.to_number(context.source), token_tag) + hir::Expression::number(number.to_number(context.source), token_span) } - RawToken::Bare => hir::Expression::bare(token_tag), - RawToken::String(tag) => hir::Expression::string(tag, token_tag), + RawToken::Bare => hir::Expression::bare(token_span), + RawToken::String(tag) => hir::Expression::string(tag, token_span), }) }) } @@ -46,18 +53,18 @@ impl FallibleColorSyntax for NumberShape { _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result<(), ShellError> { let atom = token_nodes.spanned(|token_nodes| { expand_atom(token_nodes, "number", context, ExpansionRule::permissive()) }); let atom = match atom { - Tagged { item: Err(_), tag } => { - shapes.push(FlatShape::Error.tagged(tag)); + Spanned { item: Err(_), span } => { + shapes.push(FlatShape::Error.spanned(span)); return Ok(()); } - Tagged { item: Ok(atom), .. } => atom, + Spanned { item: Ok(atom), .. 
} => atom, }; atom.color_tokens(shapes); @@ -75,21 +82,25 @@ impl ExpandExpression for IntShape { token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, ) -> Result { - parse_single_node(token_nodes, "Integer", |token, token_tag, err| { + parse_single_node(token_nodes, "Integer", |token, token_span, err| { Ok(match token { RawToken::GlobPattern | RawToken::Operator(..) => return Err(err.error()), - RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)), - RawToken::Variable(tag) if tag.slice(context.source) == "it" => { - hir::Expression::it_variable(tag, token_tag) + RawToken::ExternalWord => { + return Err(ShellError::invalid_external_word(token_span)) } - RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag), - RawToken::Variable(tag) => hir::Expression::variable(tag, token_tag), + RawToken::Variable(span) if span.slice(context.source) == "it" => { + hir::Expression::it_variable(span, token_span) + } + RawToken::ExternalCommand(span) => { + hir::Expression::external_command(span, token_span) + } + RawToken::Variable(span) => hir::Expression::variable(span, token_span), RawToken::Number(number @ RawNumber::Int(_)) => { - hir::Expression::number(number.to_number(context.source), token_tag) + hir::Expression::number(number.to_number(context.source), token_span) } RawToken::Number(_) => return Err(err.error()), - RawToken::Bare => hir::Expression::bare(token_tag), - RawToken::String(tag) => hir::Expression::string(tag, token_tag), + RawToken::Bare => hir::Expression::bare(token_span), + RawToken::String(span) => hir::Expression::string(span, token_span), }) }) } @@ -104,18 +115,18 @@ impl FallibleColorSyntax for IntShape { _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result<(), ShellError> { let atom = token_nodes.spanned(|token_nodes| { expand_atom(token_nodes, "integer", context, ExpansionRule::permissive()) }); let 
atom = match atom { - Tagged { item: Err(_), tag } => { - shapes.push(FlatShape::Error.tagged(tag)); + Spanned { item: Err(_), span } => { + shapes.push(FlatShape::Error.spanned(span)); return Ok(()); } - Tagged { item: Ok(atom), .. } => atom, + Spanned { item: Ok(atom), .. } => atom, }; atom.color_tokens(shapes); diff --git a/src/parser/hir/syntax_shape/expression/pattern.rs b/src/parser/hir/syntax_shape/expression/pattern.rs index 5c863de728..0a11552d5e 100644 --- a/src/parser/hir/syntax_shape/expression/pattern.rs +++ b/src/parser/hir/syntax_shape/expression/pattern.rs @@ -18,14 +18,14 @@ impl FallibleColorSyntax for PatternShape { _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result<(), ShellError> { token_nodes.atomic(|token_nodes| { let atom = expand_atom(token_nodes, "pattern", context, ExpansionRule::permissive())?; match &atom.item { AtomicToken::GlobPattern { .. } | AtomicToken::Word { .. } => { - shapes.push(FlatShape::GlobPattern.tagged(atom.tag)); + shapes.push(FlatShape::GlobPattern.spanned(atom.span)); Ok(()) } @@ -85,23 +85,23 @@ impl ExpandExpression for PatternShape { pub struct BarePatternShape; impl ExpandSyntax for BarePatternShape { - type Output = Tag; + type Output = Span; fn expand_syntax<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - ) -> Result { + ) -> Result { expand_bare(token_nodes, context, |token| match token { - TokenNode::Token(Tagged { + TokenNode::Token(Spanned { item: RawToken::Bare, .. }) - | TokenNode::Token(Tagged { + | TokenNode::Token(Spanned { item: RawToken::Operator(Operator::Dot), .. }) - | TokenNode::Token(Tagged { + | TokenNode::Token(Spanned { item: RawToken::GlobPattern, .. 
}) => true, diff --git a/src/parser/hir/syntax_shape/expression/string.rs b/src/parser/hir/syntax_shape/expression/string.rs index 6f33ae5eb1..0dabd70a85 100644 --- a/src/parser/hir/syntax_shape/expression/string.rs +++ b/src/parser/hir/syntax_shape/expression/string.rs @@ -18,7 +18,7 @@ impl FallibleColorSyntax for StringShape { input: &FlatShape, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result<(), ShellError> { let atom = expand_atom(token_nodes, "string", context, ExpansionRule::permissive()); @@ -28,10 +28,10 @@ impl FallibleColorSyntax for StringShape { }; match atom { - Tagged { + Spanned { item: AtomicToken::String { .. }, - tag, - } => shapes.push((*input).tagged(tag)), + span, + } => shapes.push((*input).spanned(span)), other => other.color_tokens(shapes), } @@ -45,26 +45,30 @@ impl ExpandExpression for StringShape { token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, ) -> Result { - parse_single_node(token_nodes, "String", |token, token_tag, _| { + parse_single_node(token_nodes, "String", |token, token_span, _| { Ok(match token { RawToken::GlobPattern => { return Err(ShellError::type_error( "String", - "glob pattern".tagged(token_tag), + "glob pattern".tagged(token_span), )) } RawToken::Operator(..) 
=> { return Err(ShellError::type_error( "String", - "operator".tagged(token_tag), + "operator".tagged(token_span), )) } - RawToken::Variable(tag) => expand_variable(tag, token_tag, &context.source), - RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag), - RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)), - RawToken::Number(_) => hir::Expression::bare(token_tag), - RawToken::Bare => hir::Expression::bare(token_tag), - RawToken::String(tag) => hir::Expression::string(tag, token_tag), + RawToken::Variable(span) => expand_variable(span, token_span, &context.source), + RawToken::ExternalCommand(span) => { + hir::Expression::external_command(span, token_span) + } + RawToken::ExternalWord => { + return Err(ShellError::invalid_external_word(token_span)) + } + RawToken::Number(_) => hir::Expression::bare(token_span), + RawToken::Bare => hir::Expression::bare(token_span), + RawToken::String(span) => hir::Expression::string(span, token_span), }) }) } diff --git a/src/parser/hir/syntax_shape/expression/unit.rs b/src/parser/hir/syntax_shape/expression/unit.rs index 65fca1a468..03602f1088 100644 --- a/src/parser/hir/syntax_shape/expression/unit.rs +++ b/src/parser/hir/syntax_shape/expression/unit.rs @@ -14,24 +14,24 @@ use nom::IResult; pub struct UnitShape; impl ExpandSyntax for UnitShape { - type Output = Tagged<(Tagged, Tagged)>; + type Output = Spanned<(Spanned, Spanned)>; fn expand_syntax<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - ) -> Result, Tagged)>, ShellError> { + ) -> Result, Spanned)>, ShellError> { let peeked = token_nodes.peek_any().not_eof("unit")?; - let tag = match peeked.node { - TokenNode::Token(Tagged { + let span = match peeked.node { + TokenNode::Token(Spanned { item: RawToken::Bare, - tag, - }) => tag, + span, + }) => span, _ => return Err(peeked.type_error("unit")), }; - let unit = unit_size(tag.slice(context.source), *tag); + let unit = 
unit_size(span.slice(context.source), *span); let (_, (number, unit)) = match unit { Err(_) => { @@ -44,11 +44,11 @@ impl ExpandSyntax for UnitShape { }; peeked.commit(); - Ok((number, unit).tagged(tag)) + Ok((number, unit).spanned(*span)) } } -fn unit_size(input: &str, bare_tag: Tag) -> IResult<&str, (Tagged, Tagged)> { +fn unit_size(input: &str, bare_span: Span) -> IResult<&str, (Spanned, Spanned)> { let (input, digits) = digit1(input)?; let (input, dot) = opt(tag("."))(input)?; @@ -58,20 +58,18 @@ fn unit_size(input: &str, bare_tag: Tag) -> IResult<&str, (Tagged, Ta let (input, rest) = digit1(input)?; ( input, - RawNumber::decimal(( - bare_tag.span.start(), - bare_tag.span.start() + digits.len() + dot.len() + rest.len(), - bare_tag.anchor, + RawNumber::decimal(Span::new( + bare_span.start(), + bare_span.start() + digits.len() + dot.len() + rest.len(), )), ) } None => ( input, - RawNumber::int(( - bare_tag.span.start(), - bare_tag.span.start() + digits.len(), - bare_tag.anchor, + RawNumber::int(Span::new( + bare_span.start(), + bare_span.start() + digits.len(), )), ), }; @@ -85,12 +83,10 @@ fn unit_size(input: &str, bare_tag: Tag) -> IResult<&str, (Tagged, Ta value(Unit::MB, alt((tag("PB"), tag("pb"), tag("Pb")))), )))(input)?; - let start_span = number.tag.span.end(); + let start_span = number.span.end(); - let unit_tag = Tag::new( - bare_tag.anchor, - Span::from((start_span, bare_tag.span.end())), - ); - - Ok((input, (number, unit.tagged(unit_tag)))) + Ok(( + input, + (number, unit.spanned(Span::new(start_span, bare_span.end()))), + )) } diff --git a/src/parser/hir/syntax_shape/expression/variable_path.rs b/src/parser/hir/syntax_shape/expression/variable_path.rs index a7f17a5971..04b511d89a 100644 --- a/src/parser/hir/syntax_shape/expression/variable_path.rs +++ b/src/parser/hir/syntax_shape/expression/variable_path.rs @@ -23,9 +23,9 @@ impl ExpandExpression for VariablePathShape { // 2. 
consume the next token as a member and push it onto tail let head = expand_expr(&VariableShape, token_nodes, context)?; - let start = head.tag(); + let start = head.span; let mut end = start; - let mut tail: Vec> = vec![]; + let mut tail: Vec> = vec![]; loop { match DotShape.skip(token_nodes, context) { @@ -34,9 +34,9 @@ impl ExpandExpression for VariablePathShape { } let syntax = expand_syntax(&MemberShape, token_nodes, context)?; - let member = syntax.to_tagged_string(context.source); + let member = syntax.to_spanned_string(context.source); - end = member.tag(); + end = member.span; tail.push(member); } @@ -53,7 +53,7 @@ impl FallibleColorSyntax for VariablePathShape { _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result<(), ShellError> { token_nodes.atomic(|token_nodes| { // If the head of the token stream is not a variable, fail @@ -97,7 +97,7 @@ impl FallibleColorSyntax for PathTailShape { _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result<(), ShellError> { token_nodes.atomic(|token_nodes| loop { let result = color_fallible_syntax_with( @@ -120,13 +120,13 @@ impl FallibleColorSyntax for PathTailShape { } impl ExpandSyntax for PathTailShape { - type Output = (Vec>, Tag); + type Output = (Vec>, Span); fn expand_syntax<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, ) -> Result { - let mut end: Option = None; + let mut end: Option = None; let mut tail = vec![]; loop { @@ -136,17 +136,21 @@ impl ExpandSyntax for PathTailShape { } let syntax = expand_syntax(&MemberShape, token_nodes, context)?; - let member = syntax.to_tagged_string(context.source); - end = Some(member.tag()); + let member = syntax.to_spanned_string(context.source); + end = Some(member.span); tail.push(member); } match end { None => { - return Err(ShellError::type_error( - "path tail", - 
token_nodes.typed_tag_at_cursor(), - )) + return Err(ShellError::type_error("path tail", { + let typed_span = token_nodes.typed_span_at_cursor(); + + Tagged { + tag: typed_span.span.into(), + item: typed_span.item, + } + })) } Some(end) => Ok((tail, end)), @@ -156,8 +160,8 @@ impl ExpandSyntax for PathTailShape { #[derive(Debug)] pub enum ExpressionContinuation { - DotSuffix(Tag, Tagged), - InfixSuffix(Tagged, Expression), + DotSuffix(Span, Spanned), + InfixSuffix(Spanned, Expression), } /// An expression continuation @@ -179,7 +183,7 @@ impl ExpandSyntax for ExpressionContinuationShape { // If a `.` was matched, it's a `Path`, and we expect a `Member` next Ok(dot) => { let syntax = expand_syntax(&MemberShape, token_nodes, context)?; - let member = syntax.to_tagged_string(context.source); + let member = syntax.to_spanned_string(context.source); Ok(ExpressionContinuation::DotSuffix(dot, member)) } @@ -209,7 +213,7 @@ impl FallibleColorSyntax for ExpressionContinuationShape { _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result { token_nodes.atomic(|token_nodes| { // Try to expand a `.` @@ -290,7 +294,7 @@ impl FallibleColorSyntax for VariableShape { _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result<(), ShellError> { let atom = expand_atom( token_nodes, @@ -306,11 +310,11 @@ impl FallibleColorSyntax for VariableShape { match &atom.item { AtomicToken::Variable { .. } => { - shapes.push(FlatShape::Variable.tagged(atom.tag)); + shapes.push(FlatShape::Variable.spanned(atom.span)); Ok(()) } AtomicToken::ItVariable { .. 
} => { - shapes.push(FlatShape::ItVariable.tagged(atom.tag)); + shapes.push(FlatShape::ItVariable.spanned(atom.span)); Ok(()) } _ => Err(ShellError::type_error("variable", atom.tagged_type_name())), @@ -320,50 +324,53 @@ impl FallibleColorSyntax for VariableShape { #[derive(Debug, Clone, Copy)] pub enum Member { - String(/* outer */ Tag, /* inner */ Tag), - Bare(Tag), + String(/* outer */ Span, /* inner */ Span), + Bare(Span), } impl Member { pub(crate) fn to_expr(&self) -> hir::Expression { match self { - Member::String(outer, inner) => hir::Expression::string(inner, outer), - Member::Bare(tag) => hir::Expression::string(tag, tag), + Member::String(outer, inner) => hir::Expression::string(*inner, *outer), + Member::Bare(span) => hir::Expression::string(*span, *span), } } - pub(crate) fn tag(&self) -> Tag { + pub(crate) fn span(&self) -> Span { match self { Member::String(outer, _inner) => *outer, - Member::Bare(tag) => *tag, + Member::Bare(span) => *span, } } - pub(crate) fn to_tagged_string(&self, source: &str) -> Tagged { + pub(crate) fn to_spanned_string(&self, source: &str) -> Spanned { match self { - Member::String(outer, inner) => inner.string(source).tagged(outer), - Member::Bare(tag) => tag.tagged_string(source), + Member::String(outer, inner) => inner.string(source).spanned(*outer), + Member::Bare(span) => span.spanned_string(source), } } pub(crate) fn tagged_type_name(&self) -> Tagged<&'static str> { match self { Member::String(outer, _inner) => "string".tagged(outer), - Member::Bare(tag) => "word".tagged(tag), + Member::Bare(span) => "word".tagged(Tag { + span: *span, + anchor: None, + }), } } } enum ColumnPathState { Initial, - LeadingDot(Tag), - Dot(Tag, Vec, Tag), - Member(Tag, Vec), + LeadingDot(Span), + Dot(Span, Vec, Span), + Member(Span, Vec), Error(ShellError), } impl ColumnPathState { - pub fn dot(self, dot: Tag) -> ColumnPathState { + pub fn dot(self, dot: Span) -> ColumnPathState { match self { ColumnPathState::Initial => 
ColumnPathState::LeadingDot(dot), ColumnPathState::LeadingDot(_) => { @@ -379,13 +386,13 @@ impl ColumnPathState { pub fn member(self, member: Member) -> ColumnPathState { match self { - ColumnPathState::Initial => ColumnPathState::Member(member.tag(), vec![member]), + ColumnPathState::Initial => ColumnPathState::Member(member.span(), vec![member]), ColumnPathState::LeadingDot(tag) => { - ColumnPathState::Member(tag.until(member.tag()), vec![member]) + ColumnPathState::Member(tag.until(member.span()), vec![member]) } ColumnPathState::Dot(tag, mut tags, _) => { - ColumnPathState::Member(tag.until(member.tag()), { + ColumnPathState::Member(tag.until(member.span()), { tags.push(member); tags }) @@ -449,7 +456,7 @@ impl FallibleColorSyntax for ColumnPathShape { _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result<(), ShellError> { // If there's not even one member shape, fail color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?; @@ -513,7 +520,7 @@ impl FallibleColorSyntax for MemberShape { _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result<(), ShellError> { let bare = color_fallible_syntax_with( &BareShape, @@ -552,7 +559,7 @@ impl ExpandSyntax for MemberShape { let bare = BareShape.test(token_nodes, context); if let Some(peeked) = bare { let node = peeked.not_eof("column")?.commit(); - return Ok(Member::Bare(node.tag())); + return Ok(Member::Bare(node.span())); } let string = StringShape.test(token_nodes, context); @@ -583,14 +590,14 @@ impl FallibleColorSyntax for ColorableDotShape { input: &FlatShape, token_nodes: &'b mut TokensIterator<'a>, _context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Result<(), ShellError> { let peeked = token_nodes.peek_any().not_eof("dot")?; match peeked.node { node if node.is_dot() => { peeked.commit(); - 
shapes.push((*input).tagged(node.tag())); + shapes.push((*input).spanned(node.span())); Ok(()) } @@ -612,20 +619,20 @@ impl SkipSyntax for DotShape { } impl ExpandSyntax for DotShape { - type Output = Tag; + type Output = Span; fn expand_syntax<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, _context: &ExpandContext, ) -> Result { - parse_single_node(token_nodes, "dot", |token, token_tag, _| { + parse_single_node(token_nodes, "dot", |token, token_span, _| { Ok(match token { - RawToken::Operator(Operator::Dot) => token_tag, + RawToken::Operator(Operator::Dot) => token_span, _ => { return Err(ShellError::type_error( "dot", - token.type_name().tagged(token_tag), + token.type_name().tagged(token_span), )) } }) @@ -645,7 +652,7 @@ impl FallibleColorSyntax for InfixShape { _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - outer_shapes: &mut Vec>, + outer_shapes: &mut Vec>, ) -> Result<(), ShellError> { let checkpoint = token_nodes.checkpoint(); let mut shapes = vec![]; @@ -657,18 +664,18 @@ impl FallibleColorSyntax for InfixShape { parse_single_node( checkpoint.iterator, "infix operator", - |token, token_tag, _| { + |token, token_span, _| { match token { // If it's an operator (and not `.`), it's a match RawToken::Operator(operator) if operator != Operator::Dot => { - shapes.push(FlatShape::Operator.tagged(token_tag)); + shapes.push(FlatShape::Operator.spanned(token_span)); Ok(()) } // Otherwise, it's not a match _ => Err(ShellError::type_error( "infix operator", - token.type_name().tagged(token_tag), + token.type_name().tagged(token_span), )), } }, @@ -684,7 +691,7 @@ impl FallibleColorSyntax for InfixShape { } impl ExpandSyntax for InfixShape { - type Output = (Tag, Tagged, Tag); + type Output = (Span, Spanned, Span); fn expand_syntax<'a, 'b>( &self, @@ -700,18 +707,18 @@ impl ExpandSyntax for InfixShape { let operator = parse_single_node( checkpoint.iterator, "infix operator", - |token, token_tag, _| { + |token, token_span, _| { 
Ok(match token { // If it's an operator (and not `.`), it's a match RawToken::Operator(operator) if operator != Operator::Dot => { - operator.tagged(token_tag) + operator.spanned(token_span) } // Otherwise, it's not a match _ => { return Err(ShellError::type_error( "infix operator", - token.type_name().tagged(token_tag), + token.type_name().tagged(token_span), )) } }) diff --git a/src/parser/hir/syntax_shape/flat_shape.rs b/src/parser/hir/syntax_shape/flat_shape.rs index 48e867199e..b961d1f567 100644 --- a/src/parser/hir/syntax_shape/flat_shape.rs +++ b/src/parser/hir/syntax_shape/flat_shape.rs @@ -1,5 +1,5 @@ use crate::parser::{Delimiter, Flag, FlagKind, Operator, RawNumber, RawToken, TokenNode}; -use crate::{Tag, Tagged, TaggedItem, Text}; +use crate::{Span, Spanned, SpannedItem, Text}; #[derive(Debug, Copy, Clone)] pub enum FlatShape { @@ -25,32 +25,34 @@ pub enum FlatShape { Decimal, Whitespace, Error, - Size { number: Tag, unit: Tag }, + Size { number: Span, unit: Span }, } impl FlatShape { - pub fn from(token: &TokenNode, source: &Text, shapes: &mut Vec>) -> () { + pub fn from(token: &TokenNode, source: &Text, shapes: &mut Vec>) -> () { match token { TokenNode::Token(token) => match token.item { RawToken::Number(RawNumber::Int(_)) => { - shapes.push(FlatShape::Int.tagged(token.tag)) + shapes.push(FlatShape::Int.spanned(token.span)) } RawToken::Number(RawNumber::Decimal(_)) => { - shapes.push(FlatShape::Decimal.tagged(token.tag)) + shapes.push(FlatShape::Decimal.spanned(token.span)) } - RawToken::Operator(Operator::Dot) => shapes.push(FlatShape::Dot.tagged(token.tag)), - RawToken::Operator(_) => shapes.push(FlatShape::Operator.tagged(token.tag)), - RawToken::String(_) => shapes.push(FlatShape::String.tagged(token.tag)), + RawToken::Operator(Operator::Dot) => { + shapes.push(FlatShape::Dot.spanned(token.span)) + } + RawToken::Operator(_) => shapes.push(FlatShape::Operator.spanned(token.span)), + RawToken::String(_) => 
shapes.push(FlatShape::String.spanned(token.span)), RawToken::Variable(v) if v.slice(source) == "it" => { - shapes.push(FlatShape::ItVariable.tagged(token.tag)) + shapes.push(FlatShape::ItVariable.spanned(token.span)) } - RawToken::Variable(_) => shapes.push(FlatShape::Variable.tagged(token.tag)), + RawToken::Variable(_) => shapes.push(FlatShape::Variable.spanned(token.span)), RawToken::ExternalCommand(_) => { - shapes.push(FlatShape::ExternalCommand.tagged(token.tag)) + shapes.push(FlatShape::ExternalCommand.spanned(token.span)) } - RawToken::ExternalWord => shapes.push(FlatShape::ExternalWord.tagged(token.tag)), - RawToken::GlobPattern => shapes.push(FlatShape::GlobPattern.tagged(token.tag)), - RawToken::Bare => shapes.push(FlatShape::Word.tagged(token.tag)), + RawToken::ExternalWord => shapes.push(FlatShape::ExternalWord.spanned(token.span)), + RawToken::GlobPattern => shapes.push(FlatShape::GlobPattern.spanned(token.span)), + RawToken::Bare => shapes.push(FlatShape::Word.spanned(token.span)), }, TokenNode::Call(_) => unimplemented!(), TokenNode::Nodes(nodes) => { @@ -59,37 +61,37 @@ impl FlatShape { } } TokenNode::Delimited(v) => { - shapes.push(FlatShape::OpenDelimiter(v.item.delimiter).tagged(v.item.tags.0)); + shapes.push(FlatShape::OpenDelimiter(v.item.delimiter).spanned(v.item.spans.0)); for token in &v.item.children { FlatShape::from(token, source, shapes); } - shapes.push(FlatShape::CloseDelimiter(v.item.delimiter).tagged(v.item.tags.1)); + shapes.push(FlatShape::CloseDelimiter(v.item.delimiter).spanned(v.item.spans.1)); } TokenNode::Pipeline(pipeline) => { for part in &pipeline.parts { if let Some(_) = part.pipe { - shapes.push(FlatShape::Pipe.tagged(part.tag)); + shapes.push(FlatShape::Pipe.spanned(part.span)); } } } - TokenNode::Flag(Tagged { + TokenNode::Flag(Spanned { item: Flag { kind: FlagKind::Longhand, .. 
}, - tag, - }) => shapes.push(FlatShape::Flag.tagged(tag)), - TokenNode::Flag(Tagged { + span, + }) => shapes.push(FlatShape::Flag.spanned(*span)), + TokenNode::Flag(Spanned { item: Flag { kind: FlagKind::Shorthand, .. }, - tag, - }) => shapes.push(FlatShape::ShorthandFlag.tagged(tag)), - TokenNode::Whitespace(_) => shapes.push(FlatShape::Whitespace.tagged(token.tag())), - TokenNode::Error(v) => shapes.push(FlatShape::Error.tagged(v.tag)), + span, + }) => shapes.push(FlatShape::ShorthandFlag.spanned(*span)), + TokenNode::Whitespace(_) => shapes.push(FlatShape::Whitespace.spanned(token.span())), + TokenNode::Error(v) => shapes.push(FlatShape::Error.spanned(v.span)), } } } diff --git a/src/parser/hir/tokens_iterator.rs b/src/parser/hir/tokens_iterator.rs index f597c850bd..dbcf5e6c4c 100644 --- a/src/parser/hir/tokens_iterator.rs +++ b/src/parser/hir/tokens_iterator.rs @@ -2,12 +2,12 @@ pub(crate) mod debug; use crate::errors::ShellError; use crate::parser::TokenNode; -use crate::{Tag, Tagged, TaggedItem}; +use crate::{Span, Spanned, SpannedItem}; #[derive(Debug)] pub struct TokensIterator<'content> { tokens: &'content [TokenNode], - tag: Tag, + span: Span, skip_ws: bool, index: usize, seen: indexmap::IndexSet, @@ -65,7 +65,7 @@ impl<'content, 'me> Peeked<'content, 'me> { match self.node { None => Err(ShellError::unexpected_eof( expected, - self.iterator.eof_tag(), + self.iterator.eof_span(), )), Some(node) => Ok(PeekedNode { node, @@ -77,7 +77,7 @@ impl<'content, 'me> Peeked<'content, 'me> { } pub fn type_error(&self, expected: impl Into) -> ShellError { - peek_error(&self.node, self.iterator.eof_tag(), expected) + peek_error(&self.node, self.iterator.eof_span(), expected) } } @@ -105,38 +105,38 @@ impl<'content, 'me> PeekedNode<'content, 'me> { pub fn rollback(self) {} pub fn type_error(&self, expected: impl Into) -> ShellError { - peek_error(&Some(self.node), self.iterator.eof_tag(), expected) + peek_error(&Some(self.node), self.iterator.eof_span(), expected) } } 
pub fn peek_error( node: &Option<&TokenNode>, - eof_tag: Tag, + eof_span: Span, expected: impl Into, ) -> ShellError { match node { - None => ShellError::unexpected_eof(expected, eof_tag), + None => ShellError::unexpected_eof(expected, eof_span), Some(node) => ShellError::type_error(expected, node.tagged_type_name()), } } impl<'content> TokensIterator<'content> { - pub fn new(items: &'content [TokenNode], tag: Tag, skip_ws: bool) -> TokensIterator<'content> { + pub fn new( + items: &'content [TokenNode], + span: Span, + skip_ws: bool, + ) -> TokensIterator<'content> { TokensIterator { tokens: items, - tag, + span, skip_ws, index: 0, seen: indexmap::IndexSet::new(), } } - pub fn anchor(&self) -> uuid::Uuid { - self.tag.anchor - } - - pub fn all(tokens: &'content [TokenNode], tag: Tag) -> TokensIterator<'content> { - TokensIterator::new(tokens, tag, false) + pub fn all(tokens: &'content [TokenNode], span: Span) -> TokensIterator<'content> { + TokensIterator::new(tokens, span, false) } pub fn len(&self) -> usize { @@ -146,14 +146,14 @@ impl<'content> TokensIterator<'content> { pub fn spanned( &mut self, block: impl FnOnce(&mut TokensIterator<'content>) -> T, - ) -> Tagged { - let start = self.tag_at_cursor(); + ) -> Spanned { + let start = self.span_at_cursor(); let result = block(self); - let end = self.tag_at_cursor(); + let end = self.span_at_cursor(); - result.tagged(start.until(end)) + result.spanned(start.until(end)) } /// Use a checkpoint when you need to peek more than one token ahead, but can't be sure @@ -192,25 +192,25 @@ impl<'content> TokensIterator<'content> { return Ok(value); } - fn eof_tag(&self) -> Tag { - Tag::from((self.tag.span.end(), self.tag.span.end(), self.tag.anchor)) + fn eof_span(&self) -> Span { + Span::new(self.span.end(), self.span.end()) } - pub fn typed_tag_at_cursor(&mut self) -> Tagged<&'static str> { + pub fn typed_span_at_cursor(&mut self) -> Spanned<&'static str> { let next = self.peek_any(); match next.node { - None => 
"end".tagged(self.eof_tag()), - Some(node) => node.tagged_type_name(), + None => "end".spanned(self.eof_span()), + Some(node) => node.spanned_type_name(), } } - pub fn tag_at_cursor(&mut self) -> Tag { + pub fn span_at_cursor(&mut self) -> Span { let next = self.peek_any(); match next.node { - None => self.eof_tag(), - Some(node) => node.tag(), + None => self.eof_span(), + Some(node) => node.span(), } } @@ -262,7 +262,7 @@ impl<'content> TokensIterator<'content> { pub fn clone(&self) -> TokensIterator<'content> { TokensIterator { tokens: self.tokens, - tag: self.tag, + span: self.span, index: self.index, seen: self.seen.clone(), skip_ws: self.skip_ws, diff --git a/src/parser/parse/files.rs b/src/parser/parse/files.rs index 3c28237f5d..8a2d3c90eb 100644 --- a/src/parser/parse/files.rs +++ b/src/parser/parse/files.rs @@ -1,8 +1,7 @@ -use crate::Tag; +use crate::Span; use derive_new::new; use language_reporting::{FileName, Location}; use log::trace; -use uuid::Uuid; #[derive(new, Debug, Clone)] pub struct Files { @@ -10,20 +9,20 @@ pub struct Files { } impl language_reporting::ReportingFiles for Files { - type Span = Tag; - type FileId = Uuid; + type Span = Span; + type FileId = usize; fn byte_span( &self, - file: Self::FileId, + _file: Self::FileId, from_index: usize, to_index: usize, ) -> Option { - Some(Tag::new(file, (from_index, to_index).into())) + Some(Span::new(from_index, to_index)) } - fn file_id(&self, tag: Self::Span) -> Self::FileId { - tag.anchor + fn file_id(&self, _tag: Self::Span) -> Self::FileId { + 0 } fn file_name(&self, _file: Self::FileId) -> FileName { @@ -68,14 +67,14 @@ impl language_reporting::ReportingFiles for Files { } } - fn line_span(&self, file: Self::FileId, lineno: usize) -> Option { + fn line_span(&self, _file: Self::FileId, lineno: usize) -> Option { let source = &self.snippet; let mut seen_lines = 0; let mut seen_bytes = 0; for (pos, _) in source.match_indices('\n') { if seen_lines == lineno { - return Some(Tag::new(file, 
(seen_bytes, pos + 1).into())); + return Some(Span::new(seen_bytes, pos + 1)); } else { seen_lines += 1; seen_bytes = pos + 1; @@ -83,20 +82,20 @@ impl language_reporting::ReportingFiles for Files { } if seen_lines == 0 { - Some(Tag::new(file, (0, self.snippet.len() - 1).into())) + Some(Span::new(0, self.snippet.len() - 1)) } else { None } } - fn source(&self, tag: Self::Span) -> Option { - trace!("source(tag={:?}) snippet={:?}", tag, self.snippet); + fn source(&self, span: Self::Span) -> Option { + trace!("source(tag={:?}) snippet={:?}", span, self.snippet); - if tag.span.start() > tag.span.end() { + if span.start() > span.end() { return None; - } else if tag.span.end() > self.snippet.len() { + } else if span.end() > self.snippet.len() { return None; } - Some(tag.slice(&self.snippet).to_string()) + Some(span.slice(&self.snippet).to_string()) } } diff --git a/src/parser/parse/flag.rs b/src/parser/parse/flag.rs index b8995305d2..28b6749f1c 100644 --- a/src/parser/parse/flag.rs +++ b/src/parser/parse/flag.rs @@ -1,5 +1,5 @@ use crate::parser::hir::syntax_shape::flat_shape::FlatShape; -use crate::{Tag, Tagged, TaggedItem}; +use crate::{Span, Spanned, SpannedItem}; use derive_new::new; use getset::Getters; use serde::{Deserialize, Serialize}; @@ -14,14 +14,14 @@ pub enum FlagKind { #[get = "pub(crate)"] pub struct Flag { pub(crate) kind: FlagKind, - pub(crate) name: Tag, + pub(crate) name: Span, } -impl Tagged { - pub fn color(&self) -> Tagged { +impl Spanned { + pub fn color(&self) -> Spanned { match self.item.kind { - FlagKind::Longhand => FlatShape::Flag.tagged(self.tag), - FlagKind::Shorthand => FlatShape::ShorthandFlag.tagged(self.tag), + FlagKind::Longhand => FlatShape::Flag.spanned(self.span), + FlagKind::Shorthand => FlatShape::ShorthandFlag.spanned(self.span), } } } diff --git a/src/parser/parse/parser.rs b/src/parser/parse/parser.rs index 73833f7be5..793f7b6cef 100644 --- a/src/parser/parse/parser.rs +++ b/src/parser/parse/parser.rs @@ -24,13 +24,11 @@ use 
nom_tracable::{tracable_parser, HasTracableInfo, TracableInfo}; use serde::{Deserialize, Serialize}; use std::fmt::Debug; use std::str::FromStr; -use uuid::Uuid; pub type NomSpan<'a> = LocatedSpanEx<&'a str, TracableContext>; #[derive(Debug, Clone, Copy, PartialEq, new)] pub struct TracableContext { - pub(crate) origin: Uuid, pub(crate) info: TracableInfo, } @@ -40,10 +38,7 @@ impl HasTracableInfo for TracableContext { } fn set_tracable_info(mut self, info: TracableInfo) -> Self { - TracableContext { - origin: self.origin, - info, - } + TracableContext { info } } } @@ -55,8 +50,8 @@ impl std::ops::Deref for TracableContext { } } -pub fn nom_input(s: &str, anchor: Uuid) -> NomSpan<'_> { - LocatedSpanEx::new_extra(s, TracableContext::new(anchor, TracableInfo::new())) +pub fn nom_input(s: &str) -> NomSpan<'_> { + LocatedSpanEx::new_extra(s, TracableContext::new(TracableInfo::new())) } macro_rules! operator { @@ -69,7 +64,7 @@ macro_rules! operator { Ok(( input, - TokenTreeBuilder::tagged_op(tag.fragment, (start, end, input.extra)), + TokenTreeBuilder::spanned_op(tag.fragment, Span::new(start, end)), )) } }; @@ -175,22 +170,22 @@ pub fn number(input: NomSpan) -> IResult { Ok(( input, - TokenTreeBuilder::tagged_number(number.item, number.tag), + TokenTreeBuilder::spanned_number(number.item, number.span), )) } #[tracable_parser] -pub fn raw_number(input: NomSpan) -> IResult> { +pub fn raw_number(input: NomSpan) -> IResult> { let anchoral = input; let start = input.offset; let (input, neg) = opt(tag("-"))(input)?; let (input, head) = digit1(input)?; match input.fragment.chars().next() { - None => return Ok((input, RawNumber::int((start, input.offset, input.extra)))), + None => return Ok((input, RawNumber::int(Span::new(start, input.offset)))), Some('.') => (), other if is_boundary(other) => { - return Ok((input, RawNumber::int((start, input.offset, input.extra)))) + return Ok((input, RawNumber::int(Span::new(start, input.offset)))) } _ => { return 
Err(nom::Err::Error(nom::error::make_error( @@ -206,7 +201,7 @@ pub fn raw_number(input: NomSpan) -> IResult> { Ok((input, dot)) => input, // it's just an integer - Err(_) => return Ok((input, RawNumber::int((start, input.offset, input.extra)))), + Err(_) => return Ok((input, RawNumber::int(Span::new(start, input.offset)))), }; let (input, tail) = digit1(input)?; @@ -216,7 +211,7 @@ pub fn raw_number(input: NomSpan) -> IResult> { let next = input.fragment.chars().next(); if is_boundary(next) { - Ok((input, RawNumber::decimal((start, end, input.extra)))) + Ok((input, RawNumber::decimal(Span::new(start, end)))) } else { Err(nom::Err::Error(nom::error::make_error( input, @@ -243,7 +238,7 @@ pub fn dq_string(input: NomSpan) -> IResult { let end = input.offset; Ok(( input, - TokenTreeBuilder::tagged_string((start1, end1, input.extra), (start, end, input.extra)), + TokenTreeBuilder::spanned_string(Span::new(start1, end1), Span::new(start, end)), )) } @@ -259,7 +254,7 @@ pub fn sq_string(input: NomSpan) -> IResult { Ok(( input, - TokenTreeBuilder::tagged_string((start1, end1, input.extra), (start, end, input.extra)), + TokenTreeBuilder::spanned_string(Span::new(start1, end1), Span::new(start, end)), )) } @@ -277,7 +272,7 @@ pub fn external(input: NomSpan) -> IResult { Ok(( input, - TokenTreeBuilder::tagged_external_command(bare, (start, end, input.extra)), + TokenTreeBuilder::spanned_external_command(bare, Span::new(start, end)), )) } @@ -302,7 +297,7 @@ pub fn pattern(input: NomSpan) -> IResult { Ok(( input, - TokenTreeBuilder::tagged_pattern((start, end, input.extra)), + TokenTreeBuilder::spanned_pattern(Span::new(start, end)), )) } @@ -335,10 +330,7 @@ pub fn bare(input: NomSpan) -> IResult { let end = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_bare((start, end, input.extra)), - )) + Ok((input, TokenTreeBuilder::spanned_bare(Span::new(start, end)))) } #[tracable_parser] @@ -349,7 +341,7 @@ pub fn external_word(input: NomSpan) -> IResult { Ok(( input, - 
TokenTreeBuilder::tagged_external_word((start, end, input.extra)), + TokenTreeBuilder::spanned_external_word(Span::new(start, end)), )) } @@ -362,7 +354,7 @@ pub fn var(input: NomSpan) -> IResult { Ok(( input, - TokenTreeBuilder::tagged_var(bare, (start, end, input.extra)), + TokenTreeBuilder::spanned_var(bare, Span::new(start, end)), )) } @@ -373,7 +365,7 @@ pub fn ident(input: NomSpan) -> IResult { let (input, _) = take_while(is_bare_char)(input)?; let end = input.offset; - Ok((input, Tag::from((start, end, input.extra.origin)))) + Ok((input, Tag::from((start, end, None)))) } #[tracable_parser] @@ -385,7 +377,7 @@ pub fn flag(input: NomSpan) -> IResult { Ok(( input, - TokenTreeBuilder::tagged_flag(bare.tag(), (start, end, input.extra)), + TokenTreeBuilder::spanned_flag(bare.span(), Span::new(start, end)), )) } @@ -398,7 +390,7 @@ pub fn shorthand(input: NomSpan) -> IResult { Ok(( input, - TokenTreeBuilder::tagged_shorthand(bare.tag(), (start, end, input.extra)), + TokenTreeBuilder::spanned_shorthand(bare.span(), Span::new(start, end)), )) } @@ -420,12 +412,12 @@ pub fn token_list(input: NomSpan) -> IResult>> { Ok(( input, - make_token_list(first, list, None).tagged((start, end, input.extra.origin)), + make_token_list(first, list, None).tagged((start, end, None)), )) } #[tracable_parser] -pub fn spaced_token_list(input: NomSpan) -> IResult>> { +pub fn spaced_token_list(input: NomSpan) -> IResult>> { let start = input.offset; let (input, pre_ws) = opt(whitespace)(input)?; let (input, items) = token_list(input)?; @@ -438,7 +430,7 @@ pub fn spaced_token_list(input: NomSpan) -> IResult IResult { let (input, ws1) = space1(input)?; let right = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_ws((left, right, input.extra)), - )) + Ok((input, TokenTreeBuilder::spanned_ws(Span::new(left, right)))) } pub fn delimited( input: NomSpan, delimiter: Delimiter, -) -> IResult>)> { +) -> IResult>)> { let left = input.offset; - let (input, open_tag) = 
tag(delimiter.open())(input)?; + let (input, open_span) = tag(delimiter.open())(input)?; let (input, inner_items) = opt(spaced_token_list)(input)?; - let (input, close_tag) = tag(delimiter.close())(input)?; + let (input, close_span) = tag(delimiter.close())(input)?; let right = input.offset; let mut items = vec![]; @@ -493,9 +482,9 @@ pub fn delimited( Ok(( input, ( - Tag::from(open_tag), - Tag::from(close_tag), - items.tagged((left, right, input.extra.origin)), + Span::from(open_span), + Span::from(close_span), + items.spanned(Span::new(left, right)), ), )) } @@ -506,7 +495,7 @@ pub fn delimited_paren(input: NomSpan) -> IResult { Ok(( input, - TokenTreeBuilder::tagged_parens(tokens.item, (left, right), tokens.tag), + TokenTreeBuilder::spanned_parens(tokens.item, (left, right), tokens.span), )) } @@ -516,7 +505,7 @@ pub fn delimited_square(input: NomSpan) -> IResult { Ok(( input, - TokenTreeBuilder::tagged_square(tokens.item, (left, right), tokens.tag), + TokenTreeBuilder::spanned_square(tokens.item, (left, right), tokens.span), )) } @@ -526,7 +515,7 @@ pub fn delimited_brace(input: NomSpan) -> IResult { Ok(( input, - TokenTreeBuilder::tagged_square(tokens.item, (left, right), tokens.tag), + TokenTreeBuilder::spanned_square(tokens.item, (left, right), tokens.span), )) } @@ -637,18 +626,19 @@ pub fn pipeline(input: NomSpan) -> IResult { let end = input.offset; - let head_tag = head.tag(); - let mut all_items: Vec> = - vec![PipelineElement::new(None, head).tagged(head_tag)]; + let head_span = head.span; + let mut all_items: Vec> = + vec![PipelineElement::new(None, head).spanned(head_span)]; all_items.extend(items.into_iter().map(|(pipe, items)| { - let items_tag = items.tag(); - PipelineElement::new(Some(Tag::from(pipe)), items).tagged(Tag::from(pipe).until(items_tag)) + let items_span = items.span; + PipelineElement::new(Some(Span::from(pipe)), items) + .spanned(Span::from(pipe).until(items_span)) })); Ok(( input, - TokenTreeBuilder::tagged_pipeline(all_items, 
(start, end, input.extra)), + TokenTreeBuilder::spanned_pipeline(all_items, Span::new(start, end)), )) } @@ -757,7 +747,7 @@ mod tests { macro_rules! equal_tokens { ($source:tt -> $tokens:expr) => { let result = apply(pipeline, "pipeline", $source); - let (expected_tree, expected_source) = TokenTreeBuilder::build(uuid::Uuid::nil(), $tokens); + let (expected_tree, expected_source) = TokenTreeBuilder::build($tokens); if result != expected_tree { let debug_result = format!("{}", result.debug($source)); @@ -778,7 +768,7 @@ mod tests { (<$parser:tt> $source:tt -> $tokens:expr) => { let result = apply($parser, stringify!($parser), $source); - let (expected_tree, expected_source) = TokenTreeBuilder::build(uuid::Uuid::nil(), $tokens); + let (expected_tree, expected_source) = TokenTreeBuilder::build($tokens); if result != expected_tree { let debug_result = format!("{}", result.debug($source)); @@ -1241,41 +1231,37 @@ mod tests { desc: &str, string: &str, ) -> TokenNode { - f(nom_input(string, uuid::Uuid::nil())).unwrap().1 + f(nom_input(string)).unwrap().1 } - fn tag(left: usize, right: usize) -> Tag { - Tag::from((left, right, uuid::Uuid::nil())) + fn span((left, right): (usize, usize)) -> Span { + Span::new(left, right) } fn delimited( - delimiter: Tagged, + delimiter: Spanned, children: Vec, left: usize, right: usize, ) -> TokenNode { - let start = Tag::for_char(left, delimiter.tag.anchor); - let end = Tag::for_char(right, delimiter.tag.anchor); + let start = Span::for_char(left); + let end = Span::for_char(right); let node = DelimitedNode::new(delimiter.item, (start, end), children); - let spanned = node.tagged((left, right, delimiter.tag.anchor)); + let spanned = node.spanned(Span::new(left, right)); TokenNode::Delimited(spanned) } fn token(token: RawToken, left: usize, right: usize) -> TokenNode { - TokenNode::Token(token.tagged((left, right, uuid::Uuid::nil()))) + TokenNode::Token(token.spanned(Span::new(left, right))) } fn build(block: CurriedNode) -> T { - let mut 
builder = TokenTreeBuilder::new(uuid::Uuid::nil()); + let mut builder = TokenTreeBuilder::new(); block(&mut builder) } fn build_token(block: CurriedToken) -> TokenNode { - TokenTreeBuilder::build(uuid::Uuid::nil(), block).0 - } - - fn test_uuid() -> uuid::Uuid { - uuid::Uuid::nil() + TokenTreeBuilder::build(block).0 } } diff --git a/src/parser/parse/pipeline.rs b/src/parser/parse/pipeline.rs index 36813e39c4..73db738078 100644 --- a/src/parser/parse/pipeline.rs +++ b/src/parser/parse/pipeline.rs @@ -1,13 +1,13 @@ use crate::parser::TokenNode; use crate::traits::ToDebug; -use crate::{Tag, Tagged}; +use crate::{Span, Spanned}; use derive_new::new; use getset::Getters; use std::fmt; #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, new)] pub struct Pipeline { - pub(crate) parts: Vec>, + pub(crate) parts: Vec>, // pub(crate) post_ws: Option, } @@ -23,8 +23,8 @@ impl ToDebug for Pipeline { #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)] pub struct PipelineElement { - pub pipe: Option, - pub tokens: Tagged>, + pub pipe: Option, + pub tokens: Spanned>, } impl ToDebug for PipelineElement { diff --git a/src/parser/parse/token_tree.rs b/src/parser/parse/token_tree.rs index 85961d1dab..3c7e4fc11e 100644 --- a/src/parser/parse/token_tree.rs +++ b/src/parser/parse/token_tree.rs @@ -2,7 +2,7 @@ use crate::errors::ShellError; use crate::parser::parse::{call_node::*, flag::*, operator::*, pipeline::*, tokens::*}; use crate::prelude::*; use crate::traits::ToDebug; -use crate::{Tag, Tagged, Text}; +use crate::{Tagged, Text}; use derive_new::new; use enum_utils::FromStr; use getset::Getters; @@ -12,14 +12,14 @@ use std::fmt; pub enum TokenNode { Token(Token), - Call(Tagged), - Nodes(Tagged>), - Delimited(Tagged), - Pipeline(Tagged), - Flag(Tagged), - Whitespace(Tag), + Call(Spanned), + Nodes(Spanned>), + Delimited(Spanned), + Pipeline(Spanned), + Flag(Spanned), + Whitespace(Span), - Error(Tagged), + Error(Spanned), } impl ToDebug for TokenNode { @@ -78,28 
+78,28 @@ impl fmt::Debug for DebugTokenNode<'_> { } TokenNode::Pipeline(pipeline) => write!(f, "{}", pipeline.debug(self.source)), TokenNode::Error(_) => write!(f, ""), - rest => write!(f, "{}", rest.tag().slice(self.source)), + rest => write!(f, "{}", rest.span().slice(self.source)), } } } -impl From<&TokenNode> for Tag { - fn from(token: &TokenNode) -> Tag { - token.tag() +impl From<&TokenNode> for Span { + fn from(token: &TokenNode) -> Span { + token.span() } } impl TokenNode { - pub fn tag(&self) -> Tag { + pub fn span(&self) -> Span { match self { - TokenNode::Token(t) => t.tag(), - TokenNode::Nodes(t) => t.tag(), - TokenNode::Call(s) => s.tag(), - TokenNode::Delimited(s) => s.tag(), - TokenNode::Pipeline(s) => s.tag(), - TokenNode::Flag(s) => s.tag(), + TokenNode::Token(t) => t.span, + TokenNode::Nodes(t) => t.span, + TokenNode::Call(s) => s.span, + TokenNode::Delimited(s) => s.span, + TokenNode::Pipeline(s) => s.span, + TokenNode::Flag(s) => s.span, TokenNode::Whitespace(s) => *s, - TokenNode::Error(s) => return s.tag, + TokenNode::Error(s) => s.span, } } @@ -116,8 +116,12 @@ impl TokenNode { } } + pub fn spanned_type_name(&self) -> Spanned<&'static str> { + self.type_name().spanned(self.span()) + } + pub fn tagged_type_name(&self) -> Tagged<&'static str> { - self.type_name().tagged(self.tag()) + self.type_name().tagged(self.span()) } pub fn old_debug<'a>(&'a self, source: &'a Text) -> DebugTokenNode<'a> { @@ -125,26 +129,26 @@ impl TokenNode { } pub fn as_external_arg(&self, source: &Text) -> String { - self.tag().slice(source).to_string() + self.span().slice(source).to_string() } pub fn source<'a>(&self, source: &'a Text) -> &'a str { - self.tag().slice(source) + self.span().slice(source) } - pub fn get_variable(&self) -> Result<(Tag, Tag), ShellError> { + pub fn get_variable(&self) -> Result<(Span, Span), ShellError> { match self { - TokenNode::Token(Tagged { - item: RawToken::Variable(inner_tag), - tag: outer_tag, - }) => Ok((*outer_tag, *inner_tag)), + 
TokenNode::Token(Spanned { + item: RawToken::Variable(inner_span), + span: outer_span, + }) => Ok((*outer_span, *inner_span)), _ => Err(ShellError::type_error("variable", self.tagged_type_name())), } } pub fn is_bare(&self) -> bool { match self { - TokenNode::Token(Tagged { + TokenNode::Token(Spanned { item: RawToken::Bare, .. }) => true, @@ -154,7 +158,7 @@ impl TokenNode { pub fn is_pattern(&self) -> bool { match self { - TokenNode::Token(Tagged { + TokenNode::Token(Spanned { item: RawToken::GlobPattern, .. }) => true, @@ -164,7 +168,7 @@ impl TokenNode { pub fn is_dot(&self) -> bool { match self { - TokenNode::Token(Tagged { + TokenNode::Token(Spanned { item: RawToken::Operator(Operator::Dot), .. }) => true, @@ -172,24 +176,24 @@ impl TokenNode { } } - pub fn as_block(&self) -> Option<(Tagged<&[TokenNode]>, (Tag, Tag))> { + pub fn as_block(&self) -> Option<(Spanned<&[TokenNode]>, (Span, Span))> { match self { - TokenNode::Delimited(Tagged { + TokenNode::Delimited(Spanned { item: DelimitedNode { delimiter, children, - tags, + spans, }, - tag, - }) if *delimiter == Delimiter::Brace => Some(((&children[..]).tagged(tag), *tags)), + span, + }) if *delimiter == Delimiter::Brace => Some(((&children[..]).spanned(*span), *spans)), _ => None, } } pub fn is_external(&self) -> bool { match self { - TokenNode::Token(Tagged { + TokenNode::Token(Spanned { item: RawToken::ExternalCommand(..), .. }) => true, @@ -197,20 +201,20 @@ impl TokenNode { } } - pub fn expect_external(&self) -> Tag { + pub fn expect_external(&self) -> Span { match self { - TokenNode::Token(Tagged { - item: RawToken::ExternalCommand(tag), + TokenNode::Token(Spanned { + item: RawToken::ExternalCommand(span), .. 
- }) => *tag, + }) => *span, _ => panic!("Only call expect_external if you checked is_external first"), } } - pub(crate) fn as_flag(&self, value: &str, source: &Text) -> Option> { + pub(crate) fn as_flag(&self, value: &str, source: &Text) -> Option> { match self { TokenNode::Flag( - flag @ Tagged { + flag @ Spanned { item: Flag { .. }, .. }, ) if value == flag.name().slice(source) => Some(*flag), @@ -220,7 +224,7 @@ impl TokenNode { pub fn as_pipeline(&self) -> Result { match self { - TokenNode::Pipeline(Tagged { item, .. }) => Ok(item.clone()), + TokenNode::Pipeline(Spanned { item, .. }) => Ok(item.clone()), _ => Err(ShellError::unimplemented("unimplemented")), } } @@ -232,12 +236,12 @@ impl TokenNode { } } - pub fn expect_string(&self) -> (Tag, Tag) { + pub fn expect_string(&self) -> (Span, Span) { match self { - TokenNode::Token(Tagged { - item: RawToken::String(inner_tag), - tag: outer_tag, - }) => (*outer_tag, *inner_tag), + TokenNode::Token(Spanned { + item: RawToken::String(inner_span), + span: outer_span, + }) => (*outer_span, *inner_span), other => panic!("Expected string, found {:?}", other), } } @@ -247,27 +251,30 @@ impl TokenNode { impl TokenNode { pub fn expect_list(&self) -> Tagged<&[TokenNode]> { match self { - TokenNode::Nodes(Tagged { item, tag }) => (&item[..]).tagged(tag), + TokenNode::Nodes(Spanned { item, span }) => (&item[..]).tagged(Tag { + span: *span, + anchor: None, + }), other => panic!("Expected list, found {:?}", other), } } - pub fn expect_var(&self) -> (Tag, Tag) { + pub fn expect_var(&self) -> (Span, Span) { match self { - TokenNode::Token(Tagged { - item: RawToken::Variable(inner_tag), - tag: outer_tag, - }) => (*outer_tag, *inner_tag), + TokenNode::Token(Spanned { + item: RawToken::Variable(inner_span), + span: outer_span, + }) => (*outer_span, *inner_span), other => panic!("Expected var, found {:?}", other), } } - pub fn expect_bare(&self) -> Tag { + pub fn expect_bare(&self) -> Span { match self { - TokenNode::Token(Tagged { + 
TokenNode::Token(Spanned { item: RawToken::Bare, - tag, - }) => *tag, + span, + }) => *span, other => panic!("Expected var, found {:?}", other), } } @@ -277,7 +284,7 @@ impl TokenNode { #[get = "pub(crate)"] pub struct DelimitedNode { pub(crate) delimiter: Delimiter, - pub(crate) tags: (Tag, Tag), + pub(crate) spans: (Span, Span), pub(crate) children: Vec, } diff --git a/src/parser/parse/token_tree_builder.rs b/src/parser/parse/token_tree_builder.rs index 549462a979..891e6b9e16 100644 --- a/src/parser/parse/token_tree_builder.rs +++ b/src/parser/parse/token_tree_builder.rs @@ -7,7 +7,6 @@ use crate::parser::parse::token_tree::{DelimitedNode, Delimiter, TokenNode}; use crate::parser::parse::tokens::{RawNumber, RawToken}; use crate::parser::CallNode; use derive_new::new; -use uuid::Uuid; #[derive(new)] pub struct TokenTreeBuilder { @@ -16,33 +15,34 @@ pub struct TokenTreeBuilder { #[new(default)] output: String, - - anchor: Uuid, } pub type CurriedToken = Box TokenNode + 'static>; pub type CurriedCall = Box Tagged + 'static>; impl TokenTreeBuilder { - pub fn build(anchor: Uuid, block: impl FnOnce(&mut Self) -> TokenNode) -> (TokenNode, String) { - let mut builder = TokenTreeBuilder::new(anchor); + pub fn build(block: impl FnOnce(&mut Self) -> TokenNode) -> (TokenNode, String) { + let mut builder = TokenTreeBuilder::new(); let node = block(&mut builder); (node, builder.output) } - fn build_tagged(&mut self, callback: impl FnOnce(&mut TokenTreeBuilder) -> T) -> Tagged { + fn build_spanned( + &mut self, + callback: impl FnOnce(&mut TokenTreeBuilder) -> T, + ) -> Spanned { let start = self.pos; let ret = callback(self); let end = self.pos; - ret.tagged((start, end, self.anchor)) + ret.spanned(Span::new(start, end)) } pub fn pipeline(input: Vec>) -> CurriedToken { Box::new(move |b| { let start = b.pos; - let mut out: Vec> = vec![]; + let mut out: Vec> = vec![]; let mut input = input.into_iter().peekable(); let head = input @@ -50,34 +50,37 @@ impl TokenTreeBuilder { 
.expect("A pipeline must contain at least one element"); let pipe = None; - let head = b.build_tagged(|b| head.into_iter().map(|node| node(b)).collect()); + let head = b.build_spanned(|b| head.into_iter().map(|node| node(b)).collect()); - let head_tag: Tag = head.tag; - out.push(PipelineElement::new(pipe, head).tagged(head_tag)); + let head_span: Span = head.span; + out.push(PipelineElement::new(pipe, head).spanned(head_span)); loop { match input.next() { None => break, Some(node) => { let start = b.pos; - let pipe = Some(b.consume_tag("|")); + let pipe = Some(b.consume_span("|")); let node = - b.build_tagged(|b| node.into_iter().map(|node| node(b)).collect()); + b.build_spanned(|b| node.into_iter().map(|node| node(b)).collect()); let end = b.pos; - out.push(PipelineElement::new(pipe, node).tagged((start, end, b.anchor))); + out.push(PipelineElement::new(pipe, node).spanned(Span::new(start, end))); } } } let end = b.pos; - TokenTreeBuilder::tagged_pipeline(out, (start, end, b.anchor)) + TokenTreeBuilder::spanned_pipeline(out, Span::new(start, end)) }) } - pub fn tagged_pipeline(input: Vec>, tag: impl Into) -> TokenNode { - TokenNode::Pipeline(Pipeline::new(input).tagged(tag.into())) + pub fn spanned_pipeline( + input: Vec>, + span: impl Into, + ) -> TokenNode { + TokenNode::Pipeline(Pipeline::new(input).spanned(span)) } pub fn token_list(input: Vec) -> CurriedToken { @@ -86,12 +89,12 @@ impl TokenTreeBuilder { let tokens = input.into_iter().map(|i| i(b)).collect(); let end = b.pos; - TokenTreeBuilder::tagged_token_list(tokens, (start, end, b.anchor)) + TokenTreeBuilder::tagged_token_list(tokens, (start, end, None)) }) } pub fn tagged_token_list(input: Vec, tag: impl Into) -> TokenNode { - TokenNode::Nodes(input.tagged(tag)) + TokenNode::Nodes(input.spanned(tag.into().span)) } pub fn op(input: impl Into) -> CurriedToken { @@ -102,12 +105,12 @@ impl TokenTreeBuilder { b.pos = end; - TokenTreeBuilder::tagged_op(input, (start, end, b.anchor)) + 
TokenTreeBuilder::spanned_op(input, Span::new(start, end)) }) } - pub fn tagged_op(input: impl Into, tag: impl Into) -> TokenNode { - TokenNode::Token(RawToken::Operator(input.into()).tagged(tag.into())) + pub fn spanned_op(input: impl Into, span: impl Into) -> TokenNode { + TokenNode::Token(RawToken::Operator(input.into()).spanned(span.into())) } pub fn string(input: impl Into) -> CurriedToken { @@ -119,15 +122,15 @@ impl TokenTreeBuilder { let (_, end) = b.consume("\""); b.pos = end; - TokenTreeBuilder::tagged_string( - (inner_start, inner_end, b.anchor), - (start, end, b.anchor), + TokenTreeBuilder::spanned_string( + Span::new(inner_start, inner_end), + Span::new(start, end), ) }) } - pub fn tagged_string(input: impl Into, tag: impl Into) -> TokenNode { - TokenNode::Token(RawToken::String(input.into()).tagged(tag.into())) + pub fn spanned_string(input: impl Into, span: impl Into) -> TokenNode { + TokenNode::Token(RawToken::String(input.into()).spanned(span.into())) } pub fn bare(input: impl Into) -> CurriedToken { @@ -137,12 +140,12 @@ impl TokenTreeBuilder { let (start, end) = b.consume(&input); b.pos = end; - TokenTreeBuilder::tagged_bare((start, end, b.anchor)) + TokenTreeBuilder::spanned_bare(Span::new(start, end)) }) } - pub fn tagged_bare(tag: impl Into) -> TokenNode { - TokenNode::Token(RawToken::Bare.tagged(tag.into())) + pub fn spanned_bare(span: impl Into) -> TokenNode { + TokenNode::Token(RawToken::Bare.spanned(span)) } pub fn pattern(input: impl Into) -> CurriedToken { @@ -152,12 +155,12 @@ impl TokenTreeBuilder { let (start, end) = b.consume(&input); b.pos = end; - TokenTreeBuilder::tagged_pattern((start, end, b.anchor)) + TokenTreeBuilder::spanned_pattern(Span::new(start, end)) }) } - pub fn tagged_pattern(input: impl Into) -> TokenNode { - TokenNode::Token(RawToken::GlobPattern.tagged(input.into())) + pub fn spanned_pattern(input: impl Into) -> TokenNode { + TokenNode::Token(RawToken::GlobPattern.spanned(input.into())) } pub fn 
external_word(input: impl Into) -> CurriedToken { @@ -167,12 +170,12 @@ impl TokenTreeBuilder { let (start, end) = b.consume(&input); b.pos = end; - TokenTreeBuilder::tagged_external_word((start, end, b.anchor)) + TokenTreeBuilder::spanned_external_word(Span::new(start, end)) }) } - pub fn tagged_external_word(input: impl Into) -> TokenNode { - TokenNode::Token(RawToken::ExternalWord.tagged(input.into())) + pub fn spanned_external_word(input: impl Into) -> TokenNode { + TokenNode::Token(RawToken::ExternalWord.spanned(input.into())) } pub fn external_command(input: impl Into) -> CurriedToken { @@ -183,15 +186,15 @@ impl TokenTreeBuilder { let (inner_start, end) = b.consume(&input); b.pos = end; - TokenTreeBuilder::tagged_external_command( - (inner_start, end, b.anchor), - (outer_start, end, b.anchor), + TokenTreeBuilder::spanned_external_command( + Span::new(inner_start, end), + Span::new(outer_start, end), ) }) } - pub fn tagged_external_command(inner: impl Into, outer: impl Into) -> TokenNode { - TokenNode::Token(RawToken::ExternalCommand(inner.into()).tagged(outer.into())) + pub fn spanned_external_command(inner: impl Into, outer: impl Into) -> TokenNode { + TokenNode::Token(RawToken::ExternalCommand(inner.into()).spanned(outer.into())) } pub fn int(input: impl Into) -> CurriedToken { @@ -201,9 +204,9 @@ impl TokenTreeBuilder { let (start, end) = b.consume(&int.to_string()); b.pos = end; - TokenTreeBuilder::tagged_number( - RawNumber::Int((start, end, b.anchor).into()), - (start, end, b.anchor), + TokenTreeBuilder::spanned_number( + RawNumber::Int(Span::new(start, end)), + Span::new(start, end), ) }) } @@ -215,15 +218,15 @@ impl TokenTreeBuilder { let (start, end) = b.consume(&decimal.to_string()); b.pos = end; - TokenTreeBuilder::tagged_number( - RawNumber::Decimal((start, end, b.anchor).into()), - (start, end, b.anchor), + TokenTreeBuilder::spanned_number( + RawNumber::Decimal(Span::new(start, end)), + Span::new(start, end), ) }) } - pub fn tagged_number(input: 
impl Into, tag: impl Into) -> TokenNode { - TokenNode::Token(RawToken::Number(input.into()).tagged(tag.into())) + pub fn spanned_number(input: impl Into, span: impl Into) -> TokenNode { + TokenNode::Token(RawToken::Number(input.into()).spanned(span.into())) } pub fn var(input: impl Into) -> CurriedToken { @@ -233,12 +236,12 @@ impl TokenTreeBuilder { let (start, _) = b.consume("$"); let (inner_start, end) = b.consume(&input); - TokenTreeBuilder::tagged_var((inner_start, end, b.anchor), (start, end, b.anchor)) + TokenTreeBuilder::spanned_var(Span::new(inner_start, end), Span::new(start, end)) }) } - pub fn tagged_var(input: impl Into, tag: impl Into) -> TokenNode { - TokenNode::Token(RawToken::Variable(input.into()).tagged(tag.into())) + pub fn spanned_var(input: impl Into, span: impl Into) -> TokenNode { + TokenNode::Token(RawToken::Variable(input.into()).spanned(span.into())) } pub fn flag(input: impl Into) -> CurriedToken { @@ -248,12 +251,12 @@ impl TokenTreeBuilder { let (start, _) = b.consume("--"); let (inner_start, end) = b.consume(&input); - TokenTreeBuilder::tagged_flag((inner_start, end, b.anchor), (start, end, b.anchor)) + TokenTreeBuilder::spanned_flag(Span::new(inner_start, end), Span::new(start, end)) }) } - pub fn tagged_flag(input: impl Into, tag: impl Into) -> TokenNode { - TokenNode::Flag(Flag::new(FlagKind::Longhand, input.into()).tagged(tag.into())) + pub fn spanned_flag(input: impl Into, span: impl Into) -> TokenNode { + TokenNode::Flag(Flag::new(FlagKind::Longhand, input.into()).spanned(span.into())) } pub fn shorthand(input: impl Into) -> CurriedToken { @@ -263,12 +266,12 @@ impl TokenTreeBuilder { let (start, _) = b.consume("-"); let (inner_start, end) = b.consume(&input); - TokenTreeBuilder::tagged_shorthand((inner_start, end, b.anchor), (start, end, b.anchor)) + TokenTreeBuilder::spanned_shorthand((inner_start, end), (start, end)) }) } - pub fn tagged_shorthand(input: impl Into, tag: impl Into) -> TokenNode { - 
TokenNode::Flag(Flag::new(FlagKind::Shorthand, input.into()).tagged(tag.into())) + pub fn spanned_shorthand(input: impl Into, span: impl Into) -> TokenNode { + TokenNode::Flag(Flag::new(FlagKind::Shorthand, input.into()).spanned(span.into())) } pub fn call(head: CurriedToken, input: Vec) -> CurriedCall { @@ -284,7 +287,7 @@ impl TokenTreeBuilder { let end = b.pos; - TokenTreeBuilder::tagged_call(nodes, (start, end, b.anchor)) + TokenTreeBuilder::tagged_call(nodes, (start, end, None)) }) } @@ -306,7 +309,7 @@ impl TokenTreeBuilder { input: Vec, _open: &str, _close: &str, - ) -> (Tag, Tag, Tag, Vec) { + ) -> (Span, Span, Span, Vec) { let (start_open_paren, end_open_paren) = self.consume("("); let mut output = vec![]; for item in input { @@ -315,9 +318,9 @@ impl TokenTreeBuilder { let (start_close_paren, end_close_paren) = self.consume(")"); - let open = Tag::from((start_open_paren, end_open_paren, self.anchor)); - let close = Tag::from((start_close_paren, end_close_paren, self.anchor)); - let whole = Tag::from((start_open_paren, end_close_paren, self.anchor)); + let open = Span::new(start_open_paren, end_open_paren); + let close = Span::new(start_close_paren, end_close_paren); + let whole = Span::new(start_open_paren, end_close_paren); (open, close, whole, output) } @@ -326,17 +329,17 @@ impl TokenTreeBuilder { Box::new(move |b| { let (open, close, whole, output) = b.consume_delimiter(input, "(", ")"); - TokenTreeBuilder::tagged_parens(output, (open, close), whole) + TokenTreeBuilder::spanned_parens(output, (open, close), whole) }) } - pub fn tagged_parens( + pub fn spanned_parens( input: impl Into>, - tags: (Tag, Tag), - tag: impl Into, + spans: (Span, Span), + span: impl Into, ) -> TokenNode { TokenNode::Delimited( - DelimitedNode::new(Delimiter::Paren, tags, input.into()).tagged(tag.into()), + DelimitedNode::new(Delimiter::Paren, spans, input.into()).spanned(span.into()), ) } @@ -344,17 +347,17 @@ impl TokenTreeBuilder { Box::new(move |b| { let (open, close, 
whole, tokens) = b.consume_delimiter(input, "[", "]"); - TokenTreeBuilder::tagged_square(tokens, (open, close), whole) + TokenTreeBuilder::spanned_square(tokens, (open, close), whole) }) } - pub fn tagged_square( + pub fn spanned_square( input: impl Into>, - tags: (Tag, Tag), - tag: impl Into, + spans: (Span, Span), + span: impl Into, ) -> TokenNode { TokenNode::Delimited( - DelimitedNode::new(Delimiter::Square, tags, input.into()).tagged(tag.into()), + DelimitedNode::new(Delimiter::Square, spans, input.into()).spanned(span.into()), ) } @@ -362,24 +365,24 @@ impl TokenTreeBuilder { Box::new(move |b| { let (open, close, whole, tokens) = b.consume_delimiter(input, "{", "}"); - TokenTreeBuilder::tagged_brace(tokens, (open, close), whole) + TokenTreeBuilder::spanned_brace(tokens, (open, close), whole) }) } - pub fn tagged_brace( + pub fn spanned_brace( input: impl Into>, - tags: (Tag, Tag), - tag: impl Into, + spans: (Span, Span), + span: impl Into, ) -> TokenNode { TokenNode::Delimited( - DelimitedNode::new(Delimiter::Brace, tags, input.into()).tagged(tag.into()), + DelimitedNode::new(Delimiter::Brace, spans, input.into()).spanned(span.into()), ) } pub fn sp() -> CurriedToken { Box::new(|b| { let (start, end) = b.consume(" "); - TokenNode::Whitespace(Tag::from((start, end, b.anchor))) + TokenNode::Whitespace(Span::new(start, end)) }) } @@ -388,12 +391,12 @@ impl TokenTreeBuilder { Box::new(move |b| { let (start, end) = b.consume(&input); - TokenTreeBuilder::tagged_ws((start, end, b.anchor)) + TokenTreeBuilder::spanned_ws(Span::new(start, end)) }) } - pub fn tagged_ws(tag: impl Into) -> TokenNode { - TokenNode::Whitespace(tag.into()) + pub fn spanned_ws(span: impl Into) -> TokenNode { + TokenNode::Whitespace(span.into()) } fn consume(&mut self, input: &str) -> (usize, usize) { @@ -403,10 +406,10 @@ impl TokenTreeBuilder { (start, self.pos) } - fn consume_tag(&mut self, input: &str) -> Tag { + fn consume_span(&mut self, input: &str) -> Span { let start = self.pos; 
self.pos += input.len(); self.output.push_str(input); - (start, self.pos, self.anchor).into() + Span::new(start, self.pos) } } diff --git a/src/parser/parse/tokens.rs b/src/parser/parse/tokens.rs index 41bdfcebd6..94955d84d9 100644 --- a/src/parser/parse/tokens.rs +++ b/src/parser/parse/tokens.rs @@ -1,6 +1,6 @@ use crate::parser::Operator; use crate::prelude::*; -use crate::{Tagged, Text}; +use crate::Text; use std::fmt; use std::str::FromStr; @@ -8,9 +8,9 @@ use std::str::FromStr; pub enum RawToken { Number(RawNumber), Operator(Operator), - String(Tag), - Variable(Tag), - ExternalCommand(Tag), + String(Span), + Variable(Span), + ExternalCommand(Span), ExternalWord, GlobPattern, Bare, @@ -33,21 +33,21 @@ impl RawToken { #[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] pub enum RawNumber { - Int(Tag), - Decimal(Tag), + Int(Span), + Decimal(Span), } impl RawNumber { - pub fn int(tag: impl Into) -> Tagged { - let tag = tag.into(); + pub fn int(span: impl Into) -> Spanned { + let span = span.into(); - RawNumber::Int(tag).tagged(tag) + RawNumber::Int(span).spanned(span) } - pub fn decimal(tag: impl Into) -> Tagged { - let tag = tag.into(); + pub fn decimal(span: impl Into) -> Spanned { + let span = span.into(); - RawNumber::Decimal(tag).tagged(tag) + RawNumber::Decimal(span).spanned(span) } pub(crate) fn to_number(self, source: &Text) -> Number { @@ -60,7 +60,7 @@ impl RawNumber { } } -pub type Token = Tagged; +pub type Token = Spanned; impl Token { pub fn debug<'a>(&self, source: &'a Text) -> DebugToken<'a> { @@ -70,72 +70,72 @@ impl Token { } } - pub fn extract_number(&self) -> Option> { + pub fn extract_number(&self) -> Option> { match self.item { - RawToken::Number(number) => Some((number).tagged(self.tag)), + RawToken::Number(number) => Some((number).spanned(self.span)), _ => None, } } - pub fn extract_int(&self) -> Option<(Tag, Tag)> { + pub fn extract_int(&self) -> Option<(Span, Span)> { match self.item { - 
RawToken::Number(RawNumber::Int(int)) => Some((int, self.tag)), + RawToken::Number(RawNumber::Int(int)) => Some((int, self.span)), _ => None, } } - pub fn extract_decimal(&self) -> Option<(Tag, Tag)> { + pub fn extract_decimal(&self) -> Option<(Span, Span)> { match self.item { - RawToken::Number(RawNumber::Decimal(decimal)) => Some((decimal, self.tag)), + RawToken::Number(RawNumber::Decimal(decimal)) => Some((decimal, self.span)), _ => None, } } - pub fn extract_operator(&self) -> Option> { + pub fn extract_operator(&self) -> Option> { match self.item { - RawToken::Operator(operator) => Some(operator.tagged(self.tag)), + RawToken::Operator(operator) => Some(operator.spanned(self.span)), _ => None, } } - pub fn extract_string(&self) -> Option<(Tag, Tag)> { + pub fn extract_string(&self) -> Option<(Span, Span)> { match self.item { - RawToken::String(tag) => Some((tag, self.tag)), + RawToken::String(span) => Some((span, self.span)), _ => None, } } - pub fn extract_variable(&self) -> Option<(Tag, Tag)> { + pub fn extract_variable(&self) -> Option<(Span, Span)> { match self.item { - RawToken::Variable(tag) => Some((tag, self.tag)), + RawToken::Variable(span) => Some((span, self.span)), _ => None, } } - pub fn extract_external_command(&self) -> Option<(Tag, Tag)> { + pub fn extract_external_command(&self) -> Option<(Span, Span)> { match self.item { - RawToken::ExternalCommand(tag) => Some((tag, self.tag)), + RawToken::ExternalCommand(span) => Some((span, self.span)), _ => None, } } - pub fn extract_external_word(&self) -> Option { + pub fn extract_external_word(&self) -> Option { match self.item { - RawToken::ExternalWord => Some(self.tag), + RawToken::ExternalWord => Some(self.span), _ => None, } } - pub fn extract_glob_pattern(&self) -> Option { + pub fn extract_glob_pattern(&self) -> Option { match self.item { - RawToken::GlobPattern => Some(self.tag), + RawToken::GlobPattern => Some(self.span), _ => None, } } - pub fn extract_bare(&self) -> Option { + pub fn 
extract_bare(&self) -> Option { match self.item { - RawToken::Bare => Some(self.tag), + RawToken::Bare => Some(self.span), _ => None, } } @@ -148,6 +148,6 @@ pub struct DebugToken<'a> { impl fmt::Debug for DebugToken<'_> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.node.tag().slice(self.source)) + write!(f, "{}", self.node.span.slice(self.source)) } } diff --git a/src/parser/parse_command.rs b/src/parser/parse_command.rs index 603ff2956d..935794f3c1 100644 --- a/src/parser/parse_command.rs +++ b/src/parser/parse_command.rs @@ -10,14 +10,14 @@ use crate::parser::{ Flag, }; use crate::traits::ToDebug; -use crate::{Tag, Tagged, Text}; +use crate::{Span, Spanned, Tag, Text}; use log::trace; pub fn parse_command_tail( config: &Signature, context: &ExpandContext, tail: &mut TokensIterator, - command_tag: Tag, + command_span: Span, ) -> Result>, Option)>, ShellError> { let mut named = NamedArguments::new(); trace_remaining("nodes", tail.clone(), context.source()); @@ -32,7 +32,7 @@ pub fn parse_command_tail( named.insert_switch(name, flag); } NamedType::Mandatory(syntax_type) => { - match extract_mandatory(config, name, tail, context.source(), command_tag) { + match extract_mandatory(config, name, tail, context.source(), command_span) { Err(err) => return Err(err), // produce a correct diagnostic Ok((pos, flag)) => { tail.move_to(pos); @@ -41,7 +41,7 @@ pub fn parse_command_tail( return Err(ShellError::argument_error( config.name.clone(), ArgumentError::MissingValueForName(name.to_string()), - flag.tag(), + flag.span, )); } @@ -62,7 +62,7 @@ pub fn parse_command_tail( return Err(ShellError::argument_error( config.name.clone(), ArgumentError::MissingValueForName(name.to_string()), - flag.tag(), + flag.span, )); } @@ -98,7 +98,10 @@ pub fn parse_command_tail( return Err(ShellError::argument_error( config.name.clone(), ArgumentError::MissingMandatoryPositional(arg.name().to_string()), - command_tag, + Tag { + span: command_span, + anchor: 
None, + }, )); } } @@ -158,7 +161,7 @@ pub fn parse_command_tail( #[derive(Debug)] struct ColoringArgs { - vec: Vec>>>, + vec: Vec>>>, } impl ColoringArgs { @@ -167,11 +170,11 @@ impl ColoringArgs { ColoringArgs { vec } } - fn insert(&mut self, pos: usize, shapes: Vec>) { + fn insert(&mut self, pos: usize, shapes: Vec>) { self.vec[pos] = Some(shapes); } - fn spread_shapes(self, shapes: &mut Vec>) { + fn spread_shapes(self, shapes: &mut Vec>) { for item in self.vec { match item { None => {} @@ -195,7 +198,7 @@ impl ColorSyntax for CommandTailShape { signature: &Signature, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, + shapes: &mut Vec>, ) -> Self::Info { let mut args = ColoringArgs::new(token_nodes.len()); trace_remaining("nodes", token_nodes.clone(), context.source()); @@ -216,7 +219,7 @@ impl ColorSyntax for CommandTailShape { name, token_nodes, context.source(), - Tag::unknown(), + Span::unknown(), ) { Err(_) => { // The mandatory flag didn't exist at all, so there's nothing to color @@ -378,7 +381,7 @@ impl ColorSyntax for CommandTailShape { // Consume any remaining tokens with backoff coloring mode color_syntax(&BackoffColoringMode, token_nodes, context, shapes); - shapes.sort_by(|a, b| a.tag.span.start().cmp(&b.tag.span.start())); + shapes.sort_by(|a, b| a.span.start().cmp(&b.span.start())); } } @@ -393,15 +396,15 @@ fn extract_mandatory( name: &str, tokens: &mut hir::TokensIterator<'_>, source: &Text, - tag: Tag, -) -> Result<(usize, Tagged), ShellError> { + span: Span, +) -> Result<(usize, Spanned), ShellError> { let flag = tokens.extract(|t| t.as_flag(name, source)); match flag { None => Err(ShellError::argument_error( config.name.clone(), ArgumentError::MissingMandatoryFlag(name.to_string()), - tag, + span, )), Some((pos, flag)) => { @@ -415,7 +418,7 @@ fn extract_optional( name: &str, tokens: &mut hir::TokensIterator<'_>, source: &Text, -) -> Result<(Option<(usize, Tagged)>), ShellError> { +) -> 
Result<(Option<(usize, Spanned)>), ShellError> { let flag = tokens.extract(|t| t.as_flag(name, source)); match flag { diff --git a/src/parser/registry.rs b/src/parser/registry.rs index 888e5ae1e9..790925e800 100644 --- a/src/parser/registry.rs +++ b/src/parser/registry.rs @@ -298,7 +298,7 @@ pub(crate) fn evaluate_args( for (name, value) in n.named.iter() { match value { hir::named::NamedValue::PresentSwitch(tag) => { - results.insert(name.clone(), Value::boolean(true).tagged(*tag)); + results.insert(name.clone(), Value::boolean(true).tagged(tag)); } hir::named::NamedValue::Value(expr) => { results.insert( diff --git a/src/plugins/add.rs b/src/plugins/add.rs index 6fc034226c..98cf3819b3 100644 --- a/src/plugins/add.rs +++ b/src/plugins/add.rs @@ -22,7 +22,7 @@ impl Add { let value_tag = value.tag(); match (value.item, self.value.clone()) { (obj @ Value::Row(_), Some(v)) => match &self.field { - Some(f) => match obj.insert_data_at_column_path(value_tag, &f, v) { + Some(f) => match obj.insert_data_at_column_path(value_tag.clone(), &f, v) { Some(v) => return Ok(v), None => { return Err(ShellError::labeled_error( @@ -32,7 +32,7 @@ impl Add { f.iter().map(|i| &i.item).join(".") ), "column name", - value_tag, + &value_tag, )) } }, diff --git a/src/plugins/binaryview.rs b/src/plugins/binaryview.rs index d5488d3241..b834f440e2 100644 --- a/src/plugins/binaryview.rs +++ b/src/plugins/binaryview.rs @@ -24,8 +24,7 @@ impl Plugin for BinaryView { let value_anchor = v.anchor(); match v.item { Value::Primitive(Primitive::Binary(b)) => { - let source = call_info.source_map.get(&value_anchor); - let _ = view_binary(&b, source, call_info.args.has("lores")); + let _ = view_binary(&b, value_anchor.as_ref(), call_info.args.has("lores")); } _ => {} } diff --git a/src/plugins/edit.rs b/src/plugins/edit.rs index c0f6dfbedd..34653bd66d 100644 --- a/src/plugins/edit.rs +++ b/src/plugins/edit.rs @@ -27,7 +27,7 @@ impl Edit { return Err(ShellError::labeled_error( "edit could not find place 
to insert column", "column name", - f.tag, + &f.tag, )) } }, diff --git a/src/plugins/embed.rs b/src/plugins/embed.rs index 4e3545d055..97dd6a2713 100644 --- a/src/plugins/embed.rs +++ b/src/plugins/embed.rs @@ -28,7 +28,7 @@ impl Embed { None => Err(ShellError::labeled_error( "embed needs a field when embedding a value", "original value", - value.tag, + &tag, )), }, } diff --git a/src/plugins/inc.rs b/src/plugins/inc.rs index c58ca89369..38788014ad 100644 --- a/src/plugins/inc.rs +++ b/src/plugins/inc.rs @@ -82,9 +82,7 @@ impl Inc { Value::Primitive(Primitive::Bytes(b)) => { Ok(Value::bytes(b + 1 as u64).tagged(value.tag())) } - Value::Primitive(Primitive::String(ref s)) => { - Ok(Tagged::from_item(self.apply(&s)?, value.tag())) - } + Value::Primitive(Primitive::String(ref s)) => Ok(self.apply(&s)?.tagged(value.tag())), Value::Row(_) => match self.field { Some(ref f) => { let replacement = match value.item.get_data_by_column_path(value.tag(), f) { @@ -93,7 +91,7 @@ impl Inc { return Err(ShellError::labeled_error( "inc could not find field to replace", "column name", - f.tag, + &f.tag, )) } }; @@ -107,7 +105,7 @@ impl Inc { return Err(ShellError::labeled_error( "inc could not find field to replace", "column name", - f.tag, + &f.tag, )) } } @@ -191,20 +189,18 @@ mod tests { use super::{Inc, SemVerAction}; use indexmap::IndexMap; use nu::{ - CallInfo, EvaluatedArgs, Plugin, ReturnSuccess, SourceMap, Tag, Tagged, TaggedDictBuilder, - TaggedItem, Value, + CallInfo, EvaluatedArgs, Plugin, ReturnSuccess, Tag, Tagged, TaggedDictBuilder, TaggedItem, + Value, }; struct CallStub { - anchor: uuid::Uuid, positionals: Vec>, flags: IndexMap>, } impl CallStub { - fn new(anchor: uuid::Uuid) -> CallStub { + fn new() -> CallStub { CallStub { - anchor, positionals: vec![], flags: indexmap::IndexMap::new(), } @@ -221,19 +217,18 @@ mod tests { fn with_parameter(&mut self, name: &str) -> &mut Self { let fields: Vec> = name .split(".") - .map(|s| 
Value::string(s.to_string()).tagged(Tag::unknown_span(self.anchor))) + .map(|s| Value::string(s.to_string()).tagged(Tag::unknown())) .collect(); self.positionals - .push(Value::Table(fields).tagged(Tag::unknown_span(self.anchor))); + .push(Value::Table(fields).tagged(Tag::unknown())); self } fn create(&self) -> CallInfo { CallInfo { args: EvaluatedArgs::new(Some(self.positionals.clone()), Some(self.flags.clone())), - source_map: SourceMap::new(), - name_tag: Tag::unknown_span(self.anchor), + name_tag: Tag::unknown(), } } } @@ -260,7 +255,7 @@ mod tests { let mut plugin = Inc::new(); assert!(plugin - .begin_filter(CallStub::new(test_uuid()).with_long_flag("major").create()) + .begin_filter(CallStub::new().with_long_flag("major").create()) .is_ok()); assert!(plugin.action.is_some()); } @@ -270,7 +265,7 @@ mod tests { let mut plugin = Inc::new(); assert!(plugin - .begin_filter(CallStub::new(test_uuid()).with_long_flag("minor").create()) + .begin_filter(CallStub::new().with_long_flag("minor").create()) .is_ok()); assert!(plugin.action.is_some()); } @@ -280,7 +275,7 @@ mod tests { let mut plugin = Inc::new(); assert!(plugin - .begin_filter(CallStub::new(test_uuid()).with_long_flag("patch").create()) + .begin_filter(CallStub::new().with_long_flag("patch").create()) .is_ok()); assert!(plugin.action.is_some()); } @@ -291,7 +286,7 @@ mod tests { assert!(plugin .begin_filter( - CallStub::new(test_uuid()) + CallStub::new() .with_long_flag("major") .with_long_flag("minor") .create(), @@ -305,11 +300,7 @@ mod tests { let mut plugin = Inc::new(); assert!(plugin - .begin_filter( - CallStub::new(test_uuid()) - .with_parameter("package.version") - .create() - ) + .begin_filter(CallStub::new().with_parameter("package.version").create()) .is_ok()); assert_eq!( @@ -347,7 +338,7 @@ mod tests { assert!(plugin .begin_filter( - CallStub::new(test_uuid()) + CallStub::new() .with_long_flag("major") .with_parameter("version") .create() @@ -375,7 +366,7 @@ mod tests { assert!(plugin 
.begin_filter( - CallStub::new(test_uuid()) + CallStub::new() .with_long_flag("minor") .with_parameter("version") .create() @@ -404,7 +395,7 @@ mod tests { assert!(plugin .begin_filter( - CallStub::new(test_uuid()) + CallStub::new() .with_long_flag("patch") .with_parameter(&field) .create() @@ -425,8 +416,4 @@ mod tests { _ => {} } } - - fn test_uuid() -> uuid::Uuid { - uuid::Uuid::nil() - } } diff --git a/src/plugins/ps.rs b/src/plugins/ps.rs index 1ae9938d34..2db73d395a 100644 --- a/src/plugins/ps.rs +++ b/src/plugins/ps.rs @@ -40,7 +40,7 @@ async fn ps(tag: Tag) -> Vec> { let mut output = vec![]; while let Some(res) = processes.next().await { if let Ok((process, usage)) = res { - let mut dict = TaggedDictBuilder::new(tag); + let mut dict = TaggedDictBuilder::new(&tag); dict.insert("pid", Value::int(process.pid())); if let Ok(name) = process.name().await { dict.insert("name", Value::string(name)); diff --git a/src/plugins/str.rs b/src/plugins/str.rs index 4635d60c35..60625e7f17 100644 --- a/src/plugins/str.rs +++ b/src/plugins/str.rs @@ -89,14 +89,12 @@ impl Str { impl Str { fn strutils(&self, value: Tagged) -> Result, ShellError> { match value.item { - Value::Primitive(Primitive::String(ref s)) => { - Ok(Tagged::from_item(self.apply(&s)?, value.tag())) - } + Value::Primitive(Primitive::String(ref s)) => Ok(self.apply(&s)?.tagged(value.tag())), Value::Row(_) => match self.field { Some(ref f) => { let replacement = match value.item.get_data_by_column_path(value.tag(), f) { Some(result) => self.strutils(result.map(|x| x.clone()))?, - None => return Ok(Tagged::from_item(Value::nothing(), value.tag)), + None => return Ok(Value::nothing().tagged(value.tag)), }; match value.item.replace_data_at_column_path( value.tag(), @@ -174,7 +172,7 @@ impl Plugin for Str { return Err(ShellError::labeled_error( "Unrecognized type in params", possible_field.type_name(), - possible_field.tag, + &possible_field.tag, )) } } @@ -216,13 +214,12 @@ mod tests { use super::{Action, Str}; 
use indexmap::IndexMap; use nu::{ - CallInfo, EvaluatedArgs, Plugin, Primitive, ReturnSuccess, SourceMap, Tag, Tagged, - TaggedDictBuilder, TaggedItem, Value, + CallInfo, EvaluatedArgs, Plugin, Primitive, ReturnSuccess, Tag, Tagged, TaggedDictBuilder, + TaggedItem, Value, }; use num_bigint::BigInt; struct CallStub { - anchor: uuid::Uuid, positionals: Vec>, flags: IndexMap>, } @@ -230,7 +227,6 @@ mod tests { impl CallStub { fn new() -> CallStub { CallStub { - anchor: uuid::Uuid::nil(), positionals: vec![], flags: indexmap::IndexMap::new(), } @@ -247,19 +243,18 @@ mod tests { fn with_parameter(&mut self, name: &str) -> &mut Self { let fields: Vec> = name .split(".") - .map(|s| Value::string(s.to_string()).tagged(Tag::unknown_span(self.anchor))) + .map(|s| Value::string(s.to_string()).tagged(Tag::unknown())) .collect(); self.positionals - .push(Value::Table(fields).tagged(Tag::unknown_span(self.anchor))); + .push(Value::Table(fields).tagged(Tag::unknown())); self } fn create(&self) -> CallInfo { CallInfo { args: EvaluatedArgs::new(Some(self.positionals.clone()), Some(self.flags.clone())), - source_map: SourceMap::new(), - name_tag: Tag::unknown_span(self.anchor), + name_tag: Tag::unknown(), } } } @@ -271,7 +266,7 @@ mod tests { } fn unstructured_sample_record(value: &str) -> Tagged { - Tagged::from_item(Value::string(value), Tag::unknown()) + Value::string(value).tagged(Tag::unknown()) } #[test] diff --git a/src/plugins/sum.rs b/src/plugins/sum.rs index 2bb89b74e1..d08d45713d 100644 --- a/src/plugins/sum.rs +++ b/src/plugins/sum.rs @@ -21,7 +21,7 @@ impl Sum { tag, }) => { //TODO: handle overflow - self.total = Some(Value::int(i + j).tagged(*tag)); + self.total = Some(Value::int(i + j).tagged(tag)); Ok(()) } None => { @@ -36,7 +36,7 @@ impl Sum { } } Value::Primitive(Primitive::Bytes(b)) => { - match self.total { + match &self.total { Some(Tagged { item: Value::Primitive(Primitive::Bytes(j)), tag, diff --git a/src/plugins/sys.rs b/src/plugins/sys.rs index 
1f86b51d7e..55bf5028bf 100644 --- a/src/plugins/sys.rs +++ b/src/plugins/sys.rs @@ -80,7 +80,7 @@ async fn mem(tag: Tag) -> Tagged { } async fn host(tag: Tag) -> Tagged { - let mut dict = TaggedDictBuilder::with_capacity(tag, 6); + let mut dict = TaggedDictBuilder::with_capacity(&tag, 6); let (platform_result, uptime_result) = futures::future::join(host::platform(), host::uptime()).await; @@ -95,7 +95,7 @@ async fn host(tag: Tag) -> Tagged { // Uptime if let Ok(uptime) = uptime_result { - let mut uptime_dict = TaggedDictBuilder::with_capacity(tag, 4); + let mut uptime_dict = TaggedDictBuilder::with_capacity(&tag, 4); let uptime = uptime.get::().round() as i64; let days = uptime / (60 * 60 * 24); @@ -116,7 +116,10 @@ async fn host(tag: Tag) -> Tagged { let mut user_vec = vec![]; while let Some(user) = users.next().await { if let Ok(user) = user { - user_vec.push(Tagged::from_item(Value::string(user.username()), tag)); + user_vec.push(Tagged { + item: Value::string(user.username()), + tag: tag.clone(), + }); } } let user_list = Value::Table(user_vec); @@ -130,7 +133,7 @@ async fn disks(tag: Tag) -> Option { let mut partitions = disk::partitions_physical(); while let Some(part) = partitions.next().await { if let Ok(part) = part { - let mut dict = TaggedDictBuilder::with_capacity(tag, 6); + let mut dict = TaggedDictBuilder::with_capacity(&tag, 6); dict.insert( "device", Value::string( @@ -176,7 +179,7 @@ async fn battery(tag: Tag) -> Option { if let Ok(batteries) = manager.batteries() { for battery in batteries { if let Ok(battery) = battery { - let mut dict = TaggedDictBuilder::new(tag); + let mut dict = TaggedDictBuilder::new(&tag); if let Some(vendor) = battery.vendor() { dict.insert("vendor", Value::string(vendor)); } @@ -217,7 +220,7 @@ async fn temp(tag: Tag) -> Option { let mut sensors = sensors::temperatures(); while let Some(sensor) = sensors.next().await { if let Ok(sensor) = sensor { - let mut dict = TaggedDictBuilder::new(tag); + let mut dict = 
TaggedDictBuilder::new(&tag); dict.insert("unit", Value::string(sensor.unit())); if let Some(label) = sensor.label() { dict.insert("label", Value::string(label)); @@ -259,7 +262,7 @@ async fn net(tag: Tag) -> Option { let mut io_counters = net::io_counters(); while let Some(nic) = io_counters.next().await { if let Ok(nic) = nic { - let mut network_idx = TaggedDictBuilder::with_capacity(tag, 3); + let mut network_idx = TaggedDictBuilder::with_capacity(&tag, 3); network_idx.insert("name", Value::string(nic.interface())); network_idx.insert( "sent", @@ -280,11 +283,17 @@ async fn net(tag: Tag) -> Option { } async fn sysinfo(tag: Tag) -> Vec> { - let mut sysinfo = TaggedDictBuilder::with_capacity(tag, 7); + let mut sysinfo = TaggedDictBuilder::with_capacity(&tag, 7); - let (host, cpu, disks, memory, temp) = - futures::future::join5(host(tag), cpu(tag), disks(tag), mem(tag), temp(tag)).await; - let (net, battery) = futures::future::join(net(tag), battery(tag)).await; + let (host, cpu, disks, memory, temp) = futures::future::join5( + host(tag.clone()), + cpu(tag.clone()), + disks(tag.clone()), + mem(tag.clone()), + temp(tag.clone()), + ) + .await; + let (net, battery) = futures::future::join(net(tag.clone()), battery(tag.clone())).await; sysinfo.insert_tagged("host", host); if let Some(cpu) = cpu { diff --git a/src/plugins/textview.rs b/src/plugins/textview.rs index cce8bd7084..88507183e0 100644 --- a/src/plugins/textview.rs +++ b/src/plugins/textview.rs @@ -1,8 +1,7 @@ use crossterm::{cursor, terminal, RawScreen}; use crossterm::{InputEvent, KeyEvent}; use nu::{ - serve_plugin, AnchorLocation, CallInfo, Plugin, Primitive, ShellError, Signature, SourceMap, - Tagged, Value, + serve_plugin, AnchorLocation, CallInfo, Plugin, Primitive, ShellError, Signature, Tagged, Value, }; use syntect::easy::HighlightLines; @@ -29,8 +28,8 @@ impl Plugin for TextView { Ok(Signature::build("textview").desc("Autoview of text data.")) } - fn sink(&mut self, call_info: CallInfo, input: Vec>) 
{ - view_text_value(&input[0], &call_info.source_map); + fn sink(&mut self, _call_info: CallInfo, input: Vec>) { + view_text_value(&input[0]); } } @@ -215,20 +214,18 @@ fn scroll_view(s: &str) { scroll_view_lines_if_needed(v, false); } -fn view_text_value(value: &Tagged, source_map: &SourceMap) { +fn view_text_value(value: &Tagged) { let value_anchor = value.anchor(); match value.item { Value::Primitive(Primitive::String(ref s)) => { - let source = source_map.get(&value_anchor); - - if let Some(source) = source { + if let Some(source) = value_anchor { let extension: Option = match source { AnchorLocation::File(file) => { - let path = Path::new(file); + let path = Path::new(&file); path.extension().map(|x| x.to_string_lossy().to_string()) } AnchorLocation::Url(url) => { - let url = url::Url::parse(url); + let url = url::Url::parse(&url); if let Ok(url) = url { let url = url.clone(); if let Some(mut segments) = url.path_segments() { diff --git a/src/prelude.rs b/src/prelude.rs index 1f80126a4f..4b12a07bda 100644 --- a/src/prelude.rs +++ b/src/prelude.rs @@ -66,7 +66,7 @@ pub(crate) use crate::commands::RawCommandArgs; pub(crate) use crate::context::CommandRegistry; pub(crate) use crate::context::{AnchorLocation, Context}; pub(crate) use crate::data::base as value; -pub(crate) use crate::data::meta::{Tag, Tagged, TaggedItem}; +pub(crate) use crate::data::meta::{Span, Spanned, SpannedItem, Tag, Tagged, TaggedItem}; pub(crate) use crate::data::types::ExtractType; pub(crate) use crate::data::{Primitive, Value}; pub(crate) use crate::env::host::handle_unexpected; @@ -109,6 +109,22 @@ where } } +pub trait ToInputStream { + fn to_input_stream(self) -> InputStream; +} + +impl ToInputStream for T +where + T: Stream + Send + 'static, + U: Into, ShellError>>, +{ + fn to_input_stream(self) -> InputStream { + InputStream { + values: self.map(|item| item.into().unwrap()).boxed(), + } + } +} + pub trait ToOutputStream { fn to_output_stream(self) -> OutputStream; } diff --git 
a/src/shell/filesystem_shell.rs b/src/shell/filesystem_shell.rs index aec736ec0f..72a0c241f3 100644 --- a/src/shell/filesystem_shell.rs +++ b/src/shell/filesystem_shell.rs @@ -3,7 +3,6 @@ use crate::commands::cp::CopyArgs; use crate::commands::mkdir::MkdirArgs; use crate::commands::mv::MoveArgs; use crate::commands::rm::RemoveArgs; -use crate::context::SourceMap; use crate::data::dir_entry_dict; use crate::prelude::*; use crate::shell::completer::NuCompleter; @@ -12,6 +11,7 @@ use crate::utils::FileStructure; use rustyline::completion::FilenameCompleter; use rustyline::hint::{Hinter, HistoryHinter}; use std::path::{Path, PathBuf}; +use std::sync::atomic::Ordering; pub struct FilesystemShell { pub(crate) path: String, @@ -73,7 +73,7 @@ impl FilesystemShell { } impl Shell for FilesystemShell { - fn name(&self, _source_map: &SourceMap) -> String { + fn name(&self) -> String { "filesystem".to_string() } @@ -84,7 +84,7 @@ impl Shell for FilesystemShell { fn ls( &self, pattern: Option>, - command_tag: Tag, + context: &RunnableContext, ) -> Result { let cwd = self.path(); let mut full_path = PathBuf::from(self.path()); @@ -94,7 +94,8 @@ impl Shell for FilesystemShell { _ => {} } - let mut shell_entries = VecDeque::new(); + let ctrl_c = context.ctrl_c.clone(); + let name_tag = context.name.clone(); //If it's not a glob, try to display the contents of the entry if it's a directory let lossy_path = full_path.to_string_lossy(); @@ -114,24 +115,30 @@ impl Shell for FilesystemShell { return Err(ShellError::labeled_error( e.to_string(), e.to_string(), - command_tag, + name_tag, )); } } Ok(o) => o, }; - for entry in entries { - let entry = entry?; - let filepath = entry.path(); - let filename = if let Ok(fname) = filepath.strip_prefix(&cwd) { - fname - } else { - Path::new(&filepath) - }; - let value = dir_entry_dict(filename, &entry.metadata()?, command_tag)?; - shell_entries.push_back(ReturnSuccess::value(value)) - } - return Ok(shell_entries.to_output_stream()); + let stream = 
async_stream! { + for entry in entries { + if ctrl_c.load(Ordering::SeqCst) { + break; + } + if let Ok(entry) = entry { + let filepath = entry.path(); + let filename = if let Ok(fname) = filepath.strip_prefix(&cwd) { + fname + } else { + Path::new(&filepath) + }; + let value = dir_entry_dict(filename, &entry.metadata().unwrap(), &name_tag)?; + yield ReturnSuccess::value(value); + } + } + }; + return Ok(stream.to_output_stream()); } } @@ -151,20 +158,25 @@ impl Shell for FilesystemShell { }; // Enumerate the entries from the glob and add each - for entry in entries { - if let Ok(entry) = entry { - let filename = if let Ok(fname) = entry.strip_prefix(&cwd) { - fname - } else { - Path::new(&entry) - }; - let metadata = std::fs::metadata(&entry)?; - let value = dir_entry_dict(filename, &metadata, command_tag)?; - shell_entries.push_back(ReturnSuccess::value(value)) + let stream = async_stream! { + for entry in entries { + if ctrl_c.load(Ordering::SeqCst) { + break; + } + if let Ok(entry) = entry { + let filename = if let Ok(fname) = entry.strip_prefix(&cwd) { + fname + } else { + Path::new(&entry) + }; + let metadata = std::fs::metadata(&entry).unwrap(); + if let Ok(value) = dir_entry_dict(filename, &metadata, &name_tag) { + yield ReturnSuccess::value(value); + } + } } - } - - Ok(shell_entries.to_output_stream()) + }; + Ok(stream.to_output_stream()) } fn cd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result { @@ -175,7 +187,7 @@ impl Shell for FilesystemShell { return Err(ShellError::labeled_error( "Can not change to home directory", "can not go to home", - args.call_info.name_tag, + &args.call_info.name_tag, )) } }, @@ -957,7 +969,7 @@ impl Shell for FilesystemShell { return Err(ShellError::labeled_error( "unable to show current directory", "pwd command failed", - args.call_info.name_tag, + &args.call_info.name_tag, )); } }; @@ -965,7 +977,7 @@ impl Shell for FilesystemShell { let mut stream = VecDeque::new(); stream.push_back(ReturnSuccess::value( 
Value::Primitive(Primitive::String(p.to_string_lossy().to_string())) - .tagged(args.call_info.name_tag), + .tagged(&args.call_info.name_tag), )); Ok(stream.into()) diff --git a/src/shell/help_shell.rs b/src/shell/help_shell.rs index 0fedd9ad79..7c0e74bde4 100644 --- a/src/shell/help_shell.rs +++ b/src/shell/help_shell.rs @@ -3,7 +3,6 @@ use crate::commands::cp::CopyArgs; use crate::commands::mkdir::MkdirArgs; use crate::commands::mv::MoveArgs; use crate::commands::rm::RemoveArgs; -use crate::context::SourceMap; use crate::data::{command_dict, TaggedDictBuilder}; use crate::prelude::*; use crate::shell::shell::Shell; @@ -98,8 +97,8 @@ impl HelpShell { } impl Shell for HelpShell { - fn name(&self, source_map: &SourceMap) -> String { - let anchor_name = self.value.anchor_name(source_map); + fn name(&self) -> String { + let anchor_name = self.value.anchor_name(); format!( "{}", match anchor_name { @@ -129,7 +128,7 @@ impl Shell for HelpShell { fn ls( &self, _pattern: Option>, - _command_tag: Tag, + _context: &RunnableContext, ) -> Result { Ok(self .commands() diff --git a/src/shell/helper.rs b/src/shell/helper.rs index b590d82826..dc3ab96dc1 100644 --- a/src/shell/helper.rs +++ b/src/shell/helper.rs @@ -3,7 +3,7 @@ use crate::parser::hir::syntax_shape::{color_fallible_syntax, FlatShape, Pipelin use crate::parser::hir::TokensIterator; use crate::parser::nom_input; use crate::parser::parse::token_tree::TokenNode; -use crate::{Tag, Tagged, TaggedItem, Text}; +use crate::{Span, Spanned, SpannedItem, Tag, Tagged, Text}; use ansi_term::Color; use log::trace; use rustyline::completion::Completer; @@ -67,7 +67,7 @@ impl Highlighter for Helper { } fn highlight<'l>(&self, line: &'l str, _pos: usize) -> Cow<'l, str> { - let tokens = crate::parser::pipeline(nom_input(line, uuid::Uuid::nil())); + let tokens = crate::parser::pipeline(nom_input(line)); match tokens { Err(_) => Cow::Borrowed(line), @@ -78,13 +78,13 @@ impl Highlighter for Helper { Ok(v) => v, }; - let tokens = 
vec![TokenNode::Pipeline(pipeline.clone().tagged(v.tag()))]; - let mut tokens = TokensIterator::all(&tokens[..], v.tag()); + let tokens = vec![TokenNode::Pipeline(pipeline.clone().spanned(v.span()))]; + let mut tokens = TokensIterator::all(&tokens[..], v.span()); let text = Text::from(line); let expand_context = self .context - .expand_context(&text, Tag::from((0, line.len() - 1, uuid::Uuid::nil()))); + .expand_context(&text, Span::new(0, line.len() - 1)); let mut shapes = vec![]; // We just constructed a token list that only contains a pipeline, so it can't fail @@ -126,16 +126,16 @@ impl Highlighter for Helper { #[allow(unused)] fn vec_tag(input: Vec>) -> Option { let mut iter = input.iter(); - let first = iter.next()?.tag; + let first = iter.next()?.tag.clone(); let last = iter.last(); Some(match last { None => first, - Some(last) => first.until(last.tag), + Some(last) => first.until(&last.tag), }) } -fn paint_flat_shape(flat_shape: Tagged, line: &str) -> String { +fn paint_flat_shape(flat_shape: Spanned, line: &str) -> String { let style = match &flat_shape.item { FlatShape::OpenDelimiter(_) => Color::White.normal(), FlatShape::CloseDelimiter(_) => Color::White.normal(), @@ -170,7 +170,7 @@ fn paint_flat_shape(flat_shape: Tagged, line: &str) -> String { } }; - let body = flat_shape.tag.slice(line); + let body = flat_shape.span.slice(line); style.paint(body).to_string() } diff --git a/src/shell/shell.rs b/src/shell/shell.rs index c567e474a3..507fc0517b 100644 --- a/src/shell/shell.rs +++ b/src/shell/shell.rs @@ -3,20 +3,19 @@ use crate::commands::cp::CopyArgs; use crate::commands::mkdir::MkdirArgs; use crate::commands::mv::MoveArgs; use crate::commands::rm::RemoveArgs; -use crate::context::SourceMap; use crate::errors::ShellError; use crate::prelude::*; use crate::stream::OutputStream; use std::path::PathBuf; pub trait Shell: std::fmt::Debug { - fn name(&self, source_map: &SourceMap) -> String; + fn name(&self) -> String; fn homedir(&self) -> Option; fn ls( 
&self, pattern: Option>, - command_tag: Tag, + context: &RunnableContext, ) -> Result; fn cd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result; fn cp(&self, args: CopyArgs, name: Tag, path: &str) -> Result; diff --git a/src/shell/shell_manager.rs b/src/shell/shell_manager.rs index c4c42367ed..149fdd58d1 100644 --- a/src/shell/shell_manager.rs +++ b/src/shell/shell_manager.rs @@ -10,18 +10,19 @@ use crate::shell::shell::Shell; use crate::stream::OutputStream; use std::error::Error; use std::path::PathBuf; +use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::{Arc, Mutex}; #[derive(Clone, Debug)] pub struct ShellManager { - pub(crate) current_shell: usize, + pub(crate) current_shell: Arc, pub(crate) shells: Arc>>>, } impl ShellManager { pub fn basic(commands: CommandRegistry) -> Result> { Ok(ShellManager { - current_shell: 0, + current_shell: Arc::new(AtomicUsize::new(0)), shells: Arc::new(Mutex::new(vec![Box::new(FilesystemShell::basic( commands, )?)])), @@ -30,24 +31,29 @@ impl ShellManager { pub fn insert_at_current(&mut self, shell: Box) { self.shells.lock().unwrap().push(shell); - self.current_shell = self.shells.lock().unwrap().len() - 1; + self.current_shell + .store(self.shells.lock().unwrap().len() - 1, Ordering::SeqCst); self.set_path(self.path()); } + pub fn current_shell(&self) -> usize { + self.current_shell.load(Ordering::SeqCst) + } + pub fn remove_at_current(&mut self) { { let mut shells = self.shells.lock().unwrap(); if shells.len() > 0 { - if self.current_shell == shells.len() - 1 { + if self.current_shell() == shells.len() - 1 { shells.pop(); let new_len = shells.len(); if new_len > 0 { - self.current_shell = new_len - 1; + self.current_shell.store(new_len - 1, Ordering::SeqCst); } else { return; } } else { - shells.remove(self.current_shell); + shells.remove(self.current_shell()); } } } @@ -59,17 +65,17 @@ impl ShellManager { } pub fn path(&self) -> String { - self.shells.lock().unwrap()[self.current_shell].path() + 
self.shells.lock().unwrap()[self.current_shell()].path() } pub fn pwd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result { let env = self.shells.lock().unwrap(); - env[self.current_shell].pwd(args) + env[self.current_shell()].pwd(args) } pub fn set_path(&mut self, path: String) { - self.shells.lock().unwrap()[self.current_shell].set_path(path) + self.shells.lock().unwrap()[self.current_shell()].set_path(path) } pub fn complete( @@ -78,20 +84,21 @@ impl ShellManager { pos: usize, ctx: &rustyline::Context<'_>, ) -> Result<(usize, Vec), rustyline::error::ReadlineError> { - self.shells.lock().unwrap()[self.current_shell].complete(line, pos, ctx) + self.shells.lock().unwrap()[self.current_shell()].complete(line, pos, ctx) } pub fn hint(&self, line: &str, pos: usize, ctx: &rustyline::Context<'_>) -> Option { - self.shells.lock().unwrap()[self.current_shell].hint(line, pos, ctx) + self.shells.lock().unwrap()[self.current_shell()].hint(line, pos, ctx) } pub fn next(&mut self) { { let shell_len = self.shells.lock().unwrap().len(); - if self.current_shell == (shell_len - 1) { - self.current_shell = 0; + if self.current_shell() == (shell_len - 1) { + self.current_shell.store(0, Ordering::SeqCst); } else { - self.current_shell += 1; + self.current_shell + .store(self.current_shell() + 1, Ordering::SeqCst); } } self.set_path(self.path()); @@ -100,10 +107,11 @@ impl ShellManager { pub fn prev(&mut self) { { let shell_len = self.shells.lock().unwrap().len(); - if self.current_shell == 0 { - self.current_shell = shell_len - 1; + if self.current_shell() == 0 { + self.current_shell.store(shell_len - 1, Ordering::SeqCst); } else { - self.current_shell -= 1; + self.current_shell + .store(self.current_shell() - 1, Ordering::SeqCst); } } self.set_path(self.path()); @@ -112,23 +120,23 @@ impl ShellManager { pub fn homedir(&self) -> Option { let env = self.shells.lock().unwrap(); - env[self.current_shell].homedir() + env[self.current_shell()].homedir() } pub fn ls( &self, path: 
Option>, - command_tag: Tag, + context: &RunnableContext, ) -> Result { let env = self.shells.lock().unwrap(); - env[self.current_shell].ls(path, command_tag) + env[self.current_shell()].ls(path, context) } pub fn cd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result { let env = self.shells.lock().unwrap(); - env[self.current_shell].cd(args) + env[self.current_shell()].cd(args) } pub fn cp( @@ -140,13 +148,13 @@ impl ShellManager { match env { Ok(x) => { - let path = x[self.current_shell].path(); - x[self.current_shell].cp(args, context.name, &path) + let path = x[self.current_shell()].path(); + x[self.current_shell()].cp(args, context.name.clone(), &path) } Err(e) => Err(ShellError::labeled_error( format!("Internal error: could not lock {}", e), "Internal error: could not lock", - context.name, + &context.name, )), } } @@ -160,13 +168,13 @@ impl ShellManager { match env { Ok(x) => { - let path = x[self.current_shell].path(); - x[self.current_shell].rm(args, context.name, &path) + let path = x[self.current_shell()].path(); + x[self.current_shell()].rm(args, context.name.clone(), &path) } Err(e) => Err(ShellError::labeled_error( format!("Internal error: could not lock {}", e), "Internal error: could not lock", - context.name, + &context.name, )), } } @@ -180,13 +188,13 @@ impl ShellManager { match env { Ok(x) => { - let path = x[self.current_shell].path(); - x[self.current_shell].mkdir(args, context.name, &path) + let path = x[self.current_shell()].path(); + x[self.current_shell()].mkdir(args, context.name.clone(), &path) } Err(e) => Err(ShellError::labeled_error( format!("Internal error: could not lock {}", e), "Internal error: could not lock", - context.name, + &context.name, )), } } @@ -200,13 +208,13 @@ impl ShellManager { match env { Ok(x) => { - let path = x[self.current_shell].path(); - x[self.current_shell].mv(args, context.name, &path) + let path = x[self.current_shell()].path(); + x[self.current_shell()].mv(args, context.name.clone(), &path) } Err(e) 
=> Err(ShellError::labeled_error( format!("Internal error: could not lock {}", e), "Internal error: could not lock", - context.name, + &context.name, )), } } diff --git a/src/shell/value_shell.rs b/src/shell/value_shell.rs index d95d07cb97..0aa9e341bb 100644 --- a/src/shell/value_shell.rs +++ b/src/shell/value_shell.rs @@ -3,7 +3,6 @@ use crate::commands::cp::CopyArgs; use crate::commands::mkdir::MkdirArgs; use crate::commands::mv::MoveArgs; use crate::commands::rm::RemoveArgs; -use crate::context::SourceMap; use crate::prelude::*; use crate::shell::shell::Shell; use crate::utils::ValueStructure; @@ -72,8 +71,8 @@ impl ValueShell { } impl Shell for ValueShell { - fn name(&self, source_map: &SourceMap) -> String { - let anchor_name = self.value.anchor_name(source_map); + fn name(&self) -> String { + let anchor_name = self.value.anchor_name(); format!( "{}", match anchor_name { @@ -90,9 +89,10 @@ impl Shell for ValueShell { fn ls( &self, target: Option>, - command_name: Tag, + context: &RunnableContext, ) -> Result { let mut full_path = PathBuf::from(self.path()); + let name_tag = context.name.clone(); match &target { Some(value) => full_path.push(value.as_ref()), @@ -114,7 +114,7 @@ impl Shell for ValueShell { return Err(ShellError::labeled_error( "Can not list entries inside", "No such path exists", - command_name, + name_tag, )); } @@ -166,7 +166,7 @@ impl Shell for ValueShell { return Err(ShellError::labeled_error( "Can not change to path inside", "No such path exists", - args.call_info.name_tag, + &args.call_info.name_tag, )); } @@ -213,10 +213,9 @@ impl Shell for ValueShell { fn pwd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result { let mut stream = VecDeque::new(); - stream.push_back(ReturnSuccess::value(Tagged::from_item( - Value::string(self.path()), - args.call_info.name_tag, - ))); + stream.push_back(ReturnSuccess::value( + Value::string(self.path()).tagged(&args.call_info.name_tag), + )); Ok(stream.into()) } diff --git a/src/stream.rs 
b/src/stream.rs index 066acb74a1..f6f2d5e2e1 100644 --- a/src/stream.rs +++ b/src/stream.rs @@ -23,6 +23,17 @@ impl InputStream { } } +impl Stream for InputStream { + type Item = Tagged; + + fn poll_next( + mut self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> core::task::Poll> { + Stream::poll_next(std::pin::Pin::new(&mut self.values), cx) + } +} + impl From>> for InputStream { fn from(input: BoxStream<'static, Tagged>) -> InputStream { InputStream { values: input } diff --git a/tests/command_config_test.rs b/tests/command_config_test.rs index dd0f4e0ebb..8a45be47c5 100644 --- a/tests/command_config_test.rs +++ b/tests/command_config_test.rs @@ -86,30 +86,30 @@ fn sets_configuration_value() { h::delete_file_at(nu::config_path().unwrap().join("test_4.toml")); } -#[test] -fn removes_configuration_value() { - Playground::setup("config_test_5", |dirs, sandbox| { - sandbox.with_files(vec![FileWithContent( - "test_5.toml", - r#" - caballeros = [1, 1, 1] - podershell = [1, 1, 1] - "#, - )]); +// #[test] +// fn removes_configuration_value() { +// Playground::setup("config_test_5", |dirs, sandbox| { +// sandbox.with_files(vec![FileWithContent( +// "test_5.toml", +// r#" +// caballeros = [1, 1, 1] +// podershell = [1, 1, 1] +// "#, +// )]); - nu!( - cwd: dirs.test(), - "config --load test_5.toml --remove podershell" - ); +// nu!( +// cwd: dirs.test(), +// "config --load test_5.toml --remove podershell" +// ); - let actual = nu_error!( - cwd: dirs.root(), - r#"open "{}/test_5.toml" | get podershell | echo $it"#, - dirs.config_path() - ); +// let actual = nu_error!( +// cwd: dirs.root(), +// r#"open "{}/test_5.toml" | get podershell | echo $it"#, +// dirs.config_path() +// ); - assert!(actual.contains("Unknown column")); - }); +// assert!(actual.contains("Unknown column")); +// }); - h::delete_file_at(nu::config_path().unwrap().join("test_5.toml")); -} +// h::delete_file_at(nu::config_path().unwrap().join("test_5.toml")); +// } diff --git 
a/tests/command_open_tests.rs b/tests/command_open_tests.rs index e9047883cf..53e393eef4 100644 --- a/tests/command_open_tests.rs +++ b/tests/command_open_tests.rs @@ -222,7 +222,7 @@ fn open_can_parse_utf16_ini() { fn errors_if_file_not_found() { let actual = nu_error!( cwd: "tests/fixtures/formats", - "open i_dont_exist.txt | echo $it" + "open i_dont_exist.txt" ); assert!(actual.contains("File could not be opened")); From 2716bb020f537470511f1036b1ef95c029a455d7 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Sun, 13 Oct 2019 17:53:58 +1300 Subject: [PATCH 017/184] Fix #811 (#813) --- Cargo.lock | 1046 ++++++++++++++------------------ Cargo.toml | 4 +- src/parser/parse/token_tree.rs | 3 +- 3 files changed, 474 insertions(+), 579 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 765f42d637..da47189204 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,7 +2,7 @@ # It is not intended for manual editing. [[package]] name = "adler32" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -47,10 +47,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "arrayvec" -version = "0.4.11" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "nodrop 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", + "nodrop 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -67,9 +67,9 @@ name = "async-stream-impl" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] 
@@ -77,24 +77,24 @@ name = "atty" version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "autocfg" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "backtrace" -version = "0.3.34" +version = "0.3.38" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "backtrace-sys 0.1.31 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-demangle 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -103,7 +103,7 @@ version = "0.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -119,10 +119,10 @@ name = "battery" version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "core-foundation 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", "lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 
0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "nix 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -143,27 +143,27 @@ dependencies = [ [[package]] name = "bincode" -version = "1.1.4" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "bitflags" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "blake2b_simd" -version = "0.5.6" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "arrayref 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", - "arrayvec 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", - "constant_time_eq 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", + "arrayvec 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", + "constant_time_eq 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -180,21 +180,21 @@ dependencies = [ "chrono 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)", "decimal 2.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "linked-hash-map 0.5.2 
(registry+https://github.com/rust-lang/crates.io-index)", "md5 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "bstr" -version = "0.2.6" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "regex-automata 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", @@ -202,12 +202,12 @@ dependencies = [ [[package]] name = "bumpalo" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "byte-unit" -version = "3.0.1" +version = "3.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -226,7 +226,7 @@ version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -234,7 +234,7 @@ name = "c2-chacha" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + 
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "ppv-lite86 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -243,13 +243,13 @@ name = "cc" version = "1.0.45" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "jobserver 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)", + "jobserver 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 1.10.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "cfg-if" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -257,7 +257,7 @@ name = "chrono" version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", @@ -279,7 +279,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", "atty 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "strsim 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "textwrap 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-width 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", @@ -311,7 +311,7 @@ name = "cloudabi" version = "0.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 
(registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -319,19 +319,19 @@ name = "config" version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "nom 4.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "rust-ini 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde-hjson 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "toml 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)", "yaml-rust 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "constant_time_eq" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -340,7 +340,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "core-foundation-sys 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -353,7 +353,7 @@ name = "crc32fast" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -374,8 +374,8 @@ name = "crossbeam-utils" version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" 
dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -409,7 +409,7 @@ dependencies = [ "crossterm_screen 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "crossterm_utils 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", "crossterm_winapi 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -441,7 +441,7 @@ dependencies = [ "crossterm_cursor 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", "crossterm_utils 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", "crossterm_winapi 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -450,7 +450,7 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "crossterm_winapi 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -467,10 +467,10 @@ name = "csv" version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bstr 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", + "bstr 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", "csv-core 0.1.6 
(registry+https://github.com/rust-lang/crates.io-index)", "itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", - "ryu 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "ryu 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -484,11 +484,11 @@ dependencies = [ [[package]] name = "ctor" -version = "0.1.9" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.43 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -502,30 +502,29 @@ dependencies = [ [[package]] name = "curl" -version = "0.4.22" +version = "0.4.25" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "curl-sys 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)", - "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "curl-sys 0.4.23 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "openssl-probe 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.9.49 (registry+https://github.com/rust-lang/crates.io-index)", - "schannel 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.51 (registry+https://github.com/rust-lang/crates.io-index)", + "schannel 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)", "socket2 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.8 
(registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "curl-sys" -version = "0.4.20" +version = "0.4.23" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "libnghttp2-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "libz-sys 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.9.49 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.51 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", "vcpkg 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -536,7 +535,7 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "darwin-libproc-sys 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -545,7 +544,7 @@ name = "darwin-libproc-sys" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -553,9 +552,9 @@ name = "decimal" version = "2.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + 
"bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "ord_subset 3.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", @@ -566,7 +565,7 @@ name = "deflate" version = "0.7.20" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "adler32 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", + "adler32 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -575,9 +574,9 @@ name = "derive-new" version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -590,7 +589,7 @@ name = "directories" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -599,7 +598,7 @@ name = "dirs" version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 
(registry+https://github.com/rust-lang/crates.io-index)", "redox_users 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -609,7 +608,7 @@ name = "dirs" version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "dirs-sys 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -618,8 +617,8 @@ name = "dirs-sys" version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "redox_users 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -636,66 +635,44 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "either" -version = "1.5.2" +version = "1.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "encode_unicode" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "enum-utils" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "enum-utils-from-str 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive_internals 0.24.1 
(registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.43 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "enum-utils-from-str" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "env_logger" version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "atty 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", - "humantime 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "termcolor 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "failure" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "backtrace 0.3.34 (registry+https://github.com/rust-lang/crates.io-index)", - "failure_derive 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "backtrace 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)", + "failure_derive 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "failure_derive" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.43 (registry+https://github.com/rust-lang/crates.io-index)", - "synstructure 0.10.2 
(registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "synstructure 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -715,13 +692,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "flate2" -version = "1.0.9" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "miniz-sys 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", - "miniz_oxide_c_api 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "miniz_oxide 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -736,7 +713,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "futures" -version = "0.1.28" +version = "0.1.29" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -804,7 +781,7 @@ name = "futures-util-preview" version = "0.3.0-alpha.18" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "futures 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", "futures-channel-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", "futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", "futures-io-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", @@ 
-826,11 +803,12 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.1.8" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "wasi 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -838,9 +816,9 @@ name = "getset" version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -848,8 +826,8 @@ name = "git2" version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "libgit2-sys 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -870,33 +848,33 @@ dependencies = [ [[package]] name = "heim" -version = "0.0.8-alpha.1" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "heim-common 0.0.8-alpha.1 
(registry+https://github.com/rust-lang/crates.io-index)", - "heim-cpu 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-disk 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-host 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-memory 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-net 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-process 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.4-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-sensors 0.0.3-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-virt 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-cpu 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-disk 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-host 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-memory 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-net 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-process 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-sensors 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-virt 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-common" -version = "0.0.8-alpha.1" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 
(registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "core-foundation 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", "futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", "futures-util-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "nix 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)", "pin-utils 0.1.0-alpha.4 (registry+https://github.com/rust-lang/crates.io-index)", @@ -906,41 +884,41 @@ dependencies = [ [[package]] name = "heim-cpu" -version = "0.0.8-alpha.1" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.4-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 
(registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-derive" -version = "0.0.8-alpha.1" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-disk" -version = "0.0.8-alpha.1" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "core-foundation 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.4-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", 
"mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "widestring 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -948,66 +926,66 @@ dependencies = [ [[package]] name = "heim-host" -version = "0.0.8-alpha.1" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.4-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "platforms 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "platforms 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-memory" -version = "0.0.8-alpha.1" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - 
"heim-derive 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.4-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-net" -version = "0.0.8-alpha.1" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.4-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "hex 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 
(registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "macaddr 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "nix 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-process" -version = "0.0.8-alpha.1" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "darwin-libproc 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-cpu 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-host 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-net 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.4-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-cpu 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-host 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-net 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.3.2 
(registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "ntapi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1017,35 +995,35 @@ dependencies = [ [[package]] name = "heim-runtime" -version = "0.0.4-alpha.1" +version = "0.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "futures-channel-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "threadpool 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-sensors" -version = "0.0.3-alpha.1" +version = "0.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.4-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-virt" -version = "0.0.8-alpha.1" +version = "0.0.8" source = 
"registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.4-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "raw-cpuid 7.0.3 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1071,7 +1049,7 @@ dependencies = [ [[package]] name = "humantime" -version = "1.2.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1089,11 +1067,11 @@ dependencies = [ [[package]] name = "image" -version = "0.22.2" +version = "0.22.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "jpeg-decoder 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", + "jpeg-decoder 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)", "num-iter 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", "num-rational 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1113,35 +1091,34 @@ name = "inflate" version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "adler32 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", + "adler32 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "iovec" -version = "0.1.2" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - 
"libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "isahc" -version = "0.7.1" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam-channel 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", - "curl 0.4.22 (registry+https://github.com/rust-lang/crates.io-index)", - "curl-sys 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)", + "curl 0.4.25 (registry+https://github.com/rust-lang/crates.io-index)", + "curl-sys 0.4.23 (registry+https://github.com/rust-lang/crates.io-index)", "futures-io-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", "futures-util-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", "http 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "sluice 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", + "sluice 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1149,8 +1126,8 @@ name = "isatty" version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 
(registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1160,7 +1137,7 @@ name = "itertools" version = "0.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "either 1.5.2 (registry+https://github.com/rust-lang/crates.io-index)", + "either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1168,7 +1145,7 @@ name = "itertools" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "either 1.5.2 (registry+https://github.com/rust-lang/crates.io-index)", + "either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1178,17 +1155,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "jobserver" -version = "0.1.16" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "getrandom 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "jpeg-decoder" -version = "0.1.15" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1196,10 +1173,10 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.27" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "wasm-bindgen 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.51 
(registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1221,7 +1198,7 @@ dependencies = [ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "render-tree 0.1.1 (git+https://github.com/wycats/language-reporting)", "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.98 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "termcolor 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1232,7 +1209,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "lazy_static" -version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -1242,19 +1219,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "lexical-core" -version = "0.4.3" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "arrayvec 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", - "ryu 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "stackvector 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", - "static_assertions 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", + "ryu 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "static_assertions 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "libc" -version = "0.2.60" +version = "0.2.62" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -1263,9 +1240,9 @@ version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" 
dependencies = [ "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "libz-sys 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1274,7 +1251,7 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1283,7 +1260,7 @@ version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", "vcpkg 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1293,8 +1270,8 @@ version = "1.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", "vcpkg 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1303,7 +1280,7 @@ name = "line-wrap" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "safemem 0.3.1 
(registry+https://github.com/rust-lang/crates.io-index)", + "safemem 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1325,7 +1302,7 @@ name = "log" version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1346,7 +1323,7 @@ name = "mach" version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1354,7 +1331,7 @@ name = "mach" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1362,7 +1339,7 @@ name = "malloc_buf" version = "0.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1380,7 +1357,7 @@ name = "memchr" version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1394,35 +1371,15 @@ version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "mime 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", - "unicase 2.4.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "miniz-sys" -version = "0.1.12" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "unicase 2.5.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "miniz_oxide" -version = "0.3.1" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "adler32 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "miniz_oxide_c_api" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "miniz_oxide 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "adler32 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1435,12 +1392,12 @@ name = "neso" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bincode 1.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "bincode 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.98 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1448,10 +1405,10 
@@ name = "nix" version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1460,16 +1417,16 @@ name = "nix" version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "nodrop" -version = "0.1.13" +version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -1483,10 +1440,10 @@ dependencies = [ [[package]] name = "nom" -version = "5.0.0" +version = "5.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lexical-core 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", + "lexical-core 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", 
"version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1496,7 +1453,7 @@ name = "nom-tracable" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "nom 5.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "nom 5.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "nom-tracable-macros 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "nom_locate 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1507,7 +1464,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1517,7 +1474,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytecount 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "nom 5.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "nom 5.0.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1539,7 +1496,7 @@ dependencies = [ "battery 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", "bigdecimal 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "bson 0.14.0 (registry+https://github.com/rust-lang/crates.io-index)", - "byte-unit 3.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "byte-unit 3.0.3 (registry+https://github.com/rust-lang/crates.io-index)", "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", "chrono 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)", "chrono-humanize 0.0.11 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1551,16 +1508,15 @@ dependencies = [ "derive-new 0.5.8 
(registry+https://github.com/rust-lang/crates.io-index)", "dirs 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "dunce 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "enum-utils 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "futures-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", "futures-timer 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "futures_codec 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "getset 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", "git2 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)", "glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "heim 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", + "heim 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", "hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "image 0.22.2 (registry+https://github.com/rust-lang/crates.io-index)", + "image 0.22.3 (registry+https://github.com/rust-lang/crates.io-index)", "indexmap 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "language-reporting 0.3.1 (git+https://github.com/wycats/language-reporting)", @@ -1568,21 +1524,21 @@ dependencies = [ "mime 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "natural 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "neso 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", - "nom 5.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "nom 5.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "nom-tracable 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "nom_locate 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "num-bigint 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 
(registry+https://github.com/rust-lang/crates.io-index)", "onig_sys 69.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "pin-utils 0.1.0-alpha.4 (registry+https://github.com/rust-lang/crates.io-index)", - "pretty-hex 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "pretty-hex 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "pretty_assertions 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", "pretty_env_logger 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "prettytable-rs 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "ptree 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "rawkey 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "roxmltree 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "roxmltree 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "rusqlite 0.20.0 (registry+https://github.com/rust-lang/crates.io-index)", "rustyline 5.0.3 (registry+https://github.com/rust-lang/crates.io-index)", "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1590,9 +1546,9 @@ dependencies = [ "serde-hjson 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde_bytes 0.11.2 (registry+https://github.com/rust-lang/crates.io-index)", "serde_ini 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "serde_urlencoded 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_yaml 0.8.9 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_yaml 0.8.11 (registry+https://github.com/rust-lang/crates.io-index)", "shellexpand 1.0.0 
(registry+https://github.com/rust-lang/crates.io-index)", "sublime_fuzzy 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", "subprocess 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1612,7 +1568,7 @@ name = "num-bigint" version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1623,7 +1579,7 @@ name = "num-integer" version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1632,7 +1588,7 @@ name = "num-iter" version = "0.1.39" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1642,7 +1598,7 @@ name = "num-rational" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", 
] @@ -1660,7 +1616,7 @@ name = "num-traits" version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1668,7 +1624,7 @@ name = "num_cpus" version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1708,12 +1664,12 @@ dependencies = [ [[package]] name = "onig" -version = "4.3.2" +version = "4.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "onig_sys 69.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1723,7 +1679,7 @@ version = "69.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1733,13 +1689,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "openssl-sys" -version = "0.9.49" +version = "0.9.51" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.5 
(registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", "vcpkg 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1790,12 +1746,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "pkg-config" -version = "0.3.15" +version = "0.3.16" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "platforms" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -1805,7 +1761,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)", "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "humantime 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "line-wrap 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "xml-rs 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1816,7 +1772,7 @@ name = "png" version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "deflate 0.7.20 
(registry+https://github.com/rust-lang/crates.io-index)", "inflate 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1829,7 +1785,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "pretty-hex" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -1838,7 +1794,7 @@ version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", - "ctor 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "ctor 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", "difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "output_vt100 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1860,23 +1816,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "atty 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", "csv 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "encode_unicode 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "encode_unicode 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "term 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-width 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "proc-macro2" -version = "0.4.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "proc-macro2" -version = "1.0.1" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "unicode-xid 0.2.0 
(registry+https://github.com/rust-lang/crates.io-index)", @@ -1894,7 +1842,7 @@ dependencies = [ "petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde-value 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.98 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "tint 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1903,31 +1851,23 @@ name = "quick-error" version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "quote" -version = "0.6.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "quote" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "rand" -version = "0.7.0" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "getrandom 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "getrandom 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", "rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1937,7 +1877,7 @@ version = "0.2.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "c2-chacha 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1955,10 +1895,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "rand_core" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "getrandom 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", + "getrandom 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1966,7 +1906,7 @@ name = "rand_hc" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "rand_core 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1976,7 +1916,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", "fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1987,7 +1927,7 @@ name = "raw-cpuid" version = "7.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "cc 1.0.45 
(registry+https://github.com/rust-lang/crates.io-index)", "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2026,7 +1966,7 @@ name = "redox_users" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "rust-argon2 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2034,12 +1974,12 @@ dependencies = [ [[package]] name = "regex" -version = "1.2.1" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "regex-syntax 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)", + "regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)", "thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2053,7 +1993,7 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.6.11" +version = "0.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -2081,10 +2021,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "roxmltree" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "xmlparser 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", + "xmlparser 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2092,7 +2032,7 @@ name = "rusqlite" version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 
(registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "fallible-iterator 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "fallible-streaming-iterator 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", "libsqlite3-sys 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2107,7 +2047,7 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)", - "blake2b_simd 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)", + "blake2b_simd 0.5.8 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2118,7 +2058,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "rustc-demangle" -version = "0.1.15" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -2140,7 +2080,7 @@ version = "5.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "dirs 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "nix 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2152,12 +2092,12 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "safemem" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -2170,10 +2110,10 @@ dependencies = [ [[package]] name = 
"schannel" -version = "0.1.15" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2200,7 +2140,7 @@ name = "serde" version = "1.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde_derive 1.0.98 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2211,7 +2151,7 @@ dependencies = [ "lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", "linked-hash-map 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde 0.8.23 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2220,10 +2160,10 @@ name = "serde-hjson" version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "linked-hash-map 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde 0.8.23 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2246,21 +2186,12 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.98" +version = "1.0.101" source = 
"registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.43 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "serde_derive_internals" -version = "0.24.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.43 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2275,12 +2206,12 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.40" +version = "1.0.41" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "indexmap 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", - "ryu 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "ryu 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2305,7 +2236,7 @@ dependencies = [ [[package]] name = "serde_yaml" -version = "0.8.9" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "dtoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2335,7 +2266,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "sluice" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "futures-channel-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", @@ 
-2353,8 +2284,8 @@ name = "socket2" version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2364,18 +2295,9 @@ name = "sourcefile" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "stackvector" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", - "unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "static_assertions" -version = "0.3.3" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -2394,7 +2316,7 @@ version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "crossbeam-utils 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2405,49 +2327,39 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "futures-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", "http 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", - "isahc 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", - "js-sys 0.3.27 
(registry+https://github.com/rust-lang/crates.io-index)", + "isahc 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", + "js-sys 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "mime 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "mime_guess 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "serde_urlencoded 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", "url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-futures 0.3.25 (registry+https://github.com/rust-lang/crates.io-index)", - "web-sys 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-futures 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)", + "web-sys 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "syn" -version = "0.15.43" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", - "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "syn" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 
(registry+https://github.com/rust-lang/crates.io-index)", "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "synstructure" -version = "0.10.2" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.43 (registry+https://github.com/rust-lang/crates.io-index)", - "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2455,18 +2367,18 @@ name = "syntect" version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bincode 1.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "flate2 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)", + "bincode 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "flate2 1.0.12 (registry+https://github.com/rust-lang/crates.io-index)", "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "onig 4.3.2 (registry+https://github.com/rust-lang/crates.io-index)", + "onig 4.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "plist 0.4.2 
(registry+https://github.com/rust-lang/crates.io-index)", - "regex-syntax 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)", + "regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.98 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "walkdir 2.2.9 (registry+https://github.com/rust-lang/crates.io-index)", "yaml-rust 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2476,9 +2388,9 @@ name = "tempfile" version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2500,7 +2412,7 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ 
-2509,7 +2421,7 @@ name = "termcolor" version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "wincolor 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "wincolor 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2526,7 +2438,7 @@ name = "thread_local" version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2542,7 +2454,7 @@ name = "time" version = "0.1.42" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2561,7 +2473,7 @@ version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2583,12 +2495,12 @@ dependencies = [ [[package]] name = "typenum" -version = "1.10.0" +version = "1.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "unicase" -version = "2.4.0" +version = "2.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2620,31 +2532,18 @@ name = "unicode-width" version = "0.1.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "unicode-xid" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - [[package]] name = "unicode-xid" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "unreachable" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "uom" version = "0.23.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)", + "typenum 1.11.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2654,7 +2553,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "num-rational 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)", + "typenum 1.11.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2711,93 +2610,100 @@ dependencies = [ "winapi-util 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "wasi" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "wasm-bindgen" -version = "0.2.50" +version = "0.2.51" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-macro 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-macro 0.2.51 
(registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.50" +version = "0.2.51" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bumpalo 2.5.0 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bumpalo 2.6.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-shared 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-shared 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "wasm-bindgen-futures" -version = "0.3.25" +version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "futures 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", "futures-channel-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", "futures-util-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "js-sys 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "js-sys 0.3.28 
(registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", + "web-sys 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "wasm-bindgen-macro" -version = "0.2.50" +version = "0.2.51" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-macro-support 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-macro-support 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.50" +version = "0.2.51" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-backend 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-shared 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-backend 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-shared 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.50" +version = "0.2.51" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "wasm-bindgen-webidl" -version = "0.2.50" +version = "0.2.51" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "failure 0.1.5 
(registry+https://github.com/rust-lang/crates.io-index)", + "failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-backend 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-backend 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", "weedle 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "web-sys" -version = "0.3.27" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "js-sys 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)", + "failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", + "js-sys 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)", "sourcefile 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-webidl 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-webidl 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2813,8 +2719,8 @@ name = "which" version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 
0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2861,7 +2767,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "wincolor" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2873,8 +2779,8 @@ name = "x11" version = "2.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2890,7 +2796,7 @@ name = "xcb" version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2906,7 +2812,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "xmlparser" -version = "0.9.0" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -2918,36 +2824,36 @@ dependencies = [ ] [metadata] -"checksum adler32 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7e522997b529f05601e05166c07ed17789691f562762c7f3b987263d2dedee5c" +"checksum adler32 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "5d2e7343e7fc9de883d1b0341e0b13970f764c14101234857d2ddafa1cb1cac2" "checksum aho-corasick 0.7.6 
(registry+https://github.com/rust-lang/crates.io-index)" = "58fb5e95d83b38284460a5fda7d6470aa0b8844d283a0b614b8535e880800d2d" "checksum ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b" "checksum ansi_term 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" "checksum app_dirs 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e73a24bad9bd6a94d6395382a6c69fe071708ae4409f763c5475e14ee896313d" "checksum arrayref 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "0d382e583f07208808f6b1249e60848879ba3543f57c32277bf52d69c2f0f0ee" -"checksum arrayvec 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)" = "b8d73f9beda665eaa98ab9e4f7442bd4e7de6652587de55b2525e52e29c1b0ba" +"checksum arrayvec 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)" = "cd9fd44efafa8690358b7408d253adf110036b88f55672a933f01d616ad9b1b9" "checksum async-stream 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "650be9b667e47506c42ee53034fb1935443cb2447a3a5c0a75e303d2e756fa73" "checksum async-stream-impl 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4f0d8c5b411e36dcfb04388bacfec54795726b1f0148adcb0f377a96d6747e0e" "checksum atty 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "1803c647a3ec87095e7ae7acfca019e98de5ec9a7d01343f611cf3152ed71a90" -"checksum autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "22130e92352b948e7e82a49cdb0aa94f2211761117f29e052dd397c1ac33542b" -"checksum backtrace 0.3.34 (registry+https://github.com/rust-lang/crates.io-index)" = "b5164d292487f037ece34ec0de2fcede2faa162f085dd96d2385ab81b12765ba" +"checksum autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "b671c8fb71b457dd4ae18c4ba1e59aa81793daacc361d82fcd410cef0d491875" +"checksum backtrace 0.3.38 
(registry+https://github.com/rust-lang/crates.io-index)" = "690a62be8920ccf773ee00ef0968649b0e724cda8bd5b12286302b4ae955fdf5" "checksum backtrace-sys 0.1.31 (registry+https://github.com/rust-lang/crates.io-index)" = "82a830b4ef2d1124a711c71d263c5abdc710ef8e907bd508c88be475cebc422b" "checksum base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0b25d992356d2eb0ed82172f5248873db5560c4721f564b13cb5193bda5e668e" "checksum battery 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6d6fe5630049e900227cd89afce4c1204b88ec8e61a2581bb96fcce26f047b" "checksum bigdecimal 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "460825c9e21708024d67c07057cd5560e5acdccac85de0de624a81d3de51bacb" -"checksum bincode 1.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "9f04a5e50dc80b3d5d35320889053637d15011aed5e66b66b37ae798c65da6f7" -"checksum bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3d155346769a6855b86399e9bc3814ab343cd3d62c7e985113d46a0ec3c281fd" -"checksum blake2b_simd 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)" = "461f4b879a8eb70c1debf7d0788a9a5ff15f1ea9d25925fea264ef4258bed6b2" +"checksum bincode 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b8ab639324e3ee8774d296864fbc0dbbb256cf1a41c490b94cba90c082915f92" +"checksum bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8a606a02debe2813760609f57a64a2ffd27d9fdf5b2f133eaca0b248dd92cdd2" +"checksum blake2b_simd 0.5.8 (registry+https://github.com/rust-lang/crates.io-index)" = "5850aeee1552f495dd0250014cf64b82b7c8879a89d83b33bbdace2cc4f63182" "checksum block 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0d8c1fef690941d3e7788d328517591fecc684c084084702d6ff1641e993699a" "checksum bson 0.14.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d61895d21e2194d1ce1d434cff69025daac1e49a8b4698eb04b05722dbc08b33" -"checksum bstr 0.2.6 
(registry+https://github.com/rust-lang/crates.io-index)" = "e0a692f1c740e7e821ca71a22cf99b9b2322dfa94d10f71443befb1797b3946a" -"checksum bumpalo 2.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2cd43d82f27d68911e6ee11ee791fb248f138f5d69424dc02e098d4f152b0b05" -"checksum byte-unit 3.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90139954ec9776c4832d44f212e558ccdacbe915a881bf3de3a1a487fa8d1e87" +"checksum bstr 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "8d6c2c5b58ab920a4f5aeaaca34b4488074e8cc7596af94e6f8c6ff247c60245" +"checksum bumpalo 2.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ad807f2fc2bf185eeb98ff3a901bd46dc5ad58163d0fa4577ba0d25674d71708" +"checksum byte-unit 3.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6894a79550807490d9f19a138a6da0f8830e70c83e83402dd23f16fd6c479056" "checksum bytecount 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f861d9ce359f56dbcb6e0c2a1cb84e52ad732cadb57b806adeb3c7668caccbd8" "checksum byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a7c3dd8985a7111efc5c80b44e23ecdd8c007de8ade3b96595387e812b957cf5" "checksum bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)" = "206fdffcfa2df7cbe15601ef46c813fce0965eb3286db6b56c583b814b51c81c" "checksum c2-chacha 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7d64d04786e0f528460fc884753cf8dddcc466be308f6026f8e355c41a0e4101" "checksum cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)" = "4fc9a35e1f4290eb9e5fc54ba6cf40671ed2a2514c3eeb2b2a908dda2ea5a1be" -"checksum cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "b486ce3ccf7ffd79fdeb678eac06a9e6c09fc88d33836340becb8fffe87c5e33" +"checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" "checksum chrono 0.4.9 
(registry+https://github.com/rust-lang/crates.io-index)" = "e8493056968583b0193c1bb04d6f7684586f3726992d6c573261941a895dbd68" "checksum chrono-humanize 0.0.11 (registry+https://github.com/rust-lang/crates.io-index)" = "eb2ff48a655fe8d2dae9a39e66af7fd8ff32a879e8c4e27422c25596a8b5e90d" "checksum clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5067f5bb2d80ef5d68b4c87db81601f0b75bca627bc2ef76b141d7b846a3c6d9" @@ -2955,7 +2861,7 @@ dependencies = [ "checksum clipboard-win 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e3a093d6fed558e5fe24c3dfc85a68bb68f1c824f440d3ba5aca189e2998786b" "checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" "checksum config 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f9107d78ed62b3fa5a86e7d18e647abed48cfd8f8fab6c72f4cdb982d196f7e6" -"checksum constant_time_eq 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8ff012e225ce166d4422e0e78419d901719760f62ae2b7969ca6b564d1b54a9e" +"checksum constant_time_eq 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "995a44c877f9212528ccc74b21a232f66ad69001e40ede5bcee2ac9ef2657120" "checksum core-foundation 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)" = "25b9e03f145fd4f2bf705e07b900cd41fc636598fe5dc452fd0db1441c3f496d" "checksum core-foundation-sys 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e7ca8a5221364ef15ce201e8ed2f609fc312682a8f4e0e3d4aa5879764e0fa3b" "checksum crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ba125de2af0df55319f41944744ad91c71113bf74a4646efff39afe1f6842db1" @@ -2972,10 +2878,10 @@ dependencies = [ "checksum crossterm_winapi 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "b055e7cc627c452e6a9b977022f48a2db6f0ff73df446ca970f95eef9c381d45" "checksum csv 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = 
"37519ccdfd73a75821cac9319d4fce15a81b9fcf75f951df5b9988aa3a0af87d" "checksum csv-core 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "9b5cadb6b25c77aeff80ba701712494213f4a8418fcda2ee11b6560c3ad0bf4c" -"checksum ctor 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "3b4c17619643c1252b5f690084b82639dd7fac141c57c8e77a00e0148132092c" +"checksum ctor 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "cd8ce37ad4184ab2ce004c33bf6379185d3b1c95801cab51026bd271bf68eedc" "checksum ctrlc 3.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c7dfd2d8b4c82121dfdff120f818e09fc4380b0b7e17a742081a89b94853e87f" -"checksum curl 0.4.22 (registry+https://github.com/rust-lang/crates.io-index)" = "f8ed9a22aa8c4e49ac0c896279ef532a43a7df2f54fcd19fa36960de029f965f" -"checksum curl-sys 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)" = "5e90ae10f635645cba9cad1023535f54915a95c58c44751c6ed70dbaeb17a408" +"checksum curl 0.4.25 (registry+https://github.com/rust-lang/crates.io-index)" = "06aa71e9208a54def20792d877bc663d6aae0732b9852e612c4a933177c31283" +"checksum curl-sys 0.4.23 (registry+https://github.com/rust-lang/crates.io-index)" = "f71cd2dbddb49c744c1c9e0b96106f50a634e8759ec51bcd5399a578700a3ab3" "checksum darwin-libproc 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ade5a88af8d9646bf770687321a9488a0f2b4610aa08b0373016cd1af37f0a31" "checksum darwin-libproc-sys 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c30d1a078d74da1183b02fed8a8b07afc412d3998334b53b750d0ed03b031541" "checksum decimal 2.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "e6458723bc760383275fbc02f4c769b2e5f3de782abaf5e7e0b9b7f0368a63ed" @@ -2988,20 +2894,18 @@ dependencies = [ "checksum dirs-sys 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "afa0b23de8fd801745c471deffa6e12d248f962c9fd4b4c33787b055599bde7b" "checksum dtoa 0.4.4 
(registry+https://github.com/rust-lang/crates.io-index)" = "ea57b42383d091c85abcc2706240b94ab2a8fa1fc81c10ff23c4de06e2a90b5e" "checksum dunce 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d0ad6bf6a88548d1126045c413548df1453d9be094a8ab9fd59bf1fdd338da4f" -"checksum either 1.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "5527cfe0d098f36e3f8839852688e63c8fff1c90b2b405aef730615f9a7bcf7b" -"checksum encode_unicode 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "90b2c9496c001e8cb61827acdefad780795c42264c137744cae6f7d9e3450abd" -"checksum enum-utils 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1f1ae672d9891879fb93e17ab6015c4e3bbe63fbeb23a41b9ac39ffa845b8836" -"checksum enum-utils-from-str 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6b5669381f76d7320e122abdd4a8307f986634f6d067fb69e31179422175801a" +"checksum either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "bb1f6b1ce1c140482ea30ddd3335fc0024ac7ee112895426e0a629a6c20adfe3" +"checksum encode_unicode 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" "checksum env_logger 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "aafcde04e90a5226a6443b7aabdb016ba2f8307c847d524724bd9b346dd1a2d3" -"checksum failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "795bd83d3abeb9220f257e597aa0080a508b27533824adf336529648f6abf7e2" -"checksum failure_derive 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "ea1063915fd7ef4309e222a5a07cf9c319fb9c7836b1f89b85458672dbb127e1" +"checksum failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "f8273f13c977665c5db7eb2b99ae520952fe5ac831ae4cd09d80c4c7042b5ed9" +"checksum failure_derive 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0bc225b78e0391e4b8683440bf2e63c2deeeb2ce5189eab46e2b68c6d3725d08" "checksum 
fallible-iterator 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" "checksum fallible-streaming-iterator 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a" "checksum fixedbitset 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "86d4de0081402f5e88cdac65c8dcdcc73118c1a7a465e2a05f0da05843a8ea33" -"checksum flate2 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)" = "550934ad4808d5d39365e5d61727309bf18b3b02c6c56b729cb92e7dd84bc3d8" +"checksum flate2 1.0.12 (registry+https://github.com/rust-lang/crates.io-index)" = "ad3c5233c9a940c8719031b423d7e6c16af66e031cb0420b0896f5245bf181d3" "checksum fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2fad85553e09a6f881f739c29f0b00b0f01357c743266d478b68951ce23285f3" "checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" -"checksum futures 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)" = "45dc39533a6cae6da2b56da48edae506bb767ec07370f86f70fc062e9d435869" +"checksum futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)" = "1b980f2816d6ee8673b6517b52cb0e808a180efc92e5c19d02cdda79066703ef" "checksum futures-channel-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)" = "f477fd0292c4a4ae77044454e7f2b413207942ad405f759bb0b4698b7ace5b12" "checksum futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)" = "4a2f26f774b81b3847dcda0c81bd4b6313acfb4f69e5a0390c7cb12c058953e9" "checksum futures-executor-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)" = "80705612926df8a1bc05f0057e77460e29318801f988bf7d803a734cf54e7528" @@ -3011,47 +2915,47 @@ dependencies = [ "checksum futures-timer 0.4.0 
(registry+https://github.com/rust-lang/crates.io-index)" = "878f1d2fc31355fa02ed2372e741b0c17e58373341e6a122569b4623a14a7d33" "checksum futures-util-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)" = "7df53daff1e98cc024bf2720f3ceb0414d96fbb0a94f3cad3a5c3bf3be1d261c" "checksum futures_codec 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "36552cd31353fd135114510d53b8d120758120c36aa636a9341970f9efb1e4a0" -"checksum getrandom 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "34f33de6f0ae7c9cb5e574502a562e2b512799e32abb801cd1e79ad952b62b49" +"checksum getrandom 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "473a1265acc8ff1e808cd0a1af8cee3c2ee5200916058a2ca113c29f2d903571" "checksum getset 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "117a5b13aecd4e10161bb3feb22dda898e8552836c2391d8e4645d5e703ab866" "checksum git2 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "39f27186fbb5ec67ece9a56990292bc5aed3c3fc51b9b07b0b52446b1dfb4a82" "checksum glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" "checksum heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205" -"checksum heim 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "02692a4aa3bed77933da9ae7915aef7fcceb65eff9d9251be189b1acc0b77f65" -"checksum heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "559807533108e09863125eeccb38a7213cef5a7a7deadd3fac2674e1f8d3db70" -"checksum heim-cpu 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "60c237652eaa091b39f996deb41aa7baae67cab5f25204154c14414f46ef69c1" -"checksum heim-derive 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b3f326db96a03106afcea6839b13f7d95b09cffd063eaa94ef0fd3e796214a66" 
-"checksum heim-disk 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bd75c64f2d054ce1297ad08f2ca41bf7db7e9ca868221b2fb7427210579e85a1" -"checksum heim-host 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6401c858723568a09e0f09e09bda833e0019c34aa512ccdeba236fce45e4eeb1" -"checksum heim-memory 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "424a549b6c3faecc2492cd3d49f1f89ed9f191c7995741b89e674b85a262e303" -"checksum heim-net 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d0ebbcbabe86dbc1c8713ecc1f54630549f82fa07520083cf9a0edcdd77d329a" -"checksum heim-process 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "564f0d9d123c708688721fb2c2aacc198bd5eec3d995eb8c25d369500c66ca7d" -"checksum heim-runtime 0.0.4-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "df59b2a6e00b7f4532dc00736d74bf721a4587d4dbf90793c524ed0a7eddfa19" -"checksum heim-sensors 0.0.3-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "512afc3c0562aa26ae4e236a4b371901fbf7ddac843c961b2ef201936e79a7cd" -"checksum heim-virt 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "95372a84d2a0a5709899449fbb8ed296a9ce5b9fc0ba4729f0c26f7d5ebdf155" +"checksum heim 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "de848466ae9659d5ab634615bdd0b7d558a41ae524ee4d59c880d12499af5b77" +"checksum heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "63f408c31e695732096a0383df16cd3efee4adb32ba3ad086fb85a7dc8f53100" +"checksum heim-cpu 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "5785004dfdbd68a814d504b27b8ddc16c748a856835dfb6e65b15142090664ef" +"checksum heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "9573bedf4673c1b254bce7f1521559329d2b27995b693b695fa13be2b15c188b" +"checksum heim-disk 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = 
"c84980e62564828ae4ca70a8bfbdb0f139cc89abb6c91b8b4809518346a72366" +"checksum heim-host 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "1de019d5969f6bab766311be378788bd1bb068b59c4f3861c539a420fc258ed3" +"checksum heim-memory 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "a9cdbe6433197da8387dcd0cf1afd9184db4385d55f8a76355b28ceabe99cdc5" +"checksum heim-net 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "7b0f5e590eb2f8b23229ff4b06f7e7aee0e229837d3697f362014343682ae073" +"checksum heim-process 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "a64874316339b9c0c7953e7a87d2b32e2400bf6778650ac11b76b05d3c37e121" +"checksum heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "13ef10b5ab5a501e6537b1414db0e3c488425d88bb131bd4e9ff7c0e61e5fbd1" +"checksum heim-sensors 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8ad8b3c9032bca1a76dd43e1eb5c8044e0c505343cb21949dc7acd1bc55b408b" +"checksum heim-virt 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "bb2dda5314da10a8fbcdf130c065abc65f02c3ace72c6f143ad4537520536e2b" "checksum hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "805026a5d0141ffc30abb3be3173848ad46a1b1664fe632428479619a3644d77" "checksum hex 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "023b39be39e3a2da62a94feb433e91e8bcd37676fbc8bea371daf52b7a769a3e" "checksum http 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)" = "372bcb56f939e449117fb0869c2e8fd8753a8223d92a172c6e808cf123a5b6e4" -"checksum humantime 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3ca7e5f2e110db35f93b837c81797f3714500b81d517bf20c431b16d3ca4f114" +"checksum humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "df004cfca50ef23c36850aaaa59ad52cc70d0e90243c3c7737a4dd32dc7a3c4f" "checksum idna 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = 
"02e2673c30ee86b5b96a9cb52ad15718aa1f966f5ab9ad54a8b95d5ca33120a9" -"checksum image 0.22.2 (registry+https://github.com/rust-lang/crates.io-index)" = "ee0665404aa0f2ad154021777b785878b0e5b1c1da030455abc3d9ed257c2c67" +"checksum image 0.22.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7b4be8aaefbe7545dc42ae925afb55a0098f226a3fe5ef721872806f44f57826" "checksum indexmap 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a61202fbe46c4a951e9404a720a0180bcf3212c750d735cb5c4ba4dc551299f3" "checksum inflate 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "1cdb29978cc5797bd8dcc8e5bf7de604891df2a8dc576973d71a281e916db2ff" -"checksum iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dbe6e417e7d0975db6512b90796e8ce223145ac4e33c377e4a42882a0e88bb08" -"checksum isahc 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e1b971511b5d8de4a51d4da4bc8e374bf60ce841e91b116f46ae06ae2e2a8e9b" +"checksum iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e" +"checksum isahc 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)" = "769f5071e5bf0b45489eefe0ec96b97328675db38d02ea5e923519d52e690cb8" "checksum isatty 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e31a8281fc93ec9693494da65fbf28c0c2aa60a2eaec25dc58e2f31952e95edc" "checksum itertools 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)" = "0d47946d458e94a1b7bcabbf6521ea7c037062c81f534615abcad76e84d4970d" "checksum itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5b8467d9c1cebe26feb08c640139247fac215782d35371ade9a2136ed6085358" "checksum itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "501266b7edd0174f8530248f87f99c88fbe60ca4ef3dd486835b8d8d53136f7f" -"checksum jobserver 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = 
"f74e73053eaf95399bf926e48fc7a2a3ce50bd0eaaa2357d391e95b2dcdd4f10" -"checksum jpeg-decoder 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "c8b7d43206b34b3f94ea9445174bda196e772049b9bddbc620c9d29b2d20110d" -"checksum js-sys 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)" = "1efc4f2a556c58e79c5500912e221dd826bec64ff4aabd8ce71ccef6da02d7d4" +"checksum jobserver 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)" = "f2b1d42ef453b30b7387e113da1c83ab1605d90c5b4e0eb8e96d016ed3b8c160" +"checksum jpeg-decoder 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "c1aae18ffeeae409c6622c3b6a7ee49792a7e5a062eea1b135fbb74e301792ba" +"checksum js-sys 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)" = "2cc9a97d7cec30128fd8b28a7c1f9df1c001ceb9b441e2b755e24130a6b43c79" "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" "checksum language-reporting 0.3.1 (git+https://github.com/wycats/language-reporting)" = "" "checksum lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "76f033c7ad61445c5b347c7382dd1237847eb1bce590fe50365dcb33d546be73" -"checksum lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bc5729f27f159ddd61f4df6228e827e86643d4d3e7c32183cb30a1c08f604a14" +"checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" "checksum lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b294d6fa9ee409a054354afc4352b0b9ef7ca222c69b8812cbea9e7d2bf3783f" -"checksum lexical-core 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b8b0f90c979adde96d19eb10eb6431ba0c441e2f9e9bdff868b2f6f5114ff519" -"checksum libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)" = 
"d44e80633f007889c7eff624b709ab43c92d708caad982295768a7b13ca3b5eb" +"checksum lexical-core 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2304bccb228c4b020f3a4835d247df0a02a7c4686098d4167762cfbbe4c5cb14" +"checksum libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)" = "34fcd2c08d2f832f376f4173a231990fa5aef4e99fb569867318a227ef4c06ba" "checksum libgit2-sys 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a30f8637eb59616ee3b8a00f6adff781ee4ddd8343a615b8238de756060cc1b3" "checksum libnghttp2-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "02254d44f4435dd79e695f2c2b83cd06a47919adea30216ceaf0c57ca0a72463" "checksum libsqlite3-sys 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5e5b95e89c330291768dc840238db7f9e204fd208511ab6319b56193a7f2ae25" @@ -3070,16 +2974,14 @@ dependencies = [ "checksum memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "88579771288728879b57485cc7d6b07d648c9f0141eb955f8ab7f9d45394468e" "checksum mime 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)" = "dd1d63acd1b78403cc0c325605908475dd9b9a3acbf65ed8bcab97e27014afcf" "checksum mime_guess 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1a0ed03949aef72dbdf3116a383d7b38b4768e6f960528cd6a6044aa9ed68599" -"checksum miniz-sys 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "1e9e3ae51cea1576ceba0dde3d484d30e6e5b86dee0b2d412fe3a16a15c98202" -"checksum miniz_oxide 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "fe2959c5a0747a8d7a56b4444c252ffd2dda5d452cfd147cdfdda73b1c3ece5b" -"checksum miniz_oxide_c_api 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6c675792957b0d19933816c4e1d56663c341dd9bfa31cb2140ff2267c1d8ecf4" +"checksum miniz_oxide 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "304f66c19be2afa56530fa7c39796192eef38618da8d19df725ad7c6d6b2aaae" "checksum natural 0.3.0 
(registry+https://github.com/rust-lang/crates.io-index)" = "fd659d7d6b4554da2c0e7a486d5952b24dfce0e0bac88ab53b270f4efe1010a6" "checksum neso 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6b3c31defbcb081163db18437fd88c2a267cb3e26f7bd5e4b186e4b1b38fe8c8" "checksum nix 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6c722bee1037d430d0f8e687bbdbf222f27cc6e4e68d5caf630857bb2b6dbdce" "checksum nix 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3b2e0b4f3320ed72aaedb9a5ac838690a8047c7b275da22711fddff4f8a14229" -"checksum nodrop 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "2f9667ddcc6cc8a43afc9b7917599d7216aa09c463919ea32c59ed6cac8bc945" +"checksum nodrop 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)" = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb" "checksum nom 4.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2ad2a91a8e869eeb30b9cb3119ae87773a8f4ae617f41b1eb9c154b2905f7bd6" -"checksum nom 5.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e9761d859320e381010a4f7f8ed425f2c924de33ad121ace447367c713ad561b" +"checksum nom 5.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c618b63422da4401283884e6668d39f819a106ef51f5f59b81add00075da35ca" "checksum nom-tracable 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "edaa64ad2837d831d4a17966c9a83aa5101cc320730f5b724811c8f7442a2528" "checksum nom-tracable-macros 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fd25f70877a9fe68bd406b3dd3ff99e94ce9de776cf2a96e0d99de90b53d4765" "checksum nom_locate 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f932834fd8e391fc7710e2ba17e8f9f8645d846b55aa63207e17e110a1e1ce35" @@ -3095,10 +2997,10 @@ dependencies = [ "checksum objc-foundation 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1add1b659e36c9607c7aab864a76c7a4c2760cd0cd2e120f3fb8b952c7e22bf9" "checksum objc_id 
0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c92d4ddb4bd7b50d730c215ff871754d0da6b2178849f8a2a2ab69712d0c073b" "checksum ole32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5d2c49021782e5233cd243168edfa8037574afed4eba4bbaf538b3d8d1789d8c" -"checksum onig 4.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a646989adad8a19f49be2090374712931c3a59835cb5277b4530f48b417f26e7" +"checksum onig 4.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8518fcb2b1b8c2f45f0ad499df4fda6087fc3475ca69a185c173b8315d2fb383" "checksum onig_sys 69.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388410bf5fa341f10e58e6db3975f4bea1ac30247dd79d37a9e5ced3cb4cc3b0" "checksum openssl-probe 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "77af24da69f9d9341038eba93a073b1fdaaa1b788221b00a69bce9e762cb32de" -"checksum openssl-sys 0.9.49 (registry+https://github.com/rust-lang/crates.io-index)" = "f4fad9e54bd23bd4cbbe48fdc08a1b8091707ac869ef8508edea2fec77dcc884" +"checksum openssl-sys 0.9.51 (registry+https://github.com/rust-lang/crates.io-index)" = "ba24190c8f0805d3bd2ce028f439fe5af1d55882bbe6261bed1dbc93b50dd6b1" "checksum ord_subset 3.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d7ce14664caf5b27f5656ff727defd68ae1eb75ef3c4d95259361df1eb376bef" "checksum ordered-float 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "18869315e81473c951eb56ad5558bbc56978562d3ecfb87abb7a1e944cea4518" "checksum ordermap 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "a86ed3f5f244b372d6b1a00b72ef7f8876d0bc6a78a4c9985c53614041512063" @@ -3106,26 +3008,24 @@ dependencies = [ "checksum percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" "checksum petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)" = 
"9c3659d1ee90221741f65dd128d9998311b0e40c5d3c23a62445938214abce4f" "checksum pin-utils 0.1.0-alpha.4 (registry+https://github.com/rust-lang/crates.io-index)" = "5894c618ce612a3fa23881b152b608bafb8c56cfc22f434a3ba3120b40f7b587" -"checksum pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "a7c1d2cfa5a714db3b5f24f0915e74fcdf91d09d496ba61329705dda7774d2af" -"checksum platforms 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6cfec0daac55b13af394ceaaad095d17c790f77bdc9329264f06e49d6cd3206c" +"checksum pkg-config 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)" = "72d5370d90f49f70bd033c3d75e87fc529fbfff9d6f7cccef07d6170079d91ea" +"checksum platforms 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "feb3b2b1033b8a60b4da6ee470325f887758c95d5320f52f9ce0df055a55940e" "checksum plist 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "5f2a9f075f6394100e7c105ed1af73fb1859d6fd14e49d4290d578120beb167f" "checksum png 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8422b27bb2c013dd97b9aef69e161ce262236f49aaf46a0489011c8ff0264602" "checksum ppv-lite86 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e3cbf9f658cdb5000fcf6f362b8ea2ba154b9f146a61c7a20d647034c6b6561b" -"checksum pretty-hex 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "119929a2a3b731bb3d888f7a1b5dc3c1db28b6c134def5d99f7e16e2da16b8f7" +"checksum pretty-hex 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "be91bcc43e73799dc46a6c194a55e7aae1d86cc867c860fd4a436019af21bd8c" "checksum pretty_assertions 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3f81e1644e1b54f5a68959a29aa86cde704219254669da328ecfdf6a1f09d427" "checksum pretty_env_logger 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "717ee476b1690853d222af4634056d830b5197ffd747726a9a1eee6da9f49074" "checksum prettytable-rs 0.8.0 
(registry+https://github.com/rust-lang/crates.io-index)" = "0fd04b170004fa2daccf418a7f8253aaf033c27760b5f225889024cf66d7ac2e" -"checksum proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)" = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759" -"checksum proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4c5c2380ae88876faae57698be9e9775e3544decad214599c3a6266cca6ac802" +"checksum proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "90cf5f418035b98e655e9cdb225047638296b862b42411c4e45bb88d700f7fc0" "checksum ptree 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6b0a3be00b19ee7bd33238c1c523a7ab4df697345f6b36f90827a7860ea938d4" "checksum quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9274b940887ce9addde99c4eee6b5c44cc494b182b97e73dc8ffdcb3397fd3f0" -"checksum quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1" "checksum quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe" -"checksum rand 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d47eab0e83d9693d40f825f86948aa16eff6750ead4bdffc4ab95b8b3a7f052c" +"checksum rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "3ae1b169243eaf61759b8475a998f0a385e42042370f3a7dbaf35246eacc8412" "checksum rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "03a2a90da8c7523f554344f921aa97283eadf6ac484a6d2a7d0212fa7f8d6853" "checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" "checksum rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" -"checksum rand_core 0.5.0 
(registry+https://github.com/rust-lang/crates.io-index)" = "615e683324e75af5d43d8f7a39ffe3ee4a9dc42c5c701167a71dc59c3a493aca" +"checksum rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" "checksum rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" "checksum rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071" "checksum raw-cpuid 7.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b4a349ca83373cfa5d6dbb66fd76e58b2cca08da71a5f6400de0a0a6a9bceeaf" @@ -3134,24 +3034,24 @@ dependencies = [ "checksum readkey 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d98db94bb4f3e926c8d8186547cd9366d958d753aff5801214d93d38214e8f0f" "checksum redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)" = "2439c63f3f6139d1b57529d16bc3b8bb855230c8efcc5d3a896c8bea7c3b1e84" "checksum redox_users 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4ecedbca3bf205f8d8f5c2b44d83cd0690e39ee84b951ed649e9f1841132b66d" -"checksum regex 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "88c3d9193984285d544df4a30c23a4e62ead42edf70a4452ceb76dac1ce05c26" +"checksum regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "dc220bd33bdce8f093101afe22a037b8eb0e5af33592e6a9caafff0d4cb81cbd" "checksum regex-automata 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "92b73c2a1770c255c240eaa4ee600df1704a38dc3feaa6e949e7fcd4f8dc09f9" -"checksum regex-syntax 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)" = "b143cceb2ca5e56d5671988ef8b15615733e7ee16cd348e064333b251b89343f" +"checksum regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)" = 
"11a7e20d1cce64ef2fed88b66d347f88bd9babb82845b2b858f3edbf59a4f716" "checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e" "checksum render-tree 0.1.1 (git+https://github.com/wycats/language-reporting)" = "" "checksum result 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "194d8e591e405d1eecf28819740abed6d719d1a2db87fc0bcdedee9a26d55560" -"checksum roxmltree 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "153c367ce9fb8ef7afe637ef92bd083ba0f88b03ef3fcf0287d40be05ae0a61c" +"checksum roxmltree 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b1a3193e568c6e262f817fd07af085c7f79241a947aedd3779d47eadc170e174" "checksum rusqlite 0.20.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2a194373ef527035645a1bc21b10dc2125f73497e6e155771233eb187aedd051" "checksum rust-argon2 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4ca4eaef519b494d1f2848fc602d18816fed808a981aedf4f1f00ceb7c9d32cf" "checksum rust-ini 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3e52c148ef37f8c375d49d5a73aa70713125b7f19095948a923f80afdeb22ec2" -"checksum rustc-demangle 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "a7f4dccf6f4891ebcc0c39f9b6eb1a83b9bf5d747cb439ec6fba4f3b977038af" +"checksum rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "4c691c0e608126e00913e33f0ccf3727d5fc84573623b8d65b2df340b5201783" "checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" "checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" "checksum rustyline 5.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = 
"4795e277e6e57dec9df62b515cd4991371daa80e8dc8d80d596e58722b89c417" -"checksum ryu 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c92464b447c0ee8c4fb3824ecc8383b81717b9f1e74ba2e72540aef7b9f82997" -"checksum safemem 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e133ccc4f4d1cd4f89cc8a7ff618287d56dc7f638b8e38fc32c5fdcadc339dd5" +"checksum ryu 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "19d2271fa48eaf61e53cc88b4ad9adcbafa2d512c531e7fadb6dc11a4d3656c5" +"checksum safemem 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d2b08423011dae9a5ca23f07cf57dac3857f5c885d352b76f6d95f4aea9434d0" "checksum same-file 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "585e8ddcedc187886a30fa705c47985c3fa88d06624095856b36ca0b82ff4421" -"checksum schannel 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "f2f6abf258d99c3c1c5c2131d99d064e94b7b3dd5f416483057f308fea253339" +"checksum schannel 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "87f550b06b6cba9c8b8be3ee73f391990116bf527450d2556e9b9ce263b9a021" "checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" "checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" "checksum serde 0.8.23 (registry+https://github.com/rust-lang/crates.io-index)" = "9dad3f759919b92c3068c696c15c3d17238234498bbdcc80f2c469606f948ac8" @@ -3160,29 +3060,26 @@ dependencies = [ "checksum serde-hjson 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6a3a4e0ea8a88553209f6cc6cfe8724ecad22e1acf372793c27d995290fe74f8" "checksum serde-value 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7a663f873dedc4eac1a559d4c6bc0d0b2c34dc5ac4702e105014b8281489e44f" "checksum serde_bytes 0.11.2 
(registry+https://github.com/rust-lang/crates.io-index)" = "45af0182ff64abaeea290235eb67da3825a576c5d53e642c4d5b652e12e6effc" -"checksum serde_derive 1.0.98 (registry+https://github.com/rust-lang/crates.io-index)" = "01e69e1b8a631f245467ee275b8c757b818653c6d704cdbcaeb56b56767b529c" -"checksum serde_derive_internals 0.24.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8a80c6c0b1ebbcea4ec2c7e9e2e9fa197a425d17f1afec8ba79fcd1352b18ffb" +"checksum serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)" = "4b133a43a1ecd55d4086bd5b4dc6c1751c68b1bfbeba7a5040442022c7e7c02e" "checksum serde_ini 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "eb236687e2bb073a7521c021949be944641e671b8505a94069ca37b656c81139" -"checksum serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)" = "051c49229f282f7c6f3813f8286cc1e3323e8051823fce42c7ea80fe13521704" +"checksum serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)" = "2f72eb2a68a7dc3f9a691bfda9305a1c017a6215e5a4545c258500d2099a37c2" "checksum serde_test 0.8.23 (registry+https://github.com/rust-lang/crates.io-index)" = "110b3dbdf8607ec493c22d5d947753282f3bae73c0f56d322af1e8c78e4c23d5" "checksum serde_urlencoded 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9ec5d77e2d4c73717816afac02670d5c4f534ea95ed430442cad02e7a6e32c97" -"checksum serde_yaml 0.8.9 (registry+https://github.com/rust-lang/crates.io-index)" = "38b08a9a90e5260fe01c6480ec7c811606df6d3a660415808c3c3fa8ed95b582" +"checksum serde_yaml 0.8.11 (registry+https://github.com/rust-lang/crates.io-index)" = "691b17f19fc1ec9d94ec0b5864859290dff279dbd7b03f017afda54eb36c3c35" "checksum shell32-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9ee04b46101f57121c9da2b151988283b6beb79b34f5bb29a58ee48cb695122c" "checksum shellexpand 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de7a5b5a9142fd278a10e0209b021a1b85849352e6951f4f914735c976737564" 
"checksum slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8" -"checksum sluice 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ec70d7c3b17c262d4a18f7291c6ce62bf47170915f3b795434d3c5c49a4e59b7" +"checksum sluice 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0a7d06dfb3e8743bc19e6de8a302277471d08077d68946b307280496dc5a3531" "checksum smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)" = "ab606a9c5e214920bb66c458cd7be8ef094f813f20fe77a54cc7dbfff220d4b7" "checksum socket2 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)" = "e8b74de517221a2cb01a53349cf54182acdc31a074727d3079068448c0676d85" "checksum sourcefile 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "4bf77cb82ba8453b42b6ae1d692e4cdc92f9a47beaf89a847c8be83f4e328ad3" -"checksum stackvector 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "1c4725650978235083241fab0fdc8e694c3de37821524e7534a1a9061d1068af" -"checksum static_assertions 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b4f8de36da215253eb5f24020bfaa0646613b48bf7ebe36cdfa37c3b3b33b241" +"checksum static_assertions 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "7f3eb36b47e512f8f1c9e3d10c2c1965bc992bd9cdb024fa581e2194501c83d3" "checksum strsim 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" "checksum sublime_fuzzy 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "97bd7ad698ea493a3a7f60c2ffa117c234f341e09f8cc2d39cef10cdde077acf" "checksum subprocess 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)" = "28fc0f40f0c0da73339d347aa7d6d2b90341a95683a47722bc4eebed71ff3c00" "checksum surf 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "018eed64aede455beb88505d50c5c64882bebbe0996d4b660c272e3d8bb6f883" -"checksum 
syn 0.15.43 (registry+https://github.com/rust-lang/crates.io-index)" = "ee06ea4b620ab59a2267c6b48be16244a3389f8bfa0986bdd15c35b890b00af3" -"checksum syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "c65d951ab12d976b61a41cf9ed4531fc19735c6e6d84a4bb1453711e762ec731" -"checksum synstructure 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)" = "02353edf96d6e4dc81aea2d8490a7e9db177bf8acb0e951c24940bf866cb313f" +"checksum syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf" +"checksum synstructure 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3f085a5855930c0441ca1288cf044ea4aecf4f43a91668abdb870b4ba546a203" "checksum syntect 3.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e80b8831c5a543192ffc3727f01cf0e57579c6ac15558e3048bfb5708892167b" "checksum tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9" "checksum term 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "edd106a334b7657c10b7c540a0106114feadeb4dc314513e97df481d5d966f42" @@ -3196,15 +3093,13 @@ dependencies = [ "checksum tokio-io 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "5090db468dad16e1a7a54c8c67280c5e4b544f3d3e018f0b913b400261f85926" "checksum toml 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)" = "758664fc71a3a69038656bee8b6be6477d2a6c315a6b81f7081f591bffa4111f" "checksum toml 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c7aabe75941d914b72bf3e5d3932ed92ce0664d49d8432305a8b547c37227724" -"checksum typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "612d636f949607bdf9b123b4a6f6d966dedf3ff669f7f045890d3a4a73948169" -"checksum unicase 2.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a84e5511b2a947f3ae965dcb29b13b7b1691b6e7332cf5dbc1744138d5acb7f6" +"checksum 
typenum 1.11.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6d2783fe2d6b8c1101136184eb41be8b1ad379e4657050b8aaff0c79ee7575f9" +"checksum unicase 2.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2e2e6bd1e59e56598518beb94fd6db628ded570326f0a98c679a304bd9f00150" "checksum unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5" "checksum unicode-normalization 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "141339a08b982d942be2ca06ff8b076563cbe223d1befd5450716790d44e2426" "checksum unicode-segmentation 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1967f4cdfc355b37fd76d2a954fb2ed3871034eb4f26d60537d88795cfc332a9" "checksum unicode-width 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "7007dbd421b92cc6e28410fe7362e2e0a2503394908f417b68ec8d1c364c4e20" -"checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" "checksum unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c" -"checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56" "checksum uom 0.23.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3ef5bbe8385736e498dbb0033361f764ab43a435192513861447b9f7714b3fec" "checksum uom 0.25.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3198c29f199fa8a23d732f4aa21ddc4f4d0a257cb0c2a44afea30145ce2575c1" "checksum url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "75b414f6c464c879d7f9babf951f23bc3743fb7313c081b2e6ca719067ea9d61" @@ -3215,14 +3110,15 @@ dependencies = [ "checksum version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = 
"914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd" "checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" "checksum walkdir 2.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "9658c94fa8b940eab2250bd5a457f9c48b748420d71293b165c8cdbe2f55f71e" -"checksum wasm-bindgen 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)" = "dcddca308b16cd93c2b67b126c688e5467e4ef2e28200dc7dfe4ae284f2faefc" -"checksum wasm-bindgen-backend 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)" = "f805d9328b5fc7e5c6399960fd1889271b9b58ae17bdb2417472156cc9fafdd0" -"checksum wasm-bindgen-futures 0.3.25 (registry+https://github.com/rust-lang/crates.io-index)" = "73c25810ee684c909488c214f55abcbc560beb62146d352b9588519e73c2fed9" -"checksum wasm-bindgen-macro 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)" = "3ff88201a482abfc63921621f6cb18eb1efd74f136b05e5841e7f8ca434539e9" -"checksum wasm-bindgen-macro-support 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)" = "6a433d89ecdb9f77d46fcf00c8cf9f3467b7de9954d8710c175f61e2e245bb0e" -"checksum wasm-bindgen-shared 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)" = "d41fc1bc3570cdf8d108c15e014045fd45a95bb5eb36605f96a90461fc34027d" -"checksum wasm-bindgen-webidl 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)" = "be53d289bf2fa7645a089cfd5c7a34bf4fe94221f58cf86ee42a7b4bc854ff14" -"checksum web-sys 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)" = "6435c477200ad486089a7a72c2bd6c9bdf9740bd7fff868806076218076d8c51" +"checksum wasi 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b89c3ce4ce14bdc6fb6beaf9ec7928ca331de5df7e5ea278375642a2f478570d" +"checksum wasm-bindgen 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)" = "cd34c5ba0d228317ce388e87724633c57edca3e7531feb4e25e35aaa07a656af" +"checksum 
wasm-bindgen-backend 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)" = "927196b315c23eed2748442ba675a4c54a1a079d90d9bdc5ad16ce31cf90b15b" +"checksum wasm-bindgen-futures 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)" = "83420b37346c311b9ed822af41ec2e82839bfe99867ec6c54e2da43b7538771c" +"checksum wasm-bindgen-macro 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)" = "92c2442bf04d89792816650820c3fb407af8da987a9f10028d5317f5b04c2b4a" +"checksum wasm-bindgen-macro-support 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)" = "9c075d27b7991c68ca0f77fe628c3513e64f8c477d422b859e03f28751b46fc5" +"checksum wasm-bindgen-shared 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)" = "83d61fe986a7af038dd8b5ec660e5849cbd9f38e7492b9404cc48b2b4df731d1" +"checksum wasm-bindgen-webidl 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)" = "9b979afb0535fe4749906a674082db1211de8aef466331d43232f63accb7c07c" +"checksum web-sys 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)" = "c84440699cd02ca23bed6f045ffb1497bc18a3c2628bd13e2093186faaaacf6b" "checksum weedle 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3bb43f70885151e629e2a19ce9e50bd730fd436cfd4b666894c9ce4de9141164" "checksum which 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b57acb10231b9493c8472b20cb57317d0679a49e0bdbee44b3b803a6473af164" "checksum widestring 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "effc0e4ff8085673ea7b9b2e3c73f6bd4d118810c9009ed8f1e16bd96c331db6" @@ -3232,11 +3128,11 @@ dependencies = [ "checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" "checksum winapi-util 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7168bab6e1daee33b4557efd0e95d5ca70a03706d39fa5f3fe7a236f584b03c9" "checksum winapi-x86_64-pc-windows-gnu 0.4.0 
(registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" -"checksum wincolor 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "561ed901ae465d6185fa7864d63fbd5720d0ef718366c9a4dc83cf6170d7e9ba" +"checksum wincolor 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "96f5016b18804d24db43cebf3c77269e7569b8954a8464501c216cc5e070eaa9" "checksum x11 2.18.1 (registry+https://github.com/rust-lang/crates.io-index)" = "39697e3123f715483d311b5826e254b6f3cfebdd83cf7ef3358f579c3d68e235" "checksum x11-clipboard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "89bd49c06c9eb5d98e6ba6536cf64ac9f7ee3a009b2f53996d405b3944f6bcea" "checksum xcb 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "5e917a3f24142e9ff8be2414e36c649d47d6cc2ba81f16201cdef96e533e02de" "checksum xdg 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d089681aa106a86fade1b0128fb5daf07d5867a509ab036d99988dec80429a57" "checksum xml-rs 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "541b12c998c5b56aa2b4e6f18f03664eef9a4fd0a246a55594efae6cc2d964b5" -"checksum xmlparser 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ecec95f00fb0ff019153e64ea520f87d1409769db3e8f4db3ea588638a3e1cee" +"checksum xmlparser 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8110496c5bcc0d966b0b2da38d5a791aa139eeb0b80e7840a7463c2b806921eb" "checksum yaml-rust 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "65923dd1784f44da1d2c3dbbc5e822045628c590ba72123e1c73d3c230c4434d" diff --git a/Cargo.toml b/Cargo.toml index 9ae1ada021..e9a0d013ca 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -56,7 +56,6 @@ url = "2.1.0" roxmltree = "0.7.0" nom_locate = "1.0.0" nom-tracable = "0.4.0" -enum-utils = "0.1.1" unicode-xid = "0.2.0" serde_ini = "0.2.0" subprocess = "0.1.18" @@ -75,8 +74,8 @@ bigdecimal = { version = "0.1.0", features = ["serde"] } 
natural = "0.3.0" serde_urlencoded = "0.6.1" sublime_fuzzy = "0.5" -regex = "1" +regex = {version = "1", optional = true } neso = { version = "0.5.0", optional = true } crossterm = { version = "0.10.2", optional = true } syntect = {version = "3.2.0", optional = true } @@ -144,6 +143,7 @@ path = "src/plugins/skip.rs" [[bin]] name = "nu_plugin_match" path = "src/plugins/match.rs" +required-features = ["regex"] [[bin]] name = "nu_plugin_sys" diff --git a/src/parser/parse/token_tree.rs b/src/parser/parse/token_tree.rs index 3c7e4fc11e..c3c1df652a 100644 --- a/src/parser/parse/token_tree.rs +++ b/src/parser/parse/token_tree.rs @@ -4,7 +4,6 @@ use crate::prelude::*; use crate::traits::ToDebug; use crate::{Tagged, Text}; use derive_new::new; -use enum_utils::FromStr; use getset::Getters; use std::fmt; @@ -298,7 +297,7 @@ impl DelimitedNode { } } -#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, FromStr)] +#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)] pub enum Delimiter { Paren, Brace, From 341cc1ea63b17e6acc6d3494f66feb96cd79fcb5 Mon Sep 17 00:00:00 2001 From: Jason Gedge Date: Sun, 13 Oct 2019 12:00:30 -0400 Subject: [PATCH 018/184] Ignore errors in `ls`. `std::fs::metadata` will attempt to follow symlinks, which results in a "No such file or directory" error if the path pointed to by the symlink does not exist. This shouldn't prevent `ls` from succeeding, so we ignore errors. Also, switching to use of `symlink_metadata` means we get stat info on the symlink itself, not what it points to. This means `ls` will now include broken symlinks in its listing. 
--- src/shell/filesystem_shell.rs | 35 ++++++++++++++++++++--------------- 1 file changed, 20 insertions(+), 15 deletions(-) diff --git a/src/shell/filesystem_shell.rs b/src/shell/filesystem_shell.rs index 72a0c241f3..f0adeebeb8 100644 --- a/src/shell/filesystem_shell.rs +++ b/src/shell/filesystem_shell.rs @@ -128,13 +128,16 @@ impl Shell for FilesystemShell { } if let Ok(entry) = entry { let filepath = entry.path(); - let filename = if let Ok(fname) = filepath.strip_prefix(&cwd) { - fname - } else { - Path::new(&filepath) - }; - let value = dir_entry_dict(filename, &entry.metadata().unwrap(), &name_tag)?; - yield ReturnSuccess::value(value); + if let Ok(metadata) = std::fs::symlink_metadata(&filepath) { + let filename = if let Ok(fname) = filepath.strip_prefix(&cwd) { + fname + } else { + Path::new(&filepath) + }; + + let value = dir_entry_dict(filename, &metadata, &name_tag)?; + yield ReturnSuccess::value(value); + } } } }; @@ -164,14 +167,16 @@ impl Shell for FilesystemShell { break; } if let Ok(entry) = entry { - let filename = if let Ok(fname) = entry.strip_prefix(&cwd) { - fname - } else { - Path::new(&entry) - }; - let metadata = std::fs::metadata(&entry).unwrap(); - if let Ok(value) = dir_entry_dict(filename, &metadata, &name_tag) { - yield ReturnSuccess::value(value); + if let Ok(metadata) = std::fs::symlink_metadata(&entry) { + let filename = if let Ok(fname) = entry.strip_prefix(&cwd) { + fname + } else { + Path::new(&entry) + }; + + if let Ok(value) = dir_entry_dict(filename, &metadata, &name_tag) { + yield ReturnSuccess::value(value); + } } } } From 0f7e73646f80d0918c30ccdc511d7cd4f45606c6 Mon Sep 17 00:00:00 2001 From: Jason Gedge Date: Sun, 13 Oct 2019 14:21:44 -0400 Subject: [PATCH 019/184] Bump heim in Cargo.toml to match Cargo.lock --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index e9a0d013ca..955beeddf9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -80,7 +80,7 @@ neso = { version = 
"0.5.0", optional = true } crossterm = { version = "0.10.2", optional = true } syntect = {version = "3.2.0", optional = true } onig_sys = {version = "=69.1.0", optional = true } -heim = {version = "0.0.8-alpha.1", optional = true } +heim = {version = "0.0.8", optional = true } battery = {version = "0.7.4", optional = true } rawkey = {version = "0.1.2", optional = true } clipboard = {version = "0.5", optional = true } From 648d4865b18786ecfb1b31eefc5786632e9fe9bb Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Sun, 13 Oct 2019 21:15:30 +0200 Subject: [PATCH 020/184] Adds unimplemented module, tests. --- src/commands.rs | 2 ++ src/commands/from_ssv.rs | 43 ++++++++++++++++++++++++++++++ tests/filters_test.rs | 56 ++++++++++++++++++++++++++++++++++++++++ 3 files changed, 101 insertions(+) create mode 100644 src/commands/from_ssv.rs diff --git a/src/commands.rs b/src/commands.rs index 72c07e38e6..93729aef68 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -22,6 +22,7 @@ pub(crate) mod from_csv; pub(crate) mod from_ini; pub(crate) mod from_json; pub(crate) mod from_sqlite; +pub(crate) mod from_ssv; pub(crate) mod from_toml; pub(crate) mod from_tsv; pub(crate) mod from_url; @@ -91,6 +92,7 @@ pub(crate) use from_ini::FromINI; pub(crate) use from_json::FromJSON; pub(crate) use from_sqlite::FromDB; pub(crate) use from_sqlite::FromSQLite; +pub(crate) use from_ssv::FromSSV; pub(crate) use from_toml::FromTOML; pub(crate) use from_tsv::FromTSV; pub(crate) use from_url::FromURL; diff --git a/src/commands/from_ssv.rs b/src/commands/from_ssv.rs new file mode 100644 index 0000000000..0da2b6f562 --- /dev/null +++ b/src/commands/from_ssv.rs @@ -0,0 +1,43 @@ +use crate::commands::WholeStreamCommand; +use crate::data::{Primitive, TaggedDictBuilder, Value}; +use crate::prelude::*; + +pub struct FromSSV; + +#[derive(Deserialize)] +pub struct FromSSVArgs { + headerless: bool, +} + +const STRING_REPRESENTATION: &str = "from-ssv"; + +impl WholeStreamCommand for FromSSV { + fn 
name(&self) -> &str { + STRING_REPRESENTATION + } + + fn signature(&self) -> Signature { + Signature::build(STRING_REPRESENTATION).switch("headerless") + } + + fn usage(&self) -> &str { + "Parse text as .ssv and create a table." + } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + args.process(registry, from_ssv)?.run() + } +} + +fn from_ssv( + FromSSVArgs { + headerless: headerless, + }: FromSSVArgs, + RunnableContext { input, name, .. }: RunnableContext, +) -> Result { + unimplemented!() +} diff --git a/tests/filters_test.rs b/tests/filters_test.rs index f994fa4494..70fd752967 100644 --- a/tests/filters_test.rs +++ b/tests/filters_test.rs @@ -355,6 +355,62 @@ fn converts_from_tsv_text_skipping_headers_to_structured_table() { }) } +#[test] +fn converts_from_ssv_text_to_structured_table() { + Playground::setup("filter_from_ssv_test_1", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContentToBeTrimmed( + "oc_get_svc.ssv", + r#" + NAME LABELS SELECTOR IP PORT(S) + docker-registry docker-registry=default docker-registry=default 172.30.78.158 5000/TCP + kubernetes component=apiserver,provider=kubernetes 172.30.0.2 443/TCP + kubernetes-ro component=apiserver,provider=kubernetes 172.30.0.1 80/TCP + "#, + )]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + open oc_get_svc.ssv + | from-ssv + | nth 0 + | get NAME + | echo $it + "# + )); + + assert_eq!(actual, "docker-registry"); + }) +} + +#[test] +fn converts_from_ssv_text_skipping_headers_to_structured_table() { + Playground::setup("filter_from_ssv_test_2", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContentToBeTrimmed( + "oc_get_svc.ssv", + r#" + NAME LABELS SELECTOR IP PORT(S) + docker-registry docker-registry=default docker-registry=default 172.30.78.158 5000/TCP + kubernetes component=apiserver,provider=kubernetes 172.30.0.2 443/TCP + kubernetes-ro component=apiserver,provider=kubernetes 172.30.0.1 80/TCP + "#, + )]); + + let actual = nu!( + cwd: 
dirs.test(), h::pipeline( + r#" + open oc_get_svc.ssv + | from-ssv --headerless + | nth 2 + | get Column2 + | echo $it + "# + )); + + assert_eq!(actual, "component=apiserver,provider=kubernetes"); + }) +} + #[test] fn can_convert_table_to_bson_and_back_into_table() { let actual = nu!( From de1c4e6c8894915825d6bd67cc7ed85d26b25be6 Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Sun, 13 Oct 2019 22:50:45 +0200 Subject: [PATCH 021/184] Implements from-ssv --- src/cli.rs | 1 + src/commands/from_ssv.rs | 88 ++++++++++++++++++++++++++++++++++++++-- tests/filters_test.rs | 12 +++--- 3 files changed, 91 insertions(+), 10 deletions(-) diff --git a/src/cli.rs b/src/cli.rs index 38e2474faf..5bfd7ff681 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -272,6 +272,7 @@ pub async fn cli() -> Result<(), Box> { whole_stream_command(Env), whole_stream_command(FromCSV), whole_stream_command(FromTSV), + whole_stream_command(FromSSV), whole_stream_command(FromINI), whole_stream_command(FromBSON), whole_stream_command(FromJSON), diff --git a/src/commands/from_ssv.rs b/src/commands/from_ssv.rs index 0da2b6f562..59ba2f2f8f 100644 --- a/src/commands/from_ssv.rs +++ b/src/commands/from_ssv.rs @@ -33,11 +33,91 @@ impl WholeStreamCommand for FromSSV { } } +fn from_ssv_string_to_value( + s: &str, + headerless: bool, + tag: impl Into, +) -> Result, &str> { + let mut lines = s.lines(); + let tag = tag.into(); + + let headers = lines + .next() + .expect("No content.") + .split_whitespace() + .map(|s| s.to_owned()) + .collect::>(); + + let header_row = if headerless { + (0..headers.len()) + .map(|i| format!("Column{}", i + 1)) + .collect::>() + } else { + headers + }; + + let rows = lines + .map(|l| { + let mut row = TaggedDictBuilder::new(tag); + for (column, value) in header_row.iter().zip(l.split_whitespace()) { + row.insert_tagged( + column.to_owned(), + Value::Primitive(Primitive::String(String::from(value))).tagged(tag), + ) + } + row.into_tagged_value() + }) + .collect(); + + 
Ok(Tagged::from_item(Value::Table(rows), tag)) +} + fn from_ssv( - FromSSVArgs { - headerless: headerless, - }: FromSSVArgs, + FromSSVArgs { headerless }: FromSSVArgs, RunnableContext { input, name, .. }: RunnableContext, ) -> Result { - unimplemented!() + let stream = async_stream! { + let values: Vec> = input.values.collect().await; + let mut concat_string = String::new(); + let mut latest_tag: Option = None; + + for value in values { + let value_tag = value.tag(); + latest_tag = Some(value_tag); + match value.item { + Value::Primitive(Primitive::String(s)) => { + concat_string.push_str(&s); + concat_string.push_str("\n"); + + } + _ => yield Err(ShellError::labeled_error_with_secondary ( + "Expected a string from pipeline", + "requires string input", + name, + "value originates from here", + value_tag + )), + } + } + + match from_ssv_string_to_value(&concat_string, headerless, name) { + Ok(x) => match x { + Tagged { item: Value::Table(list), ..} => { + for l in list { yield ReturnSuccess::value(l) } + } + x => yield ReturnSuccess::value(x) + }, + Err(_) => if let Some(last_tag) = latest_tag { + yield Err(ShellError::labeled_error_with_secondary( + "Could not parse as SSV", + "input cannot be parsed ssv", + name, + "value originates from here", + last_tag, + )) + } + } + }; + + Ok(stream.to_output_stream()) } diff --git a/tests/filters_test.rs b/tests/filters_test.rs index 70fd752967..ed841af4ca 100644 --- a/tests/filters_test.rs +++ b/tests/filters_test.rs @@ -359,7 +359,7 @@ fn converts_from_tsv_text_skipping_headers_to_structured_table() { fn converts_from_ssv_text_to_structured_table() { Playground::setup("filter_from_ssv_test_1", |dirs, sandbox| { sandbox.with_files(vec![FileWithContentToBeTrimmed( - "oc_get_svc.ssv", + "oc_get_svc.txt", r#" NAME LABELS SELECTOR IP PORT(S) docker-registry docker-registry=default docker-registry=default 172.30.78.158 5000/TCP @@ -371,15 +371,15 @@ fn converts_from_ssv_text_to_structured_table() { let actual = nu!( cwd: 
dirs.test(), h::pipeline( r#" - open oc_get_svc.ssv + open oc_get_svc.txt | from-ssv | nth 0 - | get NAME + | get IP | echo $it "# )); - assert_eq!(actual, "docker-registry"); + assert_eq!(actual, "172.30.78.158"); }) } @@ -387,7 +387,7 @@ fn converts_from_ssv_text_to_structured_table() { fn converts_from_ssv_text_skipping_headers_to_structured_table() { Playground::setup("filter_from_ssv_test_2", |dirs, sandbox| { sandbox.with_files(vec![FileWithContentToBeTrimmed( - "oc_get_svc.ssv", + "oc_get_svc.txt", r#" NAME LABELS SELECTOR IP PORT(S) docker-registry docker-registry=default docker-registry=default 172.30.78.158 5000/TCP @@ -399,7 +399,7 @@ fn converts_from_ssv_text_skipping_headers_to_structured_table() { let actual = nu!( cwd: dirs.test(), h::pipeline( r#" - open oc_get_svc.ssv + open oc_get_svc.txt | from-ssv --headerless | nth 2 | get Column2 From 8422d40e2c36edc31bf42f8d38b8305abdfbe19c Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Sun, 13 Oct 2019 23:09:10 +0200 Subject: [PATCH 022/184] Add from-ssv to readme. --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 7df2c92ec9..45867f1daf 100644 --- a/README.md +++ b/README.md @@ -284,6 +284,7 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat | from-ini | Parse text as .ini and create table | | from-json | Parse text as .json and create table | | from-sqlite | Parse binary data as sqlite .db and create table | +| from-ssv | Parse text as whitespace-separated values and create table| | from-toml | Parse text as .toml and create table | | from-tsv | Parse text as .tsv and create table | | from-url | Parse urlencoded string and create a table | From 38b5979881ed974824f4c94e07403d089ea8b424 Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Sun, 13 Oct 2019 23:09:24 +0200 Subject: [PATCH 023/184] Make usage string clearer. 
--- src/commands/from_ssv.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/commands/from_ssv.rs b/src/commands/from_ssv.rs index 59ba2f2f8f..ede06da2da 100644 --- a/src/commands/from_ssv.rs +++ b/src/commands/from_ssv.rs @@ -21,7 +21,7 @@ impl WholeStreamCommand for FromSSV { } fn usage(&self) -> &str { - "Parse text as .ssv and create a table." + "Parse text as whitespace-separated values and create a table." } fn run( From 20e891db6e59c02e00ae5f0fbfa61c7f843a4295 Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Sun, 13 Oct 2019 23:09:40 +0200 Subject: [PATCH 024/184] Move variable assignment to clarify use. --- src/commands/from_ssv.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/commands/from_ssv.rs b/src/commands/from_ssv.rs index ede06da2da..2f3f574fb3 100644 --- a/src/commands/from_ssv.rs +++ b/src/commands/from_ssv.rs @@ -39,7 +39,6 @@ fn from_ssv_string_to_value( tag: impl Into, ) -> Result, &str> { let mut lines = s.lines(); - let tag = tag.into(); let headers = lines .next() @@ -56,6 +55,7 @@ fn from_ssv_string_to_value( headers }; + let tag = tag.into(); let rows = lines .map(|l| { let mut row = TaggedDictBuilder::new(tag); From 6c0bf6e0abc3a0f2f84bb17b748b63ed7e4ddb91 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Mon, 14 Oct 2019 17:48:27 +1300 Subject: [PATCH 025/184] Fix panic if external is not found --- src/commands/classified.rs | 65 ++++++++++++++++++++++---------------- 1 file changed, 37 insertions(+), 28 deletions(-) diff --git a/src/commands/classified.rs b/src/commands/classified.rs index 105daff771..c2380d4ffe 100644 --- a/src/commands/classified.rs +++ b/src/commands/classified.rs @@ -317,41 +317,50 @@ impl ExternalCommand { trace!(target: "nu::run::external", "set up stdin pipe"); trace!(target: "nu::run::external", "built process {:?}", process); - let mut popen = process.popen().unwrap(); + let popen = process.popen(); trace!(target: "nu::run::external", "next = {:?}", 
stream_next); - match stream_next { - StreamNext::Last => { - let _ = popen.detach(); - loop { - match popen.poll() { - None => { - let _ = std::thread::sleep(std::time::Duration::new(0, 100000000)); - } - _ => { - let _ = popen.terminate(); - break; + if let Ok(mut popen) = popen { + match stream_next { + StreamNext::Last => { + let _ = popen.detach(); + loop { + match popen.poll() { + None => { + let _ = std::thread::sleep(std::time::Duration::new(0, 100000000)); + } + _ => { + let _ = popen.terminate(); + break; + } } } + Ok(ClassifiedInputStream::new()) + } + StreamNext::External => { + let _ = popen.detach(); + let stdout = popen.stdout.take().unwrap(); + Ok(ClassifiedInputStream::from_stdout(stdout)) + } + StreamNext::Internal => { + let _ = popen.detach(); + let stdout = popen.stdout.take().unwrap(); + let file = futures::io::AllowStdIo::new(stdout); + let stream = Framed::new(file, LinesCodec {}); + let stream = + stream.map(move |line| Value::string(line.unwrap()).tagged(&name_tag)); + Ok(ClassifiedInputStream::from_input_stream( + stream.boxed() as BoxStream<'static, Tagged> + )) } - Ok(ClassifiedInputStream::new()) - } - StreamNext::External => { - let _ = popen.detach(); - let stdout = popen.stdout.take().unwrap(); - Ok(ClassifiedInputStream::from_stdout(stdout)) - } - StreamNext::Internal => { - let _ = popen.detach(); - let stdout = popen.stdout.take().unwrap(); - let file = futures::io::AllowStdIo::new(stdout); - let stream = Framed::new(file, LinesCodec {}); - let stream = stream.map(move |line| Value::string(line.unwrap()).tagged(&name_tag)); - Ok(ClassifiedInputStream::from_input_stream( - stream.boxed() as BoxStream<'static, Tagged> - )) } + } else { + return Err(ShellError::labeled_error( + "Command not found", + "command not found", + name_tag, + )); } } } From 7c40aed73878c84fcd9cbd0d2bf9bf50ce6f83ec Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Mon, 14 Oct 2019 18:00:10 +1300 Subject: [PATCH 026/184] Don't panick of no suggestions are 
found --- src/evaluate/evaluator.rs | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/src/evaluate/evaluator.rs b/src/evaluate/evaluator.rs index 1e19c31e78..75eb2f4667 100644 --- a/src/evaluate/evaluator.rs +++ b/src/evaluate/evaluator.rs @@ -117,11 +117,19 @@ pub(crate) fn evaluate_baseline_expr( possible_matches.sort(); - return Err(ShellError::labeled_error( - "Unknown column", - format!("did you mean '{}'?", possible_matches[0].1), - &tag, - )); + if possible_matches.len() > 0 { + return Err(ShellError::labeled_error( + "Unknown column", + format!("did you mean '{}'?", possible_matches[0].1), + &tag, + )); + } else { + return Err(ShellError::labeled_error( + "Unknown column", + "row does not have this column", + &tag, + )); + } } Some(next) => { item = next.clone().item.tagged(&tag); From a4a1588fbcebd93ad855930fb87fde91dc668676 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Mon, 14 Oct 2019 18:28:54 +1300 Subject: [PATCH 027/184] Fix confusing unnamed column and crash --- src/commands/get.rs | 18 +++++++++++++----- src/format/table.rs | 2 +- 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/src/commands/get.rs b/src/commands/get.rs index e8fc62ca64..21dbe6b0a7 100644 --- a/src/commands/get.rs +++ b/src/commands/get.rs @@ -60,11 +60,19 @@ pub fn get_column_path( possible_matches.sort(); - return Err(ShellError::labeled_error( - "Unknown column", - format!("did you mean '{}'?", possible_matches[0].1), - tag_for_tagged_list(path.iter().map(|p| p.tag())), - )); + if possible_matches.len() > 0 { + return Err(ShellError::labeled_error( + "Unknown column", + format!("did you mean '{}'?", possible_matches[0].1), + tag_for_tagged_list(path.iter().map(|p| p.tag())), + )); + } else { + return Err(ShellError::labeled_error( + "Unknown column", + "row does not contain this column", + tag_for_tagged_list(path.iter().map(|p| p.tag())), + )); + } } } } diff --git a/src/format/table.rs b/src/format/table.rs index 
b2680a6c96..f4b318dae8 100644 --- a/src/format/table.rs +++ b/src/format/table.rs @@ -42,7 +42,7 @@ impl TableView { let mut headers = TableView::merge_descriptors(values); if headers.len() == 0 { - headers.push("value".to_string()); + headers.push("".to_string()); } let mut entries = vec![]; From 63039666b0aa36f40b15b8bdd740154e7bdf6d51 Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Mon, 14 Oct 2019 07:37:34 +0200 Subject: [PATCH 028/184] Changes from_ssv_to_string_value to return an Option. --- src/commands/from_ssv.rs | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/src/commands/from_ssv.rs b/src/commands/from_ssv.rs index 2f3f574fb3..3232fd1e3e 100644 --- a/src/commands/from_ssv.rs +++ b/src/commands/from_ssv.rs @@ -37,12 +37,11 @@ fn from_ssv_string_to_value( s: &str, headerless: bool, tag: impl Into, -) -> Result, &str> { +) -> Option> { let mut lines = s.lines(); let headers = lines - .next() - .expect("No content.") + .next()? .split_whitespace() .map(|s| s.to_owned()) .collect::>(); @@ -69,7 +68,7 @@ fn from_ssv_string_to_value( }) .collect(); - Ok(Tagged::from_item(Value::Table(rows), tag)) + Some(Tagged::from_item(Value::Table(rows), tag)) } fn from_ssv( @@ -101,13 +100,13 @@ fn from_ssv( } match from_ssv_string_to_value(&concat_string, headerless, name) { - Ok(x) => match x { + Some(x) => match x { Tagged { item: Value::Table(list), ..} => { for l in list { yield ReturnSuccess::value(l) } } x => yield ReturnSuccess::value(x) }, - Err(_) => if let Some(last_tag) = latest_tag { + None => if let Some(last_tag) = latest_tag { yield Err(ShellError::labeled_error_with_secondary( "Could not parse as SSV", "input cannot be parsed ssv", @@ -115,7 +114,7 @@ fn from_ssv( "value originates from here", last_tag, )) - } + }, } }; From 38225d0dbadd2e8af530085e0a137d98fbf1d37e Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Mon, 14 Oct 2019 07:48:10 +0200 Subject: [PATCH 029/184] Removes extra newline --- 
src/commands/from_ssv.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/commands/from_ssv.rs b/src/commands/from_ssv.rs index 3232fd1e3e..a47a8662f5 100644 --- a/src/commands/from_ssv.rs +++ b/src/commands/from_ssv.rs @@ -86,8 +86,6 @@ fn from_ssv( match value.item { Value::Primitive(Primitive::String(s)) => { concat_string.push_str(&s); - concat_string.push_str("\n"); - } _ => yield Err(ShellError::labeled_error_with_secondary ( "Expected a string from pipeline", From 0b210ce5bf55b91ce88a8706724deeb844953634 Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Mon, 14 Oct 2019 07:48:19 +0200 Subject: [PATCH 030/184] Filters out empty lines before table creation. --- src/commands/from_ssv.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/commands/from_ssv.rs b/src/commands/from_ssv.rs index a47a8662f5..41a611f8cf 100644 --- a/src/commands/from_ssv.rs +++ b/src/commands/from_ssv.rs @@ -38,7 +38,7 @@ fn from_ssv_string_to_value( headerless: bool, tag: impl Into, ) -> Option> { - let mut lines = s.lines(); + let mut lines = s.lines().filter(|l| !l.is_empty()); let headers = lines .next()? From a9293f62a8fd9565a67e5b01ada5d120bf5ff37e Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Mon, 14 Oct 2019 09:43:54 +0200 Subject: [PATCH 031/184] Adds some initial ideas for refactoring. 
--- src/commands/from_ssv.rs | 40 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/src/commands/from_ssv.rs b/src/commands/from_ssv.rs index 41a611f8cf..354d2cb2d1 100644 --- a/src/commands/from_ssv.rs +++ b/src/commands/from_ssv.rs @@ -33,6 +33,27 @@ impl WholeStreamCommand for FromSSV { } } +fn string_to_table(s: &str, headerless: bool) -> std::iter::Map> { + let mut lines = s.lines().filter(|l| !l.trim().is_empty()); + + let headers = lines + .next() + .unwrap() + .split_whitespace() + .map(|s| s.to_owned()) + .collect::>(); + + let header_row = if headerless { + (0..headers.len()) + .map(|i| format!("Column{}", i + 1)) + .collect::>() + } else { + headers + }; + + lines.map(|l| header_row.iter().zip(l.split_whitespace())) +} + fn from_ssv_string_to_value( s: &str, headerless: bool, @@ -118,3 +139,22 @@ fn from_ssv( Ok(stream.to_output_stream()) } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn it_trims_empty_and_whitespace_only_lines() { + let input = r#" + + a b + + 1 2 + + 3 4 + "#; + + let +} +} \ No newline at end of file From 104b7824f58631d8dcfef96493827d9375ea7937 Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Mon, 14 Oct 2019 16:34:06 +0200 Subject: [PATCH 032/184] Updates return types. --- src/commands/from_ssv.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/commands/from_ssv.rs b/src/commands/from_ssv.rs index 354d2cb2d1..5f88147afe 100644 --- a/src/commands/from_ssv.rs +++ b/src/commands/from_ssv.rs @@ -33,7 +33,7 @@ impl WholeStreamCommand for FromSSV { } } -fn string_to_table(s: &str, headerless: bool) -> std::iter::Map> { +fn string_to_table(s: &str, headerless: bool) -> Vec> { let mut lines = s.lines().filter(|l| !l.trim().is_empty()); let headers = lines @@ -51,7 +51,7 @@ fn string_to_table(s: &str, headerless: bool) -> std::iter::Map Date: Mon, 14 Oct 2019 13:46:37 -0500 Subject: [PATCH 033/184] Color escaped externals. 
--- src/parser/hir/syntax_shape.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/parser/hir/syntax_shape.rs b/src/parser/hir/syntax_shape.rs index 8accfbde2b..72fcf9ecb4 100644 --- a/src/parser/hir/syntax_shape.rs +++ b/src/parser/hir/syntax_shape.rs @@ -726,8 +726,8 @@ impl FallibleColorSyntax for CommandHeadShape { match atom.item { // If the head is an explicit external command (^cmd), color it as an external command - AtomicToken::ExternalCommand { command } => { - shapes.push(FlatShape::ExternalCommand.spanned(command)); + AtomicToken::ExternalCommand { .. } => { + shapes.push(FlatShape::ExternalCommand.spanned(atom.span)); Ok(CommandHeadKind::External) } From 22d2360c4b0760208cf582a7eaa1301be819949b Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Mon, 14 Oct 2019 22:00:25 +0200 Subject: [PATCH 034/184] Adds conversion test for leading whitespace. Refactors string parsing into a separate function. --- src/commands/from_ssv.rs | 81 +++++++++++++++++++++++++--------------- 1 file changed, 50 insertions(+), 31 deletions(-) diff --git a/src/commands/from_ssv.rs b/src/commands/from_ssv.rs index 5f88147afe..56a3f10868 100644 --- a/src/commands/from_ssv.rs +++ b/src/commands/from_ssv.rs @@ -44,14 +44,22 @@ fn string_to_table(s: &str, headerless: bool) -> Vec> { .collect::>(); let header_row = if headerless { - (0..headers.len()) - .map(|i| format!("Column{}", i + 1)) + (1..=headers.len()) + .map(|i| format!("Column{}", i)) .collect::>() } else { headers }; - lines.map(|l| header_row.iter().zip(l.split_whitespace())).collect() + lines + .map(|l| { + header_row + .iter() + .zip(l.split_whitespace()) + .map(|(a, b)| (String::from(a), String::from(b))) + .collect() + }) + .collect() } fn from_ssv_string_to_value( @@ -59,33 +67,18 @@ fn from_ssv_string_to_value( headerless: bool, tag: impl Into, ) -> Option> { - let mut lines = s.lines().filter(|l| !l.is_empty()); - - let headers = lines - .next()? 
- .split_whitespace() - .map(|s| s.to_owned()) - .collect::>(); - - let header_row = if headerless { - (0..headers.len()) - .map(|i| format!("Column{}", i + 1)) - .collect::>() - } else { - headers - }; - let tag = tag.into(); - let rows = lines - .map(|l| { - let mut row = TaggedDictBuilder::new(tag); - for (column, value) in header_row.iter().zip(l.split_whitespace()) { - row.insert_tagged( - column.to_owned(), - Value::Primitive(Primitive::String(String::from(value))).tagged(tag), + let rows = string_to_table(s, headerless) + .iter() + .map(|row| { + let mut tagged_dict = TaggedDictBuilder::new(tag); + for (col, entry) in row { + tagged_dict.insert_tagged( + col, + Value::Primitive(Primitive::String(String::from(entry))).tagged(tag), ) } - row.into_tagged_value() + tagged_dict.into_tagged_value() }) .collect(); @@ -143,18 +136,44 @@ fn from_ssv( #[cfg(test)] mod tests { use super::*; + fn owned(x: &str, y: &str) -> (String, String) { + (String::from(x), String::from(y)) + } #[test] fn it_trims_empty_and_whitespace_only_lines() { let input = r#" - a b + a b - 1 2 + 1 2 3 4 "#; + let result = string_to_table(input, false); + assert_eq!( + result, + vec![ + vec![owned("a", "1"), owned("b", "2")], + vec![owned("a", "3"), owned("b", "4")] + ] + ); + } - let + #[test] + fn it_ignores_headers_when_headerless() { + let input = r#" + a b + 1 2 + 3 4 + "#; + let result = string_to_table(input, true); + assert_eq!( + result, + vec![ + vec![owned("Column1", "1"), owned("Column2", "2")], + vec![owned("Column1", "3"), owned("Column2", "4")] + ] + ); + } } -} \ No newline at end of file From 43ead45db6755cfdcc2e65dc367c7de2ba6f611c Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Mon, 14 Oct 2019 22:03:17 +0200 Subject: [PATCH 035/184] Removes rust_src_path and ssl_cert_file vars. 
--- shell.nix | 2 -- 1 file changed, 2 deletions(-) diff --git a/shell.nix b/shell.nix index f5c61ac0a8..d528cf8491 100644 --- a/shell.nix +++ b/shell.nix @@ -27,6 +27,4 @@ let in stdenv.mkDerivation { name = "nushell-rust"; buildInputs = nu-deps ++ rust; - RUST_SRC_PATH = "${nightly}/lib/rustlib/src/rust/src"; - SSL_CERT_FILE = "/etc/ssl/certs/ca-certificates.crt"; } From ee8cd671cb206f412c5e5b25b07b9feb9661f632 Mon Sep 17 00:00:00 2001 From: Jason Gedge Date: Mon, 14 Oct 2019 16:30:23 -0400 Subject: [PATCH 036/184] Fix bug with multiple input objects to an external command. Previously, we would build a command that looked something like this: "$it" "&&" "" "$it" So that the "&&" and "" would also be arguments to the command, instead of a chained command. This commit builds up a command string that can be passed to an external shell. --- src/commands/classified.rs | 74 ++++++++++++++++++-------------------- tests/commands_test.rs | 22 ++++++++++++ tests/helpers/mod.rs | 54 ++++++++++++++++++++++++++++ 3 files changed, 110 insertions(+), 40 deletions(-) diff --git a/src/commands/classified.rs b/src/commands/classified.rs index c2380d4ffe..440413ddd4 100644 --- a/src/commands/classified.rs +++ b/src/commands/classified.rs @@ -225,7 +225,6 @@ impl ExternalCommand { ) -> Result { let stdin = input.stdin; let inputs: Vec> = input.objects.into_vec().await; - let name_tag = self.name_tag.clone(); trace!(target: "nu::run::external", "-> {}", self.name); trace!(target: "nu::run::external", "inputs = {:?}", inputs); @@ -235,53 +234,47 @@ impl ExternalCommand { arg_string.push_str(&arg); } + trace!(target: "nu::run::external", "command = {:?}", self.name); + let mut process; - - process = Exec::cmd(&self.name); - - trace!(target: "nu::run::external", "command = {:?}", process); - if arg_string.contains("$it") { - let mut first = true; - - for i in &inputs { - if i.as_string().is_err() { - let mut tag = None; - for arg in &self.args { - if arg.item.contains("$it") { - tag = 
Some(arg.tag()); + let input_strings = inputs + .iter() + .map(|i| { + i.as_string().map_err(|_| { + let arg = self.args.iter().find(|arg| arg.item.contains("$it")); + if let Some(arg) = arg { + ShellError::labeled_error( + "External $it needs string data", + "given row instead of string data", + arg.tag(), + ) + } else { + ShellError::labeled_error( + "Error: $it needs string data", + "given something else", + self.name_tag.clone(), + ) } - } - if let Some(tag) = tag { - return Err(ShellError::labeled_error( - "External $it needs string data", - "given row instead of string data", - tag, - )); - } else { - return Err(ShellError::labeled_error( - "Error: $it needs string data", - "given something else", - name_tag, - )); - } - } - if !first { - process = process.arg("&&"); - process = process.arg(&self.name); - } else { - first = false; - } + }) + }) + .collect::, ShellError>>()?; - for arg in &self.args { + let commands = input_strings.iter().map(|i| { + let args = self.args.iter().filter_map(|arg| { if arg.chars().all(|c| c.is_whitespace()) { - continue; + None + } else { + Some(arg.replace("$it", &i)) } + }); - process = process.arg(&arg.replace("$it", &i.as_string()?)); - } - } + format!("{} {}", self.name, itertools::join(args, " ")) + }); + + process = Exec::shell(itertools::join(commands, " && ")) } else { + process = Exec::cmd(&self.name); for arg in &self.args { let arg_chars: Vec<_> = arg.chars().collect(); if arg_chars.len() > 1 @@ -321,6 +314,7 @@ impl ExternalCommand { trace!(target: "nu::run::external", "next = {:?}", stream_next); + let name_tag = self.name_tag.clone(); if let Ok(mut popen) = popen { match stream_next { StreamNext::Last => { diff --git a/tests/commands_test.rs b/tests/commands_test.rs index 30636eafca..cfa6f74334 100644 --- a/tests/commands_test.rs +++ b/tests/commands_test.rs @@ -164,6 +164,28 @@ fn save_figures_out_intelligently_where_to_write_out_with_metadata() { }) } +#[test] +fn 
it_arg_works_with_many_inputs_to_external_command() { + Playground::setup("it_arg_works_with_many_inputs", |dirs, sandbox| { + sandbox.with_files(vec![ + FileWithContent("file1", "text"), + FileWithContent("file2", " and more text"), + ]); + + let (stdout, stderr) = nu_combined!( + cwd: dirs.test(), h::pipeline( + r#" + echo file1 file2 + | split-row " " + | cat $it + "# + )); + + assert_eq!("text and more text", stdout); + assert!(!stderr.contains("No such file or directory")); + }) +} + #[test] fn save_can_write_out_csv() { Playground::setup("save_test_2", |dirs, _| { diff --git a/tests/helpers/mod.rs b/tests/helpers/mod.rs index 199038b531..86c8a10e7f 100644 --- a/tests/helpers/mod.rs +++ b/tests/helpers/mod.rs @@ -155,6 +155,60 @@ macro_rules! nu_error { }}; } +#[macro_export] +macro_rules! nu_combined { + (cwd: $cwd:expr, $path:expr, $($part:expr),*) => {{ + use $crate::helpers::DisplayPath; + + let path = format!($path, $( + $part.display_path() + ),*); + + nu_combined!($cwd, &path) + }}; + + (cwd: $cwd:expr, $path:expr) => {{ + nu_combined!($cwd, $path) + }}; + + ($cwd:expr, $path:expr) => {{ + pub use std::error::Error; + pub use std::io::prelude::*; + pub use std::process::{Command, Stdio}; + + let commands = &*format!( + " + cd {} + {} + exit", + $crate::helpers::in_directory($cwd), + $crate::helpers::DisplayPath::display_path(&$path) + ); + + let mut process = Command::new(helpers::executable_path()) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .spawn() + .expect("couldn't run test"); + + let stdin = process.stdin.as_mut().expect("couldn't open stdin"); + stdin + .write_all(commands.as_bytes()) + .expect("couldn't write to stdin"); + + let output = process + .wait_with_output() + .expect("couldn't read from stdout/stderr"); + + let err = String::from_utf8_lossy(&output.stderr).into_owned(); + let out = String::from_utf8_lossy(&output.stdout).into_owned(); + let out = out.replace("\r\n", ""); + let out = out.replace("\n", 
""); + (out, err) + }}; +} + pub enum Stub<'a> { FileWithContent(&'a str, &'a str), FileWithContentToBeTrimmed(&'a str, &'a str), From b2c53a09672f86f6f6e35001f05a4aa6f8fd8a1e Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Mon, 14 Oct 2019 23:14:45 +0200 Subject: [PATCH 037/184] Updates commands to work after tag is no longer copy. --- src/commands/from_ssv.rs | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/src/commands/from_ssv.rs b/src/commands/from_ssv.rs index 56a3f10868..782fb531f4 100644 --- a/src/commands/from_ssv.rs +++ b/src/commands/from_ssv.rs @@ -71,18 +71,18 @@ fn from_ssv_string_to_value( let rows = string_to_table(s, headerless) .iter() .map(|row| { - let mut tagged_dict = TaggedDictBuilder::new(tag); + let mut tagged_dict = TaggedDictBuilder::new(&tag); for (col, entry) in row { tagged_dict.insert_tagged( col, - Value::Primitive(Primitive::String(String::from(entry))).tagged(tag), + Value::Primitive(Primitive::String(String::from(entry))).tagged(&tag), ) } tagged_dict.into_tagged_value() }) .collect(); - Some(Tagged::from_item(Value::Table(rows), tag)) + Some(Value::Table(rows).tagged(&tag)) } fn from_ssv( @@ -96,7 +96,7 @@ fn from_ssv( for value in values { let value_tag = value.tag(); - latest_tag = Some(value_tag); + latest_tag = Some(value_tag.clone()); match value.item { Value::Primitive(Primitive::String(s)) => { concat_string.push_str(&s); @@ -104,27 +104,27 @@ fn from_ssv( _ => yield Err(ShellError::labeled_error_with_secondary ( "Expected a string from pipeline", "requires string input", - name, + &name, "value originates from here", - value_tag + &value_tag )), } } - match from_ssv_string_to_value(&concat_string, headerless, name) { + match from_ssv_string_to_value(&concat_string, headerless, name.clone()) { Some(x) => match x { Tagged { item: Value::Table(list), ..} => { for l in list { yield ReturnSuccess::value(l) } } x => yield ReturnSuccess::value(x) }, - None => if let Some(last_tag) = 
latest_tag { + None => if let Some(tag) = latest_tag { yield Err(ShellError::labeled_error_with_secondary( "Could not parse as SSV", "input cannot be parsed ssv", - name, + &name, "value originates from here", - last_tag, + &tag, )) }, } From de12393eaf497286288d95c23431104fbc87fa16 Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Mon, 14 Oct 2019 23:25:52 +0200 Subject: [PATCH 038/184] Updates shell.nix. --- shell.nix | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/shell.nix b/shell.nix index d528cf8491..4e287ab115 100644 --- a/shell.nix +++ b/shell.nix @@ -8,7 +8,7 @@ with pkgs; let nightly = (pkgs.rustChannelOf { - date = "2019-09-01"; + date = "2019-10-14"; channel = "nightly"; }).rust.override { extensions = [ @@ -27,4 +27,5 @@ let in stdenv.mkDerivation { name = "nushell-rust"; buildInputs = nu-deps ++ rust; + SSL_CERT_FILE = "/etc/ssl/certs/ca-certificates.crt"; } From d21389d549109324a4487d59a2b9d114df38d326 Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Tue, 15 Oct 2019 00:24:32 +0200 Subject: [PATCH 039/184] Removes unwrap. A rogue unwrap had been left in the code, but has now been replaced by an option. --- src/commands/from_ssv.rs | 42 ++++++++++++++++++++++++---------------- 1 file changed, 25 insertions(+), 17 deletions(-) diff --git a/src/commands/from_ssv.rs b/src/commands/from_ssv.rs index 782fb531f4..1be9b4567a 100644 --- a/src/commands/from_ssv.rs +++ b/src/commands/from_ssv.rs @@ -33,12 +33,11 @@ impl WholeStreamCommand for FromSSV { } } -fn string_to_table(s: &str, headerless: bool) -> Vec> { +fn string_to_table(s: &str, headerless: bool) -> Option>> { let mut lines = s.lines().filter(|l| !l.trim().is_empty()); let headers = lines - .next() - .unwrap() + .next()? 
.split_whitespace() .map(|s| s.to_owned()) .collect::>(); @@ -51,15 +50,17 @@ fn string_to_table(s: &str, headerless: bool) -> Vec> { headers }; - lines - .map(|l| { - header_row - .iter() - .zip(l.split_whitespace()) - .map(|(a, b)| (String::from(a), String::from(b))) - .collect() - }) - .collect() + Some( + lines + .map(|l| { + header_row + .iter() + .zip(l.split_whitespace()) + .map(|(a, b)| (String::from(a), String::from(b))) + .collect() + }) + .collect(), + ) } fn from_ssv_string_to_value( @@ -68,7 +69,7 @@ fn from_ssv_string_to_value( tag: impl Into, ) -> Option> { let tag = tag.into(); - let rows = string_to_table(s, headerless) + let rows = string_to_table(s, headerless)? .iter() .map(|row| { let mut tagged_dict = TaggedDictBuilder::new(&tag); @@ -153,10 +154,10 @@ mod tests { let result = string_to_table(input, false); assert_eq!( result, - vec![ + Some(vec![ vec![owned("a", "1"), owned("b", "2")], vec![owned("a", "3"), owned("b", "4")] - ] + ]) ); } @@ -170,10 +171,17 @@ mod tests { let result = string_to_table(input, true); assert_eq!( result, - vec![ + Some(vec![ vec![owned("Column1", "1"), owned("Column2", "2")], vec![owned("Column1", "3"), owned("Column2", "4")] - ] + ]) ); } + + #[test] + fn it_returns_none_given_an_empty_string() { + let input = ""; + let result = string_to_table(input, true); + assert_eq!(result, None); + } } From 65008bb912076f288d0c0dbf8ba0ae9f3641d374 Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Mon, 14 Oct 2019 23:46:19 +0200 Subject: [PATCH 040/184] Deletes nix-specific configuration. 
--- .envrc | 1 - shell.nix | 31 ------------------------------- 2 files changed, 32 deletions(-) delete mode 100644 .envrc delete mode 100644 shell.nix diff --git a/.envrc b/.envrc deleted file mode 100644 index 65326bb6dd..0000000000 --- a/.envrc +++ /dev/null @@ -1 +0,0 @@ -use nix \ No newline at end of file diff --git a/shell.nix b/shell.nix deleted file mode 100644 index 4e287ab115..0000000000 --- a/shell.nix +++ /dev/null @@ -1,31 +0,0 @@ -{ pkgs ? import { - overlays = [ - (import (builtins.fetchTarball - "https://github.com/mozilla/nixpkgs-mozilla/archive/master.tar.gz")) - ]; -} }: -with pkgs; -let - - nightly = (pkgs.rustChannelOf { - date = "2019-10-14"; - channel = "nightly"; - }).rust.override { - extensions = [ - "clippy-preview" - "rls-preview" - "rust-analysis" - "rust-src" - "rustfmt-preview" - ]; - }; - - nu-deps = [ openssl_1_1 pkg-config x11 python3 ]; - - rust = [ nightly rustracer cargo-watch ]; - -in stdenv.mkDerivation { - name = "nushell-rust"; - buildInputs = nu-deps ++ rust; - SSL_CERT_FILE = "/etc/ssl/certs/ca-certificates.crt"; -} From f20f3f56c7c967d2be19d11c99c69f9ff3017852 Mon Sep 17 00:00:00 2001 From: Yehuda Katz Date: Mon, 14 Oct 2019 16:11:00 -0700 Subject: [PATCH 041/184] Start moving coloring into the token stream The benefit of this is that coloring can be made atomic alongside token stream forwarding. I put the feature behind a flag so I can continue to iterate on it without possibly regressing existing functionality. It's a lot of places where the flags have to go, but I expect it to be a short-lived flag, and the flags are fully contained in the parser. 
--- features.toml | 9 + src/parser/hir/expand_external_tokens.rs | 61 ++- src/parser/hir/syntax_shape.rs | 468 ++++++++++++++++++ src/parser/hir/syntax_shape/block.rs | 170 ++++++- src/parser/hir/syntax_shape/expression.rs | 163 ++++++ .../hir/syntax_shape/expression/delimited.rs | 30 ++ .../hir/syntax_shape/expression/file_path.rs | 39 ++ .../hir/syntax_shape/expression/list.rs | 128 ++++- .../hir/syntax_shape/expression/number.rs | 60 +++ .../hir/syntax_shape/expression/pattern.rs | 27 + .../hir/syntax_shape/expression/string.rs | 31 ++ .../syntax_shape/expression/variable_path.rs | 307 ++++++++++++ src/parser/hir/tokens_iterator.rs | 98 +++- src/parser/parse_command.rs | 201 ++++++++ src/shell/helper.rs | 26 +- 15 files changed, 1808 insertions(+), 10 deletions(-) diff --git a/features.toml b/features.toml index 290f673d26..f7cea6d9e9 100644 --- a/features.toml +++ b/features.toml @@ -2,3 +2,12 @@ description = "Adding hints based upon error states in the syntax highlighter" enabled = false + +[coloring_in_tokens] + +description = "Move coloring into the TokensIterator so they can be atomic with the rest of the iterator" +reason = """ +This is laying the groundwork for merging coloring and parsing. It also makes token_nodes.atomic() naturally +work with coloring, which is pretty useful on its own. 
+""" +enabled = false \ No newline at end of file diff --git a/src/parser/hir/expand_external_tokens.rs b/src/parser/hir/expand_external_tokens.rs index af966945bd..e277efe2e8 100644 --- a/src/parser/hir/expand_external_tokens.rs +++ b/src/parser/hir/expand_external_tokens.rs @@ -1,10 +1,12 @@ use crate::errors::ShellError; +#[cfg(not(coloring_in_tokens))] +use crate::parser::hir::syntax_shape::FlatShape; use crate::parser::{ hir::syntax_shape::{ color_syntax, expand_atom, AtomicToken, ColorSyntax, ExpandContext, ExpansionRule, MaybeSpaceShape, }, - FlatShape, TokenNode, TokensIterator, + TokenNode, TokensIterator, }; use crate::{Span, Spanned, Text}; @@ -28,6 +30,7 @@ pub fn expand_external_tokens( #[derive(Debug, Copy, Clone)] pub struct ExternalTokensShape; +#[cfg(not(coloring_in_tokens))] impl ColorSyntax for ExternalTokensShape { type Info = (); type Input = (); @@ -53,6 +56,31 @@ impl ColorSyntax for ExternalTokensShape { } } +#[cfg(coloring_in_tokens)] +impl ColorSyntax for ExternalTokensShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Self::Info { + loop { + // Allow a space + color_syntax(&MaybeSpaceShape, token_nodes, context); + + // Process an external expression. 
External expressions are mostly words, with a + // few exceptions (like $variables and path expansion rules) + match color_syntax(&ExternalExpression, token_nodes, context).1 { + ExternalExpressionResult::Eof => break, + ExternalExpressionResult::Processed => continue, + } + } + } +} + pub fn expand_next_expression( token_nodes: &mut TokensIterator<'_>, ) -> Result, ShellError> { @@ -128,6 +156,7 @@ enum ExternalExpressionResult { #[derive(Debug, Copy, Clone)] struct ExternalExpression; +#[cfg(not(coloring_in_tokens))] impl ColorSyntax for ExternalExpression { type Info = ExternalExpressionResult; type Input = (); @@ -157,3 +186,33 @@ impl ColorSyntax for ExternalExpression { return ExternalExpressionResult::Processed; } } + +#[cfg(coloring_in_tokens)] +impl ColorSyntax for ExternalExpression { + type Info = ExternalExpressionResult; + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> ExternalExpressionResult { + let atom = match expand_atom( + token_nodes, + "external word", + context, + ExpansionRule::permissive(), + ) { + Err(_) => unreachable!("TODO: separate infallible expand_atom"), + Ok(Spanned { + item: AtomicToken::Eof { .. }, + .. 
+ }) => return ExternalExpressionResult::Eof, + Ok(atom) => atom, + }; + + atom.color_tokens(token_nodes.mut_shapes()); + return ExternalExpressionResult::Processed; + } +} diff --git a/src/parser/hir/syntax_shape.rs b/src/parser/hir/syntax_shape.rs index 8accfbde2b..37bb8abed4 100644 --- a/src/parser/hir/syntax_shape.rs +++ b/src/parser/hir/syntax_shape.rs @@ -55,6 +55,7 @@ pub enum SyntaxShape { Block, } +#[cfg(not(coloring_in_tokens))] impl FallibleColorSyntax for SyntaxShape { type Info = (); type Input = (); @@ -104,6 +105,39 @@ impl FallibleColorSyntax for SyntaxShape { } } +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for SyntaxShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + match self { + SyntaxShape::Any => color_fallible_syntax(&AnyExpressionShape, token_nodes, context), + SyntaxShape::List => { + color_syntax(&ExpressionListShape, token_nodes, context); + Ok(()) + } + SyntaxShape::Int => color_fallible_syntax(&IntShape, token_nodes, context), + SyntaxShape::String => { + color_fallible_syntax_with(&StringShape, &FlatShape::String, token_nodes, context) + } + SyntaxShape::Member => color_fallible_syntax(&MemberShape, token_nodes, context), + SyntaxShape::ColumnPath => { + color_fallible_syntax(&ColumnPathShape, token_nodes, context) + } + SyntaxShape::Number => color_fallible_syntax(&NumberShape, token_nodes, context), + SyntaxShape::Path => color_fallible_syntax(&FilePathShape, token_nodes, context), + SyntaxShape::Pattern => color_fallible_syntax(&PatternShape, token_nodes, context), + SyntaxShape::Block => color_fallible_syntax(&AnyBlockShape, token_nodes, context), + } + } +} + impl ExpandExpression for SyntaxShape { fn expand_expr<'a, 'b>( &self, @@ -202,6 +236,20 @@ pub trait ExpandExpression: std::fmt::Debug + Copy { ) -> Result; } +#[cfg(coloring_in_tokens)] +pub trait 
FallibleColorSyntax: std::fmt::Debug + Copy { + type Info; + type Input; + + fn color_syntax<'a, 'b>( + &self, + input: &Self::Input, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result; +} + +#[cfg(not(coloring_in_tokens))] pub trait FallibleColorSyntax: std::fmt::Debug + Copy { type Info; type Input; @@ -215,6 +263,7 @@ pub trait FallibleColorSyntax: std::fmt::Debug + Copy { ) -> Result; } +#[cfg(not(coloring_in_tokens))] pub trait ColorSyntax: std::fmt::Debug + Copy { type Info; type Input; @@ -228,6 +277,19 @@ pub trait ColorSyntax: std::fmt::Debug + Copy { ) -> Self::Info; } +#[cfg(coloring_in_tokens)] +pub trait ColorSyntax: std::fmt::Debug + Copy { + type Info; + type Input; + + fn color_syntax<'a, 'b>( + &self, + input: &Self::Input, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Self::Info; +} + // impl ColorSyntax for T // where // T: FallibleColorSyntax, @@ -278,6 +340,7 @@ pub(crate) fn expand_syntax<'a, 'b, T: ExpandSyntax>( } } +#[cfg(not(coloring_in_tokens))] pub fn color_syntax<'a, 'b, T: ColorSyntax, U>( shape: &T, token_nodes: &'b mut TokensIterator<'a>, @@ -306,6 +369,35 @@ pub fn color_syntax<'a, 'b, T: ColorSyntax, U>( ((), result) } +#[cfg(coloring_in_tokens)] +pub fn color_syntax<'a, 'b, T: ColorSyntax, U>( + shape: &T, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, +) -> ((), U) { + trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes, context.source)); + + let len = token_nodes.shapes().len(); + let result = shape.color_syntax(&(), token_nodes, context); + + trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source)); + + if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { + trace!(target: "nu::color_syntax", "after {}", std::any::type_name::()); + + if len < token_nodes.shapes().len() { + for i in len..(token_nodes.shapes().len()) { + trace!(target: 
"nu::color_syntax", "new shape :: {:?}", token_nodes.shapes()[i]); + } + } else { + trace!(target: "nu::color_syntax", "no new shapes"); + } + } + + ((), result) +} + +#[cfg(not(coloring_in_tokens))] pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax, U>( shape: &T, token_nodes: &'b mut TokensIterator<'a>, @@ -339,6 +431,40 @@ pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax, U>( + shape: &T, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, +) -> Result { + trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes, context.source)); + + if token_nodes.at_end() { + trace!(target: "nu::color_syntax", "at eof"); + return Err(ShellError::unexpected_eof("coloring", Tag::unknown())); + } + + let len = token_nodes.shapes().len(); + let result = shape.color_syntax(&(), token_nodes, context); + + trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source)); + + if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { + trace!(target: "nu::color_syntax", "after {}", std::any::type_name::()); + + if len < token_nodes.shapes().len() { + for i in len..(token_nodes.shapes().len()) { + trace!(target: "nu::color_syntax", "new shape :: {:?}", token_nodes.shapes()[i]); + } + } else { + trace!(target: "nu::color_syntax", "no new shapes"); + } + } + + result +} + +#[cfg(not(coloring_in_tokens))] pub fn color_syntax_with<'a, 'b, T: ColorSyntax, U, I>( shape: &T, input: &I, @@ -368,6 +494,36 @@ pub fn color_syntax_with<'a, 'b, T: ColorSyntax, U, I>( ((), result) } +#[cfg(coloring_in_tokens)] +pub fn color_syntax_with<'a, 'b, T: ColorSyntax, U, I>( + shape: &T, + input: &I, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, +) -> ((), U) { + trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes, context.source)); + + let len = token_nodes.shapes().len(); + let result = 
shape.color_syntax(input, token_nodes, context); + + trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source)); + + if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { + trace!(target: "nu::color_syntax", "after {}", std::any::type_name::()); + + if len < token_nodes.shapes().len() { + for i in len..(token_nodes.shapes().len()) { + trace!(target: "nu::color_syntax", "new shape :: {:?}", token_nodes.shapes()[i]); + } + } else { + trace!(target: "nu::color_syntax", "no new shapes"); + } + } + + ((), result) +} + +#[cfg(not(coloring_in_tokens))] pub fn color_fallible_syntax_with<'a, 'b, T: FallibleColorSyntax, U, I>( shape: &T, input: &I, @@ -402,6 +558,40 @@ pub fn color_fallible_syntax_with<'a, 'b, T: FallibleColorSyntax, U, I>( + shape: &T, + input: &I, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, +) -> Result { + trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes, context.source)); + + if token_nodes.at_end() { + trace!(target: "nu::color_syntax", "at eof"); + return Err(ShellError::unexpected_eof("coloring", Tag::unknown())); + } + + let len = token_nodes.shapes().len(); + let result = shape.color_syntax(input, token_nodes, context); + + trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source)); + + if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { + trace!(target: "nu::color_syntax", "after {}", std::any::type_name::()); + + if len < token_nodes.shapes().len() { + for i in len..(token_nodes.shapes().len()) { + trace!(target: "nu::color_syntax", "new shape :: {:?}", token_nodes.shapes()[i]); + } + } else { + trace!(target: "nu::color_syntax", "no new shapes"); + } + } + + result +} + pub(crate) fn expand_expr<'a, 'b, T: ExpandExpression>( shape: &T, token_nodes: &'b mut TokensIterator<'a>, @@ -536,6 +726,7 @@ impl ExpandSyntax for BarePathShape { #[derive(Debug, Copy, Clone)] pub struct 
BareShape; +#[cfg(not(coloring_in_tokens))] impl FallibleColorSyntax for BareShape { type Info = (); type Input = FlatShape; @@ -563,6 +754,37 @@ impl FallibleColorSyntax for BareShape { } } +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for BareShape { + type Info = (); + type Input = FlatShape; + + fn color_syntax<'a, 'b>( + &self, + input: &FlatShape, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Result<(), ShellError> { + let span = token_nodes.peek_any_token(|token| match token { + // If it's a bare token, color it + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => { + // token_nodes.color_shape((*input).spanned(*span)); + Ok(span) + } + + // otherwise, fail + other => Err(ShellError::type_error("word", other.tagged_type_name())), + })?; + + token_nodes.color_shape((*input).spanned(*span)); + + Ok(()) + } +} + impl ExpandSyntax for BareShape { type Output = Spanned; @@ -636,6 +858,7 @@ impl CommandSignature { #[derive(Debug, Copy, Clone)] pub struct PipelineShape; +#[cfg(not(coloring_in_tokens))] // The failure mode is if the head of the token stream is not a pipeline impl FallibleColorSyntax for PipelineShape { type Info = (); @@ -669,6 +892,39 @@ impl FallibleColorSyntax for PipelineShape { } } +#[cfg(coloring_in_tokens)] +// The failure mode is if the head of the token stream is not a pipeline +impl FallibleColorSyntax for PipelineShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + // Make sure we're looking at a pipeline + let Pipeline { parts, .. 
} = token_nodes.peek_any_token(|node| node.as_pipeline())?; + + // Enumerate the pipeline parts + for part in parts { + // If the pipeline part has a prefix `|`, emit a pipe to color + if let Some(pipe) = part.pipe { + token_nodes.color_shape(FlatShape::Pipe.spanned(pipe)) + } + + // Create a new iterator containing the tokens in the pipeline part to color + let mut token_nodes = TokensIterator::new(&part.tokens.item, part.span, false); + + color_syntax(&MaybeSpaceShape, &mut token_nodes, context); + color_syntax(&CommandShape, &mut token_nodes, context); + } + + Ok(()) + } +} + impl ExpandSyntax for PipelineShape { type Output = ClassifiedPipeline; fn expand_syntax<'a, 'b>( @@ -703,6 +959,7 @@ pub enum CommandHeadKind { #[derive(Debug, Copy, Clone)] pub struct CommandHeadShape; +#[cfg(not(coloring_in_tokens))] impl FallibleColorSyntax for CommandHeadShape { type Info = CommandHeadKind; type Input = (); @@ -756,6 +1013,59 @@ impl FallibleColorSyntax for CommandHeadShape { } } +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for CommandHeadShape { + type Info = CommandHeadKind; + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + // If we don't ultimately find a token, roll back + token_nodes.atomic(|token_nodes| { + // First, take a look at the next token + let atom = expand_atom( + token_nodes, + "command head", + context, + ExpansionRule::permissive(), + )?; + + match atom.item { + // If the head is an explicit external command (^cmd), color it as an external command + AtomicToken::ExternalCommand { command } => { + token_nodes.color_shape(FlatShape::ExternalCommand.spanned(command)); + Ok(CommandHeadKind::External) + } + + // If the head is a word, it depends on whether it matches a registered internal command + AtomicToken::Word { text } => { + let name = text.slice(context.source); + + if context.registry.has(name) { + // If the registry has the 
command, color it as an internal command + token_nodes.color_shape(FlatShape::InternalCommand.spanned(text)); + let command = context.registry.expect_command(name); + Ok(CommandHeadKind::Internal(command.signature())) + } else { + // Otherwise, color it as an external command + token_nodes.color_shape(FlatShape::ExternalCommand.spanned(text)); + Ok(CommandHeadKind::External) + } + } + + // Otherwise, we're not actually looking at a command + _ => Err(ShellError::syntax_error( + "No command at the head".tagged(atom.span), + )), + } + }) + } +} + impl ExpandSyntax for CommandHeadShape { type Output = CommandSignature; @@ -861,6 +1171,7 @@ impl ExpandSyntax for ClassifiedCommandShape { #[derive(Debug, Copy, Clone)] pub struct InternalCommandHeadShape; +#[cfg(not(coloring_in_tokens))] impl FallibleColorSyntax for InternalCommandHeadShape { type Info = (); type Input = (); @@ -899,6 +1210,44 @@ impl FallibleColorSyntax for InternalCommandHeadShape { } } +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for InternalCommandHeadShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Result<(), ShellError> { + let peeked_head = token_nodes.peek_non_ws().not_eof("command head4"); + + let peeked_head = match peeked_head { + Err(_) => return Ok(()), + Ok(peeked_head) => peeked_head, + }; + + let node = peeked_head.commit(); + + let _expr = match node { + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => token_nodes.color_shape(FlatShape::Word.spanned(*span)), + + TokenNode::Token(Spanned { + item: RawToken::String(_inner_tag), + span, + }) => token_nodes.color_shape(FlatShape::String.spanned(*span)), + + _node => token_nodes.color_shape(FlatShape::Error.spanned(node.span())), + }; + + Ok(()) + } +} + impl ExpandExpression for InternalCommandHeadShape { fn expand_expr( &self, @@ -992,6 +1341,7 @@ fn parse_single_node_skipping_ws<'a, 'b, 
T>( #[derive(Debug, Copy, Clone)] pub struct WhitespaceShape; +#[cfg(not(coloring_in_tokens))] impl FallibleColorSyntax for WhitespaceShape { type Info = (); type Input = (); @@ -1022,6 +1372,38 @@ impl FallibleColorSyntax for WhitespaceShape { } } +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for WhitespaceShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Result<(), ShellError> { + let peeked = token_nodes.peek_any().not_eof("whitespace"); + + let peeked = match peeked { + Err(_) => return Ok(()), + Ok(peeked) => peeked, + }; + + let node = peeked.commit(); + + let _ = match node { + TokenNode::Whitespace(span) => { + token_nodes.color_shape(FlatShape::Whitespace.spanned(*span)) + } + + _other => return Ok(()), + }; + + Ok(()) + } +} + impl ExpandSyntax for WhitespaceShape { type Output = Span; @@ -1089,6 +1471,7 @@ pub struct MaybeSpacedExpression { #[derive(Debug, Copy, Clone)] pub struct MaybeSpaceShape; +#[cfg(not(coloring_in_tokens))] impl ColorSyntax for MaybeSpaceShape { type Info = (); type Input = (); @@ -1114,9 +1497,35 @@ impl ColorSyntax for MaybeSpaceShape { } } +#[cfg(coloring_in_tokens)] +impl ColorSyntax for MaybeSpaceShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Self::Info { + let peeked = token_nodes.peek_any().not_eof("whitespace"); + + let peeked = match peeked { + Err(_) => return, + Ok(peeked) => peeked, + }; + + if let TokenNode::Whitespace(span) = peeked.node { + peeked.commit(); + token_nodes.color_shape(FlatShape::Whitespace.spanned(*span)); + } + } +} + #[derive(Debug, Copy, Clone)] pub struct SpaceShape; +#[cfg(not(coloring_in_tokens))] impl FallibleColorSyntax for SpaceShape { type Info = (); type Input = (); @@ -1145,6 +1554,34 @@ impl FallibleColorSyntax for SpaceShape { } } 
+#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for SpaceShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Result<(), ShellError> { + let peeked = token_nodes.peek_any().not_eof("whitespace")?; + + match peeked.node { + TokenNode::Whitespace(span) => { + peeked.commit(); + token_nodes.color_shape(FlatShape::Whitespace.spanned(*span)); + Ok(()) + } + + other => Err(ShellError::type_error( + "whitespace", + other.tagged_type_name(), + )), + } + } +} + impl ExpandExpression for MaybeSpacedExpression { fn expand_expr<'a, 'b>( &self, @@ -1237,6 +1674,7 @@ fn classify_command( #[derive(Debug, Copy, Clone)] pub struct CommandShape; +#[cfg(not(coloring_in_tokens))] impl ColorSyntax for CommandShape { type Info = (); type Input = (); @@ -1266,3 +1704,33 @@ impl ColorSyntax for CommandShape { }; } } + +#[cfg(coloring_in_tokens)] +impl ColorSyntax for CommandShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) { + let kind = color_fallible_syntax(&CommandHeadShape, token_nodes, context); + + match kind { + Err(_) => { + // We didn't find a command, so we'll have to fall back to parsing this pipeline part + // as a blob of undifferentiated expressions + color_syntax(&ExpressionListShape, token_nodes, context); + } + + Ok(CommandHeadKind::External) => { + color_syntax(&ExternalTokensShape, token_nodes, context); + } + Ok(CommandHeadKind::Internal(signature)) => { + color_syntax_with(&CommandTailShape, &signature, token_nodes, context); + } + }; + } +} diff --git a/src/parser/hir/syntax_shape/block.rs b/src/parser/hir/syntax_shape/block.rs index 7518d8f946..fdf2ecb3f8 100644 --- a/src/parser/hir/syntax_shape/block.rs +++ b/src/parser/hir/syntax_shape/block.rs @@ -1,11 +1,12 @@ use crate::errors::ShellError; 
+#[cfg(not(coloring_in_tokens))] +use crate::parser::hir::syntax_shape::FlatShape; use crate::parser::{ hir, hir::syntax_shape::{ color_fallible_syntax, color_syntax_with, continue_expression, expand_expr, expand_syntax, DelimitedShape, ExpandContext, ExpandExpression, ExpressionContinuationShape, - ExpressionListShape, FallibleColorSyntax, FlatShape, MemberShape, PathTailShape, - VariablePathShape, + ExpressionListShape, FallibleColorSyntax, MemberShape, PathTailShape, VariablePathShape, }, hir::tokens_iterator::TokensIterator, parse::token_tree::Delimiter, @@ -16,6 +17,7 @@ use crate::{Span, Spanned, SpannedItem}; #[derive(Debug, Copy, Clone)] pub struct AnyBlockShape; +#[cfg(not(coloring_in_tokens))] impl FallibleColorSyntax for AnyBlockShape { type Info = (); type Input = (); @@ -59,6 +61,48 @@ impl FallibleColorSyntax for AnyBlockShape { } } +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for AnyBlockShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let block = token_nodes.peek_non_ws().not_eof("block"); + + let block = match block { + Err(_) => return Ok(()), + Ok(block) => block, + }; + + // is it just a block? + let block = block.node.as_block(); + + match block { + // If so, color it as a block + Some((children, spans)) => { + let mut token_nodes = TokensIterator::new(children.item, context.span, false); + color_syntax_with( + &DelimitedShape, + &(Delimiter::Brace, spans.0, spans.1), + &mut token_nodes, + context, + ); + + return Ok(()); + } + _ => {} + } + + // Otherwise, look for a shorthand block. 
If none found, fail + color_fallible_syntax(&ShorthandBlock, token_nodes, context) + } +} + impl ExpandExpression for AnyBlockShape { fn expand_expr<'a, 'b>( &self, @@ -88,6 +132,7 @@ impl ExpandExpression for AnyBlockShape { #[derive(Debug, Copy, Clone)] pub struct ShorthandBlock; +#[cfg(not(coloring_in_tokens))] impl FallibleColorSyntax for ShorthandBlock { type Info = (); type Input = (); @@ -119,6 +164,36 @@ impl FallibleColorSyntax for ShorthandBlock { } } +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for ShorthandBlock { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + // Try to find a shorthand head. If none found, fail + color_fallible_syntax(&ShorthandPath, token_nodes, context)?; + + loop { + // Check to see whether there's any continuation after the head expression + let result = color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context); + + match result { + // if no continuation was found, we're done + Err(_) => break, + // if a continuation was found, look for another one + Ok(_) => continue, + } + } + + Ok(()) + } +} + impl ExpandExpression for ShorthandBlock { fn expand_expr<'a, 'b>( &self, @@ -139,6 +214,7 @@ impl ExpandExpression for ShorthandBlock { #[derive(Debug, Copy, Clone)] pub struct ShorthandPath; +#[cfg(not(coloring_in_tokens))] impl FallibleColorSyntax for ShorthandPath { type Info = (); type Input = (); @@ -183,6 +259,50 @@ impl FallibleColorSyntax for ShorthandPath { } } +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for ShorthandPath { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + token_nodes.atomic(|token_nodes| { + let variable = color_fallible_syntax(&VariablePathShape, token_nodes, context); + + match variable { + 
Ok(_) => { + // if it's a variable path, that's the head part + return Ok(()); + } + + Err(_) => { + // otherwise, we'll try to find a member path + } + } + + // look for a member (`` -> `$it.`) + color_fallible_syntax(&MemberShape, token_nodes, context)?; + + // Now that we've synthesized the head, of the path, proceed to expand the tail of the path + // like any other path. + let tail = color_fallible_syntax(&PathTailShape, token_nodes, context); + + match tail { + Ok(_) => {} + Err(_) => { + // It's ok if there's no path tail; a single member is sufficient + } + } + + Ok(()) + }) + } +} + impl ExpandExpression for ShorthandPath { fn expand_expr<'a, 'b>( &self, @@ -223,6 +343,52 @@ impl ExpandExpression for ShorthandPath { #[derive(Debug, Copy, Clone)] pub struct ShorthandHeadShape; +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for ShorthandHeadShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + // A shorthand path must not be at EOF + let peeked = token_nodes.peek_non_ws().not_eof("shorthand path")?; + + match peeked.node { + // If the head of a shorthand path is a bare token, it expands to `$it.bare` + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => { + peeked.commit(); + shapes.push(FlatShape::BareMember.spanned(*span)); + Ok(()) + } + + // If the head of a shorthand path is a string, it expands to `$it."some string"` + TokenNode::Token(Spanned { + item: RawToken::String(_), + span: outer, + }) => { + peeked.commit(); + shapes.push(FlatShape::StringMember.spanned(*outer)); + Ok(()) + } + + other => Err(ShellError::type_error( + "shorthand head", + other.tagged_type_name(), + )), + } + } +} + +#[cfg(coloring_in_tokens)] +#[cfg(not(coloring_in_tokens))] impl FallibleColorSyntax for ShorthandHeadShape { type Info = (); type Input = (); diff --git 
a/src/parser/hir/syntax_shape/expression.rs b/src/parser/hir/syntax_shape/expression.rs index 0be63eaeb6..eccebf7516 100644 --- a/src/parser/hir/syntax_shape/expression.rs +++ b/src/parser/hir/syntax_shape/expression.rs @@ -37,6 +37,7 @@ impl ExpandExpression for AnyExpressionShape { } } +#[cfg(not(coloring_in_tokens))] impl FallibleColorSyntax for AnyExpressionShape { type Info = (); type Input = (); @@ -63,6 +64,32 @@ impl FallibleColorSyntax for AnyExpressionShape { } } +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for AnyExpressionShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + // Look for an expression at the cursor + color_fallible_syntax(&AnyExpressionStartShape, token_nodes, context)?; + + match continue_coloring_expression(token_nodes, context) { + Err(_) => { + // it's fine for there to be no continuation + } + + Ok(()) => {} + } + + Ok(()) + } +} + pub(crate) fn continue_expression( mut head: hir::Expression, token_nodes: &mut TokensIterator<'_>, @@ -91,6 +118,7 @@ pub(crate) fn continue_expression( } } +#[cfg(not(coloring_in_tokens))] pub(crate) fn continue_coloring_expression( token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, @@ -115,6 +143,29 @@ pub(crate) fn continue_coloring_expression( } } +#[cfg(coloring_in_tokens)] +pub(crate) fn continue_coloring_expression( + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, +) -> Result<(), ShellError> { + // if there's not even one expression continuation, fail + color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context)?; + + loop { + // Check to see whether there's any continuation after the head expression + let result = color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context); + + match result { + Err(_) => { + // We already saw one continuation, so just return + return Ok(()); + } + + 
Ok(_) => {} + } + } +} + #[derive(Debug, Copy, Clone)] pub struct AnyExpressionStartShape; @@ -152,6 +203,7 @@ impl ExpandExpression for AnyExpressionStartShape { } } +#[cfg(not(coloring_in_tokens))] impl FallibleColorSyntax for AnyExpressionStartShape { type Info = (); type Input = (); @@ -210,9 +262,70 @@ impl FallibleColorSyntax for AnyExpressionStartShape { } } +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for AnyExpressionStartShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let atom = token_nodes.spanned(|token_nodes| { + expand_atom( + token_nodes, + "expression", + context, + ExpansionRule::permissive(), + ) + }); + + let atom = match atom { + Spanned { + item: Err(_err), + span, + } => { + token_nodes.color_shape(FlatShape::Error.spanned(span)); + return Ok(()); + } + + Spanned { + item: Ok(value), .. + } => value, + }; + + match atom.item { + AtomicToken::Size { number, unit } => token_nodes.color_shape( + FlatShape::Size { + number: number.span.into(), + unit: unit.span.into(), + } + .spanned(atom.span), + ), + + AtomicToken::SquareDelimited { nodes, spans } => { + token_nodes.child((&nodes[..]).spanned(atom.span), |tokens| { + color_delimited_square(spans, tokens, atom.span.into(), context); + }); + } + + AtomicToken::Word { .. } | AtomicToken::Dot { .. 
} => { + token_nodes.color_shape(FlatShape::Word.spanned(atom.span)); + } + + _ => atom.color_tokens(token_nodes.mut_shapes()), + } + + Ok(()) + } +} + #[derive(Debug, Copy, Clone)] pub struct BareTailShape; +#[cfg(not(coloring_in_tokens))] impl FallibleColorSyntax for BareTailShape { type Info = (); type Input = (); @@ -269,6 +382,56 @@ impl FallibleColorSyntax for BareTailShape { } } +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for BareTailShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let len = token_nodes.shapes().len(); + + loop { + let word = + color_fallible_syntax_with(&BareShape, &FlatShape::Word, token_nodes, context); + + match word { + // if a word was found, continue + Ok(_) => continue, + // if a word wasn't found, try to find a dot + Err(_) => {} + } + + // try to find a dot + let dot = color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Word, + token_nodes, + context, + ); + + match dot { + // if a dot was found, try to find another word + Ok(_) => continue, + // otherwise, we're done + Err(_) => break, + } + } + + if token_nodes.shapes().len() > len { + Ok(()) + } else { + Err(ShellError::syntax_error( + "No tokens matched BareTailShape".tagged_unknown(), + )) + } + } +} + impl ExpandSyntax for BareTailShape { type Output = Option; diff --git a/src/parser/hir/syntax_shape/expression/delimited.rs b/src/parser/hir/syntax_shape/expression/delimited.rs index b52340ab8f..5f8406c6cb 100644 --- a/src/parser/hir/syntax_shape/expression/delimited.rs +++ b/src/parser/hir/syntax_shape/expression/delimited.rs @@ -16,6 +16,7 @@ pub fn expand_delimited_square( Ok(hir::Expression::list(list?, Tag { span, anchor: None })) } +#[cfg(not(coloring_in_tokens))] pub fn color_delimited_square( (open, close): (Span, Span), children: &Vec, @@ -29,9 +30,22 @@ pub fn color_delimited_square( 
shapes.push(FlatShape::CloseDelimiter(Delimiter::Square).spanned(close)); } +#[cfg(coloring_in_tokens)] +pub fn color_delimited_square( + (open, close): (Span, Span), + token_nodes: &mut TokensIterator, + _span: Span, + context: &ExpandContext, +) { + token_nodes.color_shape(FlatShape::OpenDelimiter(Delimiter::Square).spanned(open)); + let _list = color_syntax(&ExpressionListShape, token_nodes, context); + token_nodes.color_shape(FlatShape::CloseDelimiter(Delimiter::Square).spanned(close)); +} + #[derive(Debug, Copy, Clone)] pub struct DelimitedShape; +#[cfg(not(coloring_in_tokens))] impl ColorSyntax for DelimitedShape { type Info = (); type Input = (Delimiter, Span, Span); @@ -47,3 +61,19 @@ impl ColorSyntax for DelimitedShape { shapes.push(FlatShape::CloseDelimiter(*delimiter).spanned(*close)); } } + +#[cfg(coloring_in_tokens)] +impl ColorSyntax for DelimitedShape { + type Info = (); + type Input = (Delimiter, Span, Span); + fn color_syntax<'a, 'b>( + &self, + (delimiter, open, close): &(Delimiter, Span, Span), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Self::Info { + token_nodes.color_shape(FlatShape::OpenDelimiter(*delimiter).spanned(*open)); + color_syntax(&ExpressionListShape, token_nodes, context); + token_nodes.color_shape(FlatShape::CloseDelimiter(*delimiter).spanned(*close)); + } +} diff --git a/src/parser/hir/syntax_shape/expression/file_path.rs b/src/parser/hir/syntax_shape/expression/file_path.rs index ccb2f8f54b..acde8fba13 100644 --- a/src/parser/hir/syntax_shape/expression/file_path.rs +++ b/src/parser/hir/syntax_shape/expression/file_path.rs @@ -8,6 +8,7 @@ use crate::prelude::*; #[derive(Debug, Copy, Clone)] pub struct FilePathShape; +#[cfg(not(coloring_in_tokens))] impl FallibleColorSyntax for FilePathShape { type Info = (); type Input = (); @@ -46,6 +47,44 @@ impl FallibleColorSyntax for FilePathShape { } } +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for FilePathShape { + type Info = (); + type Input = 
(); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let atom = expand_atom( + token_nodes, + "file path", + context, + ExpansionRule::permissive(), + ); + + let atom = match atom { + Err(_) => return Ok(()), + Ok(atom) => atom, + }; + + match atom.item { + AtomicToken::Word { .. } + | AtomicToken::String { .. } + | AtomicToken::Number { .. } + | AtomicToken::Size { .. } => { + token_nodes.color_shape(FlatShape::Path.spanned(atom.span)); + } + + _ => atom.color_tokens(token_nodes.mut_shapes()), + } + + Ok(()) + } +} + impl ExpandExpression for FilePathShape { fn expand_expr<'a, 'b>( &self, diff --git a/src/parser/hir/syntax_shape/expression/list.rs b/src/parser/hir/syntax_shape/expression/list.rs index 575ae9fcdd..5a1ea8e383 100644 --- a/src/parser/hir/syntax_shape/expression/list.rs +++ b/src/parser/hir/syntax_shape/expression/list.rs @@ -1,4 +1,6 @@ use crate::errors::ShellError; +#[cfg(not(coloring_in_tokens))] +use crate::parser::hir::syntax_shape::FlatShape; use crate::parser::{ hir, hir::syntax_shape::{ @@ -7,8 +9,8 @@ use crate::parser::{ MaybeSpaceShape, SpaceShape, }, hir::TokensIterator, - FlatShape, }; +#[cfg(not(coloring_in_tokens))] use crate::Spanned; #[derive(Debug, Copy, Clone)] @@ -44,6 +46,7 @@ impl ExpandSyntax for ExpressionListShape { } } +#[cfg(not(coloring_in_tokens))] impl ColorSyntax for ExpressionListShape { type Info = (); type Input = (); @@ -113,10 +116,80 @@ impl ColorSyntax for ExpressionListShape { } } +#[cfg(coloring_in_tokens)] +impl ColorSyntax for ExpressionListShape { + type Info = (); + type Input = (); + + /// The intent of this method is to fully color an expression list shape infallibly. + /// This means that if we can't expand a token into an expression, we fall back to + /// a simpler coloring strategy. + /// + /// This would apply to something like `where x >`, which includes an incomplete + /// binary operator. 
Since we will fail to process it as a binary operator, we'll + /// fall back to a simpler coloring and move on. + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) { + // We encountered a parsing error and will continue with simpler coloring ("backoff + // coloring mode") + let mut backoff = false; + + // Consume any leading whitespace + color_syntax(&MaybeSpaceShape, token_nodes, context); + + loop { + // If we reached the very end of the token stream, we're done + if token_nodes.at_end() { + return; + } + + if backoff { + let len = token_nodes.shapes().len(); + + // If we previously encountered a parsing error, use backoff coloring mode + color_syntax(&SimplestExpression, token_nodes, context); + + if len == token_nodes.shapes().len() && !token_nodes.at_end() { + // This should never happen, but if it does, a panic is better than an infinite loop + panic!("Unexpected tokens left that couldn't be colored even with SimplestExpression") + } + } else { + // Try to color the head of the stream as an expression + match color_fallible_syntax(&AnyExpressionShape, token_nodes, context) { + // If no expression was found, switch to backoff coloring mode + Err(_) => { + backoff = true; + continue; + } + Ok(_) => {} + } + + // If an expression was found, consume a space + match color_fallible_syntax(&SpaceShape, token_nodes, context) { + Err(_) => { + // If no space was found, we're either at the end or there's an error. + // Either way, switch to backoff coloring mode. If we're at the end + // it won't have any consequences. 
+ backoff = true; + } + Ok(_) => { + // Otherwise, move on to the next expression + } + } + } + } + } +} + /// BackoffColoringMode consumes all of the remaining tokens in an infallible way #[derive(Debug, Copy, Clone)] pub struct BackoffColoringMode; +#[cfg(not(coloring_in_tokens))] impl ColorSyntax for BackoffColoringMode { type Info = (); type Input = (); @@ -144,12 +217,40 @@ impl ColorSyntax for BackoffColoringMode { } } +#[cfg(coloring_in_tokens)] +impl ColorSyntax for BackoffColoringMode { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &Self::Input, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Self::Info { + loop { + if token_nodes.at_end() { + break; + } + + let len = token_nodes.shapes().len(); + color_syntax(&SimplestExpression, token_nodes, context); + + if len == token_nodes.shapes().len() && !token_nodes.at_end() { + // This shouldn't happen, but if it does, a panic is better than an infinite loop + panic!("SimplestExpression failed to consume any tokens, but it's not at the end. This is unexpected\n== token nodes==\n{:#?}\n\n== shapes ==\n{:#?}", token_nodes, token_nodes.shapes()); + } + } + } +} + /// The point of `SimplestExpression` is to serve as an infallible base case for coloring. /// As a last ditch effort, if we can't find any way to parse the head of the stream as an /// expression, fall back to simple coloring. 
#[derive(Debug, Copy, Clone)] pub struct SimplestExpression; +#[cfg(not(coloring_in_tokens))] impl ColorSyntax for SimplestExpression { type Info = (); type Input = (); @@ -174,3 +275,28 @@ impl ColorSyntax for SimplestExpression { } } } + +#[cfg(coloring_in_tokens)] +impl ColorSyntax for SimplestExpression { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) { + let atom = expand_atom( + token_nodes, + "any token", + context, + ExpansionRule::permissive(), + ); + + match atom { + Err(_) => {} + Ok(atom) => atom.color_tokens(token_nodes.mut_shapes()), + } + } +} diff --git a/src/parser/hir/syntax_shape/expression/number.rs b/src/parser/hir/syntax_shape/expression/number.rs index a4e2a93234..d1475cbaf3 100644 --- a/src/parser/hir/syntax_shape/expression/number.rs +++ b/src/parser/hir/syntax_shape/expression/number.rs @@ -44,6 +44,7 @@ impl ExpandExpression for NumberShape { } } +#[cfg(not(coloring_in_tokens))] impl FallibleColorSyntax for NumberShape { type Info = (); type Input = (); @@ -73,6 +74,35 @@ impl FallibleColorSyntax for NumberShape { } } +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for NumberShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let atom = token_nodes.spanned(|token_nodes| { + expand_atom(token_nodes, "number", context, ExpansionRule::permissive()) + }); + + let atom = match atom { + Spanned { item: Err(_), span } => { + token_nodes.color_shape(FlatShape::Error.spanned(span)); + return Ok(()); + } + Spanned { item: Ok(atom), .. 
} => atom, + }; + + atom.color_tokens(token_nodes.mut_shapes()); + + Ok(()) + } +} + #[derive(Debug, Copy, Clone)] pub struct IntShape; @@ -106,6 +136,7 @@ impl ExpandExpression for IntShape { } } +#[cfg(not(coloring_in_tokens))] impl FallibleColorSyntax for IntShape { type Info = (); type Input = (); @@ -134,3 +165,32 @@ impl FallibleColorSyntax for IntShape { Ok(()) } } + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for IntShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let atom = token_nodes.spanned(|token_nodes| { + expand_atom(token_nodes, "integer", context, ExpansionRule::permissive()) + }); + + let atom = match atom { + Spanned { item: Err(_), span } => { + token_nodes.color_shape(FlatShape::Error.spanned(span)); + return Ok(()); + } + Spanned { item: Ok(atom), .. } => atom, + }; + + atom.color_tokens(token_nodes.mut_shapes()); + + Ok(()) + } +} diff --git a/src/parser/hir/syntax_shape/expression/pattern.rs b/src/parser/hir/syntax_shape/expression/pattern.rs index 0a11552d5e..328e8f795e 100644 --- a/src/parser/hir/syntax_shape/expression/pattern.rs +++ b/src/parser/hir/syntax_shape/expression/pattern.rs @@ -9,6 +9,7 @@ use crate::prelude::*; #[derive(Debug, Copy, Clone)] pub struct PatternShape; +#[cfg(not(coloring_in_tokens))] impl FallibleColorSyntax for PatternShape { type Info = (); type Input = (); @@ -35,6 +36,32 @@ impl FallibleColorSyntax for PatternShape { } } +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for PatternShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + token_nodes.atomic(|token_nodes| { + let atom = expand_atom(token_nodes, "pattern", context, ExpansionRule::permissive())?; + + match &atom.item { + 
AtomicToken::GlobPattern { .. } | AtomicToken::Word { .. } => { + token_nodes.color_shape(FlatShape::GlobPattern.spanned(atom.span)); + Ok(()) + } + + _ => Err(ShellError::type_error("pattern", atom.tagged_type_name())), + } + }) + } +} + impl ExpandExpression for PatternShape { fn expand_expr<'a, 'b>( &self, diff --git a/src/parser/hir/syntax_shape/expression/string.rs b/src/parser/hir/syntax_shape/expression/string.rs index 0dabd70a85..e74fa0a6a7 100644 --- a/src/parser/hir/syntax_shape/expression/string.rs +++ b/src/parser/hir/syntax_shape/expression/string.rs @@ -9,6 +9,7 @@ use crate::prelude::*; #[derive(Debug, Copy, Clone)] pub struct StringShape; +#[cfg(not(coloring_in_tokens))] impl FallibleColorSyntax for StringShape { type Info = (); type Input = FlatShape; @@ -39,6 +40,36 @@ impl FallibleColorSyntax for StringShape { } } +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for StringShape { + type Info = (); + type Input = FlatShape; + + fn color_syntax<'a, 'b>( + &self, + input: &FlatShape, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let atom = expand_atom(token_nodes, "string", context, ExpansionRule::permissive()); + + let atom = match atom { + Err(_) => return Ok(()), + Ok(atom) => atom, + }; + + match atom { + Spanned { + item: AtomicToken::String { .. 
}, + span, + } => token_nodes.color_shape((*input).spanned(span)), + other => other.color_tokens(token_nodes.mut_shapes()), + } + + Ok(()) + } +} + impl ExpandExpression for StringShape { fn expand_expr<'a, 'b>( &self, diff --git a/src/parser/hir/syntax_shape/expression/variable_path.rs b/src/parser/hir/syntax_shape/expression/variable_path.rs index 04b511d89a..380b3f936c 100644 --- a/src/parser/hir/syntax_shape/expression/variable_path.rs +++ b/src/parser/hir/syntax_shape/expression/variable_path.rs @@ -44,6 +44,7 @@ impl ExpandExpression for VariablePathShape { } } +#[cfg(not(coloring_in_tokens))] impl FallibleColorSyntax for VariablePathShape { type Info = (); type Input = (); @@ -84,9 +85,49 @@ impl FallibleColorSyntax for VariablePathShape { } } +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for VariablePathShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + token_nodes.atomic(|token_nodes| { + // If the head of the token stream is not a variable, fail + color_fallible_syntax(&VariableShape, token_nodes, context)?; + + loop { + // look for a dot at the head of a stream + let dot = color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Dot, + token_nodes, + context, + ); + + // if there's no dot, we're done + match dot { + Err(_) => break, + Ok(_) => {} + } + + // otherwise, look for a member, and if you don't find one, fail + color_fallible_syntax(&MemberShape, token_nodes, context)?; + } + + Ok(()) + }) + } +} + #[derive(Debug, Copy, Clone)] pub struct PathTailShape; +#[cfg(not(coloring_in_tokens))] /// The failure mode of `PathTailShape` is a dot followed by a non-member impl FallibleColorSyntax for PathTailShape { type Info = (); @@ -119,6 +160,37 @@ impl FallibleColorSyntax for PathTailShape { } } +#[cfg(coloring_in_tokens)] +/// The failure mode of `PathTailShape` is a dot followed by a 
non-member +impl FallibleColorSyntax for PathTailShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + token_nodes.atomic(|token_nodes| loop { + let result = color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Dot, + token_nodes, + context, + ); + + match result { + Err(_) => return Ok(()), + Ok(_) => {} + } + + // If we've seen a dot but not a member, fail + color_fallible_syntax(&MemberShape, token_nodes, context)?; + }) + } +} + impl ExpandSyntax for PathTailShape { type Output = (Vec>, Span); fn expand_syntax<'a, 'b>( @@ -204,6 +276,7 @@ pub enum ContinuationInfo { Infix, } +#[cfg(not(coloring_in_tokens))] impl FallibleColorSyntax for ExpressionContinuationShape { type Info = ContinuationInfo; type Input = (); @@ -256,6 +329,51 @@ impl FallibleColorSyntax for ExpressionContinuationShape { } } +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for ExpressionContinuationShape { + type Info = ContinuationInfo; + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + token_nodes.atomic(|token_nodes| { + // Try to expand a `.` + let dot = color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Dot, + token_nodes, + context, + ); + + match dot { + Ok(_) => { + // we found a dot, so let's keep looking for a member; if no member was found, fail + color_fallible_syntax(&MemberShape, token_nodes, context)?; + + Ok(ContinuationInfo::Dot) + } + Err(_) => { + let result = token_nodes.atomic(|token_nodes| { + // we didn't find a dot, so let's see if we're looking at an infix. If not found, fail + color_fallible_syntax(&InfixShape, token_nodes, context)?; + + // now that we've seen an infix shape, look for any expression. 
If not found, fail + color_fallible_syntax(&AnyExpressionShape, token_nodes, context)?; + + Ok(ContinuationInfo::Infix) + })?; + + Ok(result) + } + } + }) + } +} + #[derive(Debug, Copy, Clone)] pub struct VariableShape; @@ -285,6 +403,7 @@ impl ExpandExpression for VariableShape { } } +#[cfg(not(coloring_in_tokens))] impl FallibleColorSyntax for VariableShape { type Info = (); type Input = (); @@ -322,6 +441,43 @@ impl FallibleColorSyntax for VariableShape { } } +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for VariableShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let atom = expand_atom( + token_nodes, + "variable", + context, + ExpansionRule::permissive(), + ); + + let atom = match atom { + Err(err) => return Err(err), + Ok(atom) => atom, + }; + + match &atom.item { + AtomicToken::Variable { .. } => { + token_nodes.color_shape(FlatShape::Variable.spanned(atom.span)); + Ok(()) + } + AtomicToken::ItVariable { .. 
} => { + token_nodes.color_shape(FlatShape::ItVariable.spanned(atom.span)); + Ok(()) + } + _ => Err(ShellError::type_error("variable", atom.tagged_type_name())), + } + } +} + #[derive(Debug, Clone, Copy)] pub enum Member { String(/* outer */ Span, /* inner */ Span), @@ -447,6 +603,7 @@ pub fn expand_column_path<'a, 'b>( #[derive(Debug, Copy, Clone)] pub struct ColumnPathShape; +#[cfg(not(coloring_in_tokens))] impl FallibleColorSyntax for ColumnPathShape { type Info = (); type Input = (); @@ -496,6 +653,53 @@ impl FallibleColorSyntax for ColumnPathShape { } } +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for ColumnPathShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + // If there's not even one member shape, fail + color_fallible_syntax(&MemberShape, token_nodes, context)?; + + loop { + let checkpoint = token_nodes.checkpoint(); + + match color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Dot, + checkpoint.iterator, + context, + ) { + Err(_) => { + // we already saw at least one member shape, so return successfully + return Ok(()); + } + + Ok(_) => { + match color_fallible_syntax(&MemberShape, checkpoint.iterator, context) { + Err(_) => { + // we saw a dot but not a member (but we saw at least one member), + // so don't commit the dot but return successfully + return Ok(()); + } + + Ok(_) => { + // we saw a dot and a member, so commit it and continue on + checkpoint.commit(); + } + } + } + } + } + } +} + impl ExpandSyntax for ColumnPathShape { type Output = Tagged>; @@ -511,6 +715,7 @@ impl ExpandSyntax for ColumnPathShape { #[derive(Debug, Copy, Clone)] pub struct MemberShape; +#[cfg(not(coloring_in_tokens))] impl FallibleColorSyntax for MemberShape { type Info = (); type Input = (); @@ -548,6 +753,32 @@ impl FallibleColorSyntax for MemberShape { } } +#[cfg(coloring_in_tokens)] +impl 
FallibleColorSyntax for MemberShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let bare = + color_fallible_syntax_with(&BareShape, &FlatShape::BareMember, token_nodes, context); + + match bare { + Ok(_) => return Ok(()), + Err(_) => { + // If we don't have a bare word, we'll look for a string + } + } + + // Look for a string token. If we don't find one, fail + color_fallible_syntax_with(&StringShape, &FlatShape::StringMember, token_nodes, context) + } +} + impl ExpandSyntax for MemberShape { type Output = Member; @@ -581,6 +812,7 @@ pub struct DotShape; #[derive(Debug, Copy, Clone)] pub struct ColorableDotShape; +#[cfg(not(coloring_in_tokens))] impl FallibleColorSyntax for ColorableDotShape { type Info = (); type Input = FlatShape; @@ -606,6 +838,31 @@ impl FallibleColorSyntax for ColorableDotShape { } } +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for ColorableDotShape { + type Info = (); + type Input = FlatShape; + + fn color_syntax<'a, 'b>( + &self, + input: &FlatShape, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Result<(), ShellError> { + let peeked = token_nodes.peek_any().not_eof("dot")?; + + match peeked.node { + node if node.is_dot() => { + peeked.commit(); + token_nodes.color_shape((*input).spanned(node.span())); + Ok(()) + } + + other => Err(ShellError::type_error("dot", other.tagged_type_name())), + } + } +} + impl SkipSyntax for DotShape { fn skip<'a, 'b>( &self, @@ -643,6 +900,7 @@ impl ExpandSyntax for DotShape { #[derive(Debug, Copy, Clone)] pub struct InfixShape; +#[cfg(not(coloring_in_tokens))] impl FallibleColorSyntax for InfixShape { type Info = (); type Input = (); @@ -690,6 +948,55 @@ impl FallibleColorSyntax for InfixShape { } } +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for InfixShape { + type Info = (); + type Input = (); + + fn 
color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let checkpoint = token_nodes.checkpoint(); + + // An infix operator must be prefixed by whitespace. If no whitespace was found, fail + color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context)?; + + // Parse the next TokenNode after the whitespace + let operator_span = parse_single_node( + checkpoint.iterator, + "infix operator", + |token, token_span, _| { + match token { + // If it's an operator (and not `.`), it's a match + RawToken::Operator(operator) if operator != Operator::Dot => { + // token_nodes.color_shape(FlatShape::Operator.spanned(token_span)); + Ok(token_span) + } + + // Otherwise, it's not a match + _ => Err(ShellError::type_error( + "infix operator", + token.type_name().tagged(token_span), + )), + } + }, + )?; + + checkpoint + .iterator + .color_shape(FlatShape::Operator.spanned(operator_span)); + + // An infix operator must be followed by whitespace. 
If no whitespace was found, fail + color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context)?; + + checkpoint.commit(); + Ok(()) + } +} + impl ExpandSyntax for InfixShape { type Output = (Span, Spanned, Span); diff --git a/src/parser/hir/tokens_iterator.rs b/src/parser/hir/tokens_iterator.rs index dbcf5e6c4c..094c5af8c6 100644 --- a/src/parser/hir/tokens_iterator.rs +++ b/src/parser/hir/tokens_iterator.rs @@ -1,16 +1,23 @@ pub(crate) mod debug; use crate::errors::ShellError; +#[cfg(coloring_in_tokens)] +use crate::parser::hir::syntax_shape::FlatShape; use crate::parser::TokenNode; use crate::{Span, Spanned, SpannedItem}; +#[allow(unused)] +use getset::Getters; -#[derive(Debug)] +#[derive(Getters, Debug)] pub struct TokensIterator<'content> { tokens: &'content [TokenNode], span: Span, skip_ws: bool, index: usize, seen: indexmap::IndexSet, + #[cfg(coloring_in_tokens)] + #[get = "pub"] + shapes: Vec>, } #[derive(Debug)] @@ -18,6 +25,8 @@ pub struct Checkpoint<'content, 'me> { pub(crate) iterator: &'me mut TokensIterator<'content>, index: usize, seen: indexmap::IndexSet, + #[cfg(coloring_in_tokens)] + shape_start: usize, committed: bool, } @@ -32,6 +41,8 @@ impl<'content, 'me> std::ops::Drop for Checkpoint<'content, 'me> { if !self.committed { self.iterator.index = self.index; self.iterator.seen = self.seen.clone(); + #[cfg(coloring_in_tokens)] + self.iterator.shapes.truncate(self.shape_start); } } } @@ -132,6 +143,8 @@ impl<'content> TokensIterator<'content> { skip_ws, index: 0, seen: indexmap::IndexSet::new(), + #[cfg(coloring_in_tokens)] + shapes: vec![], } } @@ -156,10 +169,47 @@ impl<'content> TokensIterator<'content> { result.spanned(start.until(end)) } + #[cfg(coloring_in_tokens)] + pub fn color_shape(&mut self, shape: Spanned) { + self.shapes.push(shape); + } + + #[cfg(coloring_in_tokens)] + pub fn mut_shapes(&mut self) -> &mut Vec> { + &mut self.shapes + } + + #[cfg(coloring_in_tokens)] + pub fn child( + &mut self, + tokens: Spanned<&'content 
[TokenNode]>, + block: impl FnOnce(&mut TokensIterator) -> T, + ) -> T { + let mut shapes = vec![]; + std::mem::swap(&mut shapes, &mut self.shapes); + + let mut iterator = TokensIterator { + tokens: tokens.item, + span: tokens.span, + skip_ws: false, + index: 0, + seen: indexmap::IndexSet::new(), + shapes, + }; + + let result = block(&mut iterator); + + std::mem::swap(&mut iterator.shapes, &mut self.shapes); + + result + } + /// Use a checkpoint when you need to peek more than one token ahead, but can't be sure /// that you'll succeed. pub fn checkpoint<'me>(&'me mut self) -> Checkpoint<'content, 'me> { let index = self.index; + #[cfg(coloring_in_tokens)] + let shape_start = self.shapes.len(); let seen = self.seen.clone(); Checkpoint { @@ -167,6 +217,8 @@ impl<'content> TokensIterator<'content> { index, seen, committed: false, + #[cfg(coloring_in_tokens)] + shape_start, } } @@ -177,6 +229,8 @@ impl<'content> TokensIterator<'content> { block: impl FnOnce(&mut TokensIterator<'content>) -> Result, ) -> Result { let index = self.index; + #[cfg(coloring_in_tokens)] + let shape_start = self.shapes.len(); let seen = self.seen.clone(); let checkpoint = Checkpoint { @@ -184,6 +238,8 @@ impl<'content> TokensIterator<'content> { index, seen, committed: false, + #[cfg(coloring_in_tokens)] + shape_start, }; let value = block(checkpoint.iterator)?; @@ -192,6 +248,44 @@ impl<'content> TokensIterator<'content> { return Ok(value); } + #[cfg(coloring_in_tokens)] + /// Use a checkpoint when you need to peek more than one token ahead, but can't be sure + /// that you'll succeed. 
+ pub fn atomic_returning_shapes<'me, T>( + &'me mut self, + block: impl FnOnce(&mut TokensIterator<'content>) -> Result, + ) -> (Result, Vec>) { + let index = self.index; + let mut shapes = vec![]; + + let seen = self.seen.clone(); + std::mem::swap(&mut self.shapes, &mut shapes); + + let checkpoint = Checkpoint { + iterator: self, + index, + seen, + committed: false, + shape_start: 0, + }; + + let value = block(checkpoint.iterator); + + let value = match value { + Err(err) => { + drop(checkpoint); + std::mem::swap(&mut self.shapes, &mut shapes); + return (Err(err), vec![]); + } + + Ok(value) => value, + }; + + checkpoint.commit(); + std::mem::swap(&mut self.shapes, &mut shapes); + return (Ok(value), shapes); + } + fn eof_span(&self) -> Span { Span::new(self.span.end(), self.span.end()) } @@ -266,6 +360,8 @@ impl<'content> TokensIterator<'content> { index: self.index, seen: self.seen.clone(), skip_ws: self.skip_ws, + #[cfg(coloring_in_tokens)] + shapes: self.shapes.clone(), } } diff --git a/src/parser/parse_command.rs b/src/parser/parse_command.rs index 935794f3c1..a4365db247 100644 --- a/src/parser/parse_command.rs +++ b/src/parser/parse_command.rs @@ -189,6 +189,7 @@ impl ColoringArgs { #[derive(Debug, Copy, Clone)] pub struct CommandTailShape; +#[cfg(not(coloring_in_tokens))] impl ColorSyntax for CommandTailShape { type Info = (); type Input = Signature; @@ -385,6 +386,206 @@ impl ColorSyntax for CommandTailShape { } } +#[cfg(coloring_in_tokens)] +impl ColorSyntax for CommandTailShape { + type Info = (); + type Input = Signature; + + fn color_syntax<'a, 'b>( + &self, + signature: &Signature, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Self::Info { + let mut args = ColoringArgs::new(token_nodes.len()); + trace_remaining("nodes", token_nodes.clone(), context.source()); + + for (name, kind) in &signature.named { + trace!(target: "nu::color_syntax", "looking for {} : {:?}", name, kind); + + match kind { + NamedType::Switch => { + 
match token_nodes.extract(|t| t.as_flag(name, context.source())) { + Some((pos, flag)) => args.insert(pos, vec![flag.color()]), + None => {} + } + } + NamedType::Mandatory(syntax_type) => { + match extract_mandatory( + signature, + name, + token_nodes, + context.source(), + Span::unknown(), + ) { + Err(_) => { + // The mandatory flag didn't exist at all, so there's nothing to color + } + Ok((pos, flag)) => { + let (_, shapes) = token_nodes.atomic_returning_shapes(|token_nodes| { + token_nodes.color_shape(flag.color()); + token_nodes.move_to(pos); + + if token_nodes.at_end() { + // args.insert(pos, shapes); + // token_nodes.restart(); + return Ok(()); + // continue; + } + + // We still want to color the flag even if the following tokens don't match, so don't + // propagate the error to the parent atomic block if it fails + let _ = token_nodes.atomic(|token_nodes| { + // We can live with unmatched syntax after a mandatory flag + color_syntax(&MaybeSpaceShape, token_nodes, context); + + // If the part after a mandatory flag isn't present, that's ok, but we + // should roll back any whitespace we chomped + color_fallible_syntax(syntax_type, token_nodes, context)?; + + Ok(()) + }); + + Ok(()) + }); + + args.insert(pos, shapes); + token_nodes.restart(); + } + } + } + NamedType::Optional(syntax_type) => { + match extract_optional(name, token_nodes, context.source()) { + Err(_) => { + // The optional flag didn't exist at all, so there's nothing to color + } + Ok(Some((pos, flag))) => { + let (_, shapes) = token_nodes.atomic_returning_shapes(|token_nodes| { + token_nodes.color_shape(flag.color()); + token_nodes.move_to(pos); + + if token_nodes.at_end() { + // args.insert(pos, shapes); + // token_nodes.restart(); + return Ok(()); + // continue; + } + + // We still want to color the flag even if the following tokens don't match, so don't + // propagate the error to the parent atomic block if it fails + let _ = token_nodes.atomic(|token_nodes| { + // We can live with unmatched 
syntax after a mandatory flag + color_syntax(&MaybeSpaceShape, token_nodes, context); + + // If the part after a mandatory flag isn't present, that's ok, but we + // should roll back any whitespace we chomped + color_fallible_syntax(syntax_type, token_nodes, context)?; + + Ok(()) + }); + + Ok(()) + }); + + args.insert(pos, shapes); + token_nodes.restart(); + } + + Ok(None) => { + token_nodes.restart(); + } + } + } + }; + } + + trace_remaining("after named", token_nodes.clone(), context.source()); + + for arg in &signature.positional { + trace!("Processing positional {:?}", arg); + + match arg { + PositionalType::Mandatory(..) => { + if token_nodes.at_end() { + break; + } + } + + PositionalType::Optional(..) => { + if token_nodes.at_end() { + break; + } + } + } + + let pos = token_nodes.pos(false); + + match pos { + None => break, + Some(pos) => { + // We can live with an unmatched positional argument. Hopefully it will be + // matched by a future token + let (_, shapes) = token_nodes.atomic_returning_shapes(|token_nodes| { + color_syntax(&MaybeSpaceShape, token_nodes, context); + + // If no match, we should roll back any whitespace we chomped + color_fallible_syntax(&arg.syntax_type(), token_nodes, context)?; + + Ok(()) + }); + + args.insert(pos, shapes); + } + } + } + + trace_remaining("after positional", token_nodes.clone(), context.source()); + + if let Some(syntax_type) = signature.rest_positional { + loop { + if token_nodes.at_end_possible_ws() { + break; + } + + let pos = token_nodes.pos(false); + + match pos { + None => break, + Some(pos) => { + // If any arguments don't match, we'll fall back to backoff coloring mode + let (result, shapes) = token_nodes.atomic_returning_shapes(|token_nodes| { + color_syntax(&MaybeSpaceShape, token_nodes, context); + + // If no match, we should roll back any whitespace we chomped + color_fallible_syntax(&syntax_type, token_nodes, context)?; + + Ok(()) + }); + + args.insert(pos, shapes); + + match result { + Err(_) => break, 
+ Ok(_) => continue, + } + } + } + } + } + + args.spread_shapes(token_nodes.mut_shapes()); + + // Consume any remaining tokens with backoff coloring mode + color_syntax(&BackoffColoringMode, token_nodes, context); + + // This is pretty dubious, but it works. We should look into a better algorithm that doesn't end up requiring + // this solution. + token_nodes + .mut_shapes() + .sort_by(|a, b| a.span.start().cmp(&b.span.start())); + } +} + fn extract_switch(name: &str, tokens: &mut hir::TokensIterator<'_>, source: &Text) -> Option { tokens .extract(|t| t.as_flag(name, source)) diff --git a/src/shell/helper.rs b/src/shell/helper.rs index dc3ab96dc1..9b5446f5df 100644 --- a/src/shell/helper.rs +++ b/src/shell/helper.rs @@ -85,11 +85,27 @@ impl Highlighter for Helper { let expand_context = self .context .expand_context(&text, Span::new(0, line.len() - 1)); - let mut shapes = vec![]; - // We just constructed a token list that only contains a pipeline, so it can't fail - color_fallible_syntax(&PipelineShape, &mut tokens, &expand_context, &mut shapes) + #[cfg(not(coloring_in_tokens))] + let shapes = { + let mut shapes = vec![]; + color_fallible_syntax( + &PipelineShape, + &mut tokens, + &expand_context, + &mut shapes, + ) .unwrap(); + shapes + }; + + #[cfg(coloring_in_tokens)] + let shapes = { + // We just constructed a token list that only contains a pipeline, so it can't fail + color_fallible_syntax(&PipelineShape, &mut tokens, &expand_context).unwrap(); + + tokens.shapes() + }; trace!(target: "nu::shapes", "SHAPES :: {:?}", @@ -97,7 +113,7 @@ impl Highlighter for Helper { ); for shape in shapes { - let styled = paint_flat_shape(shape, line); + let styled = paint_flat_shape(&shape, line); out.push_str(&styled); } @@ -135,7 +151,7 @@ fn vec_tag(input: Vec>) -> Option { }) } -fn paint_flat_shape(flat_shape: Spanned, line: &str) -> String { +fn paint_flat_shape(flat_shape: &Spanned, line: &str) -> String { let style = match &flat_shape.item { FlatShape::OpenDelimiter(_) => 
Color::White.normal(), FlatShape::CloseDelimiter(_) => Color::White.normal(), From 452b5c58e8d3680bfdf12626a2052effffacf900 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Tue, 15 Oct 2019 15:38:22 +1300 Subject: [PATCH 042/184] Update README.md --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 7df2c92ec9..48fae584a5 100644 --- a/README.md +++ b/README.md @@ -46,16 +46,16 @@ Optional dependencies: * To use Nu with all possible optional features enabled, you'll also need the following: * on Linux (on Debian/Ubuntu): `apt install libxcb-composite0-dev libx11-dev` -To install Nu via cargo (make sure you have installed [rustup](https://rustup.rs/) and the nightly compiler via `rustup install nightly`): +To install Nu via cargo (make sure you have installed [rustup](https://rustup.rs/) and the beta compiler via `rustup install beta`): ``` -cargo +nightly install nu +cargo +beta install nu ``` You can also install Nu with all the bells and whistles (be sure to have installed the [dependencies](https://book.nushell.sh/en/installation#dependencies) for your platform): ``` -cargo +nightly install nu --all-features +cargo +beta install nu --all-features ``` ## Docker From 3a9945637193f7350c79e7c20f220eeccd6464fe Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Tue, 15 Oct 2019 18:41:05 +1300 Subject: [PATCH 043/184] Bump the version ahead of release --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 955beeddf9..e4a89f5d06 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "nu" -version = "0.3.0" +version = "0.4.0" authors = ["Yehuda Katz ", "Jonathan Turner ", "Andrés N. 
Robalino "] description = "A shell for the GitHub era" license = "MIT" From e250a3f213428412c6121f628075e0dc08af1bf3 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Tue, 15 Oct 2019 18:52:15 +1300 Subject: [PATCH 044/184] Update README.md --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index bf6b65585b..3cc54584f6 100644 --- a/README.md +++ b/README.md @@ -173,7 +173,7 @@ We can pipeline this into a command that gets the contents of one of the columns ━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━┯━━━━━━┯━━━━━━━━━ authors │ description │ edition │ license │ name │ version ─────────────────┼────────────────────────────┼─────────┼─────────┼──────┼───────── - [table: 3 rows] │ A shell for the GitHub era │ 2018 │ ISC │ nu │ 0.3.0 + [table: 3 rows] │ A shell for the GitHub era │ 2018 │ MIT │ nu │ 0.4.0 ━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━┷━━━━━━┷━━━━━━━━━ ``` @@ -181,7 +181,7 @@ Finally, we can use commands outside of Nu once we have the data we want: ``` /home/jonathan/Source/nushell(master)> open Cargo.toml | get package.version | echo $it -0.3.0 +0.4.0 ``` Here we use the variable `$it` to refer to the value being piped to the external command. @@ -200,7 +200,7 @@ Nu supports plugins that offer additional functionality to the shell and follow There are a few examples in the `plugins` directory. -Plugins are binaries that are available in your path and follow a "nu_plugin_*" naming convention. These binaries interact with nu via a simple JSON-RPC protocol where the command identifies itself and passes along its configuration, which then makes it available for use. If the plugin is a filter, data streams to it one element at a time, and it can stream data back in return via stdin/stdout. If the plugin is a sink, it is given the full vector of final data and is given free reign over stdin/stdout to use as it pleases. 
+Plugins are binaries that are available in your path and follow a `nu_plugin_*` naming convention. These binaries interact with nu via a simple JSON-RPC protocol where the command identifies itself and passes along its configuration, which then makes it available for use. If the plugin is a filter, data streams to it one element at a time, and it can stream data back in return via stdin/stdout. If the plugin is a sink, it is given the full vector of final data and is given free reign over stdin/stdout to use as it pleases. # Goals From 3f60c9d4169bb7ffb83c1b80190a1eb3e575aa82 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Tue, 15 Oct 2019 04:17:55 -0500 Subject: [PATCH 045/184] 'first' gets first row if no amount desired given. --- src/commands/first.rs | 15 +++++++++++---- tests/commands_test.rs | 22 ++++++++++++++++------ 2 files changed, 27 insertions(+), 10 deletions(-) diff --git a/src/commands/first.rs b/src/commands/first.rs index 71d05be7e1..face12bddc 100644 --- a/src/commands/first.rs +++ b/src/commands/first.rs @@ -7,7 +7,7 @@ pub struct First; #[derive(Deserialize)] pub struct FirstArgs { - amount: Tagged, + rows: Option>, } impl WholeStreamCommand for First { @@ -16,7 +16,7 @@ impl WholeStreamCommand for First { } fn signature(&self) -> Signature { - Signature::build("first").required("amount", SyntaxShape::Int) + Signature::build("first").optional("rows", SyntaxShape::Int) } fn usage(&self) -> &str { @@ -33,8 +33,15 @@ impl WholeStreamCommand for First { } fn first( - FirstArgs { amount }: FirstArgs, + FirstArgs { rows }: FirstArgs, context: RunnableContext, ) -> Result { - Ok(OutputStream::from_input(context.input.values.take(*amount))) + + let rows_desired = if let Some(quantity) = rows { + *quantity + } else { + 1 + }; + + Ok(OutputStream::from_input(context.input.values.take(rows_desired))) } diff --git a/tests/commands_test.rs b/tests/commands_test.rs index cfa6f74334..2a58dd0d13 100644 --- 
a/tests/commands_test.rs +++ b/tests/commands_test.rs @@ -32,13 +32,23 @@ fn first_gets_first_rows_by_amount() { } #[test] -fn first_requires_an_amount() { - Playground::setup("first_test_2", |dirs, _| { - let actual = nu_error!( - cwd: dirs.test(), "ls | first" - ); +fn first_gets_first_row_when_no_amount_given() { + Playground::setup("first_test_2", |dirs, sandbox| { + sandbox.with_files(vec![EmptyFile("los-tres-amigos.PASSTEST.txt")]); - assert!(actual.contains("requires amount parameter")); + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + ls + | get name + | first + | split-column "." + | get Column2 + | echo $it + "# + )); + + assert_eq!(actual, "PASSTEST"); }) } From 96ef478fbc3ecff3884dcb25ebaa9a357e56c531 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Tue, 15 Oct 2019 04:18:35 -0500 Subject: [PATCH 046/184] Better error messages. --- src/commands/classified.rs | 2 +- src/commands/config.rs | 4 ++-- src/plugins/docker.rs | 17 ++++++----------- 3 files changed, 9 insertions(+), 14 deletions(-) diff --git a/src/commands/classified.rs b/src/commands/classified.rs index 440413ddd4..7204af77c6 100644 --- a/src/commands/classified.rs +++ b/src/commands/classified.rs @@ -251,7 +251,7 @@ impl ExternalCommand { ) } else { ShellError::labeled_error( - "Error: $it needs string data", + "$it needs string data", "given something else", self.name_tag.clone(), ) diff --git a/src/commands/config.rs b/src/commands/config.rs index 82fbbf1db6..4f9625b211 100644 --- a/src/commands/config.rs +++ b/src/commands/config.rs @@ -71,7 +71,7 @@ pub fn config( if let Some(v) = get { let key = v.to_string(); let value = result.get(&key).ok_or_else(|| { - ShellError::labeled_error(&format!("Missing key in config"), "key", v.tag()) + ShellError::labeled_error("Missing key in config", "key", v.tag()) })?; let mut results = VecDeque::new(); @@ -121,7 +121,7 @@ pub fn config( config::write(&result, &configuration)?; } else { return 
Err(ShellError::labeled_error( - "{} does not exist in config", + "Key does not exist in config", "key", v.tag(), )); diff --git a/src/plugins/docker.rs b/src/plugins/docker.rs index 9cb8a52e80..e0a06ab3d4 100644 --- a/src/plugins/docker.rs +++ b/src/plugins/docker.rs @@ -21,8 +21,8 @@ async fn docker(sub_command: &String, name: Tag) -> Result>, S "images" => docker_images(name), _ => Err(ShellError::labeled_error( "Unsupported Docker command", - format!("'{}'?", sub_command), - name.span, + "unknown docker command", + name, )), } } @@ -46,7 +46,7 @@ fn process_docker_output(cmd_output: &str, tag: Tag) -> Result .filter(|s| s.trim() != "") .collect(); - let mut dict = TaggedDictBuilder::new(tag); + let mut dict = TaggedDictBuilder::new(&tag); for (i, v) in values.iter().enumerate() { dict.insert(header[i].to_string(), Value::string(v.trim().to_string())); } @@ -92,18 +92,13 @@ impl Plugin for Docker { if let Some(args) = callinfo.args.positional { match &args[0] { Tagged { - item: Value::Primitive(Primitive::String(s)), + item: Value::Primitive(Primitive::String(command)), .. - } => match block_on(docker(&s, callinfo.name_tag)) { + } => match block_on(docker(&command, args[0].tag())) { Ok(v) => return Ok(v.into_iter().map(ReturnSuccess::value).collect()), Err(e) => return Err(e), }, - _ => { - return Err(ShellError::string(format!( - "Unrecognized type in params: {:?}", - args[0] - ))) - } + _ => return Err(ShellError::type_error("string", args[0].tagged_type_name())), } } From 5ed1ed54a6eea351f4852a5777dc8a75586ffd55 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Tue, 15 Oct 2019 05:16:47 -0500 Subject: [PATCH 047/184] Move off 'sum' to internal command 'count' for tests. 
--- tests/command_ls_tests.rs | 39 +++++++++------------------- tests/commands_test.rs | 54 ++++++++++++++++++++++++++------------- 2 files changed, 48 insertions(+), 45 deletions(-) diff --git a/tests/command_ls_tests.rs b/tests/command_ls_tests.rs index f6f5f39f86..a0ae959e12 100644 --- a/tests/command_ls_tests.rs +++ b/tests/command_ls_tests.rs @@ -7,26 +7,21 @@ use helpers::{Playground, Stub::*}; fn ls_lists_regular_files() { Playground::setup("ls_test_1", |dirs, sandbox| { sandbox.with_files(vec![ - EmptyFile("yehuda.10.txt"), - EmptyFile("jonathan.10.txt"), - EmptyFile("andres.10.txt"), + EmptyFile("yehuda.txt"), + EmptyFile("jonathan.txt"), + EmptyFile("andres.txt"), ]); let actual = nu!( cwd: dirs.test(), h::pipeline( r#" ls - | get name - | lines - | split-column "." - | get Column2 - | str --to-int - | sum + | count | echo $it "# )); - assert_eq!(actual, "30"); + assert_eq!(actual, "3"); }) } @@ -34,22 +29,17 @@ fn ls_lists_regular_files() { fn ls_lists_regular_files_using_asterisk_wildcard() { Playground::setup("ls_test_2", |dirs, sandbox| { sandbox.with_files(vec![ - EmptyFile("los.1.txt"), - EmptyFile("tres.1.txt"), - EmptyFile("amigos.1.txt"), - EmptyFile("arepas.1.clu"), + EmptyFile("los.txt"), + EmptyFile("tres.txt"), + EmptyFile("amigos.txt"), + EmptyFile("arepas.clu"), ]); let actual = nu!( cwd: dirs.test(), h::pipeline( r#" ls *.txt - | get name - | lines - | split-column "." - | get Column2 - | str --to-int - | sum + | count | echo $it "# )); @@ -72,16 +62,11 @@ fn ls_lists_regular_files_using_question_mark_wildcard() { cwd: dirs.test(), h::pipeline( r#" ls *.??.txt - | get name - | lines - | split-column "." 
- | get Column2 - | str --to-int - | sum + | count | echo $it "# )); - assert_eq!(actual, "30"); + assert_eq!(actual, "3"); }) } diff --git a/tests/commands_test.rs b/tests/commands_test.rs index 2a58dd0d13..6544042f66 100644 --- a/tests/commands_test.rs +++ b/tests/commands_test.rs @@ -7,48 +7,66 @@ use helpers::{Playground, Stub::*}; fn first_gets_first_rows_by_amount() { Playground::setup("first_test_1", |dirs, sandbox| { sandbox.with_files(vec![ - EmptyFile("los.1.txt"), - EmptyFile("tres.1.txt"), - EmptyFile("amigos.1.txt"), - EmptyFile("arepas.1.clu"), + EmptyFile("los.txt"), + EmptyFile("tres.txt"), + EmptyFile("amigos.txt"), + EmptyFile("arepas.clu"), ]); let actual = nu!( cwd: dirs.test(), h::pipeline( r#" ls - | get name - | first 2 - | split-column "." - | get Column2 - | str --to-int - | sum + | first 3 + | count | echo $it "# )); - assert_eq!(actual, "2"); + assert_eq!(actual, "3"); }) } #[test] -fn first_gets_first_row_when_no_amount_given() { +fn first_gets_all_rows_if_amount_higher_than_all_rows() { Playground::setup("first_test_2", |dirs, sandbox| { - sandbox.with_files(vec![EmptyFile("los-tres-amigos.PASSTEST.txt")]); + sandbox.with_files(vec![ + EmptyFile("los.txt"), + EmptyFile("tres.txt"), + EmptyFile("amigos.txt"), + EmptyFile("arepas.clu"), + ]); let actual = nu!( cwd: dirs.test(), h::pipeline( r#" ls - | get name - | first - | split-column "." 
- | get Column2 + | first 99 + | count | echo $it "# )); - assert_eq!(actual, "PASSTEST"); + assert_eq!(actual, "4"); + }) +} + +#[test] +fn first_gets_first_row_when_no_amount_given() { + Playground::setup("first_test_3", |dirs, sandbox| { + sandbox.with_files(vec![EmptyFile("caballeros.txt"), EmptyFile("arepas.clu")]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + ls + | first + | count + | echo $it + "# + )); + + assert_eq!(actual, "1"); }) } From 821ee5e726508ac35e299f29e740dffd0a3114e2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Tue, 15 Oct 2019 05:19:06 -0500 Subject: [PATCH 048/184] count command introduced. --- src/cli.rs | 2 +- src/commands.rs | 2 ++ src/commands/count.rs | 46 +++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 49 insertions(+), 1 deletion(-) create mode 100644 src/commands/count.rs diff --git a/src/cli.rs b/src/cli.rs index 0182ad1002..ad3eb8d39b 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -258,7 +258,6 @@ pub async fn cli() -> Result<(), Box> { whole_stream_command(Next), whole_stream_command(Previous), whole_stream_command(Debug), - whole_stream_command(Lines), whole_stream_command(Shells), whole_stream_command(SplitColumn), whole_stream_command(SplitRow), @@ -277,6 +276,7 @@ pub async fn cli() -> Result<(), Box> { whole_stream_command(ToYAML), whole_stream_command(SortBy), whole_stream_command(Tags), + whole_stream_command(Count), whole_stream_command(First), whole_stream_command(Last), whole_stream_command(Env), diff --git a/src/commands.rs b/src/commands.rs index 61a45dbb3a..0b155891cc 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -8,6 +8,7 @@ pub(crate) mod classified; pub(crate) mod clip; pub(crate) mod command; pub(crate) mod config; +pub(crate) mod count; pub(crate) mod cp; pub(crate) mod date; pub(crate) mod debug; @@ -78,6 +79,7 @@ pub(crate) use command::{ pub(crate) use classified::ClassifiedCommand; pub(crate) use config::Config; +pub(crate) use 
count::Count; pub(crate) use cp::Cpy; pub(crate) use date::Date; pub(crate) use debug::Debug; diff --git a/src/commands/count.rs b/src/commands/count.rs new file mode 100644 index 0000000000..5e44283737 --- /dev/null +++ b/src/commands/count.rs @@ -0,0 +1,46 @@ +use crate::commands::WholeStreamCommand; +use crate::data::Value; +use crate::errors::ShellError; +use crate::parser::CommandRegistry; +use crate::prelude::*; +use futures::stream::StreamExt; + +pub struct Count; + +#[derive(Deserialize)] +pub struct CountArgs {} + +impl WholeStreamCommand for Count { + fn name(&self) -> &str { + "count" + } + + fn signature(&self) -> Signature { + Signature::build("count") + } + + fn usage(&self) -> &str { + "Show the total number of rows." + } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + args.process(registry, count)?.run() + } +} + +pub fn count( + CountArgs {}: CountArgs, + RunnableContext { input, name, .. }: RunnableContext, +) -> Result { + let stream = async_stream! { + let rows: Vec> = input.values.collect().await; + + yield ReturnSuccess::value(Value::int(rows.len()).tagged(name)) + }; + + Ok(stream.to_output_stream()) +} From ec2e35ad81d2212b49bc6df0ab1a52d61ba23e67 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Tue, 15 Oct 2019 05:41:34 -0500 Subject: [PATCH 049/184] 'last' gets last row if no amount desired given. 
--- src/commands/last.rs | 18 +++++++++++------- tests/commands_test.rs | 43 ++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 54 insertions(+), 7 deletions(-) diff --git a/src/commands/last.rs b/src/commands/last.rs index 321506846b..04db0f4c48 100644 --- a/src/commands/last.rs +++ b/src/commands/last.rs @@ -7,7 +7,7 @@ pub struct Last; #[derive(Deserialize)] pub struct LastArgs { - amount: Tagged, + rows: Option>, } impl WholeStreamCommand for Last { @@ -16,7 +16,7 @@ impl WholeStreamCommand for Last { } fn signature(&self) -> Signature { - Signature::build("last").required("amount", SyntaxShape::Number) + Signature::build("last").optional("rows", SyntaxShape::Number) } fn usage(&self) -> &str { @@ -32,13 +32,17 @@ impl WholeStreamCommand for Last { } } -fn last( - LastArgs { amount }: LastArgs, - context: RunnableContext, -) -> Result { +fn last(LastArgs { rows }: LastArgs, context: RunnableContext) -> Result { let stream = async_stream! { let v: Vec<_> = context.input.into_vec().await; - let count = (*amount as usize); + + let rows_desired = if let Some(quantity) = rows { + *quantity + } else { + 1 + }; + + let count = (rows_desired as usize); if count < v.len() { let k = v.len() - count; for x in v[k..].iter() { diff --git a/tests/commands_test.rs b/tests/commands_test.rs index 6544042f66..4d6fa84a65 100644 --- a/tests/commands_test.rs +++ b/tests/commands_test.rs @@ -70,6 +70,49 @@ fn first_gets_first_row_when_no_amount_given() { }) } +#[test] +fn last_gets_last_rows_by_amount() { + Playground::setup("last_test_1", |dirs, sandbox| { + sandbox.with_files(vec![ + EmptyFile("los.txt"), + EmptyFile("tres.txt"), + EmptyFile("amigos.txt"), + EmptyFile("arepas.clu"), + ]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + ls + | last 3 + | count + | echo $it + "# + )); + + assert_eq!(actual, "3"); + }) +} + +#[test] +fn last_gets_last_row_when_no_amount_given() { + Playground::setup("last_test_2", |dirs, sandbox| { + 
sandbox.with_files(vec![EmptyFile("caballeros.txt"), EmptyFile("arepas.clu")]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + ls + | last + | count + | echo $it + "# + )); + + assert_eq!(actual, "1"); + }) +} + #[test] fn get() { Playground::setup("get_test_1", |dirs, sandbox| { From 0373006710ac23c71af4af1f3a201529d0e9e98c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Tue, 15 Oct 2019 05:42:24 -0500 Subject: [PATCH 050/184] Formatting. --- src/commands/config.rs | 6 +++--- src/commands/first.rs | 5 +++-- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/src/commands/config.rs b/src/commands/config.rs index 4f9625b211..9cde5213de 100644 --- a/src/commands/config.rs +++ b/src/commands/config.rs @@ -70,9 +70,9 @@ pub fn config( if let Some(v) = get { let key = v.to_string(); - let value = result.get(&key).ok_or_else(|| { - ShellError::labeled_error("Missing key in config", "key", v.tag()) - })?; + let value = result + .get(&key) + .ok_or_else(|| ShellError::labeled_error("Missing key in config", "key", v.tag()))?; let mut results = VecDeque::new(); diff --git a/src/commands/first.rs b/src/commands/first.rs index face12bddc..4c1c3b8c35 100644 --- a/src/commands/first.rs +++ b/src/commands/first.rs @@ -36,12 +36,13 @@ fn first( FirstArgs { rows }: FirstArgs, context: RunnableContext, ) -> Result { - let rows_desired = if let Some(quantity) = rows { *quantity } else { 1 }; - Ok(OutputStream::from_input(context.input.values.take(rows_desired))) + Ok(OutputStream::from_input( + context.input.values.take(rows_desired), + )) } From 81affaa584079c8e981accee9ae390c1e0e32c23 Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Tue, 15 Oct 2019 19:10:38 +0200 Subject: [PATCH 051/184] Adds tests for allowed-spaces option. 
--- src/commands/from_ssv.rs | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/src/commands/from_ssv.rs b/src/commands/from_ssv.rs index 1be9b4567a..3c61e211f4 100644 --- a/src/commands/from_ssv.rs +++ b/src/commands/from_ssv.rs @@ -184,4 +184,43 @@ mod tests { let result = string_to_table(input, true); assert_eq!(result, None); } + + #[test] + fn it_allows_a_predefined_number_of_spaces() { + let input = r#" + column a column b + entry 1 entry number 2 + 3 four + "#; + + let result = string_to_table(input, false); + assert_eq!( + result, + Some(vec![ + vec![ + owned("column a", "entry 1"), + owned("column b", "entry number 2") + ], + vec![owned("column a", "3"), owned("column b", "four")] + ]) + ); + } + + #[test] + fn it_trims_remaining_separator_space() { + let input = r#" + colA colB colC + val1 val2 val3 + "#; + + let trimmed = |s: &str| s.trim() == s; + + let result = string_to_table(input, false).unwrap(); + assert_eq!( + true, + result + .iter() + .all(|row| row.iter().all(|(a, b)| trimmed(a) && trimmed(b))) + ) + } } From d32e97b81288297d02e1f044fc1216765ee78bf5 Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Tue, 15 Oct 2019 19:52:12 +0200 Subject: [PATCH 052/184] Implements variable space separator length, version 1. 
--- src/commands/from_ssv.rs | 44 +++++++++++++++++++++++++++++----------- 1 file changed, 32 insertions(+), 12 deletions(-) diff --git a/src/commands/from_ssv.rs b/src/commands/from_ssv.rs index 3c61e211f4..6d2dcfda57 100644 --- a/src/commands/from_ssv.rs +++ b/src/commands/from_ssv.rs @@ -7,9 +7,11 @@ pub struct FromSSV; #[derive(Deserialize)] pub struct FromSSVArgs { headerless: bool, + n: Option>, } const STRING_REPRESENTATION: &str = "from-ssv"; +const DEFAULT_ALLOWED_SPACES: usize = 0; impl WholeStreamCommand for FromSSV { fn name(&self) -> &str { @@ -17,7 +19,9 @@ impl WholeStreamCommand for FromSSV { } fn signature(&self) -> Signature { - Signature::build(STRING_REPRESENTATION).switch("headerless") + Signature::build(STRING_REPRESENTATION) + .switch("headerless") + .named("n", SyntaxShape::Int) } fn usage(&self) -> &str { @@ -33,12 +37,19 @@ impl WholeStreamCommand for FromSSV { } } -fn string_to_table(s: &str, headerless: bool) -> Option>> { +fn string_to_table( + s: &str, + headerless: bool, + split_at: usize, +) -> Option>> { let mut lines = s.lines().filter(|l| !l.trim().is_empty()); + let separator = " ".repeat(std::cmp::max(split_at, 1)); let headers = lines .next()? - .split_whitespace() + .split(&separator) + .map(|s| s.trim()) + .filter(|s| !s.is_empty()) .map(|s| s.to_owned()) .collect::>(); @@ -55,7 +66,11 @@ fn string_to_table(s: &str, headerless: bool) -> Option Option, ) -> Option> { let tag = tag.into(); - let rows = string_to_table(s, headerless)? + let rows = string_to_table(s, headerless, split_at)? .iter() .map(|row| { let mut tagged_dict = TaggedDictBuilder::new(&tag); @@ -87,13 +103,17 @@ fn from_ssv_string_to_value( } fn from_ssv( - FromSSVArgs { headerless }: FromSSVArgs, + FromSSVArgs { headerless, n }: FromSSVArgs, RunnableContext { input, name, .. }: RunnableContext, ) -> Result { let stream = async_stream! 
{ let values: Vec> = input.values.collect().await; let mut concat_string = String::new(); let mut latest_tag: Option = None; + let split_at = match n { + Some(number) => number.item, + None => DEFAULT_ALLOWED_SPACES + }; for value in values { let value_tag = value.tag(); @@ -112,7 +132,7 @@ fn from_ssv( } } - match from_ssv_string_to_value(&concat_string, headerless, name.clone()) { + match from_ssv_string_to_value(&concat_string, headerless, split_at, name.clone()) { Some(x) => match x { Tagged { item: Value::Table(list), ..} => { for l in list { yield ReturnSuccess::value(l) } @@ -151,7 +171,7 @@ mod tests { 3 4 "#; - let result = string_to_table(input, false); + let result = string_to_table(input, false, 1); assert_eq!( result, Some(vec![ @@ -168,7 +188,7 @@ mod tests { 1 2 3 4 "#; - let result = string_to_table(input, true); + let result = string_to_table(input, true, 1); assert_eq!( result, Some(vec![ @@ -181,7 +201,7 @@ mod tests { #[test] fn it_returns_none_given_an_empty_string() { let input = ""; - let result = string_to_table(input, true); + let result = string_to_table(input, true, 1); assert_eq!(result, None); } @@ -193,7 +213,7 @@ mod tests { 3 four "#; - let result = string_to_table(input, false); + let result = string_to_table(input, false, 3); assert_eq!( result, Some(vec![ @@ -215,7 +235,7 @@ mod tests { let trimmed = |s: &str| s.trim() == s; - let result = string_to_table(input, false).unwrap(); + let result = string_to_table(input, false, 2).unwrap(); assert_eq!( true, result From e7b37bee08d8d15af22a9116240d10f617f0be8f Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Tue, 15 Oct 2019 20:58:46 +0200 Subject: [PATCH 053/184] Adds filter test for named param. 
--- tests/filters_test.rs | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/tests/filters_test.rs b/tests/filters_test.rs index ed841af4ca..a84622c37f 100644 --- a/tests/filters_test.rs +++ b/tests/filters_test.rs @@ -383,6 +383,34 @@ fn converts_from_ssv_text_to_structured_table() { }) } +#[test] +fn converts_from_ssv_text_to_structured_table_with_separator_specified() { + Playground::setup("filter_from_ssv_test_1", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContentToBeTrimmed( + "oc_get_svc.txt", + r#" + NAME LABELS SELECTOR IP PORT(S) + docker-registry docker-registry=default docker-registry=default 172.30.78.158 5000/TCP + kubernetes component=apiserver,provider=kubernetes 172.30.0.2 443/TCP + kubernetes-ro component=apiserver,provider=kubernetes 172.30.0.1 80/TCP + "#, + )]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + open oc_get_svc.txt + | from-ssv -n 3 + | nth 0 + | get IP + | echo $it + "# + )); + + assert_eq!(actual, "172.30.78.158"); + }) +} + #[test] fn converts_from_ssv_text_skipping_headers_to_structured_table() { Playground::setup("filter_from_ssv_test_2", |dirs, sandbox| { From b4c639a5d9b286a74e822a800f5fc27e48140ee6 Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Tue, 15 Oct 2019 21:01:14 +0200 Subject: [PATCH 054/184] Updates description of command in readme. --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 3cc54584f6..33d381d710 100644 --- a/README.md +++ b/README.md @@ -284,7 +284,7 @@ Nu adheres closely to a set of goals that make up its design philosophy. 
As feat | from-ini | Parse text as .ini and create table | | from-json | Parse text as .json and create table | | from-sqlite | Parse binary data as sqlite .db and create table | -| from-ssv | Parse text as whitespace-separated values and create table| +| from-ssv -n | Parse text as space-separated values and create table | | from-toml | Parse text as .toml and create table | | from-tsv | Parse text as .tsv and create table | | from-url | Parse urlencoded string and create a table | From 294c2c600ddf098b2106a0634922b559338ee463 Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Tue, 15 Oct 2019 21:10:15 +0200 Subject: [PATCH 055/184] Update the usage string to match the readme. --- src/commands/from_ssv.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/commands/from_ssv.rs b/src/commands/from_ssv.rs index 6d2dcfda57..0d56c20e0a 100644 --- a/src/commands/from_ssv.rs +++ b/src/commands/from_ssv.rs @@ -25,7 +25,7 @@ impl WholeStreamCommand for FromSSV { } fn usage(&self) -> &str { - "Parse text as whitespace-separated values and create a table." + "Parse text as space-separated values and create a table." } fn run( From 5635b8378df40c9b05f2c72597b2926a459cf4b9 Mon Sep 17 00:00:00 2001 From: sdfnz <30536578+sdfnz@users.noreply.github.com> Date: Tue, 15 Oct 2019 14:23:32 -0500 Subject: [PATCH 056/184] Added documentation for the sum command --- docs/commands/sum.md | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 docs/commands/sum.md diff --git a/docs/commands/sum.md b/docs/commands/sum.md new file mode 100644 index 0000000000..f5c59848dd --- /dev/null +++ b/docs/commands/sum.md @@ -0,0 +1,35 @@ +# sum + +This command allows you to calculate the sum of values in a column. + +## Examples + +To get the sum of the file sizes in a directory, simply pipe the size column from the ls command to the sum command. 
+ +```shell +> ls | get size | sum +━━━━━━━━━ + value +━━━━━━━━━ + 51.0 MB +━━━━━━━━━ +``` + +Note that sum only works for integer and byte values at the moment, and if the shell doesn't recognize the values in a column as one of those types, it will return an error. + +```shell +> open example.csv +━━━┯━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━ + # │ fruit │ amount │ quality +───┼─────────┼────────┼────────── + 0 │ apples │ 1 │ fresh + 1 │ bananas │ 2 │ old + 2 │ oranges │ 7 │ fresh + 3 │ kiwis │ 25 │ rotten +━━━┷━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━ +``` + +```shell +> open example.csv | get amount | sum +error: Unrecognized type in stream: Primitive(String("1")) +``` From 1bb301aafa6a2fdebc035301764214e9a3e5bcb6 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Wed, 16 Oct 2019 08:54:46 +1300 Subject: [PATCH 057/184] Bump dep for language-reporting --- Cargo.toml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index e4a89f5d06..cd6be5d9fa 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -41,8 +41,7 @@ serde-hjson = "0.9.1" serde_yaml = "0.8" serde_bytes = "0.11.2" getset = "0.0.8" -#language-reporting = "0.3.1" -language-reporting = { git = "https://github.com/wycats/language-reporting" } +language-reporting = "0.4.0" app_dirs = "1.2.1" csv = "1.1" toml = "0.5.3" From 0d2044e72e21d3a44a034b9eeb64de783cdd323c Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Tue, 15 Oct 2019 22:05:32 +0200 Subject: [PATCH 058/184] Changes flag to `minimum-spaces`. 
--- src/commands/from_ssv.rs | 12 ++++++++---- tests/filters_test.rs | 2 +- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/src/commands/from_ssv.rs b/src/commands/from_ssv.rs index 0d56c20e0a..4cbb3c78f6 100644 --- a/src/commands/from_ssv.rs +++ b/src/commands/from_ssv.rs @@ -7,7 +7,8 @@ pub struct FromSSV; #[derive(Deserialize)] pub struct FromSSVArgs { headerless: bool, - n: Option>, + #[serde(rename(deserialize = "minimum-spaces"))] + minimum_spaces: Option>, } const STRING_REPRESENTATION: &str = "from-ssv"; @@ -21,7 +22,7 @@ impl WholeStreamCommand for FromSSV { fn signature(&self) -> Signature { Signature::build(STRING_REPRESENTATION) .switch("headerless") - .named("n", SyntaxShape::Int) + .named("minimum-spaces", SyntaxShape::Int) } fn usage(&self) -> &str { @@ -103,14 +104,17 @@ fn from_ssv_string_to_value( } fn from_ssv( - FromSSVArgs { headerless, n }: FromSSVArgs, + FromSSVArgs { + headerless, + minimum_spaces, + }: FromSSVArgs, RunnableContext { input, name, .. }: RunnableContext, ) -> Result { let stream = async_stream! { let values: Vec> = input.values.collect().await; let mut concat_string = String::new(); let mut latest_tag: Option = None; - let split_at = match n { + let split_at = match minimum_spaces { Some(number) => number.item, None => DEFAULT_ALLOWED_SPACES }; diff --git a/tests/filters_test.rs b/tests/filters_test.rs index a84622c37f..f0d5dead61 100644 --- a/tests/filters_test.rs +++ b/tests/filters_test.rs @@ -400,7 +400,7 @@ fn converts_from_ssv_text_to_structured_table_with_separator_specified() { cwd: dirs.test(), h::pipeline( r#" open oc_get_svc.txt - | from-ssv -n 3 + | from-ssv --minimum-spaces 3 | nth 0 | get IP | echo $it From f8d44e732bc103caf4c963f1c0ef34511704a206 Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Tue, 15 Oct 2019 22:05:47 +0200 Subject: [PATCH 059/184] Updates default minimum spaces to allow single spaces by default. 
--- src/commands/from_ssv.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/commands/from_ssv.rs b/src/commands/from_ssv.rs index 4cbb3c78f6..001ea8d0c6 100644 --- a/src/commands/from_ssv.rs +++ b/src/commands/from_ssv.rs @@ -12,7 +12,7 @@ pub struct FromSSVArgs { } const STRING_REPRESENTATION: &str = "from-ssv"; -const DEFAULT_ALLOWED_SPACES: usize = 0; +const DEFAULT_MINIMUM_SPACES: usize = 2; impl WholeStreamCommand for FromSSV { fn name(&self) -> &str { @@ -116,7 +116,7 @@ fn from_ssv( let mut latest_tag: Option = None; let split_at = match minimum_spaces { Some(number) => number.item, - None => DEFAULT_ALLOWED_SPACES + None => DEFAULT_MINIMUM_SPACES }; for value in values { From 587bb13be55f030d83459d78c88841886453ffbc Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Tue, 15 Oct 2019 23:19:16 +0200 Subject: [PATCH 060/184] Updates readme with new name of flag. --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 33d381d710..cf36fd2cb1 100644 --- a/README.md +++ b/README.md @@ -284,7 +284,7 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat | from-ini | Parse text as .ini and create table | | from-json | Parse text as .json and create table | | from-sqlite | Parse binary data as sqlite .db and create table | -| from-ssv -n | Parse text as space-separated values and create table | +| from-ssv --minimum-spaces | Parse text as space-separated values and create table | | from-toml | Parse text as .toml and create table | | from-tsv | Parse text as .tsv and create table | | from-url | Parse urlencoded string and create a table | From 74b0e4e5413fdb75b037630f52277540fcfa8270 Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Tue, 15 Oct 2019 23:20:06 +0200 Subject: [PATCH 061/184] Adds more info to the usage string. 
--- src/commands/from_ssv.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/commands/from_ssv.rs b/src/commands/from_ssv.rs index 001ea8d0c6..7aca350964 100644 --- a/src/commands/from_ssv.rs +++ b/src/commands/from_ssv.rs @@ -26,7 +26,7 @@ impl WholeStreamCommand for FromSSV { } fn usage(&self) -> &str { - "Parse text as space-separated values and create a table." + "Parse text as space-separated values and create a table. The default minimum number of spaces counted as a separator is 2." } fn run( From d91b73544275b1a889547311d0c7ad647f762f20 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Wed, 16 Oct 2019 15:09:47 +1300 Subject: [PATCH 062/184] Update cargo.lock --- Cargo.lock | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index da47189204..763ab16798 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1190,13 +1190,13 @@ dependencies = [ [[package]] name = "language-reporting" -version = "0.3.1" -source = "git+https://github.com/wycats/language-reporting#1e2100290fec96f69646e1e61482d80f7a8e7855" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "derive-new 0.5.8 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "render-tree 0.1.1 (git+https://github.com/wycats/language-reporting)", + "render-tree 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "termcolor 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1487,7 +1487,7 @@ dependencies = [ [[package]] name = "nu" -version = "0.3.0" +version = "0.4.0" dependencies = [ "ansi_term 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)", 
"app_dirs 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1519,7 +1519,7 @@ dependencies = [ "image 0.22.3 (registry+https://github.com/rust-lang/crates.io-index)", "indexmap 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", - "language-reporting 0.3.1 (git+https://github.com/wycats/language-reporting)", + "language-reporting 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "mime 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "natural 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2007,7 +2007,7 @@ dependencies = [ [[package]] name = "render-tree" version = "0.1.1" -source = "git+https://github.com/wycats/language-reporting#1e2100290fec96f69646e1e61482d80f7a8e7855" +source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "itertools 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2950,7 +2950,7 @@ dependencies = [ "checksum jpeg-decoder 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "c1aae18ffeeae409c6622c3b6a7ee49792a7e5a062eea1b135fbb74e301792ba" "checksum js-sys 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)" = "2cc9a97d7cec30128fd8b28a7c1f9df1c001ceb9b441e2b755e24130a6b43c79" "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" -"checksum language-reporting 0.3.1 (git+https://github.com/wycats/language-reporting)" = "" +"checksum language-reporting 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4e6a84e1e6cccd818617d299427ad1519f127af2738b1d3a581835ef56ae298b" "checksum lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = 
"76f033c7ad61445c5b347c7382dd1237847eb1bce590fe50365dcb33d546be73" "checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" "checksum lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b294d6fa9ee409a054354afc4352b0b9ef7ca222c69b8812cbea9e7d2bf3783f" @@ -3038,7 +3038,7 @@ dependencies = [ "checksum regex-automata 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "92b73c2a1770c255c240eaa4ee600df1704a38dc3feaa6e949e7fcd4f8dc09f9" "checksum regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)" = "11a7e20d1cce64ef2fed88b66d347f88bd9babb82845b2b858f3edbf59a4f716" "checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e" -"checksum render-tree 0.1.1 (git+https://github.com/wycats/language-reporting)" = "" +"checksum render-tree 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "68ed587df09cfb7ce1bc6fe8f77e24db219f222c049326ccbfb948ec67e31664" "checksum result 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "194d8e591e405d1eecf28819740abed6d719d1a2db87fc0bcdedee9a26d55560" "checksum roxmltree 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b1a3193e568c6e262f817fd07af085c7f79241a947aedd3779d47eadc170e174" "checksum rusqlite 0.20.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2a194373ef527035645a1bc21b10dc2125f73497e6e155771233eb187aedd051" From 9a02fac0e5b2e90412d18d7356121ba84af47ec9 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Thu, 17 Oct 2019 07:28:49 +1300 Subject: [PATCH 063/184] Rename to --- src/format/table.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/format/table.rs b/src/format/table.rs index f4b318dae8..a59e1adafb 100644 --- a/src/format/table.rs +++ b/src/format/table.rs @@ -42,7 +42,7 @@ impl TableView { let 
mut headers = TableView::merge_descriptors(values); if headers.len() == 0 { - headers.push("".to_string()); + headers.push("".to_string()); } let mut entries = vec![]; From a0ed6ea3c8f1c774163886eb4ebbbe324aeca527 Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Thu, 17 Oct 2019 00:17:58 +0200 Subject: [PATCH 064/184] Adds new tests and updates old ones. New tests are added to test for additional cases that might be trickier to handle with the new logic. Old tests are updated where their expectations are no longer expected to hold true. For instance: previously, lines would be treated separately, allowing any index offset between columns on different rows, as long as they had the same row index as decided by a separator. When this is no longer the case, some things need to be adjusted. --- src/commands/from_ssv.rs | 75 +++++++++++++++++++++++++++++++++++----- 1 file changed, 66 insertions(+), 9 deletions(-) diff --git a/src/commands/from_ssv.rs b/src/commands/from_ssv.rs index 7aca350964..39ad1f7c73 100644 --- a/src/commands/from_ssv.rs +++ b/src/commands/from_ssv.rs @@ -171,9 +171,9 @@ mod tests { a b - 1 2 + 1 2 - 3 4 + 3 4 "#; let result = string_to_table(input, false, 1); assert_eq!( @@ -185,6 +185,20 @@ mod tests { ); } + #[test] + fn it_deals_with_single_column_input() { + let input = r#" + a + 1 + 2 + "#; + let result = string_to_table(input, false, 1); + assert_eq!( + result, + Some(vec![vec![owned("a", "1")], vec![owned("a", "2")]]) + ); + } + #[test] fn it_ignores_headers_when_headerless() { let input = r#" @@ -206,15 +220,15 @@ mod tests { fn it_returns_none_given_an_empty_string() { let input = ""; let result = string_to_table(input, true, 1); - assert_eq!(result, None); + assert!(result.is_none()); } #[test] fn it_allows_a_predefined_number_of_spaces() { let input = r#" column a column b - entry 1 entry number 2 - 3 four + entry 1 entry number 2 + 3 four "#; let result = string_to_table(input, false, 3); @@ -239,12 +253,55 @@ mod tests { let 
trimmed = |s: &str| s.trim() == s; + let result = string_to_table(input, false, 2).unwrap(); + assert!(result + .iter() + .all(|row| row.iter().all(|(a, b)| trimmed(a) && trimmed(b)))) + } + + #[test] + fn it_keeps_empty_columns() { + let input = r#" + colA col B col C + val2 val3 + val4 val 5 val 6 + val7 val8 + "#; + let result = string_to_table(input, false, 2).unwrap(); assert_eq!( - true, - result - .iter() - .all(|row| row.iter().all(|(a, b)| trimmed(a) && trimmed(b))) + result, + vec![ + vec![ + owned("colA", ""), + owned("col B", "val2"), + owned("col C", "val3") + ], + vec![ + owned("colA", "val4"), + owned("col B", "val 5"), + owned("col C", "val 6") + ], + vec![ + owned("colA", "val7"), + owned("col B", ""), + owned("col C", "val8") + ], + ] + ) + } + + #[test] + fn it_drops_trailing_values() { + let input = r#" + colA col B + val1 val2 trailing value that should be ignored + "#; + + let result = string_to_table(input, false, 2).unwrap(); + assert_eq!( + result, + vec![vec![owned("colA", "val1"), owned("col B", "val2"),],] ) } } From 9b1ff9b5667f864212efa69e51716fbd47cdd157 Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Thu, 17 Oct 2019 00:20:48 +0200 Subject: [PATCH 065/184] Updates the table creation logic. The table parsing/creation logic has changed from treating every line the same to processing each line in context of the column header's placement. Previously, lines on separate rows would go towards the same column as long as they were the same index based on separator alone. Now, each item's index is based on vertical alignment to the column header. This may seem brittle, but it solves the problem of some tables operating with empty cells that would cause remaining values to be paired with the wrong column. Based on kubernetes output (get pods, events), the new method has shown to have much greater success rates for parsing. 
--- src/commands/from_ssv.rs | 44 +++++++++++++++++++++++----------------- 1 file changed, 25 insertions(+), 19 deletions(-) diff --git a/src/commands/from_ssv.rs b/src/commands/from_ssv.rs index 39ad1f7c73..3af9e76084 100644 --- a/src/commands/from_ssv.rs +++ b/src/commands/from_ssv.rs @@ -46,33 +46,39 @@ fn string_to_table( let mut lines = s.lines().filter(|l| !l.trim().is_empty()); let separator = " ".repeat(std::cmp::max(split_at, 1)); - let headers = lines - .next()? - .split(&separator) - .map(|s| s.trim()) - .filter(|s| !s.is_empty()) - .map(|s| s.to_owned()) - .collect::>(); + let headers_raw = lines.next()?; - let header_row = if headerless { - (1..=headers.len()) - .map(|i| format!("Column{}", i)) - .collect::>() - } else { + let headers = headers_raw + .trim() + .split(&separator) + .map(str::trim) + .filter(|s| !s.is_empty()) + .map(|s| (headers_raw.find(s).unwrap(), s.to_owned())); + + let columns = if headerless { headers + .enumerate() + .map(|(header_no, (string_index, _))| { + (string_index, format!("Column{}", header_no + 1)) + }) + .collect::>() + } else { + headers.collect::>() }; Some( lines .map(|l| { - header_row + columns .iter() - .zip( - l.split(&separator) - .map(|s| s.trim()) - .filter(|s| !s.is_empty()), - ) - .map(|(a, b)| (String::from(a), String::from(b))) + .enumerate() + .filter_map(|(i, (start, col))| { + (match columns.get(i + 1) { + Some((end, _)) => l.get(*start..*end), + None => l.get(*start..)?.split(&separator).next(), + }) + .and_then(|s| Some((col.clone(), String::from(s.trim())))) + }) .collect() }) .collect(), From 305ca11eb57801ec491984f336294c3331e04903 Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Thu, 17 Oct 2019 09:40:00 +0200 Subject: [PATCH 066/184] Changes the parsing to use the full value of the final column. Previously it would split the last column on the first separator value found between the start of the column and the end of the row. 
Changing this to using everything from the start of the column to the end of the string makes it behave more similarly to the other columns, making it less surprising. --- src/commands/from_ssv.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/commands/from_ssv.rs b/src/commands/from_ssv.rs index 3af9e76084..913df9981a 100644 --- a/src/commands/from_ssv.rs +++ b/src/commands/from_ssv.rs @@ -75,7 +75,7 @@ fn string_to_table( .filter_map(|(i, (start, col))| { (match columns.get(i + 1) { Some((end, _)) => l.get(*start..*end), - None => l.get(*start..)?.split(&separator).next(), + None => l.get(*start..), }) .and_then(|s| Some((col.clone(), String::from(s.trim())))) }) @@ -298,16 +298,16 @@ mod tests { } #[test] - fn it_drops_trailing_values() { + fn it_uses_the_full_final_column() { let input = r#" colA col B - val1 val2 trailing value that should be ignored + val1 val2 trailing value that should be included "#; let result = string_to_table(input, false, 2).unwrap(); assert_eq!( result, - vec![vec![owned("colA", "val1"), owned("col B", "val2"),],] + vec![vec![owned("colA", "val1"), owned("col B", "val2 trailing value that should be included"),],] ) } } From f21405399cb60df969158abe7a40abc2aee780cd Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Thu, 17 Oct 2019 09:56:06 +0200 Subject: [PATCH 067/184] Formats file. 
--- src/commands/from_ssv.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/commands/from_ssv.rs b/src/commands/from_ssv.rs index 913df9981a..f14d89356a 100644 --- a/src/commands/from_ssv.rs +++ b/src/commands/from_ssv.rs @@ -307,7 +307,10 @@ mod tests { let result = string_to_table(input, false, 2).unwrap(); assert_eq!( result, - vec![vec![owned("colA", "val1"), owned("col B", "val2 trailing value that should be included"),],] + vec![vec![ + owned("colA", "val1"), + owned("col B", "val2 trailing value that should be included"), + ],] ) } } From 321629a6932bc5f311383c10d48f908df8acbde9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Antti=20Ker=C3=A4nen?= Date: Thu, 17 Oct 2019 22:57:02 +0300 Subject: [PATCH 068/184] Fix size comparison in 'where size' Fixes #840 --- src/parser/hir/syntax_shape/expression/unit.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/parser/hir/syntax_shape/expression/unit.rs b/src/parser/hir/syntax_shape/expression/unit.rs index 03602f1088..2c01038ebc 100644 --- a/src/parser/hir/syntax_shape/expression/unit.rs +++ b/src/parser/hir/syntax_shape/expression/unit.rs @@ -78,9 +78,9 @@ fn unit_size(input: &str, bare_span: Span) -> IResult<&str, (Spanned, value(Unit::B, alt((tag("B"), tag("b")))), value(Unit::KB, alt((tag("KB"), tag("kb"), tag("Kb")))), value(Unit::MB, alt((tag("MB"), tag("mb"), tag("Mb")))), - value(Unit::MB, alt((tag("GB"), tag("gb"), tag("Gb")))), - value(Unit::MB, alt((tag("TB"), tag("tb"), tag("Tb")))), - value(Unit::MB, alt((tag("PB"), tag("pb"), tag("Pb")))), + value(Unit::GB, alt((tag("GB"), tag("gb"), tag("Gb")))), + value(Unit::TB, alt((tag("TB"), tag("tb"), tag("Tb")))), + value(Unit::PB, alt((tag("PB"), tag("pb"), tag("Pb")))), )))(input)?; let start_span = number.span.end(); From 5ce4b12cc18c25a615bec30bf824f11dc4bf51d0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Fri, 18 Oct 2019 07:08:04 -0500 Subject: [PATCH 069/184] Inc plugin 
increments appropiately given a table containing a version in it. --- src/data/base.rs | 130 ++++++++++++++++++++++++++++++++++++++++++- src/lib.rs | 4 ++ src/plugins/embed.rs | 44 ++++++--------- src/plugins/inc.rs | 19 +++++-- 4 files changed, 164 insertions(+), 33 deletions(-) diff --git a/src/data/base.rs b/src/data/base.rs index f7b875ef53..c95ee26e86 100644 --- a/src/data/base.rs +++ b/src/data/base.rs @@ -8,6 +8,7 @@ use crate::Text; use chrono::{DateTime, Utc}; use chrono_humanize::Humanize; use derive_new::new; +use indexmap::IndexMap; use log::trace; use serde::{Deserialize, Serialize}; use std::fmt; @@ -452,7 +453,7 @@ impl Value { match self { Value::Primitive(p) => p.type_name(), Value::Row(_) => format!("row"), - Value::Table(_) => format!("list"), + Value::Table(_) => format!("table"), Value::Block(_) => format!("block"), Value::Error(_) => format!("error"), } @@ -684,6 +685,15 @@ impl Value { Value::Row(ref mut o) => { current = o; } + Value::Table(ref mut l) => match l.get_mut(0) { + Some(Tagged { + item: Value::Row(ref mut dict), + .. 
+ }) => { + current = dict; + } + _ => return None, + }, _ => return None, } } @@ -769,6 +779,21 @@ impl Value { } } + #[allow(unused)] + pub fn row(entries: IndexMap>) -> Value { + Value::Row(entries.into()) + } + + pub fn table(list: &Vec>) -> Value { + let mut out = vec![]; + + for v in list { + out.push(v.clone()); + } + + Value::Table(out) + } + pub fn string(s: impl Into) -> Value { Value::Primitive(Primitive::String(s.into())) } @@ -927,3 +952,106 @@ fn coerce_compare_primitive( _ => return Err((left.type_name(), right.type_name())), }) } +#[cfg(test)] +mod tests { + + use crate::data::meta::*; + use crate::Value; + use indexmap::IndexMap; + + fn string(input: impl Into) -> Tagged { + Value::string(input.into()).tagged_unknown() + } + + fn row(entries: IndexMap>) -> Tagged { + Value::row(entries).tagged_unknown() + } + + fn table(list: &Vec>) -> Tagged { + Value::table(list).tagged_unknown() + } + + fn column_path(paths: &Vec>) -> Tagged>> { + let paths = paths + .iter() + .map(|p| string(p.as_string().unwrap())) + .collect(); + let table = table(&paths); + table.as_column_path().unwrap() + } + #[test] + fn gets_the_matching_field_from_a_row() { + let field = "amigos"; + + let row = Value::row(indexmap! { + field.into() => table(&vec![ + string("andres"), + string("jonathan"), + string("yehuda"), + ]), + }); + + assert_eq!( + table(&vec![ + string("andres"), + string("jonathan"), + string("yehuda") + ]), + *row.get_data_by_key(field).unwrap() + ); + } + + #[test] + fn gets_the_first_row_with_matching_field_from_rows_inside_a_table() { + let field = "name"; + + let table = Value::table(&vec![ + row(indexmap! {field.into() => string("andres")}), + row(indexmap! {field.into() => string("jonathan")}), + row(indexmap! 
{field.into() => string("yehuda")}), + ]); + + assert_eq!(string("andres"), *table.get_data_by_key(field).unwrap()); + } + + #[test] + fn gets_the_matching_field_from_nested_rows_inside_a_row() { + let _field = "package.version"; + let field = vec![string("package"), string("version")]; + let field = column_path(&field); + + let (version, tag) = string("0.4.0").into_parts(); + + let row = Value::row(indexmap! { + "package".into() => row(indexmap!{ + "name".into() => string("nu"), + "version".into() => string("0.4.0"), + }) + }); + + assert_eq!(version, **row.get_data_by_column_path(tag, &field).unwrap()) + } + + #[test] + fn gets_the_first_row_with_matching_field_from_nested_rows_inside_a_table() { + let _field = "package.authors.name"; + let field = vec![string("package"), string("authors"), string("name")]; + let field = column_path(&field); + + let (name, tag) = string("Andrés N. Robalino").into_parts(); + + let row = Value::row(indexmap! { + "package".into() => row(indexmap!{ + "authors".into() => table(&vec![ + row(indexmap!{"name".into()=> string("Andrés N. 
Robalino")}), + row(indexmap!{"name".into()=> string("Jonathan Turner")}), + row(indexmap!{"name".into() => string("Yehuda Katz")}) + ]), + "name".into() => string("nu"), + "version".into() => string("0.4.0"), + }) + }); + + assert_eq!(name, **row.get_data_by_column_path(tag, &field).unwrap()) + } +} diff --git a/src/lib.rs b/src/lib.rs index bfcaa4510f..520e08a136 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,5 +1,9 @@ #![recursion_limit = "1024"] +#[cfg(test)] +#[macro_use] +extern crate indexmap; + #[macro_use] mod prelude; diff --git a/src/plugins/embed.rs b/src/plugins/embed.rs index 97dd6a2713..e659bfeb3b 100644 --- a/src/plugins/embed.rs +++ b/src/plugins/embed.rs @@ -1,6 +1,9 @@ +#[macro_use] +extern crate indexmap; + use nu::{ serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature, - SyntaxShape, Tag, Tagged, TaggedDictBuilder, Value, + SyntaxShape, Tag, Tagged, TaggedItem, Value, }; struct Embed { @@ -16,22 +19,8 @@ impl Embed { } fn embed(&mut self, value: Tagged) -> Result<(), ShellError> { - match value { - Tagged { item, tag } => match &self.field { - Some(_) => { - self.values.push(Tagged { - item: item, - tag: tag, - }); - Ok(()) - } - None => Err(ShellError::labeled_error( - "embed needs a field when embedding a value", - "original value", - &tag, - )), - }, - } + self.values.push(value); + Ok(()) } } @@ -39,8 +28,7 @@ impl Plugin for Embed { fn config(&mut self) -> Result { Ok(Signature::build("embed") .desc("Embeds a new field to the table.") - .required("Field", SyntaxShape::String) - .rest(SyntaxShape::String) + .optional("field", SyntaxShape::String) .filter()) } @@ -67,15 +55,15 @@ impl Plugin for Embed { } fn end_filter(&mut self) -> Result, ShellError> { - let mut root = TaggedDictBuilder::new(Tag::unknown()); - root.insert_tagged( - self.field.as_ref().unwrap(), - Tagged { - item: Value::Table(self.values.clone()), - tag: Tag::unknown(), - }, - ); - 
Ok(vec![ReturnSuccess::value(root.into_tagged_value())]) + let row = Value::row(indexmap! { + match &self.field { + Some(key) => key.clone(), + None => "root".into(), + } => Value::table(&self.values).tagged(Tag::unknown()), + }) + .tagged(Tag::unknown()); + + Ok(vec![ReturnSuccess::value(row)]) } } diff --git a/src/plugins/inc.rs b/src/plugins/inc.rs index 38788014ad..1cb6cb2b97 100644 --- a/src/plugins/inc.rs +++ b/src/plugins/inc.rs @@ -14,7 +14,7 @@ pub enum SemVerAction { Patch, } -pub type ColumnPath = Tagged>>; +pub type ColumnPath = Vec>; struct Inc { field: Option, @@ -83,6 +83,16 @@ impl Inc { Ok(Value::bytes(b + 1 as u64).tagged(value.tag())) } Value::Primitive(Primitive::String(ref s)) => Ok(self.apply(&s)?.tagged(value.tag())), + Value::Table(values) => { + if values.len() == 1 { + return Ok(Value::Table(vec![self.inc(values[0].clone())?]).tagged(value.tag())); + } else { + return Err(ShellError::type_error( + "incrementable value", + value.tagged_type_name(), + )); + } + } Value::Row(_) => match self.field { Some(ref f) => { let replacement = match value.item.get_data_by_column_path(value.tag(), f) { @@ -91,10 +101,11 @@ impl Inc { return Err(ShellError::labeled_error( "inc could not find field to replace", "column name", - &f.tag, + value.tag(), )) } }; + match value.item.replace_data_at_column_path( value.tag(), f, @@ -105,7 +116,7 @@ impl Inc { return Err(ShellError::labeled_error( "inc could not find field to replace", "column name", - &f.tag, + value.tag(), )) } } @@ -151,7 +162,7 @@ impl Plugin for Inc { item: Value::Table(_), .. 
} => { - self.field = Some(table.as_column_path()?); + self.field = Some(table.as_column_path()?.item().to_vec()); } value => return Err(ShellError::type_error("table", value.tagged_type_name())), } From e913e26c01a77e207b5619302ccb0e1d036632c3 Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Fri, 18 Oct 2019 20:02:24 +0200 Subject: [PATCH 070/184] Deletes impl From<&str> The code still compiles, so this doesn't seem to break anything. That also means it's not critical to fix it, but having dead code around isn't great either. --- src/parser/parse/unit.rs | 6 ------ 1 file changed, 6 deletions(-) diff --git a/src/parser/parse/unit.rs b/src/parser/parse/unit.rs index aa19580ac2..e89986f8ac 100644 --- a/src/parser/parse/unit.rs +++ b/src/parser/parse/unit.rs @@ -39,12 +39,6 @@ impl Unit { } } -impl From<&str> for Unit { - fn from(input: &str) -> Unit { - Unit::from_str(input).unwrap() - } -} - impl FromStr for Unit { type Err = (); fn from_str(input: &str) -> Result::Err> { From fc1301c92d191ba7cc2b825edcd04f2aa37d4ac6 Mon Sep 17 00:00:00 2001 From: jdvr Date: Sat, 19 Oct 2019 00:41:24 +0200 Subject: [PATCH 071/184] #194 Added trash crate and send files to the trash using a flag --- Cargo.lock | 10 ++++++++++ Cargo.toml | 1 + src/commands/rm.rs | 4 +++- src/shell/filesystem_shell.rs | 9 +++++++-- 4 files changed, 21 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 763ab16798..510cc4d8ba 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1558,6 +1558,7 @@ dependencies = [ "term 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", "textwrap 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", "toml 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", + "trash 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "which 2.0.1 
(registry+https://github.com/rust-lang/crates.io-index)", @@ -2493,6 +2494,14 @@ dependencies = [ "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "trash" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "typenum" version = "1.11.2" @@ -3093,6 +3102,7 @@ dependencies = [ "checksum tokio-io 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "5090db468dad16e1a7a54c8c67280c5e4b544f3d3e018f0b913b400261f85926" "checksum toml 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)" = "758664fc71a3a69038656bee8b6be6477d2a6c315a6b81f7081f591bffa4111f" "checksum toml 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c7aabe75941d914b72bf3e5d3932ed92ce0664d49d8432305a8b547c37227724" +"checksum trash 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f2f24d31505f49e989b1ee2c03c323251f6763d5907d471b71192dac92e323f8" "checksum typenum 1.11.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6d2783fe2d6b8c1101136184eb41be8b1ad379e4657050b8aaff0c79ee7575f9" "checksum unicase 2.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2e2e6bd1e59e56598518beb94fd6db628ded570326f0a98c679a304bd9f00150" "checksum unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5" diff --git a/Cargo.toml b/Cargo.toml index cd6be5d9fa..cf1e5d791d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -73,6 +73,7 @@ bigdecimal = { version = "0.1.0", features = ["serde"] } natural = "0.3.0" serde_urlencoded = "0.6.1" sublime_fuzzy = "0.5" +trash = "1.0.0" regex = {version = "1", optional = true } neso = { version = "0.5.0", optional = true } diff --git a/src/commands/rm.rs b/src/commands/rm.rs index ac5aeaae7d..c1e671f4b0 100644 --- a/src/commands/rm.rs +++ 
b/src/commands/rm.rs @@ -11,6 +11,7 @@ pub struct Remove; pub struct RemoveArgs { pub target: Tagged, pub recursive: Tagged, + pub trash: Tagged, } impl PerItemCommand for Remove { @@ -21,11 +22,12 @@ impl PerItemCommand for Remove { fn signature(&self) -> Signature { Signature::build("rm") .required("path", SyntaxShape::Pattern) + .switch("trash") .switch("recursive") } fn usage(&self) -> &str { - "Remove a file, (for removing directory append '--recursive')" + "Remove a file. Append '--recursive' to remove directories and '--trash' for seding it to system recycle bin" } fn run( diff --git a/src/shell/filesystem_shell.rs b/src/shell/filesystem_shell.rs index f0adeebeb8..3cf9afab21 100644 --- a/src/shell/filesystem_shell.rs +++ b/src/shell/filesystem_shell.rs @@ -8,6 +8,7 @@ use crate::prelude::*; use crate::shell::completer::NuCompleter; use crate::shell::shell::Shell; use crate::utils::FileStructure; +use trash as SendToTrash; use rustyline::completion::FilenameCompleter; use rustyline::hint::{Hinter, HistoryHinter}; use std::path::{Path, PathBuf}; @@ -860,7 +861,7 @@ impl Shell for FilesystemShell { fn rm( &self, - RemoveArgs { target, recursive }: RemoveArgs, + RemoveArgs { target, recursive, trash }: RemoveArgs, name: Tag, path: &str, ) -> Result { @@ -946,7 +947,11 @@ impl Shell for FilesystemShell { if path.is_dir() { std::fs::remove_dir_all(&path)?; } else if path.is_file() { - std::fs::remove_file(&path)?; + if trash.item { + SendToTrash::remove(path).unwrap(); + } else { + std::fs::remove_file(&path)?; + } } } Err(e) => { From 0e86430ea34fba749c8d7da02f89f1659cc5e264 Mon Sep 17 00:00:00 2001 From: "notryanb@gmail.com" Date: Tue, 8 Oct 2019 22:17:02 -0400 Subject: [PATCH 072/184] get very basic average working --- Cargo.toml | 4 ++ src/plugins/average.rs | 106 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 110 insertions(+) create mode 100644 src/plugins/average.rs diff --git a/Cargo.toml b/Cargo.toml index cd6be5d9fa..16b8c85863 100644 --- 
a/Cargo.toml +++ b/Cargo.toml @@ -119,6 +119,10 @@ path = "src/plugins/inc.rs" name = "nu_plugin_sum" path = "src/plugins/sum.rs" +[[bin]] +name = "nu_plugin_average" +path = "src/plugins/average.rs" + [[bin]] name = "nu_plugin_embed" path = "src/plugins/embed.rs" diff --git a/src/plugins/average.rs b/src/plugins/average.rs new file mode 100644 index 0000000000..5e76560d40 --- /dev/null +++ b/src/plugins/average.rs @@ -0,0 +1,106 @@ +use nu::{ + serve_plugin, CoerceInto, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature, + Tagged, TaggedItem, Value, +}; + +#[derive(Debug)] +struct Average { + total: Option>, + count: u64, +} + +impl Average { + fn new() -> Average { + Average { total: None, count: 1 } + } + + fn average(&mut self, value: Tagged) -> Result<(), ShellError> { + match value.item() { + Value::Primitive(Primitive::Nothing) => Ok(()), + Value::Primitive(Primitive::Int(i)) => { + match &self.total { + Some(Tagged { + item: Value::Primitive(Primitive::Int(j)), + tag, + }) => { + self.total = Some(Value::int(i + j).tagged(tag)); + self.count = self.count + 1; + Ok(()) + } + None => { + self.total = Some(value.clone()); + Ok(()) + } + _ => Err(ShellError::string(format!( + "Could not calculate average of non-integer or unrelated types" + ))), + } + } + Value::Primitive(Primitive::Bytes(b)) => { + match self.total { + Some(Tagged { + item: Value::Primitive(Primitive::Bytes(j)), + tag, + }) => { + self.total = Some(Value::int(b + j).tagged(tag)); + self.count = self.count + 1; + Ok(()) + } + None => { + self.total = Some(value); + Ok(()) + } + _ => Err(ShellError::string(format!( + "Could not calculate average of non-integer or unrelated types" + ))), + } + } + x => Err(ShellError::string(format!( + "Unrecognized type in stream: {:?}", + x + ))), + } + + } +} + +impl Plugin for Average { + fn config(&mut self) -> Result { + Ok(Signature::build("average") + .desc("Compute the average of a column of numerical values.") + .filter()) + 
} + + fn begin_filter(&mut self, _: CallInfo) -> Result, ShellError> { + Ok(vec![]) + } + + fn filter(&mut self, input: Tagged) -> Result, ShellError> { + self.average(input)?; + Ok(vec![]) + } + + fn end_filter(&mut self) -> Result, ShellError> { + match self.total { + None => Ok(vec![]), + Some(ref v) => { + match v.item() { + Value::Primitive(Primitive::Int(i)) => { + let total: u64 = i.tagged(v.tag).coerce_into("converting for average")?; + let avg = total as f64 / self.count as f64; + let decimal_value: Value= Primitive::from(avg).into(); + let tagged_value = decimal_value.tagged(v.tag); + Ok(vec![ReturnSuccess::value(tagged_value)]) + } + _ => unreachable!() + + } + }, + } + } +} + +fn main() { + serve_plugin(&mut Average::new()); +} + From 8262c2dd333137886480fc331bf3cbe800bf1afd Mon Sep 17 00:00:00 2001 From: "notryanb@gmail.com" Date: Sun, 13 Oct 2019 21:08:14 -0400 Subject: [PATCH 073/184] add support for average on byte columns and fmt the code --- src/plugins/average.rs | 118 +++++++++++++++++++++++------------------ 1 file changed, 65 insertions(+), 53 deletions(-) diff --git a/src/plugins/average.rs b/src/plugins/average.rs index 5e76560d40..8f82a23d10 100644 --- a/src/plugins/average.rs +++ b/src/plugins/average.rs @@ -1,6 +1,6 @@ use nu::{ - serve_plugin, CoerceInto, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature, - Tagged, TaggedItem, Value, + serve_plugin, CallInfo, CoerceInto, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, + Signature, Tagged, TaggedItem, Value, }; #[derive(Debug)] @@ -11,56 +11,61 @@ struct Average { impl Average { fn new() -> Average { - Average { total: None, count: 1 } + Average { + total: None, + count: 0, + } } fn average(&mut self, value: Tagged) -> Result<(), ShellError> { match value.item() { Value::Primitive(Primitive::Nothing) => Ok(()), - Value::Primitive(Primitive::Int(i)) => { - match &self.total { - Some(Tagged { - item: Value::Primitive(Primitive::Int(j)), - tag, - }) => 
{ - self.total = Some(Value::int(i + j).tagged(tag)); - self.count = self.count + 1; - Ok(()) - } - None => { - self.total = Some(value.clone()); - Ok(()) - } - _ => Err(ShellError::string(format!( - "Could not calculate average of non-integer or unrelated types" - ))), + Value::Primitive(Primitive::Int(i)) => match &self.total { + Some(Tagged { + item: Value::Primitive(Primitive::Int(j)), + tag, + }) => { + self.total = Some(Value::int(i + j).tagged(tag)); + self.count += 1; + Ok(()) } - } - Value::Primitive(Primitive::Bytes(b)) => { - match self.total { - Some(Tagged { - item: Value::Primitive(Primitive::Bytes(j)), - tag, - }) => { - self.total = Some(Value::int(b + j).tagged(tag)); - self.count = self.count + 1; - Ok(()) - } - None => { - self.total = Some(value); - Ok(()) - } - _ => Err(ShellError::string(format!( - "Could not calculate average of non-integer or unrelated types" - ))), + None => { + self.total = Some(value.clone()); + self.count += 1; + Ok(()) } - } - x => Err(ShellError::string(format!( - "Unrecognized type in stream: {:?}", - x - ))), + _ => Err(ShellError::labeled_error( + "Could calculate average of non-integer or unrelated types", + "source", + value.tag, + )), + }, + Value::Primitive(Primitive::Bytes(b)) => match &self.total { + Some(Tagged { + item: Value::Primitive(Primitive::Bytes(j)), + tag, + }) => { + self.total = Some(Value::bytes(b + j).tagged(tag)); + self.count += 1; + Ok(()) + } + None => { + self.total = Some(value); + self.count += 1; + Ok(()) + } + _ => Err(ShellError::labeled_error( + "Could calculate average of non-integer or unrelated types", + "source", + value.tag, + )), + }, + x => Err(ShellError::labeled_error( + format!("Unrecognized type in stream: {:?}", x), + "source", + value.tag, + )), } - } } @@ -83,19 +88,27 @@ impl Plugin for Average { fn end_filter(&mut self) -> Result, ShellError> { match self.total { None => Ok(vec![]), - Some(ref v) => { - match v.item() { + Some(ref inner) => { + match inner.item() { 
Value::Primitive(Primitive::Int(i)) => { - let total: u64 = i.tagged(v.tag).coerce_into("converting for average")?; + let total: u64 = i + .tagged(inner.tag.clone()) + .coerce_into("converting for average")?; let avg = total as f64 / self.count as f64; - let decimal_value: Value= Primitive::from(avg).into(); - let tagged_value = decimal_value.tagged(v.tag); + let primitive_value: Value = Primitive::from(avg).into(); + let tagged_value = primitive_value.tagged(inner.tag.clone()); Ok(vec![ReturnSuccess::value(tagged_value)]) } - _ => unreachable!() - + Value::Primitive(Primitive::Bytes(bytes)) => { + // let total: u64 = b.tagged(inner.tag.clone()).coerce_into("converting for average")?; + let avg = *bytes as f64 / self.count as f64; + let primitive_value: Value = Primitive::from(avg).into(); + let tagged_value = primitive_value.tagged(inner.tag.clone()); + Ok(vec![ReturnSuccess::value(tagged_value)]) + } + _ => Ok(vec![]), } - }, + } } } } @@ -103,4 +116,3 @@ impl Plugin for Average { fn main() { serve_plugin(&mut Average::new()); } - From 43fbf4345d626bc1d0d6fbeb0c03c8ffc9c538fa Mon Sep 17 00:00:00 2001 From: "notryanb@gmail.com" Date: Mon, 14 Oct 2019 17:55:42 -0400 Subject: [PATCH 074/184] remove comment and add test for averaging integers --- src/plugins/average.rs | 37 +++++++++++++++++-------------------- tests/filters_test.rs | 15 +++++++++++++++ 2 files changed, 32 insertions(+), 20 deletions(-) diff --git a/src/plugins/average.rs b/src/plugins/average.rs index 8f82a23d10..f78078450a 100644 --- a/src/plugins/average.rs +++ b/src/plugins/average.rs @@ -88,27 +88,24 @@ impl Plugin for Average { fn end_filter(&mut self) -> Result, ShellError> { match self.total { None => Ok(vec![]), - Some(ref inner) => { - match inner.item() { - Value::Primitive(Primitive::Int(i)) => { - let total: u64 = i - .tagged(inner.tag.clone()) - .coerce_into("converting for average")?; - let avg = total as f64 / self.count as f64; - let primitive_value: Value = 
Primitive::from(avg).into(); - let tagged_value = primitive_value.tagged(inner.tag.clone()); - Ok(vec![ReturnSuccess::value(tagged_value)]) - } - Value::Primitive(Primitive::Bytes(bytes)) => { - // let total: u64 = b.tagged(inner.tag.clone()).coerce_into("converting for average")?; - let avg = *bytes as f64 / self.count as f64; - let primitive_value: Value = Primitive::from(avg).into(); - let tagged_value = primitive_value.tagged(inner.tag.clone()); - Ok(vec![ReturnSuccess::value(tagged_value)]) - } - _ => Ok(vec![]), + Some(ref inner) => match inner.item() { + Value::Primitive(Primitive::Int(i)) => { + let total: u64 = i + .tagged(inner.tag.clone()) + .coerce_into("converting for average")?; + let avg = total as f64 / self.count as f64; + let primitive_value: Value = Primitive::from(avg).into(); + let tagged_value = primitive_value.tagged(inner.tag.clone()); + Ok(vec![ReturnSuccess::value(tagged_value)]) } - } + Value::Primitive(Primitive::Bytes(bytes)) => { + let avg = *bytes as f64 / self.count as f64; + let primitive_value: Value = Primitive::from(avg).into(); + let tagged_value = primitive_value.tagged(inner.tag.clone()); + Ok(vec![ReturnSuccess::value(tagged_value)]) + } + _ => Ok(vec![]), + }, } } } diff --git a/tests/filters_test.rs b/tests/filters_test.rs index f0d5dead61..7696b2f80b 100644 --- a/tests/filters_test.rs +++ b/tests/filters_test.rs @@ -579,6 +579,21 @@ fn can_sum() { assert_eq!(actual, "203") } +#[test] +fn can_average() { + let actual = nu!( + cwd: "tests/fixtures/formats", h::pipeline( + r#" + open sgml_description.json + | get glossary.GlossDiv.GlossList.GlossEntry.Sections + | average + | echo $it + "# + )); + + assert_eq!(actual, "101.5000000000000") +} + #[test] fn can_filter_by_unit_size_comparison() { let actual = nu!( From f9fbb0eb3c3a4482afd5eaaebb90c6a743fa6aaf Mon Sep 17 00:00:00 2001 From: "notryanb@gmail.com" Date: Wed, 16 Oct 2019 20:40:19 -0400 Subject: [PATCH 075/184] add docs for average and give more specific examples for 
sum --- docs/commands/average.md | 42 ++++++++++++++++++++++++++++++++++++++++ docs/commands/sum.md | 42 ++++++++++++++++++++++++---------------- 2 files changed, 67 insertions(+), 17 deletions(-) create mode 100644 docs/commands/average.md diff --git a/docs/commands/average.md b/docs/commands/average.md new file mode 100644 index 0000000000..701ad6091b --- /dev/null +++ b/docs/commands/average.md @@ -0,0 +1,42 @@ +# average This command allows you to calculate the average of values in a column. ## Examples +To get the average of the file sizes in a directory, simply pipe the size column from the ls command to the sum command. + +```shell +> ls | get size | average +━━━━━━━━━ + +━━━━━━━━━ +2282.727272727273 +━━━━━━━━━ +``` + +```shell +> pwd | split-row / | size | get chars | average +━━━━━━━━━ + +━━━━━━━━━ +5.250000000000000 +━━━━━━━━━ +``` + +Note that average only works for integer and byte values at the moment, and if the shell doesn't recognize the values in a column as one of those types, it will return an error. +One way to solve this is to convert each row to an integer and then pipe the result to `average` + +```shell +> open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | average +error: Unrecognized type in stream: Primitive(String("2509000000")) +- shell:1:0 +1 | open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | average + | ^^^^ source +``` + +```shell +> open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | str --to-int | average +━━━━━━━━━━━━━━━━━━━ + +─────────────────── + 3239404444.000000 +━━━━━━━━━━━━━━━━━━━ +``` + + diff --git a/docs/commands/sum.md b/docs/commands/sum.md index f5c59848dd..f20dcb5f37 100644 --- a/docs/commands/sum.md +++ b/docs/commands/sum.md @@ -1,9 +1,4 @@ -# sum - -This command allows you to calculate the sum of values in a column. - -## Examples - +# sum This command allows you to calculate the sum of values in a column. 
## Examples To get the sum of the file sizes in a directory, simply pipe the size column from the ls command to the sum command. ```shell @@ -15,21 +10,34 @@ To get the sum of the file sizes in a directory, simply pipe the size column fro ━━━━━━━━━ ``` +To get the sum of the characters in your present working directory. +```shell +> pwd | split-row / | size | get chars | sum +━━━━━━━━━ + +━━━━━━━━━ +21 +━━━━━━━━━ +``` + + + Note that sum only works for integer and byte values at the moment, and if the shell doesn't recognize the values in a column as one of those types, it will return an error. +One way to solve this is to convert each row to an integer and then pipe the result to `sum` ```shell -> open example.csv -━━━┯━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━ - # │ fruit │ amount │ quality -───┼─────────┼────────┼────────── - 0 │ apples │ 1 │ fresh - 1 │ bananas │ 2 │ old - 2 │ oranges │ 7 │ fresh - 3 │ kiwis │ 25 │ rotten -━━━┷━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━ +> open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | average +error: Unrecognized type in stream: Primitive(String("2509000000")) +- shell:1:0 +1 | open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | sum + | ^^^^ source ``` ```shell -> open example.csv | get amount | sum -error: Unrecognized type in stream: Primitive(String("1")) +> open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | str --to-int | sum +━━━━━━━━━━━━━ + +───────────── + 29154639996 +━━━━━━━━━━━━━ ``` From 2f5eeab56745ba96662f05aade730e45bf74d796 Mon Sep 17 00:00:00 2001 From: "notryanb@gmail.com" Date: Wed, 16 Oct 2019 20:45:23 -0400 Subject: [PATCH 076/184] fix typos and incorrect commands --- docs/commands/average.md | 11 +++++++---- docs/commands/sum.md | 15 ++++++++------- 2 files changed, 15 insertions(+), 11 deletions(-) diff --git a/docs/commands/average.md b/docs/commands/average.md index 701ad6091b..d4095e518f 100644 --- a/docs/commands/average.md +++ b/docs/commands/average.md @@ -1,5 +1,8 @@ -# 
average This command allows you to calculate the average of values in a column. ## Examples -To get the average of the file sizes in a directory, simply pipe the size column from the ls command to the sum command. +# average +This command allows you to calculate the average of values in a column. + +## Examples +To get the average of the file sizes in a directory, simply pipe the size column from the ls command to the average command. ```shell > ls | get size | average @@ -19,8 +22,8 @@ To get the average of the file sizes in a directory, simply pipe the size column ━━━━━━━━━ ``` -Note that average only works for integer and byte values at the moment, and if the shell doesn't recognize the values in a column as one of those types, it will return an error. -One way to solve this is to convert each row to an integer and then pipe the result to `average` +Note that average only works for integer and byte values. If the shell doesn't recognize the values in a column as one of those types, it will return an error. +One way to solve this is to convert each row to an integer when possible and then pipe the result to `average` ```shell > open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | average diff --git a/docs/commands/sum.md b/docs/commands/sum.md index f20dcb5f37..7482ca0c54 100644 --- a/docs/commands/sum.md +++ b/docs/commands/sum.md @@ -1,4 +1,7 @@ -# sum This command allows you to calculate the sum of values in a column. ## Examples +# sum +This command allows you to calculate the sum of values in a column. + +## Examples To get the sum of the file sizes in a directory, simply pipe the size column from the ls command to the sum command. ```shell @@ -10,7 +13,7 @@ To get the sum of the file sizes in a directory, simply pipe the size column fro ━━━━━━━━━ ``` -To get the sum of the characters in your present working directory. +To get the sum of the characters that make up your present working directory. 
```shell > pwd | split-row / | size | get chars | sum ━━━━━━━━━ @@ -20,13 +23,11 @@ To get the sum of the characters in your present working directory. ━━━━━━━━━ ``` - - -Note that sum only works for integer and byte values at the moment, and if the shell doesn't recognize the values in a column as one of those types, it will return an error. -One way to solve this is to convert each row to an integer and then pipe the result to `sum` +Note that sum only works for integer and byte values. If the shell doesn't recognize the values in a column as one of those types, it will return an error. +One way to solve this is to convert each row to an integer when possible and then pipe the result to `sum` ```shell -> open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | average +> open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | sum error: Unrecognized type in stream: Primitive(String("2509000000")) - shell:1:0 1 | open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | sum From 4f91d2512a8952be06738d9797a084dc32c75734 Mon Sep 17 00:00:00 2001 From: "notryanb@gmail.com" Date: Wed, 16 Oct 2019 20:45:37 -0400 Subject: [PATCH 077/184] add a test to calculate average of bytes --- tests/filters_test.rs | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/tests/filters_test.rs b/tests/filters_test.rs index 7696b2f80b..0754f76d20 100644 --- a/tests/filters_test.rs +++ b/tests/filters_test.rs @@ -580,7 +580,7 @@ fn can_sum() { } #[test] -fn can_average() { +fn can_average_numbers() { let actual = nu!( cwd: "tests/fixtures/formats", h::pipeline( r#" @@ -594,6 +594,16 @@ fn can_average() { assert_eq!(actual, "101.5000000000000") } +#[test] +fn can_average_bytes() { + let actual = nu!( + cwd: "tests/fixtures/formats", + "ls | get size | average | echo $it" + ); + + assert_eq!(actual, "2282.727272727273"); +} + #[test] fn can_filter_by_unit_size_comparison() { let actual = nu!( From 
9eda573a434bc4d9fe41d04815c9662256eba1d0 Mon Sep 17 00:00:00 2001 From: "notryanb@gmail.com" Date: Fri, 18 Oct 2019 20:43:07 -0400 Subject: [PATCH 078/184] filter out the files that have the same size on multiple operating systems --- tests/filters_test.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/filters_test.rs b/tests/filters_test.rs index 0754f76d20..1eb55448b7 100644 --- a/tests/filters_test.rs +++ b/tests/filters_test.rs @@ -598,10 +598,10 @@ fn can_average_numbers() { fn can_average_bytes() { let actual = nu!( cwd: "tests/fixtures/formats", - "ls | get size | average | echo $it" + "ls | sort-by name | skip 1 | first 2 | get size | average | echo $it" ); - assert_eq!(actual, "2282.727272727273"); + assert_eq!(actual, "1600.000000000000"); } #[test] From 74dddc880d451b879dd43beae087c7ffa643a377 Mon Sep 17 00:00:00 2001 From: jdvr Date: Sat, 19 Oct 2019 12:25:48 +0200 Subject: [PATCH 079/184] "#194 Added trash switch checked before normal rm command action" --- src/shell/filesystem_shell.rs | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/src/shell/filesystem_shell.rs b/src/shell/filesystem_shell.rs index 3cf9afab21..a7d6a42248 100644 --- a/src/shell/filesystem_shell.rs +++ b/src/shell/filesystem_shell.rs @@ -944,14 +944,12 @@ impl Shell for FilesystemShell { )); } - if path.is_dir() { + if trash.item { + SendToTrash::remove(path).unwrap(); + } else if path.is_dir() { std::fs::remove_dir_all(&path)?; } else if path.is_file() { - if trash.item { - SendToTrash::remove(path).unwrap(); - } else { - std::fs::remove_file(&path)?; - } + std::fs::remove_file(&path)?; } } Err(e) => { From c209d0d487dd72882af52ffa9850b5dbccb63978 Mon Sep 17 00:00:00 2001 From: jdvr Date: Sat, 19 Oct 2019 22:52:39 +0200 Subject: [PATCH 080/184] 194 Fixed file format --- src/shell/filesystem_shell.rs | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/shell/filesystem_shell.rs 
b/src/shell/filesystem_shell.rs index a7d6a42248..7b8310141c 100644 --- a/src/shell/filesystem_shell.rs +++ b/src/shell/filesystem_shell.rs @@ -8,11 +8,11 @@ use crate::prelude::*; use crate::shell::completer::NuCompleter; use crate::shell::shell::Shell; use crate::utils::FileStructure; -use trash as SendToTrash; use rustyline::completion::FilenameCompleter; use rustyline::hint::{Hinter, HistoryHinter}; use std::path::{Path, PathBuf}; use std::sync::atomic::Ordering; +use trash as SendToTrash; pub struct FilesystemShell { pub(crate) path: String, @@ -861,7 +861,11 @@ impl Shell for FilesystemShell { fn rm( &self, - RemoveArgs { target, recursive, trash }: RemoveArgs, + RemoveArgs { + target, + recursive, + trash, + }: RemoveArgs, name: Tag, path: &str, ) -> Result { From f24bc5c826a7fe67ed0e514a43eb92103e0d6e29 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Sun, 20 Oct 2019 06:55:56 -0500 Subject: [PATCH 081/184] Improvements to Value mutable operations. --- src/data/base.rs | 336 ++++++++++++++++++++++++++++------------------- src/data/dict.rs | 11 ++ 2 files changed, 210 insertions(+), 137 deletions(-) diff --git a/src/data/base.rs b/src/data/base.rs index c95ee26e86..2cf1f2cedb 100644 --- a/src/data/base.rs +++ b/src/data/base.rs @@ -497,6 +497,28 @@ impl Value { } } + pub(crate) fn get_mut_data_by_key(&mut self, name: &str) -> Option<&mut Tagged> { + match self { + Value::Row(ref mut o) => o.get_mut_data_by_key(name), + Value::Table(ref mut l) => { + for item in l { + match item { + Tagged { + item: Value::Row(ref mut o), + .. 
+ } => match o.get_mut_data_by_key(name) { + Some(v) => return Some(v), + None => {} + }, + _ => {} + } + } + None + } + _ => None, + } + } + pub fn get_data_by_column_path( &self, tag: Tag, @@ -513,18 +535,6 @@ impl Value { Some(current.tagged(tag)) } - pub fn get_data_by_path(&self, tag: Tag, path: &str) -> Option> { - let mut current = self; - for p in path.split(".") { - match current.get_data_by_key(p) { - Some(v) => current = v, - None => return None, - } - } - - Some(current.tagged(tag)) - } - pub fn insert_data_at_path( &self, tag: Tag, @@ -629,41 +639,6 @@ impl Value { None } - pub fn replace_data_at_path( - &self, - tag: Tag, - path: &str, - replaced_value: Value, - ) -> Option> { - let mut new_obj = self.clone(); - - let split_path: Vec<_> = path.split(".").collect(); - - if let Value::Row(ref mut o) = new_obj { - let mut current = o; - for idx in 0..split_path.len() { - match current.entries.get_mut(split_path[idx]) { - Some(next) => { - if idx == (split_path.len() - 1) { - *next = replaced_value.tagged(&tag); - return Some(new_obj.tagged(&tag)); - } else { - match next.item { - Value::Row(ref mut o) => { - current = o; - } - _ => return None, - } - } - } - _ => return None, - } - } - } - - None - } - pub fn replace_data_at_column_path( &self, tag: Tag, @@ -671,34 +646,20 @@ impl Value { replaced_value: Value, ) -> Option> { let mut new_obj = self.clone(); + let mut current = &mut new_obj; - if let Value::Row(ref mut o) = new_obj { - let mut current = o; - for idx in 0..split_path.len() { - match current.entries.get_mut(&split_path[idx].item) { - Some(next) => { - if idx == (split_path.len() - 1) { - *next = replaced_value.tagged(&tag); - return Some(new_obj.tagged(&tag)); - } else { - match next.item { - Value::Row(ref mut o) => { - current = o; - } - Value::Table(ref mut l) => match l.get_mut(0) { - Some(Tagged { - item: Value::Row(ref mut dict), - .. 
- }) => { - current = dict; - } - _ => return None, - }, - _ => return None, - } - } + for idx in 0..split_path.len() { + match current.get_mut_data_by_key(&split_path[idx].item) { + Some(next) => { + if idx == (split_path.len() - 1) { + *next = replaced_value.tagged(&tag); + return Some(new_obj.tagged(&tag)); + } else { + current = &mut next.item; } - _ => return None, + } + None => { + return None; } } } @@ -785,13 +746,7 @@ impl Value { } pub fn table(list: &Vec>) -> Value { - let mut out = vec![]; - - for v in list { - out.push(v.clone()); - } - - Value::Table(out) + Value::Table(list.to_vec()) } pub fn string(s: impl Into) -> Value { @@ -972,86 +927,193 @@ mod tests { } fn column_path(paths: &Vec>) -> Tagged>> { - let paths = paths - .iter() - .map(|p| string(p.as_string().unwrap())) - .collect(); - let table = table(&paths); - table.as_column_path().unwrap() + table( + &paths + .iter() + .map(|p| string(p.as_string().unwrap())) + .collect(), + ) + .as_column_path() + .unwrap() } + #[test] - fn gets_the_matching_field_from_a_row() { - let field = "amigos"; + fn gets_matching_field_from_a_row() { + let row = Value::row(indexmap! { + "amigos".into() => table(&vec![string("andres"),string("jonathan"),string("yehuda")]) + }); + + assert_eq!( + *row.get_data_by_key("amigos").unwrap(), + table(&vec![ + string("andres"), + string("jonathan"), + string("yehuda") + ]) + ); + } + + #[test] + fn gets_matching_field_from_nested_rows_inside_a_row() { + let field_path = column_path(&vec![string("package"), string("version")]); + + let (version, tag) = string("0.4.0").into_parts(); let row = Value::row(indexmap! { - field.into() => table(&vec![ + "package".into() => + row(indexmap! 
{ + "name".into() => string("nu"), + "version".into() => string("0.4.0") + }) + }); + + assert_eq!( + **row.get_data_by_column_path(tag, &field_path).unwrap(), + version + ) + } + + #[test] + fn gets_first_matching_field_from_rows_with_same_field_inside_a_table() { + let field_path = column_path(&vec![string("package"), string("authors"), string("name")]); + + let (name, tag) = string("Andrés N. Robalino").into_parts(); + + let row = Value::row(indexmap! { + "package".into() => row(indexmap! { + "name".into() => string("nu"), + "version".into() => string("0.4.0"), + "authors".into() => table(&vec![ + row(indexmap!{"name".into() => string("Andrés N. Robalino")}), + row(indexmap!{"name".into() => string("Jonathan Turner")}), + row(indexmap!{"name".into() => string("Yehuda Katz")}) + ]) + }) + }); + + assert_eq!( + **row.get_data_by_column_path(tag, &field_path).unwrap(), + name + ) + } + + #[test] + fn replaces_matching_field_from_a_row() { + let field_path = column_path(&vec![string("amigos")]); + + let sample = Value::row(indexmap! { + "amigos".into() => table(&vec![ string("andres"), string("jonathan"), string("yehuda"), ]), }); - assert_eq!( - table(&vec![ - string("andres"), - string("jonathan"), - string("yehuda") - ]), - *row.get_data_by_key(field).unwrap() - ); + let (replacement, tag) = string("jonas").into_parts(); + + let actual = sample + .replace_data_at_column_path(tag, &field_path, replacement) + .unwrap(); + + assert_eq!(actual, row(indexmap! {"amigos".into() => string("jonas")})); } #[test] - fn gets_the_first_row_with_matching_field_from_rows_inside_a_table() { - let field = "name"; - - let table = Value::table(&vec![ - row(indexmap! {field.into() => string("andres")}), - row(indexmap! {field.into() => string("jonathan")}), - row(indexmap! 
{field.into() => string("yehuda")}), + fn replaces_matching_field_from_nested_rows_inside_a_row() { + let field_path = column_path(&vec![ + string("package"), + string("authors"), + string("los.3.caballeros"), ]); - assert_eq!(string("andres"), *table.get_data_by_key(field).unwrap()); - } - - #[test] - fn gets_the_matching_field_from_nested_rows_inside_a_row() { - let _field = "package.version"; - let field = vec![string("package"), string("version")]; - let field = column_path(&field); - - let (version, tag) = string("0.4.0").into_parts(); - - let row = Value::row(indexmap! { - "package".into() => row(indexmap!{ - "name".into() => string("nu"), - "version".into() => string("0.4.0"), + let sample = Value::row(indexmap! { + "package".into() => row(indexmap! { + "authors".into() => row(indexmap! { + "los.3.mosqueteros".into() => table(&vec![string("andres::yehuda::jonathan")]), + "los.3.amigos".into() => table(&vec![string("andres::yehuda::jonathan")]), + "los.3.caballeros".into() => table(&vec![string("andres::yehuda::jonathan")]) + }) }) }); - assert_eq!(version, **row.get_data_by_column_path(tag, &field).unwrap()) + let (replacement, tag) = table(&vec![string("yehuda::jonathan::andres")]).into_parts(); + + let actual = sample + .replace_data_at_column_path(tag.clone(), &field_path, replacement.clone()) + .unwrap(); + + assert_eq!( + actual, + Value::row(indexmap! { + "package".into() => row(indexmap! { + "authors".into() => row(indexmap! 
{ + "los.3.mosqueteros".into() => table(&vec![string("andres::yehuda::jonathan")]), + "los.3.amigos".into() => table(&vec![string("andres::yehuda::jonathan")]), + "los.3.caballeros".into() => replacement.tagged(&tag)})})}) + .tagged(tag) + ); } - #[test] - fn gets_the_first_row_with_matching_field_from_nested_rows_inside_a_table() { - let _field = "package.authors.name"; - let field = vec![string("package"), string("authors"), string("name")]; - let field = column_path(&field); + fn replaces_matching_field_from_rows_inside_a_table() { + let field_path = column_path(&vec![ + string("shell_policy"), + string("releases"), + string("nu.version.arepa"), + ]); - let (name, tag) = string("Andrés N. Robalino").into_parts(); - - let row = Value::row(indexmap! { - "package".into() => row(indexmap!{ - "authors".into() => table(&vec![ - row(indexmap!{"name".into()=> string("Andrés N. Robalino")}), - row(indexmap!{"name".into()=> string("Jonathan Turner")}), - row(indexmap!{"name".into() => string("Yehuda Katz")}) - ]), - "name".into() => string("nu"), - "version".into() => string("0.4.0"), + let sample = Value::row(indexmap! { + "shell_policy".into() => row(indexmap! { + "releases".into() => table(&vec![ + row(indexmap! { + "nu.version.arepa".into() => row(indexmap! { + "code".into() => string("0.4.0"), "tag_line".into() => string("GitHub-era") + }) + }), + row(indexmap! { + "nu.version.taco".into() => row(indexmap! { + "code".into() => string("0.3.0"), "tag_line".into() => string("GitHub-era") + }) + }), + row(indexmap! { + "nu.version.stable".into() => row(indexmap! { + "code".into() => string("0.2.0"), "tag_line".into() => string("GitHub-era") + }) + }) + ]) }) }); - assert_eq!(name, **row.get_data_by_column_path(tag, &field).unwrap()) + let (replacement, tag) = row(indexmap! 
{ + "code".into() => string("0.5.0"), + "tag_line".into() => string("CABALLEROS") + }) + .into_parts(); + + let actual = sample + .replace_data_at_column_path(tag.clone(), &field_path, replacement.clone()) + .unwrap(); + + assert_eq!( + actual, + Value::row(indexmap! { + "shell_policy".into() => row(indexmap! { + "releases".into() => table(&vec![ + row(indexmap! { + "nu.version.arepa".into() => replacement.tagged(&tag) + }), + row(indexmap! { + "nu.version.taco".into() => row(indexmap! { + "code".into() => string("0.3.0"), "tag_line".into() => string("GitHub-era") + }) + }), + row(indexmap! { + "nu.version.stable".into() => row(indexmap! { + "code".into() => string("0.2.0"), "tag_line".into() => string("GitHub-era") + }) + }) + ]) + }) + }).tagged(&tag) + ); } } diff --git a/src/data/dict.rs b/src/data/dict.rs index 8f9bb556ba..432170f361 100644 --- a/src/data/dict.rs +++ b/src/data/dict.rs @@ -89,6 +89,17 @@ impl Dictionary { } } + pub(crate) fn get_mut_data_by_key(&mut self, name: &str) -> Option<&mut Tagged> { + match self + .entries + .iter_mut() + .find(|(desc_name, _)| *desc_name == name) + { + Some((_, v)) => Some(v), + None => None, + } + } + pub(crate) fn debug(&self, f: &mut fmt::Formatter) -> fmt::Result { let mut debug = f.debug_struct("Dictionary"); From 0611f56776e68254712185a8d25697e8a31c2aa3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Sun, 20 Oct 2019 18:42:07 -0500 Subject: [PATCH 082/184] Can group cells by given column name. 
--- README.md | 2 ++ src/cli.rs | 1 + src/commands.rs | 2 ++ src/commands/count.rs | 2 +- src/commands/group_by.rs | 59 ++++++++++++++++++++++++++++++++++++++++ tests/commands_test.rs | 28 +++++++++++++++++++ 6 files changed, 93 insertions(+), 1 deletion(-) create mode 100644 src/commands/group_by.rs diff --git a/README.md b/README.md index cf36fd2cb1..c391b59903 100644 --- a/README.md +++ b/README.md @@ -249,10 +249,12 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat | command | description | | ------------- | ------------- | | add column-or-column-path value | Add a new column to the table | +| count | Show the total number of cells | | edit column-or-column-path value | Edit an existing column to have a new value | | embed column | Creates a new table of one column with the given name, and places the current table inside of it | | first amount | Show only the first number of rows | | get column-or-column-path | Open column and get data from the corresponding cells | +| group-by column | Creates a new table with the data from the table rows grouped by the column given | | inc (column-or-column-path) | Increment a value or version. 
Optionally use the column of a table | | last amount | Show only the last number of rows | | nth row-number | Return only the selected row | diff --git a/src/cli.rs b/src/cli.rs index ad3eb8d39b..e88ee054fe 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -275,6 +275,7 @@ pub async fn cli() -> Result<(), Box> { whole_stream_command(ToURL), whole_stream_command(ToYAML), whole_stream_command(SortBy), + whole_stream_command(GroupBy), whole_stream_command(Tags), whole_stream_command(Count), whole_stream_command(First), diff --git a/src/commands.rs b/src/commands.rs index 0b155891cc..7f0fa0a25a 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -30,6 +30,7 @@ pub(crate) mod from_url; pub(crate) mod from_xml; pub(crate) mod from_yaml; pub(crate) mod get; +pub(crate) mod group_by; pub(crate) mod help; pub(crate) mod last; pub(crate) mod lines; @@ -103,6 +104,7 @@ pub(crate) use from_xml::FromXML; pub(crate) use from_yaml::FromYAML; pub(crate) use from_yaml::FromYML; pub(crate) use get::Get; +pub(crate) use group_by::GroupBy; pub(crate) use help::Help; pub(crate) use last::Last; pub(crate) use lines::Lines; diff --git a/src/commands/count.rs b/src/commands/count.rs index 5e44283737..6fe5a94633 100644 --- a/src/commands/count.rs +++ b/src/commands/count.rs @@ -20,7 +20,7 @@ impl WholeStreamCommand for Count { } fn usage(&self) -> &str { - "Show the total number of rows." + "Show the total number of cells." 
} fn run( diff --git a/src/commands/group_by.rs b/src/commands/group_by.rs new file mode 100644 index 0000000000..e08ebb2afb --- /dev/null +++ b/src/commands/group_by.rs @@ -0,0 +1,59 @@ +use crate::commands::WholeStreamCommand; +use crate::data::TaggedDictBuilder; +use crate::errors::ShellError; +use crate::prelude::*; + +pub struct GroupBy; + +#[derive(Deserialize)] +pub struct GroupByArgs { + column_name: Tagged, +} + +impl WholeStreamCommand for GroupBy { + fn name(&self) -> &str { + "group-by" + } + + fn signature(&self) -> Signature { + Signature::build("group-by").required("column_name", SyntaxShape::String) + } + + fn usage(&self) -> &str { + "Creates a new table with the data from the table rows grouped by the column given." + } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + args.process(registry, group_by)?.run() + } +} + +fn group_by( + GroupByArgs { column_name }: GroupByArgs, + RunnableContext { input, name, .. }: RunnableContext, +) -> Result { + let stream = async_stream! 
{ + let values: Vec> = input.values.collect().await; + let mut groups = indexmap::IndexMap::new(); + + for row in values { + let key = row.get_data_by_key(&column_name.item).unwrap().as_string()?; + let mut group = groups.entry(key).or_insert(vec![]); + group.push(row); + } + + let mut out = TaggedDictBuilder::new(name.clone()); + + for (k,v) in groups.iter() { + out.insert(k, Value::table(v)); + } + + yield ReturnSuccess::value(out) + }; + + Ok(stream.to_output_stream()) +} diff --git a/tests/commands_test.rs b/tests/commands_test.rs index 4d6fa84a65..7733942811 100644 --- a/tests/commands_test.rs +++ b/tests/commands_test.rs @@ -3,6 +3,34 @@ mod helpers; use helpers as h; use helpers::{Playground, Stub::*}; +#[test] +fn group_by() { + Playground::setup("group_by_test_1", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContentToBeTrimmed( + "los_tres_caballeros.csv", + r#" + first_name,last_name,rusty_luck,type + Andrés,Robalino,1,A + Jonathan,Turner,1,B + Yehuda,Katz,1,A + "#, + )]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + open los_tres_caballeros.csv + | group-by type + | get A + | count + | echo $it + "# + )); + + assert_eq!(actual, "2"); + }) +} + #[test] fn first_gets_first_rows_by_amount() { Playground::setup("first_test_1", |dirs, sandbox| { From 39fde52d8e1eb2f1607fda68758b9917c4514da3 Mon Sep 17 00:00:00 2001 From: Charles Schleich Date: Mon, 21 Oct 2019 17:59:20 +0200 Subject: [PATCH 083/184] added Docs for sort-by command --- docs/commands/sort-by.md | 56 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 56 insertions(+) create mode 100644 docs/commands/sort-by.md diff --git a/docs/commands/sort-by.md b/docs/commands/sort-by.md new file mode 100644 index 0000000000..1f0f3da9ed --- /dev/null +++ b/docs/commands/sort-by.md @@ -0,0 +1,56 @@ + +# env + +The `sort-by` command sorts the table being displayed in the terminal by a chosen column(s). 
+ +`sort-by` takes multiple arguments (being the names of columns) sorting by each argument in order. + + +## Examples - + +```shell +/home/example> ls | sort-by size +━━━┯━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━ + # │ name │ type │ readonly │ size │ accessed │ modified +───┼──────┼──────┼──────────┼────────┼────────────────┼──────────────── + 0 │ az │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago + 1 │ a │ File │ │ 18 B │ 4 minutes ago │ 38 minutes ago + 2 │ ad │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago + 3 │ ac │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago + 4 │ ab │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago + 5 │ c │ File │ │ 102 B │ 35 minutes ago │ 35 minutes ago + 6 │ d │ File │ │ 189 B │ 35 minutes ago │ 34 minutes ago + 7 │ b │ File │ │ 349 B │ 35 minutes ago │ 35 minutes ago +━━━┷━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━ +``` + +```shell +/home/example> ls | sort-by size name +━━━┯━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━ + # │ name │ type │ readonly │ size │ accessed │ modified +───┼──────┼──────┼──────────┼────────┼────────────────┼──────────────── + 0 │ a │ File │ │ 18 B │ 4 minutes ago │ 39 minutes ago + 1 │ ab │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago + 2 │ ac │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago + 3 │ ad │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago + 4 │ az │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago + 5 │ c │ File │ │ 102 B │ 36 minutes ago │ 35 minutes ago + 6 │ d │ File │ │ 189 B │ 35 minutes ago │ 35 minutes ago + 7 │ b │ File │ │ 349 B │ 36 minutes ago │ 36 minutes ago +``` + +``` +/home/example> ls | sort-by accessed +━━━┯━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━ + # │ name │ type │ readonly │ size │ accessed │ modified +───┼──────┼──────┼──────────┼────────┼────────────────┼──────────────── + 0 │ b │ File │ │ 349 B │ 37 minutes ago │ 37 minutes ago + 1 │ c │ File │ │ 102 B │ 37 minutes ago │ 37 minutes ago 
+ 2 │ d │ File │ │ 189 B │ 37 minutes ago │ 36 minutes ago + 3 │ a │ File │ │ 18 B │ 6 minutes ago │ 40 minutes ago + 4 │ ab │ File │ │ 18 B │ 6 minutes ago │ 6 minutes ago + 5 │ ac │ File │ │ 18 B │ 6 minutes ago │ 6 minutes ago + 6 │ ad │ File │ │ 18 B │ 5 minutes ago │ 5 minutes ago + 7 │ az │ File │ │ 18 B │ 5 minutes ago │ 5 minutes ago +━━━┷━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━ +``` \ No newline at end of file From 4329629ee9222e1cd3ff5c108628f89e686b21d6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Tue, 22 Oct 2019 03:43:39 -0500 Subject: [PATCH 084/184] baseline coverage for xml parsing. --- src/commands/from_xml.rs | 70 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 70 insertions(+) diff --git a/src/commands/from_xml.rs b/src/commands/from_xml.rs index 0425eb408b..e99e5664e5 100644 --- a/src/commands/from_xml.rs +++ b/src/commands/from_xml.rs @@ -134,3 +134,73 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result) -> Tagged { + Value::string(input.into()).tagged_unknown() + } + + fn row(entries: IndexMap>) -> Tagged { + Value::row(entries).tagged_unknown() + } + + fn table(list: &Vec>) -> Tagged { + Value::table(list).tagged_unknown() + } + + fn parse(xml: &str) -> Tagged { + from_xml::from_xml_string_to_value(xml.to_string(), Tag::unknown()).unwrap() + } + + #[test] + fn parses_empty_element() { + let source = ""; + + assert_eq!( + parse(source), + row(indexmap! { + "nu".into() => table(&vec![]) + }) + ); + } + + #[test] + fn parses_element_with_text() { + let source = "La era de los tres caballeros"; + + assert_eq!( + parse(source), + row(indexmap! { + "nu".into() => table(&vec![string("La era de los tres caballeros")]) + }) + ); + } + + #[test] + fn parses_element_with_elements() { + let source = "\ + + Andrés + Jonathan + Yehuda +"; + + assert_eq!( + parse(source), + row(indexmap! { + "nu".into() => table(&vec![ + row(indexmap! 
{"dev".into() => table(&vec![string("Andrés")])}), + row(indexmap! {"dev".into() => table(&vec![string("Jonathan")])}), + row(indexmap! {"dev".into() => table(&vec![string("Yehuda")])}) + ]) + }) + ); + } +} From 8f035616a0be57646231cd18deb475965e4778b0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Antti=20Ker=C3=A4nen?= Date: Tue, 22 Oct 2019 15:21:34 +0300 Subject: [PATCH 085/184] Fix `enter` crashing on nonexistent file Fixes #839 --- src/commands/enter.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/commands/enter.rs b/src/commands/enter.rs index 4a400241e8..efefd8394f 100644 --- a/src/commands/enter.rs +++ b/src/commands/enter.rs @@ -1,7 +1,6 @@ use crate::commands::command::CommandAction; use crate::commands::PerItemCommand; use crate::commands::UnevaluatedCallInfo; -use crate::data::meta::Span; use crate::errors::ShellError; use crate::parser::registry; use crate::prelude::*; @@ -34,10 +33,12 @@ impl PerItemCommand for Enter { match call_info.args.expect_nth(0)? { Tagged { item: Value::Primitive(Primitive::Path(location)), + tag, .. } => { let location_string = location.display().to_string(); let location_clone = location_string.clone(); + let tag_clone = tag.clone(); if location.starts_with("help") { let spec = location_string.split(":").collect::>(); @@ -71,9 +72,8 @@ impl PerItemCommand for Enter { crate::commands::open::fetch( &full_path, &location_clone, - Span::unknown(), - ) - .await.unwrap(); + tag_clone.span, + ).await?; match contents { Value::Primitive(Primitive::String(_)) => { From a317072e4e40d00632f2a66c9b2b408fc379e31e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Tue, 22 Oct 2019 08:08:24 -0500 Subject: [PATCH 086/184] Cover failure not found files cases. 
--- tests/command_enter_test.rs | 13 +++++++++++++ tests/command_open_tests.rs | 1 + 2 files changed, 14 insertions(+) diff --git a/tests/command_enter_test.rs b/tests/command_enter_test.rs index fe22b56dbe..fc4a437a23 100644 --- a/tests/command_enter_test.rs +++ b/tests/command_enter_test.rs @@ -73,3 +73,16 @@ fn knows_the_filesystems_entered() { )); }) } + +#[test] +fn errors_if_file_not_found() { + Playground::setup("enter_test_2", |dirs, _| { + let actual = nu_error!( + cwd: dirs.test(), + "enter i_dont_exist.csv" + ); + + assert!(actual.contains("File could not be opened")); + assert!(actual.contains("file not found")); + }) +} diff --git a/tests/command_open_tests.rs b/tests/command_open_tests.rs index 53e393eef4..48f438f3d6 100644 --- a/tests/command_open_tests.rs +++ b/tests/command_open_tests.rs @@ -226,4 +226,5 @@ fn errors_if_file_not_found() { ); assert!(actual.contains("File could not be opened")); + assert!(actual.contains("file not found")); } From 6a7c00eaefc412dd049149b5b2dbe4aff55051ce Mon Sep 17 00:00:00 2001 From: Yehuda Katz Date: Mon, 21 Oct 2019 08:18:43 -0700 Subject: [PATCH 087/184] Finish the job of moving shapes into the stream This commit should finish the `coloring_in_tokens` feature, which moves the shape accumulator into the token stream. This allows rollbacks of the token stream to also roll back any shapes that were added. This commit also adds a much nicer syntax highlighter trace, which shows all of the paths the highlighter took to arrive at a particular coloring output. This change is fairly substantial, but really improves the understandability of the flow. I intend to update the normal parser with a similar tracing view. In general, this change also fleshes out the concept of "atomic" token stream operations. A good next step would be to try to make the parser more error-correcting, using the coloring infrastructure. A follow-up step would involve merging the parser and highlighter shapes themselves. 
--- Cargo.toml | 4 +- src/fuzzysearch.rs | 4 +- src/main.rs | 3 - src/parser.rs | 1 - src/parser/hir/expand_external_tokens.rs | 10 +- src/parser/hir/syntax_shape.rs | 321 +++++++--------- src/parser/hir/syntax_shape/block.rs | 27 +- src/parser/hir/syntax_shape/expression.rs | 18 +- .../hir/syntax_shape/expression/delimited.rs | 5 + .../hir/syntax_shape/expression/file_path.rs | 6 +- .../hir/syntax_shape/expression/list.rs | 24 +- .../hir/syntax_shape/expression/number.rs | 12 +- .../hir/syntax_shape/expression/pattern.rs | 4 + .../hir/syntax_shape/expression/string.rs | 6 +- .../syntax_shape/expression/variable_path.rs | 37 +- src/parser/hir/tokens_iterator.rs | 311 +++++++++++---- src/parser/hir/tokens_iterator/debug.rs | 359 +++++++++++++++++- src/parser/parse/parser.rs | 9 - src/parser/parse/pipeline.rs | 4 +- src/parser/parse_command.rs | 28 +- src/shell/helper.rs | 45 +-- tests/commands_test.rs | 11 +- 22 files changed, 888 insertions(+), 361 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index ce101b52df..29205d9af5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -84,7 +84,7 @@ heim = {version = "0.0.8", optional = true } battery = {version = "0.7.4", optional = true } rawkey = {version = "0.1.2", optional = true } clipboard = {version = "0.5", optional = true } -ptree = {version = "0.2", optional = true } +ptree = {version = "0.2" } image = { version = "0.22.2", default_features = false, features = ["png_codec", "jpeg"], optional = true } [features] @@ -95,7 +95,7 @@ binaryview = ["image", "crossterm"] sys = ["heim", "battery"] ps = ["heim"] # trace = ["nom-tracable/trace"] -all = ["raw-key", "textview", "binaryview", "sys", "ps", "clipboard", "ptree"] +all = ["raw-key", "textview", "binaryview", "sys", "ps", "clipboard"] [dependencies.rusqlite] version = "0.20.0" diff --git a/src/fuzzysearch.rs b/src/fuzzysearch.rs index 5cb08dd3f5..c7d58ed632 100644 --- a/src/fuzzysearch.rs +++ b/src/fuzzysearch.rs @@ -73,9 +73,7 @@ pub fn interactive_fuzzy_search(lines: 
&Vec<&str>, max_results: usize) -> Select searchinput.pop(); selected = 0; } - _ => { - // println!("OTHER InputEvent: {:?}", k); - } + _ => {} }, _ => {} } diff --git a/src/main.rs b/src/main.rs index 4b10944a2b..7f82808e74 100644 --- a/src/main.rs +++ b/src/main.rs @@ -3,9 +3,6 @@ use log::LevelFilter; use std::error::Error; fn main() -> Result<(), Box> { - #[cfg(feature1)] - println!("feature1 is enabled"); - let matches = App::new("nushell") .version(clap::crate_version!()) .arg( diff --git a/src/parser.rs b/src/parser.rs index 37c8c09c30..7acdf6e6bf 100644 --- a/src/parser.rs +++ b/src/parser.rs @@ -14,7 +14,6 @@ pub(crate) use parse::files::Files; pub(crate) use parse::flag::{Flag, FlagKind}; pub(crate) use parse::operator::Operator; pub(crate) use parse::parser::{nom_input, pipeline}; -pub(crate) use parse::pipeline::{Pipeline, PipelineElement}; pub(crate) use parse::text::Text; pub(crate) use parse::token_tree::{DelimitedNode, Delimiter, TokenNode}; pub(crate) use parse::tokens::{RawNumber, RawToken}; diff --git a/src/parser/hir/expand_external_tokens.rs b/src/parser/hir/expand_external_tokens.rs index e277efe2e8..5733a30c81 100644 --- a/src/parser/hir/expand_external_tokens.rs +++ b/src/parser/hir/expand_external_tokens.rs @@ -61,6 +61,10 @@ impl ColorSyntax for ExternalTokensShape { type Info = (); type Input = (); + fn name(&self) -> &'static str { + "ExternalTokensShape" + } + fn color_syntax<'a, 'b>( &self, _input: &(), @@ -192,6 +196,10 @@ impl ColorSyntax for ExternalExpression { type Info = ExternalExpressionResult; type Input = (); + fn name(&self) -> &'static str { + "ExternalExpression" + } + fn color_syntax<'a, 'b>( &self, _input: &(), @@ -212,7 +220,7 @@ impl ColorSyntax for ExternalExpression { Ok(atom) => atom, }; - atom.color_tokens(token_nodes.mut_shapes()); + token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)); return ExternalExpressionResult::Processed; } } diff --git a/src/parser/hir/syntax_shape.rs 
b/src/parser/hir/syntax_shape.rs index dc02e9373d..8a21fd79e6 100644 --- a/src/parser/hir/syntax_shape.rs +++ b/src/parser/hir/syntax_shape.rs @@ -11,16 +11,15 @@ use crate::parser::hir::expand_external_tokens::ExternalTokensShape; use crate::parser::hir::syntax_shape::block::AnyBlockShape; use crate::parser::hir::tokens_iterator::Peeked; use crate::parser::parse_command::{parse_command_tail, CommandTailShape}; -use crate::parser::PipelineElement; use crate::parser::{ hir, hir::{debug_tokens, TokensIterator}, - Operator, Pipeline, RawToken, TokenNode, + Operator, RawToken, TokenNode, }; use crate::prelude::*; use derive_new::new; use getset::Getters; -use log::{self, log_enabled, trace}; +use log::{self, trace}; use serde::{Deserialize, Serialize}; use std::path::{Path, PathBuf}; @@ -41,6 +40,11 @@ pub(crate) use self::expression::variable_path::{ pub(crate) use self::expression::{continue_expression, AnyExpressionShape}; pub(crate) use self::flat_shape::FlatShape; +#[cfg(not(coloring_in_tokens))] +use crate::parser::parse::pipeline::Pipeline; +#[cfg(not(coloring_in_tokens))] +use log::log_enabled; + #[derive(Debug, Copy, Clone, Serialize, Deserialize)] pub enum SyntaxShape { Any, @@ -110,6 +114,10 @@ impl FallibleColorSyntax for SyntaxShape { type Info = (); type Input = (); + fn name(&self) -> &'static str { + "SyntaxShape" + } + fn color_syntax<'a, 'b>( &self, _input: &(), @@ -241,6 +249,8 @@ pub trait FallibleColorSyntax: std::fmt::Debug + Copy { type Info; type Input; + fn name(&self) -> &'static str; + fn color_syntax<'a, 'b>( &self, input: &Self::Input, @@ -282,6 +292,8 @@ pub trait ColorSyntax: std::fmt::Debug + Copy { type Info; type Input; + fn name(&self) -> &'static str; + fn color_syntax<'a, 'b>( &self, input: &Self::Input, @@ -290,24 +302,6 @@ pub trait ColorSyntax: std::fmt::Debug + Copy { ) -> Self::Info; } -// impl ColorSyntax for T -// where -// T: FallibleColorSyntax, -// { -// type Info = Result; -// type Input = T::Input; - -// fn 
color_syntax<'a, 'b>( -// &self, -// input: &Self::Input, -// token_nodes: &'b mut TokensIterator<'a>, -// context: &ExpandContext, -// shapes: &mut Vec>, -// ) -> Result { -// FallibleColorSyntax::color_syntax(self, input, token_nodes, context, shapes) -// } -// } - pub(crate) trait ExpandSyntax: std::fmt::Debug + Copy { type Output: std::fmt::Debug; @@ -323,18 +317,18 @@ pub(crate) fn expand_syntax<'a, 'b, T: ExpandSyntax>( token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, ) -> Result { - trace!(target: "nu::expand_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes, context.source)); + trace!(target: "nu::expand_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes.state(), context.source)); let result = shape.expand_syntax(token_nodes, context); match result { Err(err) => { - trace!(target: "nu::expand_syntax", "error :: {} :: {:?}", err, debug_tokens(token_nodes, context.source)); + trace!(target: "nu::expand_syntax", "error :: {} :: {:?}", err, debug_tokens(token_nodes.state(), context.source)); Err(err) } Ok(result) => { - trace!(target: "nu::expand_syntax", "ok :: {:?} :: {:?}", result, debug_tokens(token_nodes, context.source)); + trace!(target: "nu::expand_syntax", "ok :: {:?} :: {:?}", result, debug_tokens(token_nodes.state(), context.source)); Ok(result) } } @@ -347,12 +341,12 @@ pub fn color_syntax<'a, 'b, T: ColorSyntax, U>( context: &ExpandContext, shapes: &mut Vec>, ) -> ((), U) { - trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes, context.source)); + trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes.state(), context.source)); let len = shapes.len(); let result = shape.color_syntax(&(), token_nodes, context, shapes); - trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source)); + trace!(target: "nu::color_syntax", "ok :: {:?}", 
debug_tokens(token_nodes.state(), context.source)); if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { trace!(target: "nu::color_syntax", "after {}", std::any::type_name::()); @@ -375,26 +369,12 @@ pub fn color_syntax<'a, 'b, T: ColorSyntax, U>( token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, ) -> ((), U) { - trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes, context.source)); - - let len = token_nodes.shapes().len(); - let result = shape.color_syntax(&(), token_nodes, context); - - trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source)); - - if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { - trace!(target: "nu::color_syntax", "after {}", std::any::type_name::()); - - if len < token_nodes.shapes().len() { - for i in len..(token_nodes.shapes().len()) { - trace!(target: "nu::color_syntax", "new shape :: {:?}", token_nodes.shapes()[i]); - } - } else { - trace!(target: "nu::color_syntax", "no new shapes"); - } - } - - ((), result) + ( + (), + token_nodes.color_frame(shape.name(), |token_nodes| { + shape.color_syntax(&(), token_nodes, context) + }), + ) } #[cfg(not(coloring_in_tokens))] @@ -404,7 +384,7 @@ pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax>, ) -> Result { - trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes, context.source)); + trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes.state(), context.source)); if token_nodes.at_end() { trace!(target: "nu::color_syntax", "at eof"); @@ -414,7 +394,7 @@ pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax()); @@ -437,31 +417,9 @@ pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax, context: &ExpandContext, ) -> Result { - trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes, 
context.source)); - - if token_nodes.at_end() { - trace!(target: "nu::color_syntax", "at eof"); - return Err(ShellError::unexpected_eof("coloring", Tag::unknown())); - } - - let len = token_nodes.shapes().len(); - let result = shape.color_syntax(&(), token_nodes, context); - - trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source)); - - if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { - trace!(target: "nu::color_syntax", "after {}", std::any::type_name::()); - - if len < token_nodes.shapes().len() { - for i in len..(token_nodes.shapes().len()) { - trace!(target: "nu::color_syntax", "new shape :: {:?}", token_nodes.shapes()[i]); - } - } else { - trace!(target: "nu::color_syntax", "no new shapes"); - } - } - - result + token_nodes.color_fallible_frame(shape.name(), |token_nodes| { + shape.color_syntax(&(), token_nodes, context) + }) } #[cfg(not(coloring_in_tokens))] @@ -472,12 +430,12 @@ pub fn color_syntax_with<'a, 'b, T: ColorSyntax, U, I>( context: &ExpandContext, shapes: &mut Vec>, ) -> ((), U) { - trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes, context.source)); + trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes.state(), context.source)); let len = shapes.len(); let result = shape.color_syntax(input, token_nodes, context, shapes); - trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source)); + trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes.state(), context.source)); if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { trace!(target: "nu::color_syntax", "after {}", std::any::type_name::()); @@ -501,26 +459,12 @@ pub fn color_syntax_with<'a, 'b, T: ColorSyntax, U, I>( token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, ) -> ((), U) { - trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), 
debug_tokens(token_nodes, context.source)); - - let len = token_nodes.shapes().len(); - let result = shape.color_syntax(input, token_nodes, context); - - trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source)); - - if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { - trace!(target: "nu::color_syntax", "after {}", std::any::type_name::()); - - if len < token_nodes.shapes().len() { - for i in len..(token_nodes.shapes().len()) { - trace!(target: "nu::color_syntax", "new shape :: {:?}", token_nodes.shapes()[i]); - } - } else { - trace!(target: "nu::color_syntax", "no new shapes"); - } - } - - ((), result) + ( + (), + token_nodes.color_frame(shape.name(), |token_nodes| { + shape.color_syntax(input, token_nodes, context) + }), + ) } #[cfg(not(coloring_in_tokens))] @@ -531,31 +475,9 @@ pub fn color_fallible_syntax_with<'a, 'b, T: FallibleColorSyntax>, ) -> Result { - trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes, context.source)); - - if token_nodes.at_end() { - trace!(target: "nu::color_syntax", "at eof"); - return Err(ShellError::unexpected_eof("coloring", Tag::unknown())); - } - - let len = shapes.len(); - let result = shape.color_syntax(input, token_nodes, context, shapes); - - trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source)); - - if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { - trace!(target: "nu::color_syntax", "after {}", std::any::type_name::()); - - if len < shapes.len() { - for i in len..(shapes.len()) { - trace!(target: "nu::color_syntax", "new shape :: {:?}", shapes[i]); - } - } else { - trace!(target: "nu::color_syntax", "no new shapes"); - } - } - - result + token_nodes.color_fallible_frame(std::any::type_name::(), |token_nodes| { + shape.color_syntax(input, token_nodes, context, shapes) + }) } #[cfg(coloring_in_tokens)] @@ -565,31 +487,9 @@ pub fn color_fallible_syntax_with<'a, 'b, T: 
FallibleColorSyntax, context: &ExpandContext, ) -> Result { - trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes, context.source)); - - if token_nodes.at_end() { - trace!(target: "nu::color_syntax", "at eof"); - return Err(ShellError::unexpected_eof("coloring", Tag::unknown())); - } - - let len = token_nodes.shapes().len(); - let result = shape.color_syntax(input, token_nodes, context); - - trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source)); - - if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { - trace!(target: "nu::color_syntax", "after {}", std::any::type_name::()); - - if len < token_nodes.shapes().len() { - for i in len..(token_nodes.shapes().len()) { - trace!(target: "nu::color_syntax", "new shape :: {:?}", token_nodes.shapes()[i]); - } - } else { - trace!(target: "nu::color_syntax", "no new shapes"); - } - } - - result + token_nodes.color_fallible_frame(shape.name(), |token_nodes| { + shape.color_syntax(input, token_nodes, context) + }) } pub(crate) fn expand_expr<'a, 'b, T: ExpandExpression>( @@ -597,18 +497,18 @@ pub(crate) fn expand_expr<'a, 'b, T: ExpandExpression>( token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, ) -> Result { - trace!(target: "nu::expand_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes, context.source)); + trace!(target: "nu::expand_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes.state(), context.source)); let result = shape.expand_syntax(token_nodes, context); match result { Err(err) => { - trace!(target: "nu::expand_syntax", "error :: {} :: {:?}", err, debug_tokens(token_nodes, context.source)); + trace!(target: "nu::expand_syntax", "error :: {} :: {:?}", err, debug_tokens(token_nodes.state(), context.source)); Err(err) } Ok(result) => { - trace!(target: "nu::expand_syntax", "ok :: {:?} :: {:?}", result, debug_tokens(token_nodes, context.source)); 
+ trace!(target: "nu::expand_syntax", "ok :: {:?} :: {:?}", result, debug_tokens(token_nodes.state(), context.source)); Ok(result) } } @@ -738,7 +638,7 @@ impl FallibleColorSyntax for BareShape { _context: &ExpandContext, shapes: &mut Vec>, ) -> Result<(), ShellError> { - token_nodes.peek_any_token(|token| match token { + token_nodes.peek_any_token("word", |token| match token { // If it's a bare token, color it TokenNode::Token(Spanned { item: RawToken::Bare, @@ -759,21 +659,22 @@ impl FallibleColorSyntax for BareShape { type Info = (); type Input = FlatShape; + fn name(&self) -> &'static str { + "BareShape" + } + fn color_syntax<'a, 'b>( &self, input: &FlatShape, token_nodes: &'b mut TokensIterator<'a>, _context: &ExpandContext, ) -> Result<(), ShellError> { - let span = token_nodes.peek_any_token(|token| match token { + let span = token_nodes.peek_any_token("word", |token| match token { // If it's a bare token, color it TokenNode::Token(Spanned { item: RawToken::Bare, span, - }) => { - // token_nodes.color_shape((*input).spanned(*span)); - Ok(span) - } + }) => Ok(span), // otherwise, fail other => Err(ShellError::type_error("word", other.tagged_type_name())), @@ -872,7 +773,8 @@ impl FallibleColorSyntax for PipelineShape { shapes: &mut Vec>, ) -> Result<(), ShellError> { // Make sure we're looking at a pipeline - let Pipeline { parts, .. } = token_nodes.peek_any_token(|node| node.as_pipeline())?; + let Pipeline { parts, .. } = + token_nodes.peek_any_token("pipeline", |node| node.as_pipeline())?; // Enumerate the pipeline parts for part in parts { @@ -898,6 +800,10 @@ impl FallibleColorSyntax for PipelineShape { type Info = (); type Input = (); + fn name(&self) -> &'static str { + "PipelineShape" + } + fn color_syntax<'a, 'b>( &self, _input: &(), @@ -905,7 +811,9 @@ impl FallibleColorSyntax for PipelineShape { context: &ExpandContext, ) -> Result<(), ShellError> { // Make sure we're looking at a pipeline - let Pipeline { parts, .. 
} = token_nodes.peek_any_token(|node| node.as_pipeline())?; + let pipeline = token_nodes.peek_any_token("pipeline", |node| node.as_pipeline())?; + + let parts = &pipeline.parts[..]; // Enumerate the pipeline parts for part in parts { @@ -914,40 +822,77 @@ impl FallibleColorSyntax for PipelineShape { token_nodes.color_shape(FlatShape::Pipe.spanned(pipe)) } - // Create a new iterator containing the tokens in the pipeline part to color - let mut token_nodes = TokensIterator::new(&part.tokens.item, part.span, false); + let tokens: Spanned<&[TokenNode]> = (&part.item.tokens[..]).spanned(part.span); - color_syntax(&MaybeSpaceShape, &mut token_nodes, context); - color_syntax(&CommandShape, &mut token_nodes, context); + token_nodes.child(tokens, move |token_nodes| { + color_syntax(&MaybeSpaceShape, token_nodes, context); + color_syntax(&CommandShape, token_nodes, context); + }); } Ok(()) } } +#[cfg(coloring_in_tokens)] impl ExpandSyntax for PipelineShape { type Output = ClassifiedPipeline; - fn expand_syntax<'a, 'b>( + fn expand_syntax<'content, 'me>( &self, - iterator: &'b mut TokensIterator<'a>, + iterator: &'me mut TokensIterator<'content>, context: &ExpandContext, ) -> Result { let source = context.source; let peeked = iterator.peek_any().not_eof("pipeline")?; - let pipeline = peeked.node.as_pipeline()?; - peeked.commit(); + let pipeline = peeked.commit().as_pipeline()?; - let Pipeline { parts, .. 
} = pipeline; + let parts = &pipeline.parts[..]; - let commands: Result, ShellError> = parts - .iter() - .map(|item| classify_command(item, context, &source)) - .collect(); + let mut out = vec![]; - Ok(ClassifiedPipeline { - commands: commands?, - }) + for part in parts { + let tokens: Spanned<&[TokenNode]> = (&part.item.tokens[..]).spanned(part.span); + + let classified = iterator.child(tokens, move |token_nodes| { + classify_command(token_nodes, context, &source) + })?; + + out.push(classified); + } + + Ok(ClassifiedPipeline { commands: out }) + } +} + +#[cfg(not(coloring_in_tokens))] +impl ExpandSyntax for PipelineShape { + type Output = ClassifiedPipeline; + fn expand_syntax<'content, 'me>( + &self, + iterator: &'me mut TokensIterator<'content>, + context: &ExpandContext, + ) -> Result { + let source = context.source; + + let peeked = iterator.peek_any().not_eof("pipeline")?; + let pipeline = peeked.commit().as_pipeline()?; + + let parts = &pipeline.parts[..]; + + let mut out = vec![]; + + for part in parts { + let tokens: Spanned<&[TokenNode]> = (&part.item.tokens[..]).spanned(part.span); + + let classified = iterator.child(tokens, move |token_nodes| { + classify_command(token_nodes, context, &source) + })?; + + out.push(classified); + } + + Ok(ClassifiedPipeline { commands: out }) } } @@ -1018,6 +963,10 @@ impl FallibleColorSyntax for CommandHeadShape { type Info = CommandHeadKind; type Input = (); + fn name(&self) -> &'static str { + "CommandHeadShape" + } + fn color_syntax<'a, 'b>( &self, _input: &(), @@ -1215,6 +1164,10 @@ impl FallibleColorSyntax for InternalCommandHeadShape { type Info = (); type Input = (); + fn name(&self) -> &'static str { + "InternalCommandHeadShape" + } + fn color_syntax<'a, 'b>( &self, _input: &(), @@ -1299,7 +1252,7 @@ fn parse_single_node<'a, 'b, T>( expected: &'static str, callback: impl FnOnce(RawToken, Span, SingleError) -> Result, ) -> Result { - token_nodes.peek_any_token(|node| match node { + 
token_nodes.peek_any_token(expected, |node| match node { TokenNode::Token(token) => callback( token.item, token.span, @@ -1377,6 +1330,10 @@ impl FallibleColorSyntax for WhitespaceShape { type Info = (); type Input = (); + fn name(&self) -> &'static str { + "WhitespaceShape" + } + fn color_syntax<'a, 'b>( &self, _input: &(), @@ -1502,6 +1459,10 @@ impl ColorSyntax for MaybeSpaceShape { type Info = (); type Input = (); + fn name(&self) -> &'static str { + "MaybeSpaceShape" + } + fn color_syntax<'a, 'b>( &self, _input: &(), @@ -1559,6 +1520,10 @@ impl FallibleColorSyntax for SpaceShape { type Info = (); type Input = (); + fn name(&self) -> &'static str { + "SpaceShape" + } + fn color_syntax<'a, 'b>( &self, _input: &(), @@ -1618,17 +1583,15 @@ fn expand_variable(span: Span, token_span: Span, source: &Text) -> hir::Expressi } fn classify_command( - command: &Spanned, + mut iterator: &mut TokensIterator, context: &ExpandContext, source: &Text, ) -> Result { - let mut iterator = TokensIterator::new(&command.tokens.item, command.span, true); - let head = CommandHeadShape.expand_syntax(&mut iterator, &context)?; match &head { CommandSignature::Expression(_) => Err(ShellError::syntax_error( - "Unexpected expression in command position".tagged(command.span), + "Unexpected expression in command position".tagged(iterator.whole_span()), )), // If the command starts with `^`, treat it as an external command no matter what @@ -1710,6 +1673,10 @@ impl ColorSyntax for CommandShape { type Info = (); type Input = (); + fn name(&self) -> &'static str { + "CommandShape" + } + fn color_syntax<'a, 'b>( &self, _input: &(), diff --git a/src/parser/hir/syntax_shape/block.rs b/src/parser/hir/syntax_shape/block.rs index fdf2ecb3f8..0061c0fe8c 100644 --- a/src/parser/hir/syntax_shape/block.rs +++ b/src/parser/hir/syntax_shape/block.rs @@ -66,6 +66,10 @@ impl FallibleColorSyntax for AnyBlockShape { type Info = (); type Input = (); + fn name(&self) -> &'static str { + "AnyBlockShape" + } + fn 
color_syntax<'a, 'b>( &self, _input: &(), @@ -85,13 +89,14 @@ impl FallibleColorSyntax for AnyBlockShape { match block { // If so, color it as a block Some((children, spans)) => { - let mut token_nodes = TokensIterator::new(children.item, context.span, false); - color_syntax_with( - &DelimitedShape, - &(Delimiter::Brace, spans.0, spans.1), - &mut token_nodes, - context, - ); + token_nodes.child(children, |token_nodes| { + color_syntax_with( + &DelimitedShape, + &(Delimiter::Brace, spans.0, spans.1), + token_nodes, + context, + ); + }); return Ok(()); } @@ -169,6 +174,10 @@ impl FallibleColorSyntax for ShorthandBlock { type Info = (); type Input = (); + fn name(&self) -> &'static str { + "ShorthandBlock" + } + fn color_syntax<'a, 'b>( &self, _input: &(), @@ -264,6 +273,10 @@ impl FallibleColorSyntax for ShorthandPath { type Info = (); type Input = (); + fn name(&self) -> &'static str { + "ShorthandPath" + } + fn color_syntax<'a, 'b>( &self, _input: &(), diff --git a/src/parser/hir/syntax_shape/expression.rs b/src/parser/hir/syntax_shape/expression.rs index eccebf7516..0681c9c403 100644 --- a/src/parser/hir/syntax_shape/expression.rs +++ b/src/parser/hir/syntax_shape/expression.rs @@ -69,6 +69,10 @@ impl FallibleColorSyntax for AnyExpressionShape { type Info = (); type Input = (); + fn name(&self) -> &'static str { + "AnyExpressionShape" + } + fn color_syntax<'a, 'b>( &self, _input: &(), @@ -267,6 +271,10 @@ impl FallibleColorSyntax for AnyExpressionStartShape { type Info = (); type Input = (); + fn name(&self) -> &'static str { + "AnyExpressionStartShape" + } + fn color_syntax<'a, 'b>( &self, _input: &(), @@ -315,7 +323,7 @@ impl FallibleColorSyntax for AnyExpressionStartShape { token_nodes.color_shape(FlatShape::Word.spanned(atom.span)); } - _ => atom.color_tokens(token_nodes.mut_shapes()), + _ => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)), } Ok(()) @@ -387,13 +395,17 @@ impl FallibleColorSyntax for BareTailShape { type Info = (); type Input = 
(); + fn name(&self) -> &'static str { + "BareTailShape" + } + fn color_syntax<'a, 'b>( &self, _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, ) -> Result<(), ShellError> { - let len = token_nodes.shapes().len(); + let len = token_nodes.state().shapes().len(); loop { let word = @@ -422,7 +434,7 @@ impl FallibleColorSyntax for BareTailShape { } } - if token_nodes.shapes().len() > len { + if token_nodes.state().shapes().len() > len { Ok(()) } else { Err(ShellError::syntax_error( diff --git a/src/parser/hir/syntax_shape/expression/delimited.rs b/src/parser/hir/syntax_shape/expression/delimited.rs index 5f8406c6cb..8cd1e9805a 100644 --- a/src/parser/hir/syntax_shape/expression/delimited.rs +++ b/src/parser/hir/syntax_shape/expression/delimited.rs @@ -66,6 +66,11 @@ impl ColorSyntax for DelimitedShape { impl ColorSyntax for DelimitedShape { type Info = (); type Input = (Delimiter, Span, Span); + + fn name(&self) -> &'static str { + "DelimitedShape" + } + fn color_syntax<'a, 'b>( &self, (delimiter, open, close): &(Delimiter, Span, Span), diff --git a/src/parser/hir/syntax_shape/expression/file_path.rs b/src/parser/hir/syntax_shape/expression/file_path.rs index acde8fba13..f0e5ee0079 100644 --- a/src/parser/hir/syntax_shape/expression/file_path.rs +++ b/src/parser/hir/syntax_shape/expression/file_path.rs @@ -52,6 +52,10 @@ impl FallibleColorSyntax for FilePathShape { type Info = (); type Input = (); + fn name(&self) -> &'static str { + "FilePathShape" + } + fn color_syntax<'a, 'b>( &self, _input: &(), @@ -78,7 +82,7 @@ impl FallibleColorSyntax for FilePathShape { token_nodes.color_shape(FlatShape::Path.spanned(atom.span)); } - _ => atom.color_tokens(token_nodes.mut_shapes()), + _ => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)), } Ok(()) diff --git a/src/parser/hir/syntax_shape/expression/list.rs b/src/parser/hir/syntax_shape/expression/list.rs index 5a1ea8e383..51a6b852ca 100644 --- 
a/src/parser/hir/syntax_shape/expression/list.rs +++ b/src/parser/hir/syntax_shape/expression/list.rs @@ -121,6 +121,10 @@ impl ColorSyntax for ExpressionListShape { type Info = (); type Input = (); + fn name(&self) -> &'static str { + "ExpressionListShape" + } + /// The intent of this method is to fully color an expression list shape infallibly. /// This means that if we can't expand a token into an expression, we fall back to /// a simpler coloring strategy. @@ -148,12 +152,12 @@ impl ColorSyntax for ExpressionListShape { } if backoff { - let len = token_nodes.shapes().len(); + let len = token_nodes.state().shapes().len(); // If we previously encountered a parsing error, use backoff coloring mode color_syntax(&SimplestExpression, token_nodes, context); - if len == token_nodes.shapes().len() && !token_nodes.at_end() { + if len == token_nodes.state().shapes().len() && !token_nodes.at_end() { // This should never happen, but if it does, a panic is better than an infinite loop panic!("Unexpected tokens left that couldn't be colored even with SimplestExpression") } @@ -222,6 +226,10 @@ impl ColorSyntax for BackoffColoringMode { type Info = (); type Input = (); + fn name(&self) -> &'static str { + "BackoffColoringMode" + } + fn color_syntax<'a, 'b>( &self, _input: &Self::Input, @@ -233,12 +241,12 @@ impl ColorSyntax for BackoffColoringMode { break; } - let len = token_nodes.shapes().len(); + let len = token_nodes.state().shapes().len(); color_syntax(&SimplestExpression, token_nodes, context); - if len == token_nodes.shapes().len() && !token_nodes.at_end() { + if len == token_nodes.state().shapes().len() && !token_nodes.at_end() { // This shouldn't happen, but if it does, a panic is better than an infinite loop - panic!("SimplestExpression failed to consume any tokens, but it's not at the end. 
This is unexpected\n== token nodes==\n{:#?}\n\n== shapes ==\n{:#?}", token_nodes, token_nodes.shapes()); + panic!("SimplestExpression failed to consume any tokens, but it's not at the end. This is unexpected\n== token nodes==\n{:#?}\n\n== shapes ==\n{:#?}", token_nodes, token_nodes.state().shapes()); } } } @@ -281,6 +289,10 @@ impl ColorSyntax for SimplestExpression { type Info = (); type Input = (); + fn name(&self) -> &'static str { + "SimplestExpression" + } + fn color_syntax<'a, 'b>( &self, _input: &(), @@ -296,7 +308,7 @@ impl ColorSyntax for SimplestExpression { match atom { Err(_) => {} - Ok(atom) => atom.color_tokens(token_nodes.mut_shapes()), + Ok(atom) => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)), } } } diff --git a/src/parser/hir/syntax_shape/expression/number.rs b/src/parser/hir/syntax_shape/expression/number.rs index d1475cbaf3..d4069478e9 100644 --- a/src/parser/hir/syntax_shape/expression/number.rs +++ b/src/parser/hir/syntax_shape/expression/number.rs @@ -79,6 +79,10 @@ impl FallibleColorSyntax for NumberShape { type Info = (); type Input = (); + fn name(&self) -> &'static str { + "NumberShape" + } + fn color_syntax<'a, 'b>( &self, _input: &(), @@ -97,7 +101,7 @@ impl FallibleColorSyntax for NumberShape { Spanned { item: Ok(atom), .. } => atom, }; - atom.color_tokens(token_nodes.mut_shapes()); + token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)); Ok(()) } @@ -171,6 +175,10 @@ impl FallibleColorSyntax for IntShape { type Info = (); type Input = (); + fn name(&self) -> &'static str { + "IntShape" + } + fn color_syntax<'a, 'b>( &self, _input: &(), @@ -189,7 +197,7 @@ impl FallibleColorSyntax for IntShape { Spanned { item: Ok(atom), .. 
} => atom, }; - atom.color_tokens(token_nodes.mut_shapes()); + token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)); Ok(()) } diff --git a/src/parser/hir/syntax_shape/expression/pattern.rs b/src/parser/hir/syntax_shape/expression/pattern.rs index 328e8f795e..eab0b6e5bb 100644 --- a/src/parser/hir/syntax_shape/expression/pattern.rs +++ b/src/parser/hir/syntax_shape/expression/pattern.rs @@ -41,6 +41,10 @@ impl FallibleColorSyntax for PatternShape { type Info = (); type Input = (); + fn name(&self) -> &'static str { + "PatternShape" + } + fn color_syntax<'a, 'b>( &self, _input: &(), diff --git a/src/parser/hir/syntax_shape/expression/string.rs b/src/parser/hir/syntax_shape/expression/string.rs index e74fa0a6a7..116ed8fd0d 100644 --- a/src/parser/hir/syntax_shape/expression/string.rs +++ b/src/parser/hir/syntax_shape/expression/string.rs @@ -45,6 +45,10 @@ impl FallibleColorSyntax for StringShape { type Info = (); type Input = FlatShape; + fn name(&self) -> &'static str { + "StringShape" + } + fn color_syntax<'a, 'b>( &self, input: &FlatShape, @@ -63,7 +67,7 @@ impl FallibleColorSyntax for StringShape { item: AtomicToken::String { .. 
}, span, } => token_nodes.color_shape((*input).spanned(span)), - other => other.color_tokens(token_nodes.mut_shapes()), + atom => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)), } Ok(()) diff --git a/src/parser/hir/syntax_shape/expression/variable_path.rs b/src/parser/hir/syntax_shape/expression/variable_path.rs index 380b3f936c..e983630348 100644 --- a/src/parser/hir/syntax_shape/expression/variable_path.rs +++ b/src/parser/hir/syntax_shape/expression/variable_path.rs @@ -90,6 +90,10 @@ impl FallibleColorSyntax for VariablePathShape { type Info = (); type Input = (); + fn name(&self) -> &'static str { + "VariablePathShape" + } + fn color_syntax<'a, 'b>( &self, _input: &(), @@ -166,6 +170,10 @@ impl FallibleColorSyntax for PathTailShape { type Info = (); type Input = (); + fn name(&self) -> &'static str { + "PathTailShape" + } + fn color_syntax<'a, 'b>( &self, _input: &(), @@ -334,6 +342,10 @@ impl FallibleColorSyntax for ExpressionContinuationShape { type Info = ContinuationInfo; type Input = (); + fn name(&self) -> &'static str { + "ExpressionContinuationShape" + } + fn color_syntax<'a, 'b>( &self, _input: &(), @@ -446,6 +458,10 @@ impl FallibleColorSyntax for VariableShape { type Info = (); type Input = (); + fn name(&self) -> &'static str { + "VariableShape" + } + fn color_syntax<'a, 'b>( &self, _input: &(), @@ -658,6 +674,10 @@ impl FallibleColorSyntax for ColumnPathShape { type Info = (); type Input = (); + fn name(&self) -> &'static str { + "ColumnPathShape" + } + fn color_syntax<'a, 'b>( &self, _input: &(), @@ -758,6 +778,10 @@ impl FallibleColorSyntax for MemberShape { type Info = (); type Input = (); + fn name(&self) -> &'static str { + "MemberShape" + } + fn color_syntax<'a, 'b>( &self, _input: &(), @@ -843,6 +867,10 @@ impl FallibleColorSyntax for ColorableDotShape { type Info = (); type Input = FlatShape; + fn name(&self) -> &'static str { + "ColorableDotShape" + } + fn color_syntax<'a, 'b>( &self, input: &FlatShape, @@ -953,6 +981,10 @@ 
impl FallibleColorSyntax for InfixShape { type Info = (); type Input = (); + fn name(&self) -> &'static str { + "InfixShape" + } + fn color_syntax<'a, 'b>( &self, _input: &(), @@ -971,10 +1003,7 @@ impl FallibleColorSyntax for InfixShape { |token, token_span, _| { match token { // If it's an operator (and not `.`), it's a match - RawToken::Operator(operator) if operator != Operator::Dot => { - // token_nodes.color_shape(FlatShape::Operator.spanned(token_span)); - Ok(token_span) - } + RawToken::Operator(operator) if operator != Operator::Dot => Ok(token_span), // Otherwise, it's not a match _ => Err(ShellError::type_error( diff --git a/src/parser/hir/tokens_iterator.rs b/src/parser/hir/tokens_iterator.rs index 094c5af8c6..b3069247c9 100644 --- a/src/parser/hir/tokens_iterator.rs +++ b/src/parser/hir/tokens_iterator.rs @@ -1,25 +1,37 @@ pub(crate) mod debug; +use self::debug::Tracer; use crate::errors::ShellError; #[cfg(coloring_in_tokens)] use crate::parser::hir::syntax_shape::FlatShape; use crate::parser::TokenNode; +use crate::prelude::*; use crate::{Span, Spanned, SpannedItem}; #[allow(unused)] -use getset::Getters; +use getset::{Getters, MutGetters}; #[derive(Getters, Debug)] -pub struct TokensIterator<'content> { +pub struct TokensIteratorState<'content> { tokens: &'content [TokenNode], span: Span, skip_ws: bool, index: usize, seen: indexmap::IndexSet, #[cfg(coloring_in_tokens)] - #[get = "pub"] + #[cfg_attr(coloring_in_tokens, get = "pub")] shapes: Vec>, } +#[derive(Getters, MutGetters, Debug)] +pub struct TokensIterator<'content> { + #[get = "pub"] + #[get_mut = "pub"] + state: TokensIteratorState<'content>, + #[get = "pub"] + #[get_mut = "pub"] + tracer: Tracer, +} + #[derive(Debug)] pub struct Checkpoint<'content, 'me> { pub(crate) iterator: &'me mut TokensIterator<'content>, @@ -39,10 +51,12 @@ impl<'content, 'me> Checkpoint<'content, 'me> { impl<'content, 'me> std::ops::Drop for Checkpoint<'content, 'me> { fn drop(&mut self) { if !self.committed { - 
self.iterator.index = self.index; - self.iterator.seen = self.seen.clone(); + let state = &mut self.iterator.state; + + state.index = self.index; + state.seen = self.seen.clone(); #[cfg(coloring_in_tokens)] - self.iterator.shapes.truncate(self.shape_start); + state.shapes.truncate(self.shape_start); } } } @@ -138,13 +152,16 @@ impl<'content> TokensIterator<'content> { skip_ws: bool, ) -> TokensIterator<'content> { TokensIterator { - tokens: items, - span, - skip_ws, - index: 0, - seen: indexmap::IndexSet::new(), - #[cfg(coloring_in_tokens)] - shapes: vec![], + state: TokensIteratorState { + tokens: items, + span, + skip_ws, + index: 0, + seen: indexmap::IndexSet::new(), + #[cfg(coloring_in_tokens)] + shapes: vec![], + }, + tracer: Tracer::new(), } } @@ -153,7 +170,7 @@ impl<'content> TokensIterator<'content> { } pub fn len(&self) -> usize { - self.tokens.len() + self.state.tokens.len() } pub fn spanned( @@ -171,35 +188,146 @@ impl<'content> TokensIterator<'content> { #[cfg(coloring_in_tokens)] pub fn color_shape(&mut self, shape: Spanned) { - self.shapes.push(shape); + self.with_tracer(|_, tracer| tracer.add_shape(shape)); + self.state.shapes.push(shape); } #[cfg(coloring_in_tokens)] - pub fn mut_shapes(&mut self) -> &mut Vec> { - &mut self.shapes + pub fn mutate_shapes(&mut self, block: impl FnOnce(&mut Vec>)) { + let new_shapes: Vec> = { + let shapes = &mut self.state.shapes; + let len = shapes.len(); + block(shapes); + (len..(shapes.len())).map(|i| shapes[i]).collect() + }; + + self.with_tracer(|_, tracer| { + for shape in new_shapes { + tracer.add_shape(shape) + } + }); } #[cfg(coloring_in_tokens)] - pub fn child( - &mut self, - tokens: Spanned<&'content [TokenNode]>, - block: impl FnOnce(&mut TokensIterator) -> T, + pub fn silently_mutate_shapes(&mut self, block: impl FnOnce(&mut Vec>)) { + let shapes = &mut self.state.shapes; + block(shapes); + } + + #[cfg(coloring_in_tokens)] + pub fn sort_shapes(&mut self) { + // This is pretty dubious, but it works. 
We should look into a better algorithm that doesn't end up requiring + // this solution. + + self.state + .shapes + .sort_by(|a, b| a.span.start().cmp(&b.span.start())); + } + + #[cfg(coloring_in_tokens)] + pub fn child<'me, T>( + &'me mut self, + tokens: Spanned<&'me [TokenNode]>, + block: impl FnOnce(&mut TokensIterator<'me>) -> T, ) -> T { let mut shapes = vec![]; - std::mem::swap(&mut shapes, &mut self.shapes); + std::mem::swap(&mut shapes, &mut self.state.shapes); + + let mut tracer = Tracer::new(); + std::mem::swap(&mut tracer, &mut self.tracer); let mut iterator = TokensIterator { - tokens: tokens.item, - span: tokens.span, - skip_ws: false, - index: 0, - seen: indexmap::IndexSet::new(), - shapes, + state: TokensIteratorState { + tokens: tokens.item, + span: tokens.span, + skip_ws: false, + index: 0, + seen: indexmap::IndexSet::new(), + shapes, + }, + tracer, }; let result = block(&mut iterator); - std::mem::swap(&mut iterator.shapes, &mut self.shapes); + std::mem::swap(&mut iterator.state.shapes, &mut self.state.shapes); + std::mem::swap(&mut iterator.tracer, &mut self.tracer); + + result + } + + #[cfg(not(coloring_in_tokens))] + pub fn child<'me, T>( + &'me mut self, + tokens: Spanned<&'me [TokenNode]>, + block: impl FnOnce(&mut TokensIterator<'me>) -> T, + ) -> T { + let mut tracer = Tracer::new(); + std::mem::swap(&mut tracer, &mut self.tracer); + + let mut iterator = TokensIterator { + state: TokensIteratorState { + tokens: tokens.item, + span: tokens.span, + skip_ws: false, + index: 0, + seen: indexmap::IndexSet::new(), + }, + tracer, + }; + + let result = block(&mut iterator); + + std::mem::swap(&mut iterator.tracer, &mut self.tracer); + + result + } + + pub fn with_tracer(&mut self, block: impl FnOnce(&mut TokensIteratorState, &mut Tracer)) { + let state = &mut self.state; + let tracer = &mut self.tracer; + + block(state, tracer) + } + + #[cfg(coloring_in_tokens)] + pub fn color_frame( + &mut self, + desc: &'static str, + block: impl FnOnce(&mut 
TokensIterator) -> T, + ) -> T { + self.with_tracer(|_, tracer| tracer.start(desc)); + + let result = block(self); + + self.with_tracer(|_, tracer| { + tracer.success(); + }); + + result + } + + pub fn color_fallible_frame( + &mut self, + desc: &'static str, + block: impl FnOnce(&mut TokensIterator) -> Result, + ) -> Result { + self.with_tracer(|_, tracer| tracer.start(desc)); + + if self.at_end() { + self.with_tracer(|_, tracer| tracer.eof_frame()); + return Err(ShellError::unexpected_eof("coloring", Tag::unknown())); + } + + let result = block(self); + + self.with_tracer(|_, tracer| match &result { + Ok(_) => { + tracer.success(); + } + + Err(err) => tracer.failed(err), + }); result } @@ -207,10 +335,12 @@ impl<'content> TokensIterator<'content> { /// Use a checkpoint when you need to peek more than one token ahead, but can't be sure /// that you'll succeed. pub fn checkpoint<'me>(&'me mut self) -> Checkpoint<'content, 'me> { - let index = self.index; + let state = &mut self.state; + + let index = state.index; #[cfg(coloring_in_tokens)] - let shape_start = self.shapes.len(); - let seen = self.seen.clone(); + let shape_start = state.shapes.len(); + let seen = state.seen.clone(); Checkpoint { iterator: self, @@ -228,10 +358,12 @@ impl<'content> TokensIterator<'content> { &'me mut self, block: impl FnOnce(&mut TokensIterator<'content>) -> Result, ) -> Result { - let index = self.index; + let state = &mut self.state; + + let index = state.index; #[cfg(coloring_in_tokens)] - let shape_start = self.shapes.len(); - let seen = self.seen.clone(); + let shape_start = state.shapes.len(); + let seen = state.seen.clone(); let checkpoint = Checkpoint { iterator: self, @@ -255,11 +387,11 @@ impl<'content> TokensIterator<'content> { &'me mut self, block: impl FnOnce(&mut TokensIterator<'content>) -> Result, ) -> (Result, Vec>) { - let index = self.index; + let index = self.state.index; let mut shapes = vec![]; - let seen = self.seen.clone(); - std::mem::swap(&mut self.shapes, 
&mut shapes); + let seen = self.state.seen.clone(); + std::mem::swap(&mut self.state.shapes, &mut shapes); let checkpoint = Checkpoint { iterator: self, @@ -274,7 +406,7 @@ impl<'content> TokensIterator<'content> { let value = match value { Err(err) => { drop(checkpoint); - std::mem::swap(&mut self.shapes, &mut shapes); + std::mem::swap(&mut self.state.shapes, &mut shapes); return (Err(err), vec![]); } @@ -282,12 +414,12 @@ impl<'content> TokensIterator<'content> { }; checkpoint.commit(); - std::mem::swap(&mut self.shapes, &mut shapes); + std::mem::swap(&mut self.state.shapes, &mut shapes); return (Ok(value), shapes); } fn eof_span(&self) -> Span { - Span::new(self.span.end(), self.span.end()) + Span::new(self.state.span.end(), self.state.span.end()) } pub fn typed_span_at_cursor(&mut self) -> Spanned<&'static str> { @@ -299,6 +431,10 @@ impl<'content> TokensIterator<'content> { } } + pub fn whole_span(&self) -> Span { + self.state.span + } + pub fn span_at_cursor(&mut self) -> Span { let next = self.peek_any(); @@ -309,11 +445,11 @@ impl<'content> TokensIterator<'content> { } pub fn remove(&mut self, position: usize) { - self.seen.insert(position); + self.state.seen.insert(position); } pub fn at_end(&self) -> bool { - peek(self, self.skip_ws).is_none() + peek(self, self.state.skip_ws).is_none() } pub fn at_end_possible_ws(&self) -> bool { @@ -321,13 +457,15 @@ impl<'content> TokensIterator<'content> { } pub fn advance(&mut self) { - self.seen.insert(self.index); - self.index += 1; + self.state.seen.insert(self.state.index); + self.state.index += 1; } pub fn extract(&mut self, f: impl Fn(&TokenNode) -> Option) -> Option<(usize, T)> { - for (i, item) in self.tokens.iter().enumerate() { - if self.seen.contains(&i) { + let state = &mut self.state; + + for (i, item) in state.tokens.iter().enumerate() { + if state.seen.contains(&i) { continue; } @@ -336,7 +474,7 @@ impl<'content> TokensIterator<'content> { continue; } Some(value) => { - self.seen.insert(i); + 
state.seen.insert(i); return Some((i, value)); } } @@ -346,22 +484,26 @@ impl<'content> TokensIterator<'content> { } pub fn move_to(&mut self, pos: usize) { - self.index = pos; + self.state.index = pos; } pub fn restart(&mut self) { - self.index = 0; + self.state.index = 0; } pub fn clone(&self) -> TokensIterator<'content> { + let state = &self.state; TokensIterator { - tokens: self.tokens, - span: self.span, - index: self.index, - seen: self.seen.clone(), - skip_ws: self.skip_ws, - #[cfg(coloring_in_tokens)] - shapes: self.shapes.clone(), + state: TokensIteratorState { + tokens: state.tokens, + span: state.span, + index: state.index, + seen: state.seen.clone(), + skip_ws: state.skip_ws, + #[cfg(coloring_in_tokens)] + shapes: state.shapes.clone(), + }, + tracer: self.tracer.clone(), } } @@ -384,10 +526,11 @@ impl<'content> TokensIterator<'content> { // Peek the next token, including whitespace, but not EOF pub fn peek_any_token<'me, T>( &'me mut self, + expected: &'static str, block: impl FnOnce(&'content TokenNode) -> Result, ) -> Result { let peeked = start_next(self, false); - let peeked = peeked.not_eof("invariant"); + let peeked = peeked.not_eof(expected); match peeked { Err(err) => return Err(err), @@ -403,10 +546,10 @@ impl<'content> TokensIterator<'content> { fn commit(&mut self, from: usize, to: usize) { for index in from..to { - self.seen.insert(index); + self.state.seen.insert(index); } - self.index = to; + self.state.index = to; } pub fn pos(&self, skip_ws: bool) -> Option { @@ -424,7 +567,7 @@ impl<'content> Iterator for TokensIterator<'content> { type Item = &'content TokenNode; fn next(&mut self) -> Option<&'content TokenNode> { - next(self, self.skip_ws) + next(self, self.state.skip_ws) } } @@ -432,23 +575,25 @@ fn peek<'content, 'me>( iterator: &'me TokensIterator<'content>, skip_ws: bool, ) -> Option<&'me TokenNode> { - let mut to = iterator.index; + let state = iterator.state(); + + let mut to = state.index; loop { - if to >= 
iterator.tokens.len() { + if to >= state.tokens.len() { return None; } - if iterator.seen.contains(&to) { + if state.seen.contains(&to) { to += 1; continue; } - if to >= iterator.tokens.len() { + if to >= state.tokens.len() { return None; } - let node = &iterator.tokens[to]; + let node = &state.tokens[to]; match node { TokenNode::Whitespace(_) if skip_ws => { @@ -465,23 +610,25 @@ fn peek_pos<'content, 'me>( iterator: &'me TokensIterator<'content>, skip_ws: bool, ) -> Option { - let mut to = iterator.index; + let state = iterator.state(); + + let mut to = state.index; loop { - if to >= iterator.tokens.len() { + if to >= state.tokens.len() { return None; } - if iterator.seen.contains(&to) { + if state.seen.contains(&to) { to += 1; continue; } - if to >= iterator.tokens.len() { + if to >= state.tokens.len() { return None; } - let node = &iterator.tokens[to]; + let node = &state.tokens[to]; match node { TokenNode::Whitespace(_) if skip_ws => { @@ -496,11 +643,13 @@ fn start_next<'content, 'me>( iterator: &'me mut TokensIterator<'content>, skip_ws: bool, ) -> Peeked<'content, 'me> { - let from = iterator.index; - let mut to = iterator.index; + let state = iterator.state(); + + let from = state.index; + let mut to = state.index; loop { - if to >= iterator.tokens.len() { + if to >= state.tokens.len() { return Peeked { node: None, iterator, @@ -509,12 +658,12 @@ fn start_next<'content, 'me>( }; } - if iterator.seen.contains(&to) { + if state.seen.contains(&to) { to += 1; continue; } - if to >= iterator.tokens.len() { + if to >= state.tokens.len() { return Peeked { node: None, iterator, @@ -523,7 +672,7 @@ fn start_next<'content, 'me>( }; } - let node = &iterator.tokens[to]; + let node = &state.tokens[to]; match node { TokenNode::Whitespace(_) if skip_ws => { @@ -547,20 +696,20 @@ fn next<'me, 'content>( skip_ws: bool, ) -> Option<&'content TokenNode> { loop { - if iterator.index >= iterator.tokens.len() { + if iterator.state().index >= iterator.state().tokens.len() { 
return None; } - if iterator.seen.contains(&iterator.index) { + if iterator.state().seen.contains(&iterator.state().index) { iterator.advance(); continue; } - if iterator.index >= iterator.tokens.len() { + if iterator.state().index >= iterator.state().tokens.len() { return None; } - match &iterator.tokens[iterator.index] { + match &iterator.state().tokens[iterator.state().index] { TokenNode::Whitespace(_) if skip_ws => { iterator.advance(); } diff --git a/src/parser/hir/tokens_iterator/debug.rs b/src/parser/hir/tokens_iterator/debug.rs index 2e26720154..332a74067c 100644 --- a/src/parser/hir/tokens_iterator/debug.rs +++ b/src/parser/hir/tokens_iterator/debug.rs @@ -1,5 +1,13 @@ -use crate::parser::hir::tokens_iterator::TokensIterator; +use crate::errors::ShellError; +use crate::parser::hir::syntax_shape::FlatShape; +use crate::parser::hir::tokens_iterator::TokensIteratorState; +use crate::prelude::*; use crate::traits::ToDebug; +use ansi_term::Color; +use log::trace; +use ptree::*; +use std::borrow::Cow; +use std::io; #[derive(Debug)] pub(crate) enum DebugIteratorToken { @@ -8,15 +16,15 @@ pub(crate) enum DebugIteratorToken { Cursor, } -pub(crate) fn debug_tokens(iterator: &TokensIterator, source: &str) -> Vec { +pub(crate) fn debug_tokens(state: &TokensIteratorState, source: &str) -> Vec { let mut out = vec![]; - for (i, token) in iterator.tokens.iter().enumerate() { - if iterator.index == i { + for (i, token) in state.tokens.iter().enumerate() { + if state.index == i { out.push(DebugIteratorToken::Cursor); } - if iterator.seen.contains(&i) { + if state.seen.contains(&i) { out.push(DebugIteratorToken::Seen(format!("{}", token.debug(source)))); } else { out.push(DebugIteratorToken::Unseen(format!( @@ -28,3 +36,344 @@ pub(crate) fn debug_tokens(iterator: &TokensIterator, source: &str) -> Vec), + Frame(ColorFrame), +} + +impl FrameChild { + fn colored_leaf_description(&self, text: &Text, f: &mut impl io::Write) -> io::Result<()> { + match self { + 
FrameChild::Shape(shape) => write!( + f, + "{} {:?}", + Color::White + .bold() + .on(Color::Green) + .paint(format!("{:?}", shape.item)), + shape.span.slice(text) + ), + + FrameChild::Frame(frame) => frame.colored_leaf_description(f), + } + } + + fn into_tree_child(self, text: &Text) -> TreeChild { + match self { + FrameChild::Shape(shape) => TreeChild::Shape(shape, text.clone()), + FrameChild::Frame(frame) => TreeChild::Frame(frame, text.clone()), + } + } +} + +#[derive(Debug, Clone)] +pub struct ColorFrame { + description: &'static str, + children: Vec, + error: Option, +} + +impl ColorFrame { + fn colored_leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> { + if self.has_only_error_descendents() { + if self.children.len() == 0 { + write!( + f, + "{}", + Color::White.bold().on(Color::Red).paint(self.description) + ) + } else { + write!(f, "{}", Color::Red.normal().paint(self.description)) + } + } else if self.has_descendent_shapes() { + write!(f, "{}", Color::Green.normal().paint(self.description)) + } else { + write!(f, "{}", Color::Yellow.bold().paint(self.description)) + } + } + + fn colored_description(&self, text: &Text, f: &mut impl io::Write) -> io::Result<()> { + if self.children.len() == 1 { + let child = &self.children[0]; + + self.colored_leaf_description(f)?; + write!(f, " -> ")?; + child.colored_leaf_description(text, f) + } else { + self.colored_leaf_description(f) + } + } + + fn children_for_formatting(&self, text: &Text) -> Vec { + if self.children.len() == 1 { + let child = &self.children[0]; + + match child { + FrameChild::Shape(_) => vec![], + FrameChild::Frame(frame) => frame.tree_children(text), + } + } else { + self.tree_children(text) + } + } + + fn tree_children(&self, text: &Text) -> Vec { + self.children + .clone() + .into_iter() + .map(|c| c.into_tree_child(text)) + .collect() + } + + #[allow(unused)] + fn add_shape(&mut self, shape: Spanned) { + self.children.push(FrameChild::Shape(shape)) + } + + fn 
has_child_shapes(&self) -> bool { + self.any_child_shape(|_| true) + } + + fn any_child_shape(&self, predicate: impl Fn(Spanned) -> bool) -> bool { + for item in &self.children { + match item { + FrameChild::Shape(shape) => { + if predicate(*shape) { + return true; + } + } + + _ => {} + } + } + + false + } + + fn any_child_frame(&self, predicate: impl Fn(&ColorFrame) -> bool) -> bool { + for item in &self.children { + match item { + FrameChild::Frame(frame) => { + if predicate(frame) { + return true; + } + } + + _ => {} + } + } + + false + } + + fn has_descendent_shapes(&self) -> bool { + if self.has_child_shapes() { + true + } else { + self.any_child_frame(|frame| frame.has_descendent_shapes()) + } + } + + fn has_only_error_descendents(&self) -> bool { + if self.children.len() == 0 { + // if this frame has no children at all, it has only error descendents if this frame + // is an error + self.error.is_some() + } else { + // otherwise, it has only error descendents if all of its children terminate in an + // error (transitively) + + let mut seen_error = false; + + for child in &self.children { + match child { + // if this frame has at least one child shape, this frame has non-error descendents + FrameChild::Shape(_) => return false, + FrameChild::Frame(frame) => { + // if the chi + if frame.has_only_error_descendents() { + seen_error = true; + } else { + return false; + } + } + } + } + + seen_error + } + } +} + +#[derive(Debug, Clone)] +pub enum TreeChild { + Shape(Spanned, Text), + Frame(ColorFrame, Text), +} + +impl TreeChild { + fn colored_leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> { + match self { + TreeChild::Shape(shape, text) => write!( + f, + "{} {:?}", + Color::White + .bold() + .on(Color::Green) + .paint(format!("{:?}", shape.item)), + shape.span.slice(text) + ), + + TreeChild::Frame(frame, _) => frame.colored_leaf_description(f), + } + } +} + +impl TreeItem for TreeChild { + type Child = TreeChild; + + fn write_self(&self, f: &mut 
W, _style: &Style) -> io::Result<()> { + match self { + shape @ TreeChild::Shape(..) => shape.colored_leaf_description(f), + + TreeChild::Frame(frame, text) => frame.colored_description(text, f), + } + } + + fn children(&self) -> Cow<[Self::Child]> { + match self { + TreeChild::Shape(..) => Cow::Borrowed(&[]), + TreeChild::Frame(frame, text) => Cow::Owned(frame.children_for_formatting(text)), + } + } +} + +#[derive(Debug, Clone)] +pub struct Tracer { + frame_stack: Vec, +} + +impl Tracer { + pub fn print(self, source: Text) -> PrintTracer { + PrintTracer { + tracer: self, + source, + } + } + + pub fn new() -> Tracer { + let root = ColorFrame { + description: "Trace", + children: vec![], + error: None, + }; + + Tracer { + frame_stack: vec![root], + } + } + + fn current_frame(&mut self) -> &mut ColorFrame { + let frames = &mut self.frame_stack; + let last = frames.len() - 1; + &mut frames[last] + } + + fn pop_frame(&mut self) -> ColorFrame { + let result = self.frame_stack.pop().expect("Can't pop root tracer frame"); + + if self.frame_stack.len() == 0 { + panic!("Can't pop root tracer frame"); + } + + self.debug(); + + result + } + + pub fn start(&mut self, description: &'static str) { + let frame = ColorFrame { + description, + children: vec![], + error: None, + }; + + self.frame_stack.push(frame); + self.debug(); + } + + pub fn eof_frame(&mut self) { + let current = self.pop_frame(); + self.current_frame() + .children + .push(FrameChild::Frame(current)); + } + + #[allow(unused)] + pub fn finish(&mut self) { + loop { + if self.frame_stack.len() == 1 { + break; + } + + let frame = self.pop_frame(); + self.current_frame().children.push(FrameChild::Frame(frame)); + } + } + + #[allow(unused)] + pub fn add_shape(&mut self, shape: Spanned) { + self.current_frame().add_shape(shape); + } + + pub fn success(&mut self) { + let current = self.pop_frame(); + self.current_frame() + .children + .push(FrameChild::Frame(current)); + } + + pub fn failed(&mut self, error: 
&ShellError) { + let mut current = self.pop_frame(); + current.error = Some(error.clone()); + self.current_frame() + .children + .push(FrameChild::Frame(current)); + } + + fn debug(&self) { + trace!(target: "nu::color_syntax", + "frames = {:?}", + self.frame_stack + .iter() + .map(|f| f.description) + .collect::>() + ); + + trace!(target: "nu::color_syntax", "{:#?}", self); + } +} + +#[derive(Debug, Clone)] +pub struct PrintTracer { + tracer: Tracer, + source: Text, +} + +impl TreeItem for PrintTracer { + type Child = TreeChild; + + fn write_self(&self, f: &mut W, style: &Style) -> io::Result<()> { + write!(f, "{}", style.paint("Color Trace")) + } + + fn children(&self) -> Cow<[Self::Child]> { + Cow::Owned(vec![TreeChild::Frame( + self.tracer.frame_stack[0].clone(), + self.source.clone(), + )]) + } +} diff --git a/src/parser/parse/parser.rs b/src/parser/parse/parser.rs index 793f7b6cef..b5aefabc26 100644 --- a/src/parser/parse/parser.rs +++ b/src/parser/parse/parser.rs @@ -310,15 +310,6 @@ pub fn bare(input: NomSpan) -> IResult { let next_char = &input.fragment.chars().nth(0); let prev_char = last.fragment.chars().nth(0); - // if let (Some(prev), Some(next)) = (prev_char, next_char) { - // if prev == '.' 
&& is_member_start(*next) { - // return Err(nom::Err::Error(nom::error::make_error( - // input, - // nom::error::ErrorKind::TakeWhile1, - // ))); - // } - // } - if let Some(next_char) = next_char { if is_external_word_char(*next_char) || is_glob_specific_char(*next_char) { return Err(nom::Err::Error(nom::error::make_error( diff --git a/src/parser/parse/pipeline.rs b/src/parser/parse/pipeline.rs index 73db738078..4a8c72119c 100644 --- a/src/parser/parse/pipeline.rs +++ b/src/parser/parse/pipeline.rs @@ -5,8 +5,9 @@ use derive_new::new; use getset::Getters; use std::fmt; -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, new)] +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Getters, new)] pub struct Pipeline { + #[get = "pub"] pub(crate) parts: Vec>, // pub(crate) post_ws: Option, } @@ -24,6 +25,7 @@ impl ToDebug for Pipeline { #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)] pub struct PipelineElement { pub pipe: Option, + #[get = "pub"] pub tokens: Spanned>, } diff --git a/src/parser/parse_command.rs b/src/parser/parse_command.rs index a4365db247..01ba60b491 100644 --- a/src/parser/parse_command.rs +++ b/src/parser/parse_command.rs @@ -90,11 +90,11 @@ pub fn parse_command_tail( let mut positional = vec![]; for arg in &config.positional { - trace!("Processing positional {:?}", arg); + trace!(target: "nu::parse", "Processing positional {:?}", arg); match arg { PositionalType::Mandatory(..) => { - if tail.at_end() { + if tail.at_end_possible_ws() { return Err(ShellError::argument_error( config.name.clone(), ArgumentError::MissingMandatoryPositional(arg.name().to_string()), @@ -107,7 +107,7 @@ pub fn parse_command_tail( } PositionalType::Optional(..) 
=> { - if tail.at_end() { + if tail.at_end_possible_ws() { break; } } @@ -138,7 +138,7 @@ pub fn parse_command_tail( trace_remaining("after rest", tail.clone(), context.source()); - trace!("Constructed positional={:?} named={:?}", positional, named); + trace!(target: "nu::parse", "Constructed positional={:?} named={:?}", positional, named); let positional = if positional.len() == 0 { None @@ -154,7 +154,7 @@ pub fn parse_command_tail( Some(named) }; - trace!("Normalized positional={:?} named={:?}", positional, named); + trace!(target: "nu::parse", "Normalized positional={:?} named={:?}", positional, named); Ok(Some((positional, named))) } @@ -391,6 +391,10 @@ impl ColorSyntax for CommandTailShape { type Info = (); type Input = Signature; + fn name(&self) -> &'static str { + "CommandTailShape" + } + fn color_syntax<'a, 'b>( &self, signature: &Signature, @@ -427,10 +431,7 @@ impl ColorSyntax for CommandTailShape { token_nodes.move_to(pos); if token_nodes.at_end() { - // args.insert(pos, shapes); - // token_nodes.restart(); return Ok(()); - // continue; } // We still want to color the flag even if the following tokens don't match, so don't @@ -465,10 +466,7 @@ impl ColorSyntax for CommandTailShape { token_nodes.move_to(pos); if token_nodes.at_end() { - // args.insert(pos, shapes); - // token_nodes.restart(); return Ok(()); - // continue; } // We still want to color the flag even if the following tokens don't match, so don't @@ -573,16 +571,14 @@ impl ColorSyntax for CommandTailShape { } } - args.spread_shapes(token_nodes.mut_shapes()); + token_nodes.silently_mutate_shapes(|shapes| args.spread_shapes(shapes)); // Consume any remaining tokens with backoff coloring mode color_syntax(&BackoffColoringMode, token_nodes, context); // This is pretty dubious, but it works. We should look into a better algorithm that doesn't end up requiring // this solution. 
- token_nodes - .mut_shapes() - .sort_by(|a, b| a.span.start().cmp(&b.span.start())); + token_nodes.sort_shapes() } } @@ -633,7 +629,7 @@ fn extract_optional( pub fn trace_remaining(desc: &'static str, tail: hir::TokensIterator<'_>, source: &Text) { trace!( - target: "nu::expand_args", + target: "nu::parse", "{} = {:?}", desc, itertools::join( diff --git a/src/shell/helper.rs b/src/shell/helper.rs index 9b5446f5df..8f38a10002 100644 --- a/src/shell/helper.rs +++ b/src/shell/helper.rs @@ -5,7 +5,7 @@ use crate::parser::nom_input; use crate::parser::parse::token_tree::TokenNode; use crate::{Span, Spanned, SpannedItem, Tag, Tagged, Text}; use ansi_term::Color; -use log::trace; +use log::{log_enabled, trace}; use rustyline::completion::Completer; use rustyline::error::ReadlineError; use rustyline::highlight::Highlighter; @@ -34,23 +34,6 @@ impl Completer for Helper { } } -/* -impl Completer for Helper { - type Candidate = rustyline::completion::Pair; - - fn complete( - &self, - line: &str, - pos: usize, - ctx: &rustyline::Context<'_>, - ) -> Result<(usize, Vec), ReadlineError> { - let result = self.helper.complete(line, pos, ctx); - - result.map(|(x, y)| (x, y.iter().map(|z| z.into()).collect())) - } -} -*/ - impl Hinter for Helper { fn hint(&self, line: &str, pos: usize, ctx: &rustyline::Context<'_>) -> Option { self.context.shell_manager.hint(line, pos, ctx) @@ -103,14 +86,18 @@ impl Highlighter for Helper { let shapes = { // We just constructed a token list that only contains a pipeline, so it can't fail color_fallible_syntax(&PipelineShape, &mut tokens, &expand_context).unwrap(); + tokens.with_tracer(|_, tracer| tracer.finish()); - tokens.shapes() + tokens.state().shapes() }; - trace!(target: "nu::shapes", - "SHAPES :: {:?}", - shapes.iter().map(|shape| shape.item).collect::>() - ); + trace!(target: "nu::color_syntax", "{:#?}", tokens.tracer()); + + if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { + println!(""); + 
ptree::print_tree(&tokens.tracer().clone().print(Text::from(line))).unwrap(); + println!(""); + } for shape in shapes { let styled = paint_flat_shape(&shape, line); @@ -118,18 +105,6 @@ impl Highlighter for Helper { } Cow::Owned(out) - - // loop { - // match iter.next() { - // None => { - // return Cow::Owned(out); - // } - // Some(token) => { - // let styled = paint_pipeline_element(&token, line); - // out.push_str(&styled.to_string()); - // } - // } - // } } } } diff --git a/tests/commands_test.rs b/tests/commands_test.rs index 4d6fa84a65..1a3e63ab4f 100644 --- a/tests/commands_test.rs +++ b/tests/commands_test.rs @@ -246,13 +246,18 @@ fn it_arg_works_with_many_inputs_to_external_command() { let (stdout, stderr) = nu_combined!( cwd: dirs.test(), h::pipeline( r#" - echo file1 file2 + echo hello world | split-row " " - | cat $it + | ^echo $it "# )); - assert_eq!("text and more text", stdout); + #[cfg(windows)] + assert_eq!("hello world", stdout); + + #[cfg(not(windows))] + assert_eq!("helloworld", stdout); + assert!(!stderr.contains("No such file or directory")); }) } From 16751b5dee653ce9e401b04b101cf28e2574f8cf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Tue, 22 Oct 2019 19:29:39 -0500 Subject: [PATCH 088/184] color escaped external command. --- src/parser/hir/syntax_shape.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/parser/hir/syntax_shape.rs b/src/parser/hir/syntax_shape.rs index 8a21fd79e6..2169467e43 100644 --- a/src/parser/hir/syntax_shape.rs +++ b/src/parser/hir/syntax_shape.rs @@ -985,8 +985,8 @@ impl FallibleColorSyntax for CommandHeadShape { match atom.item { // If the head is an explicit external command (^cmd), color it as an external command - AtomicToken::ExternalCommand { command } => { - token_nodes.color_shape(FlatShape::ExternalCommand.spanned(command)); + AtomicToken::ExternalCommand { .. 
} => { + token_nodes.color_shape(FlatShape::ExternalCommand.spanned(atom.span)); Ok(CommandHeadKind::External) } From f1630da2ccbcf38110bab7ff9e9c10956b3c7d06 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Tue, 22 Oct 2019 00:00:06 -0500 Subject: [PATCH 089/184] Suggest a column name in case one unknown column is supplied. --- README.md | 2 +- src/commands/count.rs | 2 +- src/commands/group_by.rs | 39 +++++++++++++++++++++++++++++++++++---- tests/commands_test.rs | 25 +++++++++++++++++++++++++ 4 files changed, 62 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index c391b59903..64ff0e8015 100644 --- a/README.md +++ b/README.md @@ -249,7 +249,7 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat | command | description | | ------------- | ------------- | | add column-or-column-path value | Add a new column to the table | -| count | Show the total number of cells | +| count | Show the total number of rows | | edit column-or-column-path value | Edit an existing column to have a new value | | embed column | Creates a new table of one column with the given name, and places the current table inside of it | | first amount | Show only the first number of rows | diff --git a/src/commands/count.rs b/src/commands/count.rs index 6fe5a94633..5e44283737 100644 --- a/src/commands/count.rs +++ b/src/commands/count.rs @@ -20,7 +20,7 @@ impl WholeStreamCommand for Count { } fn usage(&self) -> &str { - "Show the total number of cells." + "Show the total number of rows." 
} fn run( diff --git a/src/commands/group_by.rs b/src/commands/group_by.rs index e08ebb2afb..7f5f496408 100644 --- a/src/commands/group_by.rs +++ b/src/commands/group_by.rs @@ -40,10 +40,41 @@ fn group_by( let values: Vec> = input.values.collect().await; let mut groups = indexmap::IndexMap::new(); - for row in values { - let key = row.get_data_by_key(&column_name.item).unwrap().as_string()?; - let mut group = groups.entry(key).or_insert(vec![]); - group.push(row); + for value in values { + let group_key = value.get_data_by_key(&column_name.item); + + if group_key.is_none() { + + let possibilities = value.data_descriptors(); + + let mut possible_matches: Vec<_> = possibilities + .iter() + .map(|x| (natural::distance::levenshtein_distance(x, &column_name.item), x)) + .collect(); + + possible_matches.sort(); + + let err = { + if possible_matches.len() > 0 { + ShellError::labeled_error( + "Unknown column", + format!("did you mean '{}'?", possible_matches[0].1), + &column_name.tag,) + } else { + ShellError::labeled_error( + "Unknown column", + "row does not contain this column", + &column_name.tag, + ) + } + }; + + yield Err(err) + } else { + let group_key = group_key.unwrap().as_string()?; + let mut group = groups.entry(group_key).or_insert(vec![]); + group.push(value); + } } let mut out = TaggedDictBuilder::new(name.clone()); diff --git a/tests/commands_test.rs b/tests/commands_test.rs index 7733942811..45e4bcb228 100644 --- a/tests/commands_test.rs +++ b/tests/commands_test.rs @@ -31,6 +31,31 @@ fn group_by() { }) } +#[test] +fn group_by_errors_if_unknown_column_name() { + Playground::setup("group_by_test_2", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContentToBeTrimmed( + "los_tres_caballeros.csv", + r#" + first_name,last_name,rusty_luck,type + Andrés,Robalino,1,A + Jonathan,Turner,1,B + Yehuda,Katz,1,A + "#, + )]); + + let actual = nu_error!( + cwd: dirs.test(), h::pipeline( + r#" + open los_tres_caballeros.csv + | group-by ttype + "# + )); + + 
assert!(actual.contains("Unknown column")); + }) +} + #[test] fn first_gets_first_rows_by_amount() { Playground::setup("first_test_1", |dirs, sandbox| { From c34ebfe73918e01b9bbb9f4e2fab1d1fa12c818d Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Wed, 23 Oct 2019 20:57:04 +1300 Subject: [PATCH 090/184] Bump version Bump version so we can tell a difference between what has been released and what's in master. --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 29205d9af5..49e4920593 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "nu" -version = "0.4.0" +version = "0.4.1" authors = ["Yehuda Katz ", "Jonathan Turner ", "Andrés N. Robalino "] description = "A shell for the GitHub era" license = "MIT" From d160e834eb48b542c711caa95f298937afe76046 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Sat, 26 Oct 2019 05:43:31 +1300 Subject: [PATCH 091/184] rustyline git and add plus for filenames --- Cargo.lock | 8 ++++---- Cargo.toml | 2 +- src/parser/parse/parser.rs | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 510cc4d8ba..9f8ebfe787 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1487,7 +1487,7 @@ dependencies = [ [[package]] name = "nu" -version = "0.4.0" +version = "0.4.1" dependencies = [ "ansi_term 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)", "app_dirs 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1540,7 +1540,7 @@ dependencies = [ "regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "roxmltree 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "rusqlite 0.20.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rustyline 5.0.3 (registry+https://github.com/rust-lang/crates.io-index)", + "rustyline 5.0.3 (git+https://github.com/kkawakam/rustyline.git)", "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.101 
(registry+https://github.com/rust-lang/crates.io-index)", "serde-hjson 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2078,7 +2078,7 @@ dependencies = [ [[package]] name = "rustyline" version = "5.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" +source = "git+https://github.com/kkawakam/rustyline.git#449c811998f630102bb2d9fb0b59b890d9eabac5" dependencies = [ "dirs 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", @@ -3056,7 +3056,7 @@ dependencies = [ "checksum rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "4c691c0e608126e00913e33f0ccf3727d5fc84573623b8d65b2df340b5201783" "checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" "checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" -"checksum rustyline 5.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4795e277e6e57dec9df62b515cd4991371daa80e8dc8d80d596e58722b89c417" +"checksum rustyline 5.0.3 (git+https://github.com/kkawakam/rustyline.git)" = "" "checksum ryu 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "19d2271fa48eaf61e53cc88b4ad9adcbafa2d512c531e7fadb6dc11a4d3656c5" "checksum safemem 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d2b08423011dae9a5ca23f07cf57dac3857f5c885d352b76f6d95f4aea9434d0" "checksum same-file 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "585e8ddcedc187886a30fa705c47985c3fa88d06624095856b36ca0b82ff4421" diff --git a/Cargo.toml b/Cargo.toml index 49e4920593..e81e830e1a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,7 +14,7 @@ documentation = "https://book.nushell.sh" # See more keys and their definitions at 
https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -rustyline = "5.0.3" +rustyline = { git = "https://github.com/kkawakam/rustyline.git" } chrono = { version = "0.4.9", features = ["serde"] } derive-new = "0.5.8" prettytable-rs = "0.8.0" diff --git a/src/parser/parse/parser.rs b/src/parser/parse/parser.rs index b5aefabc26..a548102db9 100644 --- a/src/parser/parse/parser.rs +++ b/src/parser/parse/parser.rs @@ -688,7 +688,7 @@ fn is_start_bare_char(c: char) -> bool { fn is_bare_char(c: char) -> bool { match c { - '+' => false, + '+' => true, _ if c.is_alphanumeric() => true, '\\' => true, '/' => true, From 72fd1b047f63286f494cee027f8be7f660f481e2 Mon Sep 17 00:00:00 2001 From: Paul Delafosse Date: Fri, 25 Oct 2019 19:52:40 +0200 Subject: [PATCH 092/184] Create docs for from-csv command Partial fix of issue nushell#711 --- docs/commands/from-csv.md | 47 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 docs/commands/from-csv.md diff --git a/docs/commands/from-csv.md b/docs/commands/from-csv.md new file mode 100644 index 0000000000..86d309d86b --- /dev/null +++ b/docs/commands/from-csv.md @@ -0,0 +1,47 @@ +# from-csv + +Converts csv data into table. Use this when nushell cannot dertermine the input file extension. 
+ +## Example +Let's say we have the following file : +```shell +> cat pets.txt +animal, name, age +cat, Tom, 7 +dog, Alfred, 10 +chameleon, Linda, 1 +``` + +`pets.txt` is actually a .csv file but it has the .txt extension, `open` is not able to convert it into a table : + +```shell +> open pets.txt +animal, name, age +cat, Tom, 7 +dog, Alfred, 10 +chameleon, Linda, 1 +``` + +To get a table from `pets.txt` we need to use the `from-csv` command : + +```shell +> open pets.txt | from-csv +━━━┯━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━ + # │ animal │ name │ age +───┼───────────┼─────────┼────── + 0 │ cat │ Tom │ 7 + 1 │ dog │ Alfred │ 10 + 2 │ chameleon │ Linda │ 1 +━━━┷━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━ +``` + +To ignore the csv headers use `--headerless` : +```shell +━━━┯━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━ + # │ Column1 │ Column2 │ Column3 +───┼───────────┼─────────┼───────── + 0 │ dog │ Alfred │ 10 + 1 │ chameleon │ Linda │ 1 +━━━┷━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━ +``` + From 07ceec3e0b4721dc04711277ba86d7085020ccfb Mon Sep 17 00:00:00 2001 From: Paul Delafosse Date: Fri, 25 Oct 2019 20:38:55 +0200 Subject: [PATCH 093/184] Create docs for from-toml command Partial fix of issue nushell#711 --- docs/commands/from-toml.md | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 docs/commands/from-toml.md diff --git a/docs/commands/from-toml.md b/docs/commands/from-toml.md new file mode 100644 index 0000000000..d3f3364c78 --- /dev/null +++ b/docs/commands/from-toml.md @@ -0,0 +1,23 @@ +# from-toml +Converts toml data into table. Use this when nushell cannot dertermine the input file extension. + +## Example +Let's say we have the following Rust .lock file : +```shell +> open Cargo.lock +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. [[package]] name = "adler32" version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" +... 
+``` + +The "Cargo.lock" file is actually a .toml file, but the file extension isn't .toml. That's okay, we can use the `from-toml` command : + + +```shell +> open Cargo.lock | from-toml +━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━ + metadata │ package +────────────────┼─────────────────── + [table: 1 row] │ [table: 154 rows] +━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━ +``` \ No newline at end of file From 2706ae076d8782800849908cdaf6d2e36f95e501 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Fri, 25 Oct 2019 18:31:25 -0500 Subject: [PATCH 094/184] Move out tags when parsing and building tree nodes. --- src/parser/parse/parser.rs | 173 +++++++++++++++---------- src/parser/parse/token_tree.rs | 2 +- src/parser/parse/token_tree_builder.rs | 14 +- src/parser/parse/tokens.rs | 8 +- 4 files changed, 114 insertions(+), 83 deletions(-) diff --git a/src/parser/parse/parser.rs b/src/parser/parse/parser.rs index a548102db9..f7fce7c814 100644 --- a/src/parser/parse/parser.rs +++ b/src/parser/parse/parser.rs @@ -393,7 +393,7 @@ pub fn leaf(input: NomSpan) -> IResult { } #[tracable_parser] -pub fn token_list(input: NomSpan) -> IResult>> { +pub fn token_list(input: NomSpan) -> IResult>> { let start = input.offset; let (input, first) = node(input)?; @@ -403,7 +403,7 @@ pub fn token_list(input: NomSpan) -> IResult>> { Ok(( input, - make_token_list(first, list, None).tagged((start, end, None)), + make_token_list(first, list, None).spanned(Span::new(start, end)), )) } @@ -511,14 +511,14 @@ pub fn delimited_brace(input: NomSpan) -> IResult { } #[tracable_parser] -pub fn raw_call(input: NomSpan) -> IResult> { +pub fn raw_call(input: NomSpan) -> IResult> { let left = input.offset; let (input, items) = token_list(input)?; let right = input.offset; Ok(( input, - TokenTreeBuilder::tagged_call(items.item, (left, right, input.extra)), + TokenTreeBuilder::spanned_call(items.item, Span::new(left, right)), )) } @@ -598,7 +598,7 @@ pub fn nodes(input: NomSpan) -> IResult { 
Ok(( input, - TokenTreeBuilder::tagged_token_list(tokens.item, tokens.tag), + TokenTreeBuilder::spanned_token_list(tokens.item, tokens.span), )) } @@ -800,30 +800,30 @@ mod tests { ">" -> b::token_list(vec![b::op(">")]) } - // assert_leaf! { - // parsers [ operator ] - // ">=" -> 0..2 { Operator(Operator::GreaterThanOrEqual) } - // } + equal_tokens! { + + ">=" -> b::token_list(vec![b::op(">=")]) + } - // assert_leaf! { - // parsers [ operator ] - // "<" -> 0..1 { Operator(Operator::LessThan) } - // } + equal_tokens! { + + "<" -> b::token_list(vec![b::op("<")]) + } - // assert_leaf! { - // parsers [ operator ] - // "<=" -> 0..2 { Operator(Operator::LessThanOrEqual) } - // } + equal_tokens! { + + "<=" -> b::token_list(vec![b::op("<=")]) + } - // assert_leaf! { - // parsers [ operator ] - // "==" -> 0..2 { Operator(Operator::Equal) } - // } + equal_tokens! { + + "==" -> b::token_list(vec![b::op("==")]) + } - // assert_leaf! { - // parsers [ operator ] - // "!=" -> 0..2 { Operator(Operator::NotEqual) } - // } + equal_tokens! { + + "!=" -> b::token_list(vec![b::op("!=")]) + } } #[test] @@ -848,12 +848,14 @@ mod tests { } #[test] - fn test_simple_path() { + fn test_unit_sizes() { equal_tokens! { "450MB" -> b::token_list(vec![b::bare("450MB")]) } - + } + #[test] + fn test_simple_path() { equal_tokens! { "chrome.exe" -> b::token_list(vec![b::bare("chrome"), b::op(Operator::Dot), b::bare("exe")]) @@ -877,23 +879,23 @@ mod tests { #[test] fn test_flag() { - // assert_leaf! { - // parsers [ flag ] - // "--hello" -> 0..7 { Flag(Tagged::from_item(FlagKind::Longhand, span(2, 7))) } - // } + equal_tokens! { + + "--amigos" -> b::token_list(vec![b::flag("arepas")]) + } - // assert_leaf! { - // parsers [ flag ] - // "--hello-world" -> 0..13 { Flag(Tagged::from_item(FlagKind::Longhand, span(2, 13))) } - // } + equal_tokens! { + + "--all-amigos" -> b::token_list(vec![b::flag("all-amigos")]) + } } #[test] - fn test_shorthand() { - // assert_leaf! 
{ - // parsers [ shorthand ] - // "-alt" -> 0..4 { Flag(Tagged::from_item(FlagKind::Shorthand, span(1, 4))) } - // } + fn test_shorthand_flag() { + equal_tokens! { + + "-katz" -> b::token_list(vec![b::shorthand("katz")]) + } } #[test] @@ -939,13 +941,13 @@ mod tests { equal_tokens! { - "( abc def )" -> b::token_list(vec![b::parens(vec![b::ws(" "), b::bare("abc"), b::ws(" "), b::bare("def"), b::ws(" ")])]) + "( abc def )" -> b::token_list(vec![b::parens(vec![b::ws(" "), b::bare("abc"), b::sp(), b::bare("def"), b::sp()])]) } equal_tokens! { "( abc def 123 456GB )" -> b::token_list(vec![b::parens(vec![ - b::ws(" "), b::bare("abc"), b::ws(" "), b::bare("def"), b::ws(" "), b::int(123), b::ws(" "), b::bare("456GB"), b::ws(" ") + b::ws(" "), b::bare("abc"), b::sp(), b::bare("def"), b::sp(), b::int(123), b::sp(), b::bare("456GB"), b::sp() ])]) } } @@ -964,13 +966,13 @@ mod tests { equal_tokens! { - "[ abc def ]" -> b::token_list(vec![b::square(vec![b::ws(" "), b::bare("abc"), b::ws(" "), b::bare("def"), b::ws(" ")])]) + "[ abc def ]" -> b::token_list(vec![b::square(vec![b::ws(" "), b::bare("abc"), b::sp(), b::bare("def"), b::sp()])]) } equal_tokens! { "[ abc def 123 456GB ]" -> b::token_list(vec![b::square(vec![ - b::ws(" "), b::bare("abc"), b::ws(" "), b::bare("def"), b::ws(" "), b::int(123), b::ws(" "), b::bare("456GB"), b::ws(" ") + b::ws(" "), b::bare("abc"), b::sp(), b::bare("def"), b::sp(), b::int(123), b::sp(), b::bare("456GB"), b::sp() ])]) } } @@ -984,6 +986,11 @@ mod tests { "$it.print" -> b::token_list(vec![b::var("it"), b::op("."), b::bare("print")]) } + equal_tokens! { + + "$it.0" -> b::token_list(vec![b::var("it"), b::op("."), b::int(0)]) + } + equal_tokens! { "$head.part1.part2" -> b::token_list(vec![b::var("head"), b::op("."), b::bare("part1"), b::op("."), b::bare("part2")]) @@ -1024,6 +1031,19 @@ mod tests { b::op("."), b::string("world")] ) } + + equal_tokens! 
{ + + r#"$it."are PAS".0"# -> b::token_list( + vec![ + b::var("it"), + b::op("."), + b::string("are PAS"), + b::op("."), + b::int(0), + ] + ) + } } #[test] @@ -1062,6 +1082,19 @@ mod tests { "config --set tabs 2" -> b::token_list(vec![b::bare("config"), b::sp(), b::flag("set"), b::sp(), b::bare("tabs"), b::sp(), b::int(2)]) } + + equal_tokens! { + + "inc --patch package.version" -> b::token_list( + vec![ + b::bare("inc"), + b::sp(), + b::flag("patch"), + b::sp(), + b::bare("package"), b::op("."), b::bare("version") + ] + ) + } } #[test] @@ -1114,41 +1147,39 @@ mod tests { fn test_patterns() { equal_tokens! { - "cp ../formats/*" -> b::pipeline(vec![vec![b::bare("cp"), b::ws(" "), b::op("."), b::op("."), b::pattern("/formats/*")]]) + "cp ../formats/*" -> b::pipeline(vec![vec![b::bare("cp"), b::sp(), b::op("."), b::op("."), b::pattern("/formats/*")]]) } equal_tokens! { - "cp * /dev/null" -> b::pipeline(vec![vec![b::bare("cp"), b::ws(" "), b::pattern("*"), b::ws(" "), b::bare("/dev/null")]]) + "cp * /dev/null" -> b::pipeline(vec![vec![b::bare("cp"), b::sp(), b::pattern("*"), b::sp(), b::bare("/dev/null")]]) } } - // #[test] - // fn test_pseudo_paths() { - // let _ = pretty_env_logger::try_init(); + #[test] + fn test_pseudo_paths() { + let _ = pretty_env_logger::try_init(); - // equal_tokens!( - // r#"sys | where cpu."max ghz" > 1"# -> - // b::pipeline(vec![ - // (None, b::call(b::bare("sys"), vec![]), Some(" ")), - // ( - // Some(" "), - // b::call( - // b::bare("where"), - // vec![ - // b::sp(), - // b::path(b::bare("cpu"), vec![b::string("max ghz")]), - // b::sp(), - // b::op(">"), - // b::sp(), - // b::int(1) - // ] - // ), - // None - // ) - // ]) - // ); - // } + equal_tokens!( + + r#"sys | where cpu."max ghz" > 1"# -> b::pipeline(vec![ + vec![ + b::bare("sys"), b::sp() + ], + vec![ + b::sp(), + b::bare("where"), + b::sp(), + b::bare("cpu"), + b::op("."), + b::string("max ghz"), + b::sp(), + b::op(">"), + b::sp(), + b::int(1) + ]]) + ); + } // #[test] // fn 
test_smoke_pipeline() { diff --git a/src/parser/parse/token_tree.rs b/src/parser/parse/token_tree.rs index c3c1df652a..cb335e925e 100644 --- a/src/parser/parse/token_tree.rs +++ b/src/parser/parse/token_tree.rs @@ -274,7 +274,7 @@ impl TokenNode { item: RawToken::Bare, span, }) => *span, - other => panic!("Expected var, found {:?}", other), + other => panic!("Expected bare, found {:?}", other), } } } diff --git a/src/parser/parse/token_tree_builder.rs b/src/parser/parse/token_tree_builder.rs index 891e6b9e16..7146a3c201 100644 --- a/src/parser/parse/token_tree_builder.rs +++ b/src/parser/parse/token_tree_builder.rs @@ -18,7 +18,7 @@ pub struct TokenTreeBuilder { } pub type CurriedToken = Box TokenNode + 'static>; -pub type CurriedCall = Box Tagged + 'static>; +pub type CurriedCall = Box Spanned + 'static>; impl TokenTreeBuilder { pub fn build(block: impl FnOnce(&mut Self) -> TokenNode) -> (TokenNode, String) { @@ -89,12 +89,12 @@ impl TokenTreeBuilder { let tokens = input.into_iter().map(|i| i(b)).collect(); let end = b.pos; - TokenTreeBuilder::tagged_token_list(tokens, (start, end, None)) + TokenTreeBuilder::spanned_token_list(tokens, Span::new(start, end)) }) } - pub fn tagged_token_list(input: Vec, tag: impl Into) -> TokenNode { - TokenNode::Nodes(input.spanned(tag.into().span)) + pub fn spanned_token_list(input: Vec, span: impl Into) -> TokenNode { + TokenNode::Nodes(input.spanned(span.into())) } pub fn op(input: impl Into) -> CurriedToken { @@ -287,11 +287,11 @@ impl TokenTreeBuilder { let end = b.pos; - TokenTreeBuilder::tagged_call(nodes, (start, end, None)) + TokenTreeBuilder::spanned_call(nodes, Span::new(start, end)) }) } - pub fn tagged_call(input: Vec, tag: impl Into) -> Tagged { + pub fn spanned_call(input: Vec, span: impl Into) -> Spanned { if input.len() == 0 { panic!("BUG: spanned call (TODO)") } @@ -301,7 +301,7 @@ impl TokenTreeBuilder { let head = input.next().unwrap(); let tail = input.collect(); - CallNode::new(Box::new(head), 
tail).tagged(tag.into()) + CallNode::new(Box::new(head), tail).spanned(span.into()) } fn consume_delimiter( diff --git a/src/parser/parse/tokens.rs b/src/parser/parse/tokens.rs index 94955d84d9..29061ed7a2 100644 --- a/src/parser/parse/tokens.rs +++ b/src/parser/parse/tokens.rs @@ -19,14 +19,14 @@ pub enum RawToken { impl RawToken { pub fn type_name(&self) -> &'static str { match self { - RawToken::Number(_) => "Number", + RawToken::Number(_) => "number", RawToken::Operator(..) => "operator", - RawToken::String(_) => "String", + RawToken::String(_) => "string", RawToken::Variable(_) => "variable", RawToken::ExternalCommand(_) => "external command", RawToken::ExternalWord => "external word", RawToken::GlobPattern => "glob pattern", - RawToken::Bare => "String", + RawToken::Bare => "string", } } } @@ -72,7 +72,7 @@ impl Token { pub fn extract_number(&self) -> Option> { match self.item { - RawToken::Number(number) => Some((number).spanned(self.span)), + RawToken::Number(number) => Some(number.spanned(self.span)), _ => None, } } From 540cc4016ef403f7d1265bb79e2075e451559c3c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Sat, 26 Oct 2019 20:01:58 -0500 Subject: [PATCH 095/184] Expand tilde in patterns. 
--- src/errors.rs | 8 +- src/evaluate/evaluator.rs | 2 +- src/parser/hir.rs | 12 +- src/parser/hir/baseline_parse/tests.rs | 32 +--- src/parser/hir/syntax_shape.rs | 14 +- .../hir/syntax_shape/expression/atom.rs | 5 +- .../hir/syntax_shape/expression/pattern.rs | 47 ++---- .../hir/syntax_shape/expression/string.rs | 8 +- .../syntax_shape/expression/variable_path.rs | 2 +- src/parser/hir/tokens_iterator.rs | 2 +- src/parser/parse/parser.rs | 4 +- src/parser/parse/token_tree.rs | 148 +++++++++++------- 12 files changed, 135 insertions(+), 149 deletions(-) diff --git a/src/errors.rs b/src/errors.rs index 11628dde4b..dfad5692a1 100644 --- a/src/errors.rs +++ b/src/errors.rs @@ -367,6 +367,10 @@ impl ShellError { // pub fn string(title: impl Into) -> ShellError { // ProximateShellError::String(StringError::new(title.into(), String::new())).start() // } + // + // pub(crate) fn unreachable(title: impl Into) -> ShellError { + // ShellError::untagged_runtime_error(&format!("BUG: Unreachable: {}", title.into())) + // } pub(crate) fn unimplemented(title: impl Into) -> ShellError { ShellError::untagged_runtime_error(&format!("Unimplemented: {}", title.into())) @@ -375,10 +379,6 @@ impl ShellError { pub(crate) fn unexpected(title: impl Into) -> ShellError { ShellError::untagged_runtime_error(&format!("Unexpected: {}", title.into())) } - - pub(crate) fn unreachable(title: impl Into) -> ShellError { - ShellError::untagged_runtime_error(&format!("BUG: Unreachable: {}", title.into())) - } } #[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Serialize, Deserialize)] diff --git a/src/evaluate/evaluator.rs b/src/evaluate/evaluator.rs index 75eb2f4667..f1caa21f1d 100644 --- a/src/evaluate/evaluator.rs +++ b/src/evaluate/evaluator.rs @@ -148,7 +148,7 @@ fn evaluate_literal(literal: Tagged<&hir::Literal>, source: &Text) -> Tagged int.into(), hir::Literal::Size(int, unit) => unit.compute(int), hir::Literal::String(tag) => Value::string(tag.slice(source)), - hir::Literal::GlobPattern 
=> Value::pattern(literal.tag().slice(source)), + hir::Literal::GlobPattern(pattern) => Value::pattern(pattern), hir::Literal::Bare => Value::string(literal.tag().slice(source)), }; diff --git a/src/parser/hir.rs b/src/parser/hir.rs index ac6423943d..7108b0f7f9 100644 --- a/src/parser/hir.rs +++ b/src/parser/hir.rs @@ -227,8 +227,8 @@ impl Expression { RawExpression::Literal(Literal::Bare).spanned(span) } - pub(crate) fn pattern(span: impl Into) -> Expression { - RawExpression::Literal(Literal::GlobPattern).spanned(span.into()) + pub(crate) fn pattern(inner: impl Into, outer: impl Into) -> Expression { + RawExpression::Literal(Literal::GlobPattern(inner.into())).spanned(outer.into()) } pub(crate) fn variable(inner: impl Into, outer: impl Into) -> Expression { @@ -297,7 +297,7 @@ pub enum Literal { Number(Number), Size(Number, Unit), String(Span), - GlobPattern, + GlobPattern(String), Bare, } @@ -315,7 +315,7 @@ impl std::fmt::Display for Tagged<&Literal> { Literal::Number(number) => write!(f, "{}", number), Literal::Size(number, unit) => write!(f, "{}{}", number, unit.as_str()), Literal::String(_) => write!(f, "String{{ {}..{} }}", span.start(), span.end()), - Literal::GlobPattern => write!(f, "Glob{{ {}..{} }}", span.start(), span.end()), + Literal::GlobPattern(_) => write!(f, "Glob{{ {}..{} }}", span.start(), span.end()), Literal::Bare => write!(f, "Bare{{ {}..{} }}", span.start(), span.end()), } } @@ -327,7 +327,7 @@ impl ToDebug for Spanned<&Literal> { Literal::Number(number) => write!(f, "{:?}", number), Literal::Size(number, unit) => write!(f, "{:?}{:?}", *number, unit), Literal::String(tag) => write!(f, "{}", tag.slice(source)), - Literal::GlobPattern => write!(f, "{}", self.span.slice(source)), + Literal::GlobPattern(_) => write!(f, "{}", self.span.slice(source)), Literal::Bare => write!(f, "{}", self.span.slice(source)), } } @@ -340,7 +340,7 @@ impl Literal { Literal::Size(..) => "size", Literal::String(..) 
=> "string", Literal::Bare => "string", - Literal::GlobPattern => "pattern", + Literal::GlobPattern(_) => "pattern", } } } diff --git a/src/parser/hir/baseline_parse/tests.rs b/src/parser/hir/baseline_parse/tests.rs index d3b9248496..ddd4af4930 100644 --- a/src/parser/hir/baseline_parse/tests.rs +++ b/src/parser/hir/baseline_parse/tests.rs @@ -6,7 +6,7 @@ use crate::parser::hir::syntax_shape::*; use crate::parser::hir::TokensIterator; use crate::parser::parse::token_tree_builder::{CurriedToken, TokenTreeBuilder as b}; use crate::parser::TokenNode; -use crate::{Span, SpannedItem, Tag, Tagged, Text}; +use crate::{Span, SpannedItem, Tag, Text}; use pretty_assertions::assert_eq; use std::fmt::Debug; @@ -63,7 +63,7 @@ fn test_parse_command() { vec![b::bare("ls"), b::sp(), b::pattern("*.txt")], |tokens| { let bare = tokens[0].expect_bare(); - let pat = tokens[2].span(); + let pattern = tokens[2].expect_pattern(); ClassifiedCommand::Internal(InternalCommand::new( "ls".to_string(), @@ -73,7 +73,7 @@ fn test_parse_command() { }, hir::Call { head: Box::new(hir::RawExpression::Command(bare).spanned(bare)), - positional: Some(vec![hir::Expression::pattern(pat)]), + positional: Some(vec![hir::Expression::pattern("*.txt", pattern)]), named: None, }, )) @@ -84,41 +84,19 @@ fn test_parse_command() { // ) }, ); - - parse_tokens( - VariablePathShape, - vec![ - b::var("cpu"), - b::op("."), - b::bare("amount"), - b::op("."), - b::string("max ghz"), - ], - |tokens| { - let (outer_var, inner_var) = tokens[0].expect_var(); - let amount = tokens[2].expect_bare(); - let (outer_max_ghz, _) = tokens[4].expect_string(); - - hir::Expression::path( - hir::Expression::variable(inner_var, outer_var), - vec!["amount".spanned(amount), "max ghz".spanned(outer_max_ghz)], - outer_var.until(outer_max_ghz), - ) - }, - ); } fn parse_tokens( shape: impl ExpandSyntax, tokens: Vec, - expected: impl FnOnce(Tagged<&[TokenNode]>) -> T, + expected: impl FnOnce(&[TokenNode]) -> T, ) { let tokens = 
b::token_list(tokens); let (tokens, source) = b::build(tokens); ExpandContext::with_empty(&Text::from(source), |context| { let tokens = tokens.expect_list(); - let mut iterator = TokensIterator::all(tokens.item, *context.span()); + let mut iterator = TokensIterator::all(tokens, *context.span()); let expr = expand_syntax(&shape, &mut iterator, &context); diff --git a/src/parser/hir/syntax_shape.rs b/src/parser/hir/syntax_shape.rs index 2169467e43..a38a77500b 100644 --- a/src/parser/hir/syntax_shape.rs +++ b/src/parser/hir/syntax_shape.rs @@ -497,18 +497,18 @@ pub(crate) fn expand_expr<'a, 'b, T: ExpandExpression>( token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, ) -> Result { - trace!(target: "nu::expand_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes.state(), context.source)); + trace!(target: "nu::expand_expression", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes.state(), context.source)); - let result = shape.expand_syntax(token_nodes, context); + let result = shape.expand_expr(token_nodes, context); match result { Err(err) => { - trace!(target: "nu::expand_syntax", "error :: {} :: {:?}", err, debug_tokens(token_nodes.state(), context.source)); + trace!(target: "nu::expand_expression", "error :: {} :: {:?}", err, debug_tokens(token_nodes.state(), context.source)); Err(err) } Ok(result) => { - trace!(target: "nu::expand_syntax", "ok :: {:?} :: {:?}", result, debug_tokens(token_nodes.state(), context.source)); + trace!(target: "nu::expand_expression", "ok :: {:?} :: {:?}", result, debug_tokens(token_nodes.state(), context.source)); Ok(result) } } @@ -719,11 +719,7 @@ impl TestSyntax for BareShape { let peeked = token_nodes.peek_any(); match peeked.node { - Some(TokenNode::Token(token)) => match token.item { - RawToken::Bare => Some(peeked), - _ => None, - }, - + Some(token) if token.is_bare() => Some(peeked), _ => None, } } diff --git a/src/parser/hir/syntax_shape/expression/atom.rs 
b/src/parser/hir/syntax_shape/expression/atom.rs index bb1b8065ec..888d9430e6 100644 --- a/src/parser/hir/syntax_shape/expression/atom.rs +++ b/src/parser/hir/syntax_shape/expression/atom.rs @@ -142,7 +142,10 @@ impl<'tokens> SpannedAtomicToken<'tokens> { Expression::external_command(*command, self.span) } AtomicToken::ExternalWord { text } => Expression::string(*text, self.span), - AtomicToken::GlobPattern { pattern } => Expression::pattern(*pattern), + AtomicToken::GlobPattern { pattern } => Expression::pattern( + expand_file_path(pattern.slice(context.source), context).to_string_lossy(), + self.span, + ), AtomicToken::Word { text } => Expression::string(*text, *text), AtomicToken::SquareDelimited { .. } => unimplemented!("into_hir"), AtomicToken::ParenDelimited { .. } => unimplemented!("into_hir"), diff --git a/src/parser/hir/syntax_shape/expression/pattern.rs b/src/parser/hir/syntax_shape/expression/pattern.rs index eab0b6e5bb..ed3bd610cd 100644 --- a/src/parser/hir/syntax_shape/expression/pattern.rs +++ b/src/parser/hir/syntax_shape/expression/pattern.rs @@ -1,7 +1,6 @@ use crate::parser::hir::syntax_shape::{ - expand_atom, expand_bare, expand_syntax, expression::expand_file_path, parse_single_node, - AtomicToken, ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, FallibleColorSyntax, - FlatShape, + expand_atom, expand_bare, expression::expand_file_path, AtomicToken, ExpandContext, + ExpandExpression, ExpandSyntax, ExpansionRule, FallibleColorSyntax, FlatShape, }; use crate::parser::{hir, hir::TokensIterator, Operator, RawToken, TokenNode}; use crate::prelude::*; @@ -72,43 +71,17 @@ impl ExpandExpression for PatternShape { token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, ) -> Result { - let pattern = expand_syntax(&BarePatternShape, token_nodes, context); + let atom = expand_atom(token_nodes, "pattern", context, ExpansionRule::new())?; - match pattern { - Ok(tag) => { - return Ok(hir::Expression::pattern(tag)); + match atom.item { + 
AtomicToken::Word { text: body } + | AtomicToken::String { body } + | AtomicToken::GlobPattern { pattern: body } => { + let path = expand_file_path(body.slice(context.source), context); + return Ok(hir::Expression::pattern(path.to_string_lossy(), atom.span)); } - Err(_) => {} + _ => return atom.into_hir(context, "pattern"), } - - parse_single_node(token_nodes, "Pattern", |token, token_tag, _| { - Ok(match token { - RawToken::GlobPattern => { - return Err(ShellError::unreachable( - "glob pattern after glob already returned", - )) - } - RawToken::Operator(..) => { - return Err(ShellError::unreachable("dot after glob already returned")) - } - RawToken::Bare => { - return Err(ShellError::unreachable("bare after glob already returned")) - } - - RawToken::Variable(tag) if tag.slice(context.source) == "it" => { - hir::Expression::it_variable(tag, token_tag) - } - RawToken::Variable(tag) => hir::Expression::variable(tag, token_tag), - RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag), - RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)), - RawToken::Number(_) => hir::Expression::bare(token_tag), - - RawToken::String(tag) => hir::Expression::file_path( - expand_file_path(tag.slice(context.source), context), - token_tag, - ), - }) - }) } } diff --git a/src/parser/hir/syntax_shape/expression/string.rs b/src/parser/hir/syntax_shape/expression/string.rs index 116ed8fd0d..46015376e8 100644 --- a/src/parser/hir/syntax_shape/expression/string.rs +++ b/src/parser/hir/syntax_shape/expression/string.rs @@ -3,7 +3,7 @@ use crate::parser::hir::syntax_shape::{ ExpansionRule, FallibleColorSyntax, FlatShape, TestSyntax, }; use crate::parser::hir::tokens_iterator::Peeked; -use crate::parser::{hir, hir::TokensIterator, RawToken, TokenNode}; +use crate::parser::{hir, hir::TokensIterator, RawToken}; use crate::prelude::*; #[derive(Debug, Copy, Clone)] @@ -118,11 +118,7 @@ impl TestSyntax for StringShape { let peeked = 
token_nodes.peek_any(); match peeked.node { - Some(TokenNode::Token(token)) => match token.item { - RawToken::String(_) => Some(peeked), - _ => None, - }, - + Some(token) if token.is_string() => Some(peeked), _ => None, } } diff --git a/src/parser/hir/syntax_shape/expression/variable_path.rs b/src/parser/hir/syntax_shape/expression/variable_path.rs index e983630348..5ed615a9e8 100644 --- a/src/parser/hir/syntax_shape/expression/variable_path.rs +++ b/src/parser/hir/syntax_shape/expression/variable_path.rs @@ -821,7 +821,7 @@ impl ExpandSyntax for MemberShape { if let Some(peeked) = string { let node = peeked.not_eof("column")?.commit(); - let (outer, inner) = node.expect_string(); + let (outer, inner) = node.as_string().unwrap(); return Ok(Member::String(outer, inner)); } diff --git a/src/parser/hir/tokens_iterator.rs b/src/parser/hir/tokens_iterator.rs index b3069247c9..8e2f4a8f88 100644 --- a/src/parser/hir/tokens_iterator.rs +++ b/src/parser/hir/tokens_iterator.rs @@ -566,7 +566,7 @@ impl<'content> TokensIterator<'content> { impl<'content> Iterator for TokensIterator<'content> { type Item = &'content TokenNode; - fn next(&mut self) -> Option<&'content TokenNode> { + fn next(&mut self) -> Option { next(self, self.state.skip_ws) } } diff --git a/src/parser/parse/parser.rs b/src/parser/parse/parser.rs index f7fce7c814..0dd1bc8566 100644 --- a/src/parser/parse/parser.rs +++ b/src/parser/parse/parser.rs @@ -666,7 +666,7 @@ fn is_glob_specific_char(c: char) -> bool { } fn is_start_glob_char(c: char) -> bool { - is_start_bare_char(c) || is_glob_specific_char(c) + is_start_bare_char(c) || is_glob_specific_char(c) || c == '.' } fn is_glob_char(c: char) -> bool { @@ -1147,7 +1147,7 @@ mod tests { fn test_patterns() { equal_tokens! { - "cp ../formats/*" -> b::pipeline(vec![vec![b::bare("cp"), b::sp(), b::op("."), b::op("."), b::pattern("/formats/*")]]) + "cp ../formats/*" -> b::pipeline(vec![vec![b::bare("cp"), b::sp(), b::pattern("../formats/*")]]) } equal_tokens! 
{ diff --git a/src/parser/parse/token_tree.rs b/src/parser/parse/token_tree.rs index cb335e925e..0d00dcff0d 100644 --- a/src/parser/parse/token_tree.rs +++ b/src/parser/parse/token_tree.rs @@ -155,6 +155,26 @@ impl TokenNode { } } + pub fn is_string(&self) -> bool { + match self { + TokenNode::Token(Spanned { + item: RawToken::String(_), + .. + }) => true, + _ => false, + } + } + + pub fn as_string(&self) -> Option<(Span, Span)> { + match self { + TokenNode::Token(Spanned { + item: RawToken::String(inner_span), + span: outer_span, + }) => Some((*outer_span, *inner_span)), + _ => None, + } + } + pub fn is_pattern(&self) -> bool { match self { TokenNode::Token(Spanned { @@ -200,16 +220,6 @@ impl TokenNode { } } - pub fn expect_external(&self) -> Span { - match self { - TokenNode::Token(Spanned { - item: RawToken::ExternalCommand(span), - .. - }) => *span, - _ => panic!("Only call expect_external if you checked is_external first"), - } - } - pub(crate) fn as_flag(&self, value: &str, source: &Text) -> Option> { match self { TokenNode::Flag( @@ -224,7 +234,7 @@ impl TokenNode { pub fn as_pipeline(&self) -> Result { match self { TokenNode::Pipeline(Spanned { item, .. 
}) => Ok(item.clone()), - _ => Err(ShellError::unimplemented("unimplemented")), + _ => Err(ShellError::type_error("pipeline", self.tagged_type_name())), } } @@ -234,49 +244,6 @@ impl TokenNode { _ => false, } } - - pub fn expect_string(&self) -> (Span, Span) { - match self { - TokenNode::Token(Spanned { - item: RawToken::String(inner_span), - span: outer_span, - }) => (*outer_span, *inner_span), - other => panic!("Expected string, found {:?}", other), - } - } -} - -#[cfg(test)] -impl TokenNode { - pub fn expect_list(&self) -> Tagged<&[TokenNode]> { - match self { - TokenNode::Nodes(Spanned { item, span }) => (&item[..]).tagged(Tag { - span: *span, - anchor: None, - }), - other => panic!("Expected list, found {:?}", other), - } - } - - pub fn expect_var(&self) -> (Span, Span) { - match self { - TokenNode::Token(Spanned { - item: RawToken::Variable(inner_span), - span: outer_span, - }) => (*outer_span, *inner_span), - other => panic!("Expected var, found {:?}", other), - } - } - - pub fn expect_bare(&self) -> Span { - match self { - TokenNode::Token(Spanned { - item: RawToken::Bare, - span, - }) => *span, - other => panic!("Expected bare, found {:?}", other), - } - } } #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)] @@ -328,3 +295,76 @@ pub struct PathNode { head: Box, tail: Vec, } + +#[cfg(test)] +impl TokenNode { + pub fn expect_external(&self) -> Span { + match self { + TokenNode::Token(Spanned { + item: RawToken::ExternalCommand(span), + .. 
+ }) => *span, + other => panic!( + "Only call expect_external if you checked is_external first, found {:?}", + other + ), + } + } + + pub fn expect_string(&self) -> (Span, Span) { + match self { + TokenNode::Token(Spanned { + item: RawToken::String(inner_span), + span: outer_span, + }) => (*outer_span, *inner_span), + other => panic!("Expected string, found {:?}", other), + } + } + + pub fn expect_list(&self) -> &[TokenNode] { + match self { + TokenNode::Nodes(token_nodes) => &token_nodes[..], + other => panic!("Expected list, found {:?}", other), + } + } + + pub fn expect_pattern(&self) -> Span { + match self { + TokenNode::Token(Spanned { + item: RawToken::GlobPattern, + span: outer_span, + }) => *outer_span, + other => panic!("Expected pattern, found {:?}", other), + } + } + + pub fn expect_var(&self) -> (Span, Span) { + match self { + TokenNode::Token(Spanned { + item: RawToken::Variable(inner_span), + span: outer_span, + }) => (*outer_span, *inner_span), + other => panic!("Expected var, found {:?}", other), + } + } + + pub fn expect_dot(&self) -> Span { + match self { + TokenNode::Token(Spanned { + item: RawToken::Operator(Operator::Dot), + span, + }) => *span, + other => panic!("Expected dot, found {:?}", other), + } + } + + pub fn expect_bare(&self) -> Span { + match self { + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => *span, + other => panic!("Expected bare, found {:?}", other), + } + } +} From aed386b3cd515dc3b57abf62161606567bb36788 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Mon, 28 Oct 2019 05:58:39 +1300 Subject: [PATCH 096/184] Always save history, add history command --- src/cli.rs | 3 +++ src/commands.rs | 2 ++ src/commands/history.rs | 49 +++++++++++++++++++++++++++++++++++++++++ 3 files changed, 54 insertions(+) create mode 100644 src/commands/history.rs diff --git a/src/cli.rs b/src/cli.rs index e88ee054fe..d52a55e267 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -306,6 +306,7 @@ pub async fn cli() -> Result<(), 
Box> { whole_stream_command(SkipWhile), per_item_command(Enter), per_item_command(Help), + per_item_command(History), whole_stream_command(Exit), whole_stream_command(Autoview), whole_stream_command(Pivot), @@ -413,6 +414,7 @@ pub async fn cli() -> Result<(), Box> { match process_line(readline, &mut context).await { LineResult::Success(line) => { rl.add_history_entry(line.clone()); + let _ = rl.save_history(&History::path()); } LineResult::CtrlC => { @@ -440,6 +442,7 @@ pub async fn cli() -> Result<(), Box> { LineResult::Error(line, err) => { rl.add_history_entry(line.clone()); + let _ = rl.save_history(&History::path()); context.with_host(|host| { print_err(err, host, &Text::from(line)); diff --git a/src/commands.rs b/src/commands.rs index 7f0fa0a25a..c75ca81192 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -32,6 +32,7 @@ pub(crate) mod from_yaml; pub(crate) mod get; pub(crate) mod group_by; pub(crate) mod help; +pub(crate) mod history; pub(crate) mod last; pub(crate) mod lines; pub(crate) mod ls; @@ -106,6 +107,7 @@ pub(crate) use from_yaml::FromYML; pub(crate) use get::Get; pub(crate) use group_by::GroupBy; pub(crate) use help::Help; +pub(crate) use history::History; pub(crate) use last::Last; pub(crate) use lines::Lines; pub(crate) use ls::LS; diff --git a/src/commands/history.rs b/src/commands/history.rs new file mode 100644 index 0000000000..fdc6d655a2 --- /dev/null +++ b/src/commands/history.rs @@ -0,0 +1,49 @@ +use crate::cli::History as HistoryFile; +use crate::commands::PerItemCommand; +use crate::errors::ShellError; +use crate::parser::registry::{self}; +use crate::prelude::*; +use std::fs::File; +use std::io::{BufRead, BufReader}; + +pub struct History; + +impl PerItemCommand for History { + fn name(&self) -> &str { + "history" + } + + fn signature(&self) -> registry::Signature { + Signature::build("history") + } + + fn usage(&self) -> &str { + "Display command history." 
+ } + + fn run( + &self, + call_info: &CallInfo, + _registry: &CommandRegistry, + _raw_args: &RawCommandArgs, + _input: Tagged, + ) -> Result { + let tag = call_info.name_tag.clone(); + + let stream = async_stream! { + let history_path = HistoryFile::path(); + let file = File::open(history_path); + if let Ok(file) = file { + let reader = BufReader::new(file); + for line in reader.lines() { + if let Ok(line) = line { + yield ReturnSuccess::value(Value::string(line).tagged(tag.clone())); + } + } + } else { + yield Err(ShellError::labeled_error("Could not open history", "history file could not be opened", tag.clone())); + } + }; + Ok(stream.to_output_stream()) + } +} From fbd980f8b03b4555b1eadb3f077c6e94d1a395a8 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Mon, 28 Oct 2019 18:15:35 +1300 Subject: [PATCH 097/184] Add descriptions to arguments --- src/commands/cd.rs | 6 +++- src/commands/config.rs | 16 ++++++---- src/commands/cp.rs | 7 ++--- src/commands/date.rs | 4 ++- src/commands/echo.rs | 2 +- src/commands/enter.rs | 6 +++- src/commands/exit.rs | 2 +- src/commands/fetch.rs | 8 +++-- src/commands/first.rs | 6 +++- src/commands/from_csv.rs | 3 +- src/commands/from_json.rs | 2 +- src/commands/from_ssv.rs | 8 +++-- src/commands/from_tsv.rs | 3 +- src/commands/get.rs | 11 +++++-- src/commands/group_by.rs | 6 +++- src/commands/help.rs | 59 ++++++++++++++++++++++++++++++------ src/commands/last.rs | 6 +++- src/commands/ls.rs | 6 +++- src/commands/mkdir.rs | 2 +- src/commands/mv.rs | 13 ++++++-- src/commands/nth.rs | 6 +++- src/commands/open.rs | 8 +++-- src/commands/pick.rs | 2 +- src/commands/pivot.rs | 9 ++++-- src/commands/post.rs | 26 +++++++++++----- src/commands/reject.rs | 2 +- src/commands/rm.rs | 11 ++++--- src/commands/save.rs | 7 +++-- src/commands/skip_while.rs | 6 +++- src/commands/sort_by.rs | 2 +- src/commands/split_column.rs | 10 ++++-- src/commands/split_row.rs | 6 +++- src/commands/table.rs | 6 +++- src/commands/to_csv.rs | 5 ++- 
src/commands/to_tsv.rs | 5 ++- src/commands/where_.rs | 6 +++- src/commands/which_.rs | 6 +++- src/data/command.rs | 6 ++-- src/parser/parse_command.rs | 18 +++++------ src/parser/registry.rs | 55 +++++++++++++++++++++++---------- src/plugins/add.rs | 11 ++++--- src/plugins/binaryview.rs | 2 +- src/plugins/edit.rs | 12 ++++++-- src/plugins/embed.rs | 2 +- src/plugins/inc.rs | 8 ++--- src/plugins/match.rs | 4 +-- src/plugins/skip.rs | 2 +- src/plugins/str.rs | 10 +++--- 48 files changed, 308 insertions(+), 121 deletions(-) diff --git a/src/commands/cd.rs b/src/commands/cd.rs index a3f5a8d89b..65cc45231d 100644 --- a/src/commands/cd.rs +++ b/src/commands/cd.rs @@ -10,7 +10,11 @@ impl WholeStreamCommand for CD { } fn signature(&self) -> Signature { - Signature::build("cd").optional("directory", SyntaxShape::Path) + Signature::build("cd").optional( + "directory", + SyntaxShape::Path, + "the directory to change to", + ) } fn usage(&self) -> &str { diff --git a/src/commands/config.rs b/src/commands/config.rs index 9cde5213de..a85920e455 100644 --- a/src/commands/config.rs +++ b/src/commands/config.rs @@ -26,12 +26,16 @@ impl WholeStreamCommand for Config { fn signature(&self) -> Signature { Signature::build("config") - .named("load", SyntaxShape::Path) - .named("set", SyntaxShape::Any) - .named("get", SyntaxShape::Any) - .named("remove", SyntaxShape::Any) - .switch("clear") - .switch("path") + .named( + "load", + SyntaxShape::Path, + "load the config from the path give", + ) + .named("set", SyntaxShape::Any, "set a value in the config") + .named("get", SyntaxShape::Any, "get a value from the config") + .named("remove", SyntaxShape::Any, "remove a value from the config") + .switch("clear", "clear the config") + .switch("path", "return the path to the config file") } fn usage(&self) -> &str { diff --git a/src/commands/cp.rs b/src/commands/cp.rs index bf20c74ce9..5ca21adb1e 100644 --- a/src/commands/cp.rs +++ b/src/commands/cp.rs @@ -21,10 +21,9 @@ impl PerItemCommand for 
Cpy { fn signature(&self) -> Signature { Signature::build("cp") - .required("src", SyntaxShape::Pattern) - .required("dst", SyntaxShape::Path) - .named("file", SyntaxShape::Any) - .switch("recursive") + .required("src", SyntaxShape::Pattern, "the place to copy from") + .required("dst", SyntaxShape::Path, "the place to copy to") + .switch("recursive", "copy recursively through subdirectories") } fn usage(&self) -> &str { diff --git a/src/commands/date.rs b/src/commands/date.rs index bff6b550f7..24ebc876e4 100644 --- a/src/commands/date.rs +++ b/src/commands/date.rs @@ -17,7 +17,9 @@ impl WholeStreamCommand for Date { } fn signature(&self) -> Signature { - Signature::build("date").switch("utc").switch("local") + Signature::build("date") + .switch("utc", "use universal time (UTC)") + .switch("local", "use the local time") } fn usage(&self) -> &str { diff --git a/src/commands/echo.rs b/src/commands/echo.rs index 4483f91371..db4993d017 100644 --- a/src/commands/echo.rs +++ b/src/commands/echo.rs @@ -12,7 +12,7 @@ impl PerItemCommand for Echo { } fn signature(&self) -> Signature { - Signature::build("echo").rest(SyntaxShape::Any) + Signature::build("echo").rest(SyntaxShape::Any, "the values to echo") } fn usage(&self) -> &str { diff --git a/src/commands/enter.rs b/src/commands/enter.rs index efefd8394f..59f7ca0f21 100644 --- a/src/commands/enter.rs +++ b/src/commands/enter.rs @@ -14,7 +14,11 @@ impl PerItemCommand for Enter { } fn signature(&self) -> registry::Signature { - Signature::build("enter").required("location", SyntaxShape::Path) + Signature::build("enter").required( + "location", + SyntaxShape::Path, + "the location to create a new shell from", + ) } fn usage(&self) -> &str { diff --git a/src/commands/exit.rs b/src/commands/exit.rs index 8a382d8b7d..b7db7cc340 100644 --- a/src/commands/exit.rs +++ b/src/commands/exit.rs @@ -11,7 +11,7 @@ impl WholeStreamCommand for Exit { } fn signature(&self) -> Signature { - Signature::build("exit").switch("now") + 
Signature::build("exit").switch("now", "exit out of the shell immediately") } fn usage(&self) -> &str { diff --git a/src/commands/fetch.rs b/src/commands/fetch.rs index e66536729f..703c3279c5 100644 --- a/src/commands/fetch.rs +++ b/src/commands/fetch.rs @@ -19,8 +19,12 @@ impl PerItemCommand for Fetch { fn signature(&self) -> Signature { Signature::build(self.name()) - .required("path", SyntaxShape::Path) - .switch("raw") + .required( + "path", + SyntaxShape::Path, + "the URL to fetch the contents from", + ) + .switch("raw", "fetch contents as text rather than a table") } fn usage(&self) -> &str { diff --git a/src/commands/first.rs b/src/commands/first.rs index 4c1c3b8c35..a9a287978a 100644 --- a/src/commands/first.rs +++ b/src/commands/first.rs @@ -16,7 +16,11 @@ impl WholeStreamCommand for First { } fn signature(&self) -> Signature { - Signature::build("first").optional("rows", SyntaxShape::Int) + Signature::build("first").optional( + "rows", + SyntaxShape::Int, + "starting from the front, the number of rows to return", + ) } fn usage(&self) -> &str { diff --git a/src/commands/from_csv.rs b/src/commands/from_csv.rs index 877c8dc166..7442a07fc9 100644 --- a/src/commands/from_csv.rs +++ b/src/commands/from_csv.rs @@ -16,7 +16,8 @@ impl WholeStreamCommand for FromCSV { } fn signature(&self) -> Signature { - Signature::build("from-csv").switch("headerless") + Signature::build("from-csv") + .switch("headerless", "don't treat the first row as column names") } fn usage(&self) -> &str { diff --git a/src/commands/from_json.rs b/src/commands/from_json.rs index 0687e348ac..01883818c6 100644 --- a/src/commands/from_json.rs +++ b/src/commands/from_json.rs @@ -15,7 +15,7 @@ impl WholeStreamCommand for FromJSON { } fn signature(&self) -> Signature { - Signature::build("from-json").switch("objects") + Signature::build("from-json").switch("objects", "treat each line as a separate value") } fn usage(&self) -> &str { diff --git a/src/commands/from_ssv.rs b/src/commands/from_ssv.rs 
index f14d89356a..aaf6018fb7 100644 --- a/src/commands/from_ssv.rs +++ b/src/commands/from_ssv.rs @@ -21,8 +21,12 @@ impl WholeStreamCommand for FromSSV { fn signature(&self) -> Signature { Signature::build(STRING_REPRESENTATION) - .switch("headerless") - .named("minimum-spaces", SyntaxShape::Int) + .switch("headerless", "don't treat the first row as column names") + .named( + "minimum-spaces", + SyntaxShape::Int, + "the mininum spaces to separate columns", + ) } fn usage(&self) -> &str { diff --git a/src/commands/from_tsv.rs b/src/commands/from_tsv.rs index 80951b71aa..2284e95573 100644 --- a/src/commands/from_tsv.rs +++ b/src/commands/from_tsv.rs @@ -16,7 +16,8 @@ impl WholeStreamCommand for FromTSV { } fn signature(&self) -> Signature { - Signature::build("from-tsv").switch("headerless") + Signature::build("from-tsv") + .switch("headerless", "don't treat the first row as column names") } fn usage(&self) -> &str { diff --git a/src/commands/get.rs b/src/commands/get.rs index 21dbe6b0a7..70508bdb7a 100644 --- a/src/commands/get.rs +++ b/src/commands/get.rs @@ -20,8 +20,15 @@ impl WholeStreamCommand for Get { fn signature(&self) -> Signature { Signature::build("get") - .required("member", SyntaxShape::ColumnPath) - .rest(SyntaxShape::ColumnPath) + .required( + "member", + SyntaxShape::ColumnPath, + "the path to the data to get", + ) + .rest( + SyntaxShape::ColumnPath, + "optionally return additional data by path", + ) } fn usage(&self) -> &str { diff --git a/src/commands/group_by.rs b/src/commands/group_by.rs index 7f5f496408..f36d3f57dd 100644 --- a/src/commands/group_by.rs +++ b/src/commands/group_by.rs @@ -16,7 +16,11 @@ impl WholeStreamCommand for GroupBy { } fn signature(&self) -> Signature { - Signature::build("group-by").required("column_name", SyntaxShape::String) + Signature::build("group-by").required( + "column_name", + SyntaxShape::String, + "the name of the column to group by", + ) } fn usage(&self) -> &str { diff --git a/src/commands/help.rs 
b/src/commands/help.rs index 04e03fb10d..d5f755f67d 100644 --- a/src/commands/help.rs +++ b/src/commands/help.rs @@ -12,7 +12,7 @@ impl PerItemCommand for Help { } fn signature(&self) -> registry::Signature { - Signature::build("help").rest(SyntaxShape::Any) + Signature::build("help").rest(SyntaxShape::Any, "the name of command(s) to get help on") } fn usage(&self) -> &str { @@ -65,8 +65,8 @@ impl PerItemCommand for Help { one_liner.push_str("{flags} "); } - for positional in signature.positional { - match positional { + for positional in &signature.positional { + match &positional.0 { PositionalType::Mandatory(name, _m) => { one_liner.push_str(&format!("<{}> ", name)); } @@ -77,25 +77,66 @@ impl PerItemCommand for Help { } if signature.rest_positional.is_some() { - one_liner.push_str(" ...args"); + one_liner.push_str(&format!(" ...args",)); } + long_desc.push_str(&format!("\nUsage:\n > {}\n", one_liner)); + if signature.positional.len() > 0 || signature.rest_positional.is_some() { + long_desc.push_str("\nparameters:\n"); + for positional in signature.positional { + match positional.0 { + PositionalType::Mandatory(name, _m) => { + long_desc + .push_str(&format!(" <{}> {}\n", name, positional.1)); + } + PositionalType::Optional(name, _o) => { + long_desc + .push_str(&format!(" ({}) {}\n", name, positional.1)); + } + } + } + if signature.rest_positional.is_some() { + long_desc.push_str(&format!( + " ...args{} {}\n", + if signature.rest_positional.is_some() { + ":" + } else { + "" + }, + signature.rest_positional.unwrap().1 + )); + } + } if signature.named.len() > 0 { long_desc.push_str("\nflags:\n"); for (flag, ty) in signature.named { - match ty { + match ty.0 { NamedType::Switch => { - long_desc.push_str(&format!(" --{}\n", flag)); + long_desc.push_str(&format!( + " --{}{} {}\n", + flag, + if ty.1.len() > 0 { ":" } else { "" }, + ty.1 + )); } NamedType::Mandatory(m) => { long_desc.push_str(&format!( - " --{} <{}> (required parameter)\n", - flag, m + " --{} <{}> 
(required parameter){} {}\n", + flag, + m, + if ty.1.len() > 0 { ":" } else { "" }, + ty.1 )); } NamedType::Optional(o) => { - long_desc.push_str(&format!(" --{} <{}>\n", flag, o)); + long_desc.push_str(&format!( + " --{} <{}>{} {}\n", + flag, + o, + if ty.1.len() > 0 { ":" } else { "" }, + ty.1 + )); } } } diff --git a/src/commands/last.rs b/src/commands/last.rs index 04db0f4c48..abb10f5fce 100644 --- a/src/commands/last.rs +++ b/src/commands/last.rs @@ -16,7 +16,11 @@ impl WholeStreamCommand for Last { } fn signature(&self) -> Signature { - Signature::build("last").optional("rows", SyntaxShape::Number) + Signature::build("last").optional( + "rows", + SyntaxShape::Number, + "starting from the back, the number of rows to return", + ) } fn usage(&self) -> &str { diff --git a/src/commands/ls.rs b/src/commands/ls.rs index db229ecd0c..b108a53c0c 100644 --- a/src/commands/ls.rs +++ b/src/commands/ls.rs @@ -16,7 +16,11 @@ impl WholeStreamCommand for LS { } fn signature(&self) -> Signature { - Signature::build("ls").optional("path", SyntaxShape::Pattern) + Signature::build("ls").optional( + "path", + SyntaxShape::Pattern, + "a path to get the directory contents from", + ) } fn usage(&self) -> &str { diff --git a/src/commands/mkdir.rs b/src/commands/mkdir.rs index 8bf8a97d4a..e801a27530 100644 --- a/src/commands/mkdir.rs +++ b/src/commands/mkdir.rs @@ -17,7 +17,7 @@ impl PerItemCommand for Mkdir { } fn signature(&self) -> Signature { - Signature::build("mkdir").rest(SyntaxShape::Path) + Signature::build("mkdir").rest(SyntaxShape::Path, "the name(s) of the path(s) to create") } fn usage(&self) -> &str { diff --git a/src/commands/mv.rs b/src/commands/mv.rs index 2ace1fa05f..a9a11f5064 100644 --- a/src/commands/mv.rs +++ b/src/commands/mv.rs @@ -20,9 +20,16 @@ impl PerItemCommand for Move { fn signature(&self) -> Signature { Signature::build("mv") - .required("source", SyntaxShape::Pattern) - .required("destination", SyntaxShape::Path) - .named("file", SyntaxShape::Any) + 
.required( + "source", + SyntaxShape::Pattern, + "the location to move files/directories from", + ) + .required( + "destination", + SyntaxShape::Path, + "the location to move files/directories to", + ) } fn usage(&self) -> &str { diff --git a/src/commands/nth.rs b/src/commands/nth.rs index 18bb6f23af..bcd3057879 100644 --- a/src/commands/nth.rs +++ b/src/commands/nth.rs @@ -16,7 +16,11 @@ impl WholeStreamCommand for Nth { } fn signature(&self) -> Signature { - Signature::build("nth").required("row number", SyntaxShape::Any) + Signature::build("nth").required( + "row number", + SyntaxShape::Any, + "the number of the row to return", + ) } fn usage(&self) -> &str { diff --git a/src/commands/open.rs b/src/commands/open.rs index 2972144bcd..19c7d539ed 100644 --- a/src/commands/open.rs +++ b/src/commands/open.rs @@ -16,8 +16,12 @@ impl PerItemCommand for Open { fn signature(&self) -> Signature { Signature::build(self.name()) - .required("path", SyntaxShape::Path) - .switch("raw") + .required( + "path", + SyntaxShape::Path, + "the file path to load values from", + ) + .switch("raw", "load content as a string insead of a table") } fn usage(&self) -> &str { diff --git a/src/commands/pick.rs b/src/commands/pick.rs index 605b7f8890..b9c4e53bcc 100644 --- a/src/commands/pick.rs +++ b/src/commands/pick.rs @@ -17,7 +17,7 @@ impl WholeStreamCommand for Pick { } fn signature(&self) -> Signature { - Signature::build("pick").rest(SyntaxShape::Any) + Signature::build("pick").rest(SyntaxShape::Any, "the columns to select from the table") } fn usage(&self) -> &str { diff --git a/src/commands/pivot.rs b/src/commands/pivot.rs index e52ab90924..0556999f2d 100644 --- a/src/commands/pivot.rs +++ b/src/commands/pivot.rs @@ -21,9 +21,12 @@ impl WholeStreamCommand for Pivot { fn signature(&self) -> Signature { Signature::build("pivot") - .switch("header-row") - .switch("ignore-titles") - .rest(SyntaxShape::String) + .switch("header-row", "treat the first row as column names") + 
.switch("ignore-titles", "don't pivot the column names into values") + .rest( + SyntaxShape::String, + "the names to give columns once pivoted", + ) } fn usage(&self) -> &str { diff --git a/src/commands/post.rs b/src/commands/post.rs index 374616d2e5..eb06cdbae5 100644 --- a/src/commands/post.rs +++ b/src/commands/post.rs @@ -25,13 +25,25 @@ impl PerItemCommand for Post { fn signature(&self) -> Signature { Signature::build(self.name()) - .required("path", SyntaxShape::Any) - .required("body", SyntaxShape::Any) - .named("user", SyntaxShape::Any) - .named("password", SyntaxShape::Any) - .named("content-type", SyntaxShape::Any) - .named("content-length", SyntaxShape::Any) - .switch("raw") + .required("path", SyntaxShape::Any, "the URL to post to") + .required("body", SyntaxShape::Any, "the contents of the post body") + .named("user", SyntaxShape::Any, "the username when authenticating") + .named( + "password", + SyntaxShape::Any, + "the password when authenticating", + ) + .named( + "content-type", + SyntaxShape::Any, + "the MIME type of content to post", + ) + .named( + "content-length", + SyntaxShape::Any, + "the length of the content being posted", + ) + .switch("raw", "return values as a string instead of a table") } fn usage(&self) -> &str { diff --git a/src/commands/reject.rs b/src/commands/reject.rs index 3521635233..f02a72aa4c 100644 --- a/src/commands/reject.rs +++ b/src/commands/reject.rs @@ -16,7 +16,7 @@ impl WholeStreamCommand for Reject { } fn signature(&self) -> Signature { - Signature::build("reject").rest(SyntaxShape::Member) + Signature::build("reject").rest(SyntaxShape::Member, "the names of columns to remove") } fn usage(&self) -> &str { diff --git a/src/commands/rm.rs b/src/commands/rm.rs index c1e671f4b0..76222d2c28 100644 --- a/src/commands/rm.rs +++ b/src/commands/rm.rs @@ -21,13 +21,16 @@ impl PerItemCommand for Remove { fn signature(&self) -> Signature { Signature::build("rm") - .required("path", SyntaxShape::Pattern) - .switch("trash") - 
.switch("recursive") + .required("path", SyntaxShape::Pattern, "the file path to remove") + .switch( + "trash", + "use the platform's recycle bin instead of permanently deleting", + ) + .switch("recursive", "delete subdirectories recursively") } fn usage(&self) -> &str { - "Remove a file. Append '--recursive' to remove directories and '--trash' for seding it to system recycle bin" + "Remove a file" } fn run( diff --git a/src/commands/save.rs b/src/commands/save.rs index ac48fe280f..45063dca4e 100644 --- a/src/commands/save.rs +++ b/src/commands/save.rs @@ -93,8 +93,11 @@ impl WholeStreamCommand for Save { fn signature(&self) -> Signature { Signature::build("save") - .optional("path", SyntaxShape::Path) - .switch("raw") + .optional("path", SyntaxShape::Path, "the path to save contents to") + .switch( + "raw", + "treat values as-is rather than auto-converting based on file extension", + ) } fn usage(&self) -> &str { diff --git a/src/commands/skip_while.rs b/src/commands/skip_while.rs index a768ae6133..e8bec7dac2 100644 --- a/src/commands/skip_while.rs +++ b/src/commands/skip_while.rs @@ -17,7 +17,11 @@ impl WholeStreamCommand for SkipWhile { fn signature(&self) -> Signature { Signature::build("skip-while") - .required("condition", SyntaxShape::Block) + .required( + "condition", + SyntaxShape::Block, + "the condition that must be met to continue skipping", + ) .filter() } diff --git a/src/commands/sort_by.rs b/src/commands/sort_by.rs index 1e6b87491a..d384207c92 100644 --- a/src/commands/sort_by.rs +++ b/src/commands/sort_by.rs @@ -15,7 +15,7 @@ impl WholeStreamCommand for SortBy { } fn signature(&self) -> Signature { - Signature::build("sort-by").rest(SyntaxShape::String) + Signature::build("sort-by").rest(SyntaxShape::String, "the column(s) to sort by") } fn usage(&self) -> &str { diff --git a/src/commands/split_column.rs b/src/commands/split_column.rs index d174283023..fd872d452d 100644 --- a/src/commands/split_column.rs +++ b/src/commands/split_column.rs @@ -21,9 
+21,13 @@ impl WholeStreamCommand for SplitColumn { fn signature(&self) -> Signature { Signature::build("split-column") - .required("separator", SyntaxShape::Any) - .switch("collapse-empty") - .rest(SyntaxShape::Member) + .required( + "separator", + SyntaxShape::Any, + "the character that denotes what separates columns", + ) + .switch("collapse-empty", "remove empty columns") + .rest(SyntaxShape::Member, "column names to give the new columns") } fn usage(&self) -> &str { diff --git a/src/commands/split_row.rs b/src/commands/split_row.rs index 94f7564b40..6c848c325a 100644 --- a/src/commands/split_row.rs +++ b/src/commands/split_row.rs @@ -17,7 +17,11 @@ impl WholeStreamCommand for SplitRow { } fn signature(&self) -> Signature { - Signature::build("split-row").required("separator", SyntaxShape::Any) + Signature::build("split-row").required( + "separator", + SyntaxShape::Any, + "the character that denotes what separates rows", + ) } fn usage(&self) -> &str { diff --git a/src/commands/table.rs b/src/commands/table.rs index 8ad2c246db..f8cdcd13c7 100644 --- a/src/commands/table.rs +++ b/src/commands/table.rs @@ -11,7 +11,11 @@ impl WholeStreamCommand for Table { } fn signature(&self) -> Signature { - Signature::build("table").named("start_number", SyntaxShape::Number) + Signature::build("table").named( + "start_number", + SyntaxShape::Number, + "row number to start viewing from", + ) } fn usage(&self) -> &str { diff --git a/src/commands/to_csv.rs b/src/commands/to_csv.rs index 90f4837453..d2b46d9f88 100644 --- a/src/commands/to_csv.rs +++ b/src/commands/to_csv.rs @@ -16,7 +16,10 @@ impl WholeStreamCommand for ToCSV { } fn signature(&self) -> Signature { - Signature::build("to-csv").switch("headerless") + Signature::build("to-csv").switch( + "headerless", + "do not output the columns names as the first row", + ) } fn usage(&self) -> &str { diff --git a/src/commands/to_tsv.rs b/src/commands/to_tsv.rs index 83cb4a07f1..7857d1eeec 100644 --- a/src/commands/to_tsv.rs +++ 
b/src/commands/to_tsv.rs @@ -16,7 +16,10 @@ impl WholeStreamCommand for ToTSV { } fn signature(&self) -> Signature { - Signature::build("to-tsv").switch("headerless") + Signature::build("to-tsv").switch( + "headerless", + "do not output the column names as the first row", + ) } fn usage(&self) -> &str { diff --git a/src/commands/where_.rs b/src/commands/where_.rs index 9e3c4d2c07..ce7367b1a6 100644 --- a/src/commands/where_.rs +++ b/src/commands/where_.rs @@ -12,7 +12,11 @@ impl PerItemCommand for Where { } fn signature(&self) -> registry::Signature { - Signature::build("where").required("condition", SyntaxShape::Block) + Signature::build("where").required( + "condition", + SyntaxShape::Block, + "the condition that must match", + ) } fn usage(&self) -> &str { diff --git a/src/commands/which_.rs b/src/commands/which_.rs index e3b6d1c96c..405efe7dca 100644 --- a/src/commands/which_.rs +++ b/src/commands/which_.rs @@ -13,7 +13,11 @@ impl WholeStreamCommand for Which { } fn signature(&self) -> Signature { - Signature::build("which").required("name", SyntaxShape::Any) + Signature::build("which").required( + "name", + SyntaxShape::Any, + "the name of the command to find the path to", + ) } fn usage(&self) -> &str { diff --git a/src/data/command.rs b/src/data/command.rs index 25301e6fa1..5993dda6f5 100644 --- a/src/data/command.rs +++ b/src/data/command.rs @@ -45,12 +45,12 @@ fn signature_dict(signature: Signature, tag: impl Into) -> Tagged { let mut sig = TaggedListBuilder::new(&tag); for arg in signature.positional.iter() { - let is_required = match arg { + let is_required = match arg.0 { PositionalType::Mandatory(_, _) => true, PositionalType::Optional(_, _) => false, }; - sig.insert_tagged(for_spec(arg.name(), "argument", is_required, &tag)); + sig.insert_tagged(for_spec(arg.0.name(), "argument", is_required, &tag)); } if let Some(_) = signature.rest_positional { @@ -59,7 +59,7 @@ fn signature_dict(signature: Signature, tag: impl Into) -> Tagged { } for (name, ty) in 
signature.named.iter() { - match ty { + match ty.0 { NamedType::Mandatory(_) => sig.insert_tagged(for_spec(name, "flag", true, &tag)), NamedType::Optional(_) => sig.insert_tagged(for_spec(name, "flag", false, &tag)), NamedType::Switch => sig.insert_tagged(for_spec(name, "switch", false, &tag)), diff --git a/src/parser/parse_command.rs b/src/parser/parse_command.rs index 01ba60b491..23c4d27036 100644 --- a/src/parser/parse_command.rs +++ b/src/parser/parse_command.rs @@ -25,7 +25,7 @@ pub fn parse_command_tail( for (name, kind) in &config.named { trace!(target: "nu::parse", "looking for {} : {:?}", name, kind); - match kind { + match &kind.0 { NamedType::Switch => { let flag = extract_switch(name, tail, context.source()); @@ -92,12 +92,12 @@ pub fn parse_command_tail( for arg in &config.positional { trace!(target: "nu::parse", "Processing positional {:?}", arg); - match arg { + match &arg.0 { PositionalType::Mandatory(..) => { if tail.at_end_possible_ws() { return Err(ShellError::argument_error( config.name.clone(), - ArgumentError::MissingMandatoryPositional(arg.name().to_string()), + ArgumentError::MissingMandatoryPositional(arg.0.name().to_string()), Tag { span: command_span, anchor: None, @@ -113,14 +113,14 @@ pub fn parse_command_tail( } } - let result = expand_expr(&spaced(arg.syntax_type()), tail, context)?; + let result = expand_expr(&spaced(arg.0.syntax_type()), tail, context)?; positional.push(result); } trace_remaining("after positional", tail.clone(), context.source()); - if let Some(syntax_type) = config.rest_positional { + if let Some((syntax_type, _)) = config.rest_positional { let mut out = vec![]; loop { @@ -207,7 +207,7 @@ impl ColorSyntax for CommandTailShape { for (name, kind) in &signature.named { trace!(target: "nu::color_syntax", "looking for {} : {:?}", name, kind); - match kind { + match &kind.0 { NamedType::Switch => { match token_nodes.extract(|t| t.as_flag(name, context.source())) { Some((pos, flag)) => args.insert(pos, 
vec![flag.color()]), @@ -300,7 +300,7 @@ impl ColorSyntax for CommandTailShape { for arg in &signature.positional { trace!("Processing positional {:?}", arg); - match arg { + match arg.0 { PositionalType::Mandatory(..) => { if token_nodes.at_end() { break; @@ -327,7 +327,7 @@ impl ColorSyntax for CommandTailShape { // If no match, we should roll back any whitespace we chomped color_fallible_syntax( - &arg.syntax_type(), + &arg.0.syntax_type(), token_nodes, context, &mut shapes, @@ -343,7 +343,7 @@ impl ColorSyntax for CommandTailShape { trace_remaining("after positional", token_nodes.clone(), context.source()); - if let Some(syntax_type) = signature.rest_positional { + if let Some((syntax_type, _)) = signature.rest_positional { loop { if token_nodes.at_end_possible_ws() { break; diff --git a/src/parser/registry.rs b/src/parser/registry.rs index 790925e800..ff0a98ae85 100644 --- a/src/parser/registry.rs +++ b/src/parser/registry.rs @@ -58,17 +58,19 @@ impl PositionalType { } } +type Description = String; + #[derive(Debug, Serialize, Deserialize, Clone, new)] pub struct Signature { pub name: String, #[new(default)] pub usage: String, #[new(default)] - pub positional: Vec, + pub positional: Vec<(PositionalType, Description)>, #[new(value = "None")] - pub rest_positional: Option, + pub rest_positional: Option<(SyntaxShape, Description)>, #[new(default)] - pub named: IndexMap, + pub named: IndexMap, #[new(value = "false")] pub is_filter: bool, } @@ -83,23 +85,42 @@ impl Signature { self } - pub fn required(mut self, name: impl Into, ty: impl Into) -> Signature { - self.positional - .push(PositionalType::Mandatory(name.into(), ty.into())); + pub fn required( + mut self, + name: impl Into, + ty: impl Into, + desc: impl Into, + ) -> Signature { + self.positional.push(( + PositionalType::Mandatory(name.into(), ty.into()), + desc.into(), + )); self } - pub fn optional(mut self, name: impl Into, ty: impl Into) -> Signature { - self.positional - 
.push(PositionalType::Optional(name.into(), ty.into())); + pub fn optional( + mut self, + name: impl Into, + ty: impl Into, + desc: impl Into, + ) -> Signature { + self.positional.push(( + PositionalType::Optional(name.into(), ty.into()), + desc.into(), + )); self } - pub fn named(mut self, name: impl Into, ty: impl Into) -> Signature { + pub fn named( + mut self, + name: impl Into, + ty: impl Into, + desc: impl Into, + ) -> Signature { self.named - .insert(name.into(), NamedType::Optional(ty.into())); + .insert(name.into(), (NamedType::Optional(ty.into()), desc.into())); self } @@ -108,15 +129,17 @@ impl Signature { mut self, name: impl Into, ty: impl Into, + desc: impl Into, ) -> Signature { self.named - .insert(name.into(), NamedType::Mandatory(ty.into())); + .insert(name.into(), (NamedType::Mandatory(ty.into()), desc.into())); self } - pub fn switch(mut self, name: impl Into) -> Signature { - self.named.insert(name.into(), NamedType::Switch); + pub fn switch(mut self, name: impl Into, desc: impl Into) -> Signature { + self.named + .insert(name.into(), (NamedType::Switch, desc.into())); self } @@ -126,8 +149,8 @@ impl Signature { self } - pub fn rest(mut self, ty: SyntaxShape) -> Signature { - self.rest_positional = Some(ty); + pub fn rest(mut self, ty: SyntaxShape, desc: impl Into) -> Signature { + self.rest_positional = Some((ty, desc.into())); self } } diff --git a/src/plugins/add.rs b/src/plugins/add.rs index 98cf3819b3..5bda9d0593 100644 --- a/src/plugins/add.rs +++ b/src/plugins/add.rs @@ -53,10 +53,13 @@ impl Add { impl Plugin for Add { fn config(&mut self) -> Result { Ok(Signature::build("add") - .desc("Add a new field to the table.") - .required("Field", SyntaxShape::ColumnPath) - .required("Value", SyntaxShape::String) - .rest(SyntaxShape::String) + .desc("Add a new column to the table.") + .required("column", SyntaxShape::ColumnPath, "the column name to add") + .required( + "value", + SyntaxShape::String, + "the value to give the cell(s)", + ) 
.filter()) } diff --git a/src/plugins/binaryview.rs b/src/plugins/binaryview.rs index b834f440e2..0072df5b4d 100644 --- a/src/plugins/binaryview.rs +++ b/src/plugins/binaryview.rs @@ -16,7 +16,7 @@ impl Plugin for BinaryView { fn config(&mut self) -> Result { Ok(Signature::build("binaryview") .desc("Autoview of binary data.") - .switch("lores")) + .switch("lores", "use low resolution output mode")) } fn sink(&mut self, call_info: CallInfo, input: Vec>) { diff --git a/src/plugins/edit.rs b/src/plugins/edit.rs index 34653bd66d..78cb32cef3 100644 --- a/src/plugins/edit.rs +++ b/src/plugins/edit.rs @@ -48,8 +48,16 @@ impl Plugin for Edit { fn config(&mut self) -> Result { Ok(Signature::build("edit") .desc("Edit an existing column to have a new value.") - .required("Field", SyntaxShape::ColumnPath) - .required("Value", SyntaxShape::String) + .required( + "Field", + SyntaxShape::ColumnPath, + "the name of the column to edit", + ) + .required( + "Value", + SyntaxShape::String, + "the new value to give the cell(s)", + ) .filter()) } diff --git a/src/plugins/embed.rs b/src/plugins/embed.rs index e659bfeb3b..6dc539d107 100644 --- a/src/plugins/embed.rs +++ b/src/plugins/embed.rs @@ -28,7 +28,7 @@ impl Plugin for Embed { fn config(&mut self) -> Result { Ok(Signature::build("embed") .desc("Embeds a new field to the table.") - .optional("field", SyntaxShape::String) + .optional("field", SyntaxShape::String, "the name of the new column") .filter()) } diff --git a/src/plugins/inc.rs b/src/plugins/inc.rs index 1cb6cb2b97..ed0416ce43 100644 --- a/src/plugins/inc.rs +++ b/src/plugins/inc.rs @@ -137,10 +137,10 @@ impl Plugin for Inc { fn config(&mut self) -> Result { Ok(Signature::build("inc") .desc("Increment a value or version. 
Optionally use the column of a table.") - .switch("major") - .switch("minor") - .switch("patch") - .rest(SyntaxShape::ColumnPath) + .switch("major", "increment the major version (eg 1.2.1 -> 2.0.0)") + .switch("minor", "increment the minor version (eg 1.2.1 -> 1.3.0)") + .switch("patch", "increment the patch version (eg 1.2.1 -> 1.2.2)") + .rest(SyntaxShape::ColumnPath, "the column(s) to update") .filter()) } diff --git a/src/plugins/match.rs b/src/plugins/match.rs index 7133524050..eefbf10632 100644 --- a/src/plugins/match.rs +++ b/src/plugins/match.rs @@ -22,8 +22,8 @@ impl Plugin for Match { fn config(&mut self) -> Result { Ok(Signature::build("match") .desc("filter rows by regex") - .required("member", SyntaxShape::Member) - .required("regex", SyntaxShape::String) + .required("member", SyntaxShape::Member, "the column name to match") + .required("regex", SyntaxShape::String, "the regex to match with") .filter()) } fn begin_filter(&mut self, call_info: CallInfo) -> Result, ShellError> { diff --git a/src/plugins/skip.rs b/src/plugins/skip.rs index efd3231525..5ec290fe04 100644 --- a/src/plugins/skip.rs +++ b/src/plugins/skip.rs @@ -17,7 +17,7 @@ impl Plugin for Skip { fn config(&mut self) -> Result { Ok(Signature::build("skip") .desc("Skip a number of rows") - .rest(SyntaxShape::Number) + .rest(SyntaxShape::Number, "the number of rows to skip") .filter()) } fn begin_filter(&mut self, call_info: CallInfo) -> Result, ShellError> { diff --git a/src/plugins/str.rs b/src/plugins/str.rs index 60625e7f17..8260bdac2c 100644 --- a/src/plugins/str.rs +++ b/src/plugins/str.rs @@ -128,11 +128,11 @@ impl Str { impl Plugin for Str { fn config(&mut self) -> Result { Ok(Signature::build("str") - .desc("Apply string function. Optional use the field of a table") - .switch("downcase") - .switch("upcase") - .switch("to-int") - .rest(SyntaxShape::ColumnPath) + .desc("Apply string function. 
Optional use the column of a table") + .switch("downcase", "convert string to lowercase") + .switch("upcase", "convert string to uppercase") + .switch("to-int", "convert string to integer") + .rest(SyntaxShape::ColumnPath, "the column(s) to convert") .filter()) } From 3f600c5b82db9989cc0026ab80a0bd1739d76535 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Mon, 28 Oct 2019 18:30:14 +1300 Subject: [PATCH 098/184] Fix build issues --- src/parser/parse_command.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/parser/parse_command.rs b/src/parser/parse_command.rs index 23c4d27036..d531da62ac 100644 --- a/src/parser/parse_command.rs +++ b/src/parser/parse_command.rs @@ -407,7 +407,7 @@ impl ColorSyntax for CommandTailShape { for (name, kind) in &signature.named { trace!(target: "nu::color_syntax", "looking for {} : {:?}", name, kind); - match kind { + match &kind.0 { NamedType::Switch => { match token_nodes.extract(|t| t.as_flag(name, context.source())) { Some((pos, flag)) => args.insert(pos, vec![flag.color()]), @@ -502,7 +502,7 @@ impl ColorSyntax for CommandTailShape { for arg in &signature.positional { trace!("Processing positional {:?}", arg); - match arg { + match &arg.0 { PositionalType::Mandatory(..) 
=> { if token_nodes.at_end() { break; @@ -527,7 +527,7 @@ impl ColorSyntax for CommandTailShape { color_syntax(&MaybeSpaceShape, token_nodes, context); // If no match, we should roll back any whitespace we chomped - color_fallible_syntax(&arg.syntax_type(), token_nodes, context)?; + color_fallible_syntax(&arg.0.syntax_type(), token_nodes, context)?; Ok(()) }); @@ -539,7 +539,7 @@ impl ColorSyntax for CommandTailShape { trace_remaining("after positional", token_nodes.clone(), context.source()); - if let Some(syntax_type) = signature.rest_positional { + if let Some((syntax_type, _)) = signature.rest_positional { loop { if token_nodes.at_end_possible_ws() { break; From 1de80aeac3334c82eef603a11153b965a78a6b83 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Tue, 29 Oct 2019 06:51:08 +1300 Subject: [PATCH 099/184] Add support for :config and :env --- src/evaluate/evaluator.rs | 24 +++++++++++++++++++----- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/src/evaluate/evaluator.rs b/src/evaluate/evaluator.rs index 75eb2f4667..e3bc9a7730 100644 --- a/src/evaluate/evaluator.rs +++ b/src/evaluate/evaluator.rs @@ -5,6 +5,7 @@ use crate::parser::{ CommandRegistry, Text, }; use crate::prelude::*; +use crate::TaggedDictBuilder; use derive_new::new; use indexmap::IndexMap; use log::trace; @@ -164,11 +165,24 @@ fn evaluate_reference( trace!("Evaluating {} with Scope {}", name, scope); match name { hir::Variable::It(_) => Ok(scope.it.item.clone().tagged(tag)), - hir::Variable::Other(inner) => Ok(scope - .vars - .get(inner.slice(source)) - .map(|v| v.clone()) - .unwrap_or_else(|| Value::nothing().tagged(tag))), + hir::Variable::Other(inner) => match inner.slice(source) { + x if x == "nu:env" => { + let mut dict = TaggedDictBuilder::new(&tag); + for v in std::env::vars() { + dict.insert(v.0, Value::string(v.1)); + } + Ok(dict.into_tagged_value()) + } + x if x == "nu:config" => { + let config = crate::data::config::read(tag.clone(), &None)?; + 
Ok(Value::row(config).tagged(tag)) + } + x => Ok(scope + .vars + .get(x) + .map(|v| v.clone()) + .unwrap_or_else(|| Value::nothing().tagged(tag))), + }, } } From 53911ebecd4a4edff69b059423d3ec48f7eceb65 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Tue, 29 Oct 2019 07:40:34 +1300 Subject: [PATCH 100/184] Add support for :path --- src/commands/command.rs | 19 ++++++++++++------- src/evaluate/evaluator.rs | 12 ++++++++++++ 2 files changed, 24 insertions(+), 7 deletions(-) diff --git a/src/commands/command.rs b/src/commands/command.rs index 5f3f4809bd..6677dfbd7e 100644 --- a/src/commands/command.rs +++ b/src/commands/command.rs @@ -589,17 +589,22 @@ impl Command { out.to_output_stream() } else { let nothing = Value::nothing().tagged(Tag::unknown()); + let call_info = raw_args .clone() .call_info - .evaluate(®istry, &Scope::it_value(nothing.clone())) - .unwrap(); + .evaluate(®istry, &Scope::it_value(nothing.clone())); - match command - .run(&call_info, ®istry, &raw_args, nothing) - .into() - { - Ok(o) => o, + match call_info { + Ok(call_info) => { + match command + .run(&call_info, ®istry, &raw_args, nothing) + .into() + { + Ok(o) => o, + Err(e) => OutputStream::one(Err(e)), + } + } Err(e) => OutputStream::one(Err(e)), } } diff --git a/src/evaluate/evaluator.rs b/src/evaluate/evaluator.rs index df3186808f..9313d0fe5c 100644 --- a/src/evaluate/evaluator.rs +++ b/src/evaluate/evaluator.rs @@ -177,6 +177,18 @@ fn evaluate_reference( let config = crate::data::config::read(tag.clone(), &None)?; Ok(Value::row(config).tagged(tag)) } + x if x == "nu:path" => { + let mut table = vec![]; + match std::env::var_os("PATH") { + Some(paths) => { + for path in std::env::split_paths(&paths) { + table.push(Value::path(path).tagged(&tag)); + } + } + _ => {} + } + Ok(Value::table(&table).tagged(tag)) + } x => Ok(scope .vars .get(x) From 30b6eac03dfd9c119b0ee4c95d34f0a1758e2e5d Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Tue, 29 Oct 2019 10:22:31 +1300 Subject: [PATCH 
101/184] Allow updating path in config --- src/cli.rs | 37 +++++++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/src/cli.rs b/src/cli.rs index d52a55e267..9661cb3202 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -520,6 +520,43 @@ async fn process_line(readline: Result, ctx: &mut Context let mut iter = pipeline.commands.into_iter().peekable(); let mut is_first_command = true; + // Check the config to see if we need to update the path + // TODO: make sure config is cached so we don't path this load every call + let config = crate::data::config::read(Tag::unknown(), &None).unwrap(); + if config.contains_key("path") { + // Override the path with what they give us from config + let value = config.get("path"); + + match value { + Some(value) => match value { + Tagged { + item: Value::Table(table), + .. + } => { + let mut paths = vec![]; + for val in table { + let path_str = val.as_string(); + match path_str { + Err(_) => {} + Ok(path_str) => { + paths.push(PathBuf::from(path_str)); + } + } + } + let path_os_string = std::env::join_paths(&paths); + match path_os_string { + Ok(path_os_string) => { + std::env::set_var("PATH", path_os_string); + } + Err(_) => {} + } + } + _ => {} + }, + None => {} + } + } + loop { let item: Option = iter.next(); let next: Option<&ClassifiedCommand> = iter.peek(); From e09160e80d11e1b096a8f3c0eb3e2c42db837053 Mon Sep 17 00:00:00 2001 From: Ryan Blecher Date: Mon, 28 Oct 2019 20:22:51 -0400 Subject: [PATCH 102/184] add ability to create PathBuf from string to avoid type mismatch --- src/data/base.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/data/base.rs b/src/data/base.rs index 2cf1f2cedb..bc567f0dfe 100644 --- a/src/data/base.rs +++ b/src/data/base.rs @@ -817,6 +817,7 @@ impl Tagged { pub(crate) fn as_path(&self) -> Result { match self.item() { Value::Primitive(Primitive::Path(path)) => Ok(path.clone()), + Value::Primitive(Primitive::String(path_str)) => Ok(PathBuf::from(&path_str).clone()), other => 
Err(ShellError::type_error( "Path", other.type_name().tagged(self.tag()), From 392ff286b2357790f66467f7683623bfb701f4a1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Tue, 29 Oct 2019 16:04:31 -0500 Subject: [PATCH 103/184] This commit is ongoing work toward making working with data processing in Nu a joy. Fundamentally, we embrace functional programming principles for transforming the dataset from any format picked up by Nu. These table processing "primitive" commands will build up and make pipelines composable, with data processing capabilities allowing us to evaluate, reduce, and map the tables, as far as even composing this declaratively. In this regard, `split-by` expects some table with grouped data, and we can use it further in interesting ways (e.g. collecting labels for visualizing the data in charts and/or suiting it to a particular chart of interest). --- README.md | 1 + src/cli.rs | 1 + src/commands.rs | 2 + src/commands/group_by.rs | 183 +++++++++++++++++++++------- src/commands/split_by.rs | 256 +++++++++++++++++++++++++++++++++++++++ tests/commands_test.rs | 72 +++++++++-- 6 files changed, 461 insertions(+), 54 deletions(-) create mode 100644 src/commands/split_by.rs diff --git a/README.md b/README.md index 64ff0e8015..1deb11a205 100644 --- a/README.md +++ b/README.md @@ -262,6 +262,7 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat | pivot --header-row | Pivot the tables, making columns into rows and vice versa | | reject ...columns | Remove the given columns from the table | | reverse | Reverses the table. | +| split-by column | Creates a new table with the data from the inner tables splitted by the column given | | skip amount | Skip a number of rows | | skip-while condition | Skips rows while the condition matches. 
| | sort-by ...columns | Sort by the given columns | diff --git a/src/cli.rs b/src/cli.rs index 9661cb3202..a66dd6cbbf 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -259,6 +259,7 @@ pub async fn cli() -> Result<(), Box> { whole_stream_command(Previous), whole_stream_command(Debug), whole_stream_command(Shells), + whole_stream_command(SplitBy), whole_stream_command(SplitColumn), whole_stream_command(SplitRow), whole_stream_command(Lines), diff --git a/src/commands.rs b/src/commands.rs index c75ca81192..0a71a93631 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -55,6 +55,7 @@ pub(crate) mod shells; pub(crate) mod size; pub(crate) mod skip_while; pub(crate) mod sort_by; +pub(crate) mod split_by; pub(crate) mod split_column; pub(crate) mod split_row; pub(crate) mod table; @@ -129,6 +130,7 @@ pub(crate) use shells::Shells; pub(crate) use size::Size; pub(crate) use skip_while::SkipWhile; pub(crate) use sort_by::SortBy; +pub(crate) use split_by::SplitBy; pub(crate) use split_column::SplitColumn; pub(crate) use split_row::SplitRow; pub(crate) use table::Table; diff --git a/src/commands/group_by.rs b/src/commands/group_by.rs index f36d3f57dd..66c1360f5d 100644 --- a/src/commands/group_by.rs +++ b/src/commands/group_by.rs @@ -36,59 +36,154 @@ impl WholeStreamCommand for GroupBy { } } -fn group_by( +pub fn group_by( GroupByArgs { column_name }: GroupByArgs, RunnableContext { input, name, .. }: RunnableContext, ) -> Result { let stream = async_stream! 
{ let values: Vec> = input.values.collect().await; - let mut groups = indexmap::IndexMap::new(); - for value in values { - let group_key = value.get_data_by_key(&column_name.item); - - if group_key.is_none() { - - let possibilities = value.data_descriptors(); - - let mut possible_matches: Vec<_> = possibilities - .iter() - .map(|x| (natural::distance::levenshtein_distance(x, &column_name.item), x)) - .collect(); - - possible_matches.sort(); - - let err = { - if possible_matches.len() > 0 { - ShellError::labeled_error( - "Unknown column", - format!("did you mean '{}'?", possible_matches[0].1), - &column_name.tag,) - } else { - ShellError::labeled_error( - "Unknown column", - "row does not contain this column", - &column_name.tag, - ) - } - }; - - yield Err(err) - } else { - let group_key = group_key.unwrap().as_string()?; - let mut group = groups.entry(group_key).or_insert(vec![]); - group.push(value); + if values.is_empty() { + yield Err(ShellError::labeled_error( + "Expected table from pipeline", + "requires a table input", + column_name.span() + )) + } else { + match group(&column_name, values, name) { + Ok(grouped) => yield ReturnSuccess::value(grouped), + Err(err) => yield Err(err) } } - - let mut out = TaggedDictBuilder::new(name.clone()); - - for (k,v) in groups.iter() { - out.insert(k, Value::table(v)); - } - - yield ReturnSuccess::value(out) }; Ok(stream.to_output_stream()) } + +pub fn group( + column_name: &Tagged, + values: Vec>, + tag: impl Into, +) -> Result, ShellError> { + let tag = tag.into(); + + let mut groups = indexmap::IndexMap::new(); + + for value in values { + let group_key = value.get_data_by_key(column_name); + + if group_key.is_none() { + let possibilities = value.data_descriptors(); + + let mut possible_matches: Vec<_> = possibilities + .iter() + .map(|x| (natural::distance::levenshtein_distance(x, column_name), x)) + .collect(); + + possible_matches.sort(); + + if possible_matches.len() > 0 { + return Err(ShellError::labeled_error( + 
"Unknown column", + format!("did you mean '{}'?", possible_matches[0].1), + column_name.tag(), + )); + } else { + return Err(ShellError::labeled_error( + "Unknown column", + "row does not contain this column", + column_name.tag(), + )); + } + } + + let group_key = group_key.unwrap().as_string()?; + let group = groups.entry(group_key).or_insert(vec![]); + group.push(value); + } + + let mut out = TaggedDictBuilder::new(&tag); + + for (k, v) in groups.iter() { + out.insert(k, Value::table(v)); + } + + Ok(out.into_tagged_value()) +} + +#[cfg(test)] +mod tests { + + use crate::commands::group_by::group; + use crate::data::meta::*; + use crate::Value; + use indexmap::IndexMap; + + fn string(input: impl Into) -> Tagged { + Value::string(input.into()).tagged_unknown() + } + + fn row(entries: IndexMap>) -> Tagged { + Value::row(entries).tagged_unknown() + } + + fn table(list: &Vec>) -> Tagged { + Value::table(list).tagged_unknown() + } + + #[test] + fn groups_table_by_key() { + let for_key = String::from("date").tagged_unknown(); + + let nu_releases = vec![ + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")}, + ), + row( + indexmap! 
{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}, + ), + ]; + + assert_eq!( + group(&for_key, nu_releases, Tag::unknown()).unwrap(), + row(indexmap! { + "August 23-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}), + row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}), + row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}) + ]), + "October 10-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}), + row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}), + row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")}) + ]), + "Sept 24-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")}), + row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")}), + row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")}) + ]), + }) + ); + } +} diff --git a/src/commands/split_by.rs b/src/commands/split_by.rs new file mode 100644 index 0000000000..b995b041d7 --- /dev/null +++ b/src/commands/split_by.rs @@ -0,0 +1,256 @@ +use crate::commands::WholeStreamCommand; +use 
crate::data::TaggedDictBuilder; +use crate::errors::ShellError; +use crate::prelude::*; + +pub struct SplitBy; + +#[derive(Deserialize)] +pub struct SplitByArgs { + column_name: Tagged, +} + +impl WholeStreamCommand for SplitBy { + fn name(&self) -> &str { + "split-by" + } + + fn signature(&self) -> Signature { + Signature::build("split-by").required( + "column_name", + SyntaxShape::String, + "the name of the column within the nested table to split by", + ) + } + + fn usage(&self) -> &str { + "Creates a new table with the data from the inner tables splitted by the column given." + } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + args.process(registry, split_by)?.run() + } +} + +pub fn split_by( + SplitByArgs { column_name }: SplitByArgs, + RunnableContext { input, name, .. }: RunnableContext, +) -> Result { + let stream = async_stream! { + let values: Vec> = input.values.collect().await; + + if values.len() > 1 || values.is_empty() { + yield Err(ShellError::labeled_error( + "Expected table from pipeline", + "requires a table input", + column_name.span() + )) + } else { + match split(&column_name, &values[0], name) { + Ok(split) => yield ReturnSuccess::value(split), + Err(err) => yield Err(err), + } + } + }; + + Ok(stream.to_output_stream()) +} + +pub fn split( + column_name: &Tagged, + value: &Tagged, + tag: impl Into, +) -> Result, ShellError> { + let origin_tag = tag.into(); + + let mut splits = indexmap::IndexMap::new(); + + match value { + Tagged { + item: Value::Row(group_sets), + .. + } => { + for (group_key, group_value) in group_sets.entries.iter() { + match *group_value { + Tagged { + item: Value::Table(ref dataset), + .. + } => { + let group = crate::commands::group_by::group( + &column_name, + dataset.to_vec(), + &origin_tag, + )?; + + match group { + Tagged { + item: Value::Row(o), + .. 
+ } => { + for (split_label, subset) in o.entries.into_iter() { + match subset { + Tagged { + item: Value::Table(subset), + tag, + } => { + let s = splits + .entry(split_label.clone()) + .or_insert(indexmap::IndexMap::new()); + s.insert( + group_key.clone(), + Value::table(&subset).tagged(tag), + ); + } + other => { + return Err(ShellError::type_error( + "a table value", + other.tagged_type_name(), + )) + } + } + } + } + _ => { + return Err(ShellError::type_error( + "a table value", + group.tagged_type_name(), + )) + } + } + } + ref other => { + return Err(ShellError::type_error( + "a table value", + other.tagged_type_name(), + )) + } + } + } + } + _ => { + return Err(ShellError::type_error( + "a table value", + value.tagged_type_name(), + )) + } + } + + let mut out = TaggedDictBuilder::new(&origin_tag); + + for (k, v) in splits.into_iter() { + out.insert(k, Value::row(v)); + } + + Ok(out.into_tagged_value()) +} +#[cfg(test)] +mod tests { + + use crate::commands::split_by::split; + use crate::data::meta::*; + use crate::Value; + use indexmap::IndexMap; + + fn string(input: impl Into) -> Tagged { + Value::string(input.into()).tagged_unknown() + } + + fn row(entries: IndexMap>) -> Tagged { + Value::row(entries).tagged_unknown() + } + + fn table(list: &Vec>) -> Tagged { + Value::table(list).tagged_unknown() + } + + #[test] + fn splits_inner_tables_by_key() { + let for_key = String::from("country").tagged_unknown(); + + let nu_releases = row(indexmap! 
{ + "August 23-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}), + row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}), + row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}) + ]), + "Sept 24-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")}), + row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")}), + row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")}) + ]), + "October 10-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")}), + row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}), + row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}) + ]) + }); + + assert_eq!( + split(&for_key, &nu_releases, Tag::unknown()).unwrap(), + Value::row(indexmap! { + "EC".into() => row(indexmap! { + "August 23-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}) + ]), + "Sept 24-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")}) + ]), + "October 10-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")}) + ]) + }), + "NZ".into() => row(indexmap! 
{ + "August 23-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}) + ]), + "Sept 24-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")}) + ]), + "October 10-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}) + ]) + }), + "US".into() => row(indexmap! { + "August 23-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}) + ]), + "Sept 24-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")}) + ]), + "October 10-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}) + ]) + }) + }).tagged_unknown() + ); + } + + #[test] + fn errors_if_key_within_some_inner_table_is_missing() { + let for_key = String::from("country").tagged_unknown(); + + let nu_releases = row(indexmap! 
{ + "August 23-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}), + row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}), + row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}) + ]), + "Sept 24-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")}), + row(indexmap!{"name".into() => Value::string("JT").tagged(Tag::from(Span::new(5,10))), "date".into() => string("Sept 24-2019")}), + row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")}) + ]), + "October 10-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")}), + row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}), + row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}) + ]) + }); + + assert!(split(&for_key, &nu_releases, Tag::from(Span::new(5, 10))).is_err()); + } +} diff --git a/tests/commands_test.rs b/tests/commands_test.rs index 87e1182b10..7b31c6ae4d 100644 --- a/tests/commands_test.rs +++ b/tests/commands_test.rs @@ -9,10 +9,10 @@ fn group_by() { sandbox.with_files(vec![FileWithContentToBeTrimmed( "los_tres_caballeros.csv", r#" - first_name,last_name,rusty_luck,type - Andrés,Robalino,1,A - Jonathan,Turner,1,B - Yehuda,Katz,1,A + first_name,last_name,rusty_at,type + Andrés,Robalino,10/11/2013,A + Jonathan,Turner,10/12/2013,B + Yehuda,Katz,10/11/2013,A "#, )]); @@ -20,8 +20,8 @@ fn group_by() { cwd: dirs.test(), h::pipeline( r#" open los_tres_caballeros.csv - | group-by type 
- | get A + | group-by rusty_at + | get "10/11/2013" | count | echo $it "# @@ -37,10 +37,10 @@ fn group_by_errors_if_unknown_column_name() { sandbox.with_files(vec![FileWithContentToBeTrimmed( "los_tres_caballeros.csv", r#" - first_name,last_name,rusty_luck,type - Andrés,Robalino,1,A - Jonathan,Turner,1,B - Yehuda,Katz,1,A + first_name,last_name,rusty_at,type + Andrés,Robalino,10/11/2013,A + Jonathan,Turner,10/12/2013,B + Yehuda,Katz,10/11/2013,A "#, )]); @@ -56,6 +56,58 @@ fn group_by_errors_if_unknown_column_name() { }) } +#[test] +fn split_by() { + Playground::setup("split_by_test_1", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContentToBeTrimmed( + "los_tres_caballeros.csv", + r#" + first_name,last_name,rusty_at,type + Andrés,Robalino,10/11/2013,A + Jonathan,Turner,10/12/2013,B + Yehuda,Katz,10/11/2013,A + "#, + )]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + open los_tres_caballeros.csv + | group-by rusty_at + | split-by type + | get A."10/11/2013" + | count + | echo $it + "# + )); + + assert_eq!(actual, "2"); + }) +} + +#[test] +fn split_by_errors_if_no_table_given_as_input() { + Playground::setup("split_by_test_2", |dirs, sandbox| { + sandbox.with_files(vec![ + EmptyFile("los.txt"), + EmptyFile("tres.txt"), + EmptyFile("amigos.txt"), + EmptyFile("arepas.clu"), + ]); + + let actual = nu_error!( + cwd: dirs.test(), h::pipeline( + r#" + ls + | get name + | split-by type + "# + )); + + assert!(actual.contains("Expected table from pipeline")); + }) +} + #[test] fn first_gets_first_rows_by_amount() { Playground::setup("first_test_1", |dirs, sandbox| { From 3820fef801641b13a49c4106a8f5823c1eb5ef4b Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Wed, 30 Oct 2019 11:33:36 +1300 Subject: [PATCH 104/184] Add a simple read/parse plugin to better handle text data --- Cargo.toml | 6 +- src/plugins/read.rs | 156 +++++++++++++++++++++++++++++++ src/utils.rs | 4 + tests/fixtures/formats/fileA.txt | 3 + tests/tests.rs | 16 ++++ 5 files 
changed, 184 insertions(+), 1 deletion(-) create mode 100644 src/plugins/read.rs create mode 100644 tests/fixtures/formats/fileA.txt diff --git a/Cargo.toml b/Cargo.toml index e81e830e1a..52589cb733 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -74,8 +74,8 @@ natural = "0.3.0" serde_urlencoded = "0.6.1" sublime_fuzzy = "0.5" trash = "1.0.0" +regex = "1" -regex = {version = "1", optional = true } neso = { version = "0.5.0", optional = true } crossterm = { version = "0.10.2", optional = true } syntect = {version = "3.2.0", optional = true } @@ -136,6 +136,10 @@ path = "src/plugins/add.rs" name = "nu_plugin_edit" path = "src/plugins/edit.rs" +[[bin]] +name = "nu_plugin_read" +path = "src/plugins/read.rs" + [[bin]] name = "nu_plugin_str" path = "src/plugins/str.rs" diff --git a/src/plugins/read.rs b/src/plugins/read.rs new file mode 100644 index 0000000000..de88946e91 --- /dev/null +++ b/src/plugins/read.rs @@ -0,0 +1,156 @@ +use nu::{ + serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature, + SyntaxShape, Tagged, TaggedDictBuilder, Value, +}; + +use nom::{ + bytes::complete::{tag, take_while}, + IResult, +}; +use regex::Regex; + +#[derive(Debug)] +enum ReadCommand { + Text(String), + Column(String), +} + +fn read(input: &str) -> IResult<&str, Vec> { + let mut output = vec![]; + + let mut loop_input = input; + loop { + let (input, before) = take_while(|c| c != '{')(loop_input)?; + if before.len() > 0 { + output.push(ReadCommand::Text(before.to_string())); + } + if input != "" { + // Look for column as we're now at one + let (input, _) = tag("{")(input)?; + let (input, column) = take_while(|c| c != '}')(input)?; + let (input, _) = tag("}")(input)?; + + output.push(ReadCommand::Column(column.to_string())); + loop_input = input; + } else { + loop_input = input; + } + if loop_input == "" { + break; + } + } + + Ok((loop_input, output)) +} + +fn column_names(commands: &[ReadCommand]) -> Vec { + let mut output = vec![]; + + for command in 
commands { + match command { + ReadCommand::Column(c) => { + output.push(c.clone()); + } + _ => {} + } + } + + output +} + +fn build_regex(commands: &[ReadCommand]) -> String { + let mut output = String::new(); + + for command in commands { + match command { + ReadCommand::Text(s) => { + output.push_str(&s.replace("(", "\\(")); + } + ReadCommand::Column(_) => { + output.push_str("(.*)"); + } + } + } + + return output; +} +struct Read { + regex: Regex, + column_names: Vec, +} + +impl Read { + fn new() -> Self { + Read { + regex: Regex::new("").unwrap(), + column_names: vec![], + } + } +} + +impl Plugin for Read { + fn config(&mut self) -> Result { + Ok(Signature::build("read") + .desc("Parse columns from string data using a simple pattern") + .required( + "pattern", + SyntaxShape::Any, + "the pattern to match. Eg) \"{foo}: {bar}\"", + ) + .filter()) + } + fn begin_filter(&mut self, call_info: CallInfo) -> Result, ShellError> { + if let Some(args) = call_info.args.positional { + match &args[0] { + Tagged { + item: Value::Primitive(Primitive::String(pattern)), + .. + } => { + //self.pattern = s.clone(); + let read_pattern = read(&pattern).unwrap(); + let read_regex = build_regex(&read_pattern.1); + + self.column_names = column_names(&read_pattern.1); + + self.regex = Regex::new(&read_regex).unwrap(); + } + Tagged { tag, .. 
} => { + return Err(ShellError::labeled_error( + "Unrecognized type in params", + "value", + tag, + )); + } + } + } + Ok(vec![]) + } + + fn filter(&mut self, input: Tagged) -> Result, ShellError> { + let mut results = vec![]; + match &input { + Tagged { + tag, + item: Value::Primitive(Primitive::String(s)), + } => { + //self.full_input.push_str(&s); + + for cap in self.regex.captures_iter(&s) { + let mut dict = TaggedDictBuilder::new(tag); + + for (idx, column_name) in self.column_names.iter().enumerate() { + dict.insert(column_name, Value::string(&cap[idx + 1].to_string())); + } + + results.push(ReturnSuccess::value(dict.into_tagged_value())); + } + } + _ => {} + } + Ok(results) + } +} + +fn main() { + serve_plugin(&mut Read::new()); +} diff --git a/src/utils.rs b/src/utils.rs index 6b1318f9e8..56fee491b6 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -448,6 +448,10 @@ mod tests { loc: fixtures().join("cargo_sample.toml"), at: 0 }, + Res { + loc: fixtures().join("fileA.txt"), + at: 0 + }, Res { loc: fixtures().join("jonathan.xml"), at: 0 diff --git a/tests/fixtures/formats/fileA.txt b/tests/fixtures/formats/fileA.txt new file mode 100644 index 0000000000..0ce9fb3fa2 --- /dev/null +++ b/tests/fixtures/formats/fileA.txt @@ -0,0 +1,3 @@ +VAR1=Chill +VAR2=StupidLongName +VAR3=AlsoChill diff --git a/tests/tests.rs b/tests/tests.rs index 25337edb09..1a739f1982 100644 --- a/tests/tests.rs +++ b/tests/tests.rs @@ -56,6 +56,22 @@ fn add_plugin() { assert_eq!(actual, "1"); } +#[test] +fn read_plugin() { + let actual = nu!( + cwd: "tests/fixtures/formats", h::pipeline( + r#" + open fileA.txt + | read "{Name}={Value}" + | nth 1 + | get Value + | echo $it + "# + )); + + assert_eq!(actual, "StupidLongName"); +} + #[test] fn edit_plugin() { let actual = nu!( From 81691e07c6c47644f2ff6d33c45bd6822521c421 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Wed, 30 Oct 2019 19:54:06 +1300 Subject: [PATCH 105/184] Add prepend and append commands --- src/cli.rs | 2 ++ 
src/commands.rs | 4 ++++ src/commands/append.rs | 49 +++++++++++++++++++++++++++++++++++++++++ src/commands/prepend.rs | 49 +++++++++++++++++++++++++++++++++++++++++ tests/tests.rs | 32 +++++++++++++++++++++++++++ 5 files changed, 136 insertions(+) create mode 100644 src/commands/append.rs create mode 100644 src/commands/prepend.rs diff --git a/src/cli.rs b/src/cli.rs index 9661cb3202..f46db10529 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -264,6 +264,8 @@ pub async fn cli() -> Result<(), Box> { whole_stream_command(Lines), whole_stream_command(Reject), whole_stream_command(Reverse), + whole_stream_command(Append), + whole_stream_command(Prepend), whole_stream_command(Trim), whole_stream_command(ToBSON), whole_stream_command(ToCSV), diff --git a/src/commands.rs b/src/commands.rs index c75ca81192..ba69d1e822 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -1,6 +1,7 @@ #[macro_use] pub(crate) mod macros; +pub(crate) mod append; pub(crate) mod args; pub(crate) mod autoview; pub(crate) mod cd; @@ -45,6 +46,7 @@ pub(crate) mod pick; pub(crate) mod pivot; pub(crate) mod plugin; pub(crate) mod post; +pub(crate) mod prepend; pub(crate) mod prev; pub(crate) mod pwd; pub(crate) mod reject; @@ -79,6 +81,7 @@ pub(crate) use command::{ UnevaluatedCallInfo, WholeStreamCommand, }; +pub(crate) use append::Append; pub(crate) use classified::ClassifiedCommand; pub(crate) use config::Config; pub(crate) use count::Count; @@ -119,6 +122,7 @@ pub(crate) use open::Open; pub(crate) use pick::Pick; pub(crate) use pivot::Pivot; pub(crate) use post::Post; +pub(crate) use prepend::Prepend; pub(crate) use prev::Previous; pub(crate) use pwd::PWD; pub(crate) use reject::Reject; diff --git a/src/commands/append.rs b/src/commands/append.rs new file mode 100644 index 0000000000..b8ca7b6e92 --- /dev/null +++ b/src/commands/append.rs @@ -0,0 +1,49 @@ +use crate::commands::WholeStreamCommand; +use crate::errors::ShellError; +use crate::parser::CommandRegistry; +use crate::prelude::*; + 
+#[derive(Deserialize)] +struct AppendArgs { + row: Tagged, +} + +pub struct Append; + +impl WholeStreamCommand for Append { + fn name(&self) -> &str { + "append" + } + + fn signature(&self) -> Signature { + Signature::build("append").required( + "row value", + SyntaxShape::Any, + "the value of the row to append to the table", + ) + } + + fn usage(&self) -> &str { + "Append the given row to the table" + } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + args.process(registry, append)?.run() + } +} + +fn append( + AppendArgs { row }: AppendArgs, + RunnableContext { input, .. }: RunnableContext, +) -> Result { + let mut after: VecDeque> = VecDeque::new(); + after.push_back(row); + + Ok(OutputStream::from_input( + input.values.chain(after), + )) +} diff --git a/src/commands/prepend.rs b/src/commands/prepend.rs new file mode 100644 index 0000000000..4d9c037f43 --- /dev/null +++ b/src/commands/prepend.rs @@ -0,0 +1,49 @@ +use crate::commands::WholeStreamCommand; +use crate::errors::ShellError; +use crate::parser::CommandRegistry; +use crate::prelude::*; + +#[derive(Deserialize)] +struct PrependArgs { + row: Tagged, +} + +pub struct Prepend; + +impl WholeStreamCommand for Prepend { + fn name(&self) -> &str { + "prepend" + } + + fn signature(&self) -> Signature { + Signature::build("prepend").required( + "row value", + SyntaxShape::Any, + "the value of the row to prepend to the table", + ) + } + + fn usage(&self) -> &str { + "Prepend the given row to the front of the table" + } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + args.process(registry, prepend)?.run() + } +} + +fn prepend( + PrependArgs { row }: PrependArgs, + RunnableContext { input, .. 
}: RunnableContext, +) -> Result { + let mut prepend: VecDeque> = VecDeque::new(); + prepend.push_back(row); + + Ok(OutputStream::from_input( + prepend.chain(input.values), + )) +} diff --git a/tests/tests.rs b/tests/tests.rs index 1a739f1982..14552a41ee 100644 --- a/tests/tests.rs +++ b/tests/tests.rs @@ -72,6 +72,38 @@ fn read_plugin() { assert_eq!(actual, "StupidLongName"); } +#[test] +fn prepend_plugin() { + let actual = nu!( + cwd: "tests/fixtures/formats", h::pipeline( + r#" + open fileA.txt + | lines + | prepend "testme" + | nth 0 + | echo $it + "# + )); + + assert_eq!(actual, "testme"); +} + +#[test] +fn append_plugin() { + let actual = nu!( + cwd: "tests/fixtures/formats", h::pipeline( + r#" + open fileA.txt + | lines + | append "testme" + | nth 3 + | echo $it + "# + )); + + assert_eq!(actual, "testme"); +} + #[test] fn edit_plugin() { let actual = nu!( From a9cd6b4f7afd8617936fd357c408dc4412857821 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Wed, 30 Oct 2019 20:04:39 +1300 Subject: [PATCH 106/184] Format files --- src/commands/append.rs | 4 +--- src/commands/prepend.rs | 4 +--- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/src/commands/append.rs b/src/commands/append.rs index b8ca7b6e92..fe22c9065e 100644 --- a/src/commands/append.rs +++ b/src/commands/append.rs @@ -43,7 +43,5 @@ fn append( let mut after: VecDeque> = VecDeque::new(); after.push_back(row); - Ok(OutputStream::from_input( - input.values.chain(after), - )) + Ok(OutputStream::from_input(input.values.chain(after))) } diff --git a/src/commands/prepend.rs b/src/commands/prepend.rs index 4d9c037f43..b6fa935b0b 100644 --- a/src/commands/prepend.rs +++ b/src/commands/prepend.rs @@ -43,7 +43,5 @@ fn prepend( let mut prepend: VecDeque> = VecDeque::new(); prepend.push_back(row); - Ok(OutputStream::from_input( - prepend.chain(input.values), - )) + Ok(OutputStream::from_input(prepend.chain(input.values))) } From 2d44b7d296d24e8a875d350dc995e1de165b5475 Mon Sep 17 00:00:00 2001 
From: Jonathan Turner Date: Wed, 30 Oct 2019 20:22:01 +1300 Subject: [PATCH 107/184] Update README.md --- README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/README.md b/README.md index 64ff0e8015..5e482bc29e 100644 --- a/README.md +++ b/README.md @@ -249,6 +249,7 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat | command | description | | ------------- | ------------- | | add column-or-column-path value | Add a new column to the table | +| append row-data | Append a row to the end of the table | | count | Show the total number of rows | | edit column-or-column-path value | Edit an existing column to have a new value | | embed column | Creates a new table of one column with the given name, and places the current table inside of it | @@ -260,6 +261,7 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat | nth row-number | Return only the selected row | | pick ...columns | Down-select table to only these columns | | pivot --header-row | Pivot the tables, making columns into rows and vice versa | +| prepend row-data | Prepend a row to the beginning of the table | | reject ...columns | Remove the given columns from the table | | reverse | Reverses the table. | | skip amount | Skip a number of rows | @@ -293,6 +295,7 @@ Nu adheres closely to a set of goals that make up its design philosophy. 
As feat | from-xml | Parse text as .xml and create a table | | from-yaml | Parse text as a .yaml/.yml and create a table | | lines | Split single string into rows, one per line | +| read pattern | Convert text to a table by matching the given pattern | | size | Gather word count statistics on the text | | split-column sep ...column-names | Split row contents across multiple columns via the separator, optionally give the columns names | | split-row sep | Split row contents over multiple rows via the separator | From cea8fab3077dd545102b4f60c464f93e7e114b05 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Wed, 30 Oct 2019 05:55:26 -0500 Subject: [PATCH 108/184] "Integers" in column paths fetch a row from a table. --- src/data/base.rs | 51 +++++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 46 insertions(+), 5 deletions(-) diff --git a/src/data/base.rs b/src/data/base.rs index bc567f0dfe..43cb837849 100644 --- a/src/data/base.rs +++ b/src/data/base.rs @@ -475,6 +475,13 @@ impl Value { } } + pub(crate) fn get_data_by_index(&self, idx: usize) -> Option<&Tagged> { + match self { + Value::Table(value_set) => value_set.get(idx), + _ => None, + } + } + pub(crate) fn get_data_by_key(&self, name: &str) -> Option<&Tagged> { match self { Value::Row(o) => o.get_data_by_key(name), @@ -526,7 +533,15 @@ impl Value { ) -> Option> { let mut current = self; for p in path { - match current.get_data_by_key(p) { + let value = if p.chars().all(char::is_numeric) { + current.get_data_by_index(p.chars().fold(0 as usize, |acc, c| { + c.to_digit(10).unwrap_or(0) as usize + acc + })) + } else { + current.get_data_by_key(p) + }; + + match value { Some(v) => current = v, None => return None, } @@ -960,7 +975,7 @@ mod tests { let (version, tag) = string("0.4.0").into_parts(); - let row = Value::row(indexmap! { + let value = Value::row(indexmap! { "package".into() => row(indexmap! 
{ "name".into() => string("nu"), @@ -969,7 +984,7 @@ mod tests { }); assert_eq!( - **row.get_data_by_column_path(tag, &field_path).unwrap(), + **value.get_data_by_column_path(tag, &field_path).unwrap(), version ) } @@ -980,7 +995,7 @@ mod tests { let (name, tag) = string("Andrés N. Robalino").into_parts(); - let row = Value::row(indexmap! { + let value = Value::row(indexmap! { "package".into() => row(indexmap! { "name".into() => string("nu"), "version".into() => string("0.4.0"), @@ -993,11 +1008,37 @@ mod tests { }); assert_eq!( - **row.get_data_by_column_path(tag, &field_path).unwrap(), + **value.get_data_by_column_path(tag, &field_path).unwrap(), name ) } + #[test] + fn column_path_that_contains_just_a_numbers_gets_a_row_from_a_table() { + let field_path = column_path(&vec![string("package"), string("authors"), string("0")]); + + let (_, tag) = string("Andrés N. Robalino").into_parts(); + + let value = Value::row(indexmap! { + "package".into() => row(indexmap! { + "name".into() => string("nu"), + "version".into() => string("0.4.0"), + "authors".into() => table(&vec![ + row(indexmap!{"name".into() => string("Andrés N. Robalino")}), + row(indexmap!{"name".into() => string("Jonathan Turner")}), + row(indexmap!{"name".into() => string("Yehuda Katz")}) + ]) + }) + }); + + assert_eq!( + **value.get_data_by_column_path(tag, &field_path).unwrap(), + Value::row(indexmap! { + "name".into() => string("Andrés N. 
Robalino") + }) + ); + } + #[test] fn replaces_matching_field_from_a_row() { let field_path = column_path(&vec![string("amigos")]); From fd922718847638805a6afa56009a7ba17bb305e4 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Thu, 31 Oct 2019 09:14:47 +1300 Subject: [PATCH 109/184] Move rustyline dep back to crates --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 52589cb733..97b02b450c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,7 +14,7 @@ documentation = "https://book.nushell.sh" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -rustyline = { git = "https://github.com/kkawakam/rustyline.git" } +rustyline = "5.0.4" chrono = { version = "0.4.9", features = ["serde"] } derive-new = "0.5.8" prettytable-rs = "0.8.0" From 7614ce4b49cbf24ff245555c38157bc0fe56fc84 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Wed, 30 Oct 2019 17:46:40 -0500 Subject: [PATCH 110/184] Allow handling errors with failure callbacks. 
--- src/commands/get.rs | 117 ++++++++++++++++++++++---------------------- src/data/base.rs | 27 +++++++--- src/lib.rs | 4 +- src/plugins/inc.rs | 53 +++++++++++++++----- src/plugins/str.rs | 49 ++++++++++++++++--- src/prelude.rs | 4 +- src/utils.rs | 24 +++++++++ 7 files changed, 192 insertions(+), 86 deletions(-) diff --git a/src/commands/get.rs b/src/commands/get.rs index 70508bdb7a..69ef15333a 100644 --- a/src/commands/get.rs +++ b/src/commands/get.rs @@ -1,8 +1,8 @@ use crate::commands::WholeStreamCommand; -use crate::data::meta::tag_for_tagged_list; use crate::data::Value; use crate::errors::ShellError; use crate::prelude::*; +use crate::utils::did_you_mean; use log::trace; pub struct Get; @@ -50,56 +50,51 @@ pub fn get_column_path( path: &ColumnPath, obj: &Tagged, ) -> Result, ShellError> { - let mut current = Some(obj); - for p in path.iter() { - if let Some(obj) = current { - current = match obj.get_data_by_key(&p) { - Some(v) => Some(v), - None => - // Before we give up, see if they gave us a path that matches a field name by itself - { - let possibilities = obj.data_descriptors(); + let fields = path.clone(); - let mut possible_matches: Vec<_> = possibilities - .iter() - .map(|x| (natural::distance::levenshtein_distance(x, &p), x)) - .collect(); - - possible_matches.sort(); - - if possible_matches.len() > 0 { - return Err(ShellError::labeled_error( - "Unknown column", - format!("did you mean '{}'?", possible_matches[0].1), - tag_for_tagged_list(path.iter().map(|p| p.tag())), - )); - } else { - return Err(ShellError::labeled_error( - "Unknown column", - "row does not contain this column", - tag_for_tagged_list(path.iter().map(|p| p.tag())), - )); - } + let value = obj.get_data_by_column_path( + obj.tag(), + path, + Box::new(move |(obj_source, column_path_tried)| { + match did_you_mean(&obj_source, &column_path_tried) { + Some(suggestions) => { + return ShellError::labeled_error( + "Unknown column", + format!("did you mean '{}'?", suggestions[0].1), + 
tag_for_tagged_list(fields.iter().map(|p| p.tag())), + ) + } + None => { + return ShellError::labeled_error( + "Unknown column", + "row does not contain this column", + tag_for_tagged_list(fields.iter().map(|p| p.tag())), + ) } } - } - } + }), + ); - match current { - Some(v) => Ok(v.clone()), - None => match obj { - // If its None check for certain values. - Tagged { - item: Value::Primitive(Primitive::String(_)), - .. - } => Ok(obj.clone()), - Tagged { - item: Value::Primitive(Primitive::Path(_)), - .. - } => Ok(obj.clone()), - _ => Ok(Value::nothing().tagged(&obj.tag)), + let res = match value { + Ok(fetched) => match fetched { + Some(Tagged { item: v, tag }) => Ok((v.clone()).tagged(&tag)), + None => match obj { + // If its None check for certain values. + Tagged { + item: Value::Primitive(Primitive::String(_)), + .. + } => Ok(obj.clone()), + Tagged { + item: Value::Primitive(Primitive::Path(_)), + .. + } => Ok(obj.clone()), + _ => Ok(Value::nothing().tagged(&obj.tag)), + }, }, - } + Err(reason) => Err(reason), + }; + + res } pub fn get( @@ -118,26 +113,30 @@ pub fn get( let member = vec![member.clone()]; - let fields = vec![&member, &fields] + let column_paths = vec![&member, &fields] .into_iter() .flatten() .collect::>(); - for column_path in &fields { - match get_column_path(column_path, &item) { - Ok(Tagged { - item: Value::Table(l), - .. - }) => { - for item in l { - result.push_back(ReturnSuccess::value(item.clone())); + for path in column_paths { + let res = get_column_path(&path, &item); + + match res { + Ok(got) => match got { + Tagged { + item: Value::Table(rows), + .. 
+ } => { + for item in rows { + result.push_back(ReturnSuccess::value(item.clone())); + } } - } - Ok(x) => result.push_back(ReturnSuccess::value(x.clone())), - Err(x) => result.push_back(Err(x)), + other => result + .push_back(ReturnSuccess::value((*other).clone().tagged(&item.tag))), + }, + Err(reason) => result.push_back(Err(reason)), } } - result }) .flatten(); diff --git a/src/data/base.rs b/src/data/base.rs index 43cb837849..2d0288ac87 100644 --- a/src/data/base.rs +++ b/src/data/base.rs @@ -530,7 +530,8 @@ impl Value { &self, tag: Tag, path: &Vec>, - ) -> Option> { + callback: Box)) -> ShellError>, + ) -> Result>, ShellError> { let mut current = self; for p in path { let value = if p.chars().all(char::is_numeric) { @@ -543,11 +544,11 @@ impl Value { match value { Some(v) => current = v, - None => return None, + None => return Err(callback((¤t.clone(), &p.clone()))), } } - Some(current.tagged(tag)) + Ok(Some(current.tagged(tag))) } pub fn insert_data_at_path( @@ -927,6 +928,7 @@ fn coerce_compare_primitive( mod tests { use crate::data::meta::*; + use crate::ShellError; use crate::Value; use indexmap::IndexMap; @@ -942,6 +944,10 @@ mod tests { Value::table(list).tagged_unknown() } + fn error_callback() -> impl FnOnce((&Value, &Tagged)) -> ShellError { + move |(_obj_source, _column_path_tried)| ShellError::unimplemented("will never be called.") + } + fn column_path(paths: &Vec>) -> Tagged>> { table( &paths @@ -984,7 +990,10 @@ mod tests { }); assert_eq!( - **value.get_data_by_column_path(tag, &field_path).unwrap(), + **value + .get_data_by_column_path(tag, &field_path, Box::new(error_callback())) + .unwrap() + .unwrap(), version ) } @@ -1008,7 +1017,10 @@ mod tests { }); assert_eq!( - **value.get_data_by_column_path(tag, &field_path).unwrap(), + **value + .get_data_by_column_path(tag, &field_path, Box::new(error_callback())) + .unwrap() + .unwrap(), name ) } @@ -1032,7 +1044,10 @@ mod tests { }); assert_eq!( - **value.get_data_by_column_path(tag, 
&field_path).unwrap(), + **value + .get_data_by_column_path(tag, &field_path, Box::new(error_callback())) + .unwrap() + .unwrap(), Value::row(indexmap! { "name".into() => string("Andrés N. Robalino") }) diff --git a/src/lib.rs b/src/lib.rs index 520e08a136..f21a70cfe2 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -30,12 +30,12 @@ pub use crate::env::host::BasicHost; pub use crate::parser::hir::SyntaxShape; pub use crate::parser::parse::token_tree_builder::TokenTreeBuilder; pub use crate::plugin::{serve_plugin, Plugin}; -pub use crate::utils::{AbsoluteFile, AbsolutePath, RelativePath}; +pub use crate::utils::{did_you_mean, AbsoluteFile, AbsolutePath, RelativePath}; pub use cli::cli; pub use data::base::{Primitive, Value}; pub use data::config::{config_path, APP_INFO}; pub use data::dict::{Dictionary, TaggedDictBuilder}; -pub use data::meta::{Span, Spanned, SpannedItem, Tag, Tagged, TaggedItem}; +pub use data::meta::{tag_for_tagged_list, Span, Spanned, SpannedItem, Tag, Tagged, TaggedItem}; pub use errors::{CoerceInto, ShellError}; pub use num_traits::cast::ToPrimitive; pub use parser::parse::text::Text; diff --git a/src/plugins/inc.rs b/src/plugins/inc.rs index ed0416ce43..fb3836dfd3 100644 --- a/src/plugins/inc.rs +++ b/src/plugins/inc.rs @@ -1,6 +1,6 @@ use nu::{ - serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature, - SyntaxShape, Tagged, TaggedItem, Value, + did_you_mean, serve_plugin, tag_for_tagged_list, CallInfo, Plugin, Primitive, ReturnSuccess, + ReturnValue, ShellError, Signature, SyntaxShape, Tagged, TaggedItem, Value, }; enum Action { @@ -93,22 +93,51 @@ impl Inc { )); } } + Value::Row(_) => match self.field { Some(ref f) => { - let replacement = match value.item.get_data_by_column_path(value.tag(), f) { - Some(result) => self.inc(result.map(|x| x.clone()))?, - None => { - return Err(ShellError::labeled_error( - "inc could not find field to replace", - "column name", - value.tag(), - )) - } + let fields = f.clone(); 
+ + let replace_for = value.item.get_data_by_column_path( + value.tag(), + &f, + Box::new(move |(obj_source, column_path_tried)| { + match did_you_mean(&obj_source, &column_path_tried) { + Some(suggestions) => { + return ShellError::labeled_error( + "Unknown column", + format!("did you mean '{}'?", suggestions[0].1), + tag_for_tagged_list(fields.iter().map(|p| p.tag())), + ) + } + None => { + return ShellError::labeled_error( + "Unknown column", + "row does not contain this column", + tag_for_tagged_list(fields.iter().map(|p| p.tag())), + ) + } + } + }), + ); + + let replacement = match replace_for { + Ok(got) => match got { + Some(result) => self.inc(result.map(|x| x.clone()))?, + None => { + return Err(ShellError::labeled_error( + "inc could not find field to replace", + "column name", + value.tag(), + )) + } + }, + Err(reason) => return Err(reason), }; match value.item.replace_data_at_column_path( value.tag(), - f, + &f, replacement.item.clone(), ) { Some(v) => return Ok(v), diff --git a/src/plugins/str.rs b/src/plugins/str.rs index 8260bdac2c..f565d5209e 100644 --- a/src/plugins/str.rs +++ b/src/plugins/str.rs @@ -1,6 +1,6 @@ use nu::{ - serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature, - SyntaxShape, Tagged, TaggedItem, Value, + did_you_mean, serve_plugin, tag_for_tagged_list, CallInfo, Plugin, Primitive, ReturnSuccess, + ReturnValue, ShellError, Signature, SyntaxShape, Tagged, TaggedItem, Value, }; #[derive(Debug, Eq, PartialEq)] @@ -92,13 +92,50 @@ impl Str { Value::Primitive(Primitive::String(ref s)) => Ok(self.apply(&s)?.tagged(value.tag())), Value::Row(_) => match self.field { Some(ref f) => { - let replacement = match value.item.get_data_by_column_path(value.tag(), f) { - Some(result) => self.strutils(result.map(|x| x.clone()))?, - None => return Ok(Value::nothing().tagged(value.tag)), + let fields = f.clone(); + + let replace_for = value.item.get_data_by_column_path( + value.tag(), + &f, + Box::new(move 
|(obj_source, column_path_tried)| { + //let fields = f.clone(); + + match did_you_mean(&obj_source, &column_path_tried) { + Some(suggestions) => { + return ShellError::labeled_error( + "Unknown column", + format!("did you mean '{}'?", suggestions[0].1), + tag_for_tagged_list(fields.iter().map(|p| p.tag())), + ) + } + None => { + return ShellError::labeled_error( + "Unknown column", + "row does not contain this column", + tag_for_tagged_list(fields.iter().map(|p| p.tag())), + ) + } + } + }), + ); + + let replacement = match replace_for { + Ok(got) => match got { + Some(result) => self.strutils(result.map(|x| x.clone()))?, + None => { + return Err(ShellError::labeled_error( + "inc could not find field to replace", + "column name", + value.tag(), + )) + } + }, + Err(reason) => return Err(reason), }; + match value.item.replace_data_at_column_path( value.tag(), - f, + &f, replacement.item.clone(), ) { Some(v) => return Ok(v), diff --git a/src/prelude.rs b/src/prelude.rs index 4b12a07bda..6ff62c3240 100644 --- a/src/prelude.rs +++ b/src/prelude.rs @@ -66,7 +66,9 @@ pub(crate) use crate::commands::RawCommandArgs; pub(crate) use crate::context::CommandRegistry; pub(crate) use crate::context::{AnchorLocation, Context}; pub(crate) use crate::data::base as value; -pub(crate) use crate::data::meta::{Span, Spanned, SpannedItem, Tag, Tagged, TaggedItem}; +pub(crate) use crate::data::meta::{ + tag_for_tagged_list, Span, Spanned, SpannedItem, Tag, Tagged, TaggedItem, +}; pub(crate) use crate::data::types::ExtractType; pub(crate) use crate::data::{Primitive, Value}; pub(crate) use crate::env::host::handle_unexpected; diff --git a/src/utils.rs b/src/utils.rs index 56fee491b6..9822b76278 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -5,6 +5,30 @@ use std::fmt; use std::ops::Div; use std::path::{Component, Path, PathBuf}; +pub fn did_you_mean( + obj_source: &Value, + field_tried: &Tagged, +) -> Option> { + let possibilities = obj_source.data_descriptors(); + + let mut 
possible_matches: Vec<_> = possibilities + .into_iter() + .map(|x| { + let word = x.clone(); + let distance = natural::distance::levenshtein_distance(&word, &field_tried); + + (distance, word) + }) + .collect(); + + if possible_matches.len() > 0 { + possible_matches.sort(); + return Some(possible_matches); + } + + None +} + pub struct AbsoluteFile { inner: PathBuf, } From b54ce921dd67a1dcfa08045b1aa62fcca846600e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Thu, 31 Oct 2019 04:36:08 -0500 Subject: [PATCH 111/184] Better error messages. --- src/commands/get.rs | 20 ++++++ src/data/base.rs | 12 ++-- src/plugins/str.rs | 2 - tests/command_get_tests.rs | 125 +++++++++++++++++++++++++++++++++++++ 4 files changed, 149 insertions(+), 10 deletions(-) create mode 100644 tests/command_get_tests.rs diff --git a/src/commands/get.rs b/src/commands/get.rs index 69ef15333a..cda637495e 100644 --- a/src/commands/get.rs +++ b/src/commands/get.rs @@ -56,6 +56,26 @@ pub fn get_column_path( obj.tag(), path, Box::new(move |(obj_source, column_path_tried)| { + match obj_source { + Value::Table(rows) => { + let total = rows.len(); + let end_tag = match fields.iter().nth_back(if fields.len() > 2 { 1 } else { 0 }) + { + Some(last_field) => last_field.tag(), + None => column_path_tried.tag(), + }; + + return ShellError::labeled_error_with_secondary( + "Row not found", + format!("There isn't a row indexed at '{}'", **column_path_tried), + column_path_tried.tag(), + format!("The table only has {} rows (0..{})", total, total - 1), + end_tag, + ); + } + _ => {} + } + match did_you_mean(&obj_source, &column_path_tried) { Some(suggestions) => { return ShellError::labeled_error( diff --git a/src/data/base.rs b/src/data/base.rs index 2d0288ac87..470a97f617 100644 --- a/src/data/base.rs +++ b/src/data/base.rs @@ -459,11 +459,10 @@ impl Value { } } - // TODO: This is basically a legacy construct, I think pub fn data_descriptors(&self) -> Vec { match self { 
Value::Primitive(_) => vec![], - Value::Row(o) => o + Value::Row(columns) => columns .entries .keys() .into_iter() @@ -534,12 +533,9 @@ impl Value { ) -> Result>, ShellError> { let mut current = self; for p in path { - let value = if p.chars().all(char::is_numeric) { - current.get_data_by_index(p.chars().fold(0 as usize, |acc, c| { - c.to_digit(10).unwrap_or(0) as usize + acc - })) - } else { - current.get_data_by_key(p) + let value = match p.item().parse::() { + Ok(number) => current.get_data_by_index(number), + Err(_) => current.get_data_by_key(p), }; match value { diff --git a/src/plugins/str.rs b/src/plugins/str.rs index f565d5209e..e6b047dad3 100644 --- a/src/plugins/str.rs +++ b/src/plugins/str.rs @@ -98,8 +98,6 @@ impl Str { value.tag(), &f, Box::new(move |(obj_source, column_path_tried)| { - //let fields = f.clone(); - match did_you_mean(&obj_source, &column_path_tried) { Some(suggestions) => { return ShellError::labeled_error( diff --git a/tests/command_get_tests.rs b/tests/command_get_tests.rs new file mode 100644 index 0000000000..e3c2272ac7 --- /dev/null +++ b/tests/command_get_tests.rs @@ -0,0 +1,125 @@ +mod helpers; + +use helpers as h; +use helpers::{Playground, Stub::*}; +#[test] +fn get() { + Playground::setup("get_test_1", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" + nu_party_venue = "zion" + "#, + )]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + open sample.toml + | get nu_party_venue + | echo $it + "# + )); + + assert_eq!(actual, "zion"); + }) +} + +#[test] +fn fetches_by_index_from_a_given_table() { + Playground::setup("get_test_2", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" + [package] + name = "nu" + version = "0.4.1" + authors = ["Yehuda Katz ", "Jonathan Turner ", "Andrés N. Robalino "] + description = "When arepas shells are tasty and fun." 
+ "#, + )]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + open sample.toml + | get package.authors.2 + | echo $it + "# + )); + + assert_eq!(actual, "Andrés N. Robalino "); + }) +} + +#[test] +fn fetches_more_than_one_column_member_path() { + Playground::setup("get_test_3", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" + [[fortune_tellers]] + name = "Andrés N. Robalino" + arepas = 1 + + [[fortune_tellers]] + name = "Jonathan Turner" + arepas = 1 + + [[fortune_tellers]] + name = "Yehuda Katz" + arepas = 1 + "#, + )]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + open sample.toml + | get fortune_tellers.2.name fortune_tellers.0.name fortune_tellers.1.name + | nth 2 + | echo $it + "# + )); + + assert_eq!(actual, "Jonathan Turner"); + }) +} + +#[test] +fn errors_fetching_by_index_out_of_bounds_from_table() { + Playground::setup("get_test_2", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" + [spanish_lesson] + sentence_words = ["Yo", "quiero", "taconushell"] + "#, + )]); + + let actual = nu_error!( + cwd: dirs.test(), h::pipeline( + r#" + open sample.toml + | get spanish_lesson.sentence_words.3 + "# + )); + + assert!(actual.contains("Row not found")); + assert!(actual.contains("There isn't a row indexed at '3'")); + assert!(actual.contains("The table only has 3 rows (0..2)")) + }) +} + +#[test] +fn requires_at_least_one_column_member_path() { + Playground::setup("first_test_4", |dirs, sandbox| { + sandbox.with_files(vec![EmptyFile("andres.txt")]); + + let actual = nu_error!( + cwd: dirs.test(), "ls | get" + ); + + assert!(actual.contains("requires member parameter")); + }) +} From 65ae24fbf15406d1470f260b95190c4db3f16eb8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Thu, 31 Oct 2019 04:42:18 -0500 Subject: [PATCH 112/184] suite in place. 
--- tests/command_get_tests.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/command_get_tests.rs b/tests/command_get_tests.rs index e3c2272ac7..a6a4dea8e3 100644 --- a/tests/command_get_tests.rs +++ b/tests/command_get_tests.rs @@ -2,6 +2,7 @@ mod helpers; use helpers as h; use helpers::{Playground, Stub::*}; + #[test] fn get() { Playground::setup("get_test_1", |dirs, sandbox| { @@ -88,7 +89,7 @@ fn fetches_more_than_one_column_member_path() { #[test] fn errors_fetching_by_index_out_of_bounds_from_table() { - Playground::setup("get_test_2", |dirs, sandbox| { + Playground::setup("get_test_4", |dirs, sandbox| { sandbox.with_files(vec![FileWithContent( "sample.toml", r#" @@ -113,7 +114,7 @@ fn errors_fetching_by_index_out_of_bounds_from_table() { #[test] fn requires_at_least_one_column_member_path() { - Playground::setup("first_test_4", |dirs, sandbox| { + Playground::setup("get_test_5", |dirs, sandbox| { sandbox.with_files(vec![EmptyFile("andres.txt")]); let actual = nu_error!( From e31ed666106a57df01a509098e0088354273498a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Thu, 31 Oct 2019 14:20:22 -0500 Subject: [PATCH 113/184] get :: support fetching rows using numbers in column path. --- src/data/base.rs | 48 +++++++++++++++++++++-- tests/command_get_tests.rs | 80 ++++++++++++++++++++++++++++++++++++-- 2 files changed, 121 insertions(+), 7 deletions(-) diff --git a/src/data/base.rs b/src/data/base.rs index 470a97f617..17691e24be 100644 --- a/src/data/base.rs +++ b/src/data/base.rs @@ -533,10 +533,21 @@ impl Value { ) -> Result>, ShellError> { let mut current = self; for p in path { + // note: + // This will eventually be refactored once we are able + // to parse correctly column_paths and get them deserialized + // to values for us. 
let value = match p.item().parse::() { - Ok(number) => current.get_data_by_index(number), - Err(_) => current.get_data_by_key(p), - }; + Ok(number) => match current { + Value::Table(_) => current.get_data_by_index(number), + Value::Row(_) => current.get_data_by_key(p), + _ => None, + }, + Err(_) => match self { + Value::Table(_) | Value::Row(_) => current.get_data_by_key(p), + _ => None, + }, + }; // end match value { Some(v) => current = v, @@ -1022,7 +1033,7 @@ mod tests { } #[test] - fn column_path_that_contains_just_a_numbers_gets_a_row_from_a_table() { + fn column_path_that_contains_just_a_number_gets_a_row_from_a_table() { let field_path = column_path(&vec![string("package"), string("authors"), string("0")]); let (_, tag) = string("Andrés N. Robalino").into_parts(); @@ -1050,6 +1061,35 @@ mod tests { ); } + #[test] + fn column_path_that_contains_just_a_number_gets_a_row_from_a_row() { + let field_path = column_path(&vec![string("package"), string("authors"), string("0")]); + + let (_, tag) = string("Andrés N. Robalino").into_parts(); + + let value = Value::row(indexmap! { + "package".into() => row(indexmap! { + "name".into() => string("nu"), + "version".into() => string("0.4.0"), + "authors".into() => row(indexmap! { + "0".into() => row(indexmap!{"name".into() => string("Andrés N. Robalino")}), + "1".into() => row(indexmap!{"name".into() => string("Jonathan Turner")}), + "2".into() => row(indexmap!{"name".into() => string("Yehuda Katz")}), + }) + }) + }); + + assert_eq!( + **value + .get_data_by_column_path(tag, &field_path, Box::new(error_callback())) + .unwrap() + .unwrap(), + Value::row(indexmap! { + "name".into() => string("Andrés N. 
Robalino") + }) + ); + } + #[test] fn replaces_matching_field_from_a_row() { let field_path = column_path(&vec![string("amigos")]); diff --git a/tests/command_get_tests.rs b/tests/command_get_tests.rs index a6a4dea8e3..09348678bf 100644 --- a/tests/command_get_tests.rs +++ b/tests/command_get_tests.rs @@ -52,10 +52,61 @@ fn fetches_by_index_from_a_given_table() { assert_eq!(actual, "Andrés N. Robalino "); }) } +#[test] +fn supports_fetching_rows_from_tables_using_columns_named_as_numbers() { + Playground::setup("get_test_3", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" + [package] + 0 = "nu" + 1 = "0.4.1" + "#, + )]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + open sample.toml + | get package.1 + | echo $it + "# + )); + + assert_eq!(actual, "0.4.1"); + }) +} + +#[test] +fn can_fetch_tables_or_rows_using_numbers_in_column_path() { + Playground::setup("get_test_4", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" + [package] + 0 = "nu" + 1 = "0.4.1" + 2 = ["Yehuda Katz ", "Jonathan Turner ", "Andrés N. Robalino "] + description = "When arepas shells are tasty and fun." 
+ "#, + )]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + open sample.toml + | get package.2.1 + | echo $it + "# + )); + + assert_eq!(actual, "Jonathan Turner "); + }) +} #[test] fn fetches_more_than_one_column_member_path() { - Playground::setup("get_test_3", |dirs, sandbox| { + Playground::setup("get_test_5", |dirs, sandbox| { sandbox.with_files(vec![FileWithContent( "sample.toml", r#" @@ -87,9 +138,32 @@ fn fetches_more_than_one_column_member_path() { }) } +#[test] +fn errors_fetching_by_column_not_present() { + Playground::setup("get_test_6", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" + [taconushell] + sentence_words = ["Yo", "quiero", "taconushell"] + "#, + )]); + + let actual = nu_error!( + cwd: dirs.test(), h::pipeline( + r#" + open sample.toml + | get taco + "# + )); + + assert!(actual.contains("Unknown column")); + assert!(actual.contains("did you mean 'taconushell'?")); + }) +} #[test] fn errors_fetching_by_index_out_of_bounds_from_table() { - Playground::setup("get_test_4", |dirs, sandbox| { + Playground::setup("get_test_7", |dirs, sandbox| { sandbox.with_files(vec![FileWithContent( "sample.toml", r#" @@ -114,7 +188,7 @@ fn errors_fetching_by_index_out_of_bounds_from_table() { #[test] fn requires_at_least_one_column_member_path() { - Playground::setup("get_test_5", |dirs, sandbox| { + Playground::setup("get_test_8", |dirs, sandbox| { sandbox.with_files(vec![EmptyFile("andres.txt")]); let actual = nu_error!( From cd058db0460e4197d62896899add9406a0541714 Mon Sep 17 00:00:00 2001 From: Flare576 Date: Thu, 31 Oct 2019 12:40:56 -0500 Subject: [PATCH 114/184] Substring option for str plugin Adds new substr function to str plugin with tests and documentation Function takes a start/end location as a string in the form "##,##", both sides of comma are optional, and behaves like Rust's own index operator [##..##]. 
--- Cargo.lock | 8 +- docs/commands/str.md | 50 ++++++++++ src/plugins/str.rs | 221 +++++++++++++++++++++++++++++++++++++++++-- 3 files changed, 265 insertions(+), 14 deletions(-) create mode 100644 docs/commands/str.md diff --git a/Cargo.lock b/Cargo.lock index 9f8ebfe787..bed539c93a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1540,7 +1540,7 @@ dependencies = [ "regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "roxmltree 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "rusqlite 0.20.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rustyline 5.0.3 (git+https://github.com/kkawakam/rustyline.git)", + "rustyline 5.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde-hjson 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2077,8 +2077,8 @@ dependencies = [ [[package]] name = "rustyline" -version = "5.0.3" -source = "git+https://github.com/kkawakam/rustyline.git#449c811998f630102bb2d9fb0b59b890d9eabac5" +version = "5.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "dirs 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", @@ -3056,7 +3056,7 @@ dependencies = [ "checksum rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "4c691c0e608126e00913e33f0ccf3727d5fc84573623b8d65b2df340b5201783" "checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" "checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" -"checksum rustyline 5.0.3 (git+https://github.com/kkawakam/rustyline.git)" = "" +"checksum rustyline 
5.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "e9d8eb9912bc492db051324d36f5cea56984fc2afeaa5c6fa84e0b0e3cde550f" "checksum ryu 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "19d2271fa48eaf61e53cc88b4ad9adcbafa2d512c531e7fadb6dc11a4d3656c5" "checksum safemem 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d2b08423011dae9a5ca23f07cf57dac3857f5c885d352b76f6d95f4aea9434d0" "checksum same-file 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "585e8ddcedc187886a30fa705c47985c3fa88d06624095856b36ca0b82ff4421" diff --git a/docs/commands/str.md b/docs/commands/str.md new file mode 100644 index 0000000000..b9ddc8c2e4 --- /dev/null +++ b/docs/commands/str.md @@ -0,0 +1,50 @@ +# str + +Consumes either a single value or a table and converts the provided data to a string and optionally applies a change. + +## Examples + +```shell +> shells +━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + # │ │ name │ path +───┼───┼────────────┼──────────────────────────────── + 0 │ X │ filesystem │ /home/TUX/stuff/expr/stuff + 1 │ │ filesystem │ / +━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +> shells | str path --upcase +━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + # │ │ name │ path +───┼───┼────────────┼──────────────────────────────── + 0 │ X │ filesystem │ /HOME/TUX/STUFF/EXPR/STUFF + 1 │ │ filesystem │ / +━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +> shells | str path --downcase +━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + # │ │ name │ path +───┼───┼────────────┼──────────────────────────────── + 0 │ X │ filesystem │ /home/tux/stuff/expr/stuff + 1 │ │ filesystem │ / +━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +> shells | str # --to-int +━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + # │ │ name │ path +───┼───┼────────────┼──────────────────────────────── + 0 │ X │ filesystem │ /home/TUX/stuff/expr/stuff + 1 │ │ filesystem │ / 
+━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +> shells | str # --substring "21, 99" +━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + # │ │ name │ path +───┼───┼────────────┼──────────────────────────────── + 0 │ X │ filesystem │ stuff + 1 │ │ filesystem │ +━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +> shells | str # --substring "6," +━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + # │ │ name │ path +───┼───┼────────────┼──────────────────────────────── + 0 │ X │ filesystem │ TUX/stuff/expr/stuff + 1 │ │ filesystem │ +━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +``` diff --git a/src/plugins/str.rs b/src/plugins/str.rs index 8260bdac2c..a0950b2ec0 100644 --- a/src/plugins/str.rs +++ b/src/plugins/str.rs @@ -2,12 +2,14 @@ use nu::{ serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature, SyntaxShape, Tagged, TaggedItem, Value, }; +use std::cmp; #[derive(Debug, Eq, PartialEq)] enum Action { Downcase, Upcase, ToInteger, + Substring(String), } pub type ColumnPath = Vec>; @@ -33,6 +35,26 @@ impl Str { let applied = match self.action.as_ref() { Some(Action::Downcase) => Value::string(input.to_ascii_lowercase()), Some(Action::Upcase) => Value::string(input.to_ascii_uppercase()), + Some(Action::Substring(s)) => { + // Index operator isn't perfect: https://users.rust-lang.org/t/how-to-get-a-substring-of-a-string/1351 + let no_spaces: String = s.chars().filter(|c| !c.is_whitespace()).collect(); + let v: Vec<&str> = no_spaces.split(',').collect(); + let start: usize = match v[0] { + "" => 0, + _ => v[0].parse().unwrap(), + }; + let end: usize = match v[1] { + "" => input.len(), + _ => cmp::min(v[1].parse().unwrap(), input.len()), + }; + if start > input.len() - 1 { + Value::string("") + } else if start > end { + Value::string(input) + } else { + Value::string(&input[start..end]) + } + } Some(Action::ToInteger) => match input.trim() { other => match other.parse::() { Ok(v) => Value::int(v), @@ 
-81,8 +103,16 @@ impl Str { } } + fn for_substring(&mut self, start_end: String) { + if self.permit() { + self.action = Some(Action::Substring(start_end)); + } else { + self.log_error("can only apply one"); + } + } + pub fn usage() -> &'static str { - "Usage: str field [--downcase|--upcase|--to-int]" + "Usage: str field [--downcase|--upcase|--to-int|--substring \"start,end\"]" } } @@ -132,6 +162,11 @@ impl Plugin for Str { .switch("downcase", "convert string to lowercase") .switch("upcase", "convert string to uppercase") .switch("to-int", "convert string to integer") + .named( + "substring", + SyntaxShape::String, + "convert string to portion of original, requires \"start,end\"", + ) .rest(SyntaxShape::ColumnPath, "the column(s) to convert") .filter()) } @@ -148,20 +183,34 @@ impl Plugin for Str { if args.has("to-int") { self.for_to_int(); } + if args.has("substring") { + if let Some(start_end) = args.get("substring") { + match start_end { + Tagged { + item: Value::Primitive(Primitive::String(s)), + .. + } => { + self.for_substring(s.to_string()); + } + _ => { + return Err(ShellError::labeled_error( + "Unrecognized type in params", + start_end.type_name(), + &start_end.tag, + )) + } + } + } + } if let Some(possible_field) = args.nth(0) { match possible_field { Tagged { item: Value::Primitive(Primitive::String(s)), tag, - } => match self.action { - Some(Action::Downcase) - | Some(Action::Upcase) - | Some(Action::ToInteger) - | None => { - self.for_field(vec![s.clone().tagged(tag)]); - } - }, + } => { + self.for_field(vec![s.clone().tagged(tag)]); + } table @ Tagged { item: Value::Table(_), .. 
@@ -177,7 +226,6 @@ impl Plugin for Str { } } } - for param in args.positional_iter() { match param { Tagged { @@ -232,6 +280,14 @@ mod tests { } } + fn with_named_parameter(&mut self, name: &str, value: &str) -> &mut Self { + self.flags.insert( + name.to_string(), + Value::string(value).tagged(Tag::unknown()), + ); + self + } + fn with_long_flag(&mut self, name: &str) -> &mut Self { self.flags.insert( name.to_string(), @@ -339,6 +395,7 @@ mod tests { .with_long_flag("upcase") .with_long_flag("downcase") .with_long_flag("to-int") + .with_long_flag("substring") .create(), ) .is_err()); @@ -509,4 +566,148 @@ mod tests { _ => {} } } + + #[test] + fn str_plugin_applies_substring_without_field() { + let mut plugin = Str::new(); + + assert!(plugin + .begin_filter( + CallStub::new() + .with_named_parameter("substring", "0,1") + .create() + ) + .is_ok()); + + let subject = unstructured_sample_record("0123456789"); + let output = plugin.filter(subject).unwrap(); + + match output[0].as_ref().unwrap() { + ReturnSuccess::Value(Tagged { + item: Value::Primitive(Primitive::String(s)), + .. + }) => assert_eq!(*s, String::from("0")), + _ => {} + } + } + + #[test] + fn str_plugin_applies_substring_exceeding_string_length() { + let mut plugin = Str::new(); + + assert!(plugin + .begin_filter( + CallStub::new() + .with_named_parameter("substring", "0,11") + .create() + ) + .is_ok()); + + let subject = unstructured_sample_record("0123456789"); + let output = plugin.filter(subject).unwrap(); + + match output[0].as_ref().unwrap() { + ReturnSuccess::Value(Tagged { + item: Value::Primitive(Primitive::String(s)), + .. 
+ }) => assert_eq!(*s, String::from("0123456789")), + _ => {} + } + } + + #[test] + fn str_plugin_applies_substring_returns_blank_if_start_exceeds_length() { + let mut plugin = Str::new(); + + assert!(plugin + .begin_filter( + CallStub::new() + .with_named_parameter("substring", "20,30") + .create() + ) + .is_ok()); + + let subject = unstructured_sample_record("0123456789"); + let output = plugin.filter(subject).unwrap(); + + match output[0].as_ref().unwrap() { + ReturnSuccess::Value(Tagged { + item: Value::Primitive(Primitive::String(s)), + .. + }) => assert_eq!(*s, String::from("")), + _ => {} + } + } + + #[test] + fn str_plugin_applies_substring_treats_blank_start_as_zero() { + let mut plugin = Str::new(); + + assert!(plugin + .begin_filter( + CallStub::new() + .with_named_parameter("substring", ",5") + .create() + ) + .is_ok()); + + let subject = unstructured_sample_record("0123456789"); + let output = plugin.filter(subject).unwrap(); + + match output[0].as_ref().unwrap() { + ReturnSuccess::Value(Tagged { + item: Value::Primitive(Primitive::String(s)), + .. + }) => assert_eq!(*s, String::from("01234")), + _ => {} + } + } + + #[test] + fn str_plugin_applies_substring_treats_blank_end_as_length() { + let mut plugin = Str::new(); + + assert!(plugin + .begin_filter( + CallStub::new() + .with_named_parameter("substring", "2,") + .create() + ) + .is_ok()); + + let subject = unstructured_sample_record("0123456789"); + let output = plugin.filter(subject).unwrap(); + + match output[0].as_ref().unwrap() { + ReturnSuccess::Value(Tagged { + item: Value::Primitive(Primitive::String(s)), + .. 
+ }) => assert_eq!(*s, String::from("23456789")), + _ => {} + } + } + + #[test] + fn str_plugin_applies_substring_returns_string_if_start_exceeds_end() { + let mut plugin = Str::new(); + + assert!(plugin + .begin_filter( + CallStub::new() + .with_named_parameter("substring", "3,1") + .create() + ) + .is_ok()); + + let subject = unstructured_sample_record("0123456789"); + let output = plugin.filter(subject).unwrap(); + + match output[0].as_ref().unwrap() { + ReturnSuccess::Value(Tagged { + item: Value::Primitive(Primitive::String(s)), + .. + }) => assert_eq!(*s, String::from("0123456789")), + _ => {} + } + } } From b822e13f1221f5891c1149d1e7f97e3d43b290d6 Mon Sep 17 00:00:00 2001 From: Dan Herrera Date: Fri, 1 Nov 2019 00:08:24 -0400 Subject: [PATCH 115/184] Add documentation for tags command --- docs/commands/tags.md | 47 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 docs/commands/tags.md diff --git a/docs/commands/tags.md b/docs/commands/tags.md new file mode 100644 index 0000000000..2c80cc19cf --- /dev/null +++ b/docs/commands/tags.md @@ -0,0 +1,47 @@ +# tags + +The tags commands allows users to access the metadata of the previous value in +the pipeline. This command may be run on multiple values of input as well. 
+ +As of writing this, the only metadata returned includes: + +- `span`: the start and end indices of the previous value's substring location +- `anchor`: the source where data was loaded from; this may not appear if the + previous pipeline value didn't actually have a source (like trying to `open` a + dir, or running `ls` on a dir) + +## Examples + +```shell +> open README.md | tags +━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + span │ anchor +────────────────┼────────────────────────────────────────────────── + [table: 1 row] │ /Users/danielh/Projects/github/nushell/README.md +━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +``` + +```shell +> open README.md | tags | get span +━━━━━━━┯━━━━━ + start │ end +───────┼───── + 5 │ 14 +━━━━━━━┷━━━━━ +``` + +```shell +> ls | tags | first 3 | get span +━━━┯━━━━━━━┯━━━━━ + # │ start │ end +───┼───────┼───── + 0 │ 0 │ 2 + 1 │ 0 │ 2 + 2 │ 0 │ 2 +━━━┷━━━━━━━┷━━━━━ +``` + +## Reference + +More useful information on the `tags` command can be found by referencing [The +Nu Book's entry on Metadata](https://book.nushell.sh/en/metadata) From 4be88ff572f631df0f8b571e85c60d30eb9f248c Mon Sep 17 00:00:00 2001 From: Yehuda Katz Date: Mon, 28 Oct 2019 07:46:50 -0700 Subject: [PATCH 116/184] Modernize external parse and improve trace The original purpose of this PR was to modernize the external parser to use the new Shape system. This commit does include some of that change, but a more important aspect of this change is an improvement to the expansion trace. Previous commit 6a7c00ea adding trace infrastructure to the syntax coloring feature. This commit adds tracing to the expander. The bulk of that work, in addition to the tree builder logic, was an overhaul of the formatter traits to make them more general purpose, and more structured. 
Some highlights: - `ToDebug` was split into two traits (`ToDebug` and `DebugFormat`) because implementations needed to become objects, but a convenience method on `ToDebug` didn't qualify - `DebugFormat`'s `fmt_debug` method now takes a `DebugFormatter` rather than a standard formatter, and `DebugFormatter` has a new (but still limited) facility for structured formatting. - Implementations of `ExpandSyntax` need to produce output that implements `DebugFormat`. Unlike the highlighter changes, these changes are fairly focused in the trace output, so these changes aren't behind a flag. --- .cargo/config | 3 + Cargo.lock | 9 +- Cargo.toml | 1 + src/cli.rs | 36 +- src/commands/classified.rs | 94 +++- src/commands/command.rs | 26 +- src/context.rs | 3 +- src/data/meta.rs | 132 ++++++ src/errors.rs | 94 +++- src/lib.rs | 6 +- src/main.rs | 15 + src/parser/hir.rs | 41 +- src/parser/hir/baseline_parse/tests.rs | 19 +- src/parser/hir/binary.rs | 4 +- src/parser/hir/expand_external_tokens.rs | 362 ++++++++++----- src/parser/hir/external_command.rs | 4 +- src/parser/hir/named.rs | 4 +- src/parser/hir/path.rs | 4 +- src/parser/hir/syntax_shape.rs | 415 ++++++++++-------- src/parser/hir/syntax_shape/block.rs | 39 +- src/parser/hir/syntax_shape/expression.rs | 26 +- .../hir/syntax_shape/expression/atom.rs | 48 +- .../hir/syntax_shape/expression/delimited.rs | 7 +- .../hir/syntax_shape/expression/file_path.rs | 7 +- .../hir/syntax_shape/expression/list.rs | 27 +- .../hir/syntax_shape/expression/number.rs | 30 +- .../hir/syntax_shape/expression/pattern.rs | 14 +- .../hir/syntax_shape/expression/string.rs | 26 +- .../hir/syntax_shape/expression/unit.rs | 27 +- .../syntax_shape/expression/variable_path.rs | 246 ++++++++--- src/parser/hir/syntax_shape/flat_shape.rs | 2 +- src/parser/hir/tokens_iterator.rs | 219 +++++---- src/parser/hir/tokens_iterator/debug.rs | 357 +-------------- .../hir/tokens_iterator/debug/color_trace.rs | 351 +++++++++++++++ 
.../hir/tokens_iterator/debug/expand_trace.rs | 365 +++++++++++++++ src/parser/parse/call_node.rs | 8 +- src/parser/parse/operator.rs | 4 +- src/parser/parse/pipeline.rs | 24 +- src/parser/parse/token_tree.rs | 24 +- src/parser/parse/tokens.rs | 13 +- src/parser/parse/unit.rs | 7 + src/parser/parse_command.rs | 38 +- src/prelude.rs | 10 +- src/shell/helper.rs | 15 +- src/traits.rs | 124 +++++- 45 files changed, 2301 insertions(+), 1029 deletions(-) create mode 100644 src/parser/hir/tokens_iterator/debug/color_trace.rs create mode 100644 src/parser/hir/tokens_iterator/debug/expand_trace.rs diff --git a/.cargo/config b/.cargo/config index e69de29bb2..620568b44f 100644 --- a/.cargo/config +++ b/.cargo/config @@ -0,0 +1,3 @@ +[build] + +rustflags = "--cfg coloring_in_tokens" diff --git a/Cargo.lock b/Cargo.lock index 9f8ebfe787..114cbf841b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1498,6 +1498,7 @@ dependencies = [ "bson 0.14.0 (registry+https://github.com/rust-lang/crates.io-index)", "byte-unit 3.0.3 (registry+https://github.com/rust-lang/crates.io-index)", "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "chrono 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)", "chrono-humanize 0.0.11 (registry+https://github.com/rust-lang/crates.io-index)", "clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1540,7 +1541,7 @@ dependencies = [ "regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "roxmltree 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "rusqlite 0.20.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rustyline 5.0.3 (git+https://github.com/kkawakam/rustyline.git)", + "rustyline 5.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", 
"serde-hjson 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2077,8 +2078,8 @@ dependencies = [ [[package]] name = "rustyline" -version = "5.0.3" -source = "git+https://github.com/kkawakam/rustyline.git#449c811998f630102bb2d9fb0b59b890d9eabac5" +version = "5.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "dirs 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", @@ -3056,7 +3057,7 @@ dependencies = [ "checksum rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "4c691c0e608126e00913e33f0ccf3727d5fc84573623b8d65b2df340b5201783" "checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" "checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" -"checksum rustyline 5.0.3 (git+https://github.com/kkawakam/rustyline.git)" = "" +"checksum rustyline 5.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "e9d8eb9912bc492db051324d36f5cea56984fc2afeaa5c6fa84e0b0e3cde550f" "checksum ryu 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "19d2271fa48eaf61e53cc88b4ad9adcbafa2d512c531e7fadb6dc11a4d3656c5" "checksum safemem 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d2b08423011dae9a5ca23f07cf57dac3857f5c885d352b76f6d95f4aea9434d0" "checksum same-file 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "585e8ddcedc187886a30fa705c47985c3fa88d06624095856b36ca0b82ff4421" diff --git a/Cargo.toml b/Cargo.toml index 97b02b450c..f2ad5073fa 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -75,6 +75,7 @@ serde_urlencoded = "0.6.1" sublime_fuzzy = "0.5" trash = "1.0.0" regex = "1" +cfg-if = "0.1" neso = { version = "0.5.0", optional = true } crossterm = { version = "0.10.2", 
optional = true } diff --git a/src/cli.rs b/src/cli.rs index f46db10529..f050df41e7 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -14,13 +14,13 @@ use crate::git::current_branch; use crate::parser::registry::Signature; use crate::parser::{ hir, - hir::syntax_shape::{expand_syntax, PipelineShape}, - hir::{expand_external_tokens::expand_external_tokens, tokens_iterator::TokensIterator}, + hir::syntax_shape::{expand_syntax, ExpandContext, PipelineShape}, + hir::{expand_external_tokens::ExternalTokensShape, tokens_iterator::TokensIterator}, TokenNode, }; use crate::prelude::*; -use log::{debug, trace}; +use log::{debug, log_enabled, trace}; use rustyline::error::ReadlineError; use rustyline::{self, config::Configurer, config::EditMode, ColorMode, Config, Editor}; use std::env; @@ -506,6 +506,7 @@ async fn process_line(readline: Result, ctx: &mut Context Some(ClassifiedCommand::External(_)) => {} _ => pipeline .commands + .item .push(ClassifiedCommand::Internal(InternalCommand { name: "autoview".to_string(), name_tag: Tag::unknown(), @@ -513,13 +514,14 @@ async fn process_line(readline: Result, ctx: &mut Context Box::new(hir::Expression::synthetic_string("autoview")), None, None, - ), + ) + .spanned_unknown(), })), } let mut input = ClassifiedInputStream::new(); - let mut iter = pipeline.commands.into_iter().peekable(); + let mut iter = pipeline.commands.item.into_iter().peekable(); let mut is_first_command = true; // Check the config to see if we need to update the path @@ -679,11 +681,20 @@ fn classify_pipeline( let mut pipeline_list = vec![pipeline.clone()]; let mut iterator = TokensIterator::all(&mut pipeline_list, pipeline.span()); - expand_syntax( + let result = expand_syntax( &PipelineShape, &mut iterator, - &context.expand_context(source, pipeline.span()), + &context.expand_context(source), ) + .map_err(|err| err.into()); + + if log_enabled!(target: "nu::expand_syntax", log::Level::Debug) { + println!(""); + 
ptree::print_tree(&iterator.expand_tracer().print(source.clone())).unwrap(); + println!(""); + } + + result } // Classify this command as an external command, which doesn't give special meaning @@ -691,21 +702,22 @@ fn classify_pipeline( // strings. pub(crate) fn external_command( tokens: &mut TokensIterator, - source: &Text, + context: &ExpandContext, name: Tagged<&str>, -) -> Result { - let arg_list_strings = expand_external_tokens(tokens, source)?; +) -> Result { + let Spanned { item, span } = expand_syntax(&ExternalTokensShape, tokens, context)?; Ok(ClassifiedCommand::External(ExternalCommand { name: name.to_string(), name_tag: name.tag(), - args: arg_list_strings + args: item .iter() .map(|x| Tagged { tag: x.span.into(), item: x.item.clone(), }) - .collect(), + .collect::>() + .spanned(span), })) } diff --git a/src/commands/classified.rs b/src/commands/classified.rs index 7204af77c6..e694264620 100644 --- a/src/commands/classified.rs +++ b/src/commands/classified.rs @@ -4,7 +4,9 @@ use bytes::{BufMut, BytesMut}; use derive_new::new; use futures::stream::StreamExt; use futures_codec::{Decoder, Encoder, Framed}; +use itertools::Itertools; use log::{log_enabled, trace}; +use std::fmt; use std::io::{Error, ErrorKind}; use subprocess::Exec; @@ -72,26 +74,77 @@ impl ClassifiedInputStream { } } -#[derive(Debug)] +#[derive(Debug, Clone)] pub(crate) struct ClassifiedPipeline { - pub(crate) commands: Vec, + pub(crate) commands: Spanned>, } -#[derive(Debug, Eq, PartialEq)] +impl FormatDebug for ClassifiedPipeline { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + f.say_str( + "classified pipeline", + self.commands.iter().map(|c| c.debug(source)).join(" | "), + ) + } +} + +impl HasSpan for ClassifiedPipeline { + fn span(&self) -> Span { + self.commands.span + } +} + +#[derive(Debug, Clone, Eq, PartialEq)] pub(crate) enum ClassifiedCommand { #[allow(unused)] Expr(TokenNode), Internal(InternalCommand), #[allow(unused)] - Dynamic(hir::Call), + 
Dynamic(Spanned), External(ExternalCommand), } -#[derive(new, Debug, Eq, PartialEq)] +impl FormatDebug for ClassifiedCommand { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + match self { + ClassifiedCommand::Expr(expr) => expr.fmt_debug(f, source), + ClassifiedCommand::Internal(internal) => internal.fmt_debug(f, source), + ClassifiedCommand::Dynamic(dynamic) => dynamic.fmt_debug(f, source), + ClassifiedCommand::External(external) => external.fmt_debug(f, source), + } + } +} + +impl HasSpan for ClassifiedCommand { + fn span(&self) -> Span { + match self { + ClassifiedCommand::Expr(node) => node.span(), + ClassifiedCommand::Internal(command) => command.span(), + ClassifiedCommand::Dynamic(call) => call.span, + ClassifiedCommand::External(command) => command.span(), + } + } +} + +#[derive(new, Debug, Clone, Eq, PartialEq)] pub(crate) struct InternalCommand { pub(crate) name: String, pub(crate) name_tag: Tag, - pub(crate) args: hir::Call, + pub(crate) args: Spanned, +} + +impl HasSpan for InternalCommand { + fn span(&self) -> Span { + let start = self.name_tag.span; + + start.until(self.args.span) + } +} + +impl FormatDebug for InternalCommand { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + f.say("internal", self.args.debug(source)) + } } #[derive(new, Debug, Eq, PartialEq)] @@ -122,7 +175,7 @@ impl InternalCommand { context.run_command( command, self.name_tag.clone(), - self.args, + self.args.item, &source, objects, is_first_command, @@ -201,12 +254,31 @@ impl InternalCommand { } } -#[derive(Debug, Eq, PartialEq)] +#[derive(Debug, Clone, Eq, PartialEq)] pub(crate) struct ExternalCommand { pub(crate) name: String, pub(crate) name_tag: Tag, - pub(crate) args: Vec>, + pub(crate) args: Spanned>>, +} + +impl FormatDebug for ExternalCommand { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + write!(f, "{}", self.name)?; + + if self.args.item.len() > 0 { + write!(f, " ")?; + 
write!(f, "{}", self.args.iter().map(|i| i.debug(source)).join(" "))?; + } + + Ok(()) + } +} + +impl HasSpan for ExternalCommand { + fn span(&self) -> Span { + self.name_tag.span.until(self.args.span) + } } #[derive(Debug)] @@ -230,7 +302,7 @@ impl ExternalCommand { trace!(target: "nu::run::external", "inputs = {:?}", inputs); let mut arg_string = format!("{}", self.name); - for arg in &self.args { + for arg in &self.args.item { arg_string.push_str(&arg); } @@ -275,7 +347,7 @@ impl ExternalCommand { process = Exec::shell(itertools::join(commands, " && ")) } else { process = Exec::cmd(&self.name); - for arg in &self.args { + for arg in &self.args.item { let arg_chars: Vec<_> = arg.chars().collect(); if arg_chars.len() > 1 && arg_chars[0] == '"' diff --git a/src/commands/command.rs b/src/commands/command.rs index 6677dfbd7e..73b14ca25a 100644 --- a/src/commands/command.rs +++ b/src/commands/command.rs @@ -19,8 +19,8 @@ pub struct UnevaluatedCallInfo { pub name_tag: Tag, } -impl ToDebug for UnevaluatedCallInfo { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { +impl FormatDebug for UnevaluatedCallInfo { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { self.args.fmt_debug(f, source) } } @@ -96,8 +96,14 @@ impl RawCommandArgs { } } -impl ToDebug for CommandArgs { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { +impl std::fmt::Debug for CommandArgs { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.call_info.fmt(f) + } +} + +impl FormatDebug for CommandArgs { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { self.call_info.fmt_debug(f, source) } } @@ -377,7 +383,7 @@ impl EvaluatedCommandArgs { } } -#[derive(Debug, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] pub enum CommandAction { ChangePath(String), Exit, @@ -389,8 +395,8 @@ pub enum CommandAction { LeaveShell, } -impl ToDebug for CommandAction { - 
fn fmt_debug(&self, f: &mut fmt::Formatter, _source: &str) -> fmt::Result { +impl FormatDebug for CommandAction { + fn fmt_debug(&self, f: &mut DebugFormatter, _source: &str) -> fmt::Result { match self { CommandAction::ChangePath(s) => write!(f, "action:change-path={}", s), CommandAction::Exit => write!(f, "action:exit"), @@ -408,7 +414,7 @@ impl ToDebug for CommandAction { } } -#[derive(Debug, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] pub enum ReturnSuccess { Value(Tagged), Action(CommandAction), @@ -416,8 +422,8 @@ pub enum ReturnSuccess { pub type ReturnValue = Result; -impl ToDebug for ReturnValue { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { +impl FormatDebug for ReturnValue { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { match self { Err(err) => write!(f, "{}", err.debug(source)), Ok(ReturnSuccess::Value(v)) => write!(f, "{:?}", v.debug()), diff --git a/src/context.rs b/src/context.rs index 1454eb7c29..6983f467ad 100644 --- a/src/context.rs +++ b/src/context.rs @@ -71,9 +71,8 @@ impl Context { pub(crate) fn expand_context<'context>( &'context self, source: &'context Text, - span: Span, ) -> ExpandContext<'context> { - ExpandContext::new(&self.registry, span, source, self.shell_manager.homedir()) + ExpandContext::new(&self.registry, source, self.shell_manager.homedir()) } pub(crate) fn basic() -> Result> { diff --git a/src/data/meta.rs b/src/data/meta.rs index 2f3f0cc4c1..2017558cd2 100644 --- a/src/data/meta.rs +++ b/src/data/meta.rs @@ -5,6 +5,7 @@ use derive_new::new; use getset::Getters; use serde::Deserialize; use serde::Serialize; +use std::fmt; use std::path::{Path, PathBuf}; #[derive(new, Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)] @@ -461,3 +462,134 @@ impl language_reporting::ReportingSpan for Span { self.end } } + +pub trait HasSpan: ToDebug { + fn span(&self) -> Span; +} + +pub trait HasFallibleSpan: ToDebug { + fn 
maybe_span(&self) -> Option; +} + +impl HasFallibleSpan for T { + fn maybe_span(&self) -> Option { + Some(HasSpan::span(self)) + } +} + +impl HasSpan for Spanned +where + Spanned: ToDebug, +{ + fn span(&self) -> Span { + self.span + } +} + +impl HasFallibleSpan for Option { + fn maybe_span(&self) -> Option { + *self + } +} + +impl FormatDebug for Option { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + match self { + Option::None => write!(f, "no span"), + Option::Some(span) => FormatDebug::fmt_debug(span, f, source), + } + } +} + +impl FormatDebug for Span { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + write!(f, "{:?}", self.slice(source)) + } +} + +impl HasSpan for Span { + fn span(&self) -> Span { + *self + } +} + +impl FormatDebug for Option> +where + Spanned: ToDebug, +{ + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + match self { + Option::None => write!(f, "nothing"), + Option::Some(spanned) => FormatDebug::fmt_debug(spanned, f, source), + } + } +} + +impl HasFallibleSpan for Option> +where + Spanned: ToDebug, +{ + fn maybe_span(&self) -> Option { + match self { + None => None, + Some(value) => Some(value.span), + } + } +} + +impl FormatDebug for Option> +where + Tagged: ToDebug, +{ + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + match self { + Option::None => write!(f, "nothing"), + Option::Some(item) => FormatDebug::fmt_debug(item, f, source), + } + } +} + +impl HasFallibleSpan for Option> +where + Tagged: ToDebug, +{ + fn maybe_span(&self) -> Option { + match self { + None => None, + Some(value) => Some(value.tag.span), + } + } +} + +impl HasSpan for Tagged +where + Tagged: ToDebug, +{ + fn span(&self) -> Span { + self.tag.span + } +} + +impl FormatDebug for Vec { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + write!(f, "[ ")?; + write!( + f, + "{}", + self.iter().map(|item| 
item.debug(source)).join(" ") + )?; + write!(f, " ]") + } +} + +impl FormatDebug for String { + fn fmt_debug(&self, f: &mut DebugFormatter, _source: &str) -> fmt::Result { + write!(f, "{}", self) + } +} + +impl FormatDebug for Spanned { + fn fmt_debug(&self, f: &mut DebugFormatter, _source: &str) -> fmt::Result { + write!(f, "{}", self.item) + } +} diff --git a/src/errors.rs b/src/errors.rs index dfad5692a1..c28658028c 100644 --- a/src/errors.rs +++ b/src/errors.rs @@ -30,6 +30,82 @@ impl Description { } } +#[derive(Debug, Clone)] +pub enum ParseErrorReason { + Eof { + expected: &'static str, + }, + Mismatch { + expected: &'static str, + actual: Tagged, + }, + ArgumentError { + command: String, + error: ArgumentError, + tag: Tag, + }, +} + +#[derive(Debug, Clone)] +pub struct ParseError { + reason: ParseErrorReason, + tag: Tag, +} + +impl ParseError { + pub fn unexpected_eof(expected: &'static str, span: Span) -> ParseError { + ParseError { + reason: ParseErrorReason::Eof { expected }, + tag: span.into(), + } + } + + pub fn mismatch(expected: &'static str, actual: Tagged>) -> ParseError { + let Tagged { tag, item } = actual; + + ParseError { + reason: ParseErrorReason::Mismatch { + expected, + actual: item.into().tagged(tag.clone()), + }, + tag, + } + } + + pub fn argument_error( + command: impl Into, + kind: ArgumentError, + tag: impl Into, + ) -> ParseError { + let tag = tag.into(); + + ParseError { + reason: ParseErrorReason::ArgumentError { + command: command.into(), + error: kind, + tag: tag.clone(), + }, + tag: tag.clone(), + } + } +} + +impl From for ShellError { + fn from(error: ParseError) -> ShellError { + match error.reason { + ParseErrorReason::Eof { expected } => ShellError::unexpected_eof(expected, error.tag), + ParseErrorReason::Mismatch { actual, expected } => { + ShellError::type_error(expected, actual.clone()) + } + ParseErrorReason::ArgumentError { + command, + error, + tag, + } => ShellError::argument_error(command, error, tag), + } + } +} + 
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Serialize, Deserialize)] pub enum ArgumentError { MissingMandatoryFlag(String), @@ -51,8 +127,8 @@ impl ShellError { } } -impl ToDebug for ShellError { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { +impl FormatDebug for ShellError { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { self.error.fmt_debug(f, source) } } @@ -153,16 +229,6 @@ impl ShellError { .start() } - pub(crate) fn invalid_external_word(tag: impl Into) -> ShellError { - ProximateShellError::ArgumentError { - command: "Invalid argument to Nu command (did you mean to call an external command?)" - .into(), - error: ArgumentError::InvalidExternalWord, - tag: tag.into(), - } - .start() - } - pub(crate) fn parse_error( error: nom::Err<( nom_locate::LocatedSpanEx<&str, TracableContext>, @@ -490,8 +556,8 @@ impl ProximateShellError { } } -impl ToDebug for ProximateShellError { - fn fmt_debug(&self, f: &mut fmt::Formatter, _source: &str) -> fmt::Result { +impl FormatDebug for ProximateShellError { + fn fmt_debug(&self, f: &mut DebugFormatter, _source: &str) -> fmt::Result { // TODO: Custom debug for inner spans write!(f, "{:?}", self) } diff --git a/src/lib.rs b/src/lib.rs index f21a70cfe2..38f770dc21 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -30,12 +30,16 @@ pub use crate::env::host::BasicHost; pub use crate::parser::hir::SyntaxShape; pub use crate::parser::parse::token_tree_builder::TokenTreeBuilder; pub use crate::plugin::{serve_plugin, Plugin}; +pub use crate::traits::{DebugFormatter, FormatDebug, ToDebug}; pub use crate::utils::{did_you_mean, AbsoluteFile, AbsolutePath, RelativePath}; pub use cli::cli; pub use data::base::{Primitive, Value}; pub use data::config::{config_path, APP_INFO}; pub use data::dict::{Dictionary, TaggedDictBuilder}; -pub use data::meta::{tag_for_tagged_list, Span, Spanned, SpannedItem, Tag, Tagged, TaggedItem}; +pub use data::meta::{ + tag_for_tagged_list, 
HasFallibleSpan, HasSpan, Span, Spanned, SpannedItem, Tag, Tagged, + TaggedItem, +}; pub use errors::{CoerceInto, ShellError}; pub use num_traits::cast::ToPrimitive; pub use parser::parse::text::Text; diff --git a/src/main.rs b/src/main.rs index 7f82808e74..e31c983f60 100644 --- a/src/main.rs +++ b/src/main.rs @@ -19,6 +19,12 @@ fn main() -> Result<(), Box> { .multiple(true) .takes_value(true), ) + .arg( + Arg::with_name("debug") + .long("debug") + .multiple(true) + .takes_value(true), + ) .get_matches(); let loglevel = match matches.value_of("loglevel") { @@ -48,6 +54,15 @@ fn main() -> Result<(), Box> { } } + match matches.values_of("debug") { + None => {} + Some(values) => { + for item in values { + builder.filter_module(&format!("nu::{}", item), LevelFilter::Debug); + } + } + } + builder.try_init()?; futures::executor::block_on(nu::cli())?; diff --git a/src/parser/hir.rs b/src/parser/hir.rs index 7108b0f7f9..28b8a21a03 100644 --- a/src/parser/hir.rs +++ b/src/parser/hir.rs @@ -24,7 +24,6 @@ pub(crate) use self::external_command::ExternalCommand; pub(crate) use self::named::NamedArguments; pub(crate) use self::path::Path; pub(crate) use self::syntax_shape::ExpandContext; -pub(crate) use self::tokens_iterator::debug::debug_tokens; pub(crate) use self::tokens_iterator::TokensIterator; pub use self::syntax_shape::SyntaxShape; @@ -50,8 +49,8 @@ impl Call { } } -impl ToDebug for Call { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { +impl FormatDebug for Call { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { write!(f, "({}", self.head.debug(source))?; if let Some(positional) = &self.positional { @@ -242,10 +241,14 @@ impl Expression { pub(crate) fn it_variable(inner: impl Into, outer: impl Into) -> Expression { RawExpression::Variable(Variable::It(inner.into())).spanned(outer) } + + pub(crate) fn tagged_type_name(&self) -> Tagged<&'static str> { + self.item.type_name().tagged(self.span) + } } -impl ToDebug for 
Expression { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { +impl FormatDebug for Spanned { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { match &self.item { RawExpression::Literal(l) => l.spanned(self.span).fmt_debug(f, source), RawExpression::FilePath(p) => write!(f, "{}", p.display()), @@ -256,7 +259,7 @@ impl ToDebug for Expression { RawExpression::Variable(Variable::Other(s)) => write!(f, "${}", s.slice(source)), RawExpression::Binary(b) => write!(f, "{}", b.debug(source)), RawExpression::ExternalCommand(c) => write!(f, "^{}", c.name().slice(source)), - RawExpression::Block(exprs) => { + RawExpression::Block(exprs) => f.say_block("block", |f| { write!(f, "{{ ")?; for expr in exprs { @@ -264,8 +267,8 @@ impl ToDebug for Expression { } write!(f, "}}") - } - RawExpression::List(exprs) => { + }), + RawExpression::List(exprs) => f.say_block("list", |f| { write!(f, "[ ")?; for expr in exprs { @@ -273,7 +276,7 @@ impl ToDebug for Expression { } write!(f, "]") - } + }), RawExpression::Path(p) => write!(f, "{}", p.debug(source)), RawExpression::Boolean(true) => write!(f, "$yes"), RawExpression::Boolean(false) => write!(f, "$no"), @@ -321,14 +324,14 @@ impl std::fmt::Display for Tagged<&Literal> { } } -impl ToDebug for Spanned<&Literal> { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { +impl FormatDebug for Spanned<&Literal> { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { match self.item { - Literal::Number(number) => write!(f, "{:?}", number), - Literal::Size(number, unit) => write!(f, "{:?}{:?}", *number, unit), - Literal::String(tag) => write!(f, "{}", tag.slice(source)), - Literal::GlobPattern(_) => write!(f, "{}", self.span.slice(source)), - Literal::Bare => write!(f, "{}", self.span.slice(source)), + Literal::Number(..) => f.say_str("number", self.span.slice(source)), + Literal::Size(..) 
=> f.say_str("size", self.span.slice(source)), + Literal::String(..) => f.say_str("string", self.span.slice(source)), + Literal::GlobPattern(..) => f.say_str("glob", self.span.slice(source)), + Literal::Bare => f.say_str("word", self.span.slice(source)), } } } @@ -359,3 +362,9 @@ impl std::fmt::Display for Variable { } } } + +impl FormatDebug for Spanned { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + write!(f, "{}", self.span.slice(source)) + } +} diff --git a/src/parser/hir/baseline_parse/tests.rs b/src/parser/hir/baseline_parse/tests.rs index ddd4af4930..c930fbe56c 100644 --- a/src/parser/hir/baseline_parse/tests.rs +++ b/src/parser/hir/baseline_parse/tests.rs @@ -6,7 +6,7 @@ use crate::parser::hir::syntax_shape::*; use crate::parser::hir::TokensIterator; use crate::parser::parse::token_tree_builder::{CurriedToken, TokenTreeBuilder as b}; use crate::parser::TokenNode; -use crate::{Span, SpannedItem, Tag, Text}; +use crate::{HasSpan, Span, SpannedItem, Tag, Text}; use pretty_assertions::assert_eq; use std::fmt::Debug; @@ -63,7 +63,9 @@ fn test_parse_command() { vec![b::bare("ls"), b::sp(), b::pattern("*.txt")], |tokens| { let bare = tokens[0].expect_bare(); - let pattern = tokens[2].expect_pattern(); + let pat = tokens[2].expect_pattern(); + + eprintln!("{:?} {:?} {:?}", bare, pat, bare.until(pat)); ClassifiedCommand::Internal(InternalCommand::new( "ls".to_string(), @@ -73,9 +75,10 @@ fn test_parse_command() { }, hir::Call { head: Box::new(hir::RawExpression::Command(bare).spanned(bare)), - positional: Some(vec![hir::Expression::pattern("*.txt", pattern)]), + positional: Some(vec![hir::Expression::pattern("*.txt", pat)]), named: None, - }, + } + .spanned(bare.until(pat)), )) // hir::Expression::path( // hir::Expression::variable(inner_var, outer_var), @@ -86,7 +89,7 @@ fn test_parse_command() { ); } -fn parse_tokens( +fn parse_tokens( shape: impl ExpandSyntax, tokens: Vec, expected: impl FnOnce(&[TokenNode]) -> T, @@ -96,19 +99,19 
@@ fn parse_tokens( ExpandContext::with_empty(&Text::from(source), |context| { let tokens = tokens.expect_list(); - let mut iterator = TokensIterator::all(tokens, *context.span()); + let mut iterator = TokensIterator::all(tokens.item, tokens.span); let expr = expand_syntax(&shape, &mut iterator, &context); let expr = match expr { Ok(expr) => expr, Err(err) => { - crate::cli::print_err(err, &BasicHost, context.source().clone()); + crate::cli::print_err(err.into(), &BasicHost, context.source().clone()); panic!("Parse failed"); } }; - assert_eq!(expr, expected(tokens)); + assert_eq!(expr, expected(tokens.item)); }) } diff --git a/src/parser/hir/binary.rs b/src/parser/hir/binary.rs index 67c597cb86..ee90e284e9 100644 --- a/src/parser/hir/binary.rs +++ b/src/parser/hir/binary.rs @@ -22,8 +22,8 @@ impl fmt::Display for Binary { } } -impl ToDebug for Binary { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { +impl FormatDebug for Binary { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { write!(f, "{}", self.left.debug(source))?; write!(f, " {} ", self.op.debug(source))?; write!(f, "{}", self.right.debug(source))?; diff --git a/src/parser/hir/expand_external_tokens.rs b/src/parser/hir/expand_external_tokens.rs index 5733a30c81..e99147c228 100644 --- a/src/parser/hir/expand_external_tokens.rs +++ b/src/parser/hir/expand_external_tokens.rs @@ -1,35 +1,55 @@ -use crate::errors::ShellError; +use crate::errors::ParseError; #[cfg(not(coloring_in_tokens))] use crate::parser::hir::syntax_shape::FlatShape; use crate::parser::{ hir::syntax_shape::{ - color_syntax, expand_atom, AtomicToken, ColorSyntax, ExpandContext, ExpansionRule, - MaybeSpaceShape, + color_syntax, expand_atom, expand_expr, expand_syntax, AtomicToken, ColorSyntax, + ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, MaybeSpaceShape, }, - TokenNode, TokensIterator, + hir::Expression, + TokensIterator, }; -use crate::{Span, Spanned, Text}; - -pub fn 
expand_external_tokens( - token_nodes: &mut TokensIterator<'_>, - source: &Text, -) -> Result>, ShellError> { - let mut out: Vec> = vec![]; - - loop { - if let Some(span) = expand_next_expression(token_nodes)? { - out.push(span.spanned_string(source)); - } else { - break; - } - } - - Ok(out) -} +use crate::{DebugFormatter, FormatDebug, Span, Spanned, SpannedItem}; +use std::fmt; #[derive(Debug, Copy, Clone)] pub struct ExternalTokensShape; +impl FormatDebug for Spanned>> { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + FormatDebug::fmt_debug(&self.item, f, source) + } +} + +impl ExpandSyntax for ExternalTokensShape { + type Output = Spanned>>; + + fn name(&self) -> &'static str { + "external command" + } + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + let mut out: Vec> = vec![]; + + let start = token_nodes.span_at_cursor(); + + loop { + match expand_syntax(&ExternalExpressionShape, token_nodes, context) { + Err(_) | Ok(None) => break, + Ok(Some(span)) => out.push(span.spanned_string(context.source())), + } + } + + let end = token_nodes.span_at_cursor(); + + Ok(out.spanned(start.until(end))) + } +} + #[cfg(not(coloring_in_tokens))] impl ColorSyntax for ExternalTokensShape { type Info = (); @@ -85,109 +105,200 @@ impl ColorSyntax for ExternalTokensShape { } } -pub fn expand_next_expression( - token_nodes: &mut TokensIterator<'_>, -) -> Result, ShellError> { - let first = token_nodes.next_non_ws(); +#[derive(Debug, Copy, Clone)] +pub struct ExternalExpressionShape; - let first = match first { - None => return Ok(None), - Some(v) => v, - }; +impl ExpandSyntax for ExternalExpressionShape { + type Output = Option; - let first = triage_external_head(first)?; - let mut last = first; - - loop { - let continuation = triage_continuation(token_nodes)?; - - if let Some(continuation) = continuation { - last = continuation; - } else { - break; - } + fn name(&self) -> 
&'static str { + "external expression" } - Ok(Some(first.until(last))) -} + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + expand_syntax(&MaybeSpaceShape, token_nodes, context)?; -fn triage_external_head(node: &TokenNode) -> Result { - Ok(match node { - TokenNode::Token(token) => token.span, - TokenNode::Call(_call) => unimplemented!("TODO: OMG"), - TokenNode::Nodes(_nodes) => unimplemented!("TODO: OMG"), - TokenNode::Delimited(_delimited) => unimplemented!("TODO: OMG"), - TokenNode::Pipeline(_pipeline) => unimplemented!("TODO: OMG"), - TokenNode::Flag(flag) => flag.span, - TokenNode::Whitespace(_whitespace) => { - unreachable!("This function should be called after next_non_ws()") + let first = expand_atom( + token_nodes, + "external command", + context, + ExpansionRule::new().allow_external_command(), + )? + .span; + + let mut last = first; + + loop { + let continuation = expand_expr(&ExternalContinuationShape, token_nodes, context); + + if let Ok(continuation) = continuation { + last = continuation.span; + } else { + break; + } } - TokenNode::Error(_error) => unimplemented!("TODO: OMG"), - }) -} -fn triage_continuation<'a, 'b>( - nodes: &'a mut TokensIterator<'b>, -) -> Result, ShellError> { - let mut peeked = nodes.peek_any(); - - let node = match peeked.node { - None => return Ok(None), - Some(node) => node, - }; - - match &node { - node if node.is_whitespace() => return Ok(None), - TokenNode::Token(..) | TokenNode::Flag(..) => {} - TokenNode::Call(..) => unimplemented!("call"), - TokenNode::Nodes(..) => unimplemented!("nodes"), - TokenNode::Delimited(..) => unimplemented!("delimited"), - TokenNode::Pipeline(..) => unimplemented!("pipeline"), - TokenNode::Whitespace(..) => unimplemented!("whitespace"), - TokenNode::Error(..) 
=> unimplemented!("error"), + Ok(Some(first.until(last))) } - - peeked.commit(); - Ok(Some(node.span())) -} - -#[must_use] -enum ExternalExpressionResult { - Eof, - Processed, } #[derive(Debug, Copy, Clone)] struct ExternalExpression; -#[cfg(not(coloring_in_tokens))] -impl ColorSyntax for ExternalExpression { - type Info = ExternalExpressionResult; - type Input = (); +impl ExpandSyntax for ExternalExpression { + type Output = Option; - fn color_syntax<'a, 'b>( + fn name(&self) -> &'static str { + "external expression" + } + + fn expand_syntax<'a, 'b>( &self, - _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - shapes: &mut Vec>, - ) -> ExternalExpressionResult { - let atom = match expand_atom( - token_nodes, - "external word", - context, - ExpansionRule::permissive(), - ) { - Err(_) => unreachable!("TODO: separate infallible expand_atom"), - Ok(Spanned { - item: AtomicToken::Eof { .. }, - .. - }) => return ExternalExpressionResult::Eof, - Ok(atom) => atom, - }; + ) -> Result { + expand_syntax(&MaybeSpaceShape, token_nodes, context)?; - atom.color_tokens(shapes); - return ExternalExpressionResult::Processed; + let first = expand_syntax(&ExternalHeadShape, token_nodes, context)?.span; + let mut last = first; + + loop { + let continuation = expand_syntax(&ExternalContinuationShape, token_nodes, context); + + if let Ok(continuation) = continuation { + last = continuation.span; + } else { + break; + } + } + + Ok(Some(first.until(last))) + } +} + +#[derive(Debug, Copy, Clone)] +struct ExternalHeadShape; + +impl ExpandExpression for ExternalHeadShape { + fn name(&self) -> &'static str { + "external argument" + } + + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + match expand_atom( + token_nodes, + "external argument", + context, + ExpansionRule::new() + .allow_external_word() + .treat_size_as_word(), + )? 
{ + atom => match &atom { + Spanned { item, span } => Ok(match item { + AtomicToken::Eof { .. } => unreachable!("ExpansionRule doesn't allow EOF"), + AtomicToken::Error { .. } => unreachable!("ExpansionRule doesn't allow Error"), + AtomicToken::Size { .. } => unreachable!("ExpansionRule treats size as word"), + AtomicToken::Whitespace { .. } => { + unreachable!("ExpansionRule doesn't allow Whitespace") + } + AtomicToken::ShorthandFlag { .. } + | AtomicToken::LonghandFlag { .. } + | AtomicToken::SquareDelimited { .. } + | AtomicToken::ParenDelimited { .. } + | AtomicToken::BraceDelimited { .. } + | AtomicToken::Pipeline { .. } => { + return Err(ParseError::mismatch( + "external command name", + atom.tagged_type_name(), + )) + } + AtomicToken::ExternalCommand { command } => { + Expression::external_command(*command, *span) + } + AtomicToken::Number { number } => { + Expression::number(number.to_number(context.source()), *span) + } + AtomicToken::String { body } => Expression::string(*body, *span), + AtomicToken::ItVariable { name } => Expression::it_variable(*name, *span), + AtomicToken::Variable { name } => Expression::variable(*name, *span), + AtomicToken::ExternalWord { .. } + | AtomicToken::GlobPattern { .. } + | AtomicToken::FilePath { .. } + | AtomicToken::Word { .. } + | AtomicToken::Dot { .. } + | AtomicToken::Operator { .. } => Expression::external_command(*span, *span), + }), + }, + } + } +} + +#[derive(Debug, Copy, Clone)] +struct ExternalContinuationShape; + +impl ExpandExpression for ExternalContinuationShape { + fn name(&self) -> &'static str { + "external argument" + } + + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + match expand_atom( + token_nodes, + "external argument", + context, + ExpansionRule::new() + .allow_external_word() + .treat_size_as_word(), + )? { + atom => match &atom { + Spanned { item, span } => Ok(match item { + AtomicToken::Eof { .. 
} => unreachable!("ExpansionRule doesn't allow EOF"), + AtomicToken::Error { .. } => unreachable!("ExpansionRule doesn't allow Error"), + AtomicToken::Number { number } => { + Expression::number(number.to_number(context.source()), *span) + } + AtomicToken::Size { .. } => unreachable!("ExpansionRule treats size as word"), + AtomicToken::ExternalCommand { .. } => { + unreachable!("ExpansionRule doesn't allow ExternalCommand") + } + AtomicToken::Whitespace { .. } => { + unreachable!("ExpansionRule doesn't allow Whitespace") + } + AtomicToken::String { body } => Expression::string(*body, *span), + AtomicToken::ItVariable { name } => Expression::it_variable(*name, *span), + AtomicToken::Variable { name } => Expression::variable(*name, *span), + AtomicToken::ExternalWord { .. } + | AtomicToken::GlobPattern { .. } + | AtomicToken::FilePath { .. } + | AtomicToken::Word { .. } + | AtomicToken::ShorthandFlag { .. } + | AtomicToken::LonghandFlag { .. } + | AtomicToken::Dot { .. } + | AtomicToken::Operator { .. } => Expression::bare(*span), + AtomicToken::SquareDelimited { .. } + | AtomicToken::ParenDelimited { .. } + | AtomicToken::BraceDelimited { .. } + | AtomicToken::Pipeline { .. 
} => { + return Err(ParseError::mismatch( + "external argument", + atom.tagged_type_name(), + )) + } + }), + }, + } } } @@ -224,3 +335,40 @@ impl ColorSyntax for ExternalExpression { return ExternalExpressionResult::Processed; } } + +#[must_use] +enum ExternalExpressionResult { + Eof, + Processed, +} + +#[cfg(not(coloring_in_tokens))] +impl ColorSyntax for ExternalExpression { + type Info = ExternalExpressionResult; + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> ExternalExpressionResult { + let atom = match expand_atom( + token_nodes, + "external word", + context, + ExpansionRule::permissive(), + ) { + Err(_) => unreachable!("TODO: separate infallible expand_atom"), + Ok(Spanned { + item: AtomicToken::Eof { .. }, + .. + }) => return ExternalExpressionResult::Eof, + Ok(atom) => atom, + }; + + atom.color_tokens(shapes); + return ExternalExpressionResult::Processed; + } +} diff --git a/src/parser/hir/external_command.rs b/src/parser/hir/external_command.rs index df71328cab..af207c458e 100644 --- a/src/parser/hir/external_command.rs +++ b/src/parser/hir/external_command.rs @@ -12,8 +12,8 @@ pub struct ExternalCommand { pub(crate) name: Span, } -impl ToDebug for ExternalCommand { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { +impl FormatDebug for ExternalCommand { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { write!(f, "{}", self.name.slice(source))?; Ok(()) diff --git a/src/parser/hir/named.rs b/src/parser/hir/named.rs index f7387e4fd4..152525f0aa 100644 --- a/src/parser/hir/named.rs +++ b/src/parser/hir/named.rs @@ -21,8 +21,8 @@ pub struct NamedArguments { pub(crate) named: IndexMap, } -impl ToDebug for NamedArguments { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { +impl FormatDebug for NamedArguments { + fn fmt_debug(&self, f: &mut DebugFormatter, 
source: &str) -> fmt::Result { for (name, value) in &self.named { match value { NamedValue::AbsentSwitch => continue, diff --git a/src/parser/hir/path.rs b/src/parser/hir/path.rs index 5867132986..4a9907475b 100644 --- a/src/parser/hir/path.rs +++ b/src/parser/hir/path.rs @@ -44,8 +44,8 @@ impl Path { } } -impl ToDebug for Path { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { +impl FormatDebug for Path { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { write!(f, "{}", self.head.debug(source))?; for part in &self.tail { diff --git a/src/parser/hir/syntax_shape.rs b/src/parser/hir/syntax_shape.rs index a38a77500b..32c54bc4e3 100644 --- a/src/parser/hir/syntax_shape.rs +++ b/src/parser/hir/syntax_shape.rs @@ -11,16 +11,12 @@ use crate::parser::hir::expand_external_tokens::ExternalTokensShape; use crate::parser::hir::syntax_shape::block::AnyBlockShape; use crate::parser::hir::tokens_iterator::Peeked; use crate::parser::parse_command::{parse_command_tail, CommandTailShape}; -use crate::parser::{ - hir, - hir::{debug_tokens, TokensIterator}, - Operator, RawToken, TokenNode, -}; +use crate::parser::{hir, hir::TokensIterator, Operator, RawToken, TokenNode}; use crate::prelude::*; use derive_new::new; use getset::Getters; -use log::{self, trace}; use serde::{Deserialize, Serialize}; +use std::fmt; use std::path::{Path, PathBuf}; pub(crate) use self::expression::atom::{expand_atom, AtomicToken, ExpansionRule}; @@ -40,15 +36,16 @@ pub(crate) use self::expression::variable_path::{ pub(crate) use self::expression::{continue_expression, AnyExpressionShape}; pub(crate) use self::flat_shape::FlatShape; +#[cfg(not(coloring_in_tokens))] +use crate::parser::hir::tokens_iterator::debug::debug_tokens; #[cfg(not(coloring_in_tokens))] use crate::parser::parse::pipeline::Pipeline; #[cfg(not(coloring_in_tokens))] -use log::log_enabled; +use log::{log_enabled, trace}; #[derive(Debug, Copy, Clone, Serialize, Deserialize)] pub enum 
SyntaxShape { Any, - List, String, Member, ColumnPath, @@ -75,10 +72,6 @@ impl FallibleColorSyntax for SyntaxShape { SyntaxShape::Any => { color_fallible_syntax(&AnyExpressionShape, token_nodes, context, shapes) } - SyntaxShape::List => { - color_syntax(&ExpressionListShape, token_nodes, context, shapes); - Ok(()) - } SyntaxShape::Int => color_fallible_syntax(&IntShape, token_nodes, context, shapes), SyntaxShape::String => color_fallible_syntax_with( &StringShape, @@ -126,10 +119,6 @@ impl FallibleColorSyntax for SyntaxShape { ) -> Result<(), ShellError> { match self { SyntaxShape::Any => color_fallible_syntax(&AnyExpressionShape, token_nodes, context), - SyntaxShape::List => { - color_syntax(&ExpressionListShape, token_nodes, context); - Ok(()) - } SyntaxShape::Int => color_fallible_syntax(&IntShape, token_nodes, context), SyntaxShape::String => { color_fallible_syntax_with(&StringShape, &FlatShape::String, token_nodes, context) @@ -147,14 +136,27 @@ impl FallibleColorSyntax for SyntaxShape { } impl ExpandExpression for SyntaxShape { + fn name(&self) -> &'static str { + match self { + SyntaxShape::Any => "any", + SyntaxShape::Int => "integer", + SyntaxShape::String => "string", + SyntaxShape::Member => "column name", + SyntaxShape::ColumnPath => "column path", + SyntaxShape::Number => "number", + SyntaxShape::Path => "file path", + SyntaxShape::Pattern => "glob pattern", + SyntaxShape::Block => "block", + } + } + fn expand_expr<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - ) -> Result { + ) -> Result { match self { SyntaxShape::Any => expand_expr(&AnyExpressionShape, token_nodes, context), - SyntaxShape::List => Err(ShellError::unimplemented("SyntaxShape:List")), SyntaxShape::Int => expand_expr(&IntShape, token_nodes, context), SyntaxShape::String => expand_expr(&StringShape, token_nodes, context), SyntaxShape::Member => { @@ -162,8 +164,9 @@ impl ExpandExpression for SyntaxShape { Ok(syntax.to_expr()) } 
SyntaxShape::ColumnPath => { - let Tagged { item: members, tag } = - expand_syntax(&ColumnPathShape, token_nodes, context)?; + let column_path = expand_syntax(&ColumnPathShape, token_nodes, context)?; + + let Tagged { item: members, tag } = column_path.path(); Ok(hir::Expression::list( members.into_iter().map(|s| s.to_expr()).collect(), @@ -182,7 +185,6 @@ impl std::fmt::Display for SyntaxShape { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { SyntaxShape::Any => write!(f, "Any"), - SyntaxShape::List => write!(f, "List"), SyntaxShape::String => write!(f, "String"), SyntaxShape::Int => write!(f, "Integer"), SyntaxShape::Member => write!(f, "Member"), @@ -200,8 +202,6 @@ pub struct ExpandContext<'context> { #[get = "pub(crate)"] registry: &'context CommandRegistry, #[get = "pub(crate)"] - span: Span, - #[get = "pub(crate)"] source: &'context Text, homedir: Option, } @@ -221,7 +221,6 @@ impl<'context> ExpandContext<'context> { callback(ExpandContext { registry: ®istry, - span: Span::unknown(), source, homedir: None, }) @@ -237,11 +236,13 @@ pub trait TestSyntax: std::fmt::Debug + Copy { } pub trait ExpandExpression: std::fmt::Debug + Copy { + fn name(&self) -> &'static str; + fn expand_expr<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - ) -> Result; + ) -> Result; } #[cfg(coloring_in_tokens)] @@ -303,35 +304,49 @@ pub trait ColorSyntax: std::fmt::Debug + Copy { } pub(crate) trait ExpandSyntax: std::fmt::Debug + Copy { - type Output: std::fmt::Debug; + type Output: HasFallibleSpan + Clone + std::fmt::Debug + 'static; + + fn name(&self) -> &'static str; fn expand_syntax<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - ) -> Result; + ) -> Result; } pub(crate) fn expand_syntax<'a, 'b, T: ExpandSyntax>( shape: &T, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, -) -> Result { - trace!(target: "nu::expand_syntax", "before {} :: {:?}", 
std::any::type_name::(), debug_tokens(token_nodes.state(), context.source)); +) -> Result { + token_nodes.expand_frame(shape.name(), |token_nodes| { + shape.expand_syntax(token_nodes, context) + }) +} - let result = shape.expand_syntax(token_nodes, context); +pub(crate) fn expand_expr<'a, 'b, T: ExpandExpression>( + shape: &T, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, +) -> Result { + token_nodes.expand_expr_frame(shape.name(), |token_nodes| { + shape.expand_expr(token_nodes, context) + }) +} - match result { - Err(err) => { - trace!(target: "nu::expand_syntax", "error :: {} :: {:?}", err, debug_tokens(token_nodes.state(), context.source)); - Err(err) - } - - Ok(result) => { - trace!(target: "nu::expand_syntax", "ok :: {:?} :: {:?}", result, debug_tokens(token_nodes.state(), context.source)); - Ok(result) - } - } +#[cfg(coloring_in_tokens)] +pub fn color_syntax<'a, 'b, T: ColorSyntax, U>( + shape: &T, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, +) -> ((), U) { + ( + (), + token_nodes.color_frame(shape.name(), |token_nodes| { + shape.color_syntax(&(), token_nodes, context) + }), + ) } #[cfg(not(coloring_in_tokens))] @@ -363,20 +378,6 @@ pub fn color_syntax<'a, 'b, T: ColorSyntax, U>( ((), result) } -#[cfg(coloring_in_tokens)] -pub fn color_syntax<'a, 'b, T: ColorSyntax, U>( - shape: &T, - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, -) -> ((), U) { - ( - (), - token_nodes.color_frame(shape.name(), |token_nodes| { - shape.color_syntax(&(), token_nodes, context) - }), - ) -} - #[cfg(not(coloring_in_tokens))] pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax, U>( shape: &T, @@ -492,36 +493,18 @@ pub fn color_fallible_syntax_with<'a, 'b, T: FallibleColorSyntax( - shape: &T, - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, -) -> Result { - trace!(target: "nu::expand_expression", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes.state(), 
context.source)); - - let result = shape.expand_expr(token_nodes, context); - - match result { - Err(err) => { - trace!(target: "nu::expand_expression", "error :: {} :: {:?}", err, debug_tokens(token_nodes.state(), context.source)); - Err(err) - } - - Ok(result) => { - trace!(target: "nu::expand_expression", "ok :: {:?} :: {:?}", result, debug_tokens(token_nodes.state(), context.source)); - Ok(result) - } - } -} - impl ExpandSyntax for T { type Output = hir::Expression; + fn name(&self) -> &'static str { + ExpandExpression::name(self) + } + fn expand_syntax<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - ) -> Result { + ) -> Result { ExpandExpression::expand_expr(self, token_nodes, context) } } @@ -537,7 +520,7 @@ pub trait SkipSyntax: std::fmt::Debug + Copy { enum BarePathState { Initial, Seen(Span, Span), - Error(ShellError), + Error(ParseError), } impl BarePathState { @@ -549,7 +532,7 @@ impl BarePathState { } } - pub fn end(self, peeked: Peeked, reason: impl Into) -> BarePathState { + pub fn end(self, peeked: Peeked, reason: &'static str) -> BarePathState { match self { BarePathState::Initial => BarePathState::Error(peeked.type_error(reason)), BarePathState::Seen(start, end) => BarePathState::Seen(start, end), @@ -557,7 +540,7 @@ impl BarePathState { } } - pub fn into_bare(self) -> Result { + pub fn into_bare(self) -> Result { match self { BarePathState::Initial => unreachable!("into_bare in initial state"), BarePathState::Seen(start, end) => Ok(start.until(end)), @@ -570,7 +553,7 @@ pub fn expand_bare<'a, 'b>( token_nodes: &'b mut TokensIterator<'a>, _context: &ExpandContext, predicate: impl Fn(&TokenNode) -> bool, -) -> Result { +) -> Result { let mut state = BarePathState::Initial; loop { @@ -603,11 +586,15 @@ pub struct BarePathShape; impl ExpandSyntax for BarePathShape { type Output = Span; + fn name(&self) -> &'static str { + "shorthand path" + } + fn expand_syntax<'a, 'b>( &self, token_nodes: &'b mut 
TokensIterator<'a>, context: &ExpandContext, - ) -> Result { + ) -> Result { expand_bare(token_nodes, context, |token| match token { TokenNode::Token(Spanned { item: RawToken::Bare, @@ -638,19 +625,21 @@ impl FallibleColorSyntax for BareShape { _context: &ExpandContext, shapes: &mut Vec>, ) -> Result<(), ShellError> { - token_nodes.peek_any_token("word", |token| match token { - // If it's a bare token, color it - TokenNode::Token(Spanned { - item: RawToken::Bare, - span, - }) => { - shapes.push((*input).spanned(*span)); - Ok(()) - } + token_nodes + .peek_any_token("word", |token| match token { + // If it's a bare token, color it + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => { + shapes.push((*input).spanned(*span)); + Ok(()) + } - // otherwise, fail - other => Err(ShellError::type_error("word", other.tagged_type_name())), - }) + // otherwise, fail + other => Err(ParseError::mismatch("word", other.tagged_type_name())), + }) + .map_err(|err| err.into()) } } @@ -677,7 +666,7 @@ impl FallibleColorSyntax for BareShape { }) => Ok(span), // otherwise, fail - other => Err(ShellError::type_error("word", other.tagged_type_name())), + other => Err(ParseError::mismatch("word", other.tagged_type_name())), })?; token_nodes.color_shape((*input).spanned(*span)); @@ -689,11 +678,15 @@ impl FallibleColorSyntax for BareShape { impl ExpandSyntax for BareShape { type Output = Spanned; + fn name(&self) -> &'static str { + "word" + } + fn expand_syntax<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - ) -> Result { + ) -> Result { let peeked = token_nodes.peek_any().not_eof("word")?; match peeked.node { @@ -705,7 +698,7 @@ impl ExpandSyntax for BareShape { Ok(span.spanned_string(context.source)) } - other => Err(ShellError::type_error("word", other.tagged_type_name())), + other => Err(ParseError::mismatch("word", other.tagged_type_name())), } } } @@ -725,7 +718,7 @@ impl TestSyntax for BareShape { } } -#[derive(Debug)] 
+#[derive(Debug, Clone)] pub enum CommandSignature { Internal(Spanned>), LiteralExternal { outer: Span, inner: Span }, @@ -733,6 +726,34 @@ pub enum CommandSignature { Expression(hir::Expression), } +impl FormatDebug for CommandSignature { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + match self { + CommandSignature::Internal(internal) => { + f.say_str("internal", internal.span.slice(source)) + } + CommandSignature::LiteralExternal { outer, .. } => { + f.say_str("external", outer.slice(source)) + } + CommandSignature::External(external) => { + write!(f, "external:{}", external.slice(source)) + } + CommandSignature::Expression(expr) => expr.fmt_debug(f, source), + } + } +} + +impl HasSpan for CommandSignature { + fn span(&self) -> Span { + match self { + CommandSignature::Internal(spanned) => spanned.span, + CommandSignature::LiteralExternal { outer, .. } => *outer, + CommandSignature::External(span) => *span, + CommandSignature::Expression(expr) => expr.span, + } + } +} + impl CommandSignature { pub fn to_expression(&self) -> hir::Expression { match self { @@ -833,12 +854,17 @@ impl FallibleColorSyntax for PipelineShape { #[cfg(coloring_in_tokens)] impl ExpandSyntax for PipelineShape { type Output = ClassifiedPipeline; + + fn name(&self) -> &'static str { + "pipeline" + } + fn expand_syntax<'content, 'me>( &self, iterator: &'me mut TokensIterator<'content>, context: &ExpandContext, - ) -> Result { - let source = context.source; + ) -> Result { + let start = iterator.span_at_cursor(); let peeked = iterator.peek_any().not_eof("pipeline")?; let pipeline = peeked.commit().as_pipeline()?; @@ -851,25 +877,34 @@ impl ExpandSyntax for PipelineShape { let tokens: Spanned<&[TokenNode]> = (&part.item.tokens[..]).spanned(part.span); let classified = iterator.child(tokens, move |token_nodes| { - classify_command(token_nodes, context, &source) + expand_syntax(&ClassifiedCommandShape, token_nodes, context) })?; out.push(classified); } - 
Ok(ClassifiedPipeline { commands: out }) + let end = iterator.span_at_cursor(); + + Ok(ClassifiedPipeline { + commands: out.spanned(start.until(end)), + }) } } #[cfg(not(coloring_in_tokens))] impl ExpandSyntax for PipelineShape { type Output = ClassifiedPipeline; + + fn name(&self) -> &'static str { + "pipeline" + } + fn expand_syntax<'content, 'me>( &self, iterator: &'me mut TokensIterator<'content>, context: &ExpandContext, - ) -> Result { - let source = context.source; + ) -> Result { + let start = iterator.span_at_cursor(); let peeked = iterator.peek_any().not_eof("pipeline")?; let pipeline = peeked.commit().as_pipeline()?; @@ -882,13 +917,17 @@ impl ExpandSyntax for PipelineShape { let tokens: Spanned<&[TokenNode]> = (&part.item.tokens[..]).spanned(part.span); let classified = iterator.child(tokens, move |token_nodes| { - classify_command(token_nodes, context, &source) + expand_syntax(&ClassifiedCommandShape, token_nodes, context) })?; out.push(classified); } - Ok(ClassifiedPipeline { commands: out }) + let end = iterator.span_at_cursor(); + + Ok(ClassifiedPipeline { + commands: out.spanned(start.until(end)), + }) } } @@ -1014,11 +1053,15 @@ impl FallibleColorSyntax for CommandHeadShape { impl ExpandSyntax for CommandHeadShape { type Output = CommandSignature; + fn name(&self) -> &'static str { + "command head" + } + fn expand_syntax<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, - ) -> Result { + ) -> Result { let node = parse_single_node_skipping_ws(token_nodes, "command head1", |token, token_span, _| { Ok(match token { @@ -1060,29 +1103,34 @@ pub struct ClassifiedCommandShape; impl ExpandSyntax for ClassifiedCommandShape { type Output = ClassifiedCommand; + fn name(&self) -> &'static str { + "classified command" + } + fn expand_syntax<'a, 'b>( &self, iterator: &'b mut TokensIterator<'a>, context: &ExpandContext, - ) -> Result { + ) -> Result { + let start = iterator.span_at_cursor(); let head = 
expand_syntax(&CommandHeadShape, iterator, context)?; match &head { - CommandSignature::Expression(expr) => Err(ShellError::syntax_error( - "Unexpected expression in command position".tagged(expr.span), - )), + CommandSignature::Expression(expr) => { + Err(ParseError::mismatch("command", expr.tagged_type_name())) + } // If the command starts with `^`, treat it as an external command no matter what CommandSignature::External(name) => { let name_str = name.slice(&context.source); - external_command(iterator, &context.source, name_str.tagged(name)) + external_command(iterator, context, name_str.tagged(name)) } CommandSignature::LiteralExternal { outer, inner } => { let name_str = inner.slice(&context.source); - external_command(iterator, &context.source, name_str.tagged(outer)) + external_command(iterator, context, name_str.tagged(outer)) } CommandSignature::Internal(command) => { @@ -1094,11 +1142,14 @@ impl ExpandSyntax for ClassifiedCommandShape { Some((positional, named)) => (positional, named), }; + let end = iterator.span_at_cursor(); + let call = hir::Call { head: Box::new(head.to_expression()), positional, named, - }; + } + .spanned(start.until(end)); Ok(ClassifiedCommand::Internal(InternalCommand::new( command.item.name().to_string(), @@ -1198,12 +1249,16 @@ impl FallibleColorSyntax for InternalCommandHeadShape { } impl ExpandExpression for InternalCommandHeadShape { + fn name(&self) -> &'static str { + "internal command head" + } + fn expand_expr( &self, token_nodes: &mut TokensIterator<'_>, _context: &ExpandContext, - ) -> Result { - let peeked_head = token_nodes.peek_non_ws().not_eof("command head4")?; + ) -> Result { + let peeked_head = token_nodes.peek_non_ws().not_eof("command head")?; let expr = match peeked_head.node { TokenNode::Token( @@ -1219,8 +1274,8 @@ impl ExpandExpression for InternalCommandHeadShape { }) => hir::RawExpression::Literal(hir::Literal::String(*inner_span)).spanned(*span), node => { - return Err(ShellError::type_error( - "command 
head5", + return Err(ParseError::mismatch( + "command head", node.tagged_type_name(), )) } @@ -1238,16 +1293,16 @@ pub(crate) struct SingleError<'token> { } impl<'token> SingleError<'token> { - pub(crate) fn error(&self) -> ShellError { - ShellError::type_error(self.expected, self.node.type_name().tagged(self.node.span)) + pub(crate) fn error(&self) -> ParseError { + ParseError::mismatch(self.expected, self.node.type_name().tagged(self.node.span)) } } fn parse_single_node<'a, 'b, T>( token_nodes: &'b mut TokensIterator<'a>, expected: &'static str, - callback: impl FnOnce(RawToken, Span, SingleError) -> Result, -) -> Result { + callback: impl FnOnce(RawToken, Span, SingleError) -> Result, +) -> Result { token_nodes.peek_any_token(expected, |node| match node { TokenNode::Token(token) => callback( token.item, @@ -1258,7 +1313,7 @@ fn parse_single_node<'a, 'b, T>( }, ), - other => Err(ShellError::type_error(expected, other.tagged_type_name())), + other => Err(ParseError::mismatch(expected, other.tagged_type_name())), }) } @@ -1360,22 +1415,21 @@ impl FallibleColorSyntax for WhitespaceShape { impl ExpandSyntax for WhitespaceShape { type Output = Span; + fn name(&self) -> &'static str { + "whitespace" + } + fn expand_syntax<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, _context: &ExpandContext, - ) -> Result { + ) -> Result { let peeked = token_nodes.peek_any().not_eof("whitespace")?; let span = match peeked.node { TokenNode::Whitespace(tag) => *tag, - other => { - return Err(ShellError::type_error( - "whitespace", - other.tagged_type_name(), - )) - } + other => return Err(ParseError::mismatch("whitespace", other.tagged_type_name())), }; peeked.commit(); @@ -1390,11 +1444,15 @@ pub struct SpacedExpression { } impl ExpandExpression for SpacedExpression { + fn name(&self) -> &'static str { + "spaced expression" + } + fn expand_expr<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - ) -> Result { + ) -> Result { // TODO: Make the 
name part of the trait let peeked = token_nodes.peek_any().not_eof("whitespace")?; @@ -1404,10 +1462,7 @@ impl ExpandExpression for SpacedExpression { expand_expr(&self.inner, token_nodes, context) } - other => Err(ShellError::type_error( - "whitespace", - other.tagged_type_name(), - )), + other => Err(ParseError::mismatch("whitespace", other.tagged_type_name())), } } } @@ -1424,6 +1479,36 @@ pub struct MaybeSpacedExpression { #[derive(Debug, Copy, Clone)] pub struct MaybeSpaceShape; +impl ExpandSyntax for MaybeSpaceShape { + type Output = Option; + + fn name(&self) -> &'static str { + "maybe space" + } + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Result { + let peeked = token_nodes.peek_any().not_eof("whitespace"); + + let span = match peeked { + Err(_) => None, + Ok(peeked) => { + if let TokenNode::Whitespace(..) = peeked.node { + let node = peeked.commit(); + Some(node.span()) + } else { + None + } + } + }; + + Ok(span) + } +} + #[cfg(not(coloring_in_tokens))] impl ColorSyntax for MaybeSpaceShape { type Info = (); @@ -1544,11 +1629,15 @@ impl FallibleColorSyntax for SpaceShape { } impl ExpandExpression for MaybeSpacedExpression { + fn name(&self) -> &'static str { + "maybe space" + } + fn expand_expr<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - ) -> Result { + ) -> Result { // TODO: Make the name part of the trait let peeked = token_nodes.peek_any().not_eof("whitespace")?; @@ -1578,58 +1667,6 @@ fn expand_variable(span: Span, token_span: Span, source: &Text) -> hir::Expressi } } -fn classify_command( - mut iterator: &mut TokensIterator, - context: &ExpandContext, - source: &Text, -) -> Result { - let head = CommandHeadShape.expand_syntax(&mut iterator, &context)?; - - match &head { - CommandSignature::Expression(_) => Err(ShellError::syntax_error( - "Unexpected expression in command position".tagged(iterator.whole_span()), - )), - - // If the command 
starts with `^`, treat it as an external command no matter what - CommandSignature::External(name) => { - let name_str = name.slice(source); - - external_command(&mut iterator, source, name_str.tagged(name)) - } - - CommandSignature::LiteralExternal { outer, inner } => { - let name_str = inner.slice(source); - - external_command(&mut iterator, source, name_str.tagged(outer)) - } - - CommandSignature::Internal(command) => { - let tail = - parse_command_tail(&command.signature(), &context, &mut iterator, command.span)?; - - let (positional, named) = match tail { - None => (None, None), - Some((positional, named)) => (positional, named), - }; - - let call = hir::Call { - head: Box::new(head.to_expression()), - positional, - named, - }; - - Ok(ClassifiedCommand::Internal(InternalCommand::new( - command.name().to_string(), - Tag { - span: command.span, - anchor: None, - }, - call, - ))) - } - } -} - #[derive(Debug, Copy, Clone)] pub struct CommandShape; diff --git a/src/parser/hir/syntax_shape/block.rs b/src/parser/hir/syntax_shape/block.rs index 0061c0fe8c..b5059bcb7b 100644 --- a/src/parser/hir/syntax_shape/block.rs +++ b/src/parser/hir/syntax_shape/block.rs @@ -6,7 +6,8 @@ use crate::parser::{ hir::syntax_shape::{ color_fallible_syntax, color_syntax_with, continue_expression, expand_expr, expand_syntax, DelimitedShape, ExpandContext, ExpandExpression, ExpressionContinuationShape, - ExpressionListShape, FallibleColorSyntax, MemberShape, PathTailShape, VariablePathShape, + ExpressionListShape, FallibleColorSyntax, MemberShape, ParseError, PathTailShape, + VariablePathShape, }, hir::tokens_iterator::TokensIterator, parse::token_tree::Delimiter, @@ -42,7 +43,7 @@ impl FallibleColorSyntax for AnyBlockShape { match block { // If so, color it as a block Some((children, spans)) => { - let mut token_nodes = TokensIterator::new(children.item, context.span, false); + let mut token_nodes = TokensIterator::new(children.item, children.span, false); color_syntax_with( 
&DelimitedShape, &(Delimiter::Brace, spans.0, spans.1), @@ -109,11 +110,15 @@ impl FallibleColorSyntax for AnyBlockShape { } impl ExpandExpression for AnyBlockShape { + fn name(&self) -> &'static str { + "any block" + } + fn expand_expr<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, - ) -> Result { + ) -> Result { let block = token_nodes.peek_non_ws().not_eof("block")?; // is it just a block? @@ -121,11 +126,11 @@ impl ExpandExpression for AnyBlockShape { match block { Some((block, _tags)) => { - let mut iterator = TokensIterator::new(&block.item, context.span, false); + let mut iterator = TokensIterator::new(&block.item, block.span, false); let exprs = expand_syntax(&ExpressionListShape, &mut iterator, context)?; - return Ok(hir::RawExpression::Block(exprs).spanned(block.span)); + return Ok(hir::RawExpression::Block(exprs.item).spanned(block.span)); } _ => {} } @@ -204,14 +209,18 @@ impl FallibleColorSyntax for ShorthandBlock { } impl ExpandExpression for ShorthandBlock { + fn name(&self) -> &'static str { + "shorthand block" + } + fn expand_expr<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - ) -> Result { + ) -> Result { let path = expand_expr(&ShorthandPath, token_nodes, context)?; let start = path.span; - let expr = continue_expression(path, token_nodes, context)?; + let expr = continue_expression(path, token_nodes, context); let end = expr.span; let block = hir::RawExpression::Block(vec![expr]).spanned(start.until(end)); @@ -317,11 +326,15 @@ impl FallibleColorSyntax for ShorthandPath { } impl ExpandExpression for ShorthandPath { + fn name(&self) -> &'static str { + "shorthand path" + } + fn expand_expr<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - ) -> Result { + ) -> Result { // if it's a variable path, that's the head part let path = expand_expr(&VariablePathShape, token_nodes, context); @@ -339,7 +352,7 @@ impl ExpandExpression for ShorthandPath { 
match tail { Err(_) => return Ok(head), - Ok((tail, _)) => { + Ok(Spanned { item: tail, .. }) => { // For each member that `PathTailShape` expanded, join it onto the existing expression // to form a new path for member in tail { @@ -446,11 +459,15 @@ impl FallibleColorSyntax for ShorthandHeadShape { } impl ExpandExpression for ShorthandHeadShape { + fn name(&self) -> &'static str { + "shorthand head" + } + fn expand_expr<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - ) -> Result { + ) -> Result { // A shorthand path must not be at EOF let peeked = token_nodes.peek_non_ws().not_eof("shorthand path")?; @@ -495,7 +512,7 @@ impl ExpandExpression for ShorthandHeadShape { // Any other token is not a valid bare head other => { - return Err(ShellError::type_error( + return Err(ParseError::mismatch( "shorthand path", other.tagged_type_name(), )) diff --git a/src/parser/hir/syntax_shape/expression.rs b/src/parser/hir/syntax_shape/expression.rs index 0681c9c403..6429ab57c3 100644 --- a/src/parser/hir/syntax_shape/expression.rs +++ b/src/parser/hir/syntax_shape/expression.rs @@ -12,7 +12,7 @@ use crate::parser::hir::syntax_shape::{ color_delimited_square, color_fallible_syntax, color_fallible_syntax_with, expand_atom, expand_delimited_square, expand_expr, expand_syntax, AtomicToken, BareShape, ColorableDotShape, DotShape, ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, ExpressionContinuation, - ExpressionContinuationShape, FallibleColorSyntax, FlatShape, + ExpressionContinuationShape, FallibleColorSyntax, FlatShape, ParseError, }; use crate::parser::{ hir, @@ -25,15 +25,19 @@ use std::path::PathBuf; pub struct AnyExpressionShape; impl ExpandExpression for AnyExpressionShape { + fn name(&self) -> &'static str { + "any expression" + } + fn expand_expr<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, - ) -> Result { + ) -> Result { // Look for an expression at the cursor let head = 
expand_expr(&AnyExpressionStartShape, token_nodes, context)?; - continue_expression(head, token_nodes, context) + Ok(continue_expression(head, token_nodes, context)) } } @@ -98,14 +102,14 @@ pub(crate) fn continue_expression( mut head: hir::Expression, token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, -) -> Result { +) -> hir::Expression { loop { // Check to see whether there's any continuation after the head expression let continuation = expand_syntax(&ExpressionContinuationShape, token_nodes, context); match continuation { // If there's no continuation, return the head - Err(_) => return Ok(head), + Err(_) => return head, // Otherwise, form a new expression by combining the head with the continuation Ok(continuation) => match continuation { // If the continuation is a `.member`, form a path with the new member @@ -174,11 +178,15 @@ pub(crate) fn continue_coloring_expression( pub struct AnyExpressionStartShape; impl ExpandExpression for AnyExpressionStartShape { + fn name(&self) -> &'static str { + "any expression start" + } + fn expand_expr<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, - ) -> Result { + ) -> Result { let atom = expand_atom(token_nodes, "expression", context, ExpansionRule::new())?; match atom.item { @@ -445,13 +453,17 @@ impl FallibleColorSyntax for BareTailShape { } impl ExpandSyntax for BareTailShape { + fn name(&self) -> &'static str { + "word continuation" + } + type Output = Option; fn expand_syntax<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - ) -> Result, ShellError> { + ) -> Result, ParseError> { let mut end: Option = None; loop { diff --git a/src/parser/hir/syntax_shape/expression/atom.rs b/src/parser/hir/syntax_shape/expression/atom.rs index 888d9430e6..09583c728b 100644 --- a/src/parser/hir/syntax_shape/expression/atom.rs +++ b/src/parser/hir/syntax_shape/expression/atom.rs @@ -90,40 +90,40 @@ impl<'tokens> SpannedAtomicToken<'tokens> { &self, context: 
&ExpandContext, expected: &'static str, - ) -> Result { + ) -> Result { Ok(match &self.item { AtomicToken::Eof { .. } => { - return Err(ShellError::type_error( + return Err(ParseError::mismatch( expected, "eof atomic token".tagged(self.span), )) } AtomicToken::Error { .. } => { - return Err(ShellError::type_error( + return Err(ParseError::mismatch( expected, "eof atomic token".tagged(self.span), )) } AtomicToken::Operator { .. } => { - return Err(ShellError::type_error( - expected, - "operator".tagged(self.span), - )) + return Err(ParseError::mismatch(expected, "operator".tagged(self.span))) } AtomicToken::ShorthandFlag { .. } => { - return Err(ShellError::type_error( + return Err(ParseError::mismatch( expected, "shorthand flag".tagged(self.span), )) } AtomicToken::LonghandFlag { .. } => { - return Err(ShellError::type_error(expected, "flag".tagged(self.span))) + return Err(ParseError::mismatch(expected, "flag".tagged(self.span))) } AtomicToken::Whitespace { .. } => { - return Err(ShellError::unimplemented("whitespace in AtomicToken")) + return Err(ParseError::mismatch( + expected, + "whitespace".tagged(self.span), + )) } AtomicToken::Dot { .. 
} => { - return Err(ShellError::type_error(expected, "dot".tagged(self.span))) + return Err(ParseError::mismatch(expected, "dot".tagged(self.span))) } AtomicToken::Number { number } => { Expression::number(number.to_number(context.source), self.span) @@ -381,7 +381,7 @@ pub fn expand_atom<'me, 'content>( expected: &'static str, context: &ExpandContext, rule: ExpansionRule, -) -> Result, ShellError> { +) -> Result, ParseError> { if token_nodes.at_end() { match rule.allow_eof { true => { @@ -390,7 +390,7 @@ pub fn expand_atom<'me, 'content>( } .spanned(Span::unknown())) } - false => return Err(ShellError::unexpected_eof("anything", Tag::unknown())), + false => return Err(ParseError::unexpected_eof("anything", Span::unknown())), } } @@ -515,12 +515,13 @@ pub fn expand_atom<'me, 'content>( // if whitespace is disallowed, return an error WhitespaceHandling::RejectWhitespace => { - return Err(ShellError::syntax_error("Unexpected whitespace".tagged( - Tag { + return Err(ParseError::mismatch( + expected, + "whitespace".tagged(Tag { span: *span, anchor: None, - }, - ))) + }), + )) } }, @@ -544,7 +545,7 @@ pub fn expand_atom<'me, 'content>( RawToken::Operator(_) if !rule.allow_operator => return Err(err.error()), // rule.allow_external_command RawToken::ExternalCommand(_) if !rule.allow_external_command => { - return Err(ShellError::type_error( + return Err(ParseError::mismatch( expected, token.type_name().tagged(Tag { span: token_span, @@ -554,10 +555,13 @@ pub fn expand_atom<'me, 'content>( } // rule.allow_external_word RawToken::ExternalWord if !rule.allow_external_word => { - return Err(ShellError::invalid_external_word(Tag { - span: token_span, - anchor: None, - })) + return Err(ParseError::mismatch( + expected, + "external word".tagged(Tag { + span: token_span, + anchor: None, + }), + )) } RawToken::Number(number) => AtomicToken::Number { number }.spanned(token_span), diff --git a/src/parser/hir/syntax_shape/expression/delimited.rs 
b/src/parser/hir/syntax_shape/expression/delimited.rs index 8cd1e9805a..02b61e4730 100644 --- a/src/parser/hir/syntax_shape/expression/delimited.rs +++ b/src/parser/hir/syntax_shape/expression/delimited.rs @@ -8,12 +8,15 @@ pub fn expand_delimited_square( children: &Vec, span: Span, context: &ExpandContext, -) -> Result { +) -> Result { let mut tokens = TokensIterator::new(&children, span, false); let list = expand_syntax(&ExpressionListShape, &mut tokens, context); - Ok(hir::Expression::list(list?, Tag { span, anchor: None })) + Ok(hir::Expression::list( + list?.item, + Tag { span, anchor: None }, + )) } #[cfg(not(coloring_in_tokens))] diff --git a/src/parser/hir/syntax_shape/expression/file_path.rs b/src/parser/hir/syntax_shape/expression/file_path.rs index f0e5ee0079..4b7caf9f3e 100644 --- a/src/parser/hir/syntax_shape/expression/file_path.rs +++ b/src/parser/hir/syntax_shape/expression/file_path.rs @@ -1,6 +1,7 @@ use crate::parser::hir::syntax_shape::expression::atom::{expand_atom, AtomicToken, ExpansionRule}; use crate::parser::hir::syntax_shape::{ expression::expand_file_path, ExpandContext, ExpandExpression, FallibleColorSyntax, FlatShape, + ParseError, }; use crate::parser::{hir, hir::TokensIterator}; use crate::prelude::*; @@ -90,11 +91,15 @@ impl FallibleColorSyntax for FilePathShape { } impl ExpandExpression for FilePathShape { + fn name(&self) -> &'static str { + "file path" + } + fn expand_expr<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, - ) -> Result { + ) -> Result { let atom = expand_atom(token_nodes, "file path", context, ExpansionRule::new())?; match atom.item { diff --git a/src/parser/hir/syntax_shape/expression/list.rs b/src/parser/hir/syntax_shape/expression/list.rs index 51a6b852ca..fa6b5864a1 100644 --- a/src/parser/hir/syntax_shape/expression/list.rs +++ b/src/parser/hir/syntax_shape/expression/list.rs @@ -1,4 +1,4 @@ -use crate::errors::ShellError; +use crate::errors::ParseError; 
#[cfg(not(coloring_in_tokens))] use crate::parser::hir::syntax_shape::FlatShape; use crate::parser::{ @@ -10,24 +10,36 @@ use crate::parser::{ }, hir::TokensIterator, }; -#[cfg(not(coloring_in_tokens))] -use crate::Spanned; +use crate::{DebugFormatter, FormatDebug, Spanned, SpannedItem}; +use std::fmt; #[derive(Debug, Copy, Clone)] pub struct ExpressionListShape; +impl FormatDebug for Spanned> { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + FormatDebug::fmt_debug(&self.item, f, source) + } +} + impl ExpandSyntax for ExpressionListShape { - type Output = Vec; + type Output = Spanned>; + + fn name(&self) -> &'static str { + "expression list" + } fn expand_syntax<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, - ) -> Result, ShellError> { + ) -> Result>, ParseError> { let mut exprs = vec![]; + let start = token_nodes.span_at_cursor(); + if token_nodes.at_end_possible_ws() { - return Ok(exprs); + return Ok(exprs.spanned(start)); } let expr = expand_expr(&maybe_spaced(AnyExpressionShape), token_nodes, context)?; @@ -36,7 +48,8 @@ impl ExpandSyntax for ExpressionListShape { loop { if token_nodes.at_end_possible_ws() { - return Ok(exprs); + let end = token_nodes.span_at_cursor(); + return Ok(exprs.spanned(start.until(end))); } let expr = expand_expr(&spaced(AnyExpressionShape), token_nodes, context)?; diff --git a/src/parser/hir/syntax_shape/expression/number.rs b/src/parser/hir/syntax_shape/expression/number.rs index d4069478e9..6c599cc026 100644 --- a/src/parser/hir/syntax_shape/expression/number.rs +++ b/src/parser/hir/syntax_shape/expression/number.rs @@ -1,6 +1,6 @@ use crate::parser::hir::syntax_shape::{ expand_atom, parse_single_node, ExpandContext, ExpandExpression, ExpansionRule, - FallibleColorSyntax, FlatShape, + FallibleColorSyntax, FlatShape, ParseError, }; use crate::parser::{ hir, @@ -13,11 +13,15 @@ use crate::prelude::*; pub struct NumberShape; impl ExpandExpression for NumberShape { + fn 
name(&self) -> &'static str { + "number" + } + fn expand_expr<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, - ) -> Result { + ) -> Result { parse_single_node(token_nodes, "Number", |token, token_span, err| { Ok(match token { RawToken::GlobPattern | RawToken::Operator(..) => return Err(err.error()), @@ -28,10 +32,13 @@ impl ExpandExpression for NumberShape { hir::Expression::external_command(tag, token_span) } RawToken::ExternalWord => { - return Err(ShellError::invalid_external_word(Tag { - span: token_span, - anchor: None, - })) + return Err(ParseError::mismatch( + "number", + "syntax error".tagged(Tag { + span: token_span, + anchor: None, + }), + )) } RawToken::Variable(tag) => hir::Expression::variable(tag, token_span), RawToken::Number(number) => { @@ -111,16 +118,19 @@ impl FallibleColorSyntax for NumberShape { pub struct IntShape; impl ExpandExpression for IntShape { + fn name(&self) -> &'static str { + "integer" + } + fn expand_expr<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, - ) -> Result { + ) -> Result { parse_single_node(token_nodes, "Integer", |token, token_span, err| { Ok(match token { - RawToken::GlobPattern | RawToken::Operator(..) => return Err(err.error()), - RawToken::ExternalWord => { - return Err(ShellError::invalid_external_word(token_span)) + RawToken::GlobPattern | RawToken::Operator(..) 
| RawToken::ExternalWord => { + return Err(err.error()) } RawToken::Variable(span) if span.slice(context.source) == "it" => { hir::Expression::it_variable(span, token_span) diff --git a/src/parser/hir/syntax_shape/expression/pattern.rs b/src/parser/hir/syntax_shape/expression/pattern.rs index ed3bd610cd..2ccd5a4f0d 100644 --- a/src/parser/hir/syntax_shape/expression/pattern.rs +++ b/src/parser/hir/syntax_shape/expression/pattern.rs @@ -1,6 +1,6 @@ use crate::parser::hir::syntax_shape::{ expand_atom, expand_bare, expression::expand_file_path, AtomicToken, ExpandContext, - ExpandExpression, ExpandSyntax, ExpansionRule, FallibleColorSyntax, FlatShape, + ExpandExpression, ExpandSyntax, ExpansionRule, FallibleColorSyntax, FlatShape, ParseError, }; use crate::parser::{hir, hir::TokensIterator, Operator, RawToken, TokenNode}; use crate::prelude::*; @@ -66,11 +66,15 @@ impl FallibleColorSyntax for PatternShape { } impl ExpandExpression for PatternShape { + fn name(&self) -> &'static str { + "glob pattern" + } + fn expand_expr<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, - ) -> Result { + ) -> Result { let atom = expand_atom(token_nodes, "pattern", context, ExpansionRule::new())?; match atom.item { @@ -91,11 +95,15 @@ pub struct BarePatternShape; impl ExpandSyntax for BarePatternShape { type Output = Span; + fn name(&self) -> &'static str { + "bare pattern" + } + fn expand_syntax<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - ) -> Result { + ) -> Result { expand_bare(token_nodes, context, |token| match token { TokenNode::Token(Spanned { item: RawToken::Bare, diff --git a/src/parser/hir/syntax_shape/expression/string.rs b/src/parser/hir/syntax_shape/expression/string.rs index 46015376e8..454cb9f46d 100644 --- a/src/parser/hir/syntax_shape/expression/string.rs +++ b/src/parser/hir/syntax_shape/expression/string.rs @@ -1,6 +1,6 @@ use crate::parser::hir::syntax_shape::{ expand_atom, expand_variable, 
parse_single_node, AtomicToken, ExpandContext, ExpandExpression, - ExpansionRule, FallibleColorSyntax, FlatShape, TestSyntax, + ExpansionRule, FallibleColorSyntax, FlatShape, ParseError, TestSyntax, }; use crate::parser::hir::tokens_iterator::Peeked; use crate::parser::{hir, hir::TokensIterator, RawToken}; @@ -75,32 +75,24 @@ impl FallibleColorSyntax for StringShape { } impl ExpandExpression for StringShape { + fn name(&self) -> &'static str { + "string" + } + fn expand_expr<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, - ) -> Result { - parse_single_node(token_nodes, "String", |token, token_span, _| { + ) -> Result { + parse_single_node(token_nodes, "String", |token, token_span, err| { Ok(match token { - RawToken::GlobPattern => { - return Err(ShellError::type_error( - "String", - "glob pattern".tagged(token_span), - )) - } - RawToken::Operator(..) => { - return Err(ShellError::type_error( - "String", - "operator".tagged(token_span), - )) + RawToken::GlobPattern | RawToken::Operator(..) 
| RawToken::ExternalWord => { + return Err(err.error()) } RawToken::Variable(span) => expand_variable(span, token_span, &context.source), RawToken::ExternalCommand(span) => { hir::Expression::external_command(span, token_span) } - RawToken::ExternalWord => { - return Err(ShellError::invalid_external_word(token_span)) - } RawToken::Number(_) => hir::Expression::bare(token_span), RawToken::Bare => hir::Expression::bare(token_span), RawToken::String(span) => hir::Expression::string(span, token_span), diff --git a/src/parser/hir/syntax_shape/expression/unit.rs b/src/parser/hir/syntax_shape/expression/unit.rs index 2c01038ebc..c4bd85434b 100644 --- a/src/parser/hir/syntax_shape/expression/unit.rs +++ b/src/parser/hir/syntax_shape/expression/unit.rs @@ -1,5 +1,5 @@ use crate::data::meta::Span; -use crate::parser::hir::syntax_shape::{ExpandContext, ExpandSyntax}; +use crate::parser::hir::syntax_shape::{ExpandContext, ExpandSyntax, ParseError}; use crate::parser::parse::tokens::RawNumber; use crate::parser::parse::unit::Unit; use crate::parser::{hir::TokensIterator, RawToken, TokenNode}; @@ -9,18 +9,34 @@ use nom::bytes::complete::tag; use nom::character::complete::digit1; use nom::combinator::{all_consuming, opt, value}; use nom::IResult; +use std::fmt; #[derive(Debug, Copy, Clone)] pub struct UnitShape; +impl FormatDebug for Spanned<(Spanned, Spanned)> { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + let dict = indexmap::indexmap! 
{ + "number" => format!("{}", self.item.0.item.debug(source)), + "unit" => format!("{}", self.item.1.debug(source)), + }; + + f.say_dict("unit", dict) + } +} + impl ExpandSyntax for UnitShape { type Output = Spanned<(Spanned, Spanned)>; + fn name(&self) -> &'static str { + "unit" + } + fn expand_syntax<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - ) -> Result, Spanned)>, ShellError> { + ) -> Result, Spanned)>, ParseError> { let peeked = token_nodes.peek_any().not_eof("unit")?; let span = match peeked.node { @@ -34,12 +50,7 @@ impl ExpandSyntax for UnitShape { let unit = unit_size(span.slice(context.source), *span); let (_, (number, unit)) = match unit { - Err(_) => { - return Err(ShellError::type_error( - "unit", - "word".tagged(Tag::unknown()), - )) - } + Err(_) => return Err(ParseError::mismatch("unit", "word".tagged(Tag::unknown()))), Ok((number, unit)) => (number, unit), }; diff --git a/src/parser/hir/syntax_shape/expression/variable_path.rs b/src/parser/hir/syntax_shape/expression/variable_path.rs index 5ed615a9e8..1a91e132c6 100644 --- a/src/parser/hir/syntax_shape/expression/variable_path.rs +++ b/src/parser/hir/syntax_shape/expression/variable_path.rs @@ -1,21 +1,28 @@ use crate::parser::hir::syntax_shape::{ color_fallible_syntax, color_fallible_syntax_with, expand_atom, expand_expr, expand_syntax, parse_single_node, AnyExpressionShape, AtomicToken, BareShape, ExpandContext, ExpandExpression, - ExpandSyntax, ExpansionRule, FallibleColorSyntax, FlatShape, Peeked, SkipSyntax, StringShape, - TestSyntax, WhitespaceShape, + ExpandSyntax, ExpansionRule, FallibleColorSyntax, FlatShape, ParseError, Peeked, SkipSyntax, + StringShape, TestSyntax, WhitespaceShape, }; use crate::parser::{hir, hir::Expression, hir::TokensIterator, Operator, RawToken}; use crate::prelude::*; +use derive_new::new; +use getset::Getters; +use std::fmt; #[derive(Debug, Copy, Clone)] pub struct VariablePathShape; impl ExpandExpression for 
VariablePathShape { + fn name(&self) -> &'static str { + "variable path" + } + fn expand_expr<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, - ) -> Result { + ) -> Result { // 1. let the head be the first token, expecting a variable // 2. let the tail be an empty list of members // 2. while the next token (excluding ws) is a dot: @@ -200,12 +207,17 @@ impl FallibleColorSyntax for PathTailShape { } impl ExpandSyntax for PathTailShape { - type Output = (Vec>, Span); + type Output = Spanned>>; + + fn name(&self) -> &'static str { + "path continuation" + } + fn expand_syntax<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - ) -> Result { + ) -> Result { let mut end: Option = None; let mut tail = vec![]; @@ -223,7 +235,7 @@ impl ExpandSyntax for PathTailShape { match end { None => { - return Err(ShellError::type_error("path tail", { + return Err(ParseError::mismatch("path tail", { let typed_span = token_nodes.typed_span_at_cursor(); Tagged { @@ -233,17 +245,41 @@ impl ExpandSyntax for PathTailShape { })) } - Some(end) => Ok((tail, end)), + Some(end) => Ok(tail.spanned(end)), } } } -#[derive(Debug)] +#[derive(Debug, Clone)] pub enum ExpressionContinuation { DotSuffix(Span, Spanned), InfixSuffix(Spanned, Expression), } +impl FormatDebug for ExpressionContinuation { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + match self { + ExpressionContinuation::DotSuffix(dot, rest) => { + f.say_str("dot suffix", dot.until(rest.span).slice(source)) + } + ExpressionContinuation::InfixSuffix(operator, expr) => { + f.say_str("infix suffix", operator.span.until(expr.span).slice(source)) + } + } + } +} + +impl HasSpan for ExpressionContinuation { + fn span(&self) -> Span { + match self { + ExpressionContinuation::DotSuffix(dot, column) => dot.until(column.span), + ExpressionContinuation::InfixSuffix(operator, expression) => { + operator.span.until(expression.span) + } + } + } +} + /// An 
expression continuation #[derive(Debug, Copy, Clone)] pub struct ExpressionContinuationShape; @@ -251,11 +287,15 @@ pub struct ExpressionContinuationShape; impl ExpandSyntax for ExpressionContinuationShape { type Output = ExpressionContinuation; + fn name(&self) -> &'static str { + "expression continuation" + } + fn expand_syntax<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, - ) -> Result { + ) -> Result { // Try to expand a `.` let dot = expand_syntax(&DotShape, token_nodes, context); @@ -270,7 +310,7 @@ impl ExpandSyntax for ExpressionContinuationShape { // Otherwise, we expect an infix operator and an expression next Err(_) => { - let (_, op, _) = expand_syntax(&InfixShape, token_nodes, context)?; + let (_, op, _) = expand_syntax(&InfixShape, token_nodes, context)?.item; let next = expand_expr(&AnyExpressionShape, token_nodes, context)?; Ok(ExpressionContinuation::InfixSuffix(op, next)) @@ -390,12 +430,16 @@ impl FallibleColorSyntax for ExpressionContinuationShape { pub struct VariableShape; impl ExpandExpression for VariableShape { + fn name(&self) -> &'static str { + "variable" + } + fn expand_expr<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, - ) -> Result { - parse_single_node(token_nodes, "variable", |token, token_tag, _| { + ) -> Result { + parse_single_node(token_nodes, "variable", |token, token_tag, err| { Ok(match token { RawToken::Variable(tag) => { if tag.slice(context.source) == "it" { @@ -404,12 +448,7 @@ impl ExpandExpression for VariableShape { hir::Expression::variable(tag, token_tag) } } - _ => { - return Err(ShellError::type_error( - "variable", - token.type_name().tagged(token_tag), - )) - } + _ => return Err(err.error()), }) }) } @@ -435,7 +474,7 @@ impl FallibleColorSyntax for VariableShape { ); let atom = match atom { - Err(err) => return Err(err), + Err(err) => return Err(err.into()), Ok(atom) => atom, }; @@ -476,7 +515,7 @@ impl FallibleColorSyntax for VariableShape { ); 
let atom = match atom { - Err(err) => return Err(err), + Err(err) => return Err(err.into()), Ok(atom) => atom, }; @@ -489,7 +528,7 @@ impl FallibleColorSyntax for VariableShape { token_nodes.color_shape(FlatShape::ItVariable.spanned(atom.span)); Ok(()) } - _ => Err(ShellError::type_error("variable", atom.tagged_type_name())), + _ => Err(ParseError::mismatch("variable", atom.tagged_type_name()).into()), } } } @@ -500,6 +539,24 @@ pub enum Member { Bare(Span), } +impl FormatDebug for Member { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + match self { + Member::String(outer, _) => write!(f, "member ({})", outer.slice(source)), + Member::Bare(bare) => write!(f, "member ({})", bare.slice(source)), + } + } +} + +impl HasSpan for Member { + fn span(&self) -> Span { + match self { + Member::String(outer, ..) => *outer, + Member::Bare(name) => *name, + } + } +} + impl Member { pub(crate) fn to_expr(&self) -> hir::Expression { match self { @@ -538,7 +595,7 @@ enum ColumnPathState { LeadingDot(Span), Dot(Span, Vec, Span), Member(Span, Vec), - Error(ShellError), + Error(ParseError), } impl ColumnPathState { @@ -546,10 +603,10 @@ impl ColumnPathState { match self { ColumnPathState::Initial => ColumnPathState::LeadingDot(dot), ColumnPathState::LeadingDot(_) => { - ColumnPathState::Error(ShellError::type_error("column", "dot".tagged(dot))) + ColumnPathState::Error(ParseError::mismatch("column", "dot".tagged(dot))) } ColumnPathState::Dot(..) => { - ColumnPathState::Error(ShellError::type_error("column", "dot".tagged(dot))) + ColumnPathState::Error(ParseError::mismatch("column", "dot".tagged(dot))) } ColumnPathState::Member(tag, members) => ColumnPathState::Dot(tag, members, dot), ColumnPathState::Error(err) => ColumnPathState::Error(err), @@ -570,20 +627,20 @@ impl ColumnPathState { }) } ColumnPathState::Member(..) 
=> { - ColumnPathState::Error(ShellError::type_error("column", member.tagged_type_name())) + ColumnPathState::Error(ParseError::mismatch("column", member.tagged_type_name())) } ColumnPathState::Error(err) => ColumnPathState::Error(err), } } - pub fn into_path(self, next: Peeked) -> Result>, ShellError> { + pub fn into_path(self, next: Peeked) -> Result>, ParseError> { match self { ColumnPathState::Initial => Err(next.type_error("column path")), ColumnPathState::LeadingDot(dot) => { - Err(ShellError::type_error("column", "dot".tagged(dot))) + Err(ParseError::mismatch("column", "dot".tagged(dot))) } ColumnPathState::Dot(_tag, _members, dot) => { - Err(ShellError::type_error("column", "dot".tagged(dot))) + Err(ParseError::mismatch("column", "dot".tagged(dot))) } ColumnPathState::Member(tag, tags) => Ok(tags.tagged(tag)), ColumnPathState::Error(err) => Err(err), @@ -594,7 +651,7 @@ impl ColumnPathState { pub fn expand_column_path<'a, 'b>( token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, -) -> Result>, ShellError> { +) -> Result>, ParseError> { let mut state = ColumnPathState::Initial; loop { @@ -720,15 +777,43 @@ impl FallibleColorSyntax for ColumnPathShape { } } +impl FormatDebug for Tagged> { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + self.item.fmt_debug(f, source) + } +} + +#[derive(Debug, Clone, Getters, new)] +pub struct ColumnPath { + #[get = "pub"] + path: Tagged>, +} + +impl HasSpan for ColumnPath { + fn span(&self) -> Span { + self.path.tag.span + } +} + +impl FormatDebug for ColumnPath { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + f.say("column path", self.path.item.debug(source)) + } +} + impl ExpandSyntax for ColumnPathShape { - type Output = Tagged>; + type Output = ColumnPath; + + fn name(&self) -> &'static str { + "column path" + } fn expand_syntax<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - ) -> Result { - 
expand_column_path(token_nodes, context) + ) -> Result { + Ok(ColumnPath::new(expand_column_path(token_nodes, context)?)) } } @@ -806,11 +891,15 @@ impl FallibleColorSyntax for MemberShape { impl ExpandSyntax for MemberShape { type Output = Member; + fn name(&self) -> &'static str { + "column" + } + fn expand_syntax<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, - ) -> Result { + ) -> Result { let bare = BareShape.test(token_nodes, context); if let Some(peeked) = bare { let node = peeked.not_eof("column")?.commit(); @@ -906,16 +995,20 @@ impl SkipSyntax for DotShape { impl ExpandSyntax for DotShape { type Output = Span; + fn name(&self) -> &'static str { + "dot" + } + fn expand_syntax<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, _context: &ExpandContext, - ) -> Result { + ) -> Result { parse_single_node(token_nodes, "dot", |token, token_span, _| { Ok(match token { RawToken::Operator(Operator::Dot) => token_span, _ => { - return Err(ShellError::type_error( + return Err(ParseError::mismatch( "dot", token.type_name().tagged(token_span), )) @@ -950,7 +1043,7 @@ impl FallibleColorSyntax for InfixShape { parse_single_node( checkpoint.iterator, "infix operator", - |token, token_span, _| { + |token, token_span, err| { match token { // If it's an operator (and not `.`), it's a match RawToken::Operator(operator) if operator != Operator::Dot => { @@ -959,10 +1052,7 @@ impl FallibleColorSyntax for InfixShape { } // Otherwise, it's not a match - _ => Err(ShellError::type_error( - "infix operator", - token.type_name().tagged(token_span), - )), + _ => Err(err.error()), } }, )?; @@ -1006,7 +1096,7 @@ impl FallibleColorSyntax for InfixShape { RawToken::Operator(operator) if operator != Operator::Dot => Ok(token_span), // Otherwise, it's not a match - _ => Err(ShellError::type_error( + _ => Err(ParseError::mismatch( "infix operator", token.type_name().tagged(token_span), )), @@ -1026,46 +1116,72 @@ impl FallibleColorSyntax for InfixShape { 
} } +impl FormatDebug for Spanned<(Span, Spanned, Span)> { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + f.say_str("operator", self.item.1.span.slice(source)) + } +} + impl ExpandSyntax for InfixShape { - type Output = (Span, Spanned, Span); + type Output = Spanned<(Span, Spanned, Span)>; + + fn name(&self) -> &'static str { + "infix operator" + } fn expand_syntax<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, - ) -> Result { - let checkpoint = token_nodes.checkpoint(); + ) -> Result { + let mut checkpoint = token_nodes.checkpoint(); // An infix operator must be prefixed by whitespace let start = expand_syntax(&WhitespaceShape, checkpoint.iterator, context)?; // Parse the next TokenNode after the whitespace - let operator = parse_single_node( - checkpoint.iterator, - "infix operator", - |token, token_span, _| { - Ok(match token { - // If it's an operator (and not `.`), it's a match - RawToken::Operator(operator) if operator != Operator::Dot => { - operator.spanned(token_span) - } - - // Otherwise, it's not a match - _ => { - return Err(ShellError::type_error( - "infix operator", - token.type_name().tagged(token_span), - )) - } - }) - }, - )?; + let operator = expand_syntax(&InfixInnerShape, &mut checkpoint.iterator, context)?; // An infix operator must be followed by whitespace let end = expand_syntax(&WhitespaceShape, checkpoint.iterator, context)?; checkpoint.commit(); - Ok((start, operator, end)) + Ok((start, operator, end).spanned(start.until(end))) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct InfixInnerShape; + +impl FormatDebug for Spanned { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + f.say_str("operator", self.span.slice(source)) + } +} + +impl ExpandSyntax for InfixInnerShape { + type Output = Spanned; + + fn name(&self) -> &'static str { + "infix inner" + } + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + 
_context: &ExpandContext, + ) -> Result { + parse_single_node(token_nodes, "infix operator", |token, token_span, err| { + Ok(match token { + // If it's an operator (and not `.`), it's a match + RawToken::Operator(operator) if operator != Operator::Dot => { + operator.spanned(token_span) + } + + // Otherwise, it's not a match + _ => return Err(err.error()), + }) + }) } } diff --git a/src/parser/hir/syntax_shape/flat_shape.rs b/src/parser/hir/syntax_shape/flat_shape.rs index b961d1f567..48cb5b72cc 100644 --- a/src/parser/hir/syntax_shape/flat_shape.rs +++ b/src/parser/hir/syntax_shape/flat_shape.rs @@ -1,5 +1,5 @@ use crate::parser::{Delimiter, Flag, FlagKind, Operator, RawNumber, RawToken, TokenNode}; -use crate::{Span, Spanned, SpannedItem, Text}; +use crate::{HasSpan, Span, Spanned, SpannedItem, Text}; #[derive(Debug, Copy, Clone)] pub enum FlatShape { diff --git a/src/parser/hir/tokens_iterator.rs b/src/parser/hir/tokens_iterator.rs index 8e2f4a8f88..bba5ff1352 100644 --- a/src/parser/hir/tokens_iterator.rs +++ b/src/parser/hir/tokens_iterator.rs @@ -1,25 +1,38 @@ pub(crate) mod debug; -use self::debug::Tracer; +use self::debug::{ColorTracer, ExpandTracer}; use crate::errors::ShellError; #[cfg(coloring_in_tokens)] use crate::parser::hir::syntax_shape::FlatShape; +use crate::parser::hir::Expression; use crate::parser::TokenNode; use crate::prelude::*; use crate::{Span, Spanned, SpannedItem}; #[allow(unused)] use getset::{Getters, MutGetters}; -#[derive(Getters, Debug)] -pub struct TokensIteratorState<'content> { - tokens: &'content [TokenNode], - span: Span, - skip_ws: bool, - index: usize, - seen: indexmap::IndexSet, - #[cfg(coloring_in_tokens)] - #[cfg_attr(coloring_in_tokens, get = "pub")] - shapes: Vec>, +cfg_if::cfg_if! 
{ + if #[cfg(coloring_in_tokens)] { + #[derive(Getters, Debug)] + pub struct TokensIteratorState<'content> { + tokens: &'content [TokenNode], + span: Span, + skip_ws: bool, + index: usize, + seen: indexmap::IndexSet, + #[get = "pub"] + shapes: Vec>, + } + } else { + #[derive(Getters, Debug)] + pub struct TokensIteratorState<'content> { + tokens: &'content [TokenNode], + span: Span, + skip_ws: bool, + index: usize, + seen: indexmap::IndexSet, + } + } } #[derive(Getters, MutGetters, Debug)] @@ -29,7 +42,10 @@ pub struct TokensIterator<'content> { state: TokensIteratorState<'content>, #[get = "pub"] #[get_mut = "pub"] - tracer: Tracer, + color_tracer: ColorTracer, + #[get = "pub"] + #[get_mut = "pub"] + expand_tracer: ExpandTracer, } #[derive(Debug)] @@ -83,12 +99,9 @@ impl<'content, 'me> Peeked<'content, 'me> { Some(node) } - pub fn not_eof( - self, - expected: impl Into, - ) -> Result, ShellError> { + pub fn not_eof(self, expected: &'static str) -> Result, ParseError> { match self.node { - None => Err(ShellError::unexpected_eof( + None => Err(ParseError::unexpected_eof( expected, self.iterator.eof_span(), )), @@ -101,7 +114,7 @@ impl<'content, 'me> Peeked<'content, 'me> { } } - pub fn type_error(&self, expected: impl Into) -> ShellError { + pub fn type_error(&self, expected: &'static str) -> ParseError { peek_error(&self.node, self.iterator.eof_span(), expected) } } @@ -129,19 +142,15 @@ impl<'content, 'me> PeekedNode<'content, 'me> { pub fn rollback(self) {} - pub fn type_error(&self, expected: impl Into) -> ShellError { + pub fn type_error(&self, expected: &'static str) -> ParseError { peek_error(&Some(self.node), self.iterator.eof_span(), expected) } } -pub fn peek_error( - node: &Option<&TokenNode>, - eof_span: Span, - expected: impl Into, -) -> ShellError { +pub fn peek_error(node: &Option<&TokenNode>, eof_span: Span, expected: &'static str) -> ParseError { match node { - None => ShellError::unexpected_eof(expected, eof_span), - Some(node) => 
ShellError::type_error(expected, node.tagged_type_name()), + None => ParseError::unexpected_eof(expected, eof_span), + Some(node) => ParseError::mismatch(expected, node.tagged_type_name()), } } @@ -161,7 +170,8 @@ impl<'content> TokensIterator<'content> { #[cfg(coloring_in_tokens)] shapes: vec![], }, - tracer: Tracer::new(), + color_tracer: ColorTracer::new(), + expand_tracer: ExpandTracer::new(), } } @@ -188,7 +198,7 @@ impl<'content> TokensIterator<'content> { #[cfg(coloring_in_tokens)] pub fn color_shape(&mut self, shape: Spanned) { - self.with_tracer(|_, tracer| tracer.add_shape(shape)); + self.with_color_tracer(|_, tracer| tracer.add_shape(shape)); self.state.shapes.push(shape); } @@ -201,7 +211,7 @@ impl<'content> TokensIterator<'content> { (len..(shapes.len())).map(|i| shapes[i]).collect() }; - self.with_tracer(|_, tracer| { + self.with_color_tracer(|_, tracer| { for shape in new_shapes { tracer.add_shape(shape) } @@ -233,8 +243,11 @@ impl<'content> TokensIterator<'content> { let mut shapes = vec![]; std::mem::swap(&mut shapes, &mut self.state.shapes); - let mut tracer = Tracer::new(); - std::mem::swap(&mut tracer, &mut self.tracer); + let mut color_tracer = ColorTracer::new(); + std::mem::swap(&mut color_tracer, &mut self.color_tracer); + + let mut expand_tracer = ExpandTracer::new(); + std::mem::swap(&mut expand_tracer, &mut self.expand_tracer); let mut iterator = TokensIterator { state: TokensIteratorState { @@ -245,13 +258,15 @@ impl<'content> TokensIterator<'content> { seen: indexmap::IndexSet::new(), shapes, }, - tracer, + color_tracer, + expand_tracer, }; let result = block(&mut iterator); std::mem::swap(&mut iterator.state.shapes, &mut self.state.shapes); - std::mem::swap(&mut iterator.tracer, &mut self.tracer); + std::mem::swap(&mut iterator.color_tracer, &mut self.color_tracer); + std::mem::swap(&mut iterator.expand_tracer, &mut self.expand_tracer); result } @@ -262,8 +277,11 @@ impl<'content> TokensIterator<'content> { tokens: Spanned<&'me 
[TokenNode]>, block: impl FnOnce(&mut TokensIterator<'me>) -> T, ) -> T { - let mut tracer = Tracer::new(); - std::mem::swap(&mut tracer, &mut self.tracer); + let mut color_tracer = ColorTracer::new(); + std::mem::swap(&mut color_tracer, &mut self.color_tracer); + + let mut expand_tracer = ExpandTracer::new(); + std::mem::swap(&mut expand_tracer, &mut self.expand_tracer); let mut iterator = TokensIterator { state: TokensIteratorState { @@ -273,19 +291,34 @@ impl<'content> TokensIterator<'content> { index: 0, seen: indexmap::IndexSet::new(), }, - tracer, + color_tracer, + expand_tracer, }; let result = block(&mut iterator); - std::mem::swap(&mut iterator.tracer, &mut self.tracer); + std::mem::swap(&mut iterator.color_tracer, &mut self.color_tracer); + std::mem::swap(&mut iterator.expand_tracer, &mut self.expand_tracer); result } - pub fn with_tracer(&mut self, block: impl FnOnce(&mut TokensIteratorState, &mut Tracer)) { + pub fn with_color_tracer( + &mut self, + block: impl FnOnce(&mut TokensIteratorState, &mut ColorTracer), + ) { let state = &mut self.state; - let tracer = &mut self.tracer; + let color_tracer = &mut self.color_tracer; + + block(state, color_tracer) + } + + pub fn with_expand_tracer( + &mut self, + block: impl FnOnce(&mut TokensIteratorState, &mut ExpandTracer), + ) { + let state = &mut self.state; + let tracer = &mut self.expand_tracer; block(state, tracer) } @@ -296,32 +329,77 @@ impl<'content> TokensIterator<'content> { desc: &'static str, block: impl FnOnce(&mut TokensIterator) -> T, ) -> T { - self.with_tracer(|_, tracer| tracer.start(desc)); + self.with_color_tracer(|_, tracer| tracer.start(desc)); let result = block(self); - self.with_tracer(|_, tracer| { + self.with_color_tracer(|_, tracer| { tracer.success(); }); result } + pub fn expand_frame( + &mut self, + desc: &'static str, + block: impl FnOnce(&mut TokensIterator) -> Result, + ) -> Result + where + T: std::fmt::Debug + FormatDebug + Clone + HasFallibleSpan + 'static, + { + 
self.with_expand_tracer(|_, tracer| tracer.start(desc)); + + let result = block(self); + + self.with_expand_tracer(|_, tracer| match &result { + Ok(result) => { + tracer.add_result(Box::new(result.clone())); + tracer.success(); + } + + Err(err) => tracer.failed(err), + }); + + result + } + + pub fn expand_expr_frame( + &mut self, + desc: &'static str, + block: impl FnOnce(&mut TokensIterator) -> Result, + ) -> Result { + self.with_expand_tracer(|_, tracer| tracer.start(desc)); + + let result = block(self); + + self.with_expand_tracer(|_, tracer| match &result { + Ok(expr) => { + tracer.add_expr(expr.clone()); + tracer.success() + } + + Err(err) => tracer.failed(err), + }); + + result + } + pub fn color_fallible_frame( &mut self, desc: &'static str, block: impl FnOnce(&mut TokensIterator) -> Result, ) -> Result { - self.with_tracer(|_, tracer| tracer.start(desc)); + self.with_color_tracer(|_, tracer| tracer.start(desc)); if self.at_end() { - self.with_tracer(|_, tracer| tracer.eof_frame()); + self.with_color_tracer(|_, tracer| tracer.eof_frame()); return Err(ShellError::unexpected_eof("coloring", Tag::unknown())); } let result = block(self); - self.with_tracer(|_, tracer| match &result { + self.with_color_tracer(|_, tracer| match &result { Ok(_) => { tracer.success(); } @@ -431,10 +509,6 @@ impl<'content> TokensIterator<'content> { } } - pub fn whole_span(&self) -> Span { - self.state.span - } - pub fn span_at_cursor(&mut self) -> Span { let next = self.peek_any(); @@ -491,27 +565,22 @@ impl<'content> TokensIterator<'content> { self.state.index = 0; } - pub fn clone(&self) -> TokensIterator<'content> { - let state = &self.state; - TokensIterator { - state: TokensIteratorState { - tokens: state.tokens, - span: state.span, - index: state.index, - seen: state.seen.clone(), - skip_ws: state.skip_ws, - #[cfg(coloring_in_tokens)] - shapes: state.shapes.clone(), - }, - tracer: self.tracer.clone(), - } - } - - // Get the next token, not including whitespace - pub fn 
next_non_ws(&mut self) -> Option<&TokenNode> { - let mut peeked = start_next(self, true); - peeked.commit() - } + // pub fn clone(&self) -> TokensIterator<'content> { + // let state = &self.state; + // TokensIterator { + // state: TokensIteratorState { + // tokens: state.tokens, + // span: state.span, + // index: state.index, + // seen: state.seen.clone(), + // skip_ws: state.skip_ws, + // #[cfg(coloring_in_tokens)] + // shapes: state.shapes.clone(), + // }, + // color_tracer: self.color_tracer.clone(), + // expand_tracer: self.expand_tracer.clone(), + // } + // } // Peek the next token, not including whitespace pub fn peek_non_ws<'me>(&'me mut self) -> Peeked<'content, 'me> { @@ -527,8 +596,8 @@ impl<'content> TokensIterator<'content> { pub fn peek_any_token<'me, T>( &'me mut self, expected: &'static str, - block: impl FnOnce(&'content TokenNode) -> Result, - ) -> Result { + block: impl FnOnce(&'content TokenNode) -> Result, + ) -> Result { let peeked = start_next(self, false); let peeked = peeked.not_eof(expected); @@ -557,9 +626,11 @@ impl<'content> TokensIterator<'content> { } pub fn debug_remaining(&self) -> Vec { - let mut tokens = self.clone(); - tokens.restart(); - tokens.cloned().collect() + // TODO: TODO: TODO: Clean up + vec![] + // let mut tokens = self.clone(); + // tokens.restart(); + // tokens.cloned().collect() } } diff --git a/src/parser/hir/tokens_iterator/debug.rs b/src/parser/hir/tokens_iterator/debug.rs index 332a74067c..6e2d7082b2 100644 --- a/src/parser/hir/tokens_iterator/debug.rs +++ b/src/parser/hir/tokens_iterator/debug.rs @@ -1,13 +1,13 @@ -use crate::errors::ShellError; -use crate::parser::hir::syntax_shape::FlatShape; +#![allow(unused)] + +pub(crate) mod color_trace; +pub(crate) mod expand_trace; + +pub(crate) use self::color_trace::*; +pub(crate) use self::expand_trace::*; + use crate::parser::hir::tokens_iterator::TokensIteratorState; -use crate::prelude::*; use crate::traits::ToDebug; -use ansi_term::Color; -use log::trace; -use 
ptree::*; -use std::borrow::Cow; -use std::io; #[derive(Debug)] pub(crate) enum DebugIteratorToken { @@ -36,344 +36,3 @@ pub(crate) fn debug_tokens(state: &TokensIteratorState, source: &str) -> Vec), - Frame(ColorFrame), -} - -impl FrameChild { - fn colored_leaf_description(&self, text: &Text, f: &mut impl io::Write) -> io::Result<()> { - match self { - FrameChild::Shape(shape) => write!( - f, - "{} {:?}", - Color::White - .bold() - .on(Color::Green) - .paint(format!("{:?}", shape.item)), - shape.span.slice(text) - ), - - FrameChild::Frame(frame) => frame.colored_leaf_description(f), - } - } - - fn into_tree_child(self, text: &Text) -> TreeChild { - match self { - FrameChild::Shape(shape) => TreeChild::Shape(shape, text.clone()), - FrameChild::Frame(frame) => TreeChild::Frame(frame, text.clone()), - } - } -} - -#[derive(Debug, Clone)] -pub struct ColorFrame { - description: &'static str, - children: Vec, - error: Option, -} - -impl ColorFrame { - fn colored_leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> { - if self.has_only_error_descendents() { - if self.children.len() == 0 { - write!( - f, - "{}", - Color::White.bold().on(Color::Red).paint(self.description) - ) - } else { - write!(f, "{}", Color::Red.normal().paint(self.description)) - } - } else if self.has_descendent_shapes() { - write!(f, "{}", Color::Green.normal().paint(self.description)) - } else { - write!(f, "{}", Color::Yellow.bold().paint(self.description)) - } - } - - fn colored_description(&self, text: &Text, f: &mut impl io::Write) -> io::Result<()> { - if self.children.len() == 1 { - let child = &self.children[0]; - - self.colored_leaf_description(f)?; - write!(f, " -> ")?; - child.colored_leaf_description(text, f) - } else { - self.colored_leaf_description(f) - } - } - - fn children_for_formatting(&self, text: &Text) -> Vec { - if self.children.len() == 1 { - let child = &self.children[0]; - - match child { - FrameChild::Shape(_) => vec![], - FrameChild::Frame(frame) => 
frame.tree_children(text), - } - } else { - self.tree_children(text) - } - } - - fn tree_children(&self, text: &Text) -> Vec { - self.children - .clone() - .into_iter() - .map(|c| c.into_tree_child(text)) - .collect() - } - - #[allow(unused)] - fn add_shape(&mut self, shape: Spanned) { - self.children.push(FrameChild::Shape(shape)) - } - - fn has_child_shapes(&self) -> bool { - self.any_child_shape(|_| true) - } - - fn any_child_shape(&self, predicate: impl Fn(Spanned) -> bool) -> bool { - for item in &self.children { - match item { - FrameChild::Shape(shape) => { - if predicate(*shape) { - return true; - } - } - - _ => {} - } - } - - false - } - - fn any_child_frame(&self, predicate: impl Fn(&ColorFrame) -> bool) -> bool { - for item in &self.children { - match item { - FrameChild::Frame(frame) => { - if predicate(frame) { - return true; - } - } - - _ => {} - } - } - - false - } - - fn has_descendent_shapes(&self) -> bool { - if self.has_child_shapes() { - true - } else { - self.any_child_frame(|frame| frame.has_descendent_shapes()) - } - } - - fn has_only_error_descendents(&self) -> bool { - if self.children.len() == 0 { - // if this frame has no children at all, it has only error descendents if this frame - // is an error - self.error.is_some() - } else { - // otherwise, it has only error descendents if all of its children terminate in an - // error (transitively) - - let mut seen_error = false; - - for child in &self.children { - match child { - // if this frame has at least one child shape, this frame has non-error descendents - FrameChild::Shape(_) => return false, - FrameChild::Frame(frame) => { - // if the chi - if frame.has_only_error_descendents() { - seen_error = true; - } else { - return false; - } - } - } - } - - seen_error - } - } -} - -#[derive(Debug, Clone)] -pub enum TreeChild { - Shape(Spanned, Text), - Frame(ColorFrame, Text), -} - -impl TreeChild { - fn colored_leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> { - match self { - 
TreeChild::Shape(shape, text) => write!( - f, - "{} {:?}", - Color::White - .bold() - .on(Color::Green) - .paint(format!("{:?}", shape.item)), - shape.span.slice(text) - ), - - TreeChild::Frame(frame, _) => frame.colored_leaf_description(f), - } - } -} - -impl TreeItem for TreeChild { - type Child = TreeChild; - - fn write_self(&self, f: &mut W, _style: &Style) -> io::Result<()> { - match self { - shape @ TreeChild::Shape(..) => shape.colored_leaf_description(f), - - TreeChild::Frame(frame, text) => frame.colored_description(text, f), - } - } - - fn children(&self) -> Cow<[Self::Child]> { - match self { - TreeChild::Shape(..) => Cow::Borrowed(&[]), - TreeChild::Frame(frame, text) => Cow::Owned(frame.children_for_formatting(text)), - } - } -} - -#[derive(Debug, Clone)] -pub struct Tracer { - frame_stack: Vec, -} - -impl Tracer { - pub fn print(self, source: Text) -> PrintTracer { - PrintTracer { - tracer: self, - source, - } - } - - pub fn new() -> Tracer { - let root = ColorFrame { - description: "Trace", - children: vec![], - error: None, - }; - - Tracer { - frame_stack: vec![root], - } - } - - fn current_frame(&mut self) -> &mut ColorFrame { - let frames = &mut self.frame_stack; - let last = frames.len() - 1; - &mut frames[last] - } - - fn pop_frame(&mut self) -> ColorFrame { - let result = self.frame_stack.pop().expect("Can't pop root tracer frame"); - - if self.frame_stack.len() == 0 { - panic!("Can't pop root tracer frame"); - } - - self.debug(); - - result - } - - pub fn start(&mut self, description: &'static str) { - let frame = ColorFrame { - description, - children: vec![], - error: None, - }; - - self.frame_stack.push(frame); - self.debug(); - } - - pub fn eof_frame(&mut self) { - let current = self.pop_frame(); - self.current_frame() - .children - .push(FrameChild::Frame(current)); - } - - #[allow(unused)] - pub fn finish(&mut self) { - loop { - if self.frame_stack.len() == 1 { - break; - } - - let frame = self.pop_frame(); - 
self.current_frame().children.push(FrameChild::Frame(frame)); - } - } - - #[allow(unused)] - pub fn add_shape(&mut self, shape: Spanned) { - self.current_frame().add_shape(shape); - } - - pub fn success(&mut self) { - let current = self.pop_frame(); - self.current_frame() - .children - .push(FrameChild::Frame(current)); - } - - pub fn failed(&mut self, error: &ShellError) { - let mut current = self.pop_frame(); - current.error = Some(error.clone()); - self.current_frame() - .children - .push(FrameChild::Frame(current)); - } - - fn debug(&self) { - trace!(target: "nu::color_syntax", - "frames = {:?}", - self.frame_stack - .iter() - .map(|f| f.description) - .collect::>() - ); - - trace!(target: "nu::color_syntax", "{:#?}", self); - } -} - -#[derive(Debug, Clone)] -pub struct PrintTracer { - tracer: Tracer, - source: Text, -} - -impl TreeItem for PrintTracer { - type Child = TreeChild; - - fn write_self(&self, f: &mut W, style: &Style) -> io::Result<()> { - write!(f, "{}", style.paint("Color Trace")) - } - - fn children(&self) -> Cow<[Self::Child]> { - Cow::Owned(vec![TreeChild::Frame( - self.tracer.frame_stack[0].clone(), - self.source.clone(), - )]) - } -} diff --git a/src/parser/hir/tokens_iterator/debug/color_trace.rs b/src/parser/hir/tokens_iterator/debug/color_trace.rs new file mode 100644 index 0000000000..bbb9d856c4 --- /dev/null +++ b/src/parser/hir/tokens_iterator/debug/color_trace.rs @@ -0,0 +1,351 @@ +use crate::errors::ShellError; +use crate::parser::hir::syntax_shape::FlatShape; +use crate::prelude::*; +use ansi_term::Color; +use log::trace; +use ptree::*; +use std::borrow::Cow; +use std::io; + +#[derive(Debug, Clone)] +pub enum FrameChild { + #[allow(unused)] + Shape(Spanned), + Frame(ColorFrame), +} + +impl FrameChild { + fn colored_leaf_description(&self, text: &Text, f: &mut impl io::Write) -> io::Result<()> { + match self { + FrameChild::Shape(shape) => write!( + f, + "{} {:?}", + Color::White + .bold() + .on(Color::Green) + .paint(format!("{:?}", 
shape.item)), + shape.span.slice(text) + ), + + FrameChild::Frame(frame) => frame.colored_leaf_description(f), + } + } + + fn into_tree_child(self, text: &Text) -> TreeChild { + match self { + FrameChild::Shape(shape) => TreeChild::Shape(shape, text.clone()), + FrameChild::Frame(frame) => TreeChild::Frame(frame, text.clone()), + } + } +} + +#[derive(Debug, Clone)] +pub struct ColorFrame { + description: &'static str, + children: Vec, + error: Option, +} + +impl ColorFrame { + fn colored_leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> { + if self.has_only_error_descendents() { + if self.children.len() == 0 { + write!( + f, + "{}", + Color::White.bold().on(Color::Red).paint(self.description) + ) + } else { + write!(f, "{}", Color::Red.normal().paint(self.description)) + } + } else if self.has_descendent_shapes() { + write!(f, "{}", Color::Green.normal().paint(self.description)) + } else { + write!(f, "{}", Color::Yellow.bold().paint(self.description)) + } + } + + fn colored_description(&self, text: &Text, f: &mut impl io::Write) -> io::Result<()> { + if self.children.len() == 1 { + let child = &self.children[0]; + + self.colored_leaf_description(f)?; + write!(f, " -> ")?; + child.colored_leaf_description(text, f) + } else { + self.colored_leaf_description(f) + } + } + + fn children_for_formatting(&self, text: &Text) -> Vec { + if self.children.len() == 1 { + let child = &self.children[0]; + + match child { + FrameChild::Shape(_) => vec![], + FrameChild::Frame(frame) => frame.tree_children(text), + } + } else { + self.tree_children(text) + } + } + + fn tree_children(&self, text: &Text) -> Vec { + self.children + .clone() + .into_iter() + .map(|c| c.into_tree_child(text)) + .collect() + } + + #[allow(unused)] + fn add_shape(&mut self, shape: Spanned) { + self.children.push(FrameChild::Shape(shape)) + } + + fn has_child_shapes(&self) -> bool { + self.any_child_shape(|_| true) + } + + fn any_child_shape(&self, predicate: impl Fn(Spanned) -> bool) -> 
bool { + for item in &self.children { + match item { + FrameChild::Shape(shape) => { + if predicate(*shape) { + return true; + } + } + + _ => {} + } + } + + false + } + + fn any_child_frame(&self, predicate: impl Fn(&ColorFrame) -> bool) -> bool { + for item in &self.children { + match item { + FrameChild::Frame(frame) => { + if predicate(frame) { + return true; + } + } + + _ => {} + } + } + + false + } + + fn has_descendent_shapes(&self) -> bool { + if self.has_child_shapes() { + true + } else { + self.any_child_frame(|frame| frame.has_descendent_shapes()) + } + } + + fn has_only_error_descendents(&self) -> bool { + if self.children.len() == 0 { + // if this frame has no children at all, it has only error descendents if this frame + // is an error + self.error.is_some() + } else { + // otherwise, it has only error descendents if all of its children terminate in an + // error (transitively) + + let mut seen_error = false; + + for child in &self.children { + match child { + // if this frame has at least one child shape, this frame has non-error descendents + FrameChild::Shape(_) => return false, + FrameChild::Frame(frame) => { + // if the chi + if frame.has_only_error_descendents() { + seen_error = true; + } else { + return false; + } + } + } + } + + seen_error + } + } +} + +#[derive(Debug, Clone)] +pub enum TreeChild { + Shape(Spanned, Text), + Frame(ColorFrame, Text), +} + +impl TreeChild { + fn colored_leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> { + match self { + TreeChild::Shape(shape, text) => write!( + f, + "{} {:?}", + Color::White + .bold() + .on(Color::Green) + .paint(format!("{:?}", shape.item)), + shape.span.slice(text) + ), + + TreeChild::Frame(frame, _) => frame.colored_leaf_description(f), + } + } +} + +impl TreeItem for TreeChild { + type Child = TreeChild; + + fn write_self(&self, f: &mut W, _style: &Style) -> io::Result<()> { + match self { + shape @ TreeChild::Shape(..) 
=> shape.colored_leaf_description(f), + + TreeChild::Frame(frame, text) => frame.colored_description(text, f), + } + } + + fn children(&self) -> Cow<[Self::Child]> { + match self { + TreeChild::Shape(..) => Cow::Borrowed(&[]), + TreeChild::Frame(frame, text) => Cow::Owned(frame.children_for_formatting(text)), + } + } +} + +#[derive(Debug, Clone)] +pub struct ColorTracer { + frame_stack: Vec, +} + +impl ColorTracer { + pub fn print(self, source: Text) -> PrintTracer { + PrintTracer { + tracer: self, + source, + } + } + + pub fn new() -> ColorTracer { + let root = ColorFrame { + description: "Trace", + children: vec![], + error: None, + }; + + ColorTracer { + frame_stack: vec![root], + } + } + + fn current_frame(&mut self) -> &mut ColorFrame { + let frames = &mut self.frame_stack; + let last = frames.len() - 1; + &mut frames[last] + } + + fn pop_frame(&mut self) -> ColorFrame { + trace!(target: "nu::color_syntax", "Popping {:#?}", self); + + let result = self.frame_stack.pop().expect("Can't pop root tracer frame"); + + if self.frame_stack.len() == 0 { + panic!("Can't pop root tracer frame {:#?}", self); + } + + self.debug(); + + result + } + + pub fn start(&mut self, description: &'static str) { + let frame = ColorFrame { + description, + children: vec![], + error: None, + }; + + self.frame_stack.push(frame); + self.debug(); + } + + pub fn eof_frame(&mut self) { + let current = self.pop_frame(); + self.current_frame() + .children + .push(FrameChild::Frame(current)); + } + + #[allow(unused)] + pub fn finish(&mut self) { + loop { + if self.frame_stack.len() == 1 { + break; + } + + let frame = self.pop_frame(); + self.current_frame().children.push(FrameChild::Frame(frame)); + } + } + + #[allow(unused)] + pub fn add_shape(&mut self, shape: Spanned) { + self.current_frame().add_shape(shape); + } + + pub fn success(&mut self) { + let current = self.pop_frame(); + self.current_frame() + .children + .push(FrameChild::Frame(current)); + } + + pub fn failed(&mut self, error: 
&ShellError) { + let mut current = self.pop_frame(); + current.error = Some(error.clone()); + self.current_frame() + .children + .push(FrameChild::Frame(current)); + } + + fn debug(&self) { + trace!(target: "nu::color_syntax", + "frames = {:?}", + self.frame_stack + .iter() + .map(|f| f.description) + .collect::>() + ); + + trace!(target: "nu::color_syntax", "{:#?}", self); + } +} + +#[derive(Debug, Clone)] +pub struct PrintTracer { + tracer: ColorTracer, + source: Text, +} + +impl TreeItem for PrintTracer { + type Child = TreeChild; + + fn write_self(&self, f: &mut W, style: &Style) -> io::Result<()> { + write!(f, "{}", style.paint("Color Trace")) + } + + fn children(&self) -> Cow<[Self::Child]> { + Cow::Owned(vec![TreeChild::Frame( + self.tracer.frame_stack[0].clone(), + self.source.clone(), + )]) + } +} diff --git a/src/parser/hir/tokens_iterator/debug/expand_trace.rs b/src/parser/hir/tokens_iterator/debug/expand_trace.rs new file mode 100644 index 0000000000..11b705b9d8 --- /dev/null +++ b/src/parser/hir/tokens_iterator/debug/expand_trace.rs @@ -0,0 +1,365 @@ +use crate::parser::hir::Expression; +use crate::prelude::*; +use ansi_term::Color; +use log::trace; +use ptree::*; +use std::borrow::Cow; +use std::io; + +#[derive(Debug)] +pub enum FrameChild { + Expr(Expression), + Frame(ExprFrame), + Result(Box), +} + +impl FrameChild { + fn get_error_leaf(&self) -> Option<&'static str> { + match self { + FrameChild::Frame(frame) if frame.error.is_some() => { + if frame.children.len() == 0 { + Some(frame.description) + } else { + None + } + } + _ => None, + } + } + + fn to_tree_child(&self, text: &Text) -> TreeChild { + match self { + FrameChild::Expr(expr) => TreeChild::OkExpr(expr.clone(), text.clone()), + FrameChild::Result(result) => { + let result = format!("{}", result.debug(text)); + TreeChild::OkNonExpr(result) + } + FrameChild::Frame(frame) => { + if frame.error.is_some() { + if frame.children.len() == 0 { + TreeChild::ErrorLeaf(vec![frame.description]) + } 
else { + TreeChild::ErrorFrame(frame.to_tree_frame(text), text.clone()) + } + } else { + TreeChild::OkFrame(frame.to_tree_frame(text), text.clone()) + } + } + } + } +} + +#[derive(Debug)] +pub struct ExprFrame { + description: &'static str, + children: Vec, + error: Option, +} + +impl ExprFrame { + fn to_tree_frame(&self, text: &Text) -> TreeFrame { + let mut children = vec![]; + let mut errors = vec![]; + + for child in &self.children { + if let Some(error_leaf) = child.get_error_leaf() { + errors.push(error_leaf); + continue; + } else if errors.len() > 0 { + children.push(TreeChild::ErrorLeaf(errors)); + errors = vec![]; + } + + children.push(child.to_tree_child(text)); + } + + if errors.len() > 0 { + children.push(TreeChild::ErrorLeaf(errors)); + } + + TreeFrame { + description: self.description, + children, + error: self.error.clone(), + } + } + + fn add_expr(&mut self, expr: Expression) { + self.children.push(FrameChild::Expr(expr)) + } + + fn add_result(&mut self, result: Box) { + self.children.push(FrameChild::Result(result)) + } +} + +#[derive(Debug, Clone)] +pub struct TreeFrame { + description: &'static str, + children: Vec, + error: Option, +} + +impl TreeFrame { + fn leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> { + if self.children.len() == 1 { + if self.error.is_some() { + write!(f, "{}", Color::Red.normal().paint(self.description))?; + } else if self.has_descendent_green() { + write!(f, "{}", Color::Green.normal().paint(self.description))?; + } else { + write!(f, "{}", Color::Yellow.bold().paint(self.description))?; + } + + write!(f, " -> ")?; + self.children[0].leaf_description(f) + } else { + if self.error.is_some() { + if self.children.len() == 0 { + write!( + f, + "{}", + Color::White.bold().on(Color::Red).paint(self.description) + ) + } else { + write!(f, "{}", Color::Red.normal().paint(self.description)) + } + } else if self.has_descendent_green() { + write!(f, "{}", Color::Green.normal().paint(self.description)) + } else { 
+ write!(f, "{}", Color::Yellow.bold().paint(self.description)) + } + } + } + + fn has_child_green(&self) -> bool { + self.children.iter().any(|item| match item { + TreeChild::OkFrame(..) | TreeChild::ErrorFrame(..) | TreeChild::ErrorLeaf(..) => false, + TreeChild::OkExpr(..) | TreeChild::OkNonExpr(..) => true, + }) + } + + fn any_child_frame(&self, predicate: impl Fn(&TreeFrame) -> bool) -> bool { + for item in &self.children { + match item { + TreeChild::OkFrame(frame, ..) => { + if predicate(frame) { + return true; + } + } + + _ => {} + } + } + + false + } + + fn has_descendent_green(&self) -> bool { + if self.has_child_green() { + true + } else { + self.any_child_frame(|frame| frame.has_child_green()) + } + } + + fn children_for_formatting(&self, text: &Text) -> Vec { + if self.children.len() == 1 { + let child: &TreeChild = &self.children[0]; + match child { + TreeChild::OkExpr(..) | TreeChild::OkNonExpr(..) | TreeChild::ErrorLeaf(..) => { + vec![] + } + TreeChild::OkFrame(frame, _) | TreeChild::ErrorFrame(frame, _) => { + frame.children_for_formatting(text) + } + } + } else { + self.children.clone() + } + } +} + +#[derive(Debug, Clone)] +pub enum TreeChild { + OkNonExpr(String), + OkExpr(Expression, Text), + OkFrame(TreeFrame, Text), + ErrorFrame(TreeFrame, Text), + ErrorLeaf(Vec<&'static str>), +} + +impl TreeChild { + fn leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> { + match self { + TreeChild::OkExpr(expr, text) => write!( + f, + "{} {} {}", + Color::Cyan.normal().paint("returns"), + Color::White.bold().on(Color::Green).paint(expr.type_name()), + expr.span.slice(text) + ), + + TreeChild::OkNonExpr(result) => write!( + f, + "{} {}", + Color::Cyan.normal().paint("returns"), + Color::White + .bold() + .on(Color::Green) + .paint(format!("{}", result)) + ), + + TreeChild::ErrorLeaf(desc) => { + let last = desc.len() - 1; + + for (i, item) in desc.iter().enumerate() { + write!(f, "{}", Color::White.bold().on(Color::Red).paint(*item))?; + + 
if i != last { + write!(f, "{}", Color::White.normal().paint(", "))?; + } + } + + Ok(()) + } + + TreeChild::ErrorFrame(frame, _) | TreeChild::OkFrame(frame, _) => { + frame.leaf_description(f) + } + } + } +} + +impl TreeItem for TreeChild { + type Child = TreeChild; + + fn write_self(&self, f: &mut W, _style: &Style) -> io::Result<()> { + self.leaf_description(f) + } + + fn children(&self) -> Cow<[Self::Child]> { + match self { + TreeChild::OkExpr(..) | TreeChild::OkNonExpr(..) | TreeChild::ErrorLeaf(..) => { + Cow::Borrowed(&[]) + } + TreeChild::OkFrame(frame, text) | TreeChild::ErrorFrame(frame, text) => { + Cow::Owned(frame.children_for_formatting(text)) + } + } + } +} + +#[derive(Debug)] +pub struct ExpandTracer { + frame_stack: Vec, +} + +impl ExpandTracer { + pub fn print(&self, source: Text) -> PrintTracer { + let root = self + .frame_stack + .iter() + .nth(0) + .unwrap() + .to_tree_frame(&source); + + PrintTracer { root, source } + } + + pub fn new() -> ExpandTracer { + let root = ExprFrame { + description: "Trace", + children: vec![], + error: None, + }; + + ExpandTracer { + frame_stack: vec![root], + } + } + + fn current_frame(&mut self) -> &mut ExprFrame { + let frames = &mut self.frame_stack; + let last = frames.len() - 1; + &mut frames[last] + } + + fn pop_frame(&mut self) -> ExprFrame { + let result = self.frame_stack.pop().expect("Can't pop root tracer frame"); + + if self.frame_stack.len() == 0 { + panic!("Can't pop root tracer frame"); + } + + self.debug(); + + result + } + + pub fn start(&mut self, description: &'static str) { + let frame = ExprFrame { + description, + children: vec![], + error: None, + }; + + self.frame_stack.push(frame); + self.debug(); + } + + pub fn add_expr(&mut self, shape: Expression) { + self.current_frame().add_expr(shape); + } + + pub fn add_result(&mut self, result: Box) { + self.current_frame().add_result(result); + } + + pub fn success(&mut self) { + trace!(target: "parser::expand_syntax", "success {:#?}", self); + + 
let current = self.pop_frame(); + self.current_frame() + .children + .push(FrameChild::Frame(current)); + } + + pub fn failed(&mut self, error: &ParseError) { + let mut current = self.pop_frame(); + current.error = Some(error.clone()); + self.current_frame() + .children + .push(FrameChild::Frame(current)); + } + + fn debug(&self) { + trace!(target: "nu::parser::expand", + "frames = {:?}", + self.frame_stack + .iter() + .map(|f| f.description) + .collect::>() + ); + + trace!(target: "nu::parser::expand", "{:#?}", self); + } +} + +#[derive(Debug, Clone)] +pub struct PrintTracer { + root: TreeFrame, + source: Text, +} + +impl TreeItem for PrintTracer { + type Child = TreeChild; + + fn write_self(&self, f: &mut W, style: &Style) -> io::Result<()> { + write!(f, "{}", style.paint("Expansion Trace")) + } + + fn children(&self) -> Cow<[Self::Child]> { + Cow::Borrowed(&self.root.children) + } +} diff --git a/src/parser/parse/call_node.rs b/src/parser/parse/call_node.rs index eb715cd376..57d7fa9ad4 100644 --- a/src/parser/parse/call_node.rs +++ b/src/parser/parse/call_node.rs @@ -1,7 +1,7 @@ use crate::parser::TokenNode; -use crate::traits::ToDebug; +use crate::traits::{DebugFormatter, FormatDebug, ToDebug}; use getset::Getters; -use std::fmt; +use std::fmt::{self, Write}; #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters)] pub struct CallNode { @@ -27,8 +27,8 @@ impl CallNode { } } -impl ToDebug for CallNode { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { +impl FormatDebug for CallNode { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { write!(f, "{}", self.head.debug(source))?; if let Some(children) = &self.children { diff --git a/src/parser/parse/operator.rs b/src/parser/parse/operator.rs index 7b5a5c77d8..47c63075af 100644 --- a/src/parser/parse/operator.rs +++ b/src/parser/parse/operator.rs @@ -14,8 +14,8 @@ pub enum Operator { Dot, } -impl ToDebug for Operator { - fn fmt_debug(&self, f: &mut 
fmt::Formatter, _source: &str) -> fmt::Result { +impl FormatDebug for Operator { + fn fmt_debug(&self, f: &mut DebugFormatter, _source: &str) -> fmt::Result { write!(f, "{}", self.as_str()) } } diff --git a/src/parser/parse/pipeline.rs b/src/parser/parse/pipeline.rs index 4a8c72119c..c14f3745df 100644 --- a/src/parser/parse/pipeline.rs +++ b/src/parser/parse/pipeline.rs @@ -1,24 +1,22 @@ use crate::parser::TokenNode; -use crate::traits::ToDebug; -use crate::{Span, Spanned}; +use crate::{DebugFormatter, FormatDebug, Span, Spanned, ToDebug}; use derive_new::new; use getset::Getters; -use std::fmt; +use itertools::Itertools; +use std::fmt::{self, Write}; #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Getters, new)] pub struct Pipeline { #[get = "pub"] pub(crate) parts: Vec>, - // pub(crate) post_ws: Option, } -impl ToDebug for Pipeline { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { - for part in self.parts.iter() { - write!(f, "{}", part.debug(source))?; - } - - Ok(()) +impl FormatDebug for Pipeline { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + f.say_str( + "pipeline", + self.parts.iter().map(|p| p.debug(source)).join(" "), + ) } } @@ -29,8 +27,8 @@ pub struct PipelineElement { pub tokens: Spanned>, } -impl ToDebug for PipelineElement { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { +impl FormatDebug for PipelineElement { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { if let Some(pipe) = self.pipe { write!(f, "{}", pipe.slice(source))?; } diff --git a/src/parser/parse/token_tree.rs b/src/parser/parse/token_tree.rs index 0d00dcff0d..75228133da 100644 --- a/src/parser/parse/token_tree.rs +++ b/src/parser/parse/token_tree.rs @@ -1,4 +1,4 @@ -use crate::errors::ShellError; +use crate::errors::{ParseError, ShellError}; use crate::parser::parse::{call_node::*, flag::*, operator::*, pipeline::*, tokens::*}; use crate::prelude::*; use 
crate::traits::ToDebug; @@ -21,8 +21,14 @@ pub enum TokenNode { Error(Spanned), } -impl ToDebug for TokenNode { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { +impl HasSpan for TokenNode { + fn span(&self) -> Span { + self.get_span() + } +} + +impl FormatDebug for TokenNode { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { write!(f, "{:?}", self.old_debug(&Text::from(source))) } } @@ -84,12 +90,12 @@ impl fmt::Debug for DebugTokenNode<'_> { impl From<&TokenNode> for Span { fn from(token: &TokenNode) -> Span { - token.span() + token.get_span() } } impl TokenNode { - pub fn span(&self) -> Span { + pub fn get_span(&self) -> Span { match self { TokenNode::Token(t) => t.span, TokenNode::Nodes(t) => t.span, @@ -231,10 +237,10 @@ impl TokenNode { } } - pub fn as_pipeline(&self) -> Result { + pub fn as_pipeline(&self) -> Result { match self { TokenNode::Pipeline(Spanned { item, .. }) => Ok(item.clone()), - _ => Err(ShellError::type_error("pipeline", self.tagged_type_name())), + other => Err(ParseError::mismatch("pipeline", other.tagged_type_name())), } } @@ -321,9 +327,9 @@ impl TokenNode { } } - pub fn expect_list(&self) -> &[TokenNode] { + pub fn expect_list(&self) -> Spanned<&[TokenNode]> { match self { - TokenNode::Nodes(token_nodes) => &token_nodes[..], + TokenNode::Nodes(token_nodes) => token_nodes[..].spanned(token_nodes.span), other => panic!("Expected list, found {:?}", other), } } diff --git a/src/parser/parse/tokens.rs b/src/parser/parse/tokens.rs index 29061ed7a2..43ce7f405b 100644 --- a/src/parser/parse/tokens.rs +++ b/src/parser/parse/tokens.rs @@ -23,8 +23,8 @@ impl RawToken { RawToken::Operator(..) 
=> "operator", RawToken::String(_) => "string", RawToken::Variable(_) => "variable", - RawToken::ExternalCommand(_) => "external command", - RawToken::ExternalWord => "external word", + RawToken::ExternalCommand(_) => "syntax error", + RawToken::ExternalWord => "syntax error", RawToken::GlobPattern => "glob pattern", RawToken::Bare => "string", } @@ -37,6 +37,15 @@ pub enum RawNumber { Decimal(Span), } +impl FormatDebug for RawNumber { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + match self { + RawNumber::Int(span) => f.say_str("int", span.slice(source)), + RawNumber::Decimal(span) => f.say_str("decimal", span.slice(source)), + } + } +} + impl RawNumber { pub fn int(span: impl Into) -> Spanned { let span = span.into(); diff --git a/src/parser/parse/unit.rs b/src/parser/parse/unit.rs index e89986f8ac..e2075636a3 100644 --- a/src/parser/parse/unit.rs +++ b/src/parser/parse/unit.rs @@ -1,6 +1,7 @@ use crate::data::base::Value; use crate::prelude::*; use serde::{Deserialize, Serialize}; +use std::fmt; use std::str::FromStr; #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)] @@ -13,6 +14,12 @@ pub enum Unit { PB, } +impl FormatDebug for Spanned { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + write!(f, "{}", self.span.slice(source)) + } +} + impl Unit { pub fn as_str(&self) -> &str { match *self { diff --git a/src/parser/parse_command.rs b/src/parser/parse_command.rs index d531da62ac..32f05fd1ca 100644 --- a/src/parser/parse_command.rs +++ b/src/parser/parse_command.rs @@ -1,4 +1,4 @@ -use crate::errors::{ArgumentError, ShellError}; +use crate::errors::{ArgumentError, ParseError}; use crate::parser::hir::syntax_shape::{ color_fallible_syntax, color_syntax, expand_expr, flat_shape::FlatShape, spaced, BackoffColoringMode, ColorSyntax, MaybeSpaceShape, @@ -18,9 +18,9 @@ pub fn parse_command_tail( context: &ExpandContext, tail: &mut TokensIterator, command_span: Span, 
-) -> Result>, Option)>, ShellError> { +) -> Result>, Option)>, ParseError> { let mut named = NamedArguments::new(); - trace_remaining("nodes", tail.clone(), context.source()); + trace_remaining("nodes", &tail, context.source()); for (name, kind) in &config.named { trace!(target: "nu::parse", "looking for {} : {:?}", name, kind); @@ -38,7 +38,7 @@ pub fn parse_command_tail( tail.move_to(pos); if tail.at_end() { - return Err(ShellError::argument_error( + return Err(ParseError::argument_error( config.name.clone(), ArgumentError::MissingValueForName(name.to_string()), flag.span, @@ -59,7 +59,7 @@ pub fn parse_command_tail( tail.move_to(pos); if tail.at_end() { - return Err(ShellError::argument_error( + return Err(ParseError::argument_error( config.name.clone(), ArgumentError::MissingValueForName(name.to_string()), flag.span, @@ -85,7 +85,7 @@ pub fn parse_command_tail( }; } - trace_remaining("after named", tail.clone(), context.source()); + trace_remaining("after named", &tail, context.source()); let mut positional = vec![]; @@ -95,7 +95,7 @@ pub fn parse_command_tail( match &arg.0 { PositionalType::Mandatory(..) 
=> { if tail.at_end_possible_ws() { - return Err(ShellError::argument_error( + return Err(ParseError::argument_error( config.name.clone(), ArgumentError::MissingMandatoryPositional(arg.0.name().to_string()), Tag { @@ -118,7 +118,7 @@ pub fn parse_command_tail( positional.push(result); } - trace_remaining("after positional", tail.clone(), context.source()); + trace_remaining("after positional", &tail, context.source()); if let Some((syntax_type, _)) = config.rest_positional { let mut out = vec![]; @@ -136,7 +136,7 @@ pub fn parse_command_tail( positional.extend(out); } - trace_remaining("after rest", tail.clone(), context.source()); + trace_remaining("after rest", &tail, context.source()); trace!(target: "nu::parse", "Constructed positional={:?} named={:?}", positional, named); @@ -202,8 +202,6 @@ impl ColorSyntax for CommandTailShape { shapes: &mut Vec>, ) -> Self::Info { let mut args = ColoringArgs::new(token_nodes.len()); - trace_remaining("nodes", token_nodes.clone(), context.source()); - for (name, kind) in &signature.named { trace!(target: "nu::color_syntax", "looking for {} : {:?}", name, kind); @@ -295,8 +293,6 @@ impl ColorSyntax for CommandTailShape { }; } - trace_remaining("after named", token_nodes.clone(), context.source()); - for arg in &signature.positional { trace!("Processing positional {:?}", arg); @@ -341,8 +337,6 @@ impl ColorSyntax for CommandTailShape { } } - trace_remaining("after positional", token_nodes.clone(), context.source()); - if let Some((syntax_type, _)) = signature.rest_positional { loop { if token_nodes.at_end_possible_ws() { @@ -402,7 +396,7 @@ impl ColorSyntax for CommandTailShape { context: &ExpandContext, ) -> Self::Info { let mut args = ColoringArgs::new(token_nodes.len()); - trace_remaining("nodes", token_nodes.clone(), context.source()); + trace_remaining("nodes", &token_nodes, context.source()); for (name, kind) in &signature.named { trace!(target: "nu::color_syntax", "looking for {} : {:?}", name, kind); @@ -497,7 +491,7 
@@ impl ColorSyntax for CommandTailShape { }; } - trace_remaining("after named", token_nodes.clone(), context.source()); + trace_remaining("after named", &token_nodes, context.source()); for arg in &signature.positional { trace!("Processing positional {:?}", arg); @@ -537,7 +531,7 @@ impl ColorSyntax for CommandTailShape { } } - trace_remaining("after positional", token_nodes.clone(), context.source()); + trace_remaining("after positional", &token_nodes, context.source()); if let Some((syntax_type, _)) = signature.rest_positional { loop { @@ -594,11 +588,11 @@ fn extract_mandatory( tokens: &mut hir::TokensIterator<'_>, source: &Text, span: Span, -) -> Result<(usize, Spanned), ShellError> { +) -> Result<(usize, Spanned), ParseError> { let flag = tokens.extract(|t| t.as_flag(name, source)); match flag { - None => Err(ShellError::argument_error( + None => Err(ParseError::argument_error( config.name.clone(), ArgumentError::MissingMandatoryFlag(name.to_string()), span, @@ -615,7 +609,7 @@ fn extract_optional( name: &str, tokens: &mut hir::TokensIterator<'_>, source: &Text, -) -> Result<(Option<(usize, Spanned)>), ShellError> { +) -> Result<(Option<(usize, Spanned)>), ParseError> { let flag = tokens.extract(|t| t.as_flag(name, source)); match flag { @@ -627,7 +621,7 @@ fn extract_optional( } } -pub fn trace_remaining(desc: &'static str, tail: hir::TokensIterator<'_>, source: &Text) { +pub fn trace_remaining(desc: &'static str, tail: &hir::TokensIterator<'_>, source: &Text) { trace!( target: "nu::parse", "{} = {:?}", diff --git a/src/prelude.rs b/src/prelude.rs index 6ff62c3240..1a87da2866 100644 --- a/src/prelude.rs +++ b/src/prelude.rs @@ -67,13 +67,14 @@ pub(crate) use crate::context::CommandRegistry; pub(crate) use crate::context::{AnchorLocation, Context}; pub(crate) use crate::data::base as value; pub(crate) use crate::data::meta::{ - tag_for_tagged_list, Span, Spanned, SpannedItem, Tag, Tagged, TaggedItem, + tag_for_tagged_list, HasFallibleSpan, HasSpan, Span, 
Spanned, SpannedItem, Tag, Tagged, + TaggedItem, }; pub(crate) use crate::data::types::ExtractType; pub(crate) use crate::data::{Primitive, Value}; pub(crate) use crate::env::host::handle_unexpected; pub(crate) use crate::env::Host; -pub(crate) use crate::errors::{CoerceInto, ShellError}; +pub(crate) use crate::errors::{CoerceInto, ParseError, ShellError}; pub(crate) use crate::parser::hir::SyntaxShape; pub(crate) use crate::parser::parse::parser::Number; pub(crate) use crate::parser::registry::Signature; @@ -82,7 +83,7 @@ pub(crate) use crate::shell::help_shell::HelpShell; pub(crate) use crate::shell::shell_manager::ShellManager; pub(crate) use crate::shell::value_shell::ValueShell; pub(crate) use crate::stream::{InputStream, OutputStream}; -pub(crate) use crate::traits::{HasTag, ToDebug}; +pub(crate) use crate::traits::{DebugFormatter, FormatDebug, HasTag, ToDebug}; pub(crate) use crate::Text; pub(crate) use async_stream::stream as async_stream; pub(crate) use bigdecimal::BigDecimal; @@ -93,9 +94,12 @@ pub(crate) use num_traits::cast::{FromPrimitive, ToPrimitive}; pub(crate) use num_traits::identities::Zero; pub(crate) use serde::Deserialize; pub(crate) use std::collections::VecDeque; +pub(crate) use std::fmt::Write; pub(crate) use std::future::Future; pub(crate) use std::sync::{Arc, Mutex}; +pub(crate) use itertools::Itertools; + pub trait FromInputStream { fn from_input_stream(self) -> OutputStream; } diff --git a/src/shell/helper.rs b/src/shell/helper.rs index 8f38a10002..5b46dbd4b9 100644 --- a/src/shell/helper.rs +++ b/src/shell/helper.rs @@ -3,7 +3,7 @@ use crate::parser::hir::syntax_shape::{color_fallible_syntax, FlatShape, Pipelin use crate::parser::hir::TokensIterator; use crate::parser::nom_input; use crate::parser::parse::token_tree::TokenNode; -use crate::{Span, Spanned, SpannedItem, Tag, Tagged, Text}; +use crate::{HasSpan, Spanned, SpannedItem, Tag, Tagged, Text}; use ansi_term::Color; use log::{log_enabled, trace}; use 
rustyline::completion::Completer; @@ -65,9 +65,7 @@ impl Highlighter for Helper { let mut tokens = TokensIterator::all(&tokens[..], v.span()); let text = Text::from(line); - let expand_context = self - .context - .expand_context(&text, Span::new(0, line.len() - 1)); + let expand_context = self.context.expand_context(&text); #[cfg(not(coloring_in_tokens))] let shapes = { @@ -86,16 +84,17 @@ impl Highlighter for Helper { let shapes = { // We just constructed a token list that only contains a pipeline, so it can't fail color_fallible_syntax(&PipelineShape, &mut tokens, &expand_context).unwrap(); - tokens.with_tracer(|_, tracer| tracer.finish()); + tokens.with_color_tracer(|_, tracer| tracer.finish()); tokens.state().shapes() }; - trace!(target: "nu::color_syntax", "{:#?}", tokens.tracer()); + trace!(target: "nu::color_syntax", "{:#?}", tokens.color_tracer()); - if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { + if log_enabled!(target: "nu::color_syntax", log::Level::Debug) { println!(""); - ptree::print_tree(&tokens.tracer().clone().print(Text::from(line))).unwrap(); + ptree::print_tree(&tokens.color_tracer().clone().print(Text::from(line))) + .unwrap(); println!(""); } diff --git a/src/traits.rs b/src/traits.rs index 677d019ad8..a33453ab23 100644 --- a/src/traits.rs +++ b/src/traits.rs @@ -1,14 +1,28 @@ use crate::prelude::*; -use std::fmt; +use derive_new::new; +use std::fmt::{self, Write}; -pub struct Debuggable<'a, T: ToDebug> { +pub struct Debuggable<'a, T: FormatDebug> { inner: &'a T, source: &'a str, } +impl FormatDebug for str { + fn fmt_debug(&self, f: &mut DebugFormatter, _source: &str) -> fmt::Result { + write!(f, "{}", self) + } +} + impl fmt::Display for Debuggable<'_, T> { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.inner.fmt_debug(f, self.source) + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.inner.fmt_debug( + &mut DebugFormatter::new( + f, + ansi_term::Color::White.bold(), + 
ansi_term::Color::Black.bold(), + ), + self.source, + ) } } @@ -16,13 +30,109 @@ pub trait HasTag { fn tag(&self) -> Tag; } -pub trait ToDebug: Sized { +#[derive(new)] +pub struct DebugFormatter<'me, 'args> { + formatter: &'me mut std::fmt::Formatter<'args>, + style: ansi_term::Style, + default_style: ansi_term::Style, +} + +impl<'me, 'args> DebugFormatter<'me, 'args> { + pub fn say<'debuggable>( + &mut self, + kind: &str, + debuggable: Debuggable<'debuggable, impl FormatDebug>, + ) -> std::fmt::Result { + write!(self, "{}", self.style.paint(kind))?; + write!(self, "{}", self.default_style.paint(" "))?; + write!( + self, + "{}", + self.default_style.paint(format!("{}", debuggable)) + ) + } + + pub fn say_str<'debuggable>( + &mut self, + kind: &str, + string: impl AsRef, + ) -> std::fmt::Result { + write!(self, "{}", self.style.paint(kind))?; + write!(self, "{}", self.default_style.paint(" "))?; + write!(self, "{}", self.default_style.paint(string.as_ref())) + } + + pub fn say_block( + &mut self, + kind: &str, + block: impl FnOnce(&mut Self) -> std::fmt::Result, + ) -> std::fmt::Result { + write!(self, "{}", self.style.paint(kind))?; + write!(self, "{}", self.default_style.paint(" "))?; + block(self) + } + + pub fn say_dict<'debuggable>( + &mut self, + kind: &str, + dict: indexmap::IndexMap<&str, String>, + ) -> std::fmt::Result { + write!(self, "{}", self.style.paint(kind))?; + write!(self, "{}", self.default_style.paint(" "))?; + + let last = dict.len() - 1; + + for (i, (key, value)) in dict.into_iter().enumerate() { + write!(self, "{}", self.default_style.paint(key))?; + write!(self, "{}", self.default_style.paint("=["))?; + write!(self, "{}", self.style.paint(value))?; + write!(self, "{}", self.default_style.paint("]"))?; + + if i != last { + write!(self, "{}", self.default_style.paint(" "))?; + } + } + + Ok(()) + } +} + +impl<'a, 'b> std::fmt::Write for DebugFormatter<'a, 'b> { + fn write_str(&mut self, s: &str) -> std::fmt::Result { + 
self.formatter.write_str(s) + } + + fn write_char(&mut self, c: char) -> std::fmt::Result { + self.formatter.write_char(c) + } + + fn write_fmt(self: &mut Self, args: std::fmt::Arguments<'_>) -> std::fmt::Result { + self.formatter.write_fmt(args) + } +} + +pub trait FormatDebug: std::fmt::Debug { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result; +} + +pub trait ToDebug: Sized + FormatDebug { + fn debug<'a>(&'a self, source: &'a str) -> Debuggable<'a, Self>; +} + +impl FormatDebug for Box { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + (&**self).fmt_debug(f, source) + } +} + +impl ToDebug for T +where + T: FormatDebug + Sized, +{ fn debug<'a>(&'a self, source: &'a str) -> Debuggable<'a, Self> { Debuggable { inner: self, source, } } - - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result; } From a3679f0f4e91efc7959f781c514bcfcbaf118281 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Sat, 2 Nov 2019 08:15:53 +1300 Subject: [PATCH 117/184] Make echo more flexible with data types --- src/commands/echo.rs | 39 ++++++++++++++++++--------------------- 1 file changed, 18 insertions(+), 21 deletions(-) diff --git a/src/commands/echo.rs b/src/commands/echo.rs index db4993d017..6e59d51f44 100644 --- a/src/commands/echo.rs +++ b/src/commands/echo.rs @@ -35,37 +35,34 @@ fn run( _registry: &CommandRegistry, _raw_args: &RawCommandArgs, ) -> Result { - let name = call_info.name_tag.clone(); - - let mut output = String::new(); - - let mut first = true; + let mut output = vec![]; if let Some(ref positional) = call_info.args.positional { for i in positional { match i.as_string() { Ok(s) => { - if !first { - output.push_str(" "); - } else { - first = false; + output.push(Ok(ReturnSuccess::Value( + Value::string(s).tagged(i.tag.clone()), + ))); + } + _ => match i { + Tagged { + item: Value::Table(table), + .. 
+ } => { + for item in table { + output.push(Ok(ReturnSuccess::Value(item.clone()))); + } } - - output.push_str(&s); - } - _ => { - return Err(ShellError::type_error( - "a string-compatible value", - i.tagged_type_name(), - )) - } + _ => { + output.push(Ok(ReturnSuccess::Value(i.clone()))); + } + }, } } } - let stream = VecDeque::from(vec![Ok(ReturnSuccess::Value( - Value::string(output).tagged(name), - ))]); + let stream = VecDeque::from(output); Ok(stream.to_output_stream()) } From 0ea35275447007949c4810df654a3b8194c9855c Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Sat, 2 Nov 2019 09:21:29 +1300 Subject: [PATCH 118/184] Update issue templates --- .github/ISSUE_TEMPLATE/bug_report.md | 30 +++++++++++++++++++++++ .github/ISSUE_TEMPLATE/feature_request.md | 20 +++++++++++++++ 2 files changed, 50 insertions(+) create mode 100644 .github/ISSUE_TEMPLATE/bug_report.md create mode 100644 .github/ISSUE_TEMPLATE/feature_request.md diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000000..84ab81641e --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,30 @@ +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: '' +assignees: '' + +--- + +**Describe the bug** +A clear and concise description of what the bug is. + +**To Reproduce** +Steps to reproduce the behavior: +1. +2. +3. + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Screenshots** +If applicable, add screenshots to help explain your problem. + +**Configuration (please complete the following information):** + - OS: [e.g. Windows] + - Version [e.g. 0.4.0] + - Optional features (if any) + +Add any other context about the problem here. 
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000000..bbcbbe7d61 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,20 @@ +--- +name: Feature request +about: Suggest an idea for this project +title: '' +labels: '' +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context or screenshots about the feature request here. From 763bbe1c01739564e687e5946361e2a70ab626a4 Mon Sep 17 00:00:00 2001 From: Flare576 Date: Fri, 1 Nov 2019 17:25:08 -0500 Subject: [PATCH 119/184] Updated Doc, error on bad input --- docs/commands/str.md | 14 ++++++------ src/plugins/str.rs | 54 +++++++++++++++++++------------------------- 2 files changed, 30 insertions(+), 38 deletions(-) diff --git a/docs/commands/str.md b/docs/commands/str.md index b9ddc8c2e4..d1ed3edb3d 100644 --- a/docs/commands/str.md +++ b/docs/commands/str.md @@ -26,13 +26,6 @@ Consumes either a single value or a table and converts the provided data to a st 0 │ X │ filesystem │ /home/tux/stuff/expr/stuff 1 │ │ filesystem │ / ━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ -> shells | str # --to-int -━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ - # │ │ name │ path -───┼───┼────────────┼──────────────────────────────── - 0 │ X │ filesystem │ /home/TUX/stuff/expr/stuff - 1 │ │ filesystem │ / -━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ > shells | str # --substring "21, 99" ━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ # │ │ name │ path @@ -47,4 +40,11 @@ Consumes either a single 
value or a table and converts the provided data to a st 0 │ X │ filesystem │ TUX/stuff/expr/stuff 1 │ │ filesystem │ ━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + +> echo "1, 2, 3" | split-row "," | str --to-int | sum +━━━━━━━━━ + +───────── + 6 +━━━━━━━━━ ``` diff --git a/src/plugins/str.rs b/src/plugins/str.rs index a0950b2ec0..0c28039b2d 100644 --- a/src/plugins/str.rs +++ b/src/plugins/str.rs @@ -9,7 +9,7 @@ enum Action { Downcase, Upcase, ToInteger, - Substring(String), + Substring(usize, usize), } pub type ColumnPath = Vec>; @@ -35,23 +35,14 @@ impl Str { let applied = match self.action.as_ref() { Some(Action::Downcase) => Value::string(input.to_ascii_lowercase()), Some(Action::Upcase) => Value::string(input.to_ascii_uppercase()), - Some(Action::Substring(s)) => { - // Index operator isn't perfect: https://users.rust-lang.org/t/how-to-get-a-substring-of-a-string/1351 - let no_spaces: String = s.chars().filter(|c| !c.is_whitespace()).collect(); - let v: Vec<&str> = no_spaces.split(',').collect(); - let start: usize = match v[0] { - "" => 0, - _ => v[0].parse().unwrap(), - }; - let end: usize = match v[1] { - "" => input.len(), - _ => cmp::min(v[1].parse().unwrap(), input.len()), - }; + Some(Action::Substring(s, e)) => { + let end: usize = cmp::min(*e, input.len()); + let start: usize = *s; if start > input.len() - 1 { Value::string("") - } else if start > end { - Value::string(input) } else { + // Index operator isn't perfect: + // https://users.rust-lang.org/t/how-to-get-a-substring-of-a-string/1351 Value::string(&input[start..end]) } } @@ -103,9 +94,20 @@ impl Str { } } - fn for_substring(&mut self, start_end: String) { - if self.permit() { - self.action = Some(Action::Substring(start_end)); + fn for_substring(&mut self, s: String) { + let v: Vec<&str> = s.split(',').collect(); + let start: usize = match v[0] { + "" => 0, + _ => v[0].trim().parse().unwrap(), + }; + let end: usize = match v[1] { + "" => usize::max_value().clone(), + _ => 
v[1].trim().parse().unwrap(), + }; + if start > end { + self.log_error("End must be greater than or equal to Start"); + } else if self.permit() { + self.action = Some(Action::Substring(start, end)); } else { self.log_error("can only apply one"); } @@ -688,7 +690,7 @@ mod tests { } #[test] - fn str_plugin_applies_substring_returns_string_if_start_exceeds_end() { + fn str_plugin_applies_substring_returns_error_if_start_exceeds_end() { let mut plugin = Str::new(); assert!(plugin @@ -697,17 +699,7 @@ mod tests { .with_named_parameter("substring", "3,1") .create() ) - .is_ok()); - - let subject = unstructured_sample_record("0123456789"); - let output = plugin.filter(subject).unwrap(); - - match output[0].as_ref().unwrap() { - ReturnSuccess::Value(Tagged { - item: Value::Primitive(Primitive::String(s)), - .. - }) => assert_eq!(*s, String::from("0123456789")), - _ => {} - } + .is_err()); + assert_eq!(plugin.error, Some("End must be greater than or equal to Start".to_string())); } } From 7801c03e2d6c71302b0fa8c8bf0fd3c3dbda8ddf Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Sat, 2 Nov 2019 13:36:21 +1300 Subject: [PATCH 120/184] plugin_nu_path --- src/cli.rs | 74 +++++++++++++++++++++++++++++------------------------- 1 file changed, 40 insertions(+), 34 deletions(-) diff --git a/src/cli.rs b/src/cli.rs index f050df41e7..3db8e12ddc 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -163,6 +163,8 @@ fn load_plugins(context: &mut Context) -> Result<(), ShellError> { require_literal_leading_dot: false, }; + set_path_from_config(); + for path in search_paths() { let mut pattern = path.to_path_buf(); @@ -472,6 +474,43 @@ fn chomp_newline(s: &str) -> &str { } } +fn set_path_from_config() { + let config = crate::data::config::read(Tag::unknown(), &None).unwrap(); + if config.contains_key("path") { + // Override the path with what they give us from config + let value = config.get("path"); + + match value { + Some(value) => match value { + Tagged { + item: Value::Table(table), + .. 
+ } => { + let mut paths = vec![]; + for val in table { + let path_str = val.as_string(); + match path_str { + Err(_) => {} + Ok(path_str) => { + paths.push(PathBuf::from(path_str)); + } + } + } + let path_os_string = std::env::join_paths(&paths); + match path_os_string { + Ok(path_os_string) => { + std::env::set_var("PATH", path_os_string); + } + Err(_) => {} + } + } + _ => {} + }, + None => {} + } + } +} + enum LineResult { Success(String), Error(String, ShellError), @@ -526,40 +565,7 @@ async fn process_line(readline: Result, ctx: &mut Context // Check the config to see if we need to update the path // TODO: make sure config is cached so we don't path this load every call - let config = crate::data::config::read(Tag::unknown(), &None).unwrap(); - if config.contains_key("path") { - // Override the path with what they give us from config - let value = config.get("path"); - - match value { - Some(value) => match value { - Tagged { - item: Value::Table(table), - .. - } => { - let mut paths = vec![]; - for val in table { - let path_str = val.as_string(); - match path_str { - Err(_) => {} - Ok(path_str) => { - paths.push(PathBuf::from(path_str)); - } - } - } - let path_os_string = std::env::join_paths(&paths); - match path_os_string { - Ok(path_os_string) => { - std::env::set_var("PATH", path_os_string); - } - Err(_) => {} - } - } - _ => {} - }, - None => {} - } - } + set_path_from_config(); loop { let item: Option = iter.next(); From e92d4b2ccba700b61560b86ad37d364bb251aa89 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Sat, 2 Nov 2019 14:47:14 +1300 Subject: [PATCH 121/184] Rename add to insert --- Cargo.toml | 4 ++-- src/plugins/{add.rs => insert.rs} | 26 +++++++++++++++----------- tests/tests.rs | 4 ++-- 3 files changed, 19 insertions(+), 15 deletions(-) rename src/plugins/{add.rs => insert.rs} (83%) diff --git a/Cargo.toml b/Cargo.toml index f2ad5073fa..8dbf9eb628 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -130,8 +130,8 @@ name = "nu_plugin_embed" path = 
"src/plugins/embed.rs" [[bin]] -name = "nu_plugin_add" -path = "src/plugins/add.rs" +name = "nu_plugin_insert" +path = "src/plugins/insert.rs" [[bin]] name = "nu_plugin_edit" diff --git a/src/plugins/add.rs b/src/plugins/insert.rs similarity index 83% rename from src/plugins/add.rs rename to src/plugins/insert.rs index 5bda9d0593..9041abc36b 100644 --- a/src/plugins/add.rs +++ b/src/plugins/insert.rs @@ -6,19 +6,19 @@ use nu::{ pub type ColumnPath = Vec>; -struct Add { +struct Insert { field: Option, value: Option, } -impl Add { - fn new() -> Add { - Add { +impl Insert { + fn new() -> Insert { + Insert { field: None, value: None, } } - fn add(&self, value: Tagged) -> Result, ShellError> { + fn insert(&self, value: Tagged) -> Result, ShellError> { let value_tag = value.tag(); match (value.item, self.value.clone()) { (obj @ Value::Row(_), Some(v)) => match &self.field { @@ -50,11 +50,15 @@ impl Add { } } -impl Plugin for Add { +impl Plugin for Insert { fn config(&mut self) -> Result { - Ok(Signature::build("add") - .desc("Add a new column to the table.") - .required("column", SyntaxShape::ColumnPath, "the column name to add") + Ok(Signature::build("insert") + .desc("Insert a new column to the table.") + .required( + "column", + SyntaxShape::ColumnPath, + "the column name to insert", + ) .required( "value", SyntaxShape::String, @@ -86,10 +90,10 @@ impl Plugin for Add { } fn filter(&mut self, input: Tagged) -> Result, ShellError> { - Ok(vec![ReturnSuccess::value(self.add(input)?)]) + Ok(vec![ReturnSuccess::value(self.insert(input)?)]) } } fn main() { - serve_plugin(&mut Add::new()); + serve_plugin(&mut Insert::new()); } diff --git a/tests/tests.rs b/tests/tests.rs index 14552a41ee..caaeb2ac86 100644 --- a/tests/tests.rs +++ b/tests/tests.rs @@ -42,12 +42,12 @@ fn external_has_correct_quotes() { } #[test] -fn add_plugin() { +fn insert_plugin() { let actual = nu!( cwd: "tests/fixtures/formats", h::pipeline( r#" open cargo_sample.toml - | add dev-dependencies.newdep "1" + 
| insert dev-dependencies.newdep "1" | get dev-dependencies.newdep | echo $it "# From 136c8acba62aa65ca102cbb3d47428b5124a25a7 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Sat, 2 Nov 2019 14:48:18 +1300 Subject: [PATCH 122/184] Update README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 5e482bc29e..9c328ee807 100644 --- a/README.md +++ b/README.md @@ -248,7 +248,6 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat ## Filters on tables (structured data) | command | description | | ------------- | ------------- | -| add column-or-column-path value | Add a new column to the table | | append row-data | Append a row to the end of the table | | count | Show the total number of rows | | edit column-or-column-path value | Edit an existing column to have a new value | @@ -257,6 +256,7 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat | get column-or-column-path | Open column and get data from the corresponding cells | | group-by column | Creates a new table with the data from the table rows grouped by the column given | | inc (column-or-column-path) | Increment a value or version. 
Optionally use the column of a table | +| insert column-or-column-path value | Insert a new column to the table | | last amount | Show only the last number of rows | | nth row-number | Return only the selected row | | pick ...columns | Down-select table to only these columns | From 129ee45944e660c21a4963bdedf12c4c0440f121 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Sat, 2 Nov 2019 16:41:58 +1300 Subject: [PATCH 123/184] Add initial support for env vars --- src/cli.rs | 33 ++++++++++++++++++++++++++++++--- src/evaluate/evaluator.rs | 4 +++- 2 files changed, 33 insertions(+), 4 deletions(-) diff --git a/src/cli.rs b/src/cli.rs index 3db8e12ddc..b5a58c2ca2 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -163,7 +163,7 @@ fn load_plugins(context: &mut Context) -> Result<(), ShellError> { require_literal_leading_dot: false, }; - set_path_from_config(); + set_env_from_config(); for path in search_paths() { let mut pattern = path.to_path_buf(); @@ -474,8 +474,35 @@ fn chomp_newline(s: &str) -> &str { } } -fn set_path_from_config() { +fn set_env_from_config() { let config = crate::data::config::read(Tag::unknown(), &None).unwrap(); + + if config.contains_key("env") { + // Clear the existing vars, we're about to replace them + for (key, _value) in std::env::vars() { + std::env::remove_var(key); + } + + let value = config.get("env"); + + match value { + Some(Tagged { + item: Value::Row(r), + .. 
+ }) => { + for (k, v) in &r.entries { + match v.as_string() { + Ok(value_string) => { + std::env::set_var(k, value_string); + } + _ => {} + } + } + } + _ => {} + } + } + if config.contains_key("path") { // Override the path with what they give us from config let value = config.get("path"); @@ -565,7 +592,7 @@ async fn process_line(readline: Result, ctx: &mut Context // Check the config to see if we need to update the path // TODO: make sure config is cached so we don't path this load every call - set_path_from_config(); + set_env_from_config(); loop { let item: Option = iter.next(); diff --git a/src/evaluate/evaluator.rs b/src/evaluate/evaluator.rs index 9313d0fe5c..ceb555c339 100644 --- a/src/evaluate/evaluator.rs +++ b/src/evaluate/evaluator.rs @@ -169,7 +169,9 @@ fn evaluate_reference( x if x == "nu:env" => { let mut dict = TaggedDictBuilder::new(&tag); for v in std::env::vars() { - dict.insert(v.0, Value::string(v.1)); + if v.0 != "PATH" && v.0 != "Path" { + dict.insert(v.0, Value::string(v.1)); + } } Ok(dict.into_tagged_value()) } From 2260b3dda3ab24f36b36e04b3910021868f12ee7 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Sat, 2 Nov 2019 17:25:20 +1300 Subject: [PATCH 124/184] Update str.rs --- src/plugins/str.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/plugins/str.rs b/src/plugins/str.rs index 0c28039b2d..328cbd7c14 100644 --- a/src/plugins/str.rs +++ b/src/plugins/str.rs @@ -700,6 +700,9 @@ mod tests { .create() ) .is_err()); - assert_eq!(plugin.error, Some("End must be greater than or equal to Start".to_string())); + assert_eq!( + plugin.error, + Some("End must be greater than or equal to Start".to_string()) + ); } } From f589d3c795de3c7805bc3d842ba6b57b54ba11b4 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Sun, 3 Nov 2019 07:49:28 +1300 Subject: [PATCH 125/184] Fix 907 and improve substring --- src/commands/get.rs | 15 +++------------ src/plugins/str.rs | 10 +++++++--- 2 files changed, 10 insertions(+), 15 
deletions(-) diff --git a/src/commands/get.rs b/src/commands/get.rs index cda637495e..08474ac2c1 100644 --- a/src/commands/get.rs +++ b/src/commands/get.rs @@ -10,7 +10,6 @@ pub struct Get; #[derive(Deserialize)] pub struct GetArgs { member: ColumnPath, - rest: Vec, } impl WholeStreamCommand for Get { @@ -118,13 +117,10 @@ pub fn get_column_path( } pub fn get( - GetArgs { - member, - rest: fields, - }: GetArgs, + GetArgs { member }: GetArgs, RunnableContext { input, .. }: RunnableContext, ) -> Result { - trace!("get {:?} {:?}", member, fields); + trace!("get {:?}", member); let stream = input .values @@ -133,12 +129,7 @@ pub fn get( let member = vec![member.clone()]; - let column_paths = vec![&member, &fields] - .into_iter() - .flatten() - .collect::>(); - - for path in column_paths { + for path in member { let res = get_column_path(&path, &item); match res { diff --git a/src/plugins/str.rs b/src/plugins/str.rs index cab9c6a96a..7fb694d3a5 100644 --- a/src/plugins/str.rs +++ b/src/plugins/str.rs @@ -41,9 +41,13 @@ impl Str { if start > input.len() - 1 { Value::string("") } else { - // Index operator isn't perfect: - // https://users.rust-lang.org/t/how-to-get-a-substring-of-a-string/1351 - Value::string(&input[start..end]) + Value::string( + &input + .chars() + .skip(start) + .take(end - start) + .collect::(), + ) } } Some(Action::ToInteger) => match input.trim() { From cd30fac050220e84bfb36e61decb4f509ffa8472 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Sun, 3 Nov 2019 08:57:28 +1300 Subject: [PATCH 126/184] Approach fix differently --- src/commands/get.rs | 15 ++++++-- src/format/table.rs | 87 ++++++++++++++++++++++++++++++++++----------- 2 files changed, 78 insertions(+), 24 deletions(-) diff --git a/src/commands/get.rs b/src/commands/get.rs index 08474ac2c1..cda637495e 100644 --- a/src/commands/get.rs +++ b/src/commands/get.rs @@ -10,6 +10,7 @@ pub struct Get; #[derive(Deserialize)] pub struct GetArgs { member: ColumnPath, + rest: Vec, } impl 
WholeStreamCommand for Get { @@ -117,10 +118,13 @@ pub fn get_column_path( } pub fn get( - GetArgs { member }: GetArgs, + GetArgs { + member, + rest: fields, + }: GetArgs, RunnableContext { input, .. }: RunnableContext, ) -> Result { - trace!("get {:?}", member); + trace!("get {:?} {:?}", member, fields); let stream = input .values @@ -129,7 +133,12 @@ pub fn get( let member = vec![member.clone()]; - for path in member { + let column_paths = vec![&member, &fields] + .into_iter() + .flatten() + .collect::>(); + + for path in column_paths { let res = get_column_path(&path, &item); match res { diff --git a/src/format/table.rs b/src/format/table.rs index a59e1adafb..3ed5937b4b 100644 --- a/src/format/table.rs +++ b/src/format/table.rs @@ -23,11 +23,20 @@ enum TableMode { impl TableView { fn merge_descriptors(values: &[Tagged]) -> Vec { - let mut ret = vec![]; + let mut ret: Vec = vec![]; + let value_column = "".to_string(); for value in values { - for desc in value.data_descriptors() { - if !ret.contains(&desc) { - ret.push(desc); + let descs = value.data_descriptors(); + + if descs.len() == 0 { + if !ret.contains(&value_column) { + ret.push("".to_string()); + } + } else { + for desc in value.data_descriptors() { + if !ret.contains(&desc) { + ret.push(desc); + } } } } @@ -48,23 +57,59 @@ impl TableView { let mut entries = vec![]; for (idx, value) in values.iter().enumerate() { - let mut row: Vec<(String, &'static str)> = match value { - Tagged { - item: Value::Row(..), - .. - } => headers - .iter() - .enumerate() - .map(|(i, d)| { - let data = value.get_data(d); - return ( - data.borrow().format_leaf(Some(&headers[i])), - data.borrow().style_leaf(), - ); - }) - .collect(), - x => vec![(x.format_leaf(None), x.style_leaf())], - }; + // let mut row: Vec<(String, &'static str)> = match value { + // Tagged { + // item: Value::Row(..), + // .. 
+ // } => headers + // .iter() + // .enumerate() + // .map(|(i, d)| { + // let data = value.get_data(d); + // return ( + // data.borrow().format_leaf(Some(&headers[i])), + // data.borrow().style_leaf(), + // ); + // }) + // .collect(), + // x => vec![(x.format_leaf(None), x.style_leaf())], + // }; + + let mut row: Vec<(String, &'static str)> = headers + .iter() + .enumerate() + .map(|(i, d)| { + if d == "" { + match value { + Tagged { + item: Value::Row(..), + .. + } => ( + Value::nothing().format_leaf(None), + Value::nothing().style_leaf(), + ), + _ => (value.format_leaf(None), value.style_leaf()), + } + } else { + match value { + Tagged { + item: Value::Row(..), + .. + } => { + let data = value.get_data(d); + ( + data.borrow().format_leaf(Some(&headers[i])), + data.borrow().style_leaf(), + ) + } + _ => ( + Value::nothing().format_leaf(None), + Value::nothing().style_leaf(), + ), + } + } + }) + .collect(); if values.len() > 1 { // Indices are black, bold, right-aligned: From 243df639782bec19353e628efc3514a3fe9e3b8a Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Sun, 3 Nov 2019 12:22:30 +1300 Subject: [PATCH 127/184] Move config to async_stream --- src/commands/config.rs | 139 ++++++++++++++++++++--------------------- 1 file changed, 68 insertions(+), 71 deletions(-) diff --git a/src/commands/config.rs b/src/commands/config.rs index a85920e455..3f2937cb17 100644 --- a/src/commands/config.rs +++ b/src/commands/config.rs @@ -4,7 +4,6 @@ use crate::errors::ShellError; use crate::parser::hir::SyntaxShape; use crate::parser::registry::{self}; use crate::prelude::*; -use std::iter::FromIterator; use std::path::PathBuf; pub struct Config; @@ -64,76 +63,74 @@ pub fn config( ) -> Result { let name_span = name.clone(); - let configuration = if let Some(supplied) = load { - Some(supplied.item().clone()) - } else { - None + let stream = async_stream! 
{ + let configuration = if let Some(supplied) = load { + Some(supplied.item().clone()) + } else { + None + }; + + let mut result = crate::data::config::read(name_span, &configuration)?; + + if let Some(v) = get { + let key = v.to_string(); + let value = result + .get(&key) + .ok_or_else(|| ShellError::labeled_error("Missing key in config", "key", v.tag()))?; + + match value { + Tagged { + item: Value::Table(list), + .. + } => { + for l in list { + yield ReturnSuccess::value(l.clone()); + } + } + x => yield ReturnSuccess::value(x.clone()), + } + } + else if let Some((key, value)) = set { + result.insert(key.to_string(), value.clone()); + + config::write(&result, &configuration)?; + + yield ReturnSuccess::value(Value::Row(result.into()).tagged(value.tag())); + } + else if let Tagged { item: true, tag } = clear { + result.clear(); + + config::write(&result, &configuration)?; + + yield ReturnSuccess::value(Value::Row(result.into()).tagged(tag)); + + return; + } + else if let Tagged { item: true, tag } = path { + let path = config::default_path_for(&configuration)?; + + yield ReturnSuccess::value(Value::Primitive(Primitive::Path(path)).tagged(tag)); + } + else if let Some(v) = remove { + let key = v.to_string(); + + if result.contains_key(&key) { + result.swap_remove(&key); + config::write(&result, &configuration).unwrap(); + } else { + yield Err(ShellError::labeled_error( + "Key does not exist in config", + "key", + v.tag(), + )); + } + + yield ReturnSuccess::value(Value::Row(result.into()).tagged(v.tag())); + } + else { + yield ReturnSuccess::value(Value::Row(result.into()).tagged(name)); + } }; - let mut result = crate::data::config::read(name_span, &configuration)?; - - if let Some(v) = get { - let key = v.to_string(); - let value = result - .get(&key) - .ok_or_else(|| ShellError::labeled_error("Missing key in config", "key", v.tag()))?; - - let mut results = VecDeque::new(); - - match value { - Tagged { - item: Value::Table(list), - .. 
- } => { - for l in list { - results.push_back(ReturnSuccess::value(l.clone())); - } - } - x => results.push_back(ReturnSuccess::value(x.clone())), - } - - return Ok(results.to_output_stream()); - } - - if let Some((key, value)) = set { - result.insert(key.to_string(), value.clone()); - - config::write(&result, &configuration)?; - - return Ok(stream![Value::Row(result.into()).tagged(value.tag())].from_input_stream()); - } - - if let Tagged { item: true, tag } = clear { - result.clear(); - - config::write(&result, &configuration)?; - - return Ok(stream![Value::Row(result.into()).tagged(tag)].from_input_stream()); - } - - if let Tagged { item: true, tag } = path { - let path = config::default_path_for(&configuration)?; - - return Ok(stream![Value::Primitive(Primitive::Path(path)).tagged(tag)].from_input_stream()); - } - - if let Some(v) = remove { - let key = v.to_string(); - - if result.contains_key(&key) { - result.swap_remove(&key); - config::write(&result, &configuration)?; - } else { - return Err(ShellError::labeled_error( - "Key does not exist in config", - "key", - v.tag(), - )); - } - - let obj = VecDeque::from_iter(vec![Value::Row(result.into()).tagged(v.tag())]); - return Ok(obj.from_input_stream()); - } - - return Ok(vec![Value::Row(result.into()).tagged(name)].into()); + Ok(stream.to_output_stream()) } From c31709494777fb7879507f83170121d8e53c1fef Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Sun, 3 Nov 2019 12:43:15 +1300 Subject: [PATCH 128/184] Add support for config --set_into --- src/commands/config.rs | 41 +++++++++++++++++++++++++++++++++++++++-- 1 file changed, 39 insertions(+), 2 deletions(-) diff --git a/src/commands/config.rs b/src/commands/config.rs index 3f2937cb17..3c41cceae4 100644 --- a/src/commands/config.rs +++ b/src/commands/config.rs @@ -12,6 +12,7 @@ pub struct Config; pub struct ConfigArgs { load: Option>, set: Option<(Tagged, Tagged)>, + set_into: Option>, get: Option>, clear: Tagged, remove: Option>, @@ -30,7 +31,16 @@ impl 
WholeStreamCommand for Config { SyntaxShape::Path, "load the config from the path give", ) - .named("set", SyntaxShape::Any, "set a value in the config") + .named( + "set", + SyntaxShape::Any, + "set a value in the config, eg) --set [key value]", + ) + .named( + "set_into", + SyntaxShape::Member, + "sets a variable from values in the pipeline", + ) .named("get", SyntaxShape::Any, "get a value from the config") .named("remove", SyntaxShape::Any, "remove a value from the config") .switch("clear", "clear the config") @@ -54,12 +64,13 @@ pub fn config( ConfigArgs { load, set, + set_into, get, clear, remove, path, }: ConfigArgs, - RunnableContext { name, .. }: RunnableContext, + RunnableContext { name, input, .. }: RunnableContext, ) -> Result { let name_span = name.clone(); @@ -97,6 +108,32 @@ pub fn config( yield ReturnSuccess::value(Value::Row(result.into()).tagged(value.tag())); } + else if let Some(v) = set_into { + let rows: Vec> = input.values.collect().await; + let key = v.to_string(); + + if rows.len() == 0 { + yield Err(ShellError::labeled_error("No values given for set_into", "needs value(s) from pipeline", v.tag())); + } else if rows.len() == 1 { + // A single value + let value = &rows[0]; + + result.insert(key.to_string(), value.clone()); + + config::write(&result, &configuration)?; + + yield ReturnSuccess::value(Value::Row(result.into()).tagged(name)); + } else { + // Take in the pipeline as a table + let value = Value::Table(rows).tagged(name.clone()); + + result.insert(key.to_string(), value.clone()); + + config::write(&result, &configuration)?; + + yield ReturnSuccess::value(Value::Row(result.into()).tagged(name)); + } + } else if let Tagged { item: true, tag } = clear { result.clear(); From f57489ed923651478498a667fc3d0ec2f3a16572 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Sat, 2 Nov 2019 21:05:27 -0500 Subject: [PATCH 129/184] get command tests already present and move to their own. 
--- tests/commands_test.rs | 74 ------------------------------------------ 1 file changed, 74 deletions(-) diff --git a/tests/commands_test.rs b/tests/commands_test.rs index 87e1182b10..1c456b52c7 100644 --- a/tests/commands_test.rs +++ b/tests/commands_test.rs @@ -166,80 +166,6 @@ fn last_gets_last_row_when_no_amount_given() { }) } -#[test] -fn get() { - Playground::setup("get_test_1", |dirs, sandbox| { - sandbox.with_files(vec![FileWithContent( - "sample.toml", - r#" - nu_party_venue = "zion" - "#, - )]); - - let actual = nu!( - cwd: dirs.test(), h::pipeline( - r#" - open sample.toml - | get nu_party_venue - | echo $it - "# - )); - - assert_eq!(actual, "zion"); - }) -} - -#[test] -fn get_more_than_one_member() { - Playground::setup("get_test_2", |dirs, sandbox| { - sandbox.with_files(vec![FileWithContent( - "sample.toml", - r#" - [[fortune_tellers]] - name = "Andrés N. Robalino" - arepas = 1 - broken_builds = 0 - - [[fortune_tellers]] - name = "Jonathan Turner" - arepas = 1 - broken_builds = 1 - - [[fortune_tellers]] - name = "Yehuda Katz" - arepas = 1 - broken_builds = 1 - "#, - )]); - - let actual = nu!( - cwd: dirs.test(), h::pipeline( - r#" - open sample.toml - | get fortune_tellers - | get arepas broken_builds - | sum - | echo $it - "# - )); - - assert_eq!(actual, "5"); - }) -} - -#[test] -fn get_requires_at_least_one_member() { - Playground::setup("first_test_3", |dirs, sandbox| { - sandbox.with_files(vec![EmptyFile("andres.txt")]); - - let actual = nu_error!( - cwd: dirs.test(), "ls | get" - ); - - assert!(actual.contains("requires member parameter")); - }) -} - #[test] fn lines() { let actual = nu!( From e51d9d0935e295b19b1fa481e795ee995d4cb2b6 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Sun, 3 Nov 2019 16:12:36 +1300 Subject: [PATCH 130/184] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 9c328ee807..a138f829e7 100644 --- a/README.md +++ b/README.md @@ -32,7 +32,7 @@ 
Try it in Gitpod. ## Local -Up-to-date installation instructions can be found in the [installation chapter of the book](https://book.nushell.sh/en/installation). +Up-to-date installation instructions can be found in the [installation chapter of the book](https://book.nushell.sh/en/installation). **Windows users**: please note that Nu works on Windows 10 and does not currently have Windows 7 support. To build Nu, you will need to use the **nightly** version of the compiler. From e43e906f86a734947cf103873cfa433dd99477de Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Sun, 3 Nov 2019 16:13:00 +1300 Subject: [PATCH 131/184] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index a138f829e7..66748a5f8e 100644 --- a/README.md +++ b/README.md @@ -34,7 +34,7 @@ Try it in Gitpod. Up-to-date installation instructions can be found in the [installation chapter of the book](https://book.nushell.sh/en/installation). **Windows users**: please note that Nu works on Windows 10 and does not currently have Windows 7 support. -To build Nu, you will need to use the **nightly** version of the compiler. +To build Nu, you will need to use the **beta** version of the compiler. 
Required dependencies: From 3b42655b517658f86bfd331eeeb80c3630bd53a8 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Sun, 3 Nov 2019 17:04:59 +1300 Subject: [PATCH 132/184] Make column logic in from-ssv optional --- src/commands/from_ssv.rs | 126 ++++++++++++++++++++++++++------------- 1 file changed, 83 insertions(+), 43 deletions(-) diff --git a/src/commands/from_ssv.rs b/src/commands/from_ssv.rs index aaf6018fb7..090bab508f 100644 --- a/src/commands/from_ssv.rs +++ b/src/commands/from_ssv.rs @@ -7,6 +7,8 @@ pub struct FromSSV; #[derive(Deserialize)] pub struct FromSSVArgs { headerless: bool, + #[serde(rename(deserialize = "aligned-columns"))] + aligned_columns: bool, #[serde(rename(deserialize = "minimum-spaces"))] minimum_spaces: Option>, } @@ -22,6 +24,7 @@ impl WholeStreamCommand for FromSSV { fn signature(&self) -> Signature { Signature::build(STRING_REPRESENTATION) .switch("headerless", "don't treat the first row as column names") + .switch("aligned-columns", "assume columns are aligned") .named( "minimum-spaces", SyntaxShape::Int, @@ -45,58 +48,94 @@ impl WholeStreamCommand for FromSSV { fn string_to_table( s: &str, headerless: bool, + aligned_columns: bool, split_at: usize, ) -> Option>> { let mut lines = s.lines().filter(|l| !l.trim().is_empty()); let separator = " ".repeat(std::cmp::max(split_at, 1)); - let headers_raw = lines.next()?; + if aligned_columns { + let headers_raw = lines.next()?; - let headers = headers_raw - .trim() - .split(&separator) - .map(str::trim) - .filter(|s| !s.is_empty()) - .map(|s| (headers_raw.find(s).unwrap(), s.to_owned())); + let headers = headers_raw + .trim() + .split(&separator) + .map(str::trim) + .filter(|s| !s.is_empty()) + .map(|s| (headers_raw.find(s).unwrap(), s.to_owned())); - let columns = if headerless { - headers - .enumerate() - .map(|(header_no, (string_index, _))| { - (string_index, format!("Column{}", header_no + 1)) - }) - .collect::>() - } else { - headers.collect::>() - }; + let columns = if 
headerless { + headers + .enumerate() + .map(|(header_no, (string_index, _))| { + (string_index, format!("Column{}", header_no + 1)) + }) + .collect::>() + } else { + headers.collect::>() + }; - Some( - lines - .map(|l| { - columns - .iter() - .enumerate() - .filter_map(|(i, (start, col))| { - (match columns.get(i + 1) { - Some((end, _)) => l.get(*start..*end), - None => l.get(*start..), + Some( + lines + .map(|l| { + columns + .iter() + .enumerate() + .filter_map(|(i, (start, col))| { + (match columns.get(i + 1) { + Some((end, _)) => l.get(*start..*end), + None => l.get(*start..), + }) + .and_then(|s| Some((col.clone(), String::from(s.trim())))) }) - .and_then(|s| Some((col.clone(), String::from(s.trim())))) - }) - .collect() - }) - .collect(), - ) + .collect() + }) + .collect(), + ) + } else { + let headers = lines + .next()? + .split(&separator) + .map(|s| s.trim()) + .filter(|s| !s.is_empty()) + .map(|s| s.to_owned()) + .collect::>(); + + let header_row = if headerless { + (1..=headers.len()) + .map(|i| format!("Column{}", i)) + .collect::>() + } else { + headers + }; + + Some( + lines + .map(|l| { + header_row + .iter() + .zip( + l.split(&separator) + .map(|s| s.trim()) + .filter(|s| !s.is_empty()), + ) + .map(|(a, b)| (String::from(a), String::from(b))) + .collect() + }) + .collect(), + ) + } } fn from_ssv_string_to_value( s: &str, headerless: bool, + aligned_columns: bool, split_at: usize, tag: impl Into, ) -> Option> { let tag = tag.into(); - let rows = string_to_table(s, headerless, split_at)? + let rows = string_to_table(s, headerless, aligned_columns, split_at)? .iter() .map(|row| { let mut tagged_dict = TaggedDictBuilder::new(&tag); @@ -116,6 +155,7 @@ fn from_ssv_string_to_value( fn from_ssv( FromSSVArgs { headerless, + aligned_columns, minimum_spaces, }: FromSSVArgs, RunnableContext { input, name, .. 
}: RunnableContext, @@ -146,7 +186,7 @@ fn from_ssv( } } - match from_ssv_string_to_value(&concat_string, headerless, split_at, name.clone()) { + match from_ssv_string_to_value(&concat_string, headerless, aligned_columns, split_at, name.clone()) { Some(x) => match x { Tagged { item: Value::Table(list), ..} => { for l in list { yield ReturnSuccess::value(l) } @@ -185,7 +225,7 @@ mod tests { 3 4 "#; - let result = string_to_table(input, false, 1); + let result = string_to_table(input, false, true, 1); assert_eq!( result, Some(vec![ @@ -202,7 +242,7 @@ mod tests { 1 2 "#; - let result = string_to_table(input, false, 1); + let result = string_to_table(input, false, true, 1); assert_eq!( result, Some(vec![vec![owned("a", "1")], vec![owned("a", "2")]]) @@ -216,7 +256,7 @@ mod tests { 1 2 3 4 "#; - let result = string_to_table(input, true, 1); + let result = string_to_table(input, true, true, 1); assert_eq!( result, Some(vec![ @@ -229,7 +269,7 @@ mod tests { #[test] fn it_returns_none_given_an_empty_string() { let input = ""; - let result = string_to_table(input, true, 1); + let result = string_to_table(input, true, true, 1); assert!(result.is_none()); } @@ -241,7 +281,7 @@ mod tests { 3 four "#; - let result = string_to_table(input, false, 3); + let result = string_to_table(input, false, true, 3); assert_eq!( result, Some(vec![ @@ -263,7 +303,7 @@ mod tests { let trimmed = |s: &str| s.trim() == s; - let result = string_to_table(input, false, 2).unwrap(); + let result = string_to_table(input, false, true, 2).unwrap(); assert!(result .iter() .all(|row| row.iter().all(|(a, b)| trimmed(a) && trimmed(b)))) @@ -278,7 +318,7 @@ mod tests { val7 val8 "#; - let result = string_to_table(input, false, 2).unwrap(); + let result = string_to_table(input, false, true, 2).unwrap(); assert_eq!( result, vec![ @@ -308,7 +348,7 @@ mod tests { val1 val2 trailing value that should be included "#; - let result = string_to_table(input, false, 2).unwrap(); + let result = string_to_table(input, 
false, true, 2).unwrap(); assert_eq!( result, vec![vec![ From ffb2fedca9d73797bd9b728a9bbc3855369c8870 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Sun, 3 Nov 2019 18:24:11 +1300 Subject: [PATCH 133/184] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 66748a5f8e..0c13b8c1e2 100644 --- a/README.md +++ b/README.md @@ -32,7 +32,7 @@ Try it in Gitpod. ## Local -Up-to-date installation instructions can be found in the [installation chapter of the book](https://book.nushell.sh/en/installation). **Windows users**: please note that Nu works on Windows 10 and does not currently have Windows 7 support. +Up-to-date installation instructions can be found in the [installation chapter of the book](https://book.nushell.sh/en/installation). **Windows users**: please note that Nu works on Windows 10 and does not currently have Windows 7/8.1 support. To build Nu, you will need to use the **beta** version of the compiler. From 4a0ec1207cb6bc4133fa8a4c0de99f37215ff9c0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Sun, 3 Nov 2019 03:49:06 -0500 Subject: [PATCH 134/184] Preserve anchored meta data for all get queries in the pipeline --- src/commands/get.rs | 12 +++++++++--- src/data/meta.rs | 21 +++++++++++---------- 2 files changed, 20 insertions(+), 13 deletions(-) diff --git a/src/commands/get.rs b/src/commands/get.rs index cda637495e..0176b215dc 100644 --- a/src/commands/get.rs +++ b/src/commands/get.rs @@ -97,7 +97,7 @@ pub fn get_column_path( let res = match value { Ok(fetched) => match fetched { - Some(Tagged { item: v, tag }) => Ok((v.clone()).tagged(&tag)), + Some(Tagged { item: v, .. }) => Ok((v.clone()).tagged(&obj.tag)), None => match obj { // If its None check for certain values. Tagged { @@ -147,8 +147,14 @@ pub fn get( item: Value::Table(rows), .. 
} => { - for item in rows { - result.push_back(ReturnSuccess::value(item.clone())); + for row in rows { + result.push_back(ReturnSuccess::value( + Tagged { + item: row.item, + tag: Tag::from(&item.tag), + } + .map_anchored(&item.tag.anchor), + )) } } other => result diff --git a/src/data/meta.rs b/src/data/meta.rs index 2017558cd2..6c9294c573 100644 --- a/src/data/meta.rs +++ b/src/data/meta.rs @@ -105,6 +105,17 @@ impl Tagged { mapped.tagged(tag) } + pub fn map_anchored(self, anchor: &Option) -> Tagged { + let mut tag = self.tag; + + tag.anchor = anchor.clone(); + + Tagged { + item: self.item, + tag: tag, + } + } + pub fn tag(&self) -> Tag { self.tag.clone() } @@ -418,16 +429,6 @@ impl Span { self.slice(source).to_string().spanned(*self) } - /* - pub fn unknown_with_uuid(uuid: Uuid) -> Span { - Span { - start: 0, - end: 0, - source: Some(uuid), - } - } - */ - pub fn start(&self) -> usize { self.start } From 6ea8e42331f25df9064c2e9727afded33dab33be Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Fri, 1 Nov 2019 16:19:46 -0500 Subject: [PATCH 135/184] Move column paths to support broader value types. 
--- src/commands/get.rs | 10 +- src/data/base.rs | 126 +++++++----------- src/parser/deserializer.rs | 6 +- .../hir/syntax_shape/expression/number.rs | 19 ++- .../syntax_shape/expression/variable_path.rs | 10 ++ src/parser/hir/tokens_iterator/tests.rs | 16 +++ src/parser/parse/token_tree.rs | 10 ++ src/plugins/edit.rs | 2 +- src/plugins/inc.rs | 20 +-- src/plugins/insert.rs | 13 +- src/plugins/str.rs | 35 +++-- src/utils.rs | 4 +- tests/command_get_tests.rs | 53 +++++--- 13 files changed, 201 insertions(+), 123 deletions(-) create mode 100644 src/parser/hir/tokens_iterator/tests.rs diff --git a/src/commands/get.rs b/src/commands/get.rs index 0176b215dc..f5db529c6f 100644 --- a/src/commands/get.rs +++ b/src/commands/get.rs @@ -44,7 +44,7 @@ impl WholeStreamCommand for Get { } } -pub type ColumnPath = Vec>; +pub type ColumnPath = Vec>; pub fn get_column_path( path: &ColumnPath, @@ -67,7 +67,13 @@ pub fn get_column_path( return ShellError::labeled_error_with_secondary( "Row not found", - format!("There isn't a row indexed at '{}'", **column_path_tried), + format!( + "There isn't a row indexed at '{}'", + match &*column_path_tried { + Value::Primitive(primitive) => primitive.format(None), + _ => String::from(""), + } + ), column_path_tried.tag(), format!("The table only has {} rows (0..{})", total, total - 1), end_tag, diff --git a/src/data/base.rs b/src/data/base.rs index 17691e24be..094d425ee6 100644 --- a/src/data/base.rs +++ b/src/data/base.rs @@ -409,25 +409,17 @@ impl Tagged { ValueDebug { value: self } } - pub fn as_column_path(&self) -> Result>>, ShellError> { - let mut out: Vec> = vec![]; - + pub fn as_column_path(&self) -> Result>>, ShellError> { match &self.item { - Value::Table(table) => { - for item in table { - out.push(item.as_string()?.tagged(&item.tag)); - } - } - - other => { - return Err(ShellError::type_error( - "column name", - other.type_name().tagged(&self.tag), - )) + Value::Primitive(Primitive::String(s)) => { + 
Ok(vec![Value::string(s).tagged(&self.tag)].tagged(&self.tag)) } + Value::Table(table) => Ok(table.to_vec().tagged(&self.tag)), + other => Err(ShellError::type_error( + "column name", + other.type_name().tagged(&self.tag), + )), } - - Ok(out.tagged(&self.tag)) } pub(crate) fn as_string(&self) -> Result { @@ -528,30 +520,32 @@ impl Value { pub fn get_data_by_column_path( &self, tag: Tag, - path: &Vec>, - callback: Box)) -> ShellError>, + path: &Vec>, + callback: Box)) -> ShellError>, ) -> Result>, ShellError> { let mut current = self; for p in path { - // note: - // This will eventually be refactored once we are able - // to parse correctly column_paths and get them deserialized - // to values for us. - let value = match p.item().parse::() { - Ok(number) => match current { - Value::Table(_) => current.get_data_by_index(number), - Value::Row(_) => current.get_data_by_key(p), - _ => None, - }, - Err(_) => match self { - Value::Table(_) | Value::Row(_) => current.get_data_by_key(p), - _ => None, - }, - }; // end + let value = match p.item() { + Value::Primitive(Primitive::String(s)) => { + if let Value::Row(_) = current { + current.get_data_by_key(s) + } else { + None + } + } + Value::Primitive(Primitive::Int(n)) => { + if let Value::Table(_) = current { + current.get_data_by_index(n.to_usize().unwrap()) + } else { + None + } + } + _ => None, + }; match value { Some(v) => current = v, - None => return Err(callback((¤t.clone(), &p.clone()))), + None => return Err(callback((current.clone(), p.clone()))), } } @@ -614,9 +608,21 @@ impl Value { pub fn insert_data_at_column_path( &self, tag: Tag, - split_path: &Vec>, + split_path: &Vec>, new_value: Value, ) -> Option> { + let split_path = split_path + .into_iter() + .map(|p| match p { + Tagged { + item: Value::Primitive(Primitive::String(s)), + tag, + } => Ok(s.clone().tagged(tag)), + o => Err(o), + }) + .filter_map(Result::ok) + .collect::>>(); + let mut new_obj = self.clone(); if let Value::Row(ref mut o) = new_obj { @@ 
-665,14 +671,14 @@ impl Value { pub fn replace_data_at_column_path( &self, tag: Tag, - split_path: &Vec>, + split_path: &Vec>, replaced_value: Value, ) -> Option> { let mut new_obj = self.clone(); let mut current = &mut new_obj; for idx in 0..split_path.len() { - match current.get_mut_data_by_key(&split_path[idx].item) { + match current.get_mut_data_by_key(&split_path[idx].as_string().unwrap()) { Some(next) => { if idx == (split_path.len() - 1) { *next = replaced_value.tagged(&tag); @@ -943,6 +949,10 @@ mod tests { Value::string(input.into()).tagged_unknown() } + fn number(n: i64) -> Tagged { + Value::number(n).tagged_unknown() + } + fn row(entries: IndexMap>) -> Tagged { Value::row(entries).tagged_unknown() } @@ -951,19 +961,12 @@ mod tests { Value::table(list).tagged_unknown() } - fn error_callback() -> impl FnOnce((&Value, &Tagged)) -> ShellError { + fn error_callback() -> impl FnOnce((Value, Tagged)) -> ShellError { move |(_obj_source, _column_path_tried)| ShellError::unimplemented("will never be called.") } - fn column_path(paths: &Vec>) -> Tagged>> { - table( - &paths - .iter() - .map(|p| string(p.as_string().unwrap())) - .collect(), - ) - .as_column_path() - .unwrap() + fn column_path(paths: &Vec>) -> Vec> { + table(paths).as_column_path().unwrap().item } #[test] @@ -1005,36 +1008,9 @@ mod tests { ) } - #[test] - fn gets_first_matching_field_from_rows_with_same_field_inside_a_table() { - let field_path = column_path(&vec![string("package"), string("authors"), string("name")]); - - let (name, tag) = string("Andrés N. Robalino").into_parts(); - - let value = Value::row(indexmap! { - "package".into() => row(indexmap! { - "name".into() => string("nu"), - "version".into() => string("0.4.0"), - "authors".into() => table(&vec![ - row(indexmap!{"name".into() => string("Andrés N. 
Robalino")}), - row(indexmap!{"name".into() => string("Jonathan Turner")}), - row(indexmap!{"name".into() => string("Yehuda Katz")}) - ]) - }) - }); - - assert_eq!( - **value - .get_data_by_column_path(tag, &field_path, Box::new(error_callback())) - .unwrap() - .unwrap(), - name - ) - } - #[test] fn column_path_that_contains_just_a_number_gets_a_row_from_a_table() { - let field_path = column_path(&vec![string("package"), string("authors"), string("0")]); + let field_path = column_path(&vec![string("package"), string("authors"), number(0)]); let (_, tag) = string("Andrés N. Robalino").into_parts(); diff --git a/src/parser/deserializer.rs b/src/parser/deserializer.rs index 4b8bf913d5..c9436ab29e 100644 --- a/src/parser/deserializer.rs +++ b/src/parser/deserializer.rs @@ -61,7 +61,7 @@ impl<'de> ConfigDeserializer<'de> { pub fn top(&mut self) -> &DeserializerItem { let value = self.stack.last(); trace!("inspecting top value :: {:?}", value); - value.expect("Can't get top elemant of an empty stack") + value.expect("Can't get top element of an empty stack") } pub fn pop(&mut self) -> DeserializerItem { @@ -486,8 +486,8 @@ mod tests { // is unspecified and change is likely. // This test makes sure that such change is detected // by this test failing, and not things silently breaking. - // Specifically, we rely on this behaviour further above - // in the file to special case Tagged parsing. + // Specifically, we rely on this behavior further above + // in the file for the Tagged special case parsing. 
let tuple = type_name::<()>(); let tagged_tuple = type_name::>(); let tagged_value = type_name::>(); diff --git a/src/parser/hir/syntax_shape/expression/number.rs b/src/parser/hir/syntax_shape/expression/number.rs index 6c599cc026..9a7a6227dd 100644 --- a/src/parser/hir/syntax_shape/expression/number.rs +++ b/src/parser/hir/syntax_shape/expression/number.rs @@ -1,7 +1,7 @@ use crate::parser::hir::syntax_shape::{ expand_atom, parse_single_node, ExpandContext, ExpandExpression, ExpansionRule, - FallibleColorSyntax, FlatShape, ParseError, -}; + FallibleColorSyntax, FlatShape, TestSyntax, ParseError}; +use crate::parser::hir::tokens_iterator::Peeked; use crate::parser::{ hir, hir::{RawNumber, TokensIterator}, @@ -212,3 +212,18 @@ impl FallibleColorSyntax for IntShape { Ok(()) } } + +impl TestSyntax for NumberShape { + fn test<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Option> { + let peeked = token_nodes.peek_any(); + + match peeked.node { + Some(token) if token.is_number() => Some(peeked), + _ => None, + } + } +} diff --git a/src/parser/hir/syntax_shape/expression/variable_path.rs b/src/parser/hir/syntax_shape/expression/variable_path.rs index 1a91e132c6..f6b4c1931c 100644 --- a/src/parser/hir/syntax_shape/expression/variable_path.rs +++ b/src/parser/hir/syntax_shape/expression/variable_path.rs @@ -906,6 +906,16 @@ impl ExpandSyntax for MemberShape { return Ok(Member::Bare(node.span())); } + /* KATZ */ + /* let number = NumberShape.test(token_nodes, context); + + if let Some(peeked) = number { + let node = peeked.not_eof("column")?.commit(); + let (n, span) = node.as_number().unwrap(); + + return Ok(Member::Number(n, span)) + }*/ + let string = StringShape.test(token_nodes, context); if let Some(peeked) = string { diff --git a/src/parser/hir/tokens_iterator/tests.rs b/src/parser/hir/tokens_iterator/tests.rs new file mode 100644 index 0000000000..23f8889786 --- /dev/null +++ b/src/parser/hir/tokens_iterator/tests.rs 
@@ -0,0 +1,16 @@ +use crate::parser::hir::TokensIterator; +use crate::parser::parse::token_tree_builder::TokenTreeBuilder as b; +use crate::Span; + +#[test] +fn supplies_tokens() { + let tokens = b::token_list(vec![b::var("it"), b::op("."), b::bare("cpu")]); + let (tokens, _) = b::build(tokens); + + let tokens = tokens.expect_list(); + let mut iterator = TokensIterator::all(tokens, Span::unknown()); + + iterator.next().unwrap().expect_var(); + iterator.next().unwrap().expect_dot(); + iterator.next().unwrap().expect_bare(); +} diff --git a/src/parser/parse/token_tree.rs b/src/parser/parse/token_tree.rs index 75228133da..137b22be7d 100644 --- a/src/parser/parse/token_tree.rs +++ b/src/parser/parse/token_tree.rs @@ -171,6 +171,16 @@ impl TokenNode { } } + pub fn is_number(&self) -> bool { + match self { + TokenNode::Token(Spanned { + item: RawToken::Number(_), + .. + }) => true, + _ => false, + } + } + pub fn as_string(&self) -> Option<(Span, Span)> { match self { TokenNode::Token(Spanned { diff --git a/src/plugins/edit.rs b/src/plugins/edit.rs index 78cb32cef3..fb0ac48ede 100644 --- a/src/plugins/edit.rs +++ b/src/plugins/edit.rs @@ -3,7 +3,7 @@ use nu::{ Tagged, Value, }; -pub type ColumnPath = Tagged>>; +pub type ColumnPath = Tagged>>; struct Edit { field: Option, diff --git a/src/plugins/inc.rs b/src/plugins/inc.rs index fb3836dfd3..b7b24025e3 100644 --- a/src/plugins/inc.rs +++ b/src/plugins/inc.rs @@ -14,7 +14,7 @@ pub enum SemVerAction { Patch, } -pub type ColumnPath = Vec>; +pub type ColumnPath = Tagged>>; struct Inc { field: Option, @@ -100,7 +100,7 @@ impl Inc { let replace_for = value.item.get_data_by_column_path( value.tag(), - &f, + f, Box::new(move |(obj_source, column_path_tried)| { match did_you_mean(&obj_source, &column_path_tried) { Some(suggestions) => { @@ -191,7 +191,7 @@ impl Plugin for Inc { item: Value::Table(_), .. 
} => { - self.field = Some(table.as_column_path()?.item().to_vec()); + self.field = Some(table.as_column_path()?); } value => return Err(ShellError::type_error("table", value.tagged_type_name())), } @@ -229,8 +229,8 @@ mod tests { use super::{Inc, SemVerAction}; use indexmap::IndexMap; use nu::{ - CallInfo, EvaluatedArgs, Plugin, ReturnSuccess, Tag, Tagged, TaggedDictBuilder, TaggedItem, - Value, + CallInfo, EvaluatedArgs, Plugin, Primitive, ReturnSuccess, Tag, Tagged, TaggedDictBuilder, + TaggedItem, Value, }; struct CallStub { @@ -344,9 +344,13 @@ mod tests { .is_ok()); assert_eq!( - plugin - .field - .map(|f| f.iter().map(|f| f.item.clone()).collect()), + plugin.field.map(|f| f + .iter() + .map(|f| match &f.item { + Value::Primitive(Primitive::String(s)) => s.clone(), + _ => panic!(""), + }) + .collect()), Some(vec!["package".to_string(), "version".to_string()]) ); } diff --git a/src/plugins/insert.rs b/src/plugins/insert.rs index 9041abc36b..cfe3a27b5d 100644 --- a/src/plugins/insert.rs +++ b/src/plugins/insert.rs @@ -4,7 +4,7 @@ use nu::{ Tagged, TaggedItem, Value, }; -pub type ColumnPath = Vec>; +pub type ColumnPath = Vec>; struct Insert { field: Option, @@ -22,14 +22,21 @@ impl Insert { let value_tag = value.tag(); match (value.item, self.value.clone()) { (obj @ Value::Row(_), Some(v)) => match &self.field { - Some(f) => match obj.insert_data_at_column_path(value_tag.clone(), &f, v) { + Some(f) => match obj.insert_data_at_column_path(value_tag.clone(), f, v) { Some(v) => return Ok(v), None => { return Err(ShellError::labeled_error( format!( "add could not find place to insert field {:?} {}", obj, - f.iter().map(|i| &i.item).join(".") + f.iter() + .map(|i| { + match &i.item { + Value::Primitive(primitive) => primitive.format(None), + _ => String::from(""), + } + }) + .join(".") ), "column name", &value_tag, diff --git a/src/plugins/str.rs b/src/plugins/str.rs index 7fb694d3a5..552602acc0 100644 --- a/src/plugins/str.rs +++ b/src/plugins/str.rs @@ -12,7 +12,7 
@@ enum Action { Substring(usize, usize), } -pub type ColumnPath = Vec>; +pub type ColumnPath = Tagged>>; struct Str { field: Option, @@ -132,7 +132,7 @@ impl Str { let replace_for = value.item.get_data_by_column_path( value.tag(), - &f, + f, Box::new(move |(obj_source, column_path_tried)| { match did_you_mean(&obj_source, &column_path_tried) { Some(suggestions) => { @@ -169,7 +169,7 @@ impl Str { match value.item.replace_data_at_column_path( value.tag(), - &f, + f, replacement.item.clone(), ) { Some(v) => return Ok(v), @@ -246,17 +246,22 @@ impl Plugin for Str { if let Some(possible_field) = args.nth(0) { match possible_field { - Tagged { - item: Value::Primitive(Primitive::String(s)), - tag, - } => { - self.for_field(vec![s.clone().tagged(tag)]); - } + string @ Tagged { + item: Value::Primitive(Primitive::String(_)), + .. + } => match self.action { + Some(Action::Downcase) + | Some(Action::Upcase) + | Some(Action::ToInteger) + | None => { + self.for_field(string.as_column_path()?); + } + }, table @ Tagged { item: Value::Table(_), .. 
} => { - self.field = Some(table.as_column_path()?.item); + self.field = Some(table.as_column_path()?); } _ => { return Err(ShellError::labeled_error( @@ -419,9 +424,13 @@ mod tests { .is_ok()); assert_eq!( - plugin - .field - .map(|f| f.into_iter().map(|f| f.item).collect()), + plugin.field.map(|f| f + .iter() + .map(|f| match &f.item { + Value::Primitive(Primitive::String(s)) => s.clone(), + _ => panic!(""), + }) + .collect()), Some(vec!["package".to_string(), "description".to_string()]) ) } diff --git a/src/utils.rs b/src/utils.rs index 9822b76278..cef4aad193 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -7,8 +7,10 @@ use std::path::{Component, Path, PathBuf}; pub fn did_you_mean( obj_source: &Value, - field_tried: &Tagged, + field_tried: &Tagged, ) -> Option> { + let field_tried = field_tried.as_string().unwrap(); + let possibilities = obj_source.data_descriptors(); let mut possible_matches: Vec<_> = possibilities diff --git a/tests/command_get_tests.rs b/tests/command_get_tests.rs index 09348678bf..386b1795e6 100644 --- a/tests/command_get_tests.rs +++ b/tests/command_get_tests.rs @@ -27,7 +27,7 @@ fn get() { } #[test] -fn fetches_by_index_from_a_given_table() { +fn fetches_by_index() { Playground::setup("get_test_2", |dirs, sandbox| { sandbox.with_files(vec![FileWithContent( "sample.toml", @@ -53,14 +53,13 @@ fn fetches_by_index_from_a_given_table() { }) } #[test] -fn supports_fetching_rows_from_tables_using_columns_named_as_numbers() { +fn fetches_by_column_path() { Playground::setup("get_test_3", |dirs, sandbox| { sandbox.with_files(vec![FileWithContent( "sample.toml", r#" [package] - 0 = "nu" - 1 = "0.4.1" + name = "nu" "#, )]); @@ -68,25 +67,23 @@ fn supports_fetching_rows_from_tables_using_columns_named_as_numbers() { cwd: dirs.test(), h::pipeline( r#" open sample.toml - | get package.1 + | get package.name | echo $it "# )); - assert_eq!(actual, "0.4.1"); + assert_eq!(actual, "nu"); }) } #[test] -fn 
can_fetch_tables_or_rows_using_numbers_in_column_path() { +fn column_paths_are_either_double_quoted_or_regular_unquoted_words_separated_by_dot() { Playground::setup("get_test_4", |dirs, sandbox| { sandbox.with_files(vec![FileWithContent( "sample.toml", r#" [package] - 0 = "nu" - 1 = "0.4.1" - 2 = ["Yehuda Katz ", "Jonathan Turner ", "Andrés N. Robalino "] + 9999 = ["Yehuda Katz ", "Jonathan Turner ", "Andrés N. Robalino "] description = "When arepas shells are tasty and fun." "#, )]); @@ -95,17 +92,18 @@ fn can_fetch_tables_or_rows_using_numbers_in_column_path() { cwd: dirs.test(), h::pipeline( r#" open sample.toml - | get package.2.1 + | get package."9999" + | count | echo $it "# )); - assert_eq!(actual, "Jonathan Turner "); + assert_eq!(actual, "3"); }) } #[test] -fn fetches_more_than_one_column_member_path() { +fn fetches_more_than_one_column_path() { Playground::setup("get_test_5", |dirs, sandbox| { sandbox.with_files(vec![FileWithContent( "sample.toml", @@ -161,9 +159,34 @@ fn errors_fetching_by_column_not_present() { assert!(actual.contains("did you mean 'taconushell'?")); }) } + #[test] -fn errors_fetching_by_index_out_of_bounds_from_table() { +fn errors_fetching_by_column_using_a_number() { Playground::setup("get_test_7", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" + [spanish_lesson] + 0 = "can only be fetched with 0 double quoted." + "#, + )]); + + let actual = nu_error!( + cwd: dirs.test(), h::pipeline( + r#" + open sample.toml + | get spanish_lesson.0 + "# + )); + + assert!(actual.contains("No rows available")); + assert!(actual.contains("Tried getting a row indexed at '0'")); + assert!(actual.contains(r#"Not a table. 
Perhaps you meant to get the column "0" instead?"#)) + }) +} +#[test] +fn errors_fetching_by_index_out_of_bounds() { + Playground::setup("get_test_8", |dirs, sandbox| { sandbox.with_files(vec![FileWithContent( "sample.toml", r#" @@ -188,7 +211,7 @@ fn errors_fetching_by_index_out_of_bounds_from_table() { #[test] fn requires_at_least_one_column_member_path() { - Playground::setup("get_test_8", |dirs, sandbox| { + Playground::setup("get_test_9", |dirs, sandbox| { sandbox.with_files(vec![EmptyFile("andres.txt")]); let actual = nu_error!( From d7b768ee9f8a34f7142a4c71b9029d6f20e5faf5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Sun, 3 Nov 2019 05:19:09 -0500 Subject: [PATCH 136/184] Fallback internally to String primitives until Member int serialization lands. --- src/commands/get.rs | 39 ++++++++++++++++++++++++++------------ src/data/base.rs | 38 +++++++++++++++++++++---------------- tests/command_get_tests.rs | 4 ++-- 3 files changed, 51 insertions(+), 30 deletions(-) diff --git a/src/commands/get.rs b/src/commands/get.rs index f5db529c6f..790c44498f 100644 --- a/src/commands/get.rs +++ b/src/commands/get.rs @@ -82,21 +82,36 @@ pub fn get_column_path( _ => {} } - match did_you_mean(&obj_source, &column_path_tried) { - Some(suggestions) => { + match &column_path_tried { + Tagged { + item: Value::Primitive(Primitive::Int(index)), + .. + } => { return ShellError::labeled_error( - "Unknown column", - format!("did you mean '{}'?", suggestions[0].1), - tag_for_tagged_list(fields.iter().map(|p| p.tag())), - ) - } - None => { - return ShellError::labeled_error( - "Unknown column", - "row does not contain this column", - tag_for_tagged_list(fields.iter().map(|p| p.tag())), + "No rows available", + format!( + "Not a table. 
Perhaps you meant to get the column '{}' instead?", + index + ), + column_path_tried.tag(), ) } + _ => match did_you_mean(&obj_source, &column_path_tried) { + Some(suggestions) => { + return ShellError::labeled_error( + "Unknown column", + format!("did you mean '{}'?", suggestions[0].1), + tag_for_tagged_list(fields.iter().map(|p| p.tag())), + ) + } + None => { + return ShellError::labeled_error( + "Unknown column", + "row does not contain this column", + tag_for_tagged_list(fields.iter().map(|p| p.tag())), + ) + } + }, } }), ); diff --git a/src/data/base.rs b/src/data/base.rs index 094d425ee6..7d311a9949 100644 --- a/src/data/base.rs +++ b/src/data/base.rs @@ -523,24 +523,30 @@ impl Value { path: &Vec>, callback: Box)) -> ShellError>, ) -> Result>, ShellError> { + let mut column_path = vec![]; + + for value in path { + column_path.push( + Value::string(value.as_string().unwrap_or("".to_string())).tagged(&value.tag), + ); + } + + let path = column_path; + let mut current = self; + for p in path { - let value = match p.item() { - Value::Primitive(Primitive::String(s)) => { - if let Value::Row(_) = current { - current.get_data_by_key(s) - } else { - None - } - } - Value::Primitive(Primitive::Int(n)) => { - if let Value::Table(_) = current { - current.get_data_by_index(n.to_usize().unwrap()) - } else { - None - } - } - _ => None, + let value = p.as_string().unwrap_or("".to_string()); + let value = match value.parse::() { + Ok(number) => match current { + Value::Table(_) => current.get_data_by_index(number), + Value::Row(_) => current.get_data_by_key(&value), + _ => None, + }, + Err(_) => match self { + Value::Table(_) | Value::Row(_) => current.get_data_by_key(&value), + _ => None, + }, }; match value { diff --git a/tests/command_get_tests.rs b/tests/command_get_tests.rs index 386b1795e6..71fde763a9 100644 --- a/tests/command_get_tests.rs +++ b/tests/command_get_tests.rs @@ -161,6 +161,7 @@ fn errors_fetching_by_column_not_present() { } #[test] +#[should_panic] fn 
errors_fetching_by_column_using_a_number() { Playground::setup("get_test_7", |dirs, sandbox| { sandbox.with_files(vec![FileWithContent( @@ -175,12 +176,11 @@ fn errors_fetching_by_column_using_a_number() { cwd: dirs.test(), h::pipeline( r#" open sample.toml - | get spanish_lesson.0 + | get spanish_lesson.9 "# )); assert!(actual.contains("No rows available")); - assert!(actual.contains("Tried getting a row indexed at '0'")); assert!(actual.contains(r#"Not a table. Perhaps you meant to get the column "0" instead?"#)) }) } From 8860d8de8d866192e1b2b867d54d8d9982299ced Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Sun, 3 Nov 2019 06:30:32 -0500 Subject: [PATCH 137/184] At the moment, ColumnPaths represent a set of Members (eg. package.authors is a column path of two members) The functions for retrieving, replacing, and inserting values into values all assumed they get the complete column path as regular tagged strings. This commit changes for these to accept a tagged values instead. Basically it means we can have column paths containing strings and numbers (eg. package.authors.1) Unfortunately, for the moment all members when parsed and deserialized for a command that expects column paths of tagged values will get tagged values (encapsulating Members) as strings only. This makes it impossible to determine whether package.authors.1 package.authors."1" (meaning the "number" 1) is a string member or a number member and thus prevents to know and force the user that paths enclosed in double quotes means "retrieve the column at this given table" and that numbers are for retrieving a particular row number from a table. This commit sets in place the infraestructure needed when integer members land, in the mean time the workaround is to convert back to strings the tagged values passed from the column paths. 
--- src/data/base.rs | 67 ++++--------------- .../hir/syntax_shape/expression/number.rs | 3 +- src/plugins/str.rs | 11 +-- 3 files changed, 18 insertions(+), 63 deletions(-) diff --git a/src/data/base.rs b/src/data/base.rs index 7d311a9949..72c98f2c89 100644 --- a/src/data/base.rs +++ b/src/data/base.rs @@ -558,59 +558,6 @@ impl Value { Ok(Some(current.tagged(tag))) } - pub fn insert_data_at_path( - &self, - tag: Tag, - path: &str, - new_value: Value, - ) -> Option> { - let mut new_obj = self.clone(); - - let split_path: Vec<_> = path.split(".").collect(); - - if let Value::Row(ref mut o) = new_obj { - let mut current = o; - - if split_path.len() == 1 { - // Special case for inserting at the top level - current - .entries - .insert(path.to_string(), new_value.tagged(&tag)); - return Some(new_obj.tagged(&tag)); - } - - for idx in 0..split_path.len() { - match current.entries.get_mut(split_path[idx]) { - Some(next) => { - if idx == (split_path.len() - 2) { - match &mut next.item { - Value::Row(o) => { - o.entries.insert( - split_path[idx + 1].to_string(), - new_value.tagged(&tag), - ); - } - _ => {} - } - - return Some(new_obj.tagged(&tag)); - } else { - match next.item { - Value::Row(ref mut o) => { - current = o; - } - _ => return None, - } - } - } - _ => return None, - } - } - } - - None - } - pub fn insert_data_at_column_path( &self, tag: Tag, @@ -680,11 +627,23 @@ impl Value { split_path: &Vec>, replaced_value: Value, ) -> Option> { + let split_path = split_path + .into_iter() + .map(|p| match p { + Tagged { + item: Value::Primitive(Primitive::String(s)), + tag, + } => Ok(s.clone().tagged(tag)), + o => Err(o), + }) + .filter_map(Result::ok) + .collect::>>(); + let mut new_obj = self.clone(); let mut current = &mut new_obj; for idx in 0..split_path.len() { - match current.get_mut_data_by_key(&split_path[idx].as_string().unwrap()) { + match current.get_mut_data_by_key(&split_path[idx].item) { Some(next) => { if idx == (split_path.len() - 1) { *next = 
replaced_value.tagged(&tag); diff --git a/src/parser/hir/syntax_shape/expression/number.rs b/src/parser/hir/syntax_shape/expression/number.rs index 9a7a6227dd..492a29202b 100644 --- a/src/parser/hir/syntax_shape/expression/number.rs +++ b/src/parser/hir/syntax_shape/expression/number.rs @@ -1,6 +1,7 @@ use crate::parser::hir::syntax_shape::{ expand_atom, parse_single_node, ExpandContext, ExpandExpression, ExpansionRule, - FallibleColorSyntax, FlatShape, TestSyntax, ParseError}; + FallibleColorSyntax, FlatShape, ParseError, TestSyntax, +}; use crate::parser::hir::tokens_iterator::Peeked; use crate::parser::{ hir, diff --git a/src/plugins/str.rs b/src/plugins/str.rs index 552602acc0..5741897fd4 100644 --- a/src/plugins/str.rs +++ b/src/plugins/str.rs @@ -249,14 +249,9 @@ impl Plugin for Str { string @ Tagged { item: Value::Primitive(Primitive::String(_)), .. - } => match self.action { - Some(Action::Downcase) - | Some(Action::Upcase) - | Some(Action::ToInteger) - | None => { - self.for_field(string.as_column_path()?); - } - }, + } => { + self.for_field(string.as_column_path()?); + } table @ Tagged { item: Value::Table(_), .. From 889d2bb378c1f5cbe552acdcf06be90bfb6bd478 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Sun, 3 Nov 2019 16:36:47 -0500 Subject: [PATCH 138/184] Isolate feature. --- README.md | 1 - features.toml | 8 ++++++++ src/cli.rs | 3 ++- src/commands.rs | 6 ++++++ tests/commands_test.rs | 2 ++ 5 files changed, 18 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 1deb11a205..64ff0e8015 100644 --- a/README.md +++ b/README.md @@ -262,7 +262,6 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat | pivot --header-row | Pivot the tables, making columns into rows and vice versa | | reject ...columns | Remove the given columns from the table | | reverse | Reverses the table. 
| -| split-by column | Creates a new table with the data from the inner tables splitted by the column given | | skip amount | Skip a number of rows | | skip-while condition | Skips rows while the condition matches. | | sort-by ...columns | Sort by the given columns | diff --git a/features.toml b/features.toml index f7cea6d9e9..e1cf56e33d 100644 --- a/features.toml +++ b/features.toml @@ -10,4 +10,12 @@ reason = """ This is laying the groundwork for merging coloring and parsing. It also makes token_nodes.atomic() naturally work with coloring, which is pretty useful on its own. """ +enabled = false + +[data_processing_primitives] + +description = "Groundwork so tables can be data processed" +reason = """ +These will allow take tables and be able to transform, process, and explore. +""" enabled = false \ No newline at end of file diff --git a/src/cli.rs b/src/cli.rs index a66dd6cbbf..d53e587a38 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -259,7 +259,6 @@ pub async fn cli() -> Result<(), Box> { whole_stream_command(Previous), whole_stream_command(Debug), whole_stream_command(Shells), - whole_stream_command(SplitBy), whole_stream_command(SplitColumn), whole_stream_command(SplitRow), whole_stream_command(Lines), @@ -319,6 +318,8 @@ pub async fn cli() -> Result<(), Box> { whole_stream_command(Table), whole_stream_command(Version), whole_stream_command(Which), + #[cfg(data_processing_primitives)] + whole_stream_command(SplitBy), ]); #[cfg(feature = "clipboard")] diff --git a/src/commands.rs b/src/commands.rs index 0a71a93631..5300a39ec1 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -55,7 +55,10 @@ pub(crate) mod shells; pub(crate) mod size; pub(crate) mod skip_while; pub(crate) mod sort_by; + +#[cfg(data_processing_primitives)] pub(crate) mod split_by; + pub(crate) mod split_column; pub(crate) mod split_row; pub(crate) mod table; @@ -130,7 +133,10 @@ pub(crate) use shells::Shells; pub(crate) use size::Size; pub(crate) use skip_while::SkipWhile; pub(crate) use 
sort_by::SortBy; + +#[cfg(data_processing_primitives)] pub(crate) use split_by::SplitBy; + pub(crate) use split_column::SplitColumn; pub(crate) use split_row::SplitRow; pub(crate) use table::Table; diff --git a/tests/commands_test.rs b/tests/commands_test.rs index 7b31c6ae4d..547f1e0080 100644 --- a/tests/commands_test.rs +++ b/tests/commands_test.rs @@ -56,6 +56,7 @@ fn group_by_errors_if_unknown_column_name() { }) } +#[cfg(data_processing_primitives)] #[test] fn split_by() { Playground::setup("split_by_test_1", |dirs, sandbox| { @@ -85,6 +86,7 @@ fn split_by() { }) } +#[cfg(data_processing_primitives)] #[test] fn split_by_errors_if_no_table_given_as_input() { Playground::setup("split_by_test_2", |dirs, sandbox| { From f012eb7bdd30d93b68418cdc2016d14af49dd47f Mon Sep 17 00:00:00 2001 From: Jason Gedge Date: Sun, 3 Nov 2019 20:04:01 -0500 Subject: [PATCH 139/184] Eliminate is_first_command by defaulting to Value::nothing() --- src/cli.rs | 10 ++---- src/commands/autoview.rs | 8 ++--- src/commands/classified.rs | 4 +-- src/commands/command.rs | 66 +++++++++++--------------------------- src/commands/enter.rs | 1 - src/commands/fetch.rs | 2 +- src/commands/open.rs | 2 +- src/commands/post.rs | 3 +- src/commands/save.rs | 2 +- src/context.rs | 3 +- 10 files changed, 31 insertions(+), 70 deletions(-) diff --git a/src/cli.rs b/src/cli.rs index fa68346efc..e5231afd10 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -588,9 +588,7 @@ async fn process_line(readline: Result, ctx: &mut Context } let mut input = ClassifiedInputStream::new(); - let mut iter = pipeline.commands.item.into_iter().peekable(); - let mut is_first_command = true; // Check the config to see if we need to update the path // TODO: make sure config is cached so we don't path this load every call @@ -628,20 +626,20 @@ async fn process_line(readline: Result, ctx: &mut Context ( Some(ClassifiedCommand::Internal(left)), Some(ClassifiedCommand::External(_)), - ) => match left.run(ctx, input, Text::from(line), 
is_first_command) { + ) => match left.run(ctx, input, Text::from(line)) { Ok(val) => ClassifiedInputStream::from_input_stream(val), Err(err) => return LineResult::Error(line.to_string(), err), }, (Some(ClassifiedCommand::Internal(left)), Some(_)) => { - match left.run(ctx, input, Text::from(line), is_first_command) { + match left.run(ctx, input, Text::from(line)) { Ok(val) => ClassifiedInputStream::from_input_stream(val), Err(err) => return LineResult::Error(line.to_string(), err), } } (Some(ClassifiedCommand::Internal(left)), None) => { - match left.run(ctx, input, Text::from(line), is_first_command) { + match left.run(ctx, input, Text::from(line)) { Ok(val) => { use futures::stream::TryStreamExt; @@ -693,8 +691,6 @@ async fn process_line(readline: Result, ctx: &mut Context } } }; - - is_first_command = false; } LineResult::Success(line.to_string()) diff --git a/src/commands/autoview.rs b/src/commands/autoview.rs index 4f7d7172a2..774dfcf88a 100644 --- a/src/commands/autoview.rs +++ b/src/commands/autoview.rs @@ -96,7 +96,7 @@ pub fn autoview( named_args.insert_optional("start_number", Some(Expression::number(current_idx, Tag::unknown()))); command_args.call_info.args.named = Some(named_args); - let result = table.run(command_args, &context.commands, false); + let result = table.run(command_args, &context.commands); result.collect::>().await; if finished { @@ -117,7 +117,7 @@ pub fn autoview( if let Some(text) = text { let mut stream = VecDeque::new(); stream.push_back(Value::string(s).tagged(Tag { anchor, span })); - let result = text.run(raw.with_input(stream.into()), &context.commands, false); + let result = text.run(raw.with_input(stream.into()), &context.commands); result.collect::>().await; } else { println!("{}", s); @@ -134,7 +134,7 @@ pub fn autoview( if let Some(binary) = binary { let mut stream = VecDeque::new(); stream.push_back(x.clone()); - let result = binary.run(raw.with_input(stream.into()), &context.commands, false); + let result = 
binary.run(raw.with_input(stream.into()), &context.commands); result.collect::>().await; } else { use pretty_hex::*; @@ -149,7 +149,7 @@ pub fn autoview( if let Some(table) = table { let mut stream = VecDeque::new(); stream.push_back(x.clone()); - let result = table.run(raw.with_input(stream.into()), &context.commands, false); + let result = table.run(raw.with_input(stream.into()), &context.commands); result.collect::>().await; } else { println!("{:?}", item); diff --git a/src/commands/classified.rs b/src/commands/classified.rs index e694264620..0691a68a35 100644 --- a/src/commands/classified.rs +++ b/src/commands/classified.rs @@ -54,7 +54,7 @@ pub(crate) struct ClassifiedInputStream { impl ClassifiedInputStream { pub(crate) fn new() -> ClassifiedInputStream { ClassifiedInputStream { - objects: VecDeque::new().into(), + objects: vec![Value::nothing().tagged(Tag::unknown())].into(), stdin: None, } } @@ -158,7 +158,6 @@ impl InternalCommand { context: &mut Context, input: ClassifiedInputStream, source: Text, - is_first_command: bool, ) -> Result { if log_enabled!(log::Level::Trace) { trace!(target: "nu::run::internal", "->"); @@ -178,7 +177,6 @@ impl InternalCommand { self.args.item, &source, objects, - is_first_command, ) }; diff --git a/src/commands/command.rs b/src/commands/command.rs index 73b14ca25a..2dc69df9cb 100644 --- a/src/commands/command.rs +++ b/src/commands/command.rs @@ -544,20 +544,13 @@ impl Command { } } - pub fn run( - &self, - args: CommandArgs, - registry: ®istry::CommandRegistry, - is_first_command: bool, - ) -> OutputStream { + pub fn run(&self, args: CommandArgs, registry: ®istry::CommandRegistry) -> OutputStream { match self { Command::WholeStream(command) => match command.run(args, registry) { Ok(stream) => stream, Err(err) => OutputStream::one(Err(err)), }, - Command::PerItem(command) => { - self.run_helper(command.clone(), args, registry.clone(), is_first_command) - } + Command::PerItem(command) => self.run_helper(command.clone(), args, 
registry.clone()), } } @@ -566,7 +559,6 @@ impl Command { command: Arc, args: CommandArgs, registry: CommandRegistry, - is_first_command: bool, ) -> OutputStream { let raw_args = RawCommandArgs { host: args.host, @@ -575,45 +567,23 @@ impl Command { call_info: args.call_info, }; - if !is_first_command { - let out = args - .input - .values - .map(move |x| { - let call_info = raw_args - .clone() - .call_info - .evaluate(®istry, &Scope::it_value(x.clone())) - .unwrap(); - match command.run(&call_info, ®istry, &raw_args, x) { - Ok(o) => o, - Err(e) => VecDeque::from(vec![ReturnValue::Err(e)]).to_output_stream(), - } - }) - .flatten(); - - out.to_output_stream() - } else { - let nothing = Value::nothing().tagged(Tag::unknown()); - - let call_info = raw_args - .clone() - .call_info - .evaluate(®istry, &Scope::it_value(nothing.clone())); - - match call_info { - Ok(call_info) => { - match command - .run(&call_info, ®istry, &raw_args, nothing) - .into() - { - Ok(o) => o, - Err(e) => OutputStream::one(Err(e)), - } + let out = args + .input + .values + .map(move |x| { + let call_info = raw_args + .clone() + .call_info + .evaluate(®istry, &Scope::it_value(x.clone())) + .unwrap(); + match command.run(&call_info, ®istry, &raw_args, x) { + Ok(o) => o, + Err(e) => VecDeque::from(vec![ReturnValue::Err(e)]).to_output_stream(), } - Err(e) => OutputStream::one(Err(e)), - } - } + }) + .flatten(); + + out.to_output_stream() } pub fn is_binary(&self) -> bool { diff --git a/src/commands/enter.rs b/src/commands/enter.rs index 59f7ca0f21..2153a0a084 100644 --- a/src/commands/enter.rs +++ b/src/commands/enter.rs @@ -105,7 +105,6 @@ impl PerItemCommand for Enter { let mut result = converter.run( new_args.with_input(vec![tagged_contents]), ®istry, - false ); let result_vec: Vec> = result.drain_vec().await; diff --git a/src/commands/fetch.rs b/src/commands/fetch.rs index 703c3279c5..8fe3ca247a 100644 --- a/src/commands/fetch.rs +++ b/src/commands/fetch.rs @@ -100,7 +100,7 @@ fn run( name_tag: 
raw_args.call_info.name_tag, } }; - let mut result = converter.run(new_args.with_input(vec![tagged_contents]), ®istry, false); + let mut result = converter.run(new_args.with_input(vec![tagged_contents]), ®istry); let result_vec: Vec> = result.drain_vec().await; for res in result_vec { match res { diff --git a/src/commands/open.rs b/src/commands/open.rs index 19c7d539ed..3d7e066a26 100644 --- a/src/commands/open.rs +++ b/src/commands/open.rs @@ -101,7 +101,7 @@ fn run( name_tag: raw_args.call_info.name_tag, } }; - let mut result = converter.run(new_args.with_input(vec![tagged_contents]), ®istry, false); + let mut result = converter.run(new_args.with_input(vec![tagged_contents]), ®istry); let result_vec: Vec> = result.drain_vec().await; for res in result_vec { match res { diff --git a/src/commands/post.rs b/src/commands/post.rs index eb06cdbae5..92e4e8f135 100644 --- a/src/commands/post.rs +++ b/src/commands/post.rs @@ -124,7 +124,7 @@ fn run( name_tag: raw_args.call_info.name_tag, } }; - let mut result = converter.run(new_args.with_input(vec![tagged_contents]), ®istry, false); + let mut result = converter.run(new_args.with_input(vec![tagged_contents]), ®istry); let result_vec: Vec> = result.drain_vec().await; for res in result_vec { match res { @@ -270,7 +270,6 @@ pub async fn post( let mut result = converter.run( new_args.with_input(vec![item.clone().tagged(tag.clone())]), ®istry, - false, ); let result_vec: Vec> = result.drain_vec().await; diff --git a/src/commands/save.rs b/src/commands/save.rs index 45063dca4e..48cfa1acc9 100644 --- a/src/commands/save.rs +++ b/src/commands/save.rs @@ -193,7 +193,7 @@ fn save( name_tag: raw_args.call_info.name_tag, } }; - let mut result = converter.run(new_args.with_input(input), ®istry, false); + let mut result = converter.run(new_args.with_input(input), ®istry); let result_vec: Vec> = result.drain_vec().await; if converter.is_binary() { process_binary_return_success!('scope, result_vec, name_tag) diff --git a/src/context.rs 
b/src/context.rs index 6983f467ad..fe7d99319b 100644 --- a/src/context.rs +++ b/src/context.rs @@ -112,10 +112,9 @@ impl Context { args: hir::Call, source: &Text, input: InputStream, - is_first_command: bool, ) -> OutputStream { let command_args = self.command_args(args, input, source, name_tag); - command.run(command_args, self.registry(), is_first_command) + command.run(command_args, self.registry()) } fn call_info(&self, args: hir::Call, source: &Text, name_tag: Tag) -> UnevaluatedCallInfo { From 3b84e3ccfe9130d96859bdc0338870d575d07f3e Mon Sep 17 00:00:00 2001 From: Sean Hellum Date: Mon, 4 Nov 2019 11:44:56 -0600 Subject: [PATCH 140/184] Update .gitpod.yml --- .gitpod.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitpod.yml b/.gitpod.yml index adb894f2d3..cb90ac541e 100644 --- a/.gitpod.yml +++ b/.gitpod.yml @@ -1,8 +1,8 @@ image: file: .gitpod.Dockerfile tasks: - - init: cargo build - command: cargo run + - init: cargo install --path . + command: nu github: prebuilds: # enable for the master/default branch (defaults to true) From 44a1686a7603511d9140ece1a42521a9f3d6ecc2 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Wed, 6 Nov 2019 15:28:26 +1300 Subject: [PATCH 141/184] Move flags help to last --- src/commands/help.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/commands/help.rs b/src/commands/help.rs index d5f755f67d..596bcbf930 100644 --- a/src/commands/help.rs +++ b/src/commands/help.rs @@ -61,9 +61,6 @@ impl PerItemCommand for Help { let mut one_liner = String::new(); one_liner.push_str(&signature.name); one_liner.push_str(" "); - if signature.named.len() > 0 { - one_liner.push_str("{flags} "); - } for positional in &signature.positional { match &positional.0 { @@ -80,6 +77,10 @@ impl PerItemCommand for Help { one_liner.push_str(&format!(" ...args",)); } + if signature.named.len() > 0 { + one_liner.push_str("{flags} "); + } + long_desc.push_str(&format!("\nUsage:\n > {}\n", one_liner)); 
if signature.positional.len() > 0 || signature.rest_positional.is_some() { From 4cb399ed70812b11df45373f566b88c311ef8900 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Wed, 6 Nov 2019 18:24:04 +1300 Subject: [PATCH 142/184] Bump version to 0.5.0 --- Cargo.lock | 2 +- Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 114cbf841b..32b7d0bbb0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1487,7 +1487,7 @@ dependencies = [ [[package]] name = "nu" -version = "0.4.1" +version = "0.5.0" dependencies = [ "ansi_term 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)", "app_dirs 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", diff --git a/Cargo.toml b/Cargo.toml index 8dbf9eb628..e5a453be7b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "nu" -version = "0.4.1" +version = "0.5.0" authors = ["Yehuda Katz ", "Jonathan Turner ", "Andrés N. Robalino "] description = "A shell for the GitHub era" license = "MIT" From 01d6287a8fec287b26e887345747df871c84ea15 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Wed, 6 Nov 2019 18:25:23 +1300 Subject: [PATCH 143/184] Update README.md --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 0c13b8c1e2..55866d54af 100644 --- a/README.md +++ b/README.md @@ -173,7 +173,7 @@ We can pipeline this into a command that gets the contents of one of the columns ━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━┯━━━━━━┯━━━━━━━━━ authors │ description │ edition │ license │ name │ version ─────────────────┼────────────────────────────┼─────────┼─────────┼──────┼───────── - [table: 3 rows] │ A shell for the GitHub era │ 2018 │ MIT │ nu │ 0.4.0 + [table: 3 rows] │ A shell for the GitHub era │ 2018 │ MIT │ nu │ 0.5.0 ━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━┷━━━━━━┷━━━━━━━━━ ``` @@ -181,7 +181,7 @@ Finally, we can use commands outside of Nu 
once we have the data we want: ``` /home/jonathan/Source/nushell(master)> open Cargo.toml | get package.version | echo $it -0.4.0 +0.5.0 ``` Here we use the variable `$it` to refer to the value being piped to the external command. From 60445b0559abc1a56d1dc6161da3a4b62f8fc8f5 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Fri, 8 Nov 2019 05:51:21 +1300 Subject: [PATCH 144/184] Move Nu to the stable Rust 1.39 release --- rust-toolchain | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rust-toolchain b/rust-toolchain index c3a3f37794..5edffce6d5 100644 --- a/rust-toolchain +++ b/rust-toolchain @@ -1 +1 @@ -beta-2019-09-25 +1.39.0 From 104b30142f72480fb40c24d8805466c6797e2eb9 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Fri, 8 Nov 2019 06:13:39 +1300 Subject: [PATCH 145/184] Move azure pipeline to stable --- .azure/azure-pipelines.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.azure/azure-pipelines.yml b/.azure/azure-pipelines.yml index 2ab7e05c46..caa63e0164 100644 --- a/.azure/azure-pipelines.yml +++ b/.azure/azure-pipelines.yml @@ -3,13 +3,13 @@ trigger: strategy: matrix: - linux-nightly: + linux-stable: image: ubuntu-16.04 style: 'unflagged' - macos-nightly: + macos-stable: image: macos-10.14 style: 'unflagged' - windows-nightly: + windows-stable: image: vs2017-win2016 style: 'unflagged' linux-nightly-canary: @@ -35,11 +35,11 @@ steps: then sudo apt-get -y install libxcb-composite0-dev libx11-dev fi - curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain `cat rust-toolchain` + curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain "stable" export PATH=$HOME/.cargo/bin:$PATH rustc -Vv echo "##vso[task.prependpath]$HOME/.cargo/bin" - rustup component add rustfmt --toolchain `cat rust-toolchain` + rustup component add rustfmt --toolchain "stable" displayName: Install Rust - bash: RUSTFLAGS="-D warnings" cargo test --all-features condition: 
eq(variables['style'], 'unflagged') From c01b602b86a3eb78fac9cb53557a2e5d62636d80 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Fri, 8 Nov 2019 06:34:53 +1300 Subject: [PATCH 146/184] Update docker to stable --- docker/Dockerfile.nu-base | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/Dockerfile.nu-base b/docker/Dockerfile.nu-base index 1a9e83a11e..81e4822994 100644 --- a/docker/Dockerfile.nu-base +++ b/docker/Dockerfile.nu-base @@ -12,7 +12,7 @@ RUN apt-get update && apt-get install -y libssl-dev \ ARG RELEASE=false WORKDIR /code COPY ./rust-toolchain ./rust-toolchain -RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain `cat rust-toolchain` +RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain "stable" ENV PATH=/root/.cargo/bin:$PATH COPY . /code RUN echo "##vso[task.prependpath]/root/.cargo/bin" && \ From 76208110b968381a4c9f9c8295b2069cec107f60 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Fri, 8 Nov 2019 07:17:12 +1300 Subject: [PATCH 147/184] Update README.md --- README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 55866d54af..d996af67b6 100644 --- a/README.md +++ b/README.md @@ -34,7 +34,7 @@ Try it in Gitpod. Up-to-date installation instructions can be found in the [installation chapter of the book](https://book.nushell.sh/en/installation). **Windows users**: please note that Nu works on Windows 10 and does not currently have Windows 7/8.1 support. -To build Nu, you will need to use the **beta** version of the compiler. +To build Nu, you will need to use the **latest stable (1.39 or later)** version of the compiler. 
Required dependencies: @@ -46,16 +46,16 @@ Optional dependencies: * To use Nu with all possible optional features enabled, you'll also need the following: * on Linux (on Debian/Ubuntu): `apt install libxcb-composite0-dev libx11-dev` -To install Nu via cargo (make sure you have installed [rustup](https://rustup.rs/) and the beta compiler via `rustup install beta`): +To install Nu via cargo (make sure you have installed [rustup](https://rustup.rs/) and the beta compiler via `rustup install stable`): ``` -cargo +beta install nu +cargo install nu ``` You can also install Nu with all the bells and whistles (be sure to have installed the [dependencies](https://book.nushell.sh/en/installation#dependencies) for your platform): ``` -cargo +beta install nu --all-features +cargo install nu --all-features ``` ## Docker From 1cca5557b146e0d5b79da6bc0e2e68d04666ec0a Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Fri, 8 Nov 2019 07:27:39 +1300 Subject: [PATCH 148/184] Second attempt to remove rust-toolchain --- .azure/azure-pipelines.yml | 4 ++-- docker/Dockerfile.nu-base | 2 +- rust-toolchain | 1 - 3 files changed, 3 insertions(+), 4 deletions(-) delete mode 100644 rust-toolchain diff --git a/.azure/azure-pipelines.yml b/.azure/azure-pipelines.yml index caa63e0164..ec77a1de97 100644 --- a/.azure/azure-pipelines.yml +++ b/.azure/azure-pipelines.yml @@ -35,11 +35,11 @@ steps: then sudo apt-get -y install libxcb-composite0-dev libx11-dev fi - curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain "stable" + curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain "1.39.0" export PATH=$HOME/.cargo/bin:$PATH rustc -Vv echo "##vso[task.prependpath]$HOME/.cargo/bin" - rustup component add rustfmt --toolchain "stable" + rustup component add rustfmt --toolchain "1.39.0" displayName: Install Rust - bash: RUSTFLAGS="-D warnings" cargo test --all-features condition: eq(variables['style'], 'unflagged') diff --git 
a/docker/Dockerfile.nu-base b/docker/Dockerfile.nu-base index 81e4822994..ca3ce44f23 100644 --- a/docker/Dockerfile.nu-base +++ b/docker/Dockerfile.nu-base @@ -12,7 +12,7 @@ RUN apt-get update && apt-get install -y libssl-dev \ ARG RELEASE=false WORKDIR /code COPY ./rust-toolchain ./rust-toolchain -RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain "stable" +RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain "1.39.0" ENV PATH=/root/.cargo/bin:$PATH COPY . /code RUN echo "##vso[task.prependpath]/root/.cargo/bin" && \ diff --git a/rust-toolchain b/rust-toolchain deleted file mode 100644 index 5edffce6d5..0000000000 --- a/rust-toolchain +++ /dev/null @@ -1 +0,0 @@ -1.39.0 From c6c6c0f295cbfb90a071b14159209c81e1ec0cab Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Fri, 8 Nov 2019 07:44:34 +1300 Subject: [PATCH 149/184] try again --- .azure/azure-pipelines.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.azure/azure-pipelines.yml b/.azure/azure-pipelines.yml index ec77a1de97..7dfae570a5 100644 --- a/.azure/azure-pipelines.yml +++ b/.azure/azure-pipelines.yml @@ -35,11 +35,12 @@ steps: then sudo apt-get -y install libxcb-composite0-dev libx11-dev fi - curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain "1.39.0" + curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain "stable" + rustup update export PATH=$HOME/.cargo/bin:$PATH rustc -Vv echo "##vso[task.prependpath]$HOME/.cargo/bin" - rustup component add rustfmt --toolchain "1.39.0" + rustup component add rustfmt --toolchain "stable" displayName: Install Rust - bash: RUSTFLAGS="-D warnings" cargo test --all-features condition: eq(variables['style'], 'unflagged') From ff6026ca7973561dc7c99d6ffe48f798ce7658ce Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Fri, 8 Nov 2019 07:47:43 +1300 Subject: [PATCH 150/184] try again --- .azure/azure-pipelines.yml | 2 +- 1 
file changed, 1 insertion(+), 1 deletion(-) diff --git a/.azure/azure-pipelines.yml b/.azure/azure-pipelines.yml index 7dfae570a5..f4c24325e3 100644 --- a/.azure/azure-pipelines.yml +++ b/.azure/azure-pipelines.yml @@ -36,8 +36,8 @@ steps: sudo apt-get -y install libxcb-composite0-dev libx11-dev fi curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain "stable" - rustup update export PATH=$HOME/.cargo/bin:$PATH + rustup update rustc -Vv echo "##vso[task.prependpath]$HOME/.cargo/bin" rustup component add rustfmt --toolchain "stable" From 13314ad1e71042b389bf46e7c75037ff7dec6073 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Fri, 8 Nov 2019 07:54:52 +1300 Subject: [PATCH 151/184] try again --- docker/Dockerfile.nu-base | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docker/Dockerfile.nu-base b/docker/Dockerfile.nu-base index ca3ce44f23..cb4217b4bd 100644 --- a/docker/Dockerfile.nu-base +++ b/docker/Dockerfile.nu-base @@ -12,8 +12,9 @@ RUN apt-get update && apt-get install -y libssl-dev \ ARG RELEASE=false WORKDIR /code COPY ./rust-toolchain ./rust-toolchain -RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain "1.39.0" +RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain "stable" ENV PATH=/root/.cargo/bin:$PATH +RUN rustup update COPY . 
/code RUN echo "##vso[task.prependpath]/root/.cargo/bin" && \ rustc -Vv && \ From c42d97fb9738fab843e02b6bcc6b71fda2ed8093 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Fri, 8 Nov 2019 08:00:46 +1300 Subject: [PATCH 152/184] try again --- docker/Dockerfile.nu-base | 1 - 1 file changed, 1 deletion(-) diff --git a/docker/Dockerfile.nu-base b/docker/Dockerfile.nu-base index cb4217b4bd..3adee88e78 100644 --- a/docker/Dockerfile.nu-base +++ b/docker/Dockerfile.nu-base @@ -11,7 +11,6 @@ RUN apt-get update && apt-get install -y libssl-dev \ ARG RELEASE=false WORKDIR /code -COPY ./rust-toolchain ./rust-toolchain RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain "stable" ENV PATH=/root/.cargo/bin:$PATH RUN rustup update From 8855c543919147db96afceca67dd04e6460252b4 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Fri, 8 Nov 2019 08:19:41 +1300 Subject: [PATCH 153/184] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index d996af67b6..24daf6c35d 100644 --- a/README.md +++ b/README.md @@ -46,7 +46,7 @@ Optional dependencies: * To use Nu with all possible optional features enabled, you'll also need the following: * on Linux (on Debian/Ubuntu): `apt install libxcb-composite0-dev libx11-dev` -To install Nu via cargo (make sure you have installed [rustup](https://rustup.rs/) and the beta compiler via `rustup install stable`): +To install Nu via cargo (make sure you have installed [rustup](https://rustup.rs/) and the latest stable compiler via `rustup install stable`): ``` cargo install nu From 078342442ddfecd8e65a8fa4b3031753fbde05dd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lars=20M=C3=BChmel?= Date: Fri, 8 Nov 2019 13:33:28 +0100 Subject: [PATCH 154/184] removed the requirement on the 'regex' feature for the match plugin The nu_plugin_match binary wasn't built anymore after the regex dependency was made non-optional in https://github.com/nushell/nushell/pull/889, 
causing the removal of the regex feature, which nu_plugin_match depended on. --- Cargo.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index e5a453be7b..3a7e81a4c4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -152,7 +152,6 @@ path = "src/plugins/skip.rs" [[bin]] name = "nu_plugin_match" path = "src/plugins/match.rs" -required-features = ["regex"] [[bin]] name = "nu_plugin_sys" From 15986c598ae653c8d1d2415a0b06b2235cebd235 Mon Sep 17 00:00:00 2001 From: David Mason Date: Fri, 8 Nov 2019 13:11:04 +0000 Subject: [PATCH 155/184] Add --separator command to from_csv The command takes a string, checks it is a single character and then passes it to csv::ReaderBuilder via .delimiter() method as a u8. --- docs/commands/from-csv.md | 65 +++++++++++++++++++++++++++++++++++++++ src/commands/from_csv.rs | 23 +++++++++++++- tests/filters_test.rs | 29 +++++++++++++++++ 3 files changed, 116 insertions(+), 1 deletion(-) diff --git a/docs/commands/from-csv.md b/docs/commands/from-csv.md index 86d309d86b..b72818eefc 100644 --- a/docs/commands/from-csv.md +++ b/docs/commands/from-csv.md @@ -3,7 +3,9 @@ Converts csv data into table. Use this when nushell cannot dertermine the input file extension. 
## Example + Let's say we have the following file : + ```shell > cat pets.txt animal, name, age @@ -36,6 +38,7 @@ To get a table from `pets.txt` we need to use the `from-csv` command : ``` To ignore the csv headers use `--headerless` : + ```shell ━━━┯━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━ # │ Column1 │ Column2 │ Column3 @@ -45,3 +48,65 @@ To ignore the csv headers use `--headerless` : ━━━┷━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━ ``` +To split on a character other than ',' use `--separator` : + +```shell +> open pets.txt +animal; name; age +cat; Tom; 7 +dog; Alfred; 10 +chameleon; Linda; 1 +``` + +```shell +> open pets.txt | from-csv --separator ';' +━━━┯━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━ + # │ animal │ name │ age +───┼───────────┼─────────┼────── + 0 │ cat │ Tom │ 7 + 1 │ dog │ Alfred │ 10 + 2 │ chameleon │ Linda │ 1 +━━━┷━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━ +``` + +To use this command to open a csv with separators other than a comma, use the `--raw` switch of `open` to open the csv, othewise the csv will enter `from-csv` as a table split on commas rather than raw text. + +```shell +> mv pets.txt pets.csv +> open pets.csv | from-csv --separator ';' +error: Expected a string from pipeline +- shell:1:16 +1 | open pets.csv | from-csv --separator ';' + | ^^^^^^^^ requires string input +- shell:1:0 +1 | open pets.csv | from-csv --separator ';' + | value originates from here + +> open pets.csv --raw | from-csv --separator ';' +━━━┯━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━ + # │ animal │ name │ age +───┼───────────┼─────────┼────── + 0 │ cat │ Tom │ 7 + 1 │ dog │ Alfred │ 10 + 2 │ chameleon │ Linda │ 1 +━━━┷━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━ +``` + +Note that separators are currently provided as strings and need to be wrapped in quotes. 
+ +```shell +> open pets.csv --raw | from-csv --separator ; +- shell:1:43 +1 | open pets.csv --raw | from-csv --separator ; + | ^ +``` + +It is also considered an error to use a separator greater than one char : + +```shell +> open pets.txt | from-csv --separator '123' +error: Expected a single separator char from --separator +- shell:1:37 +1 | open pets.txt | from-csv --separator '123' + | ^^^^^ requires a single character string input +``` diff --git a/src/commands/from_csv.rs b/src/commands/from_csv.rs index 7442a07fc9..de2b0d0593 100644 --- a/src/commands/from_csv.rs +++ b/src/commands/from_csv.rs @@ -8,6 +8,7 @@ pub struct FromCSV; #[derive(Deserialize)] pub struct FromCSVArgs { headerless: bool, + separator: Option>, } impl WholeStreamCommand for FromCSV { @@ -17,6 +18,7 @@ impl WholeStreamCommand for FromCSV { fn signature(&self) -> Signature { Signature::build("from-csv") + .named("separator", SyntaxShape::String, "a character to separate columns, defaults to ','") .switch("headerless", "don't treat the first row as column names") } @@ -36,10 +38,12 @@ impl WholeStreamCommand for FromCSV { pub fn from_csv_string_to_value( s: String, headerless: bool, + separator: char, tag: impl Into, ) -> Result, csv::Error> { let mut reader = ReaderBuilder::new() .has_headers(false) + .delimiter(separator as u8) .from_reader(s.as_bytes()); let tag = tag.into(); @@ -84,10 +88,27 @@ pub fn from_csv_string_to_value( fn from_csv( FromCSVArgs { headerless: skip_headers, + separator, }: FromCSVArgs, RunnableContext { input, name, .. }: RunnableContext, ) -> Result { let name_tag = name; + let sep = match separator { + Some(Tagged { item: Value::Primitive(Primitive::String(s)), tag, .. }) => { + let vec_s: Vec = s.chars().collect(); + if vec_s.len() != 1 { + return Err(ShellError::labeled_error( + "Expected a single separator char from --separator", + "requires a single character string input", + tag, + )) + }; + vec_s[0] + } + _ => { + ',' + } + }; let stream = async_stream! 
{ let values: Vec> = input.values.collect().await; @@ -114,7 +135,7 @@ fn from_csv( } } - match from_csv_string_to_value(concat_string, skip_headers, name_tag.clone()) { + match from_csv_string_to_value(concat_string, skip_headers, sep, name_tag.clone()) { Ok(x) => match x { Tagged { item: Value::Table(list), .. } => { for l in list { diff --git a/tests/filters_test.rs b/tests/filters_test.rs index 1eb55448b7..e410e99e65 100644 --- a/tests/filters_test.rs +++ b/tests/filters_test.rs @@ -100,6 +100,35 @@ fn converts_from_csv_text_to_structured_table() { }) } +#[test] +fn converts_from_csv_text_with_separator_to_structured_table() { + Playground::setup("filter_from_csv_test_1", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContentToBeTrimmed( + "los_tres_caballeros.txt", + r#" + first_name;last_name;rusty_luck + Andrés;Robalino;1 + Jonathan;Turner;1 + Yehuda;Katz;1 + "#, + )]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + open los_tres_caballeros.txt + | from-csv --separator ';' + | get rusty_luck + | str --to-int + | sum + | echo $it + "# + )); + + assert_eq!(actual, "3"); + }) +} + #[test] fn converts_from_csv_text_skipping_headers_to_structured_table() { Playground::setup("filter_from_csv_test_2", |dirs, sandbox| { From 4a6122905b69435a36c6d66e212820e4aab13fd6 Mon Sep 17 00:00:00 2001 From: David Mason Date: Fri, 8 Nov 2019 15:27:29 +0000 Subject: [PATCH 156/184] fmt: cargo fmt --all --- src/commands/from_csv.rs | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/src/commands/from_csv.rs b/src/commands/from_csv.rs index de2b0d0593..9483fed521 100644 --- a/src/commands/from_csv.rs +++ b/src/commands/from_csv.rs @@ -18,7 +18,11 @@ impl WholeStreamCommand for FromCSV { fn signature(&self) -> Signature { Signature::build("from-csv") - .named("separator", SyntaxShape::String, "a character to separate columns, defaults to ','") + .named( + "separator", + SyntaxShape::String, + "a character to separate columns, 
defaults to ','", + ) .switch("headerless", "don't treat the first row as column names") } @@ -94,20 +98,22 @@ fn from_csv( ) -> Result { let name_tag = name; let sep = match separator { - Some(Tagged { item: Value::Primitive(Primitive::String(s)), tag, .. }) => { + Some(Tagged { + item: Value::Primitive(Primitive::String(s)), + tag, + .. + }) => { let vec_s: Vec = s.chars().collect(); if vec_s.len() != 1 { return Err(ShellError::labeled_error( "Expected a single separator char from --separator", "requires a single character string input", tag, - )) + )); }; vec_s[0] } - _ => { - ',' - } + _ => ',', }; let stream = async_stream! { From bab58576b4d7320c6b59f9234d4fe5764ad404de Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Sun, 10 Nov 2019 11:26:44 +1300 Subject: [PATCH 157/184] Rename read to parse --- Cargo.toml | 4 ++-- src/plugins/{read.rs => parse.rs} | 40 +++++++++++++++---------------- tests/tests.rs | 2 +- 3 files changed, 23 insertions(+), 23 deletions(-) rename src/plugins/{read.rs => parse.rs} (78%) diff --git a/Cargo.toml b/Cargo.toml index e5a453be7b..593d9d0467 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -138,8 +138,8 @@ name = "nu_plugin_edit" path = "src/plugins/edit.rs" [[bin]] -name = "nu_plugin_read" -path = "src/plugins/read.rs" +name = "nu_plugin_parse" +path = "src/plugins/parse.rs" [[bin]] name = "nu_plugin_str" diff --git a/src/plugins/read.rs b/src/plugins/parse.rs similarity index 78% rename from src/plugins/read.rs rename to src/plugins/parse.rs index de88946e91..97ece0693d 100644 --- a/src/plugins/read.rs +++ b/src/plugins/parse.rs @@ -10,19 +10,19 @@ use nom::{ use regex::Regex; #[derive(Debug)] -enum ReadCommand { +enum ParseCommand { Text(String), Column(String), } -fn read(input: &str) -> IResult<&str, Vec> { +fn parse(input: &str) -> IResult<&str, Vec> { let mut output = vec![]; let mut loop_input = input; loop { let (input, before) = take_while(|c| c != '{')(loop_input)?; if before.len() > 0 { - 
output.push(ReadCommand::Text(before.to_string())); + output.push(ParseCommand::Text(before.to_string())); } if input != "" { // Look for column as we're now at one @@ -30,7 +30,7 @@ fn read(input: &str) -> IResult<&str, Vec> { let (input, column) = take_while(|c| c != '}')(input)?; let (input, _) = tag("}")(input)?; - output.push(ReadCommand::Column(column.to_string())); + output.push(ParseCommand::Column(column.to_string())); loop_input = input; } else { loop_input = input; @@ -43,12 +43,12 @@ fn read(input: &str) -> IResult<&str, Vec> { Ok((loop_input, output)) } -fn column_names(commands: &[ReadCommand]) -> Vec { +fn column_names(commands: &[ParseCommand]) -> Vec { let mut output = vec![]; for command in commands { match command { - ReadCommand::Column(c) => { + ParseCommand::Column(c) => { output.push(c.clone()); } _ => {} @@ -58,15 +58,15 @@ fn column_names(commands: &[ReadCommand]) -> Vec { output } -fn build_regex(commands: &[ReadCommand]) -> String { +fn build_regex(commands: &[ParseCommand]) -> String { let mut output = String::new(); for command in commands { match command { - ReadCommand::Text(s) => { + ParseCommand::Text(s) => { output.push_str(&s.replace("(", "\\(")); } - ReadCommand::Column(_) => { + ParseCommand::Column(_) => { output.push_str("(.*)"); } } @@ -74,23 +74,23 @@ fn build_regex(commands: &[ReadCommand]) -> String { return output; } -struct Read { +struct Parse { regex: Regex, column_names: Vec, } -impl Read { +impl Parse { fn new() -> Self { - Read { + Parse { regex: Regex::new("").unwrap(), column_names: vec![], } } } -impl Plugin for Read { +impl Plugin for Parse { fn config(&mut self) -> Result { - Ok(Signature::build("read") + Ok(Signature::build("parse") .desc("Parse columns from string data using a simple pattern") .required( "pattern", @@ -107,17 +107,17 @@ impl Plugin for Read { .. 
} => { //self.pattern = s.clone(); - let read_pattern = read(&pattern).unwrap(); - let read_regex = build_regex(&read_pattern.1); + let parse_pattern = parse(&pattern).unwrap(); + let parse_regex = build_regex(&parse_pattern.1); - self.column_names = column_names(&read_pattern.1); + self.column_names = column_names(&parse_pattern.1); - self.regex = Regex::new(&read_regex).unwrap(); + self.regex = Regex::new(&parse_regex).unwrap(); } Tagged { tag, .. } => { return Err(ShellError::labeled_error( "Unrecognized type in params", - "value", + "expected a string", tag, )); } @@ -152,5 +152,5 @@ impl Plugin for Read { } fn main() { - serve_plugin(&mut Read::new()); + serve_plugin(&mut Parse::new()); } diff --git a/tests/tests.rs b/tests/tests.rs index caaeb2ac86..490dabefff 100644 --- a/tests/tests.rs +++ b/tests/tests.rs @@ -62,7 +62,7 @@ fn read_plugin() { cwd: "tests/fixtures/formats", h::pipeline( r#" open fileA.txt - | read "{Name}={Value}" + | parse "{Name}={Value}" | nth 1 | get Value | echo $it From 73d847867866b680455378a897db544f33e8e93c Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Sun, 10 Nov 2019 11:27:56 +1300 Subject: [PATCH 158/184] Update readme --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 24daf6c35d..ef245135ad 100644 --- a/README.md +++ b/README.md @@ -295,7 +295,7 @@ Nu adheres closely to a set of goals that make up its design philosophy. 
As feat | from-xml | Parse text as .xml and create a table | | from-yaml | Parse text as a .yaml/.yml and create a table | | lines | Split single string into rows, one per line | -| read pattern | Convert text to a table by matching the given pattern | +| parse pattern | Convert text to a table by matching the given pattern | | size | Gather word count statistics on the text | | split-column sep ...column-names | Split row contents across multiple columns via the separator, optionally give the columns names | | split-row sep | Split row contents over multiple rows via the separator | From 62a5250554d281e844fb16fea2f8787690ab3c37 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Sun, 10 Nov 2019 13:14:59 +1300 Subject: [PATCH 159/184] Add format command --- Cargo.toml | 4 ++ src/data/base.rs | 2 +- src/plugins/format.rs | 128 ++++++++++++++++++++++++++++++++++++++++++ tests/tests.rs | 17 +++++- 4 files changed, 149 insertions(+), 2 deletions(-) create mode 100644 src/plugins/format.rs diff --git a/Cargo.toml b/Cargo.toml index cca4fbce83..323069da8d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -137,6 +137,10 @@ path = "src/plugins/insert.rs" name = "nu_plugin_edit" path = "src/plugins/edit.rs" +[[bin]] +name = "nu_plugin_format" +path = "src/plugins/format.rs" + [[bin]] name = "nu_plugin_parse" path = "src/plugins/parse.rs" diff --git a/src/data/base.rs b/src/data/base.rs index 72c98f2c89..d877e4a7cc 100644 --- a/src/data/base.rs +++ b/src/data/base.rs @@ -422,7 +422,7 @@ impl Tagged { } } - pub(crate) fn as_string(&self) -> Result { + pub fn as_string(&self) -> Result { match &self.item { Value::Primitive(Primitive::String(s)) => Ok(s.clone()), Value::Primitive(Primitive::Boolean(x)) => Ok(format!("{}", x)), diff --git a/src/plugins/format.rs b/src/plugins/format.rs new file mode 100644 index 0000000000..7ed33f964c --- /dev/null +++ b/src/plugins/format.rs @@ -0,0 +1,128 @@ +use nu::{ + serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, 
ShellError, Signature, + SyntaxShape, Tagged, TaggedItem, Value, +}; + +use nom::{ + bytes::complete::{tag, take_while}, + IResult, +}; + +#[derive(Debug)] +enum FormatCommand { + Text(String), + Column(String), +} + +fn format(input: &str) -> IResult<&str, Vec> { + let mut output = vec![]; + + let mut loop_input = input; + loop { + let (input, before) = take_while(|c| c != '{')(loop_input)?; + if before.len() > 0 { + output.push(FormatCommand::Text(before.to_string())); + } + if input != "" { + // Look for column as we're now at one + let (input, _) = tag("{")(input)?; + let (input, column) = take_while(|c| c != '}')(input)?; + let (input, _) = tag("}")(input)?; + + output.push(FormatCommand::Column(column.to_string())); + loop_input = input; + } else { + loop_input = input; + } + if loop_input == "" { + break; + } + } + + Ok((loop_input, output)) +} + +struct Format { + commands: Vec, +} + +impl Format { + fn new() -> Self { + Format { commands: vec![] } + } +} + +impl Plugin for Format { + fn config(&mut self) -> Result { + Ok(Signature::build("format") + .desc("Format columns into a string using a simple pattern") + .required( + "pattern", + SyntaxShape::Any, + "the pattern to match. Eg) \"{foo}: {bar}\"", + ) + .filter()) + } + fn begin_filter(&mut self, call_info: CallInfo) -> Result, ShellError> { + if let Some(args) = call_info.args.positional { + match &args[0] { + Tagged { + item: Value::Primitive(Primitive::String(pattern)), + .. + } => { + let format_pattern = format(&pattern).unwrap(); + self.commands = format_pattern.1 + } + Tagged { tag, .. } => { + return Err(ShellError::labeled_error( + "Unrecognized type in params", + "expected a string", + tag, + )); + } + } + } + Ok(vec![]) + } + + fn filter(&mut self, input: Tagged) -> Result, ShellError> { + match &input { + Tagged { + item: Value::Row(dict), + .. 
+ } => { + let mut output = String::new(); + + for command in &self.commands { + match command { + FormatCommand::Text(s) => { + output.push_str(s); + } + FormatCommand::Column(c) => { + match dict.entries.get(c) { + Some(c) => match c.as_string() { + Ok(v) => output.push_str(&v), + _ => return Ok(vec![]), + }, + None => { + // This row doesn't match, so don't emit anything + return Ok(vec![]); + } + } + } + } + } + + return Ok(vec![ReturnSuccess::value( + Value::string(output).tagged_unknown(), + )]); + } + _ => {} + } + Ok(vec![]) + } +} + +fn main() { + serve_plugin(&mut Format::new()); +} diff --git a/tests/tests.rs b/tests/tests.rs index 490dabefff..8799f5077d 100644 --- a/tests/tests.rs +++ b/tests/tests.rs @@ -57,7 +57,7 @@ fn insert_plugin() { } #[test] -fn read_plugin() { +fn parse_plugin() { let actual = nu!( cwd: "tests/fixtures/formats", h::pipeline( r#" @@ -72,6 +72,21 @@ fn read_plugin() { assert_eq!(actual, "StupidLongName"); } +#[test] +fn format_plugin() { + let actual = nu!( + cwd: "tests/fixtures/formats", h::pipeline( + r#" + open cargo_sample.toml + | get package + | format "{name} has license {license}" + | echo $it + "# + )); + + assert_eq!(actual, "nu has license ISC"); +} + #[test] fn prepend_plugin() { let actual = nu!( From 943e0045e736f3bddcd8a58db006a7060e4b0812 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Sun, 10 Nov 2019 13:16:52 +1300 Subject: [PATCH 160/184] Update readme --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index ef245135ad..46a4c45ac8 100644 --- a/README.md +++ b/README.md @@ -253,6 +253,7 @@ Nu adheres closely to a set of goals that make up its design philosophy. 
As feat | edit column-or-column-path value | Edit an existing column to have a new value | | embed column | Creates a new table of one column with the given name, and places the current table inside of it | | first amount | Show only the first number of rows | +| format pattern | Format table row data as a string following the given pattern | | get column-or-column-path | Open column and get data from the corresponding cells | | group-by column | Creates a new table with the data from the table rows grouped by the column given | | inc (column-or-column-path) | Increment a value or version. Optionally use the column of a table | From df302d4bacd011a56c9b156538cc00b1aa00ec41 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Sun, 10 Nov 2019 16:44:05 +1300 Subject: [PATCH 161/184] Bump Nu version and change plugin load logic for debug --- Cargo.lock | 2 +- Cargo.toml | 2 +- src/cli.rs | 17 +++++++++-------- 3 files changed, 11 insertions(+), 10 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 32b7d0bbb0..c3a7183cf0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1487,7 +1487,7 @@ dependencies = [ [[package]] name = "nu" -version = "0.5.0" +version = "0.5.1" dependencies = [ "ansi_term 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)", "app_dirs 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", diff --git a/Cargo.toml b/Cargo.toml index cca4fbce83..5dc5131bb4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "nu" -version = "0.5.0" +version = "0.5.1" authors = ["Yehuda Katz ", "Jonathan Turner ", "Andrés N. 
Robalino "] description = "A shell for the GitHub era" license = "MIT" diff --git a/src/cli.rs b/src/cli.rs index e5231afd10..483ee332ea 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -23,7 +23,6 @@ use crate::prelude::*; use log::{debug, log_enabled, trace}; use rustyline::error::ReadlineError; use rustyline::{self, config::Configurer, config::EditMode, ColorMode, Config, Editor}; -use std::env; use std::error::Error; use std::io::{BufRead, BufReader, Write}; use std::iter::Iterator; @@ -119,13 +118,6 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel fn search_paths() -> Vec { let mut search_paths = Vec::new(); - match env::var_os("PATH") { - Some(paths) => { - search_paths = env::split_paths(&paths).collect::>(); - } - None => println!("PATH is not defined in the environment."), - } - #[cfg(debug_assertions)] { // Use our debug plugins in debug mode @@ -140,6 +132,15 @@ fn search_paths() -> Vec { #[cfg(not(debug_assertions))] { + use std::env; + + match env::var_os("PATH") { + Some(paths) => { + search_paths = env::split_paths(&paths).collect::>(); + } + None => println!("PATH is not defined in the environment."), + } + // Use our release plugins in release mode let mut path = std::path::PathBuf::from("."); path.push("target"); From 9d345cab073b055272a5136e47e634b581eacd0d Mon Sep 17 00:00:00 2001 From: Shaurya Shubham Date: Sun, 10 Nov 2019 12:37:27 +0530 Subject: [PATCH 162/184] Add documentation for the pick and reject command Partial fix of issue#711 --- docs/commands/pick.md | 53 +++++++++++++++++++++++++++++++++++++++++ docs/commands/reject.md | 38 +++++++++++++++++++++++++++++ 2 files changed, 91 insertions(+) create mode 100644 docs/commands/pick.md create mode 100644 docs/commands/reject.md diff --git a/docs/commands/pick.md b/docs/commands/pick.md new file mode 100644 index 0000000000..e0d0e4c079 --- /dev/null +++ b/docs/commands/pick.md @@ -0,0 +1,53 @@ +# pick + +This command displays only the column names passed on to 
it. + +## Examples + +```shell +> ls +━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━ + # │ name │ type │ readonly │ size │ created │ accessed │ modified +───┼────────────────────────────┼──────┼──────────┼────────┼─────────────┼─────────────┼───────────── + 0 │ zeusiscrazy.txt │ File │ │ 556 B │ a month ago │ a month ago │ a month ago + 1 │ coww.txt │ File │ │ 24 B │ a month ago │ a month ago │ a month ago + 2 │ randomweirdstuff.txt │ File │ │ 197 B │ a month ago │ a month ago │ a month ago + 3 │ abaracadabra.txt │ File │ │ 401 B │ a month ago │ a month ago │ a month ago + 4 │ youshouldeatmorecereal.txt │ File │ │ 768 B │ a month ago │ a month ago │ a month ago +━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━ +> ls | pick name +━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + # │ name +───┼──────────────────────────── + 0 │ zeusiscrazy.txt + 1 │ coww.txt + 2 │ randomweirdstuff.txt + 3 │ abaracadabra.txt + 4 │ youshouldeatmorecereal.txt +━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +``` + +The order in which you put the column names matters: + +```shell +> ls | pick type name size +━━━┯━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━ + # │ type │ name │ size +───┼──────┼────────────────────────────┼──────── + 0 │ File │ zeusiscrazy.txt │ 556 B + 1 │ File │ coww.txt │ 24 B + 2 │ File │ randomweirdstuff.txt │ 197 B + 3 │ File │ abaracadabra.txt │ 401 B + 4 │ File │ youshouldeatmorecereal.txt │ 768 B +━━━┷━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━ +> ls | pick size type name +━━━┯━━━━━━━━┯━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + # │ size │ type │ name +───┼────────┼──────┼──────────────────────────── + 0 │ 556 B │ File │ zeusiscrazy.txt + 1 │ 24 B │ File │ coww.txt + 2 │ 197 B │ File │ randomweirdstuff.txt + 3 │ 401 B │ File │ abaracadabra.txt + 4 │ 768 B │ File │ youshouldeatmorecereal.txt +━━━┷━━━━━━━━┷━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +``` diff --git a/docs/commands/reject.md 
b/docs/commands/reject.md new file mode 100644 index 0000000000..0d474ee863 --- /dev/null +++ b/docs/commands/reject.md @@ -0,0 +1,38 @@ +# reject + +This column removes or rejects the columns passed to it. + +## Examples + +```shell +> ls +━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━ + # │ name │ type │ readonly │ size │ created │ accessed │ modified +───┼────────────────────────────┼──────┼──────────┼────────┼─────────────┼─────────────┼───────────── + 0 │ zeusiscrazy.txt │ File │ │ 556 B │ a month ago │ a month ago │ a month ago + 1 │ coww.txt │ File │ │ 24 B │ a month ago │ a month ago │ a month ago + 2 │ randomweirdstuff.txt │ File │ │ 197 B │ a month ago │ a month ago │ a month ago + 3 │ abaracadabra.txt │ File │ │ 401 B │ a month ago │ a month ago │ a month ago + 4 │ youshouldeatmorecereal.txt │ File │ │ 768 B │ a month ago │ a month ago │ a month ago +━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━ +> ls | reject readonly +━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━ + # │ name │ type │ size │ created │ accessed │ modified +───┼────────────────────────────┼──────┼────────┼─────────────┼─────────────┼───────────── + 0 │ zeusiscrazy.txt │ File │ 556 B │ a month ago │ a month ago │ a month ago + 1 │ coww.txt │ File │ 24 B │ a month ago │ a month ago │ a month ago + 2 │ randomweirdstuff.txt │ File │ 197 B │ a month ago │ a month ago │ a month ago + 3 │ abaracadabra.txt │ File │ 401 B │ a month ago │ a month ago │ a month ago + 4 │ youshouldeatmorecereal.txt │ File │ 768 B │ a month ago │ a month ago │ a month ago +━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━ +> ls | reject readonly accessed +━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━ + # │ name │ type │ size │ created │ modified 
+───┼────────────────────────────┼──────┼────────┼─────────────┼───────────── + 0 │ zeusiscrazy.txt │ File │ 556 B │ a month ago │ a month ago + 1 │ coww.txt │ File │ 24 B │ a month ago │ a month ago + 2 │ randomweirdstuff.txt │ File │ 197 B │ a month ago │ a month ago + 3 │ abaracadabra.txt │ File │ 401 B │ a month ago │ a month ago + 4 │ youshouldeatmorecereal.txt │ File │ 768 B │ a month ago │ a month ago +━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━ +``` From 50616cc62c99e0ae73b4d088ffec535dba533f5a Mon Sep 17 00:00:00 2001 From: Shaurya Shubham Date: Sun, 10 Nov 2019 14:12:59 +0530 Subject: [PATCH 163/184] Add docs for the count command Partial fix of issue #711 --- docs/commands/count.md | 48 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) create mode 100644 docs/commands/count.md diff --git a/docs/commands/count.md b/docs/commands/count.md new file mode 100644 index 0000000000..e330dcc187 --- /dev/null +++ b/docs/commands/count.md @@ -0,0 +1,48 @@ +# count + +This command counts the number of rows in a table. + +## Examples - + +```shell +> ls +━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━ + # │ name │ type │ readonly │ size │ created │ accessed │ modified +────┼──────────────────────────────┼───────────┼──────────┼─────────┼──────────────┼──────────────┼────────────── + 0 │ Desktop │ Directory │ │ 4.1 KB │ 2 months ago │ 2 months ago │ 2 months ago + 1 │ aur │ Directory │ │ 4.1 KB │ 4 hours ago │ 4 hours ago │ 4 hours ago +... 
+ 75 │ .emulator_console_auth_token │ File │ │ 16 B │ 2 months ago │ 2 months ago │ 2 months ago + 76 │ bin │ Directory │ │ 4.1 KB │ 2 months ago │ 2 months ago │ 2 months ago +━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━ +> ls | count +━━━━━━━━━ + +───────── + 77 +━━━━━━━━━ +> ls | get name | count +━━━━━━━━━ + +───────── + 77 +━━━━━━━━━ +> ls | where type == File | count +━━━━━━━━━ + +───────── + 29 +━━━━━━━━━ +> ls | where type == Directory | count +━━━━━━━━━ + +───────── + 48 +━━━━━━━━━ +> ls | where size > 2KB | count +━━━━━━━━━ + +───────── + 57 +━━━━━━━━━ +``` From 5a8128dd302d3b2dcba42e56a7fb4325b1fabb5e Mon Sep 17 00:00:00 2001 From: Shaurya Shubham Date: Sun, 10 Nov 2019 14:41:23 +0530 Subject: [PATCH 164/184] Make documentation for size command --- docs/commands/size.md | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 docs/commands/size.md diff --git a/docs/commands/size.md b/docs/commands/size.md new file mode 100644 index 0000000000..02599dcce9 --- /dev/null +++ b/docs/commands/size.md @@ -0,0 +1,20 @@ +# size + +This commands gives word count statistics on any text. 
+ +## Examples - + +```shell +> open lalala.txt | size +━━━━━━━┯━━━━━━━┯━━━━━━━┯━━━━━━━━━━━━ + lines │ words │ chars │ max length +───────┼───────┼───────┼──────────── + 4 │ 10 │ 72 │ 72 +━━━━━━━┷━━━━━━━┷━━━━━━━┷━━━━━━━━━━━━ +> open the_mysterious_affair_at_styles.txt | size +━━━━━━━┯━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━ + lines │ words │ chars │ max length +───────┼───────┼────────┼──────────── + 8935 │ 62352 │ 349459 │ 361771 +━━━━━━━┷━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━ +``` From 0f405f24c75fe52ee9bc6238745ee3b362687264 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Mon, 11 Nov 2019 06:48:49 +1300 Subject: [PATCH 165/184] Bump dep versions --- Cargo.lock | 348 ++++++++++++++++++++++++---------------------- Cargo.toml | 44 +++--- src/plugins/ps.rs | 2 +- 3 files changed, 202 insertions(+), 192 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c3a7183cf0..a3d563a989 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -55,11 +55,11 @@ dependencies = [ [[package]] name = "async-stream" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "async-stream-impl 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-core-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -77,7 +77,7 @@ name = "atty" version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -93,7 +93,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "backtrace-sys 0.1.31 (registry+https://github.com/rust-lang/crates.io-index)", "cfg-if 0.1.10 
(registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -103,7 +103,7 @@ version = "0.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -114,6 +114,11 @@ dependencies = [ "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "base64" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "battery" version = "0.7.4" @@ -122,7 +127,7 @@ dependencies = [ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "core-foundation 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", "lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "nix 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -138,7 +143,7 @@ dependencies = [ "num-bigint 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -148,7 +153,7 @@ 
source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -180,11 +185,11 @@ dependencies = [ "chrono 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)", "decimal 2.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", "md5 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -197,7 +202,7 @@ dependencies = [ "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "regex-automata 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -257,10 +262,10 @@ name = "chrono" version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 
(registry+https://github.com/rust-lang/crates.io-index)", "num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -322,7 +327,7 @@ dependencies = [ "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "nom 4.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "rust-ini 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", "serde-hjson 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "toml 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)", @@ -340,7 +345,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "core-foundation-sys 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -409,7 +414,7 @@ dependencies = [ "crossterm_screen 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "crossterm_utils 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", "crossterm_winapi 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -441,7 +446,7 @@ dependencies = [ "crossterm_cursor 0.2.6 
(registry+https://github.com/rust-lang/crates.io-index)", "crossterm_utils 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", "crossterm_winapi 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -450,7 +455,7 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "crossterm_winapi 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -471,7 +476,7 @@ dependencies = [ "csv-core 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", "ryu 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -506,7 +511,7 @@ version = "0.4.25" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "curl-sys 0.4.23 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "openssl-probe 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "openssl-sys 0.9.51 (registry+https://github.com/rust-lang/crates.io-index)", "schannel 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)", @@ -520,7 +525,7 @@ version = "0.4.23" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 
(registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libnghttp2-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "libz-sys 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)", "openssl-sys 0.9.51 (registry+https://github.com/rust-lang/crates.io-index)", @@ -535,7 +540,7 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "darwin-libproc-sys 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -544,7 +549,7 @@ name = "darwin-libproc-sys" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -554,10 +559,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "ord_subset 3.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -589,7 +594,7 @@ name = "directories" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.62 
(registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -598,7 +603,7 @@ name = "dirs" version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "redox_users 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -618,7 +623,7 @@ version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "redox_users 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -697,7 +702,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "miniz_oxide 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -718,74 +723,75 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "futures-channel-preview" -version = "0.3.0-alpha.18" +version = "0.3.0-alpha.19" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-sink-preview 
0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-core-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-sink-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "futures-core" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "futures-core-preview" -version = "0.3.0-alpha.18" +version = "0.3.0-alpha.19" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "futures-executor-preview" -version = "0.3.0-alpha.18" +version = "0.3.0-alpha.19" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-util-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-core-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-util-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 1.10.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "futures-io-preview" -version = "0.3.0-alpha.18" +version = "0.3.0-alpha.19" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "futures-preview" -version = "0.3.0-alpha.18" +version = "0.3.0-alpha.19" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "futures-channel-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-executor-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-io-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-sink-preview 0.3.0-alpha.18 
(registry+https://github.com/rust-lang/crates.io-index)", - "futures-util-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-channel-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-core-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-executor-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-io-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-sink-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-util-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "futures-sink-preview" -version = "0.3.0-alpha.18" +version = "0.3.0-alpha.19" source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", -] [[package]] name = "futures-timer" -version = "0.4.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-util-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "pin-utils 0.1.0-alpha.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "futures-util-preview" -version = "0.3.0-alpha.18" +version = "0.3.0-alpha.19" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-channel-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - 
"futures-io-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-sink-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-channel-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-core-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-io-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-sink-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "pin-utils 0.1.0-alpha.4 (registry+https://github.com/rust-lang/crates.io-index)", "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -794,11 +800,12 @@ dependencies = [ [[package]] name = "futures_codec" -version = "0.2.5" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -807,13 +814,13 @@ version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "wasi 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "getset" -version = "0.0.8" +version = "0.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", @@ -827,7 +834,7 
@@ version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libgit2-sys 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -871,10 +878,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "core-foundation 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-util-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-core-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-util-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "nix 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)", "pin-utils 0.1.0-alpha.4 (registry+https://github.com/rust-lang/crates.io-index)", @@ -892,7 +899,7 @@ dependencies = [ "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 
(registry+https://github.com/rust-lang/crates.io-index)", "mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -918,7 +925,7 @@ dependencies = [ "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "widestring 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -934,7 +941,7 @@ dependencies = [ "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "platforms 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -950,7 +957,7 @@ dependencies = [ "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 
(registry+https://github.com/rust-lang/crates.io-index)", ] @@ -966,7 +973,7 @@ dependencies = [ "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "hex 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "macaddr 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "nix 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -985,7 +992,7 @@ dependencies = [ "heim-net 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "ntapi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", @@ -999,7 +1006,7 @@ version = "0.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-channel-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-channel-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "threadpool 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1080,10 +1087,11 @@ dependencies = [ [[package]] name = "indexmap" -version = "1.2.0" +version = "1.3.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1099,7 +1107,7 @@ name = "iovec" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1112,8 +1120,8 @@ dependencies = [ "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", "curl 0.4.25 (registry+https://github.com/rust-lang/crates.io-index)", "curl-sys 0.4.23 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-io-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-util-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-io-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-util-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", "http 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1127,7 +1135,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1142,7 +1150,7 @@ 
dependencies = [ [[package]] name = "itertools" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1159,7 +1167,7 @@ version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "getrandom 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1197,7 +1205,7 @@ dependencies = [ "itertools 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "render-tree 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", "serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "termcolor 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1231,7 +1239,7 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.62" +version = "0.2.65" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -1240,7 +1248,7 @@ version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libz-sys 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1251,7 +1259,7 @@ version = "0.1.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1270,7 +1278,7 @@ version = "1.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", "vcpkg 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1323,7 +1331,7 @@ name = "mach" version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1331,7 +1339,7 @@ name = "mach" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1339,7 +1347,7 @@ name = "malloc_buf" version = "0.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1357,7 +1365,7 @@ name = "memchr" version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1395,7 
+1403,7 @@ dependencies = [ "bincode 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", "serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "wasm-bindgen 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1408,7 +1416,7 @@ dependencies = [ "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1420,7 +1428,7 @@ dependencies = [ "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1450,7 +1458,7 @@ dependencies = [ [[package]] name = "nom-tracable" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "nom 5.0.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1491,8 +1499,8 @@ version = "0.5.1" dependencies = [ "ansi_term 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)", "app_dirs 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "async-stream 0.1.1 
(registry+https://github.com/rust-lang/crates.io-index)", - "base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)", + "async-stream 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "base64 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", "battery 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", "bigdecimal 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "bson 0.14.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1509,24 +1517,24 @@ dependencies = [ "derive-new 0.5.8 (registry+https://github.com/rust-lang/crates.io-index)", "dirs 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "dunce 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-timer 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "futures_codec 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", - "getset 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-timer 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "futures_codec 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "getset 0.0.9 (registry+https://github.com/rust-lang/crates.io-index)", "git2 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)", "glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "heim 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", - "hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", + "hex 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "image 0.22.3 (registry+https://github.com/rust-lang/crates.io-index)", - "indexmap 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", 
+ "indexmap 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "itertools 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", "language-reporting 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "mime 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "natural 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "neso 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", "nom 5.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "nom-tracable 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "nom-tracable 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", "nom_locate 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "num-bigint 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1539,11 +1547,11 @@ dependencies = [ "ptree 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "rawkey 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "roxmltree 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "roxmltree 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", "rusqlite 0.20.0 (registry+https://github.com/rust-lang/crates.io-index)", "rustyline 5.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", "serde-hjson 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde_bytes 0.11.2 (registry+https://github.com/rust-lang/crates.io-index)", "serde_ini 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1551,18 
+1559,18 @@ dependencies = [ "serde_urlencoded 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde_yaml 0.8.11 (registry+https://github.com/rust-lang/crates.io-index)", "shellexpand 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "sublime_fuzzy 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sublime_fuzzy 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", "subprocess 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", - "surf 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "surf 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", "syntect 3.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "term 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", "textwrap 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", - "toml 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", + "toml 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)", "trash 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "which 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "which 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1573,7 +1581,7 @@ dependencies = [ "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1626,7 +1634,7 @@ name = "num_cpus" version = "1.10.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1671,7 +1679,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "onig_sys 69.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1696,7 +1704,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", "vcpkg 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1765,7 +1773,7 @@ dependencies = [ "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "line-wrap 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", "xml-rs 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1842,7 +1850,7 @@ dependencies = [ "directories 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "isatty 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", "petgraph 0.4.13 
(registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", "serde-value 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", "serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "tint 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1867,7 +1875,7 @@ version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "getrandom 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", "rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1918,7 +1926,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", "fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2023,7 +2031,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "roxmltree" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "xmlparser 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2082,7 +2090,7 @@ version = "5.0.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "dirs 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "nix 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2139,7 +2147,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "serde" -version = "1.0.101" +version = "1.0.102" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2175,7 +2183,7 @@ version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "ordered-float 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2183,7 +2191,7 @@ name = "serde_bytes" version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2202,7 +2210,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "result 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2211,10 +2219,10 @@ name = "serde_json" version = "1.0.41" source = 
"registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "indexmap 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "indexmap 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", "ryu 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2232,7 +2240,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "dtoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", "itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", "url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2243,7 +2251,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "dtoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", "linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", "yaml-rust 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2271,9 +2279,9 @@ name = "sluice" version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "futures-channel-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-io-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-channel-preview 0.3.0-alpha.19 
(registry+https://github.com/rust-lang/crates.io-index)", + "futures-core-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-io-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2287,7 +2295,7 @@ version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2309,7 +2317,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "sublime_fuzzy" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -2318,23 +2326,23 @@ version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "crossbeam-utils 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "surf" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "futures-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", "http 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", "isahc 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", "js-sys 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 
(registry+https://github.com/rust-lang/crates.io-index)", "mime 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "mime_guess 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "serde_urlencoded 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", "url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2378,7 +2386,7 @@ dependencies = [ "onig 4.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "plist 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", "serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "walkdir 2.2.9 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2391,7 +2399,7 @@ version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2414,7 +2422,7 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - 
"libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2456,7 +2464,7 @@ name = "time" version = "0.1.42" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2484,15 +2492,15 @@ name = "toml" version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "toml" -version = "0.5.3" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2655,8 +2663,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-channel-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-util-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-channel-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-util-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", "js-sys 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 
(registry+https://github.com/rust-lang/crates.io-index)", "wasm-bindgen 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2726,11 +2734,11 @@ dependencies = [ [[package]] name = "which" -version = "2.0.1" +version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2789,7 +2797,7 @@ name = "x11" version = "2.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2806,7 +2814,7 @@ name = "xcb" version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2841,13 +2849,14 @@ dependencies = [ "checksum app_dirs 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e73a24bad9bd6a94d6395382a6c69fe071708ae4409f763c5475e14ee896313d" "checksum arrayref 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "0d382e583f07208808f6b1249e60848879ba3543f57c32277bf52d69c2f0f0ee" "checksum arrayvec 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)" = "cd9fd44efafa8690358b7408d253adf110036b88f55672a933f01d616ad9b1b9" -"checksum async-stream 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "650be9b667e47506c42ee53034fb1935443cb2447a3a5c0a75e303d2e756fa73" +"checksum async-stream 0.1.2 
(registry+https://github.com/rust-lang/crates.io-index)" = "fb6fa015ebe961e9908ca4c1854e7dc7aabd4417da77b6a0466e4dfb4c8f6f69" "checksum async-stream-impl 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4f0d8c5b411e36dcfb04388bacfec54795726b1f0148adcb0f377a96d6747e0e" "checksum atty 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "1803c647a3ec87095e7ae7acfca019e98de5ec9a7d01343f611cf3152ed71a90" "checksum autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "b671c8fb71b457dd4ae18c4ba1e59aa81793daacc361d82fcd410cef0d491875" "checksum backtrace 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)" = "690a62be8920ccf773ee00ef0968649b0e724cda8bd5b12286302b4ae955fdf5" "checksum backtrace-sys 0.1.31 (registry+https://github.com/rust-lang/crates.io-index)" = "82a830b4ef2d1124a711c71d263c5abdc710ef8e907bd508c88be475cebc422b" "checksum base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0b25d992356d2eb0ed82172f5248873db5560c4721f564b13cb5193bda5e668e" +"checksum base64 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b41b7ea54a0c9d92199de89e20e58d49f02f8e699814ef3fdf266f6f748d15c7" "checksum battery 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6d6fe5630049e900227cd89afce4c1204b88ec8e61a2581bb96fcce26f047b" "checksum bigdecimal 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "460825c9e21708024d67c07057cd5560e5acdccac85de0de624a81d3de51bacb" "checksum bincode 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b8ab639324e3ee8774d296864fbc0dbbb256cf1a41c490b94cba90c082915f92" @@ -2916,17 +2925,18 @@ dependencies = [ "checksum fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2fad85553e09a6f881f739c29f0b00b0f01357c743266d478b68951ce23285f3" "checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" 
"checksum futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)" = "1b980f2816d6ee8673b6517b52cb0e808a180efc92e5c19d02cdda79066703ef" -"checksum futures-channel-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)" = "f477fd0292c4a4ae77044454e7f2b413207942ad405f759bb0b4698b7ace5b12" -"checksum futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)" = "4a2f26f774b81b3847dcda0c81bd4b6313acfb4f69e5a0390c7cb12c058953e9" -"checksum futures-executor-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)" = "80705612926df8a1bc05f0057e77460e29318801f988bf7d803a734cf54e7528" -"checksum futures-io-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)" = "ee7de0c1c9ed23f9457b0437fec7663ce64d9cc3c906597e714e529377b5ddd1" -"checksum futures-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)" = "efa8f90c4fb2328e381f8adfd4255b4a2b696f77d1c63a3dee6700b564c4e4b5" -"checksum futures-sink-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)" = "e9b65a2481863d1b78e094a07e9c0eed458cc7dc6e72b22b7138b8a67d924859" -"checksum futures-timer 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "878f1d2fc31355fa02ed2372e741b0c17e58373341e6a122569b4623a14a7d33" -"checksum futures-util-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)" = "7df53daff1e98cc024bf2720f3ceb0414d96fbb0a94f3cad3a5c3bf3be1d261c" -"checksum futures_codec 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "36552cd31353fd135114510d53b8d120758120c36aa636a9341970f9efb1e4a0" +"checksum futures-channel-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)" = "d5e5f4df964fa9c1c2f8bddeb5c3611631cacd93baf810fc8bb2fb4b495c263a" +"checksum futures-core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = 
"79564c427afefab1dfb3298535b21eda083ef7935b4f0ecbfcb121f0aec10866" +"checksum futures-core-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)" = "b35b6263fb1ef523c3056565fa67b1d16f0a8604ff12b11b08c25f28a734c60a" +"checksum futures-executor-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)" = "75236e88bd9fe88e5e8bfcd175b665d0528fe03ca4c5207fabc028c8f9d93e98" +"checksum futures-io-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)" = "f4914ae450db1921a56c91bde97a27846287d062087d4a652efc09bb3a01ebda" +"checksum futures-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)" = "3b1dce2a0267ada5c6ff75a8ba864b4e679a9e2aa44262af7a3b5516d530d76e" +"checksum futures-sink-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)" = "86f148ef6b69f75bb610d4f9a2336d4fc88c4b5b67129d1a340dd0fd362efeec" +"checksum futures-timer 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "527936b95e804a42c1cf05999e175892bde27464b459785a5fa2664a06ecb172" +"checksum futures-util-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)" = "5ce968633c17e5f97936bd2797b6e38fb56cf16a7422319f7ec2e30d3c470e8d" +"checksum futures_codec 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a736da44bcb6aa3acd8a5cebe8517a9d1dace7b1c6b1b8aa185e7cab168e8871" "checksum getrandom 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "473a1265acc8ff1e808cd0a1af8cee3c2ee5200916058a2ca113c29f2d903571" -"checksum getset 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "117a5b13aecd4e10161bb3feb22dda898e8552836c2391d8e4645d5e703ab866" +"checksum getset 0.0.9 (registry+https://github.com/rust-lang/crates.io-index)" = "5bb3f5b7d8d70c9bd23cf29b2b38094661418fb0ea79f1b0cc2019a11d6f5429" "checksum git2 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = 
"39f27186fbb5ec67ece9a56990292bc5aed3c3fc51b9b07b0b52446b1dfb4a82" "checksum glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" "checksum heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205" @@ -2948,13 +2958,13 @@ dependencies = [ "checksum humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "df004cfca50ef23c36850aaaa59ad52cc70d0e90243c3c7737a4dd32dc7a3c4f" "checksum idna 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "02e2673c30ee86b5b96a9cb52ad15718aa1f966f5ab9ad54a8b95d5ca33120a9" "checksum image 0.22.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7b4be8aaefbe7545dc42ae925afb55a0098f226a3fe5ef721872806f44f57826" -"checksum indexmap 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a61202fbe46c4a951e9404a720a0180bcf3212c750d735cb5c4ba4dc551299f3" +"checksum indexmap 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712d7b3ea5827fcb9d4fda14bf4da5f136f0db2ae9c8f4bd4e2d1c6fde4e6db2" "checksum inflate 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "1cdb29978cc5797bd8dcc8e5bf7de604891df2a8dc576973d71a281e916db2ff" "checksum iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e" "checksum isahc 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)" = "769f5071e5bf0b45489eefe0ec96b97328675db38d02ea5e923519d52e690cb8" "checksum isatty 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e31a8281fc93ec9693494da65fbf28c0c2aa60a2eaec25dc58e2f31952e95edc" "checksum itertools 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)" = "0d47946d458e94a1b7bcabbf6521ea7c037062c81f534615abcad76e84d4970d" -"checksum itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = 
"5b8467d9c1cebe26feb08c640139247fac215782d35371ade9a2136ed6085358" +"checksum itertools 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "87fa75c9dea7b07be3138c49abbb83fd4bea199b5cdc76f9804458edc5da0d6e" "checksum itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "501266b7edd0174f8530248f87f99c88fbe60ca4ef3dd486835b8d8d53136f7f" "checksum jobserver 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)" = "f2b1d42ef453b30b7387e113da1c83ab1605d90c5b4e0eb8e96d016ed3b8c160" "checksum jpeg-decoder 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "c1aae18ffeeae409c6622c3b6a7ee49792a7e5a062eea1b135fbb74e301792ba" @@ -2965,7 +2975,7 @@ dependencies = [ "checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" "checksum lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b294d6fa9ee409a054354afc4352b0b9ef7ca222c69b8812cbea9e7d2bf3783f" "checksum lexical-core 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2304bccb228c4b020f3a4835d247df0a02a7c4686098d4167762cfbbe4c5cb14" -"checksum libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)" = "34fcd2c08d2f832f376f4173a231990fa5aef4e99fb569867318a227ef4c06ba" +"checksum libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)" = "1a31a0627fdf1f6a39ec0dd577e101440b7db22672c0901fe00a9a6fbb5c24e8" "checksum libgit2-sys 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a30f8637eb59616ee3b8a00f6adff781ee4ddd8343a615b8238de756060cc1b3" "checksum libnghttp2-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "02254d44f4435dd79e695f2c2b83cd06a47919adea30216ceaf0c57ca0a72463" "checksum libsqlite3-sys 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5e5b95e89c330291768dc840238db7f9e204fd208511ab6319b56193a7f2ae25" @@ -2992,7 +3002,7 @@ dependencies = [ "checksum nodrop 
0.1.14 (registry+https://github.com/rust-lang/crates.io-index)" = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb" "checksum nom 4.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2ad2a91a8e869eeb30b9cb3119ae87773a8f4ae617f41b1eb9c154b2905f7bd6" "checksum nom 5.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c618b63422da4401283884e6668d39f819a106ef51f5f59b81add00075da35ca" -"checksum nom-tracable 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "edaa64ad2837d831d4a17966c9a83aa5101cc320730f5b724811c8f7442a2528" +"checksum nom-tracable 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4e9af1ee3bf4c9b842a720c53c0e7abb1b56a207e0b9bdbe7ff684b4cf630da1" "checksum nom-tracable-macros 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fd25f70877a9fe68bd406b3dd3ff99e94ce9de776cf2a96e0d99de90b53d4765" "checksum nom_locate 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f932834fd8e391fc7710e2ba17e8f9f8645d846b55aa63207e17e110a1e1ce35" "checksum ntapi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f26e041cd983acbc087e30fcba770380cfa352d0e392e175b2344ebaf7ea0602" @@ -3050,7 +3060,7 @@ dependencies = [ "checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e" "checksum render-tree 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "68ed587df09cfb7ce1bc6fe8f77e24db219f222c049326ccbfb948ec67e31664" "checksum result 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "194d8e591e405d1eecf28819740abed6d719d1a2db87fc0bcdedee9a26d55560" -"checksum roxmltree 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b1a3193e568c6e262f817fd07af085c7f79241a947aedd3779d47eadc170e174" +"checksum roxmltree 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = 
"e9afcba5553fd1c7e55ebf48a645f44ba766a32cd85e3eb54fc70158228f0c51" "checksum rusqlite 0.20.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2a194373ef527035645a1bc21b10dc2125f73497e6e155771233eb187aedd051" "checksum rust-argon2 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4ca4eaef519b494d1f2848fc602d18816fed808a981aedf4f1f00ceb7c9d32cf" "checksum rust-ini 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3e52c148ef37f8c375d49d5a73aa70713125b7f19095948a923f80afdeb22ec2" @@ -3065,7 +3075,7 @@ dependencies = [ "checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" "checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" "checksum serde 0.8.23 (registry+https://github.com/rust-lang/crates.io-index)" = "9dad3f759919b92c3068c696c15c3d17238234498bbdcc80f2c469606f948ac8" -"checksum serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)" = "9796c9b7ba2ffe7a9ce53c2287dfc48080f4b2b362fcc245a259b3a7201119dd" +"checksum serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)" = "0c4b39bd9b0b087684013a792c59e3e07a46a01d2322518d8a1104641a0b1be0" "checksum serde-hjson 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0b833c5ad67d52ced5f5938b2980f32a9c1c5ef047f0b4fb3127e7a423c76153" "checksum serde-hjson 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6a3a4e0ea8a88553209f6cc6cfe8724ecad22e1acf372793c27d995290fe74f8" "checksum serde-value 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7a663f873dedc4eac1a559d4c6bc0d0b2c34dc5ac4702e105014b8281489e44f" @@ -3085,9 +3095,9 @@ dependencies = [ "checksum sourcefile 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "4bf77cb82ba8453b42b6ae1d692e4cdc92f9a47beaf89a847c8be83f4e328ad3" "checksum 
static_assertions 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "7f3eb36b47e512f8f1c9e3d10c2c1965bc992bd9cdb024fa581e2194501c83d3" "checksum strsim 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" -"checksum sublime_fuzzy 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "97bd7ad698ea493a3a7f60c2ffa117c234f341e09f8cc2d39cef10cdde077acf" +"checksum sublime_fuzzy 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bdac3d983d073c19487ba1f5e16eda43e9c6e50aa895d87110d0febe389b66b9" "checksum subprocess 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)" = "28fc0f40f0c0da73339d347aa7d6d2b90341a95683a47722bc4eebed71ff3c00" -"checksum surf 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "018eed64aede455beb88505d50c5c64882bebbe0996d4b660c272e3d8bb6f883" +"checksum surf 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "741a8008f8a833ef16f47df94a30754478fb2c2bf822b9c2e6f7f09203b97ace" "checksum syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf" "checksum synstructure 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3f085a5855930c0441ca1288cf044ea4aecf4f43a91668abdb870b4ba546a203" "checksum syntect 3.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e80b8831c5a543192ffc3727f01cf0e57579c6ac15558e3048bfb5708892167b" @@ -3102,7 +3112,7 @@ dependencies = [ "checksum tint 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7af24570664a3074673dbbf69a65bdae0ae0b72f2949b1adfbacb736ee4d6896" "checksum tokio-io 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "5090db468dad16e1a7a54c8c67280c5e4b544f3d3e018f0b913b400261f85926" "checksum toml 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)" = 
"758664fc71a3a69038656bee8b6be6477d2a6c315a6b81f7081f591bffa4111f" -"checksum toml 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c7aabe75941d914b72bf3e5d3932ed92ce0664d49d8432305a8b547c37227724" +"checksum toml 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)" = "01d1404644c8b12b16bfcffa4322403a91a451584daaaa7c28d3152e6cbc98cf" "checksum trash 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f2f24d31505f49e989b1ee2c03c323251f6763d5907d471b71192dac92e323f8" "checksum typenum 1.11.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6d2783fe2d6b8c1101136184eb41be8b1ad379e4657050b8aaff0c79ee7575f9" "checksum unicase 2.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2e2e6bd1e59e56598518beb94fd6db628ded570326f0a98c679a304bd9f00150" @@ -3131,7 +3141,7 @@ dependencies = [ "checksum wasm-bindgen-webidl 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)" = "9b979afb0535fe4749906a674082db1211de8aef466331d43232f63accb7c07c" "checksum web-sys 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)" = "c84440699cd02ca23bed6f045ffb1497bc18a3c2628bd13e2093186faaaacf6b" "checksum weedle 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3bb43f70885151e629e2a19ce9e50bd730fd436cfd4b666894c9ce4de9141164" -"checksum which 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b57acb10231b9493c8472b20cb57317d0679a49e0bdbee44b3b803a6473af164" +"checksum which 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5475d47078209a02e60614f7ba5e645ef3ed60f771920ac1906d7c1cc65024c8" "checksum widestring 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "effc0e4ff8085673ea7b9b2e3c73f6bd4d118810c9009ed8f1e16bd96c331db6" "checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" "checksum winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = 
"8093091eeb260906a183e6ae1abdba2ef5ef2257a21801128899c3fc699229c6" diff --git a/Cargo.toml b/Cargo.toml index e9686bec98..1ac3c22ccd 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -18,61 +18,61 @@ rustyline = "5.0.4" chrono = { version = "0.4.9", features = ["serde"] } derive-new = "0.5.8" prettytable-rs = "0.8.0" -itertools = "0.8.0" +itertools = "0.8.1" ansi_term = "0.12.1" -nom = "5.0.0" +nom = "5.0.1" dunce = "1.0.0" -indexmap = { version = "1.2.0", features = ["serde-1"] } +indexmap = { version = "1.3.0", features = ["serde-1"] } chrono-humanize = "0.0.11" -byte-unit = "3.0.1" -base64 = "0.10.1" -futures-preview = { version = "=0.3.0-alpha.18", features = ["compat", "io-compat"] } -async-stream = "0.1.1" -futures_codec = "0.2.5" +byte-unit = "3.0.3" +base64 = "0.11" +futures-preview = { version = "=0.3.0-alpha.19", features = ["compat", "io-compat"] } +async-stream = "0.1.2" +futures_codec = "=0.3.0" num-traits = "0.2.8" term = "0.5.2" bytes = "0.4.12" log = "0.4.8" pretty_env_logger = "0.3.1" -serde = { version = "1.0.100", features = ["derive"] } +serde = { version = "1.0.102", features = ["derive"] } bson = { version = "0.14.0", features = ["decimal128"] } -serde_json = "1.0.40" +serde_json = "1.0.41" serde-hjson = "0.9.1" serde_yaml = "0.8" serde_bytes = "0.11.2" -getset = "0.0.8" +getset = "0.0.9" language-reporting = "0.4.0" app_dirs = "1.2.1" csv = "1.1" -toml = "0.5.3" +toml = "0.5.5" clap = "2.33.0" git2 = { version = "0.10.1", default_features = false } dirs = "2.0.2" glob = "0.3.0" ctrlc = "3.1.3" -surf = "1.0.2" +surf = "1.0.3" url = "2.1.0" -roxmltree = "0.7.0" +roxmltree = "0.7.2" nom_locate = "1.0.0" -nom-tracable = "0.4.0" +nom-tracable = "0.4.1" unicode-xid = "0.2.0" serde_ini = "0.2.0" subprocess = "0.1.18" mime = "0.3.14" -pretty-hex = "0.1.0" -hex = "0.3.2" +pretty-hex = "0.1.1" +hex = "0.4" tempfile = "3.1.0" semver = "0.9.0" -which = "2.0.1" +which = "3.1" textwrap = {version = "0.11.0", features = ["term_size"]} shellexpand = "1.0.0" 
-futures-timer = "0.4.0" +futures-timer = "2.0.0" pin-utils = "0.1.0-alpha.4" num-bigint = { version = "0.2.3", features = ["serde"] } bigdecimal = { version = "0.1.0", features = ["serde"] } natural = "0.3.0" serde_urlencoded = "0.6.1" -sublime_fuzzy = "0.5" +sublime_fuzzy = "0.6" trash = "1.0.0" regex = "1" cfg-if = "0.1" @@ -106,8 +106,8 @@ features = ["bundled", "blob"] pretty_assertions = "0.6.1" [build-dependencies] -toml = "0.5.3" -serde = { version = "1.0.101", features = ["derive"] } +toml = "0.5.5" +serde = { version = "1.0.102", features = ["derive"] } [lib] name = "nu" diff --git a/src/plugins/ps.rs b/src/plugins/ps.rs index 2db73d395a..69c7fa1b43 100644 --- a/src/plugins/ps.rs +++ b/src/plugins/ps.rs @@ -20,7 +20,7 @@ impl Ps { async fn usage(process: Process) -> ProcessResult<(process::Process, Ratio)> { let usage_1 = process.cpu_usage().await?; - futures_timer::Delay::new(Duration::from_millis(100)).await?; + futures_timer::Delay::new(Duration::from_millis(100)).await; let usage_2 = process.cpu_usage().await?; Ok((process, usage_2 - usage_1)) From 7cf3c6eb951772322cab00f6084c3fd9d730c535 Mon Sep 17 00:00:00 2001 From: Fahmi Akbar Wildana Date: Mon, 11 Nov 2019 07:51:41 +0700 Subject: [PATCH 166/184] Move env declaration to jobs.docker --- .github/workflows/docker-publish.yml | 34 ++++++++++++---------------- docker/docker-compose.package.yml | 2 +- 2 files changed, 16 insertions(+), 20 deletions(-) diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml index 0deca68a42..ef731e26c6 100644 --- a/.github/workflows/docker-publish.yml +++ b/.github/workflows/docker-publish.yml @@ -35,6 +35,10 @@ jobs: name: Build and publish docker images needs: compile runs-on: ubuntu-latest + env: + DOCKER_REGISTRY: quay.io/nushell + DOCKER_PASSWORD: ${{ secrets.DOCKER_REGISTRY }} + DOCKER_USER: nushell+action # TBD strategy: matrix: tag: @@ -63,19 +67,16 @@ jobs: with: { name: '${{ matrix.arch }}', path: target/release } - name: Build 
and publish exact version run: |- - REGISTRY=${REGISTRY,,}; export TAG=${GITHUB_REF##*/}-${{ matrix.tag }} + export DOCKER_TAG=${GITHUB_REF##*/}-${{ matrix.tag }} export NU_BINS=target/release/$( [ ${{ matrix.plugin }} = true ] && echo nu* || echo nu ) export PATCH=$([ ${{ matrix.use-patch }} = true ] && echo .${{ matrix.tag }} || echo '') chmod +x $NU_BINS - echo ${{ secrets.DOCKER_REGISTRY }} | docker login ${REGISTRY%/*} -u ${USER,,} --password-stdin + echo ${DOCKER_PASSWORD} | docker login ${DOCKER_REGISTRY} -u ${DOCKER_USER} --password-stdin docker-compose --file docker/docker-compose.package.yml build docker-compose --file docker/docker-compose.package.yml push # exact version env: BASE_IMAGE: ${{ matrix.base-image }} - # REGISTRY: docker.pkg.github.com/${{ github.repository }} #TODO: waiting support for GITHUB_TOKEN for docker.pkg.github.com - USER: ${{ github.actor }}+action - REGISTRY: quay.io/${{ github.actor }} #region semantics tagging - name: Retag and push with suffixed version @@ -90,16 +91,14 @@ jobs: tags=( ${latest_version} ${latest_feature} ${latest_patch} ${exact_version} ) for tag in ${tags[@]}; do - docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${tag} - docker push ${REGISTRY,,}/nu:${tag} + docker tag ${DOCKER_REGISTRY}/nu:${VERSION}-${{ matrix.tag }} ${DOCKER_REGISTRY}/nu:${tag} + docker push ${DOCKER_REGISTRY}/nu:${tag} done # latest version - docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${{ matrix.tag }} - docker push ${REGISTRY,,}/nu:${{ matrix.tag }} - env: - # REGISTRY: 'docker.pkg.github.com/${{ github.repository }}' #TODO: waiting support for GITHUB_TOKEN for docker.pkg.github.com - REGISTRY: quay.io/${{ github.actor }} + docker tag ${DOCKER_REGISTRY}/nu:${VERSION}-${{ matrix.tag }} ${DOCKER_REGISTRY}/nu:${{ matrix.tag }} + docker push ${DOCKER_REGISTRY}/nu:${{ matrix.tag }} + - name: Retag and push debian as latest if: matrix.tag == 'debian' run: |- @@ -109,14 +108,11 @@ jobs: 
tags=( ${VERSION%%.*} ${VERSION%.*} ${VERSION} ) for tag in ${tags[@]}; do - docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${tag} - docker push ${REGISTRY,,}/nu:${tag} + docker tag ${DOCKER_REGISTRY}/nu:${VERSION}-${{ matrix.tag }} ${DOCKER_REGISTRY}/nu:${tag} + docker push ${DOCKER_REGISTRY}/nu:${tag} done # latest version - docker tag ${REGISTRY,,}/nu:${{ matrix.tag }} ${REGISTRY,,}/nu:latest - docker push ${REGISTRY,,}/nu:latest - env: - # REGISTRY: 'docker.pkg.github.com/${{ github.repository }}' #TODO: waiting support for GITHUB_TOKEN for docker.pkg.github.com - REGISTRY: quay.io/${{ github.actor }} + docker tag ${DOCKER_REGISTRY}/nu:${{ matrix.tag }} ${DOCKER_REGISTRY}/nu:latest + docker push ${DOCKER_REGISTRY}/nu:latest #endregion semantics tagging diff --git a/docker/docker-compose.package.yml b/docker/docker-compose.package.yml index 9be36544eb..a2ad90b6bb 100644 --- a/docker/docker-compose.package.yml +++ b/docker/docker-compose.package.yml @@ -2,7 +2,7 @@ version: '3' services: nushell: - image: ${REGISTRY}/nu:${TAG} + image: ${DOCKER_REGISTRY}/nu:${DOCKER_TAG} build: context: .. dockerfile: docker/Package${PATCH}.Dockerfile From b39c2e2f75b5dc12d78891b2840c90873d0828d2 Mon Sep 17 00:00:00 2001 From: Sean Hellum Date: Mon, 11 Nov 2019 18:17:55 +0000 Subject: [PATCH 167/184] edit install cmd --- .gitpod.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitpod.yml b/.gitpod.yml index cb90ac541e..9f675b812a 100644 --- a/.gitpod.yml +++ b/.gitpod.yml @@ -1,7 +1,7 @@ image: file: .gitpod.Dockerfile tasks: - - init: cargo install --path . 
+ - init: cargo install nu command: nu github: prebuilds: From f3d056110a8cb038baf38795749e0cc869506aba Mon Sep 17 00:00:00 2001 From: Vanessasaurus Date: Mon, 11 Nov 2019 13:33:52 -0500 Subject: [PATCH 168/184] DOCKER_USER should come from secrets --- .github/workflows/docker-publish.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml index ef731e26c6..f803d6e343 100644 --- a/.github/workflows/docker-publish.yml +++ b/.github/workflows/docker-publish.yml @@ -38,7 +38,7 @@ jobs: env: DOCKER_REGISTRY: quay.io/nushell DOCKER_PASSWORD: ${{ secrets.DOCKER_REGISTRY }} - DOCKER_USER: nushell+action # TBD + DOCKER_USER: ${{ secrets.DOCKER_USER }} strategy: matrix: tag: From 21f48577aeb4bc483b5959788376c80e97c63475 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Sun, 3 Nov 2019 20:55:34 -0500 Subject: [PATCH 169/184] Reductions placeholder. --- src/cli.rs | 11 ++++- src/commands.rs | 16 ++++-- src/commands/reduce_by.rs | 100 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 121 insertions(+), 6 deletions(-) create mode 100644 src/commands/reduce_by.rs diff --git a/src/cli.rs b/src/cli.rs index 483ee332ea..5af74b132b 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -323,10 +323,17 @@ pub async fn cli() -> Result<(), Box> { whole_stream_command(Table), whole_stream_command(Version), whole_stream_command(Which), - #[cfg(data_processing_primitives)] - whole_stream_command(SplitBy), ]); + cfg_if::cfg_if! 
{ + if #[cfg(data_processing_primitives)] { + context.add_commands(vec![ + whole_stream_command(SplitBy), + whole_stream_command(ReduceBy), + ]); + } + } + #[cfg(feature = "clipboard")] { context.add_commands(vec![whole_stream_command( diff --git a/src/commands.rs b/src/commands.rs index 7c2c188629..73a4b7244a 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -58,8 +58,12 @@ pub(crate) mod size; pub(crate) mod skip_while; pub(crate) mod sort_by; -#[cfg(data_processing_primitives)] -pub(crate) mod split_by; +cfg_if::cfg_if! { + if #[cfg(data_processing_primitives)] { + pub(crate) mod split_by; + pub(crate) mod reduce_by; + } +} pub(crate) mod split_column; pub(crate) mod split_row; @@ -138,8 +142,12 @@ pub(crate) use size::Size; pub(crate) use skip_while::SkipWhile; pub(crate) use sort_by::SortBy; -#[cfg(data_processing_primitives)] -pub(crate) use split_by::SplitBy; +cfg_if::cfg_if! { + if #[cfg(data_processing_primitives)] { + pub(crate) use split_by::SplitBy; + pub(crate) use reduce_by::ReduceBy; + } +} pub(crate) use split_column::SplitColumn; pub(crate) use split_row::SplitRow; diff --git a/src/commands/reduce_by.rs b/src/commands/reduce_by.rs new file mode 100644 index 0000000000..de64caac15 --- /dev/null +++ b/src/commands/reduce_by.rs @@ -0,0 +1,100 @@ +use crate::commands::WholeStreamCommand; +use crate::data::TaggedDictBuilder; +use crate::parser::hir::SyntaxShape; +use crate::parser::registry; +use crate::data::base::Block; +use crate::prelude::*; + +use log::trace; + +pub struct ReduceBy; + +#[derive(Deserialize)] +pub struct ReduceByArgs { + calculator: Block, +} + +impl WholeStreamCommand for ReduceBy { + fn name(&self) -> &str { + "reduce-by" + } + + fn signature(&self) -> Signature { + Signature::build("reduce-by").required( + "calculator", + SyntaxShape::Block, + "The block used for calculating values", + ) + } + + fn usage(&self) -> &str { + "Crates a new table with the data from the table rows reduced by the block given." 
+ } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + args.process(registry, reduce_by)?.run() + } +} + +pub fn reduce_by( + ReduceByArgs { calculator }: ReduceByArgs, + RunnableContext { input, name, .. }: RunnableContext, +) -> Result { + let stream = async_stream! { + let values: Vec> = input.values.collect().await; + + trace!("{:?}", &calculator); + + if values.is_empty() { + yield Err(ShellError::labeled_error( + "Expected table from pipeline", + "requires a table input", + name + )) + } else { + match reduce(values, &calculator, name) { + Ok(reduced) => yield ReturnSuccess::value(reduced), + Err(err) => yield Err(err) + } + } + }; + + Ok(stream.to_output_stream()) +} + +pub fn reduce( + values: Vec>, + calculator: &Block, + tag: impl Into, +) -> Result, ShellError> { + let tag = tag.into(); + + let mut out = TaggedDictBuilder::new(&tag); + + Ok(out.into_tagged_value()) +} + +#[cfg(test)] +mod tests { + + use crate::commands::reduce_by::reduce; + use crate::data::meta::*; + use crate::Value; + use indexmap::IndexMap; + + fn string(input: impl Into) -> Tagged { + Value::string(input.into()).tagged_unknown() + } + + fn row(entries: IndexMap>) -> Tagged { + Value::row(entries).tagged_unknown() + } + + fn table(list: &Vec>) -> Tagged { + Value::table(list).tagged_unknown() + } +} From 3163b0d362ca771e102ff09843f0cdd3f69b8d4d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Tue, 12 Nov 2019 02:07:43 -0500 Subject: [PATCH 170/184] Data processing mvp histogram. 
--- features.toml | 2 +- src/cli.rs | 4 + src/commands.rs | 8 + src/commands/evaluate_by.rs | 260 ++++++++++++++++++++++++++ src/commands/group_by.rs | 42 ++++- src/commands/histogram.rs | 148 +++++++++++++++ src/commands/map_max_by.rs | 227 +++++++++++++++++++++++ src/commands/reduce_by.rs | 199 +++++++++++++++++--- src/commands/split_by.rs | 68 ++++--- src/commands/t_sort_by.rs | 358 ++++++++++++++++++++++++++++++++++++ src/data/base.rs | 1 + src/data/dict.rs | 2 +- 12 files changed, 1262 insertions(+), 57 deletions(-) create mode 100644 src/commands/evaluate_by.rs create mode 100644 src/commands/histogram.rs create mode 100644 src/commands/map_max_by.rs create mode 100644 src/commands/t_sort_by.rs diff --git a/features.toml b/features.toml index e1cf56e33d..6dd7a26c36 100644 --- a/features.toml +++ b/features.toml @@ -18,4 +18,4 @@ description = "Groundwork so tables can be data processed" reason = """ These will allow take tables and be able to transform, process, and explore. """ -enabled = false \ No newline at end of file +enabled = false diff --git a/src/cli.rs b/src/cli.rs index 5af74b132b..c6995ef711 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -330,6 +330,10 @@ pub async fn cli() -> Result<(), Box> { context.add_commands(vec![ whole_stream_command(SplitBy), whole_stream_command(ReduceBy), + whole_stream_command(EvaluateBy), + whole_stream_command(TSortBy), + whole_stream_command(MapMaxBy), + whole_stream_command(Histogram), ]); } } diff --git a/src/commands.rs b/src/commands.rs index 73a4b7244a..629289b565 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -62,6 +62,10 @@ cfg_if::cfg_if! { if #[cfg(data_processing_primitives)] { pub(crate) mod split_by; pub(crate) mod reduce_by; + pub(crate) mod evaluate_by; + pub(crate) mod t_sort_by; + pub(crate) mod map_max_by; + pub(crate) mod histogram; } } @@ -146,6 +150,10 @@ cfg_if::cfg_if! 
{ if #[cfg(data_processing_primitives)] { pub(crate) use split_by::SplitBy; pub(crate) use reduce_by::ReduceBy; + pub(crate) use evaluate_by::EvaluateBy; + pub(crate) use t_sort_by::TSortBy; + pub(crate) use map_max_by::MapMaxBy; + pub(crate) use histogram::Histogram; } } diff --git a/src/commands/evaluate_by.rs b/src/commands/evaluate_by.rs new file mode 100644 index 0000000000..f4925917c4 --- /dev/null +++ b/src/commands/evaluate_by.rs @@ -0,0 +1,260 @@ +use crate::commands::WholeStreamCommand; +use crate::parser::hir::SyntaxShape; +use crate::prelude::*; +pub struct EvaluateBy; + +#[derive(Deserialize)] +pub struct EvaluateByArgs { + evaluate_with: Option>, +} + +impl WholeStreamCommand for EvaluateBy { + fn name(&self) -> &str { + "evaluate-by" + } + + fn signature(&self) -> Signature { + Signature::build("evaluate-by").named( + "evaluate_with", + SyntaxShape::String, + "the name of the column to evaluate by", + ) + } + + fn usage(&self) -> &str { + "Creates a new table with the data from the tables rows evaluated by the column given." + } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + args.process(registry, evaluate_by)?.run() + } +} + +pub fn evaluate_by( + EvaluateByArgs { evaluate_with }: EvaluateByArgs, + RunnableContext { input, name, .. }: RunnableContext, +) -> Result { + let stream = async_stream! 
{ + let values: Vec> = input.values.collect().await; + + + if values.is_empty() { + yield Err(ShellError::labeled_error( + "Expected table from pipeline", + "requires a table input", + name + )) + } else { + + let evaluate_with = if let Some(evaluator) = evaluate_with { + Some(evaluator.item().clone()) + } else { + None + }; + + match evaluate(&values[0], evaluate_with, name) { + Ok(evaluated) => yield ReturnSuccess::value(evaluated), + Err(err) => yield Err(err) + } + } + }; + + Ok(stream.to_output_stream()) +} + +fn fetch( + key: Option, +) -> Box, Tag) -> Option> + 'static> { + Box::new(move |value: Tagged, tag| match key { + Some(ref key_given) => { + if let Some(Tagged { item, .. }) = value.get_data_by_key(&key_given) { + Some(item.clone().tagged(tag)) + } else { + None + } + } + None => Some(Value::int(1).tagged(tag)), + }) +} + +pub fn evaluate( + values: &Tagged, + evaluator: Option, + tag: impl Into, +) -> Result, ShellError> { + let tag = tag.into(); + + let evaluate_with = match evaluator { + Some(keyfn) => fetch(Some(keyfn)), + None => fetch(None), + }; + + let results: Tagged = match values { + Tagged { + item: Value::Table(datasets), + .. + } => { + let datasets: Vec<_> = datasets + .into_iter() + .map(|subsets| match subsets { + Tagged { + item: Value::Table(subsets), + .. + } => { + let subsets: Vec<_> = subsets + .clone() + .into_iter() + .map(|data| match data { + Tagged { + item: Value::Table(data), + .. 
+ } => { + let data: Vec<_> = data + .into_iter() + .map(|x| evaluate_with(x.clone(), tag.clone()).unwrap()) + .collect(); + Value::Table(data).tagged(&tag) + } + _ => Value::Table(vec![]).tagged(&tag), + }) + .collect(); + Value::Table(subsets).tagged(&tag) + } + _ => Value::Table(vec![]).tagged(&tag), + }) + .collect(); + + Value::Table(datasets.clone()).tagged(&tag) + } + _ => Value::Table(vec![]).tagged(&tag), + }; + + Ok(results) +} + +#[cfg(test)] +mod tests { + + use crate::commands::evaluate_by::{evaluate, fetch}; + use crate::commands::group_by::group; + use crate::commands::t_sort_by::t_sort; + use crate::data::meta::*; + use crate::prelude::*; + use crate::Value; + use indexmap::IndexMap; + + fn int(s: impl Into) -> Tagged { + Value::int(s).tagged_unknown() + } + + fn string(input: impl Into) -> Tagged { + Value::string(input.into()).tagged_unknown() + } + + fn row(entries: IndexMap>) -> Tagged { + Value::row(entries).tagged_unknown() + } + + fn table(list: &Vec>) -> Tagged { + Value::table(list).tagged_unknown() + } + + fn nu_releases_sorted_by_date() -> Tagged { + let key = String::from("date"); + + t_sort( + Some(key), + None, + &nu_releases_grouped_by_date(), + Tag::unknown(), + ) + .unwrap() + } + + fn nu_releases_grouped_by_date() -> Tagged { + let key = String::from("date").tagged_unknown(); + group(&key, nu_releases_commiters(), Tag::unknown()).unwrap() + } + + fn nu_releases_commiters() -> Vec> { + vec![ + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! 
{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("September 24-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("September 24-2019")}, + ), + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("September 24-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}, + ), + ] + } + + #[test] + fn evaluator_fetches_by_column_if_supplied_a_column_name() { + let subject = row(indexmap! { "name".into() => string("andres") }); + + let evaluator = fetch(Some(String::from("name"))); + + assert_eq!(evaluator(subject, Tag::unknown()), Some(string("andres"))); + } + + #[test] + fn evaluator_returns_1_if_no_column_name_given() { + let subject = row(indexmap! 
{ "name".into() => string("andres") }); + let evaluator = fetch(None); + + assert_eq!( + evaluator(subject, Tag::unknown()), + Some(Value::int(1).tagged_unknown()) + ); + } + + #[test] + fn evaluates_the_tables() { + assert_eq!( + evaluate(&nu_releases_sorted_by_date(), None, Tag::unknown()).unwrap(), + table(&vec![table(&vec![ + table(&vec![int(1), int(1), int(1)]), + table(&vec![int(1), int(1), int(1)]), + table(&vec![int(1), int(1), int(1)]), + ]),]) + ); + } + + #[test] + fn evaluates_the_tables_with_custom_evaluator() { + let eval = String::from("name"); + + assert_eq!( + evaluate(&nu_releases_sorted_by_date(), Some(eval), Tag::unknown()).unwrap(), + table(&vec![table(&vec![ + table(&vec![string("AR"), string("JT"), string("YK")]), + table(&vec![string("AR"), string("YK"), string("JT")]), + table(&vec![string("YK"), string("JT"), string("AR")]), + ]),]) + ); + } +} diff --git a/src/commands/group_by.rs b/src/commands/group_by.rs index 66c1360f5d..07e74841b1 100644 --- a/src/commands/group_by.rs +++ b/src/commands/group_by.rs @@ -131,11 +131,8 @@ mod tests { Value::table(list).tagged_unknown() } - #[test] - fn groups_table_by_key() { - let for_key = String::from("date").tagged_unknown(); - - let nu_releases = vec![ + fn nu_releases_commiters() -> Vec> { + vec![ row( indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}, ), @@ -163,10 +160,15 @@ mod tests { row( indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}, ), - ]; + ] + } + + #[test] + fn groups_table_by_date_column() { + let for_key = String::from("date").tagged_unknown(); assert_eq!( - group(&for_key, nu_releases, Tag::unknown()).unwrap(), + group(&for_key, nu_releases_commiters(), Tag::unknown()).unwrap(), row(indexmap! 
{ "August 23-2019".into() => table(&vec![ row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}), @@ -186,4 +188,30 @@ mod tests { }) ); } + + #[test] + fn groups_table_by_country_column() { + let for_key = String::from("country").tagged_unknown(); + + assert_eq!( + group(&for_key, nu_releases_commiters(), Tag::unknown()).unwrap(), + row(indexmap! { + "EC".into() => table(&vec![ + row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}), + row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")}), + row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")}) + ]), + "NZ".into() => table(&vec![ + row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}), + row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}), + row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")}) + ]), + "US".into() => table(&vec![ + row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}), + row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")}), + row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}), + ]), + }) + ); + } } diff --git a/src/commands/histogram.rs b/src/commands/histogram.rs new file mode 100644 index 0000000000..52d72bdfbf --- /dev/null +++ b/src/commands/histogram.rs @@ -0,0 +1,148 @@ +use crate::commands::WholeStreamCommand; +use crate::commands::group_by::group; +use crate::commands::t_sort_by::columns_sorted; +use 
crate::commands::t_sort_by::t_sort; +use crate::commands::evaluate_by::evaluate; +use crate::commands::reduce_by::reduce; +use crate::commands::map_max_by::map_max; +use crate::data::TaggedDictBuilder; +use crate::errors::ShellError; +use crate::prelude::*; +use num_traits::cast::ToPrimitive; + +pub struct Histogram; + +#[derive(Deserialize)] +pub struct HistogramArgs { + column_name: Tagged, +} + +impl WholeStreamCommand for Histogram { + fn name(&self) -> &str { + "histogram" + } + + fn signature(&self) -> Signature { + Signature::build("histogram").required( + "column_name", + SyntaxShape::String, + "the name of the column to graph by", + ) + } + + fn usage(&self) -> &str { + "Creates a new table with a histogram based on the column name passed in." + } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + args.process(registry, histogram)?.run() + } +} + +pub fn histogram( + HistogramArgs { column_name }: HistogramArgs, + RunnableContext { input, name, .. }: RunnableContext, +) -> Result { + let stream = async_stream! { + let values: Vec> = input.values.collect().await; + + let Tagged { item: group_by, .. } = column_name.clone(); + + let groups = group(&column_name, values, &name)?; + let group_labels = columns_sorted(Some(group_by.clone()), &groups, &name); + let sorted = t_sort(Some(group_by.clone()), None, &groups, &name)?; + let evaled = evaluate(&sorted, None, &name)?; + let reduced = reduce(&evaled, None, &name)?; + let maxima = map_max(&reduced, None, &name)?; + let percents = percentages(&reduced, maxima, &name)?; + + match percents { + Tagged { + item: Value::Table(datasets), + .. + } => { + + let mut idx = 0; + + if let Tagged { item: Value::Table(start), .. 
} = datasets.get(0).unwrap() { + for percentage in start.into_iter() { + let mut fact = TaggedDictBuilder::new(&name); + fact.insert_tagged("committer", group_labels.get(idx).unwrap().clone()); + + if let Tagged { item: Value::Primitive(Primitive::Int(ref num)), .. } = percentage.clone() { + fact.insert("activity", std::iter::repeat("*").take(num.to_i32().unwrap() as usize).collect::()); + } + + idx = idx + 1; + + yield ReturnSuccess::value(fact.into_tagged_value()); + } + } + } + _ => {} + } + }; + + Ok(stream.to_output_stream()) +} + +fn percentages( + values: &Tagged, + max: Tagged, + tag: impl Into, +) -> Result, ShellError> { + let tag = tag.into(); + + let results: Tagged = match values { + Tagged { + item: Value::Table(datasets), + .. + } => { + let datasets: Vec<_> = datasets + .into_iter() + .map(|subsets| { + match subsets { + Tagged { + item: Value::Table(data), + .. + } => { + let data = data + .into_iter() + .map(|d| match d { + Tagged { + item: Value::Primitive(Primitive::Int(n)), + .. + } => { + let max = match max { + Tagged { + item: Value::Primitive(Primitive::Int(ref maxima)), + .. 
+ } => maxima.to_i32().unwrap(), + _ => 0, + }; + + let n = { n.to_i32().unwrap() * 100 / max }; + + Value::number(n).tagged(&tag) + } + _ => Value::number(0).tagged(&tag), + }) + .collect::>(); + Value::Table(data).tagged(&tag) + } + _ => Value::Table(vec![]).tagged(&tag), + } + }) + .collect(); + + Value::Table(datasets).tagged(&tag) + } + other => other.clone(), + }; + + Ok(results) +} diff --git a/src/commands/map_max_by.rs b/src/commands/map_max_by.rs new file mode 100644 index 0000000000..31a02a81b1 --- /dev/null +++ b/src/commands/map_max_by.rs @@ -0,0 +1,227 @@ +use crate::commands::WholeStreamCommand; +use crate::parser::hir::SyntaxShape; +use crate::prelude::*; +use num_traits::cast::ToPrimitive; +pub struct MapMaxBy; + +#[derive(Deserialize)] +pub struct MapMaxByArgs { + column_name: Option>, +} + +impl WholeStreamCommand for MapMaxBy { + fn name(&self) -> &str { + "map-max-by" + } + + fn signature(&self) -> Signature { + Signature::build("map-max-by").named( + "column_name", + SyntaxShape::String, + "the name of the column to map-max the table's rows", + ) + } + + fn usage(&self) -> &str { + "Creates a new table with the data from the tables rows maxed by the column given." + } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + args.process(registry, map_max_by)?.run() + } +} + +pub fn map_max_by( + MapMaxByArgs { column_name }: MapMaxByArgs, + RunnableContext { input, name, .. }: RunnableContext, +) -> Result { + let stream = async_stream! 
{ + let values: Vec> = input.values.collect().await; + + + if values.is_empty() { + yield Err(ShellError::labeled_error( + "Expected table from pipeline", + "requires a table input", + name + )) + } else { + + let map_by_column = if let Some(column_to_map) = column_name { + Some(column_to_map.item().clone()) + } else { + None + }; + + match map_max(&values[0], map_by_column, name) { + Ok(table_maxed) => yield ReturnSuccess::value(table_maxed), + Err(err) => yield Err(err) + } + } + }; + + Ok(stream.to_output_stream()) +} + +pub fn map_max( + values: &Tagged, + _map_by_column_name: Option, + tag: impl Into, +) -> Result, ShellError> { + let tag = tag.into(); + + let results: Tagged = match values { + Tagged { + item: Value::Table(datasets), + .. + } => { + let datasets: Vec<_> = datasets + .into_iter() + .map(|subsets| { + match subsets { + Tagged { + item: Value::Table(data), + .. + } => { + let data = data.into_iter().fold(0, |acc, value| match value { + Tagged { + item: Value::Primitive(Primitive::Int(n)), + .. + } => { + if n.to_i32().unwrap() > acc { + n.to_i32().unwrap() + } else { + acc + } + } + _ => acc, + }); + Value::number(data).tagged(&tag) + } + _ => Value::number(0).tagged(&tag), + } + }) + .collect(); + + let datasets = datasets.iter().fold(0, |max, value| match value { + Tagged { + item: Value::Primitive(Primitive::Int(n)), + .. 
+ } => { + if n.to_i32().unwrap() > max { + n.to_i32().unwrap() + } else { + max + } + } + _ => max, + }); + Value::number(datasets).tagged(&tag) + } + _ => Value::number(-1).tagged(&tag), + }; + + Ok(results) +} + +#[cfg(test)] +mod tests { + + use crate::commands::evaluate_by::evaluate; + use crate::commands::group_by::group; + use crate::commands::map_max_by::map_max; + use crate::commands::reduce_by::reduce; + use crate::commands::t_sort_by::t_sort; + use crate::data::meta::*; + use crate::prelude::*; + use crate::Value; + use indexmap::IndexMap; + + fn int(s: impl Into) -> Tagged { + Value::int(s).tagged_unknown() + } + + fn string(input: impl Into) -> Tagged { + Value::string(input.into()).tagged_unknown() + } + + fn row(entries: IndexMap>) -> Tagged { + Value::row(entries).tagged_unknown() + } + + fn nu_releases_evaluated_by_default_one() -> Tagged { + evaluate(&nu_releases_sorted_by_date(), None, Tag::unknown()).unwrap() + } + + fn nu_releases_reduced_by_sum() -> Tagged { + reduce( + &nu_releases_evaluated_by_default_one(), + Some(String::from("sum")), + Tag::unknown(), + ) + .unwrap() + } + + fn nu_releases_sorted_by_date() -> Tagged { + let key = String::from("date"); + + t_sort( + Some(key), + None, + &nu_releases_grouped_by_date(), + Tag::unknown(), + ) + .unwrap() + } + + fn nu_releases_grouped_by_date() -> Tagged { + let key = String::from("date").tagged_unknown(); + group(&key, nu_releases_commiters(), Tag::unknown()).unwrap() + } + + fn nu_releases_commiters() -> Vec> { + vec![ + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! 
{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("September 24-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("September 24-2019")}, + ), + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("September 24-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}, + ), + row( + indexmap! {"name".into() => string("JK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}, + ), + ] + } + #[test] + fn maps_and_gets_max_value() { + assert_eq!( + map_max(&nu_releases_reduced_by_sum(), None, Tag::unknown()).unwrap(), + int(4) + ); + } +} diff --git a/src/commands/reduce_by.rs b/src/commands/reduce_by.rs index de64caac15..53ddf7e15d 100644 --- a/src/commands/reduce_by.rs +++ b/src/commands/reduce_by.rs @@ -1,17 +1,12 @@ use crate::commands::WholeStreamCommand; -use crate::data::TaggedDictBuilder; use crate::parser::hir::SyntaxShape; -use crate::parser::registry; -use crate::data::base::Block; use crate::prelude::*; - -use log::trace; - +use num_traits::cast::ToPrimitive; pub struct ReduceBy; #[derive(Deserialize)] pub struct ReduceByArgs { - calculator: Block, + reduce_with: Option>, } impl WholeStreamCommand for ReduceBy { @@ -20,15 +15,15 @@ impl WholeStreamCommand for ReduceBy { } fn signature(&self) -> Signature { - Signature::build("reduce-by").required( - "calculator", - SyntaxShape::Block, - "The block used for calculating values", + Signature::build("reduce-by").named( + "reduce_with", + SyntaxShape::String, + "the command 
to reduce by with", ) } fn usage(&self) -> &str { - "Crates a new table with the data from the table rows reduced by the block given." + "Creates a new table with the data from the tables rows reduced by the command given." } fn run( @@ -41,14 +36,12 @@ impl WholeStreamCommand for ReduceBy { } pub fn reduce_by( - ReduceByArgs { calculator }: ReduceByArgs, + ReduceByArgs { reduce_with }: ReduceByArgs, RunnableContext { input, name, .. }: RunnableContext, ) -> Result { let stream = async_stream! { let values: Vec> = input.values.collect().await; - trace!("{:?}", &calculator); - if values.is_empty() { yield Err(ShellError::labeled_error( "Expected table from pipeline", @@ -56,7 +49,14 @@ pub fn reduce_by( name )) } else { - match reduce(values, &calculator, name) { + + let reduce_with = if let Some(reducer) = reduce_with { + Some(reducer.item().clone()) + } else { + None + }; + + match reduce(&values[0], reduce_with, name) { Ok(reduced) => yield ReturnSuccess::value(reduced), Err(err) => yield Err(err) } @@ -66,26 +66,109 @@ pub fn reduce_by( Ok(stream.to_output_stream()) } +fn sum(data: Vec>) -> i32 { + data.into_iter().fold(0, |acc, value| match value { + Tagged { + item: Value::Primitive(Primitive::Int(n)), + .. 
+ } => acc + n.to_i32().unwrap(), + _ => acc, + }) +} + +fn formula( + acc_begin: i32, + calculator: Box>) -> i32 + 'static>, +) -> Box>) -> i32 + 'static> { + Box::new(move |acc, datax| -> i32 { + let result = acc * acc_begin; + result + calculator(datax) + }) +} + +fn reducer_for(command: Reduce) -> Box>) -> i32 + 'static> { + match command { + Reduce::Sum | Reduce::Default => Box::new(formula(0, Box::new(sum))), + } +} + +pub enum Reduce { + Sum, + Default, +} + pub fn reduce( - values: Vec>, - calculator: &Block, + values: &Tagged, + reducer: Option, tag: impl Into, ) -> Result, ShellError> { let tag = tag.into(); - let mut out = TaggedDictBuilder::new(&tag); + let reduce_with = match reducer { + Some(cmd) if cmd == "sum" => reducer_for(Reduce::Sum), + Some(_) | None => reducer_for(Reduce::Default), + }; - Ok(out.into_tagged_value()) + let results: Tagged = match values { + Tagged { + item: Value::Table(datasets), + .. + } => { + let datasets: Vec<_> = datasets + .into_iter() + .map(|subsets| { + let mut acc = 0; + match subsets { + Tagged { + item: Value::Table(data), + .. + } => { + let data = data + .into_iter() + .map(|d| { + if let Tagged { + item: Value::Table(x), + .. 
+ } = d + { + acc = reduce_with(acc, x.clone()); + Value::number(acc).tagged(&tag) + } else { + Value::number(0).tagged(&tag) + } + }) + .collect::>(); + Value::Table(data).tagged(&tag) + } + _ => Value::Table(vec![]).tagged(&tag), + } + }) + .collect(); + + Value::Table(datasets).tagged(&tag) + } + _ => Value::Table(vec![]).tagged(&tag), + }; + + Ok(results) } #[cfg(test)] mod tests { - use crate::commands::reduce_by::reduce; + use crate::commands::evaluate_by::evaluate; + use crate::commands::group_by::group; + use crate::commands::reduce_by::{reduce, reducer_for, Reduce}; + use crate::commands::t_sort_by::t_sort; use crate::data::meta::*; + use crate::prelude::*; use crate::Value; use indexmap::IndexMap; + fn int(s: impl Into) -> Tagged { + Value::int(s).tagged_unknown() + } + fn string(input: impl Into) -> Tagged { Value::string(input.into()).tagged_unknown() } @@ -97,4 +180,78 @@ mod tests { fn table(list: &Vec>) -> Tagged { Value::table(list).tagged_unknown() } + + fn nu_releases_sorted_by_date() -> Tagged { + let key = String::from("date"); + + t_sort( + Some(key), + None, + &nu_releases_grouped_by_date(), + Tag::unknown(), + ) + .unwrap() + } + + fn nu_releases_evaluated_by_default_one() -> Tagged { + evaluate(&nu_releases_sorted_by_date(), None, Tag::unknown()).unwrap() + } + + fn nu_releases_grouped_by_date() -> Tagged { + let key = String::from("date").tagged_unknown(); + group(&key, nu_releases_commiters(), Tag::unknown()).unwrap() + } + + fn nu_releases_commiters() -> Vec> { + vec![ + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! 
{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("September 24-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("September 24-2019")}, + ), + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("September 24-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}, + ), + ] + } + + #[test] + fn reducer_computes_given_a_sum_command() { + let subject = vec![int(1), int(1), int(1)]; + + let action = reducer_for(Reduce::Sum); + + assert_eq!(action(0, subject), 3); + } + + #[test] + fn reducer_computes() { + assert_eq!( + reduce( + &nu_releases_evaluated_by_default_one(), + Some(String::from("sum")), + Tag::unknown() + ), + Ok(table(&vec![table(&vec![int(3), int(3), int(3)])])) + ); + } } diff --git a/src/commands/split_by.rs b/src/commands/split_by.rs index b995b041d7..1f972a2c55 100644 --- a/src/commands/split_by.rs +++ b/src/commands/split_by.rs @@ -150,6 +150,7 @@ pub fn split( #[cfg(test)] mod tests { + use crate::commands::group_by::group; use crate::commands::split_by::split; use crate::data::meta::*; use crate::Value; @@ -167,30 +168,49 @@ mod tests { Value::table(list).tagged_unknown() } + fn nu_releases_grouped_by_date() -> Tagged { + let key = String::from("date").tagged_unknown(); + group(&key, nu_releases_commiters(), Tag::unknown()).unwrap() + } + + fn nu_releases_commiters() -> Vec> { + vec![ + row( + indexmap! 
{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")}, + ), + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}, + ), + ] + } + #[test] fn splits_inner_tables_by_key() { let for_key = String::from("country").tagged_unknown(); - let nu_releases = row(indexmap! 
{ - "August 23-2019".into() => table(&vec![ - row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}), - row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}), - row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}) - ]), - "Sept 24-2019".into() => table(&vec![ - row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")}), - row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")}), - row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")}) - ]), - "October 10-2019".into() => table(&vec![ - row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")}), - row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}), - row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}) - ]) - }); - assert_eq!( - split(&for_key, &nu_releases, Tag::unknown()).unwrap(), + split(&for_key, &nu_releases_grouped_by_date(), Tag::unknown()).unwrap(), Value::row(indexmap! { "EC".into() => row(indexmap! { "August 23-2019".into() => table(&vec![ @@ -235,18 +255,12 @@ mod tests { let nu_releases = row(indexmap! 
{ "August 23-2019".into() => table(&vec![ - row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}), - row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}), - row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}) + row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}) ]), "Sept 24-2019".into() => table(&vec![ - row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")}), - row(indexmap!{"name".into() => Value::string("JT").tagged(Tag::from(Span::new(5,10))), "date".into() => string("Sept 24-2019")}), - row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")}) + row(indexmap!{"name".into() => Value::string("JT").tagged(Tag::from(Span::new(5,10))), "date".into() => string("Sept 24-2019")}) ]), "October 10-2019".into() => table(&vec![ - row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")}), - row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}), row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}) ]) }); diff --git a/src/commands/t_sort_by.rs b/src/commands/t_sort_by.rs new file mode 100644 index 0000000000..1df4cce887 --- /dev/null +++ b/src/commands/t_sort_by.rs @@ -0,0 +1,358 @@ +use crate::commands::WholeStreamCommand; +use crate::data::{TaggedDictBuilder, TaggedListBuilder}; +use crate::errors::ShellError; +use crate::prelude::*; +use chrono::{DateTime, NaiveDate, Utc}; + +pub struct TSortBy; + +#[derive(Deserialize)] +pub struct TSortByArgs { + 
#[serde(rename(deserialize = "show-columns"))] + show_columns: bool, + group_by: Option>, + #[allow(unused)] + split_by: Option, +} + +impl WholeStreamCommand for TSortBy { + fn name(&self) -> &str { + "t-sort-by" + } + + fn signature(&self) -> Signature { + Signature::build("t-sort-by") + .switch("show-columns", "Displays the column names sorted") + .named( + "group_by", + SyntaxShape::String, + "the name of the column to group by", + ) + .named( + "split_by", + SyntaxShape::String, + "the name of the column within the grouped by table to split by", + ) + } + + fn usage(&self) -> &str { + "Sort by the given columns." + } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + args.process(registry, t_sort_by)?.run() + } +} + +fn t_sort_by( + TSortByArgs { + show_columns, + group_by, + .. + }: TSortByArgs, + RunnableContext { input, name, .. }: RunnableContext, +) -> Result { + Ok(OutputStream::new(async_stream! { + let values: Vec> = input.values.collect().await; + + let column_grouped_by_name = if let Some(grouped_by) = group_by { + Some(grouped_by.item().clone()) + } else { + None + }; + + if show_columns { + for label in columns_sorted(column_grouped_by_name, &values[0], &name).iter() { + yield ReturnSuccess::value(label.clone()); + } + } else { + match t_sort(column_grouped_by_name, None, &values[0], name) { + Ok(sorted) => yield ReturnSuccess::value(sorted), + Err(err) => yield Err(err) + } + } + })) +} + +pub fn columns_sorted( + _group_by_name: Option, + value: &Tagged, + tag: impl Into, +) -> Vec> { + let origin_tag = tag.into(); + + match value { + Tagged { + item: Value::Row(rows), + .. 
+ } => { + let mut keys: Vec> = + rows.entries + .keys() + .map(|s| s.as_ref()) + .map(|k: &str| { + let date = NaiveDate::parse_from_str(k, "%B %d-%Y"); + + let date = match date { + Ok(parsed) => Value::Primitive(Primitive::Date( + DateTime::::from_utc(parsed.and_hms(12, 34, 56), Utc), + )), + Err(_) => Value::string(k), + }; + + date.tagged_unknown() + }) + .collect(); + + keys.sort(); + + let keys: Vec = keys + .into_iter() + .map(|k| { + Value::string(match k { + Tagged { + item: Value::Primitive(Primitive::Date(d)), + .. + } => format!("{}", d.format("%B %d-%Y")), + _ => k.as_string().unwrap(), + }) + }) + .collect(); + + keys.into_iter().map(|k| k.tagged(&origin_tag)).collect() + } + _ => vec![Value::string("default").tagged(&origin_tag)] + } +} + +pub fn t_sort( + group_by_name: Option, + split_by_name: Option, + value: &Tagged, + tag: impl Into, +) -> Result, ShellError> { + let origin_tag = tag.into(); + + match group_by_name { + Some(column_name) => { + let sorted_labels = columns_sorted(Some(column_name), value, &origin_tag); + + match split_by_name { + None => { + let mut dataset = TaggedDictBuilder::new(&origin_tag); + dataset.insert_tagged("default", value.clone()); + let dataset = dataset.into_tagged_value(); + + let split_labels = match &dataset { + Tagged { + item: Value::Row(rows), + .. + } => { + let mut keys: Vec> = rows + .entries + .keys() + .map(|s| s.as_ref()) + .map(|k: &str| { + let date = NaiveDate::parse_from_str(k, "%B %d-%Y"); + + let date = match date { + Ok(parsed) => Value::Primitive(Primitive::Date( + DateTime::::from_utc( + parsed.and_hms(12, 34, 56), + Utc, + ), + )), + Err(_) => Value::string(k), + }; + + date.tagged_unknown() + }) + .collect(); + + keys.sort(); + + let keys: Vec = keys + .into_iter() + .map(|k| { + Value::string(match k { + Tagged { + item: Value::Primitive(Primitive::Date(d)), + .. 
+ } => format!("{}", d.format("%B %d-%Y")), + _ => k.as_string().unwrap(), + }) + }) + .collect(); + + keys.into_iter().map(|k| k.tagged(&origin_tag)).collect() + } + _ => vec![], + }; + + let results: Vec>> = split_labels + .into_iter() + .map(|split| { + let groups = dataset.get_data_by_key(&split.as_string().unwrap()); + + sorted_labels + .clone() + .into_iter() + .map(|label| { + let label = label.as_string().unwrap(); + + match groups { + Some(Tagged { + item: Value::Row(dict), + .. + }) => dict.get_data_by_key(&label).unwrap().clone(), + _ => Value::Table(vec![]).tagged(&origin_tag), + } + }) + .collect() + }) + .collect(); + + let mut outer = TaggedListBuilder::new(&origin_tag); + + for i in results { + outer.insert_tagged(Value::Table(i).tagged(&origin_tag)); + } + + return Ok(Value::Table(outer.list).tagged(&origin_tag)); + } + Some(_) => return Ok(Value::nothing().tagged(&origin_tag)), + } + } + None => return Ok(Value::nothing().tagged(&origin_tag)), + } +} +#[cfg(test)] +mod tests { + + use crate::commands::group_by::group; + use crate::commands::t_sort_by::{columns_sorted, t_sort}; + use crate::data::meta::*; + use crate::Value; + use indexmap::IndexMap; + + fn string(input: impl Into) -> Tagged { + Value::string(input.into()).tagged_unknown() + } + + fn row(entries: IndexMap>) -> Tagged { + Value::row(entries).tagged_unknown() + } + + fn table(list: &Vec>) -> Tagged { + Value::table(list).tagged_unknown() + } + + fn nu_releases_grouped_by_date() -> Tagged { + let key = String::from("date").tagged_unknown(); + group(&key, nu_releases_commiters(), Tag::unknown()).unwrap() + } + + fn nu_releases_commiters() -> Vec> { + vec![ + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}, + ), + row( + indexmap! 
{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("September 24-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("September 24-2019")}, + ), + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("September 24-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}, + ), + ] + } + + #[test] + fn show_columns_sorted_given_a_column_to_sort_by() { + let by_column = String::from("date"); + + assert_eq!( + columns_sorted( + Some(by_column), + &nu_releases_grouped_by_date(), + Tag::unknown() + ), + vec![ + string("August 23-2019"), + string("September 24-2019"), + string("October 10-2019") + ] + ) + } + + #[test] + fn sorts_the_tables() { + let group_by = String::from("date"); + + assert_eq!( + t_sort( + Some(group_by), + None, + &nu_releases_grouped_by_date(), + Tag::unknown() + ) + .unwrap(), + table(&vec![table(&vec![ + table(&vec![ + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")} + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")} + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")} + ) + ]), + table(&vec![ + row( + indexmap! 
{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("September 24-2019")} + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("September 24-2019")} + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("September 24-2019")} + ) + ]), + table(&vec![ + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")} + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")} + ), + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")} + ) + ]), + ]),]) + ); + } +} diff --git a/src/data/base.rs b/src/data/base.rs index d877e4a7cc..f0357273e2 100644 --- a/src/data/base.rs +++ b/src/data/base.rs @@ -430,6 +430,7 @@ impl Tagged { Value::Primitive(Primitive::Int(x)) => Ok(format!("{}", x)), Value::Primitive(Primitive::Bytes(x)) => Ok(format!("{}", x)), Value::Primitive(Primitive::Path(x)) => Ok(format!("{}", x.display())), + Value::Primitive(Primitive::Date(x)) => Ok(format!("{}", x.to_rfc3339())), // TODO: this should definitely be more general with better errors other => Err(ShellError::labeled_error( "Expected string", diff --git a/src/data/dict.rs b/src/data/dict.rs index 432170f361..32393f0a0d 100644 --- a/src/data/dict.rs +++ b/src/data/dict.rs @@ -114,7 +114,7 @@ impl Dictionary { #[derive(Debug)] pub struct TaggedListBuilder { tag: Tag, - list: Vec>, + pub list: Vec>, } impl TaggedListBuilder { From 00b3c2036a0295bc34bf4d96a124621acf3b21f6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Tue, 12 Nov 2019 03:38:55 -0500 Subject: [PATCH 171/184] This is part of on-going work with capabilities when working with tables and able to work with them for data processing & viewing 
purposes. At the moment, certain ways to process said tables we are able to view a histogram of a given column. As usage matures, we may find certain core commands that could be used ergonomically when working with tables on Nu. --- README.md | 2 + src/cli.rs | 4 +- src/commands.rs | 44 ++++++++--------- src/commands/histogram.rs | 97 ++++++++++++++++++++++---------------- src/commands/map_max_by.rs | 40 ++++++++-------- src/commands/t_sort_by.rs | 38 +++++++-------- tests/commands_test.rs | 31 +++++++++++- 7 files changed, 147 insertions(+), 109 deletions(-) diff --git a/README.md b/README.md index 46a4c45ac8..b1ce4feec1 100644 --- a/README.md +++ b/README.md @@ -256,6 +256,7 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat | format pattern | Format table row data as a string following the given pattern | | get column-or-column-path | Open column and get data from the corresponding cells | | group-by column | Creates a new table with the data from the table rows grouped by the column given | +| histogram column ...column-names | Creates a new table with a histogram based on the column name passed in, optionally give the frequency column name | inc (column-or-column-path) | Increment a value or version. Optionally use the column of a table | | insert column-or-column-path value | Insert a new column to the table | | last amount | Show only the last number of rows | @@ -267,6 +268,7 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat | reverse | Reverses the table. | | skip amount | Skip a number of rows | | skip-while condition | Skips rows while the condition matches. | +| split-by column | Creates a new table with the data from the inner tables splitted by the column given | | sort-by ...columns | Sort by the given columns | | str (column) | Apply string function. 
Optionally use the column of a table | | sum | Sum a column of values | diff --git a/src/cli.rs b/src/cli.rs index c6995ef711..b882d57d69 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -301,6 +301,7 @@ pub async fn cli() -> Result<(), Box> { whole_stream_command(FromYML), whole_stream_command(Pick), whole_stream_command(Get), + whole_stream_command(Histogram), per_item_command(Remove), per_item_command(Fetch), per_item_command(Open), @@ -320,6 +321,7 @@ pub async fn cli() -> Result<(), Box> { per_item_command(Mkdir), per_item_command(Move), whole_stream_command(Save), + whole_stream_command(SplitBy), whole_stream_command(Table), whole_stream_command(Version), whole_stream_command(Which), @@ -328,12 +330,10 @@ pub async fn cli() -> Result<(), Box> { cfg_if::cfg_if! { if #[cfg(data_processing_primitives)] { context.add_commands(vec![ - whole_stream_command(SplitBy), whole_stream_command(ReduceBy), whole_stream_command(EvaluateBy), whole_stream_command(TSortBy), whole_stream_command(MapMaxBy), - whole_stream_command(Histogram), ]); } } diff --git a/src/commands.rs b/src/commands.rs index 629289b565..ee70534640 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -16,6 +16,8 @@ pub(crate) mod debug; pub(crate) mod echo; pub(crate) mod enter; pub(crate) mod env; +#[allow(unused)] +pub(crate) mod evaluate_by; pub(crate) mod exit; pub(crate) mod fetch; pub(crate) mod first; @@ -33,10 +35,13 @@ pub(crate) mod from_yaml; pub(crate) mod get; pub(crate) mod group_by; pub(crate) mod help; +pub(crate) mod histogram; pub(crate) mod history; pub(crate) mod last; pub(crate) mod lines; pub(crate) mod ls; +#[allow(unused)] +pub(crate) mod map_max_by; pub(crate) mod mkdir; pub(crate) mod mv; pub(crate) mod next; @@ -49,6 +54,8 @@ pub(crate) mod post; pub(crate) mod prepend; pub(crate) mod prev; pub(crate) mod pwd; +#[allow(unused)] +pub(crate) mod reduce_by; pub(crate) mod reject; pub(crate) mod reverse; pub(crate) mod rm; @@ -57,20 +64,11 @@ pub(crate) mod shells; pub(crate) mod size; 
pub(crate) mod skip_while; pub(crate) mod sort_by; - -cfg_if::cfg_if! { - if #[cfg(data_processing_primitives)] { - pub(crate) mod split_by; - pub(crate) mod reduce_by; - pub(crate) mod evaluate_by; - pub(crate) mod t_sort_by; - pub(crate) mod map_max_by; - pub(crate) mod histogram; - } -} - +pub(crate) mod split_by; pub(crate) mod split_column; pub(crate) mod split_row; +#[allow(unused)] +pub(crate) mod t_sort_by; pub(crate) mod table; pub(crate) mod tags; pub(crate) mod to_bson; @@ -103,6 +101,8 @@ pub(crate) use debug::Debug; pub(crate) use echo::Echo; pub(crate) use enter::Enter; pub(crate) use env::Env; +#[allow(unused)] +pub(crate) use evaluate_by::EvaluateBy; pub(crate) use exit::Exit; pub(crate) use fetch::Fetch; pub(crate) use first::First; @@ -122,10 +122,13 @@ pub(crate) use from_yaml::FromYML; pub(crate) use get::Get; pub(crate) use group_by::GroupBy; pub(crate) use help::Help; +pub(crate) use histogram::Histogram; pub(crate) use history::History; pub(crate) use last::Last; pub(crate) use lines::Lines; pub(crate) use ls::LS; +#[allow(unused)] +pub(crate) use map_max_by::MapMaxBy; pub(crate) use mkdir::Mkdir; pub(crate) use mv::Move; pub(crate) use next::Next; @@ -137,6 +140,8 @@ pub(crate) use post::Post; pub(crate) use prepend::Prepend; pub(crate) use prev::Previous; pub(crate) use pwd::PWD; +#[allow(unused)] +pub(crate) use reduce_by::ReduceBy; pub(crate) use reject::Reject; pub(crate) use reverse::Reverse; pub(crate) use rm::Remove; @@ -145,20 +150,11 @@ pub(crate) use shells::Shells; pub(crate) use size::Size; pub(crate) use skip_while::SkipWhile; pub(crate) use sort_by::SortBy; - -cfg_if::cfg_if! 
{ - if #[cfg(data_processing_primitives)] { - pub(crate) use split_by::SplitBy; - pub(crate) use reduce_by::ReduceBy; - pub(crate) use evaluate_by::EvaluateBy; - pub(crate) use t_sort_by::TSortBy; - pub(crate) use map_max_by::MapMaxBy; - pub(crate) use histogram::Histogram; - } -} - +pub(crate) use split_by::SplitBy; pub(crate) use split_column::SplitColumn; pub(crate) use split_row::SplitRow; +#[allow(unused)] +pub(crate) use t_sort_by::TSortBy; pub(crate) use table::Table; pub(crate) use tags::Tags; pub(crate) use to_bson::ToBSON; diff --git a/src/commands/histogram.rs b/src/commands/histogram.rs index 52d72bdfbf..6933f28a6f 100644 --- a/src/commands/histogram.rs +++ b/src/commands/histogram.rs @@ -1,10 +1,10 @@ -use crate::commands::WholeStreamCommand; +use crate::commands::evaluate_by::evaluate; use crate::commands::group_by::group; +use crate::commands::map_max_by::map_max; +use crate::commands::reduce_by::reduce; use crate::commands::t_sort_by::columns_sorted; use crate::commands::t_sort_by::t_sort; -use crate::commands::evaluate_by::evaluate; -use crate::commands::reduce_by::reduce; -use crate::commands::map_max_by::map_max; +use crate::commands::WholeStreamCommand; use crate::data::TaggedDictBuilder; use crate::errors::ShellError; use crate::prelude::*; @@ -15,6 +15,7 @@ pub struct Histogram; #[derive(Deserialize)] pub struct HistogramArgs { column_name: Tagged, + rest: Vec>, } impl WholeStreamCommand for Histogram { @@ -23,11 +24,16 @@ impl WholeStreamCommand for Histogram { } fn signature(&self) -> Signature { - Signature::build("histogram").required( - "column_name", - SyntaxShape::String, - "the name of the column to graph by", - ) + Signature::build("histogram") + .required( + "column_name", + SyntaxShape::String, + "the name of the column to graph by", + ) + .rest( + SyntaxShape::Member, + "column name to give the histogram's frequency column", + ) } fn usage(&self) -> &str { @@ -44,7 +50,7 @@ impl WholeStreamCommand for Histogram { } pub fn 
histogram( - HistogramArgs { column_name }: HistogramArgs, + HistogramArgs { column_name, rest }: HistogramArgs, RunnableContext { input, name, .. }: RunnableContext, ) -> Result { let stream = async_stream! { @@ -68,13 +74,24 @@ pub fn histogram( let mut idx = 0; + let column_names_supplied: Vec<_> = rest.iter().map(|f| f.item.clone()).collect(); + + let frequency_column_name = if column_names_supplied.is_empty() { + "frecuency".to_string() + } else { + column_names_supplied[0].clone() + }; + + let column = (*column_name).clone(); + if let Tagged { item: Value::Table(start), .. } = datasets.get(0).unwrap() { for percentage in start.into_iter() { + let mut fact = TaggedDictBuilder::new(&name); - fact.insert_tagged("committer", group_labels.get(idx).unwrap().clone()); + fact.insert_tagged(&column, group_labels.get(idx).unwrap().clone()); if let Tagged { item: Value::Primitive(Primitive::Int(ref num)), .. } = percentage.clone() { - fact.insert("activity", std::iter::repeat("*").take(num.to_i32().unwrap() as usize).collect::()); + fact.insert(&frequency_column_name, std::iter::repeat("*").take(num.to_i32().unwrap() as usize).collect::()); } idx = idx + 1; @@ -104,38 +121,36 @@ fn percentages( } => { let datasets: Vec<_> = datasets .into_iter() - .map(|subsets| { - match subsets { - Tagged { - item: Value::Table(data), - .. - } => { - let data = data - .into_iter() - .map(|d| match d { - Tagged { - item: Value::Primitive(Primitive::Int(n)), - .. - } => { - let max = match max { - Tagged { - item: Value::Primitive(Primitive::Int(ref maxima)), - .. - } => maxima.to_i32().unwrap(), - _ => 0, - }; + .map(|subsets| match subsets { + Tagged { + item: Value::Table(data), + .. + } => { + let data = data + .into_iter() + .map(|d| match d { + Tagged { + item: Value::Primitive(Primitive::Int(n)), + .. + } => { + let max = match max { + Tagged { + item: Value::Primitive(Primitive::Int(ref maxima)), + .. 
+ } => maxima.to_i32().unwrap(), + _ => 0, + }; - let n = { n.to_i32().unwrap() * 100 / max }; + let n = { n.to_i32().unwrap() * 100 / max }; - Value::number(n).tagged(&tag) - } - _ => Value::number(0).tagged(&tag), - }) - .collect::>(); - Value::Table(data).tagged(&tag) - } - _ => Value::Table(vec![]).tagged(&tag), + Value::number(n).tagged(&tag) + } + _ => Value::number(0).tagged(&tag), + }) + .collect::>(); + Value::Table(data).tagged(&tag) } + _ => Value::Table(vec![]).tagged(&tag), }) .collect(); diff --git a/src/commands/map_max_by.rs b/src/commands/map_max_by.rs index 31a02a81b1..ea2fc99219 100644 --- a/src/commands/map_max_by.rs +++ b/src/commands/map_max_by.rs @@ -81,29 +81,27 @@ pub fn map_max( } => { let datasets: Vec<_> = datasets .into_iter() - .map(|subsets| { - match subsets { - Tagged { - item: Value::Table(data), - .. - } => { - let data = data.into_iter().fold(0, |acc, value| match value { - Tagged { - item: Value::Primitive(Primitive::Int(n)), - .. - } => { - if n.to_i32().unwrap() > acc { - n.to_i32().unwrap() - } else { - acc - } + .map(|subsets| match subsets { + Tagged { + item: Value::Table(data), + .. + } => { + let data = data.into_iter().fold(0, |acc, value| match value { + Tagged { + item: Value::Primitive(Primitive::Int(n)), + .. + } => { + if n.to_i32().unwrap() > acc { + n.to_i32().unwrap() + } else { + acc } - _ => acc, - }); - Value::number(data).tagged(&tag) - } - _ => Value::number(0).tagged(&tag), + } + _ => acc, + }); + Value::number(data).tagged(&tag) } + _ => Value::number(0).tagged(&tag), }) .collect(); diff --git a/src/commands/t_sort_by.rs b/src/commands/t_sort_by.rs index 1df4cce887..1c914dbac3 100644 --- a/src/commands/t_sort_by.rs +++ b/src/commands/t_sort_by.rs @@ -57,25 +57,25 @@ fn t_sort_by( RunnableContext { input, name, .. }: RunnableContext, ) -> Result { Ok(OutputStream::new(async_stream! 
{ - let values: Vec> = input.values.collect().await; + let values: Vec> = input.values.collect().await; - let column_grouped_by_name = if let Some(grouped_by) = group_by { - Some(grouped_by.item().clone()) - } else { - None - }; + let column_grouped_by_name = if let Some(grouped_by) = group_by { + Some(grouped_by.item().clone()) + } else { + None + }; - if show_columns { - for label in columns_sorted(column_grouped_by_name, &values[0], &name).iter() { - yield ReturnSuccess::value(label.clone()); - } - } else { - match t_sort(column_grouped_by_name, None, &values[0], name) { - Ok(sorted) => yield ReturnSuccess::value(sorted), - Err(err) => yield Err(err) - } + if show_columns { + for label in columns_sorted(column_grouped_by_name, &values[0], &name).iter() { + yield ReturnSuccess::value(label.clone()); } - })) + } else { + match t_sort(column_grouped_by_name, None, &values[0], name) { + Ok(sorted) => yield ReturnSuccess::value(sorted), + Err(err) => yield Err(err) + } + } + })) } pub fn columns_sorted( @@ -125,7 +125,7 @@ pub fn columns_sorted( keys.into_iter().map(|k| k.tagged(&origin_tag)).collect() } - _ => vec![Value::string("default").tagged(&origin_tag)] + _ => vec![Value::string("default").tagged(&origin_tag)], } } @@ -238,7 +238,7 @@ mod tests { use crate::data::meta::*; use crate::Value; use indexmap::IndexMap; - + fn string(input: impl Into) -> Tagged { Value::string(input.into()).tagged_unknown() } @@ -305,7 +305,7 @@ mod tests { ] ) } - + #[test] fn sorts_the_tables() { let group_by = String::from("date"); diff --git a/tests/commands_test.rs b/tests/commands_test.rs index acd5e8374c..661d14023e 100644 --- a/tests/commands_test.rs +++ b/tests/commands_test.rs @@ -31,6 +31,35 @@ fn group_by() { }) } +#[test] +fn histogram() { + Playground::setup("histogram_test_1", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContentToBeTrimmed( + "los_tres_caballeros.csv", + r#" + first_name,last_name,rusty_at + Andrés,Robalino,Ecuador + Jonathan,Turner,Estados 
Unidos + Yehuda,Katz,Estados Unidos + "#, + )]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + open los_tres_caballeros.csv + | histogram rusty_at countries + | where rusty_at == "Ecuador" + | get countries + | echo $it + "# + )); + + assert_eq!(actual, "**************************************************"); + // 50% + }) +} + #[test] fn group_by_errors_if_unknown_column_name() { Playground::setup("group_by_test_2", |dirs, sandbox| { @@ -56,7 +85,6 @@ fn group_by_errors_if_unknown_column_name() { }) } -#[cfg(data_processing_primitives)] #[test] fn split_by() { Playground::setup("split_by_test_1", |dirs, sandbox| { @@ -86,7 +114,6 @@ fn split_by() { }) } -#[cfg(data_processing_primitives)] #[test] fn split_by_errors_if_no_table_given_as_input() { Playground::setup("split_by_test_2", |dirs, sandbox| { From a3ff5f12460c44286d2e5e00c9712379e643a306 Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Sun, 3 Nov 2019 23:12:14 +0100 Subject: [PATCH 172/184] Updates tests for from tsv, csv, and ssv. With the proposed changes, these tests now become invalid. If the first line is to be counted as data, then converting the headers to ints will fail. Removing the headers and instead treating the first line as data, however, reflects the new, desired mode of operation. 
--- tests/filters_test.rs | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/tests/filters_test.rs b/tests/filters_test.rs index e410e99e65..9ccb4ab718 100644 --- a/tests/filters_test.rs +++ b/tests/filters_test.rs @@ -135,7 +135,6 @@ fn converts_from_csv_text_skipping_headers_to_structured_table() { sandbox.with_files(vec![FileWithContentToBeTrimmed( "los_tres_amigos.txt", r#" - first_name,last_name,rusty_luck Andrés,Robalino,1 Jonathan,Turner,1 Yehuda,Katz,1 @@ -361,7 +360,6 @@ fn converts_from_tsv_text_skipping_headers_to_structured_table() { sandbox.with_files(vec![FileWithContentToBeTrimmed( "los_tres_amigos.txt", r#" - first Name Last Name rusty_luck Andrés Robalino 1 Jonathan Turner 1 Yehuda Katz 1 @@ -441,12 +439,11 @@ fn converts_from_ssv_text_to_structured_table_with_separator_specified() { } #[test] -fn converts_from_ssv_text_skipping_headers_to_structured_table() { +fn converts_from_ssv_text_treating_first_line_as_data_with_flag() { Playground::setup("filter_from_ssv_test_2", |dirs, sandbox| { sandbox.with_files(vec![FileWithContentToBeTrimmed( "oc_get_svc.txt", r#" - NAME LABELS SELECTOR IP PORT(S) docker-registry docker-registry=default docker-registry=default 172.30.78.158 5000/TCP kubernetes component=apiserver,provider=kubernetes 172.30.0.2 443/TCP kubernetes-ro component=apiserver,provider=kubernetes 172.30.0.1 80/TCP @@ -458,13 +455,13 @@ fn converts_from_ssv_text_skipping_headers_to_structured_table() { r#" open oc_get_svc.txt | from-ssv --headerless - | nth 2 - | get Column2 + | first + | get Column1 | echo $it "# )); - assert_eq!(actual, "component=apiserver,provider=kubernetes"); + assert_eq!(actual, "docker-registry"); }) } From 282cb46ff12d333106cc1f3cc8c9493b6a370387 Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Mon, 11 Nov 2019 12:01:21 +0100 Subject: [PATCH 173/184] Implements --headerless for from-csv --- src/commands/from_csv.rs | 55 +++++++++++++++------------------------- 1 file changed, 20 
insertions(+), 35 deletions(-) diff --git a/src/commands/from_csv.rs b/src/commands/from_csv.rs index 9483fed521..cd29b625a6 100644 --- a/src/commands/from_csv.rs +++ b/src/commands/from_csv.rs @@ -27,7 +27,7 @@ impl WholeStreamCommand for FromCSV { } fn usage(&self) -> &str { - "Parse text as .csv and create table" + "Parse text as .csv and create table." } fn run( @@ -46,44 +46,29 @@ pub fn from_csv_string_to_value( tag: impl Into, ) -> Result, csv::Error> { let mut reader = ReaderBuilder::new() - .has_headers(false) + .has_headers(!headerless) .delimiter(separator as u8) .from_reader(s.as_bytes()); let tag = tag.into(); - let mut fields: VecDeque = VecDeque::new(); - let mut iter = reader.records(); + let headers = if headerless { + (1..=reader.headers()?.len()) + .map(|i| format!("Column{}", i)) + .collect::>() + } else { + reader.headers()?.iter().map(String::from).collect() + }; + let mut rows = vec![]; - - if let Some(result) = iter.next() { - let line = result?; - - for (idx, item) in line.iter().enumerate() { - if headerless { - fields.push_back(format!("Column{}", idx + 1)); - } else { - fields.push_back(item.to_string()); - } - } - } - - loop { - if let Some(row_values) = iter.next() { - let row_values = row_values?; - - let mut row = TaggedDictBuilder::new(tag.clone()); - - for (idx, entry) in row_values.iter().enumerate() { - row.insert_tagged( - fields.get(idx).unwrap(), - Value::Primitive(Primitive::String(String::from(entry))).tagged(&tag), - ); - } - - rows.push(row.into_tagged_value()); - } else { - break; + for row in reader.records() { + let mut tagged_row = TaggedDictBuilder::new(&tag); + for (value, header) in row?.iter().zip(headers.iter()) { + tagged_row.insert_tagged( + header, + Value::Primitive(Primitive::String(String::from(value))).tagged(&tag), + ) } + rows.push(tagged_row.into_tagged_value()); } Ok(Value::Table(rows).tagged(&tag)) @@ -91,7 +76,7 @@ pub fn from_csv_string_to_value( fn from_csv( FromCSVArgs { - headerless: skip_headers, 
+ headerless, separator, }: FromCSVArgs, RunnableContext { input, name, .. }: RunnableContext, @@ -141,7 +126,7 @@ fn from_csv( } } - match from_csv_string_to_value(concat_string, skip_headers, sep, name_tag.clone()) { + match from_csv_string_to_value(concat_string, headerless, sep, name_tag.clone()) { Ok(x) => match x { Tagged { item: Value::Table(list), .. } => { for l in list { From f8dc06ef49e9a2325981611f3d170ee32fa864c9 Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Mon, 11 Nov 2019 12:25:41 +0100 Subject: [PATCH 174/184] Changes implementation of --headerless for from-tsv. --- src/commands/from_tsv.rs | 55 ++++++++++++++-------------------------- 1 file changed, 19 insertions(+), 36 deletions(-) diff --git a/src/commands/from_tsv.rs b/src/commands/from_tsv.rs index 2284e95573..24841b91c1 100644 --- a/src/commands/from_tsv.rs +++ b/src/commands/from_tsv.rs @@ -39,53 +39,36 @@ pub fn from_tsv_string_to_value( tag: impl Into, ) -> Result, csv::Error> { let mut reader = ReaderBuilder::new() - .has_headers(false) + .has_headers(!headerless) .delimiter(b'\t') .from_reader(s.as_bytes()); let tag = tag.into(); - let mut fields: VecDeque = VecDeque::new(); - let mut iter = reader.records(); + let headers = if headerless { + (1..=reader.headers()?.len()) + .map(|i| format!("Column{}", i)) + .collect::>() + } else { + reader.headers()?.iter().map(String::from).collect() + }; + let mut rows = vec![]; - - if let Some(result) = iter.next() { - let line = result?; - - for (idx, item) in line.iter().enumerate() { - if headerless { - fields.push_back(format!("Column{}", idx + 1)); - } else { - fields.push_back(item.to_string()); - } - } - } - - loop { - if let Some(row_values) = iter.next() { - let row_values = row_values?; - - let mut row = TaggedDictBuilder::new(&tag); - - for (idx, entry) in row_values.iter().enumerate() { - row.insert_tagged( - fields.get(idx).unwrap(), - Value::Primitive(Primitive::String(String::from(entry))).tagged(&tag), - ); - } - - 
rows.push(row.into_tagged_value()); - } else { - break; + for row in reader.records() { + let mut tagged_row = TaggedDictBuilder::new(&tag); + for (value, header) in row?.iter().zip(headers.iter()) { + tagged_row.insert_tagged( + header, + Value::Primitive(Primitive::String(String::from(value))).tagged(&tag), + ) } + rows.push(tagged_row.into_tagged_value()); } Ok(Value::Table(rows).tagged(&tag)) } fn from_tsv( - FromTSVArgs { - headerless: skip_headers, - }: FromTSVArgs, + FromTSVArgs { headerless }: FromTSVArgs, RunnableContext { input, name, .. }: RunnableContext, ) -> Result { let name_tag = name; @@ -115,7 +98,7 @@ fn from_tsv( } } - match from_tsv_string_to_value(concat_string, skip_headers, name_tag.clone()) { + match from_tsv_string_to_value(concat_string, headerless, name_tag.clone()) { Ok(x) => match x { Tagged { item: Value::Table(list), .. } => { for l in list { From 040108717561f531c65d131fe8f23215f37cab06 Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Mon, 11 Nov 2019 12:54:58 +0100 Subject: [PATCH 175/184] Refactors out structured parsing logic to a separate module. 
--- src/commands.rs | 2 + src/commands/from_csv.rs | 90 ++------------------------ src/commands/from_structured_data.rs | 97 ++++++++++++++++++++++++++++ src/commands/from_tsv.rs | 89 +------------------------ 4 files changed, 106 insertions(+), 172 deletions(-) create mode 100644 src/commands/from_structured_data.rs diff --git a/src/commands.rs b/src/commands.rs index ee70534640..c238b451d8 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -1,6 +1,8 @@ #[macro_use] pub(crate) mod macros; +mod from_structured_data; + pub(crate) mod append; pub(crate) mod args; pub(crate) mod autoview; diff --git a/src/commands/from_csv.rs b/src/commands/from_csv.rs index cd29b625a6..4bada42dfb 100644 --- a/src/commands/from_csv.rs +++ b/src/commands/from_csv.rs @@ -1,7 +1,7 @@ +use crate::commands::from_structured_data::from_structured_data; use crate::commands::WholeStreamCommand; -use crate::data::{Primitive, TaggedDictBuilder, Value}; +use crate::data::{Primitive, Value}; use crate::prelude::*; -use csv::ReaderBuilder; pub struct FromCSV; @@ -39,49 +39,13 @@ impl WholeStreamCommand for FromCSV { } } -pub fn from_csv_string_to_value( - s: String, - headerless: bool, - separator: char, - tag: impl Into, -) -> Result, csv::Error> { - let mut reader = ReaderBuilder::new() - .has_headers(!headerless) - .delimiter(separator as u8) - .from_reader(s.as_bytes()); - let tag = tag.into(); - - let headers = if headerless { - (1..=reader.headers()?.len()) - .map(|i| format!("Column{}", i)) - .collect::>() - } else { - reader.headers()?.iter().map(String::from).collect() - }; - - let mut rows = vec![]; - for row in reader.records() { - let mut tagged_row = TaggedDictBuilder::new(&tag); - for (value, header) in row?.iter().zip(headers.iter()) { - tagged_row.insert_tagged( - header, - Value::Primitive(Primitive::String(String::from(value))).tagged(&tag), - ) - } - rows.push(tagged_row.into_tagged_value()); - } - - Ok(Value::Table(rows).tagged(&tag)) -} - fn from_csv( FromCSVArgs { 
headerless, separator, }: FromCSVArgs, - RunnableContext { input, name, .. }: RunnableContext, + runnable_context: RunnableContext, ) -> Result { - let name_tag = name; let sep = match separator { Some(Tagged { item: Value::Primitive(Primitive::String(s)), @@ -101,51 +65,5 @@ fn from_csv( _ => ',', }; - let stream = async_stream! { - let values: Vec> = input.values.collect().await; - - let mut concat_string = String::new(); - let mut latest_tag: Option = None; - - for value in values { - let value_tag = value.tag(); - latest_tag = Some(value_tag.clone()); - match value.item { - Value::Primitive(Primitive::String(s)) => { - concat_string.push_str(&s); - concat_string.push_str("\n"); - } - _ => yield Err(ShellError::labeled_error_with_secondary( - "Expected a string from pipeline", - "requires string input", - name_tag.clone(), - "value originates from here", - value_tag.clone(), - )), - - } - } - - match from_csv_string_to_value(concat_string, headerless, sep, name_tag.clone()) { - Ok(x) => match x { - Tagged { item: Value::Table(list), .. 
} => { - for l in list { - yield ReturnSuccess::value(l); - } - } - x => yield ReturnSuccess::value(x), - }, - Err(_) => if let Some(last_tag) = latest_tag { - yield Err(ShellError::labeled_error_with_secondary( - "Could not parse as CSV", - "input cannot be parsed as CSV", - name_tag.clone(), - "value originates from here", - last_tag.clone(), - )) - } , - } - }; - - Ok(stream.to_output_stream()) + from_structured_data(headerless, sep, "CSV", runnable_context) } diff --git a/src/commands/from_structured_data.rs b/src/commands/from_structured_data.rs new file mode 100644 index 0000000000..4799a40993 --- /dev/null +++ b/src/commands/from_structured_data.rs @@ -0,0 +1,97 @@ +use crate::data::{Primitive, TaggedDictBuilder, Value}; +use crate::prelude::*; +use csv::ReaderBuilder; + +fn from_stuctured_string_to_value( + s: String, + headerless: bool, + separator: char, + tag: impl Into, +) -> Result, csv::Error> { + let mut reader = ReaderBuilder::new() + .has_headers(!headerless) + .delimiter(separator as u8) + .from_reader(s.as_bytes()); + let tag = tag.into(); + + let headers = if headerless { + (1..=reader.headers()?.len()) + .map(|i| format!("Column{}", i)) + .collect::>() + } else { + reader.headers()?.iter().map(String::from).collect() + }; + + let mut rows = vec![]; + for row in reader.records() { + let mut tagged_row = TaggedDictBuilder::new(&tag); + for (value, header) in row?.iter().zip(headers.iter()) { + tagged_row.insert_tagged( + header, + Value::Primitive(Primitive::String(String::from(value))).tagged(&tag), + ) + } + rows.push(tagged_row.into_tagged_value()); + } + + Ok(Value::Table(rows).tagged(&tag)) +} + +pub fn from_structured_data( + headerless: bool, + sep: char, + format_name: &'static str, + RunnableContext { input, name, .. }: RunnableContext, +) -> Result { + let name_tag = name; + + let stream = async_stream! 
{ + let values: Vec> = input.values.collect().await; + + let mut concat_string = String::new(); + let mut latest_tag: Option = None; + + for value in values { + let value_tag = value.tag(); + latest_tag = Some(value_tag.clone()); + match value.item { + Value::Primitive(Primitive::String(s)) => { + concat_string.push_str(&s); + concat_string.push_str("\n"); + } + _ => yield Err(ShellError::labeled_error_with_secondary( + "Expected a string from pipeline", + "requires string input", + name_tag.clone(), + "value originates from here", + value_tag.clone(), + )), + + } + } + + match from_stuctured_string_to_value(concat_string, headerless, sep, name_tag.clone()) { + Ok(x) => match x { + Tagged { item: Value::Table(list), .. } => { + for l in list { + yield ReturnSuccess::value(l); + } + } + x => yield ReturnSuccess::value(x), + }, + Err(_) => if let Some(last_tag) = latest_tag { + let line_one = format!("Could not parse as {}", format_name); + let line_two = format!("input cannot be parsed as {}", format_name); + yield Err(ShellError::labeled_error_with_secondary( + line_one, + line_two, + name_tag.clone(), + "value originates from here", + last_tag.clone(), + )) + } , + } + }; + + Ok(stream.to_output_stream()) +} diff --git a/src/commands/from_tsv.rs b/src/commands/from_tsv.rs index 24841b91c1..7931b8ef38 100644 --- a/src/commands/from_tsv.rs +++ b/src/commands/from_tsv.rs @@ -1,7 +1,6 @@ +use crate::commands::from_structured_data::from_structured_data; use crate::commands::WholeStreamCommand; -use crate::data::{Primitive, TaggedDictBuilder, Value}; use crate::prelude::*; -use csv::ReaderBuilder; pub struct FromTSV; @@ -33,91 +32,9 @@ impl WholeStreamCommand for FromTSV { } } -pub fn from_tsv_string_to_value( - s: String, - headerless: bool, - tag: impl Into, -) -> Result, csv::Error> { - let mut reader = ReaderBuilder::new() - .has_headers(!headerless) - .delimiter(b'\t') - .from_reader(s.as_bytes()); - let tag = tag.into(); - - let headers = if headerless { - 
(1..=reader.headers()?.len()) - .map(|i| format!("Column{}", i)) - .collect::>() - } else { - reader.headers()?.iter().map(String::from).collect() - }; - - let mut rows = vec![]; - for row in reader.records() { - let mut tagged_row = TaggedDictBuilder::new(&tag); - for (value, header) in row?.iter().zip(headers.iter()) { - tagged_row.insert_tagged( - header, - Value::Primitive(Primitive::String(String::from(value))).tagged(&tag), - ) - } - rows.push(tagged_row.into_tagged_value()); - } - - Ok(Value::Table(rows).tagged(&tag)) -} - fn from_tsv( FromTSVArgs { headerless }: FromTSVArgs, - RunnableContext { input, name, .. }: RunnableContext, + runnable_context: RunnableContext, ) -> Result { - let name_tag = name; - - let stream = async_stream! { - let values: Vec> = input.values.collect().await; - - let mut concat_string = String::new(); - let mut latest_tag: Option = None; - - for value in values { - let value_tag = value.tag(); - latest_tag = Some(value_tag.clone()); - match value.item { - Value::Primitive(Primitive::String(s)) => { - concat_string.push_str(&s); - concat_string.push_str("\n"); - } - _ => yield Err(ShellError::labeled_error_with_secondary( - "Expected a string from pipeline", - "requires string input", - &name_tag, - "value originates from here", - &value_tag, - )), - - } - } - - match from_tsv_string_to_value(concat_string, headerless, name_tag.clone()) { - Ok(x) => match x { - Tagged { item: Value::Table(list), .. 
} => { - for l in list { - yield ReturnSuccess::value(l); - } - } - x => yield ReturnSuccess::value(x), - }, - Err(_) => if let Some(last_tag) = latest_tag { - yield Err(ShellError::labeled_error_with_secondary( - "Could not parse as TSV", - "input cannot be parsed as TSV", - &name_tag, - "value originates from here", - &last_tag, - )) - } , - } - }; - - Ok(stream.to_output_stream()) + from_structured_data(headerless, '\t', "TSV", runnable_context) } From 1060ba220670261c3104b2873eff0ffe6b87b61f Mon Sep 17 00:00:00 2001 From: Thomas Hartmann Date: Mon, 11 Nov 2019 15:07:02 +0100 Subject: [PATCH 176/184] Fixes --headerless functionality for from-ssv. Squashed commit of the following: commit fc59d47a2291461d84e0587fc0fe63af0dc26f9f Author: Thomas Hartmann Date: Tue Nov 12 15:39:38 2019 +0100 Fixes inconsistencies in output. commit da4084e9fdd983557b101207b381e333a443e551 Author: Thomas Hartmann Date: Tue Nov 12 13:04:10 2019 +0100 remove unused enum. commit 7f6a105879c8746786b99fb19bb9f0860c41796a Author: Thomas Hartmann Date: Tue Nov 12 12:58:41 2019 +0100 Starts refactoring from_ssv. commit b70ddd169ef0c900e03fb590cb171cc7181528db Author: Thomas Hartmann Date: Tue Nov 12 11:34:06 2019 +0100 Fixes --headerless for non-aligned columns. commit 6332778dd26de8d07be77b291124115141479892 Author: Thomas Hartmann Date: Tue Nov 12 10:27:35 2019 +0100 Fixes from-ssv headerless aligned-columns logic. commit 747d8c812e06349b4a15b8c130721881d86fff98 Author: Thomas Hartmann Date: Mon Nov 11 23:53:59 2019 +0100 fixes unit tests for ssv. commit c77cb451623b37a7a9742c791a4fc38cad053d3d Author: Thomas Hartmann Date: Mon Nov 11 22:49:21 2019 +0100 it compiles! one broken test. commit 08a05964f56cf92507c255057d0aaf2b6dbb6f45 Author: Thomas Hartmann Date: Mon Nov 11 18:52:54 2019 +0100 Backed into a corner. Help. 
commit c95ab683025a8007b8a6f8e1659f021a002df584 Author: Thomas Hartmann Date: Mon Nov 11 17:30:54 2019 +0100 broken but on the way --- src/commands/from_ssv.rs | 280 +++++++++++++++++++++++++++++---------- tests/filters_test.rs | 16 ++- 2 files changed, 226 insertions(+), 70 deletions(-) diff --git a/src/commands/from_ssv.rs b/src/commands/from_ssv.rs index 090bab508f..37bba215f1 100644 --- a/src/commands/from_ssv.rs +++ b/src/commands/from_ssv.rs @@ -45,6 +45,149 @@ impl WholeStreamCommand for FromSSV { } } +enum HeaderOptions<'a> { + WithHeaders(&'a str), + WithoutHeaders, +} + +fn parse_aligned_columns<'a>( + lines: impl Iterator, + headers: HeaderOptions, + separator: &str, +) -> Vec> { + fn construct<'a>( + lines: impl Iterator, + headers: Vec<(String, usize)>, + ) -> Vec> { + lines + .map(|l| { + headers + .iter() + .enumerate() + .map(|(i, (header_name, start_position))| { + let val = match headers.get(i + 1) { + Some((_, end)) => { + if *end < l.len() { + l.get(*start_position..*end) + } else { + l.get(*start_position..) 
+ } + } + None => l.get(*start_position..), + } + .unwrap_or("") + .trim() + .into(); + (header_name.clone(), val) + }) + .collect() + }) + .collect() + } + + let find_indices = |line: &str| { + let values = line + .split(&separator) + .map(str::trim) + .filter(|s| !s.is_empty()); + values + .fold( + (0, vec![]), + |(current_pos, mut indices), value| match line[current_pos..].find(value) { + None => (current_pos, indices), + Some(index) => { + let absolute_index = current_pos + index; + indices.push(absolute_index); + (absolute_index + value.len(), indices) + } + }, + ) + .1 + }; + + let parse_with_headers = |lines, headers_raw: &str| { + let indices = find_indices(headers_raw); + let headers = headers_raw + .split(&separator) + .map(str::trim) + .filter(|s| !s.is_empty()) + .map(String::from) + .zip(indices); + + let columns = headers.collect::>(); + + construct(lines, columns) + }; + + let parse_without_headers = |ls: Vec<&str>| { + let mut indices = ls + .iter() + .flat_map(|s| find_indices(*s)) + .collect::>(); + + indices.sort(); + indices.dedup(); + + let headers: Vec<(String, usize)> = indices + .iter() + .enumerate() + .map(|(i, position)| (format!("Column{}", i + 1), *position)) + .collect(); + + construct(ls.iter().map(|s| s.to_owned()), headers) + }; + + match headers { + HeaderOptions::WithHeaders(headers_raw) => parse_with_headers(lines, headers_raw), + HeaderOptions::WithoutHeaders => parse_without_headers(lines.collect()), + } +} + +fn parse_separated_columns<'a>( + lines: impl Iterator, + headers: HeaderOptions, + separator: &str, +) -> Vec> { + fn collect<'a>( + headers: Vec, + rows: impl Iterator, + separator: &str, + ) -> Vec> { + rows.map(|r| { + headers + .iter() + .zip(r.split(separator).map(str::trim).filter(|s| !s.is_empty())) + .map(|(a, b)| (a.to_owned(), b.to_owned())) + .collect() + }) + .collect() + } + + let parse_with_headers = |lines, headers_raw: &str| { + let headers = headers_raw + .split(&separator) + .map(str::trim) + .map(|s| 
s.to_owned()) + .filter(|s| !s.is_empty()) + .collect(); + collect(headers, lines, separator) + }; + + let parse_without_headers = |ls: Vec<&str>| { + let num_columns = ls.iter().map(|r| r.len()).max().unwrap_or(0); + + let headers = (1..=num_columns) + .map(|i| format!("Column{}", i)) + .collect::>(); + collect(headers, ls.iter().map(|s| s.as_ref()), separator) + }; + + match headers { + HeaderOptions::WithHeaders(headers_raw) => parse_with_headers(lines, headers_raw), + HeaderOptions::WithoutHeaders => parse_without_headers(lines.collect()), + } +} + fn string_to_table( s: &str, headerless: bool, @@ -54,76 +197,23 @@ fn string_to_table( let mut lines = s.lines().filter(|l| !l.trim().is_empty()); let separator = " ".repeat(std::cmp::max(split_at, 1)); - if aligned_columns { - let headers_raw = lines.next()?; - - let headers = headers_raw - .trim() - .split(&separator) - .map(str::trim) - .filter(|s| !s.is_empty()) - .map(|s| (headers_raw.find(s).unwrap(), s.to_owned())); - - let columns = if headerless { - headers - .enumerate() - .map(|(header_no, (string_index, _))| { - (string_index, format!("Column{}", header_no + 1)) - }) - .collect::>() - } else { - headers.collect::>() - }; - - Some( - lines - .map(|l| { - columns - .iter() - .enumerate() - .filter_map(|(i, (start, col))| { - (match columns.get(i + 1) { - Some((end, _)) => l.get(*start..*end), - None => l.get(*start..), - }) - .and_then(|s| Some((col.clone(), String::from(s.trim())))) - }) - .collect() - }) - .collect(), - ) + let (ls, header_options) = if headerless { + (lines, HeaderOptions::WithoutHeaders) } else { - let headers = lines - .next()? 
- .split(&separator) - .map(|s| s.trim()) - .filter(|s| !s.is_empty()) - .map(|s| s.to_owned()) - .collect::>(); + let headers = lines.next()?; + (lines, HeaderOptions::WithHeaders(headers)) + }; - let header_row = if headerless { - (1..=headers.len()) - .map(|i| format!("Column{}", i)) - .collect::>() - } else { - headers - }; + let f = if aligned_columns { + parse_aligned_columns + } else { + parse_separated_columns + }; - Some( - lines - .map(|l| { - header_row - .iter() - .zip( - l.split(&separator) - .map(|s| s.trim()) - .filter(|s| !s.is_empty()), - ) - .map(|(a, b)| (String::from(a), String::from(b))) - .collect() - }) - .collect(), - ) + let parsed = f(ls, header_options, &separator); + match parsed.len() { + 0 => None, + _ => Some(parsed), } } @@ -250,7 +340,7 @@ mod tests { } #[test] - fn it_ignores_headers_when_headerless() { + fn it_uses_first_row_as_data_when_headerless() { let input = r#" a b 1 2 @@ -260,6 +350,7 @@ mod tests { assert_eq!( result, Some(vec![ + vec![owned("Column1", "a"), owned("Column2", "b")], vec![owned("Column1", "1"), owned("Column2", "2")], vec![owned("Column1", "3"), owned("Column2", "4")] ]) @@ -357,4 +448,57 @@ mod tests { ],] ) } + + #[test] + fn it_handles_empty_values_when_headerless_and_aligned_columns() { + let input = r#" + a multi-word value b d + 1 3-3 4 + last + "#; + + let result = string_to_table(input, true, true, 2).unwrap(); + assert_eq!( + result, + vec![ + vec![ + owned("Column1", "a multi-word value"), + owned("Column2", "b"), + owned("Column3", ""), + owned("Column4", "d"), + owned("Column5", "") + ], + vec![ + owned("Column1", "1"), + owned("Column2", ""), + owned("Column3", "3-3"), + owned("Column4", "4"), + owned("Column5", "") + ], + vec![ + owned("Column1", ""), + owned("Column2", ""), + owned("Column3", ""), + owned("Column4", ""), + owned("Column5", "last") + ], + ] + ) + } + + #[test] + fn input_is_parsed_correctly_if_either_option_works() { + let input = r#" + docker-registry docker-registry=default 
docker-registry=default 172.30.78.158 5000/TCP + kubernetes component=apiserver,provider=kubernetes 172.30.0.2 443/TCP + kubernetes-ro component=apiserver,provider=kubernetes 172.30.0.1 80/TCP + "#; + + let aligned_columns_headerless = string_to_table(input, true, true, 2).unwrap(); + let separator_headerless = string_to_table(input, true, false, 2).unwrap(); + let aligned_columns_with_headers = string_to_table(input, false, true, 2).unwrap(); + let separator_with_headers = string_to_table(input, false, false, 2).unwrap(); + assert_eq!(aligned_columns_headerless, separator_headerless); + assert_eq!(aligned_columns_with_headers, separator_with_headers); + } } diff --git a/tests/filters_test.rs b/tests/filters_test.rs index 9ccb4ab718..e18f20be67 100644 --- a/tests/filters_test.rs +++ b/tests/filters_test.rs @@ -450,7 +450,18 @@ fn converts_from_ssv_text_treating_first_line_as_data_with_flag() { "#, )]); - let actual = nu!( + let aligned_columns = nu!( + cwd: dirs.test(), h::pipeline( + r#" + open oc_get_svc.txt + | from-ssv --headerless --aligned-columns + | first + | get Column1 + | echo $it + "# + )); + + let separator_based = nu!( cwd: dirs.test(), h::pipeline( r#" open oc_get_svc.txt @@ -461,7 +472,8 @@ fn converts_from_ssv_text_treating_first_line_as_data_with_flag() { "# )); - assert_eq!(actual, "docker-registry"); + assert_eq!(aligned_columns, separator_based); + assert_eq!(separator_based, "docker-registry"); }) } From 87d58535ff277067ce42e12464c81e7d5f3a5112 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?= Date: Tue, 12 Nov 2019 14:04:53 -0500 Subject: [PATCH 177/184] Downgrade futures-codec. 
--- Cargo.lock | 7 +++---- Cargo.toml | 2 +- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a3d563a989..9fac3a68ad 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -800,12 +800,11 @@ dependencies = [ [[package]] name = "futures_codec" -version = "0.3.0" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", "futures-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", - "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1519,7 +1518,7 @@ dependencies = [ "dunce 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "futures-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", "futures-timer 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "futures_codec 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "futures_codec 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "getset 0.0.9 (registry+https://github.com/rust-lang/crates.io-index)", "git2 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)", "glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2934,7 +2933,7 @@ dependencies = [ "checksum futures-sink-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)" = "86f148ef6b69f75bb610d4f9a2336d4fc88c4b5b67129d1a340dd0fd362efeec" "checksum futures-timer 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "527936b95e804a42c1cf05999e175892bde27464b459785a5fa2664a06ecb172" "checksum futures-util-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)" = "5ce968633c17e5f97936bd2797b6e38fb56cf16a7422319f7ec2e30d3c470e8d" -"checksum futures_codec 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a736da44bcb6aa3acd8a5cebe8517a9d1dace7b1c6b1b8aa185e7cab168e8871" 
+"checksum futures_codec 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "36552cd31353fd135114510d53b8d120758120c36aa636a9341970f9efb1e4a0" "checksum getrandom 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "473a1265acc8ff1e808cd0a1af8cee3c2ee5200916058a2ca113c29f2d903571" "checksum getset 0.0.9 (registry+https://github.com/rust-lang/crates.io-index)" = "5bb3f5b7d8d70c9bd23cf29b2b38094661418fb0ea79f1b0cc2019a11d6f5429" "checksum git2 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "39f27186fbb5ec67ece9a56990292bc5aed3c3fc51b9b07b0b52446b1dfb4a82" diff --git a/Cargo.toml b/Cargo.toml index 1ac3c22ccd..60d971492b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,7 +28,7 @@ byte-unit = "3.0.3" base64 = "0.11" futures-preview = { version = "=0.3.0-alpha.19", features = ["compat", "io-compat"] } async-stream = "0.1.2" -futures_codec = "=0.3.0" +futures_codec = "0.2.5" num-traits = "0.2.8" term = "0.5.2" bytes = "0.4.12" From 036860770b8065593b678fef70fafb56a8b2c1b9 Mon Sep 17 00:00:00 2001 From: "R.T. Lechow" Date: Thu, 14 Nov 2019 16:59:39 -0500 Subject: [PATCH 178/184] Document pivot command Part of https://github.com/nushell/nushell/issues/711 --- docs/commands/pivot.md | 75 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 75 insertions(+) create mode 100644 docs/commands/pivot.md diff --git a/docs/commands/pivot.md b/docs/commands/pivot.md new file mode 100644 index 0000000000..632dae4e0d --- /dev/null +++ b/docs/commands/pivot.md @@ -0,0 +1,75 @@ +# pivot + +Pivots the table contents so rows become columns and columns become rows. 
+ +## Examples + +```sh +> ls docs +━━━┯━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━ + # │ name │ type │ readonly │ size │ accessed │ modified +───┼────────────────────┼───────────┼──────────┼────────┼─────────────┼───────────── + 0 │ docs/commands │ Directory │ │ 4.1 KB │ an hour ago │ an hour ago + 1 │ docs/docker.md │ File │ │ 7.0 KB │ an hour ago │ a day ago + 2 │ docs/philosophy.md │ File │ │ 896 B │ an hour ago │ a day ago +━━━┷━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━ + +> ls docs | pivot +━━━┯━━━━━━━━━━┯━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━ + # │ Column0 │ Column1 │ Column2 │ Column3 +───┼──────────┼───────────────┼────────────────┼──────────────────── + 0 │ name │ docs/commands │ docs/docker.md │ docs/philosophy.md + 1 │ type │ Directory │ File │ File + 2 │ readonly │ │ │ + 3 │ size │ 4.1 KB │ 7.0 KB │ 896 B + 4 │ accessed │ an hour ago │ an hour ago │ an hour ago + 5 │ modified │ an hour ago │ a day ago │ a day ago +━━━┷━━━━━━━━━━┷━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━ +``` + +Use `--header-row` to treat the first row as column names: + +```shell +> ls docs | pivot --header-row +━━━┯━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━ + # │ docs/commands │ docs/docker.md │ docs/philosophy.md +───┼───────────────┼────────────────┼──────────────────── + 0 │ Directory │ File │ File + 1 │ │ │ + 2 │ 4.1 KB │ 7.0 KB │ 896 B + 3 │ an hour ago │ an hour ago │ an hour ago + 4 │ an hour ago │ a day ago │ a day ago +━━━┷━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━ +``` + +Use `--ignore-titles` to prevent pivoting the column names into values: + +```shell +> ls docs | pivot --ignore-titles +━━━┯━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━ + # │ Column0 │ Column1 │ Column2 +───┼───────────────┼────────────────┼──────────────────── + 0 │ docs/commands │ docs/docker.md │ docs/philosophy.md + 1 │ Directory │ File │ File + 2 │ │ │ + 3 │ 4.1 KB │ 7.0 
KB │ 896 B + 4 │ an hour ago │ an hour ago │ an hour ago + 5 │ an hour ago │ a day ago │ a day ago +━━━┷━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━ +``` + +Additional arguments are used as column names: + +```shell +> ls docs | pivot foo bar baz +━━━┯━━━━━━━━━━┯━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━ + # │ foo │ bar │ baz │ Column3 +───┼──────────┼───────────────┼────────────────┼──────────────────── + 0 │ name │ docs/commands │ docs/docker.md │ docs/philosophy.md + 1 │ type │ Directory │ File │ File + 2 │ readonly │ │ │ + 3 │ size │ 4.1 KB │ 7.0 KB │ 896 B + 4 │ accessed │ 2 hours ago │ 2 hours ago │ 2 hours ago + 5 │ modified │ 2 hours ago │ a day ago │ a day ago +━━━┷━━━━━━━━━━┷━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━ +``` From 0756145caf77f95f24632363ddebddc184a78a63 Mon Sep 17 00:00:00 2001 From: uma0317 Date: Fri, 15 Nov 2019 11:52:51 +0900 Subject: [PATCH 179/184] Fix move file to diffrent partition on Windows --- src/shell/filesystem_shell.rs | 199 ++++++++++++++++++++++++++-------- 1 file changed, 154 insertions(+), 45 deletions(-) diff --git a/src/shell/filesystem_shell.rs b/src/shell/filesystem_shell.rs index 7b8310141c..60d5135d11 100644 --- a/src/shell/filesystem_shell.rs +++ b/src/shell/filesystem_shell.rs @@ -623,26 +623,71 @@ impl Shell for FilesystemShell { } if entry.is_file() { - match std::fs::rename(&entry, &destination) { - Err(e) => { - return Err(ShellError::labeled_error( - format!( - "Rename {:?} to {:?} aborted. {:}", - entry_file_name, - destination_file_name, - e.to_string(), - ), - format!( - "Rename {:?} to {:?} aborted. {:}", - entry_file_name, - destination_file_name, - e.to_string(), - ), - name_tag, - )); - } - Ok(o) => o, - }; + #[cfg(not(windows))] + { + match std::fs::rename(&entry, &destination) { + Err(e) => { + return Err(ShellError::labeled_error( + format!( + "Rename {:?} to {:?} aborted. 
{:}", + entry_file_name, + destination_file_name, + e.to_string(), + ), + format!( + "Rename {:?} to {:?} aborted. {:}", + entry_file_name, + destination_file_name, + e.to_string(), + ), + name_tag, + )); + } + Ok(o) => o, + }; + } + #[cfg(windows)] + { + match std::fs::copy(&entry, &destination) { + Err(e) => { + return Err(ShellError::labeled_error( + format!( + "Rename {:?} to {:?} aborted. {:}", + entry_file_name, + destination_file_name, + e.to_string(), + ), + format!( + "Rename {:?} to {:?} aborted. {:}", + entry_file_name, + destination_file_name, + e.to_string(), + ), + name_tag, + )); + } + Ok(_) => match std::fs::remove_file(&entry) { + Err(e) => { + return Err(ShellError::labeled_error( + format!( + "Rename {:?} to {:?} aborted. {:}", + entry_file_name, + destination_file_name, + e.to_string(), + ), + format!( + "Rename {:?} to {:?} aborted. {:}", + entry_file_name, + destination_file_name, + e.to_string(), + ), + name_tag, + )); + } + Ok(o) => o, + }, + }; + } } if entry.is_dir() { @@ -745,26 +790,45 @@ impl Shell for FilesystemShell { } if src.is_file() { - match std::fs::rename(src, dst) { + match std::fs::copy(&src, &dst) { Err(e) => { return Err(ShellError::labeled_error( format!( "Rename {:?} to {:?} aborted. {:}", - entry_file_name, + src, destination_file_name, e.to_string(), ), format!( "Rename {:?} to {:?} aborted. {:}", - entry_file_name, + src, destination_file_name, e.to_string(), ), name_tag, )); } - Ok(o) => o, - } + Ok(_) => match std::fs::remove_file(&src) { + Err(e) => { + return Err(ShellError::labeled_error( + format!( + "Rename {:?} to {:?} aborted. {:}", + entry_file_name, + destination_file_name, + e.to_string(), + ), + format!( + "Rename {:?} to {:?} aborted. 
{:}", + entry_file_name, + destination_file_name, + e.to_string(), + ), + name_tag, + )); + } + Ok(o) => o, + }, + }; } } @@ -824,26 +888,71 @@ impl Shell for FilesystemShell { to.push(entry_file_name); if entry.is_file() { - match std::fs::rename(&entry, &to) { - Err(e) => { - return Err(ShellError::labeled_error( - format!( - "Rename {:?} to {:?} aborted. {:}", - entry_file_name, - destination_file_name, - e.to_string(), - ), - format!( - "Rename {:?} to {:?} aborted. {:}", - entry_file_name, - destination_file_name, - e.to_string(), - ), - name_tag, - )); - } - Ok(o) => o, - }; + #[cfg(not(windows))] + { + match std::fs::rename(&entry, &to) { + Err(e) => { + return Err(ShellError::labeled_error( + format!( + "Rename {:?} to {:?} aborted. {:}", + entry_file_name, + destination_file_name, + e.to_string(), + ), + format!( + "Rename {:?} to {:?} aborted. {:}", + entry_file_name, + destination_file_name, + e.to_string(), + ), + name_tag, + )); + } + Ok(o) => o, + }; + } + #[cfg(windows)] + { + match std::fs::copy(&entry, &to) { + Err(e) => { + return Err(ShellError::labeled_error( + format!( + "Rename {:?} to {:?} aborted. {:}", + entry_file_name, + destination_file_name, + e.to_string(), + ), + format!( + "Rename {:?} to {:?} aborted. {:}", + entry_file_name, + destination_file_name, + e.to_string(), + ), + name_tag, + )); + } + Ok(_) => match std::fs::remove_file(&entry) { + Err(e) => { + return Err(ShellError::labeled_error( + format!( + "Remove {:?} to {:?} aborted. {:}", + entry_file_name, + destination_file_name, + e.to_string(), + ), + format!( + "Remove {:?} to {:?} aborted. 
{:}", + entry_file_name, + destination_file_name, + e.to_string(), + ), + name_tag, + )); + } + Ok(o) => o, + }, + }; + } } } } From 08b770719cf71e8ab8db5cca8f037a44b44b7e04 Mon Sep 17 00:00:00 2001 From: sebastian-xyz <52786998+sebastian-xyz@users.noreply.github.com> Date: Fri, 15 Nov 2019 15:37:41 +0100 Subject: [PATCH 180/184] Add append command documentation --- docs/commands/append.md | 53 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 53 insertions(+) create mode 100644 docs/commands/append.md diff --git a/docs/commands/append.md b/docs/commands/append.md new file mode 100644 index 0000000000..8bb2cabee3 --- /dev/null +++ b/docs/commands/append.md @@ -0,0 +1,53 @@ +# append +This command allows you to append the given row to the table. + +**Note**: +- `append` does not change a file itself. If you want to save your changes, you need to run the `save` command +- if you want to add something containing a whitespace character, you need to put it in quotation marks + +## Examples + +Let's add more cities to this table: + +```shell +> open cities.txt | lines +━━━┯━━━━━━━━━━━━ + # │ +───┼──────────── + 0 │ Canberra + 1 │ London + 2 │ Nairobi + 3 │ Washington +━━━┷━━━━━━━━━━━━ +``` + +You can add a new row by using `append`: + +```shell +> open cities.txt | lines | append Beijing +━━━┯━━━━━━━━━━━━ + # │ +───┼──────────── + 0 │ Canberra + 1 │ London + 2 │ Nairobi + 3 │ Washington + 4 │ Beijing +━━━┷━━━━━━━━━━━━ +``` + +It's not possible to add multiple rows at once, so you'll need to call `append` multiple times: + +```shell +> open cities.txt | lines | append Beijing | append "Buenos Aires" +━━━┯━━━━━━━━━━━━━━ + # │ +───┼────────────── + 0 │ Canberra + 1 │ London + 2 │ Nairobi + 3 │ Washington + 4 │ Beijing + 5 │ Buenos Aires +━━━┷━━━━━━━━━━━━━━ +``` From 63667d9e465fbbbb3eead59f321ed957601e9bff Mon Sep 17 00:00:00 2001 From: sebastian-xyz <52786998+sebastian-xyz@users.noreply.github.com> Date: Fri, 15 Nov 2019 15:53:58 +0100 Subject: [PATCH 181/184] Add 
prepend command documentation --- docs/commands/prepend.md | 56 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 56 insertions(+) create mode 100644 docs/commands/prepend.md diff --git a/docs/commands/prepend.md b/docs/commands/prepend.md new file mode 100644 index 0000000000..86b9249237 --- /dev/null +++ b/docs/commands/prepend.md @@ -0,0 +1,56 @@ +# prepend +This command prepends the given row to the front of the table + +**Note**: +- `prepend` does not change a file itself. If you want to save your changes, you need to run the `save` command +- if you want to add something containing a whitespace character, you need to put it in quotation marks + +## Examples + +Let's complete this table with the missing continents: + +```shell +> open continents.txt | lines +━━━┯━━━━━━━━━━━━━━━ + # │ +───┼─────────────── + 0 │ Africa + 1 │ South America + 2 │ Australia + 3 │ Europe + 4 │ Antarctica +━━━┷━━━━━━━━━━━━━━━ +``` + +You can add a new row at the top by using `prepend`: + +```shell +> open continents.txt | lines | prepend Asia +━━━┯━━━━━━━━━━━━━━━ + # │ +───┼─────────────── + 0 │ Asia + 1 │ Africa + 2 │ South America + 3 │ Australia + 4 │ Europe + 5 │ Antarctica +━━━┷━━━━━━━━━━━━━━━ +``` + +It's not possible to add multiple rows at once, so you'll need to call `prepend` multiple times: + +```shell +> open continents.txt | lines | prepend Asia | prepend "North America" +━━━┯━━━━━━━━━━━━━━━ + # │ +───┼─────────────── + 0 │ North America + 1 │ Asia + 2 │ Africa + 3 │ South America + 4 │ Australia + 5 │ Europe + 6 │ Antarctica +━━━┷━━━━━━━━━━━━━━━ +``` From 3c3637b674e648649842b141a16a0f56c5ef95cc Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Sat, 16 Nov 2019 14:36:51 +1300 Subject: [PATCH 182/184] Add comparison between dates --- src/data/base.rs | 3 + .../hir/syntax_shape/expression/unit.rs | 19 ++- src/parser/parse/unit.rs | 110 +++++++++++++----- 3 files changed, 100 insertions(+), 32 deletions(-) diff --git a/src/data/base.rs b/src/data/base.rs index 
f0357273e2..4baa8f1d67 100644 --- a/src/data/base.rs +++ b/src/data/base.rs @@ -856,6 +856,7 @@ enum CompareValues { Ints(BigInt, BigInt), Decimals(BigDecimal, BigDecimal), String(String, String), + Date(DateTime, DateTime), } impl CompareValues { @@ -864,6 +865,7 @@ impl CompareValues { CompareValues::Ints(left, right) => left.cmp(right), CompareValues::Decimals(left, right) => left.cmp(right), CompareValues::String(left, right) => left.cmp(right), + CompareValues::Date(left, right) => right.cmp(left), } } } @@ -900,6 +902,7 @@ fn coerce_compare_primitive( CompareValues::Decimals(BigDecimal::from(*left), right.clone()) } (String(left), String(right)) => CompareValues::String(left.clone(), right.clone()), + (Date(left), Date(right)) => CompareValues::Date(left.clone(), right.clone()), _ => return Err((left.type_name(), right.type_name())), }) } diff --git a/src/parser/hir/syntax_shape/expression/unit.rs b/src/parser/hir/syntax_shape/expression/unit.rs index c4bd85434b..901b86e8d6 100644 --- a/src/parser/hir/syntax_shape/expression/unit.rs +++ b/src/parser/hir/syntax_shape/expression/unit.rs @@ -86,12 +86,19 @@ fn unit_size(input: &str, bare_span: Span) -> IResult<&str, (Spanned, }; let (input, unit) = all_consuming(alt(( - value(Unit::B, alt((tag("B"), tag("b")))), - value(Unit::KB, alt((tag("KB"), tag("kb"), tag("Kb")))), - value(Unit::MB, alt((tag("MB"), tag("mb"), tag("Mb")))), - value(Unit::GB, alt((tag("GB"), tag("gb"), tag("Gb")))), - value(Unit::TB, alt((tag("TB"), tag("tb"), tag("Tb")))), - value(Unit::PB, alt((tag("PB"), tag("pb"), tag("Pb")))), + value(Unit::Byte, alt((tag("B"), tag("b")))), + value(Unit::Kilobyte, alt((tag("KB"), tag("kb"), tag("Kb")))), + value(Unit::Megabyte, alt((tag("MB"), tag("mb"), tag("Mb")))), + value(Unit::Gigabyte, alt((tag("GB"), tag("gb"), tag("Gb")))), + value(Unit::Terabyte, alt((tag("TB"), tag("tb"), tag("Tb")))), + value(Unit::Petabyte, alt((tag("PB"), tag("pb"), tag("Pb")))), + value(Unit::Second, tag("s")), + 
value(Unit::Minute, tag("m")), + value(Unit::Hour, tag("h")), + value(Unit::Day, tag("d")), + value(Unit::Week, tag("w")), + value(Unit::Month, tag("M")), + value(Unit::Year, tag("y")), )))(input)?; let start_span = number.span.end(); diff --git a/src/parser/parse/unit.rs b/src/parser/parse/unit.rs index e2075636a3..e98a5cdbcb 100644 --- a/src/parser/parse/unit.rs +++ b/src/parser/parse/unit.rs @@ -3,15 +3,34 @@ use crate::prelude::*; use serde::{Deserialize, Serialize}; use std::fmt; use std::str::FromStr; +use std::time::Duration; +use std::time::SystemTime; #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)] pub enum Unit { - B, - KB, - MB, - GB, - TB, - PB, + // Filesize units + Byte, + Kilobyte, + Megabyte, + Gigabyte, + Terabyte, + Petabyte, + + // Duration units + Second, + Minute, + Hour, + Day, + Week, + Month, + Year, +} + +fn convert_number_to_u64(number: &Number) -> u64 { + match number { + Number::Int(big_int) => big_int.to_u64().unwrap(), + Number::Decimal(big_decimal) => big_decimal.to_u64().unwrap(), + } } impl FormatDebug for Spanned { @@ -23,26 +42,58 @@ impl FormatDebug for Spanned { impl Unit { pub fn as_str(&self) -> &str { match *self { - Unit::B => "B", - Unit::KB => "KB", - Unit::MB => "MB", - Unit::GB => "GB", - Unit::TB => "TB", - Unit::PB => "PB", + Unit::Byte => "B", + Unit::Kilobyte => "KB", + Unit::Megabyte => "MB", + Unit::Gigabyte => "GB", + Unit::Terabyte => "TB", + Unit::Petabyte => "PB", + Unit::Second => "s", + Unit::Minute => "m", + Unit::Hour => "h", + Unit::Day => "d", + Unit::Week => "w", + Unit::Month => "M", + Unit::Year => "y", } } pub(crate) fn compute(&self, size: &Number) -> Value { let size = size.clone(); - Value::number(match self { - Unit::B => size, - Unit::KB => size * 1024, - Unit::MB => size * 1024 * 1024, - Unit::GB => size * 1024 * 1024 * 1024, - Unit::TB => size * 1024 * 1024 * 1024 * 1024, - Unit::PB => size * 1024 * 1024 * 1024 * 1024 * 1024, - }) + match self { + 
Unit::Byte => Value::number(size), + Unit::Kilobyte => Value::number(size * 1024), + Unit::Megabyte => Value::number(size * 1024 * 1024), + Unit::Gigabyte => Value::number(size * 1024 * 1024 * 1024), + Unit::Terabyte => Value::number(size * 1024 * 1024 * 1024 * 1024), + Unit::Petabyte => Value::number(size * 1024 * 1024 * 1024 * 1024 * 1024), + Unit::Second => Value::system_date( + SystemTime::now() - Duration::from_secs(convert_number_to_u64(&size)), + ), + Unit::Minute => Value::system_date( + SystemTime::now() - Duration::from_secs(60 * convert_number_to_u64(&size)), + ), + Unit::Hour => Value::system_date( + SystemTime::now() - Duration::from_secs(60 * 60 * convert_number_to_u64(&size)), + ), + Unit::Day => Value::system_date( + SystemTime::now() + - Duration::from_secs(24 * 60 * 60 * convert_number_to_u64(&size)), + ), + Unit::Week => Value::system_date( + SystemTime::now() + - Duration::from_secs(7 * 24 * 60 * 60 * convert_number_to_u64(&size)), + ), + Unit::Month => Value::system_date( + SystemTime::now() + - Duration::from_secs(30 * 24 * 60 * 60 * convert_number_to_u64(&size)), + ), + Unit::Year => Value::system_date( + SystemTime::now() + - Duration::from_secs(365 * 24 * 60 * 60 * convert_number_to_u64(&size)), + ), + } } } @@ -50,12 +101,19 @@ impl FromStr for Unit { type Err = (); fn from_str(input: &str) -> Result::Err> { match input { - "B" | "b" => Ok(Unit::B), - "KB" | "kb" | "Kb" | "K" | "k" => Ok(Unit::KB), - "MB" | "mb" | "Mb" => Ok(Unit::MB), - "GB" | "gb" | "Gb" => Ok(Unit::GB), - "TB" | "tb" | "Tb" => Ok(Unit::TB), - "PB" | "pb" | "Pb" => Ok(Unit::PB), + "B" | "b" => Ok(Unit::Byte), + "KB" | "kb" | "Kb" | "K" | "k" => Ok(Unit::Kilobyte), + "MB" | "mb" | "Mb" => Ok(Unit::Megabyte), + "GB" | "gb" | "Gb" => Ok(Unit::Gigabyte), + "TB" | "tb" | "Tb" => Ok(Unit::Terabyte), + "PB" | "pb" | "Pb" => Ok(Unit::Petabyte), + "s" => Ok(Unit::Second), + "m" => Ok(Unit::Minute), + "h" => Ok(Unit::Hour), + "d" => Ok(Unit::Day), + "w" => Ok(Unit::Week), + "M" => 
Ok(Unit::Month), + "y" => Ok(Unit::Year), _ => Err(()), } } From ce106bfda95bf6cc8236006f621c57d3ac33c3c4 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Sat, 16 Nov 2019 21:23:04 +1300 Subject: [PATCH 183/184] Fix build warnings --- src/parser/parse_command.rs | 2 +- src/plugins/binaryview.rs | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/parser/parse_command.rs b/src/parser/parse_command.rs index 32f05fd1ca..cb5111179b 100644 --- a/src/parser/parse_command.rs +++ b/src/parser/parse_command.rs @@ -609,7 +609,7 @@ fn extract_optional( name: &str, tokens: &mut hir::TokensIterator<'_>, source: &Text, -) -> Result<(Option<(usize, Spanned)>), ParseError> { +) -> Result)>, ParseError> { let flag = tokens.extract(|t| t.as_flag(name, source)); match flag { diff --git a/src/plugins/binaryview.rs b/src/plugins/binaryview.rs index 0072df5b4d..4e563beabe 100644 --- a/src/plugins/binaryview.rs +++ b/src/plugins/binaryview.rs @@ -211,7 +211,7 @@ struct RawImageBuffer { buffer: Vec, } -fn load_from_png_buffer(buffer: &[u8]) -> Option<(RawImageBuffer)> { +fn load_from_png_buffer(buffer: &[u8]) -> Option { use image::ImageDecoder; let decoder = image::png::PNGDecoder::new(buffer); @@ -231,7 +231,7 @@ fn load_from_png_buffer(buffer: &[u8]) -> Option<(RawImageBuffer)> { }) } -fn load_from_jpg_buffer(buffer: &[u8]) -> Option<(RawImageBuffer)> { +fn load_from_jpg_buffer(buffer: &[u8]) -> Option { use image::ImageDecoder; let decoder = image::jpeg::JPEGDecoder::new(buffer); From f4dc79f4baed493a637b58dba2e23f63020192fd Mon Sep 17 00:00:00 2001 From: sebastian-xyz <52786998+sebastian-xyz@users.noreply.github.com> Date: Sat, 16 Nov 2019 15:31:28 +0100 Subject: [PATCH 184/184] add group-by command documentation --- docs/commands/group-by.md | 72 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 72 insertions(+) create mode 100644 docs/commands/group-by.md diff --git a/docs/commands/group-by.md b/docs/commands/group-by.md new file mode 100644 index 
0000000000..9628800031 --- /dev/null +++ b/docs/commands/group-by.md @@ -0,0 +1,72 @@ +# group-by + +This command creates a new table with the data from the table rows grouped by the column given. + +## Examples + +Let's say we have this table of all countries in the world sorted by their population: + +```shell +> open countries_by_population.json | from-json | first 10 +━━━┯━━━━━━┯━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━┯━━━━━━━━ + # │ rank │ country or area │ UN continental region │ UN statistical region │ population 2018 │ population 2019 │ change +───┼──────┼─────────────────┼───────────────────────┼───────────────────────┼─────────────────┼─────────────────┼──────── + 0 │ 1 │ China │ Asia │ Eastern Asia │ 1,427,647,786 │ 1,433,783,686 │ +0.4% + 1 │ 2 │ India │ Asia │ Southern Asia │ 1,352,642,280 │ 1,366,417,754 │ +1.0% + 2 │ 3 │ United States │ Americas │ Northern America │ 327,096,265 │ 329,064,917 │ +0.6% + 3 │ 4 │ Indonesia │ Asia │ South-eastern Asia │ 267,670,543 │ 270,625,568 │ +1.1% + 4 │ 5 │ Pakistan │ Asia │ Southern Asia │ 212,228,286 │ 216,565,318 │ +2.0% + 5 │ 6 │ Brazil │ Americas │ South America │ 209,469,323 │ 211,049,527 │ +0.8% + 6 │ 7 │ Nigeria │ Africa │ Western Africa │ 195,874,683 │ 200,963,599 │ +2.6% + 7 │ 8 │ Bangladesh │ Asia │ Southern Asia │ 161,376,708 │ 163,046,161 │ +1.0% + 8 │ 9 │ Russia │ Europe │ Eastern Europe │ 145,734,038 │ 145,872,256 │ +0.1% + 9 │ 10 │ Mexico │ Americas │ Central America │ 126,190,788 │ 127,575,529 │ +1.1% +━━━┷━━━━━━┷━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━┷━━━━━━━━ +``` + +Here we have listed only the first 10 lines. In total this table has 233 rows, which is too big to easily get information out of. + +We can use the `group-by` command on 'UN continental region' to create a table per continental region. 
+ +```shell +> open countries_by_population.json | from-json | group-by "UN continental region" +━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━ + Asia │ Americas │ Africa │ Europe │ Oceania +──────────────────┼──────────────────┼──────────────────┼──────────────────┼────────────────── + [table: 51 rows] │ [table: 53 rows] │ [table: 58 rows] │ [table: 48 rows] │ [table: 23 rows] +━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━ +``` + +Now we can already get some information, such as "which continental regions are there" and "how many countries are in each region". +If we want to see only the countries in the continental region of Oceania, we can type: + +```shell +> open countries_by_population.json | from-json | group-by "UN continental region" | get Oceania +━━━━┯━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━┯━━━━━━━━ + # │ rank │ country or area │ UN continental region │ UN statistical region │ population 2018 │ population 2019 │ change +────┼──────┼────────────────────────────────┼───────────────────────┼───────────────────────────┼─────────────────┼─────────────────┼──────── + 0 │ 55 │ Australia │ Oceania │ Australia and New Zealand │ 24,898,152 │ 25,203,198 │ +1.2% + 1 │ 98 │ Papua New Guinea │ Oceania │ Melanesia │ 8,606,323 │ 8,776,109 │ +2.0% + 2 │ 125 │ New Zealand │ Oceania │ Australia and New Zealand │ 4,743,131 │ 4,783,063 │ +0.8% + 3 │ 161 │ Fiji │ Oceania │ Melanesia │ 883,483 │ 889,953 │ +0.7% + 4 │ 166 │ Solomon Islands │ Oceania │ Melanesia │ 652,857 │ 669,823 │ +2.6% + 5 │ 181 │ Vanuatu │ Oceania │ Melanesia │ 292,680 │ 299,882 │ +2.5% + 6 │ 183 │ New Caledonia │ Oceania │ Melanesia │ 279,993 │ 282,750 │ +1.0% + 7 │ 185 │ French Polynesia │ Oceania │ Polynesia │ 277,679 │ 279,287 │ +0.6% + 8 │ 188 │ Samoa │ Oceania │ Polynesia │ 196,129 │ 197,097 │ +0.5% + 9 │ 191 │ Guam │ 
Oceania │ Micronesia │ 165,768 │ 167,294 │ +0.9% + 10 │ 193 │ Kiribati │ Oceania │ Micronesia │ 115,847 │ 117,606 │ +1.5% + 11 │ 194 │ Federated States of Micronesia │ Oceania │ Micronesia │ 112,640 │ 113,815 │ +1.0% + 12 │ 196 │ Tonga │ Oceania │ Polynesia │ 110,589 │ 110,940 │ +0.3% + 13 │ 207 │ Marshall Islands │ Oceania │ Micronesia │ 58,413 │ 58,791 │ +0.6% + 14 │ 209 │ Northern Mariana Islands │ Oceania │ Micronesia │ 56,882 │ 56,188 │ −1.2% + 15 │ 210 │ American Samoa │ Oceania │ Polynesia │ 55,465 │ 55,312 │ −0.3% + 16 │ 221 │ Palau │ Oceania │ Micronesia │ 17,907 │ 18,008 │ +0.6% + 17 │ 222 │ Cook Islands │ Oceania │ Polynesia │ 17,518 │ 17,548 │ +0.2% + 18 │ 224 │ Tuvalu │ Oceania │ Polynesia │ 11,508 │ 11,646 │ +1.2% + 19 │ 225 │ Wallis and Futuna │ Oceania │ Polynesia │ 11,661 │ 11,432 │ −2.0% + 20 │ 226 │ Nauru │ Oceania │ Micronesia │ 10,670 │ 10,756 │ +0.8% + 21 │ 231 │ Niue │ Oceania │ Polynesia │ 1,620 │ 1,615 │ −0.3% + 22 │ 232 │ Tokelau │ Oceania │ Polynesia │ 1,319 │ 1,340 │ +1.6% +━━━━┷━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━┷━━━━━━━━ +```