diff --git a/.azure/azure-pipelines.yml b/.azure/azure-pipelines.yml index e1f9b93681..f4c24325e3 100644 --- a/.azure/azure-pipelines.yml +++ b/.azure/azure-pipelines.yml @@ -3,12 +3,27 @@ trigger: strategy: matrix: - linux-nightly: + linux-stable: image: ubuntu-16.04 - macos-nightly: + style: 'unflagged' + macos-stable: image: macos-10.14 - windows-nightly: + style: 'unflagged' + windows-stable: image: vs2017-win2016 + style: 'unflagged' + linux-nightly-canary: + image: ubuntu-16.04 + style: 'canary' + macos-nightly-canary: + image: macos-10.14 + style: 'canary' + windows-nightly-canary: + image: vs2017-win2016 + style: 'canary' + fmt: + image: ubuntu-16.04 + style: 'fmt' pool: vmImage: $(image) @@ -20,13 +35,19 @@ steps: then sudo apt-get -y install libxcb-composite0-dev libx11-dev fi - curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain `cat rust-toolchain` + curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain "stable" export PATH=$HOME/.cargo/bin:$PATH + rustup update rustc -Vv echo "##vso[task.prependpath]$HOME/.cargo/bin" - rustup component add rustfmt --toolchain `cat rust-toolchain` + rustup component add rustfmt --toolchain "stable" displayName: Install Rust - bash: RUSTFLAGS="-D warnings" cargo test --all-features + condition: eq(variables['style'], 'unflagged') + displayName: Run tests + - bash: NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo test --all-features + condition: eq(variables['style'], 'canary') displayName: Run tests - bash: cargo fmt --all -- --check + condition: eq(variables['style'], 'fmt') displayName: Lint diff --git a/.cargo/config b/.cargo/config index e69de29bb2..620568b44f 100644 --- a/.cargo/config +++ b/.cargo/config @@ -0,0 +1,3 @@ +[build] + +rustflags = "--cfg coloring_in_tokens" diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000000..84ab81641e --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,30 @@ +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: '' +assignees: '' + +--- + +**Describe the bug** +A clear and concise description of what the bug is. + +**To Reproduce** +Steps to reproduce the behavior: +1. +2. +3. + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Screenshots** +If applicable, add screenshots to help explain your problem. + +**Configuration (please complete the following information):** + - OS: [e.g. Windows] + - Version [e.g. 0.4.0] + - Optional features (if any) + +Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000000..bbcbbe7d61 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,20 @@ +--- +name: Feature request +about: Suggest an idea for this project +title: '' +labels: '' +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context or screenshots about the feature request here. 
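For context on the `.cargo/config` change above: `rustflags = "--cfg coloring_in_tokens"` passes a custom cfg predicate to every rustc invocation, so source code can be gated on it with `#[cfg(...)]` or `cfg!(...)`. The sketch below is a minimal, self-contained illustration of that general mechanism only; the function names are hypothetical and not taken from the nushell sources, and it compiles the same way whether or not the flag is set. The `NUSHELL_ENABLE_ALL_FLAGS=1` setting in the canary CI job is an ordinary environment variable and is independent of this compile-time cfg.

    // Illustrative only: hypothetical names, not nushell's actual modules.
    // With `.cargo/config` supplying `--cfg coloring_in_tokens`, this item is compiled in.
    #[cfg(coloring_in_tokens)]
    fn shade_mode() -> &'static str {
        "token-based coloring enabled via --cfg coloring_in_tokens"
    }

    // Fallback used when the flag is absent, so the crate builds either way.
    #[cfg(not(coloring_in_tokens))]
    fn shade_mode() -> &'static str {
        "token-based coloring compiled out"
    }

    fn main() {
        // `cfg!` evaluates the same predicate as a boolean expression.
        println!("{} (cfg set: {})", shade_mode(), cfg!(coloring_in_tokens));
    }
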
diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml index e84cefd3ab..f803d6e343 100644 --- a/.github/workflows/docker-publish.yml +++ b/.github/workflows/docker-publish.yml @@ -2,7 +2,7 @@ name: Publish consumable Docker images on: push: - tags: ['*.*.*'] + tags: ['v?[0-9]+.[0-9]+.[0-9]+*'] jobs: compile: @@ -14,7 +14,11 @@ jobs: - x86_64-unknown-linux-gnu steps: - uses: actions/checkout@v1 - - run: cargo install cross + - name: Install rust-embedded/cross + env: { VERSION: v0.1.16 } + run: >- + wget -nv https://github.com/rust-embedded/cross/releases/download/${VERSION}/cross-${VERSION}-x86_64-unknown-linux-gnu.tar.gz + -O- | sudo tar xz -C /usr/local/bin/ - name: compile for specific target env: { arch: '${{ matrix.arch }}' } run: | @@ -31,6 +35,10 @@ jobs: name: Build and publish docker images needs: compile runs-on: ubuntu-latest + env: + DOCKER_REGISTRY: quay.io/nushell + DOCKER_PASSWORD: ${{ secrets.DOCKER_REGISTRY }} + DOCKER_USER: ${{ secrets.DOCKER_USER }} strategy: matrix: tag: @@ -58,41 +66,53 @@ jobs: - uses: actions/download-artifact@master with: { name: '${{ matrix.arch }}', path: target/release } - name: Build and publish exact version - run: | - REGISTRY=${REGISTRY,,}; export TAG=${GITHUB_REF##*/}-${{ matrix.tag }}; + run: |- + export DOCKER_TAG=${GITHUB_REF##*/}-${{ matrix.tag }} export NU_BINS=target/release/$( [ ${{ matrix.plugin }} = true ] && echo nu* || echo nu ) export PATCH=$([ ${{ matrix.use-patch }} = true ] && echo .${{ matrix.tag }} || echo '') chmod +x $NU_BINS - echo ${{ secrets.DOCKER_REGISTRY }} | docker login docker.pkg.github.com -u ${{ github.actor }} --password-stdin + echo ${DOCKER_PASSWORD} | docker login ${DOCKER_REGISTRY} -u ${DOCKER_USER} --password-stdin docker-compose --file docker/docker-compose.package.yml build docker-compose --file docker/docker-compose.package.yml push # exact version env: BASE_IMAGE: ${{ matrix.base-image }} - REGISTRY: docker.pkg.github.com/${{ github.repository }} #region semantics tagging - - name: Retag and push without suffixing version - run: | + - name: Retag and push with suffixed version + run: |- VERSION=${GITHUB_REF##*/} - docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${{ matrix.tag }} - docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${VERSION%%.*}-${{ matrix.tag }} - docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${VERSION%.*}-${{ matrix.tag }} - docker push ${REGISTRY,,}/nu:${VERSION%.*}-${{ matrix.tag }} # latest patch - docker push ${REGISTRY,,}/nu:${VERSION%%.*}-${{ matrix.tag }} # latest features - docker push ${REGISTRY,,}/nu:${{ matrix.tag }} # latest version - env: { REGISTRY: 'docker.pkg.github.com/${{ github.repository }}' } + + latest_version=${VERSION%%%.*}-${{ matrix.tag }} + latest_feature=${VERSION%%.*}-${{ matrix.tag }} + latest_patch=${VERSION%.*}-${{ matrix.tag }} + exact_version=${VERSION}-${{ matrix.tag }} + + tags=( ${latest_version} ${latest_feature} ${latest_patch} ${exact_version} ) + + for tag in ${tags[@]}; do + docker tag ${DOCKER_REGISTRY}/nu:${VERSION}-${{ matrix.tag }} ${DOCKER_REGISTRY}/nu:${tag} + docker push ${DOCKER_REGISTRY}/nu:${tag} + done + + # latest version + docker tag ${DOCKER_REGISTRY}/nu:${VERSION}-${{ matrix.tag }} ${DOCKER_REGISTRY}/nu:${{ matrix.tag }} + docker push ${DOCKER_REGISTRY}/nu:${{ matrix.tag }} + - name: Retag and push debian as latest if: matrix.tag == 'debian' - run: | + run: |- VERSION=${GITHUB_REF##*/} - docker tag 
${REGISTRY,,}/nu:${{ matrix.tag }} ${REGISTRY,,}/nu:latest - docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${VERSION%.*} - docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${VERSION%%.*} - docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${VERSION} - docker push ${REGISTRY,,}/nu:${VERSION} # exact version - docker push ${REGISTRY,,}/nu:${VERSION%%.*} # latest features - docker push ${REGISTRY,,}/nu:${VERSION%.*} # latest patch - docker push ${REGISTRY,,}/nu:latest # latest version - env: { REGISTRY: 'docker.pkg.github.com/${{ github.repository }}' } + + # ${latest features} ${latest patch} ${exact version} + tags=( ${VERSION%%.*} ${VERSION%.*} ${VERSION} ) + + for tag in ${tags[@]}; do + docker tag ${DOCKER_REGISTRY}/nu:${VERSION}-${{ matrix.tag }} ${DOCKER_REGISTRY}/nu:${tag} + docker push ${DOCKER_REGISTRY}/nu:${tag} + done + + # latest version + docker tag ${DOCKER_REGISTRY}/nu:${{ matrix.tag }} ${DOCKER_REGISTRY}/nu:latest + docker push ${DOCKER_REGISTRY}/nu:latest #endregion semantics tagging diff --git a/.gitpod.yml b/.gitpod.yml index adb894f2d3..9f675b812a 100644 --- a/.gitpod.yml +++ b/.gitpod.yml @@ -1,8 +1,8 @@ image: file: .gitpod.Dockerfile tasks: - - init: cargo build - command: cargo run + - init: cargo install nu + command: nu github: prebuilds: # enable for the master/default branch (defaults to true) diff --git a/Cargo.lock b/Cargo.lock index a86c845cd5..2254b60971 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,7 +2,7 @@ # It is not intended for manual editing. [[package]] name = "adler32" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -29,6 +29,11 @@ dependencies = [ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "anyhow" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "app_dirs" version = "1.2.1" @@ -47,19 +52,24 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "arrayvec" -version = "0.4.11" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "nodrop 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", + "nodrop 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "arrayvec" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "async-stream" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "async-stream-impl 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-core-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -67,9 +77,9 @@ name = "async-stream-impl" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -77,33 +87,33 @@ name = "atty" version = "0.2.13" source = 
"registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "autocfg" -version = "0.1.5" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "backtrace" -version = "0.3.34" +version = "0.3.40" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "backtrace-sys 0.1.31 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-demangle 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", + "backtrace-sys 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "backtrace-sys" -version = "0.1.31" +version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.47 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -114,18 +124,23 @@ dependencies = [ "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "base64" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "battery" version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "core-foundation 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", "lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "nix 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)", - "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", "uom 0.23.1 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -137,33 +152,33 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "num-bigint 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)", - "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "bincode" -version = "1.1.4" +version = "1.2.0" 
source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "bitflags" -version = "1.1.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "blake2b_simd" -version = "0.5.6" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "arrayref 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", - "arrayvec 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", - "constant_time_eq 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", + "arrayvec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", + "constant_time_eq 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -180,29 +195,29 @@ dependencies = [ "chrono 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)", "decimal 2.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", "md5 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "bstr" -version = "0.2.6" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "regex-automata 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "bumpalo" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -226,30 +241,29 @@ version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "c2-chacha" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "ppv-lite86 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", 
+ "ppv-lite86 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "cc" -version = "1.0.45" +version = "1.0.47" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "jobserver 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)", - "num_cpus 1.10.1 (registry+https://github.com/rust-lang/crates.io-index)", + "jobserver 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)", + "num_cpus 1.11.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "cfg-if" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -257,10 +271,10 @@ name = "chrono" version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)", - "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -279,7 +293,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", "atty 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "strsim 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "textwrap 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-width 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", @@ -292,7 +306,7 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "clipboard-win 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "objc 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", + "objc 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", "objc-foundation 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "objc_id 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "x11-clipboard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", @@ -311,7 +325,7 @@ name = "cloudabi" version = "0.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -319,10 +333,10 @@ name = "config" version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "nom 4.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "rust-ini 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", "serde-hjson 0.8.2 
(registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "toml 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)", @@ -331,7 +345,7 @@ dependencies = [ [[package]] name = "constant_time_eq" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -340,7 +354,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "core-foundation-sys 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -353,7 +367,7 @@ name = "crc32fast" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -366,23 +380,23 @@ dependencies = [ [[package]] name = "crossbeam-deque" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "crossbeam-epoch 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam-epoch 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam-utils 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "crossbeam-epoch" -version = "0.7.2" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "arrayvec 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "memoffset 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam-utils 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "memoffset 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", "scopeguard 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -404,8 +418,18 @@ name = "crossbeam-utils" version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "crossbeam-utils" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -439,7 +463,7 @@ dependencies = [ "crossterm_screen 0.2.5 
(registry+https://github.com/rust-lang/crates.io-index)", "crossterm_utils 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", "crossterm_winapi 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -471,7 +495,7 @@ dependencies = [ "crossterm_cursor 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", "crossterm_utils 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", "crossterm_winapi 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -480,7 +504,7 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "crossterm_winapi 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -497,11 +521,11 @@ name = "csv" version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bstr 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", + "bstr 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", "csv-core 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", - "ryu 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "ryu 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -514,11 +538,11 @@ dependencies = [ [[package]] name = "ctor" -version = "0.1.9" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.43 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -532,30 +556,29 @@ dependencies = [ [[package]] name = "curl" -version = "0.4.22" +version = "0.4.25" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "curl-sys 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)", - "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "curl-sys 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "openssl-probe 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.9.49 (registry+https://github.com/rust-lang/crates.io-index)", - "schannel 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.52 (registry+https://github.com/rust-lang/crates.io-index)", + "schannel 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)", "socket2 0.3.11 
(registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "curl-sys" -version = "0.4.20" +version = "0.4.24" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.47 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libnghttp2-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "libz-sys 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.9.49 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.52 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)", "vcpkg 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -566,7 +589,7 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "darwin-libproc-sys 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -575,7 +598,7 @@ name = "darwin-libproc-sys" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -583,12 +606,12 @@ name = "decimal" version = "2.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.47 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "ord_subset 3.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -596,7 +619,7 @@ name = "deflate" version = "0.7.20" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "adler32 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", + "adler32 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -605,9 +628,9 @@ name = "derive-new" version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.6 
(registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -620,7 +643,7 @@ name = "directories" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -629,7 +652,7 @@ name = "dirs" version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "redox_users 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -639,7 +662,7 @@ name = "dirs" version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "dirs-sys 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -648,8 +671,8 @@ name = "dirs-sys" version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "redox_users 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -671,66 +694,44 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "either" -version = "1.5.2" +version = "1.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "encode_unicode" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "enum-utils" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "enum-utils-from-str 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive_internals 0.24.1 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.43 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "enum-utils-from-str" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "env_logger" version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "atty 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", - "humantime 1.2.0 
(registry+https://github.com/rust-lang/crates.io-index)", + "humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "termcolor 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "failure" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "backtrace 0.3.34 (registry+https://github.com/rust-lang/crates.io-index)", - "failure_derive 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "backtrace 0.3.40 (registry+https://github.com/rust-lang/crates.io-index)", + "failure_derive 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "failure_derive" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.43 (registry+https://github.com/rust-lang/crates.io-index)", - "synstructure 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "synstructure 0.12.3 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -750,13 +751,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "flate2" -version = "1.0.9" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "miniz-sys 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", - "miniz_oxide_c_api 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", + "miniz_oxide 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -771,79 +772,71 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "futures" -version = "0.1.28" +version = "0.1.29" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "futures-channel-preview" -version = "0.3.0-alpha.18" +version = "0.3.0-alpha.19" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-sink-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-core-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-sink-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "futures-core-preview" -version = "0.3.0-alpha.18" +version = "0.3.0-alpha.19" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "futures-executor-preview" -version = "0.3.0-alpha.18" +version = "0.3.0-alpha.19" source = 
"registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-util-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "num_cpus 1.10.1 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-core-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-util-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "num_cpus 1.11.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "futures-io-preview" -version = "0.3.0-alpha.18" +version = "0.3.0-alpha.19" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "futures-preview" -version = "0.3.0-alpha.18" +version = "0.3.0-alpha.19" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "futures-channel-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-executor-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-io-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-sink-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-util-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-channel-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-core-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-executor-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-io-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-sink-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-util-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "futures-sink-preview" -version = "0.3.0-alpha.18" +version = "0.3.0-alpha.19" source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", -] [[package]] name = "futures-timer" -version = "0.4.0" +version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-util-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "pin-utils 0.1.0-alpha.4 (registry+https://github.com/rust-lang/crates.io-index)", -] [[package]] name = "futures-util-preview" -version = "0.3.0-alpha.18" +version = "0.3.0-alpha.19" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "futures 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-channel-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-io-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-sink-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.29 
(registry+https://github.com/rust-lang/crates.io-index)", + "futures-channel-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-core-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-io-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-sink-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "pin-utils 0.1.0-alpha.4 (registry+https://github.com/rust-lang/crates.io-index)", "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -856,7 +849,7 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -864,27 +857,28 @@ name = "gethostname" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "getrandom" -version = "0.1.8" +version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", + "wasi 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "getset" -version = "0.0.8" +version = "0.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -892,8 +886,8 @@ name = "git2" version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libgit2-sys 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -909,38 +903,38 @@ name = "heck" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "unicode-segmentation 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-segmentation 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim" -version = "0.0.8-alpha.1" +version = "0.0.8" source = 
"registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-cpu 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-disk 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-host 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-memory 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-net 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-process 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.4-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-sensors 0.0.3-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-virt 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-cpu 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-disk 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-host 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-memory 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-net 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-process 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-sensors 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-virt 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-common" -version = "0.0.8-alpha.1" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "core-foundation 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-util-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-core-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-util-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "nix 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)", "pin-utils 0.1.0-alpha.4 (registry+https://github.com/rust-lang/crates.io-index)", @@ -950,41 +944,41 @@ dependencies = [ [[package]] name = "heim-cpu" -version = "0.0.8-alpha.1" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - 
"heim-derive 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.4-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-derive" -version = "0.0.8-alpha.1" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-disk" -version = "0.0.8-alpha.1" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "core-foundation 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.4-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "widestring 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -992,66 +986,66 @@ dependencies = [ [[package]] name = "heim-host" -version = "0.0.8-alpha.1" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.4-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 
(registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "platforms 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "platforms 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-memory" -version = "0.0.8-alpha.1" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.4-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-net" -version = "0.0.8-alpha.1" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.4-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "hex 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "macaddr 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", + "macaddr 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "nix 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-process" -version = "0.0.8-alpha.1" +version = "0.0.8" 
source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "darwin-libproc 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-cpu 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-host 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-net 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.4-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-cpu 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-host 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-net 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "ntapi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1061,38 +1055,46 @@ dependencies = [ [[package]] name = "heim-runtime" -version = "0.0.4-alpha.1" +version = "0.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-channel-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-channel-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "threadpool 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-sensors" -version = "0.0.3-alpha.1" +version = "0.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.4-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 
(registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-virt" -version = "0.0.8-alpha.1" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.4-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "raw-cpuid 7.0.3 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "hermit-abi" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "hex" version = "0.3.2" @@ -1105,7 +1107,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "http" -version = "0.1.18" +version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1115,7 +1117,7 @@ dependencies = [ [[package]] name = "humantime" -version = "1.2.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1128,28 +1130,29 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", - "unicode-normalization 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-normalization 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "image" -version = "0.22.2" +version = "0.22.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "jpeg-decoder 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", + "jpeg-decoder 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)", "num-iter 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", "num-rational 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", "png 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "indexmap" -version = "1.2.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1157,35 +1160,34 @@ name = "inflate" version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "adler32 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", + "adler32 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "iovec" -version = "0.1.2" +version = 
"0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "isahc" -version = "0.7.1" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam-channel 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", - "curl 0.4.22 (registry+https://github.com/rust-lang/crates.io-index)", - "curl-sys 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-io-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-util-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "http 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "curl 0.4.25 (registry+https://github.com/rust-lang/crates.io-index)", + "curl-sys 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-io-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-util-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "http 0.1.19 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "sluice 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", + "sluice 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1193,8 +1195,8 @@ name = "isatty" version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1204,15 +1206,15 @@ name = "itertools" version = "0.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "either 1.5.2 (registry+https://github.com/rust-lang/crates.io-index)", + "either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "itertools" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "either 1.5.2 (registry+https://github.com/rust-lang/crates.io-index)", + "either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1222,17 +1224,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "jobserver" -version = "0.1.16" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "getrandom 0.1.13 
(registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "jpeg-decoder" -version = "0.1.15" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1240,10 +1242,10 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.27" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "wasm-bindgen 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1257,15 +1259,15 @@ dependencies = [ [[package]] name = "language-reporting" -version = "0.3.1" -source = "git+https://github.com/wycats/language-reporting#1e2100290fec96f69646e1e61482d80f7a8e7855" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "derive-new 0.5.8 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "render-tree 0.1.1 (git+https://github.com/wycats/language-reporting)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.98 (registry+https://github.com/rust-lang/crates.io-index)", + "render-tree 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", "termcolor 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1276,7 +1278,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "lazy_static" -version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -1286,19 +1288,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "lexical-core" -version = "0.4.3" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "arrayvec 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", - "ryu 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "stackvector 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", - "static_assertions 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", + "ryu 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "static_assertions 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "libc" -version = "0.2.60" +version = "0.2.65" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -1306,10 +1308,10 @@ name = "libgit2-sys" version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 
1.0.47 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libz-sys 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1317,8 +1319,8 @@ name = "libnghttp2-sys" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.47 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1326,8 +1328,8 @@ name = "libsqlite3-sys" version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.47 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)", "vcpkg 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1336,9 +1338,9 @@ name = "libz-sys" version = "1.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.47 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)", "vcpkg 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1347,7 +1349,7 @@ name = "line-wrap" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "safemem 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "safemem 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1369,7 +1371,7 @@ name = "log" version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1382,7 +1384,7 @@ dependencies = [ [[package]] name = "macaddr" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -1390,7 +1392,7 @@ name = "mach" version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1398,7 +1400,7 @@ name = "mach" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1406,7 +1408,7 @@ name = "malloc_buf" version = "0.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 
(registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1414,6 +1416,11 @@ name = "matches" version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "maybe-uninit" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "md5" version = "0.6.1" @@ -1424,12 +1431,12 @@ name = "memchr" version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "memoffset" -version = "0.5.1" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1446,35 +1453,15 @@ version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "mime 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", - "unicase 2.4.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "miniz-sys" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "unicase 2.6.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "miniz_oxide" -version = "0.3.1" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "adler32 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "miniz_oxide_c_api" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "miniz_oxide 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "adler32 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1487,12 +1474,12 @@ name = "neso" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bincode 1.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "bincode 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.98 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1500,10 +1487,10 @@ name = "nix" version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 
(registry+https://github.com/rust-lang/crates.io-index)", - "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.47 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1512,16 +1499,16 @@ name = "nix" version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.47 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "nodrop" -version = "0.1.13" +version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -1535,14 +1522,33 @@ dependencies = [ [[package]] name = "nom" -version = "5.0.0" +version = "5.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lexical-core 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", + "lexical-core 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "nom-tracable" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "nom 5.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "nom-tracable-macros 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", + "nom_locate 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "nom-tracable-macros" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "nom_locate" version = "1.0.0" @@ -1550,7 +1556,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytecount 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "nom 5.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "nom 5.0.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1563,17 +1569,18 @@ dependencies = [ [[package]] name = "nu" -version = "0.3.0" +version = "0.5.1" dependencies = [ "ansi_term 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)", "app_dirs 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "async-stream 0.1.1 
(registry+https://github.com/rust-lang/crates.io-index)", - "base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)", + "async-stream 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "base64 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", "battery 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", "bigdecimal 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "bson 0.14.0 (registry+https://github.com/rust-lang/crates.io-index)", "byte-unit 3.0.3 (registry+https://github.com/rust-lang/crates.io-index)", "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "chrono 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)", "chrono-humanize 0.0.11 (registry+https://github.com/rust-lang/crates.io-index)", "clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1584,61 +1591,61 @@ dependencies = [ "derive-new 0.5.8 (registry+https://github.com/rust-lang/crates.io-index)", "dirs 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "dunce 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "enum-utils 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-timer 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-timer 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "futures_codec 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", - "getset 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "getset 0.0.9 (registry+https://github.com/rust-lang/crates.io-index)", "git2 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)", "glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "heim 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "image 0.22.2 (registry+https://github.com/rust-lang/crates.io-index)", - "indexmap 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", - "language-reporting 0.3.1 (git+https://github.com/wycats/language-reporting)", + "heim 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "hex 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "image 0.22.3 (registry+https://github.com/rust-lang/crates.io-index)", + "indexmap 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "itertools 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", + "language-reporting 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "mime 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "natural 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "neso 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", - "nom 5.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "nom 5.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "nom-tracable 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", "nom_locate 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "num-bigint 0.2.3 
(registry+https://github.com/rust-lang/crates.io-index)", - "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", "onig_sys 69.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "pin-utils 0.1.0-alpha.4 (registry+https://github.com/rust-lang/crates.io-index)", - "pretty-hex 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "pretty-hex 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "pretty_assertions 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", "pretty_env_logger 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "prettytable-rs 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "ptree 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "rawkey 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "roxmltree 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "roxmltree 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", "rusqlite 0.20.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rustyline 5.0.3 (registry+https://github.com/rust-lang/crates.io-index)", + "rustyline 5.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", "serde-hjson 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde_bytes 0.11.2 (registry+https://github.com/rust-lang/crates.io-index)", "serde_ini 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "serde_urlencoded 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_yaml 0.8.9 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_yaml 0.8.11 (registry+https://github.com/rust-lang/crates.io-index)", "shellexpand 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "starship 0.22.0 (registry+https://github.com/rust-lang/crates.io-index)", - "sublime_fuzzy 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", + "starship 0.26.4 (registry+https://github.com/rust-lang/crates.io-index)", + "sublime_fuzzy 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", "subprocess 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", - "surf 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "surf 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", "syntect 3.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "term 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", "textwrap 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", - "toml 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", + "toml 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)", + "trash 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", 
- "which 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "which 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1646,10 +1653,10 @@ name = "num-bigint" version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", "num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)", - "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1657,8 +1664,8 @@ name = "num-integer" version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1666,9 +1673,9 @@ name = "num-iter" version = "0.1.39" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", "num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)", - "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1676,9 +1683,9 @@ name = "num-rational" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", "num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)", - "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1686,28 +1693,29 @@ name = "num-traits" version = "0.1.43" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "num-traits" -version = "0.2.8" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "num_cpus" -version = "1.10.1" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "hermit-abi 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "objc" -version = "0.2.6" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "malloc_buf 0.0.6 
(registry+https://github.com/rust-lang/crates.io-index)", @@ -1719,7 +1727,7 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "block 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", - "objc 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", + "objc 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", "objc_id 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1728,7 +1736,7 @@ name = "objc_id" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "objc 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", + "objc 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1747,12 +1755,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "onig" -version = "4.3.2" +version = "4.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "onig_sys 69.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1761,8 +1769,8 @@ name = "onig_sys" version = "69.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.47 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1772,13 +1780,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "openssl-sys" -version = "0.9.49" +version = "0.9.52" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.47 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)", "vcpkg 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1792,7 +1800,7 @@ name = "ordered-float" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1834,12 +1842,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "pkg-config" -version = "0.3.15" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "platforms" -version = "0.2.0" +version = "0.2.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -1849,9 +1857,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)", "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "humantime 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "line-wrap 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", "xml-rs 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1860,7 +1868,7 @@ name = "png" version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "deflate 0.7.20 (registry+https://github.com/rust-lang/crates.io-index)", "inflate 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1868,12 +1876,12 @@ dependencies = [ [[package]] name = "ppv-lite86" -version = "0.2.5" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "pretty-hex" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -1882,7 +1890,7 @@ version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", - "ctor 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "ctor 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", "difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "output_vt100 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1904,23 +1912,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "atty 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", "csv 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "encode_unicode 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "encode_unicode 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "term 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-width 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "proc-macro2" -version = "0.4.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "proc-macro2" -version = "1.0.1" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1936,9 +1936,9 @@ dependencies = [ "directories 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "isatty 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", "petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 
1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", "serde-value 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.98 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", "tint 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1947,68 +1947,33 @@ name = "quick-error" version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "quote" -version = "0.6.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "quote" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "rand" -version = "0.6.5" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "getrandom 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "getrandom 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", "rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "rand_chacha" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "rand_chacha" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "c2-chacha 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", + "c2-chacha 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", ] 
[[package]] @@ -2026,18 +1991,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "rand_core" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "getrandom 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand_hc" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "getrandom 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2045,25 +2002,7 @@ name = "rand_hc" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "rand_core 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand_isaac" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand_jitter" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2073,36 +2012,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", "fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "rand_pcg" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand_xorshift" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "raw-cpuid" version = "7.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.47 (registry+https://github.com/rust-lang/crates.io-index)", "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2122,8 +2044,8 @@ name = "rayon" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "crossbeam-deque 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", - "either 1.5.2 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam-deque 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", 
+ "either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)", "rayon-core 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2132,11 +2054,11 @@ name = "rayon-core" version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "crossbeam-deque 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam-deque 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam-queue 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "num_cpus 1.10.1 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "num_cpus 1.11.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2162,7 +2084,7 @@ name = "redox_users" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "rust-argon2 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2170,12 +2092,12 @@ dependencies = [ [[package]] name = "regex" -version = "1.2.1" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "regex-syntax 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)", + "regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)", "thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2189,7 +2111,7 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.6.11" +version = "0.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -2203,7 +2125,7 @@ dependencies = [ [[package]] name = "render-tree" version = "0.1.1" -source = "git+https://github.com/wycats/language-reporting#1e2100290fec96f69646e1e61482d80f7a8e7855" +source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "itertools 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2217,10 +2139,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "roxmltree" -version = "0.7.0" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "xmlparser 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", + "xmlparser 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2228,7 +2150,7 @@ name = "rusqlite" version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "fallible-iterator 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "fallible-streaming-iterator 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", 
"libsqlite3-sys 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2243,7 +2165,7 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)", - "blake2b_simd 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)", + "blake2b_simd 0.5.9 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2254,7 +2176,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "rustc-demangle" -version = "0.1.15" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -2272,15 +2194,15 @@ dependencies = [ [[package]] name = "rustyline" -version = "5.0.3" +version = "5.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "dirs 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "nix 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)", - "unicode-segmentation 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-segmentation 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-width 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "utf8parse 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2288,12 +2210,12 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.0" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "safemem" -version = "0.3.1" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -2306,10 +2228,10 @@ dependencies = [ [[package]] name = "schannel" -version = "0.1.15" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2338,10 +2260,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "serde" -version = "1.0.100" +version = "1.0.102" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde_derive 1.0.98 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2352,7 +2274,7 @@ dependencies = [ "lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", "linked-hash-map 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde 0.8.23 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2361,10 +2283,10 @@ name = "serde-hjson" version = "0.9.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "linked-hash-map 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde 0.8.23 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2374,7 +2296,7 @@ version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "ordered-float 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2382,26 +2304,17 @@ name = "serde_bytes" version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "serde_derive" -version = "1.0.98" +version = "1.0.102" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.43 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "serde_derive_internals" -version = "0.24.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.43 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2410,7 +2323,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "result 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2419,10 +2332,10 @@ name = "serde_json" version = "1.0.41" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "indexmap 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "indexmap 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", - "ryu 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "ryu 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2440,18 +2353,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "dtoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", "itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 
(registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", "url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "serde_yaml" -version = "0.8.9" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "dtoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", "linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", "yaml-rust 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2476,26 +2389,29 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "sluice" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "futures-channel-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-io-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-channel-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-core-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-io-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "smallvec" -version = "0.6.10" +version = "0.6.13" source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)", +] [[package]] name = "socket2" version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2505,18 +2421,9 @@ name = "sourcefile" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "stackvector" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", - "unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "starship" -version = "0.22.0" +version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "ansi_term 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2528,15 +2435,16 @@ dependencies = [ "gethostname 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "git2 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", + "nom 5.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "path-slash 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "pretty_env_logger 0.3.1 
(registry+https://github.com/rust-lang/crates.io-index)", "rayon 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "starship_module_config_derive 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "sysinfo 0.9.5 (registry+https://github.com/rust-lang/crates.io-index)", - "toml 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", - "unicode-segmentation 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sysinfo 0.9.6 (registry+https://github.com/rust-lang/crates.io-index)", + "toml 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-segmentation 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)", "yaml-rust 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2545,14 +2453,14 @@ name = "starship_module_config_derive" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "static_assertions" -version = "0.3.3" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -2562,7 +2470,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "sublime_fuzzy" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -2571,60 +2479,50 @@ version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "crossbeam-utils 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "surf" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "futures-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "http 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", - "isahc 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", - "js-sys 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "http 0.1.19 (registry+https://github.com/rust-lang/crates.io-index)", + "isahc 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)", + "js-sys 0.3.31 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "mime 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "mime_guess 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "serde_urlencoded 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", "url 2.1.0 
(registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-futures 0.3.25 (registry+https://github.com/rust-lang/crates.io-index)", - "web-sys 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-futures 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)", + "web-sys 0.3.31 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "syn" -version = "0.15.43" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", - "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "syn" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "synstructure" -version = "0.10.2" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.43 (registry+https://github.com/rust-lang/crates.io-index)", - "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2632,17 +2530,17 @@ name = "syntect" version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bincode 1.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "flate2 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)", + "bincode 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "flate2 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)", "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "onig 4.3.2 (registry+https://github.com/rust-lang/crates.io-index)", + "onig 4.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "plist 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "regex-syntax 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.98 (registry+https://github.com/rust-lang/crates.io-index)", + "regex-syntax 0.6.12 
(registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "walkdir 2.2.9 (registry+https://github.com/rust-lang/crates.io-index)", "yaml-rust 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2650,12 +2548,12 @@ dependencies = [ [[package]] name = "sysinfo" -version = "0.9.5" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "doc-comment 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "rayon 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2665,9 +2563,9 @@ name = "tempfile" version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2689,7 +2587,7 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2698,7 +2596,7 @@ name = "termcolor" version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "wincolor 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "wincolor 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2715,7 +2613,7 @@ name = "thread_local" version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2723,7 +2621,7 @@ name = "threadpool" version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "num_cpus 1.10.1 (registry+https://github.com/rust-lang/crates.io-index)", + "num_cpus 1.11.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2731,7 +2629,7 @@ name = "time" version = "0.1.42" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 
(registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2750,7 +2648,7 @@ version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2759,28 +2657,36 @@ name = "toml" version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "toml" -version = "0.5.3" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "trash" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "typenum" -version = "1.10.0" +version = "1.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "unicase" -version = "2.4.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "version_check 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2793,15 +2699,15 @@ dependencies = [ [[package]] name = "unicode-normalization" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)", + "smallvec 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "unicode-segmentation" -version = "1.3.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -2809,31 +2715,18 @@ name = "unicode-width" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "unicode-xid" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - [[package]] name = "unicode-xid" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "unreachable" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "uom" version = "0.23.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", + "typenum 1.11.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2842,8 +2735,8 @@ version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
dependencies = [ "num-rational 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", + "typenum 1.11.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2870,15 +2763,6 @@ name = "utf8parse" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "uuid" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "vcpkg" version = "0.2.7" @@ -2894,6 +2778,11 @@ name = "version_check" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "version_check" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "void" version = "1.0.2" @@ -2909,93 +2798,100 @@ dependencies = [ "winapi-util 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "wasi" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "wasm-bindgen" -version = "0.2.50" +version = "0.2.54" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-macro 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-macro 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.50" +version = "0.2.54" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bumpalo 2.5.0 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bumpalo 2.6.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-shared 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-shared 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "wasm-bindgen-futures" -version = "0.3.25" +version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "futures 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-channel-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-util-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "js-sys 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 
(registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-channel-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-util-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", + "js-sys 0.3.31 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", + "web-sys 0.3.31 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "wasm-bindgen-macro" -version = "0.2.50" +version = "0.2.54" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-macro-support 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-macro-support 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.50" +version = "0.2.54" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-backend 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-shared 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-backend 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-shared 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.50" +version = "0.2.54" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "wasm-bindgen-webidl" -version = "0.2.50" +version = "0.2.54" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "anyhow 1.0.20 (registry+https://github.com/rust-lang/crates.io-index)", "heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-backend 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-backend 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", "weedle 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "web-sys" -version = "0.3.27" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "failure 0.1.5 
(registry+https://github.com/rust-lang/crates.io-index)", - "js-sys 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)", + "anyhow 1.0.20 (registry+https://github.com/rust-lang/crates.io-index)", + "js-sys 0.3.31 (registry+https://github.com/rust-lang/crates.io-index)", "sourcefile 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-webidl 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-webidl 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -3008,11 +2904,11 @@ dependencies = [ [[package]] name = "which" -version = "2.0.1" +version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -3059,7 +2955,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "wincolor" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -3071,8 +2967,8 @@ name = "x11" version = "2.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -3088,7 +2984,7 @@ name = "xcb" version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -3104,7 +3000,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "xmlparser" -version = "0.9.0" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -3116,36 +3012,39 @@ dependencies = [ ] [metadata] -"checksum adler32 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7e522997b529f05601e05166c07ed17789691f562762c7f3b987263d2dedee5c" +"checksum adler32 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "5d2e7343e7fc9de883d1b0341e0b13970f764c14101234857d2ddafa1cb1cac2" "checksum aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "58fb5e95d83b38284460a5fda7d6470aa0b8844d283a0b614b8535e880800d2d" "checksum ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b" "checksum ansi_term 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" +"checksum anyhow 1.0.20 (registry+https://github.com/rust-lang/crates.io-index)" = "768155103fe6b9bf51090758e0657aa34dde4f6618f32ba1c3e45be3b29a0709" "checksum app_dirs 1.2.1 
(registry+https://github.com/rust-lang/crates.io-index)" = "e73a24bad9bd6a94d6395382a6c69fe071708ae4409f763c5475e14ee896313d" "checksum arrayref 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "0d382e583f07208808f6b1249e60848879ba3543f57c32277bf52d69c2f0f0ee" -"checksum arrayvec 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)" = "b8d73f9beda665eaa98ab9e4f7442bd4e7de6652587de55b2525e52e29c1b0ba" -"checksum async-stream 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "650be9b667e47506c42ee53034fb1935443cb2447a3a5c0a75e303d2e756fa73" +"checksum arrayvec 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)" = "cd9fd44efafa8690358b7408d253adf110036b88f55672a933f01d616ad9b1b9" +"checksum arrayvec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cff77d8686867eceff3105329d4698d96c2391c176d5d03adc90c7389162b5b8" +"checksum async-stream 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "fb6fa015ebe961e9908ca4c1854e7dc7aabd4417da77b6a0466e4dfb4c8f6f69" "checksum async-stream-impl 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4f0d8c5b411e36dcfb04388bacfec54795726b1f0148adcb0f377a96d6747e0e" "checksum atty 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "1803c647a3ec87095e7ae7acfca019e98de5ec9a7d01343f611cf3152ed71a90" -"checksum autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "22130e92352b948e7e82a49cdb0aa94f2211761117f29e052dd397c1ac33542b" -"checksum backtrace 0.3.34 (registry+https://github.com/rust-lang/crates.io-index)" = "b5164d292487f037ece34ec0de2fcede2faa162f085dd96d2385ab81b12765ba" -"checksum backtrace-sys 0.1.31 (registry+https://github.com/rust-lang/crates.io-index)" = "82a830b4ef2d1124a711c71d263c5abdc710ef8e907bd508c88be475cebc422b" +"checksum autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "1d49d90015b3c36167a20fe2810c5cd875ad504b39cff3d4eae7977e6b7c1cb2" +"checksum backtrace 0.3.40 (registry+https://github.com/rust-lang/crates.io-index)" = "924c76597f0d9ca25d762c25a4d369d51267536465dc5064bdf0eb073ed477ea" +"checksum backtrace-sys 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "5d6575f128516de27e3ce99689419835fce9643a9b215a14d2b5b685be018491" "checksum base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0b25d992356d2eb0ed82172f5248873db5560c4721f564b13cb5193bda5e668e" +"checksum base64 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b41b7ea54a0c9d92199de89e20e58d49f02f8e699814ef3fdf266f6f748d15c7" "checksum battery 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6d6fe5630049e900227cd89afce4c1204b88ec8e61a2581bb96fcce26f047b" "checksum bigdecimal 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "460825c9e21708024d67c07057cd5560e5acdccac85de0de624a81d3de51bacb" -"checksum bincode 1.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "9f04a5e50dc80b3d5d35320889053637d15011aed5e66b66b37ae798c65da6f7" -"checksum bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3d155346769a6855b86399e9bc3814ab343cd3d62c7e985113d46a0ec3c281fd" -"checksum blake2b_simd 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)" = "461f4b879a8eb70c1debf7d0788a9a5ff15f1ea9d25925fea264ef4258bed6b2" +"checksum bincode 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b8ab639324e3ee8774d296864fbc0dbbb256cf1a41c490b94cba90c082915f92" +"checksum bitflags 1.2.1 
(registry+https://github.com/rust-lang/crates.io-index)" = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" +"checksum blake2b_simd 0.5.9 (registry+https://github.com/rust-lang/crates.io-index)" = "b83b7baab1e671718d78204225800d6b170e648188ac7dc992e9d6bddf87d0c0" "checksum block 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0d8c1fef690941d3e7788d328517591fecc684c084084702d6ff1641e993699a" "checksum bson 0.14.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d61895d21e2194d1ce1d434cff69025daac1e49a8b4698eb04b05722dbc08b33" -"checksum bstr 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "e0a692f1c740e7e821ca71a22cf99b9b2322dfa94d10f71443befb1797b3946a" -"checksum bumpalo 2.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2cd43d82f27d68911e6ee11ee791fb248f138f5d69424dc02e098d4f152b0b05" +"checksum bstr 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "8d6c2c5b58ab920a4f5aeaaca34b4488074e8cc7596af94e6f8c6ff247c60245" +"checksum bumpalo 2.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ad807f2fc2bf185eeb98ff3a901bd46dc5ad58163d0fa4577ba0d25674d71708" "checksum byte-unit 3.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6894a79550807490d9f19a138a6da0f8830e70c83e83402dd23f16fd6c479056" "checksum bytecount 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f861d9ce359f56dbcb6e0c2a1cb84e52ad732cadb57b806adeb3c7668caccbd8" "checksum byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a7c3dd8985a7111efc5c80b44e23ecdd8c007de8ade3b96595387e812b957cf5" "checksum bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)" = "206fdffcfa2df7cbe15601ef46c813fce0965eb3286db6b56c583b814b51c81c" -"checksum c2-chacha 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7d64d04786e0f528460fc884753cf8dddcc466be308f6026f8e355c41a0e4101" -"checksum cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)" = "4fc9a35e1f4290eb9e5fc54ba6cf40671ed2a2514c3eeb2b2a908dda2ea5a1be" -"checksum cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "b486ce3ccf7ffd79fdeb678eac06a9e6c09fc88d33836340becb8fffe87c5e33" +"checksum c2-chacha 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "214238caa1bf3a496ec3392968969cab8549f96ff30652c9e56885329315f6bb" +"checksum cc 1.0.47 (registry+https://github.com/rust-lang/crates.io-index)" = "aa87058dce70a3ff5621797f1506cb837edd02ac4c0ae642b4542dce802908b8" +"checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" "checksum chrono 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e8493056968583b0193c1bb04d6f7684586f3726992d6c573261941a895dbd68" "checksum chrono-humanize 0.0.11 (registry+https://github.com/rust-lang/crates.io-index)" = "eb2ff48a655fe8d2dae9a39e66af7fd8ff32a879e8c4e27422c25596a8b5e90d" "checksum clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5067f5bb2d80ef5d68b4c87db81601f0b75bca627bc2ef76b141d7b846a3c6d9" @@ -3153,16 +3052,17 @@ dependencies = [ "checksum clipboard-win 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e3a093d6fed558e5fe24c3dfc85a68bb68f1c824f440d3ba5aca189e2998786b" "checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" "checksum config 0.9.3 
(registry+https://github.com/rust-lang/crates.io-index)" = "f9107d78ed62b3fa5a86e7d18e647abed48cfd8f8fab6c72f4cdb982d196f7e6" -"checksum constant_time_eq 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8ff012e225ce166d4422e0e78419d901719760f62ae2b7969ca6b564d1b54a9e" +"checksum constant_time_eq 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "995a44c877f9212528ccc74b21a232f66ad69001e40ede5bcee2ac9ef2657120" "checksum core-foundation 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)" = "25b9e03f145fd4f2bf705e07b900cd41fc636598fe5dc452fd0db1441c3f496d" "checksum core-foundation-sys 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e7ca8a5221364ef15ce201e8ed2f609fc312682a8f4e0e3d4aa5879764e0fa3b" "checksum crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ba125de2af0df55319f41944744ad91c71113bf74a4646efff39afe1f6842db1" "checksum crossbeam-channel 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "c8ec7fcd21571dc78f96cc96243cab8d8f035247c3efd16c687be154c3fa9efa" -"checksum crossbeam-deque 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b18cd2e169ad86297e6bc0ad9aa679aee9daa4f19e8163860faf7c164e4f5a71" -"checksum crossbeam-epoch 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "fedcd6772e37f3da2a9af9bf12ebe046c0dfe657992377b4df982a2b54cd37a9" +"checksum crossbeam-deque 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c3aa945d63861bfe624b55d153a39684da1e8c0bc8fba932f7ee3a3c16cea3ca" +"checksum crossbeam-epoch 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5064ebdbf05ce3cb95e45c8b086f72263f4166b29b97f6baff7ef7fe047b55ac" "checksum crossbeam-queue 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7c979cd6cfe72335896575c6b5688da489e420d36a27a0b9eb0c73db574b4a4b" "checksum crossbeam-utils 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "677d453a17e8bd2b913fa38e8b9cf04bcdbb5be790aa294f2389661d72036015" "checksum crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)" = "04973fa96e96579258a5091af6003abde64af786b860f18622b82e026cca60e6" +"checksum crossbeam-utils 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ce446db02cdc3165b94ae73111e570793400d0794e46125cc4056c81cbb039f4" "checksum crossterm 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9abce7d7c50e9823ea0c0dbeb8f16d7e247af06d75b4c6244ea0a0998b3a6f35" "checksum crossterm_cursor 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "fb4bfd085f17d83e6cd2943f0150d3b4331e465de8dba1750d1966192faf63dc" "checksum crossterm_input 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "c6dd255ca05a596bae31ec392fdb67a829509bb767213f00f37c6b62814db663" @@ -3173,10 +3073,10 @@ dependencies = [ "checksum crossterm_winapi 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "b055e7cc627c452e6a9b977022f48a2db6f0ff73df446ca970f95eef9c381d45" "checksum csv 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "37519ccdfd73a75821cac9319d4fce15a81b9fcf75f951df5b9988aa3a0af87d" "checksum csv-core 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "9b5cadb6b25c77aeff80ba701712494213f4a8418fcda2ee11b6560c3ad0bf4c" -"checksum ctor 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "3b4c17619643c1252b5f690084b82639dd7fac141c57c8e77a00e0148132092c" +"checksum ctor 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = 
"cd8ce37ad4184ab2ce004c33bf6379185d3b1c95801cab51026bd271bf68eedc" "checksum ctrlc 3.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c7dfd2d8b4c82121dfdff120f818e09fc4380b0b7e17a742081a89b94853e87f" -"checksum curl 0.4.22 (registry+https://github.com/rust-lang/crates.io-index)" = "f8ed9a22aa8c4e49ac0c896279ef532a43a7df2f54fcd19fa36960de029f965f" -"checksum curl-sys 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)" = "5e90ae10f635645cba9cad1023535f54915a95c58c44751c6ed70dbaeb17a408" +"checksum curl 0.4.25 (registry+https://github.com/rust-lang/crates.io-index)" = "06aa71e9208a54def20792d877bc663d6aae0732b9852e612c4a933177c31283" +"checksum curl-sys 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)" = "f659f3ffac9582d6177bb86d1d2aa649f4eb9d0d4de9d03ccc08b402832ea340" "checksum darwin-libproc 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ade5a88af8d9646bf770687321a9488a0f2b4610aa08b0373016cd1af37f0a31" "checksum darwin-libproc-sys 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c30d1a078d74da1183b02fed8a8b07afc412d3998334b53b750d0ed03b031541" "checksum decimal 2.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "e6458723bc760383275fbc02f4c769b2e5f3de782abaf5e7e0b9b7f0368a63ed" @@ -3190,71 +3090,70 @@ dependencies = [ "checksum doc-comment 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "923dea538cea0aa3025e8685b20d6ee21ef99c4f77e954a30febbaac5ec73a97" "checksum dtoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "ea57b42383d091c85abcc2706240b94ab2a8fa1fc81c10ff23c4de06e2a90b5e" "checksum dunce 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d0ad6bf6a88548d1126045c413548df1453d9be094a8ab9fd59bf1fdd338da4f" -"checksum either 1.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "5527cfe0d098f36e3f8839852688e63c8fff1c90b2b405aef730615f9a7bcf7b" -"checksum encode_unicode 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "90b2c9496c001e8cb61827acdefad780795c42264c137744cae6f7d9e3450abd" -"checksum enum-utils 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1f1ae672d9891879fb93e17ab6015c4e3bbe63fbeb23a41b9ac39ffa845b8836" -"checksum enum-utils-from-str 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6b5669381f76d7320e122abdd4a8307f986634f6d067fb69e31179422175801a" +"checksum either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "bb1f6b1ce1c140482ea30ddd3335fc0024ac7ee112895426e0a629a6c20adfe3" +"checksum encode_unicode 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" "checksum env_logger 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "aafcde04e90a5226a6443b7aabdb016ba2f8307c847d524724bd9b346dd1a2d3" -"checksum failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "795bd83d3abeb9220f257e597aa0080a508b27533824adf336529648f6abf7e2" -"checksum failure_derive 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "ea1063915fd7ef4309e222a5a07cf9c319fb9c7836b1f89b85458672dbb127e1" +"checksum failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "f8273f13c977665c5db7eb2b99ae520952fe5ac831ae4cd09d80c4c7042b5ed9" +"checksum failure_derive 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0bc225b78e0391e4b8683440bf2e63c2deeeb2ce5189eab46e2b68c6d3725d08" "checksum fallible-iterator 0.2.0 
(registry+https://github.com/rust-lang/crates.io-index)" = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" "checksum fallible-streaming-iterator 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a" "checksum fixedbitset 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "86d4de0081402f5e88cdac65c8dcdcc73118c1a7a465e2a05f0da05843a8ea33" -"checksum flate2 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)" = "550934ad4808d5d39365e5d61727309bf18b3b02c6c56b729cb92e7dd84bc3d8" +"checksum flate2 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)" = "6bd6d6f4752952feb71363cffc9ebac9411b75b87c6ab6058c40c8900cf43c0f" "checksum fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2fad85553e09a6f881f739c29f0b00b0f01357c743266d478b68951ce23285f3" "checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" -"checksum futures 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)" = "45dc39533a6cae6da2b56da48edae506bb767ec07370f86f70fc062e9d435869" -"checksum futures-channel-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)" = "f477fd0292c4a4ae77044454e7f2b413207942ad405f759bb0b4698b7ace5b12" -"checksum futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)" = "4a2f26f774b81b3847dcda0c81bd4b6313acfb4f69e5a0390c7cb12c058953e9" -"checksum futures-executor-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)" = "80705612926df8a1bc05f0057e77460e29318801f988bf7d803a734cf54e7528" -"checksum futures-io-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)" = "ee7de0c1c9ed23f9457b0437fec7663ce64d9cc3c906597e714e529377b5ddd1" -"checksum futures-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)" = "efa8f90c4fb2328e381f8adfd4255b4a2b696f77d1c63a3dee6700b564c4e4b5" -"checksum futures-sink-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)" = "e9b65a2481863d1b78e094a07e9c0eed458cc7dc6e72b22b7138b8a67d924859" -"checksum futures-timer 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "878f1d2fc31355fa02ed2372e741b0c17e58373341e6a122569b4623a14a7d33" -"checksum futures-util-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)" = "7df53daff1e98cc024bf2720f3ceb0414d96fbb0a94f3cad3a5c3bf3be1d261c" +"checksum futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)" = "1b980f2816d6ee8673b6517b52cb0e808a180efc92e5c19d02cdda79066703ef" +"checksum futures-channel-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)" = "d5e5f4df964fa9c1c2f8bddeb5c3611631cacd93baf810fc8bb2fb4b495c263a" +"checksum futures-core-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)" = "b35b6263fb1ef523c3056565fa67b1d16f0a8604ff12b11b08c25f28a734c60a" +"checksum futures-executor-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)" = "75236e88bd9fe88e5e8bfcd175b665d0528fe03ca4c5207fabc028c8f9d93e98" +"checksum futures-io-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)" = "f4914ae450db1921a56c91bde97a27846287d062087d4a652efc09bb3a01ebda" +"checksum futures-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)" = 
"3b1dce2a0267ada5c6ff75a8ba864b4e679a9e2aa44262af7a3b5516d530d76e" +"checksum futures-sink-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)" = "86f148ef6b69f75bb610d4f9a2336d4fc88c4b5b67129d1a340dd0fd362efeec" +"checksum futures-timer 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a1de7508b218029b0f01662ed8f61b1c964b3ae99d6f25462d0f55a595109df6" +"checksum futures-util-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)" = "5ce968633c17e5f97936bd2797b6e38fb56cf16a7422319f7ec2e30d3c470e8d" "checksum futures_codec 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "36552cd31353fd135114510d53b8d120758120c36aa636a9341970f9efb1e4a0" "checksum gethostname 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d4ab273ca2a31eb6ca40b15837ccf1aa59a43c5db69ac10c542be342fae2e01d" -"checksum getrandom 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "34f33de6f0ae7c9cb5e574502a562e2b512799e32abb801cd1e79ad952b62b49" -"checksum getset 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "117a5b13aecd4e10161bb3feb22dda898e8552836c2391d8e4645d5e703ab866" +"checksum getrandom 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "e7db7ca94ed4cd01190ceee0d8a8052f08a247aa1b469a7f68c6a3b71afcf407" +"checksum getset 0.0.9 (registry+https://github.com/rust-lang/crates.io-index)" = "5bb3f5b7d8d70c9bd23cf29b2b38094661418fb0ea79f1b0cc2019a11d6f5429" "checksum git2 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "39f27186fbb5ec67ece9a56990292bc5aed3c3fc51b9b07b0b52446b1dfb4a82" "checksum glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" "checksum heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205" -"checksum heim 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "02692a4aa3bed77933da9ae7915aef7fcceb65eff9d9251be189b1acc0b77f65" -"checksum heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "559807533108e09863125eeccb38a7213cef5a7a7deadd3fac2674e1f8d3db70" -"checksum heim-cpu 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "60c237652eaa091b39f996deb41aa7baae67cab5f25204154c14414f46ef69c1" -"checksum heim-derive 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b3f326db96a03106afcea6839b13f7d95b09cffd063eaa94ef0fd3e796214a66" -"checksum heim-disk 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bd75c64f2d054ce1297ad08f2ca41bf7db7e9ca868221b2fb7427210579e85a1" -"checksum heim-host 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6401c858723568a09e0f09e09bda833e0019c34aa512ccdeba236fce45e4eeb1" -"checksum heim-memory 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "424a549b6c3faecc2492cd3d49f1f89ed9f191c7995741b89e674b85a262e303" -"checksum heim-net 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d0ebbcbabe86dbc1c8713ecc1f54630549f82fa07520083cf9a0edcdd77d329a" -"checksum heim-process 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "564f0d9d123c708688721fb2c2aacc198bd5eec3d995eb8c25d369500c66ca7d" -"checksum heim-runtime 0.0.4-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "df59b2a6e00b7f4532dc00736d74bf721a4587d4dbf90793c524ed0a7eddfa19" -"checksum 
heim-sensors 0.0.3-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "512afc3c0562aa26ae4e236a4b371901fbf7ddac843c961b2ef201936e79a7cd" -"checksum heim-virt 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "95372a84d2a0a5709899449fbb8ed296a9ce5b9fc0ba4729f0c26f7d5ebdf155" +"checksum heim 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "de848466ae9659d5ab634615bdd0b7d558a41ae524ee4d59c880d12499af5b77" +"checksum heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "63f408c31e695732096a0383df16cd3efee4adb32ba3ad086fb85a7dc8f53100" +"checksum heim-cpu 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "5785004dfdbd68a814d504b27b8ddc16c748a856835dfb6e65b15142090664ef" +"checksum heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "9573bedf4673c1b254bce7f1521559329d2b27995b693b695fa13be2b15c188b" +"checksum heim-disk 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "c84980e62564828ae4ca70a8bfbdb0f139cc89abb6c91b8b4809518346a72366" +"checksum heim-host 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "1de019d5969f6bab766311be378788bd1bb068b59c4f3861c539a420fc258ed3" +"checksum heim-memory 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "a9cdbe6433197da8387dcd0cf1afd9184db4385d55f8a76355b28ceabe99cdc5" +"checksum heim-net 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "7b0f5e590eb2f8b23229ff4b06f7e7aee0e229837d3697f362014343682ae073" +"checksum heim-process 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "a64874316339b9c0c7953e7a87d2b32e2400bf6778650ac11b76b05d3c37e121" +"checksum heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "13ef10b5ab5a501e6537b1414db0e3c488425d88bb131bd4e9ff7c0e61e5fbd1" +"checksum heim-sensors 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8ad8b3c9032bca1a76dd43e1eb5c8044e0c505343cb21949dc7acd1bc55b408b" +"checksum heim-virt 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "bb2dda5314da10a8fbcdf130c065abc65f02c3ace72c6f143ad4537520536e2b" +"checksum hermit-abi 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "307c3c9f937f38e3534b1d6447ecf090cafcc9744e4a6360e8b037b2cf5af120" "checksum hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "805026a5d0141ffc30abb3be3173848ad46a1b1664fe632428479619a3644d77" "checksum hex 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "023b39be39e3a2da62a94feb433e91e8bcd37676fbc8bea371daf52b7a769a3e" -"checksum http 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)" = "372bcb56f939e449117fb0869c2e8fd8753a8223d92a172c6e808cf123a5b6e4" -"checksum humantime 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3ca7e5f2e110db35f93b837c81797f3714500b81d517bf20c431b16d3ca4f114" +"checksum http 0.1.19 (registry+https://github.com/rust-lang/crates.io-index)" = "d7e06e336150b178206af098a055e3621e8336027e2b4d126bda0bc64824baaf" +"checksum humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "df004cfca50ef23c36850aaaa59ad52cc70d0e90243c3c7737a4dd32dc7a3c4f" "checksum idna 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "02e2673c30ee86b5b96a9cb52ad15718aa1f966f5ab9ad54a8b95d5ca33120a9" -"checksum image 0.22.2 (registry+https://github.com/rust-lang/crates.io-index)" = "ee0665404aa0f2ad154021777b785878b0e5b1c1da030455abc3d9ed257c2c67" -"checksum indexmap 1.2.0 
(registry+https://github.com/rust-lang/crates.io-index)" = "a61202fbe46c4a951e9404a720a0180bcf3212c750d735cb5c4ba4dc551299f3" +"checksum image 0.22.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7b4be8aaefbe7545dc42ae925afb55a0098f226a3fe5ef721872806f44f57826" +"checksum indexmap 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712d7b3ea5827fcb9d4fda14bf4da5f136f0db2ae9c8f4bd4e2d1c6fde4e6db2" "checksum inflate 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "1cdb29978cc5797bd8dcc8e5bf7de604891df2a8dc576973d71a281e916db2ff" -"checksum iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dbe6e417e7d0975db6512b90796e8ce223145ac4e33c377e4a42882a0e88bb08" -"checksum isahc 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e1b971511b5d8de4a51d4da4bc8e374bf60ce841e91b116f46ae06ae2e2a8e9b" +"checksum iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e" +"checksum isahc 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "17b77027f12e53ae59a379f7074259d32eb10867e6183388020e922832d9c3fb" "checksum isatty 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e31a8281fc93ec9693494da65fbf28c0c2aa60a2eaec25dc58e2f31952e95edc" "checksum itertools 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)" = "0d47946d458e94a1b7bcabbf6521ea7c037062c81f534615abcad76e84d4970d" -"checksum itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5b8467d9c1cebe26feb08c640139247fac215782d35371ade9a2136ed6085358" +"checksum itertools 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "87fa75c9dea7b07be3138c49abbb83fd4bea199b5cdc76f9804458edc5da0d6e" "checksum itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "501266b7edd0174f8530248f87f99c88fbe60ca4ef3dd486835b8d8d53136f7f" -"checksum jobserver 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "f74e73053eaf95399bf926e48fc7a2a3ce50bd0eaaa2357d391e95b2dcdd4f10" -"checksum jpeg-decoder 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "c8b7d43206b34b3f94ea9445174bda196e772049b9bddbc620c9d29b2d20110d" -"checksum js-sys 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)" = "1efc4f2a556c58e79c5500912e221dd826bec64ff4aabd8ce71ccef6da02d7d4" +"checksum jobserver 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)" = "f2b1d42ef453b30b7387e113da1c83ab1605d90c5b4e0eb8e96d016ed3b8c160" +"checksum jpeg-decoder 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "c1aae18ffeeae409c6622c3b6a7ee49792a7e5a062eea1b135fbb74e301792ba" +"checksum js-sys 0.3.31 (registry+https://github.com/rust-lang/crates.io-index)" = "d8657b7ca06a6044ece477f6900bf7670f8b5fd0cce177a1d7094eef51e0adf4" "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" -"checksum language-reporting 0.3.1 (git+https://github.com/wycats/language-reporting)" = "" +"checksum language-reporting 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4e6a84e1e6cccd818617d299427ad1519f127af2738b1d3a581835ef56ae298b" "checksum lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "76f033c7ad61445c5b347c7382dd1237847eb1bce590fe50365dcb33d546be73" -"checksum lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = 
"bc5729f27f159ddd61f4df6228e827e86643d4d3e7c32183cb30a1c08f604a14" +"checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" "checksum lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b294d6fa9ee409a054354afc4352b0b9ef7ca222c69b8812cbea9e7d2bf3783f" -"checksum lexical-core 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b8b0f90c979adde96d19eb10eb6431ba0c441e2f9e9bdff868b2f6f5114ff519" -"checksum libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)" = "d44e80633f007889c7eff624b709ab43c92d708caad982295768a7b13ca3b5eb" +"checksum lexical-core 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2304bccb228c4b020f3a4835d247df0a02a7c4686098d4167762cfbbe4c5cb14" +"checksum libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)" = "1a31a0627fdf1f6a39ec0dd577e101440b7db22672c0901fe00a9a6fbb5c24e8" "checksum libgit2-sys 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a30f8637eb59616ee3b8a00f6adff781ee4ddd8343a615b8238de756060cc1b3" "checksum libnghttp2-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "02254d44f4435dd79e695f2c2b83cd06a47919adea30216ceaf0c57ca0a72463" "checksum libsqlite3-sys 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5e5b95e89c330291768dc840238db7f9e204fd208511ab6319b56193a7f2ae25" @@ -3264,26 +3163,27 @@ dependencies = [ "checksum linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "ae91b68aebc4ddb91978b11a1b02ddd8602a05ec19002801c5666000e05e0f83" "checksum log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "14b6052be84e6b71ab17edffc2eeabf5c2c3ae1fdb464aae35ac50c67a44e1f7" "checksum lru-cache 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "31e24f1ad8321ca0e8a1e0ac13f23cb668e6f5466c2c57319f6a5cf1cc8e3b1c" -"checksum macaddr 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3ff4752cb15cffb3e68f7dcb22e0818ac871f8c98fb07a634a81f41fb202a09f" +"checksum macaddr 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "bee538cb1031f87f970ba28f0e5ebfcdaf63ed1a000a4176b4117537c33d19fb" "checksum mach 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "86dd2487cdfea56def77b88438a2c915fb45113c5319bfe7e14306ca4cd0b0e1" "checksum mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "b823e83b2affd8f40a9ee8c29dbc56404c1e34cd2710921f2801e2cf29527afa" "checksum malloc_buf 0.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "62bb907fe88d54d8d9ce32a3cceab4218ed2f6b7d35617cafe9adf84e43919cb" "checksum matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08" +"checksum maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00" "checksum md5 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7e6bcd6433cff03a4bfc3d9834d504467db1f1cf6d0ea765d37d330249ed629d" "checksum memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "88579771288728879b57485cc7d6b07d648c9f0141eb955f8ab7f9d45394468e" -"checksum memoffset 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ce6075db033bbbb7ee5a0bbd3a3186bbae616f57fb001c485c7ff77955f8177f" +"checksum memoffset 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = 
"75189eb85871ea5c2e2c15abbdd541185f63b408415e5051f5cac122d8c774b9" "checksum mime 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)" = "dd1d63acd1b78403cc0c325605908475dd9b9a3acbf65ed8bcab97e27014afcf" "checksum mime_guess 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1a0ed03949aef72dbdf3116a383d7b38b4768e6f960528cd6a6044aa9ed68599" -"checksum miniz-sys 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "1e9e3ae51cea1576ceba0dde3d484d30e6e5b86dee0b2d412fe3a16a15c98202" -"checksum miniz_oxide 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "fe2959c5a0747a8d7a56b4444c252ffd2dda5d452cfd147cdfdda73b1c3ece5b" -"checksum miniz_oxide_c_api 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6c675792957b0d19933816c4e1d56663c341dd9bfa31cb2140ff2267c1d8ecf4" +"checksum miniz_oxide 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6f3f74f726ae935c3f514300cc6773a0c9492abc5e972d42ba0c0ebb88757625" "checksum natural 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fd659d7d6b4554da2c0e7a486d5952b24dfce0e0bac88ab53b270f4efe1010a6" "checksum neso 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6b3c31defbcb081163db18437fd88c2a267cb3e26f7bd5e4b186e4b1b38fe8c8" "checksum nix 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6c722bee1037d430d0f8e687bbdbf222f27cc6e4e68d5caf630857bb2b6dbdce" "checksum nix 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3b2e0b4f3320ed72aaedb9a5ac838690a8047c7b275da22711fddff4f8a14229" -"checksum nodrop 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "2f9667ddcc6cc8a43afc9b7917599d7216aa09c463919ea32c59ed6cac8bc945" +"checksum nodrop 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)" = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb" "checksum nom 4.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2ad2a91a8e869eeb30b9cb3119ae87773a8f4ae617f41b1eb9c154b2905f7bd6" -"checksum nom 5.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e9761d859320e381010a4f7f8ed425f2c924de33ad121ace447367c713ad561b" +"checksum nom 5.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c618b63422da4401283884e6668d39f819a106ef51f5f59b81add00075da35ca" +"checksum nom-tracable 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4e9af1ee3bf4c9b842a720c53c0e7abb1b56a207e0b9bdbe7ff684b4cf630da1" +"checksum nom-tracable-macros 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8c16e5f9f228073fd36e4c9e65b12d763d9a1bda73b8400f3aa67d7971c8dffb" "checksum nom_locate 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f932834fd8e391fc7710e2ba17e8f9f8645d846b55aa63207e17e110a1e1ce35" "checksum ntapi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f26e041cd983acbc087e30fcba770380cfa352d0e392e175b2344ebaf7ea0602" "checksum num-bigint 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f9c3f34cdd24f334cb265d9bf8bfa8a241920d026916785747a92f0e55541a1a" @@ -3291,17 +3191,17 @@ dependencies = [ "checksum num-iter 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)" = "76bd5272412d173d6bf9afdf98db8612bbabc9a7a830b7bfc9c188911716132e" "checksum num-rational 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f2885278d5fe2adc2f75ced642d52d879bffaceb5a2e0b1d4309ffdfb239b454" "checksum num-traits 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)" = 
"92e5113e9fd4cc14ded8e499429f396a20f98c772a47cc8622a736e1ec843c31" -"checksum num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "6ba9a427cfca2be13aa6f6403b0b7e7368fe982bfa16fccc450ce74c46cd9b32" -"checksum num_cpus 1.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bcef43580c035376c0705c42792c294b66974abbfd2789b511784023f71f3273" -"checksum objc 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "31d20fd2b37e07cf5125be68357b588672e8cefe9a96f8c17a9d46053b3e590d" +"checksum num-traits 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "443c53b3c3531dfcbfa499d8893944db78474ad7a1d87fa2d94d1a2231693ac6" +"checksum num_cpus 1.11.1 (registry+https://github.com/rust-lang/crates.io-index)" = "76dac5ed2a876980778b8b85f75a71b6cbf0db0b1232ee12f826bccb00d09d72" +"checksum objc 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "915b1b472bc21c53464d6c8461c9d3af805ba1ef837e1cac254428f4a77177b1" "checksum objc-foundation 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1add1b659e36c9607c7aab864a76c7a4c2760cd0cd2e120f3fb8b952c7e22bf9" "checksum objc_id 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c92d4ddb4bd7b50d730c215ff871754d0da6b2178849f8a2a2ab69712d0c073b" "checksum ole32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5d2c49021782e5233cd243168edfa8037574afed4eba4bbaf538b3d8d1789d8c" "checksum once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "891f486f630e5c5a4916c7e16c4b24a53e78c860b646e9f8e005e4f16847bfed" -"checksum onig 4.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a646989adad8a19f49be2090374712931c3a59835cb5277b4530f48b417f26e7" +"checksum onig 4.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8518fcb2b1b8c2f45f0ad499df4fda6087fc3475ca69a185c173b8315d2fb383" "checksum onig_sys 69.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388410bf5fa341f10e58e6db3975f4bea1ac30247dd79d37a9e5ced3cb4cc3b0" "checksum openssl-probe 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "77af24da69f9d9341038eba93a073b1fdaaa1b788221b00a69bce9e762cb32de" -"checksum openssl-sys 0.9.49 (registry+https://github.com/rust-lang/crates.io-index)" = "f4fad9e54bd23bd4cbbe48fdc08a1b8091707ac869ef8508edea2fec77dcc884" +"checksum openssl-sys 0.9.52 (registry+https://github.com/rust-lang/crates.io-index)" = "c977d08e1312e2f7e4b86f9ebaa0ed3b19d1daff75fae88bbb88108afbd801fc" "checksum ord_subset 3.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d7ce14664caf5b27f5656ff727defd68ae1eb75ef3c4d95259361df1eb376bef" "checksum ordered-float 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "18869315e81473c951eb56ad5558bbc56978562d3ecfb87abb7a1e944cea4518" "checksum ordermap 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "a86ed3f5f244b372d6b1a00b72ef7f8876d0bc6a78a4c9985c53614041512063" @@ -3310,35 +3210,26 @@ dependencies = [ "checksum percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" "checksum petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)" = "9c3659d1ee90221741f65dd128d9998311b0e40c5d3c23a62445938214abce4f" "checksum pin-utils 0.1.0-alpha.4 (registry+https://github.com/rust-lang/crates.io-index)" = "5894c618ce612a3fa23881b152b608bafb8c56cfc22f434a3ba3120b40f7b587" -"checksum pkg-config 0.3.15 
(registry+https://github.com/rust-lang/crates.io-index)" = "a7c1d2cfa5a714db3b5f24f0915e74fcdf91d09d496ba61329705dda7774d2af" -"checksum platforms 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6cfec0daac55b13af394ceaaad095d17c790f77bdc9329264f06e49d6cd3206c" +"checksum pkg-config 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)" = "05da548ad6865900e60eaba7f589cc0783590a92e940c26953ff81ddbab2d677" +"checksum platforms 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "feb3b2b1033b8a60b4da6ee470325f887758c95d5320f52f9ce0df055a55940e" "checksum plist 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "5f2a9f075f6394100e7c105ed1af73fb1859d6fd14e49d4290d578120beb167f" "checksum png 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8422b27bb2c013dd97b9aef69e161ce262236f49aaf46a0489011c8ff0264602" -"checksum ppv-lite86 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e3cbf9f658cdb5000fcf6f362b8ea2ba154b9f146a61c7a20d647034c6b6561b" -"checksum pretty-hex 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "119929a2a3b731bb3d888f7a1b5dc3c1db28b6c134def5d99f7e16e2da16b8f7" +"checksum ppv-lite86 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "74490b50b9fbe561ac330df47c08f3f33073d2d00c150f719147d7c54522fa1b" +"checksum pretty-hex 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "be91bcc43e73799dc46a6c194a55e7aae1d86cc867c860fd4a436019af21bd8c" "checksum pretty_assertions 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3f81e1644e1b54f5a68959a29aa86cde704219254669da328ecfdf6a1f09d427" "checksum pretty_env_logger 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "717ee476b1690853d222af4634056d830b5197ffd747726a9a1eee6da9f49074" "checksum prettytable-rs 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0fd04b170004fa2daccf418a7f8253aaf033c27760b5f225889024cf66d7ac2e" -"checksum proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)" = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759" -"checksum proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4c5c2380ae88876faae57698be9e9775e3544decad214599c3a6266cca6ac802" +"checksum proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "9c9e470a8dc4aeae2dee2f335e8f533e2d4b347e1434e5671afc49b054592f27" "checksum ptree 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6b0a3be00b19ee7bd33238c1c523a7ab4df697345f6b36f90827a7860ea938d4" "checksum quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9274b940887ce9addde99c4eee6b5c44cc494b182b97e73dc8ffdcb3397fd3f0" -"checksum quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1" "checksum quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe" -"checksum rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca" -"checksum rand 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d47eab0e83d9693d40f825f86948aa16eff6750ead4bdffc4ab95b8b3a7f052c" -"checksum rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef" +"checksum rand 0.7.2 
(registry+https://github.com/rust-lang/crates.io-index)" = "3ae1b169243eaf61759b8475a998f0a385e42042370f3a7dbaf35246eacc8412" "checksum rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "03a2a90da8c7523f554344f921aa97283eadf6ac484a6d2a7d0212fa7f8d6853" "checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" "checksum rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" -"checksum rand_core 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "615e683324e75af5d43d8f7a39ffe3ee4a9dc42c5c701167a71dc59c3a493aca" -"checksum rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4" +"checksum rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" "checksum rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" -"checksum rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08" -"checksum rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b" "checksum rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071" -"checksum rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44" -"checksum rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c" "checksum raw-cpuid 7.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b4a349ca83373cfa5d6dbb66fd76e58b2cca08da71a5f6400de0a0a6a9bceeaf" "checksum rawkey 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "33ec17a493dcb820725c002bc253f6f3ba4e4dc635e72c238540691b05e43897" "checksum rayon 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "83a27732a533a1be0a0035a111fe76db89ad312f6f0347004c220c57f209a123" @@ -3347,60 +3238,57 @@ dependencies = [ "checksum readkey 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d98db94bb4f3e926c8d8186547cd9366d958d753aff5801214d93d38214e8f0f" "checksum redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)" = "2439c63f3f6139d1b57529d16bc3b8bb855230c8efcc5d3a896c8bea7c3b1e84" "checksum redox_users 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4ecedbca3bf205f8d8f5c2b44d83cd0690e39ee84b951ed649e9f1841132b66d" -"checksum regex 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "88c3d9193984285d544df4a30c23a4e62ead42edf70a4452ceb76dac1ce05c26" +"checksum regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "dc220bd33bdce8f093101afe22a037b8eb0e5af33592e6a9caafff0d4cb81cbd" "checksum regex-automata 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "92b73c2a1770c255c240eaa4ee600df1704a38dc3feaa6e949e7fcd4f8dc09f9" -"checksum regex-syntax 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)" = "b143cceb2ca5e56d5671988ef8b15615733e7ee16cd348e064333b251b89343f" +"checksum regex-syntax 0.6.12 
(registry+https://github.com/rust-lang/crates.io-index)" = "11a7e20d1cce64ef2fed88b66d347f88bd9babb82845b2b858f3edbf59a4f716" "checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e" -"checksum render-tree 0.1.1 (git+https://github.com/wycats/language-reporting)" = "" +"checksum render-tree 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "68ed587df09cfb7ce1bc6fe8f77e24db219f222c049326ccbfb948ec67e31664" "checksum result 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "194d8e591e405d1eecf28819740abed6d719d1a2db87fc0bcdedee9a26d55560" -"checksum roxmltree 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "153c367ce9fb8ef7afe637ef92bd083ba0f88b03ef3fcf0287d40be05ae0a61c" +"checksum roxmltree 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0852407257c1b696a0c66b9db3ffe7769c2744a2fa725c8050e6f3e5a823c02b" "checksum rusqlite 0.20.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2a194373ef527035645a1bc21b10dc2125f73497e6e155771233eb187aedd051" "checksum rust-argon2 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4ca4eaef519b494d1f2848fc602d18816fed808a981aedf4f1f00ceb7c9d32cf" "checksum rust-ini 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3e52c148ef37f8c375d49d5a73aa70713125b7f19095948a923f80afdeb22ec2" -"checksum rustc-demangle 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "a7f4dccf6f4891ebcc0c39f9b6eb1a83b9bf5d747cb439ec6fba4f3b977038af" +"checksum rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "4c691c0e608126e00913e33f0ccf3727d5fc84573623b8d65b2df340b5201783" "checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" "checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" -"checksum rustyline 5.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4795e277e6e57dec9df62b515cd4991371daa80e8dc8d80d596e58722b89c417" -"checksum ryu 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c92464b447c0ee8c4fb3824ecc8383b81717b9f1e74ba2e72540aef7b9f82997" -"checksum safemem 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e133ccc4f4d1cd4f89cc8a7ff618287d56dc7f638b8e38fc32c5fdcadc339dd5" +"checksum rustyline 5.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "e9d8eb9912bc492db051324d36f5cea56984fc2afeaa5c6fa84e0b0e3cde550f" +"checksum ryu 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "bfa8506c1de11c9c4e4c38863ccbe02a305c8188e85a05a784c9e11e1c3910c8" +"checksum safemem 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ef703b7cb59335eae2eb93ceb664c0eb7ea6bf567079d843e09420219668e072" "checksum same-file 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "585e8ddcedc187886a30fa705c47985c3fa88d06624095856b36ca0b82ff4421" -"checksum schannel 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "f2f6abf258d99c3c1c5c2131d99d064e94b7b3dd5f416483057f308fea253339" +"checksum schannel 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "87f550b06b6cba9c8b8be3ee73f391990116bf527450d2556e9b9ce263b9a021" "checksum scopeguard 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = 
"b42e15e59b18a828bbf5c58ea01debb36b9b096346de35d941dcb89009f24a0d" "checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" "checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" "checksum serde 0.8.23 (registry+https://github.com/rust-lang/crates.io-index)" = "9dad3f759919b92c3068c696c15c3d17238234498bbdcc80f2c469606f948ac8" -"checksum serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)" = "f4473e8506b213730ff2061073b48fa51dcc66349219e2e7c5608f0296a1d95a" +"checksum serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)" = "0c4b39bd9b0b087684013a792c59e3e07a46a01d2322518d8a1104641a0b1be0" "checksum serde-hjson 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0b833c5ad67d52ced5f5938b2980f32a9c1c5ef047f0b4fb3127e7a423c76153" "checksum serde-hjson 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6a3a4e0ea8a88553209f6cc6cfe8724ecad22e1acf372793c27d995290fe74f8" "checksum serde-value 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7a663f873dedc4eac1a559d4c6bc0d0b2c34dc5ac4702e105014b8281489e44f" "checksum serde_bytes 0.11.2 (registry+https://github.com/rust-lang/crates.io-index)" = "45af0182ff64abaeea290235eb67da3825a576c5d53e642c4d5b652e12e6effc" -"checksum serde_derive 1.0.98 (registry+https://github.com/rust-lang/crates.io-index)" = "01e69e1b8a631f245467ee275b8c757b818653c6d704cdbcaeb56b56767b529c" -"checksum serde_derive_internals 0.24.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8a80c6c0b1ebbcea4ec2c7e9e2e9fa197a425d17f1afec8ba79fcd1352b18ffb" +"checksum serde_derive 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)" = "ca13fc1a832f793322228923fbb3aba9f3f44444898f835d31ad1b74fa0a2bf8" "checksum serde_ini 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "eb236687e2bb073a7521c021949be944641e671b8505a94069ca37b656c81139" "checksum serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)" = "2f72eb2a68a7dc3f9a691bfda9305a1c017a6215e5a4545c258500d2099a37c2" "checksum serde_test 0.8.23 (registry+https://github.com/rust-lang/crates.io-index)" = "110b3dbdf8607ec493c22d5d947753282f3bae73c0f56d322af1e8c78e4c23d5" "checksum serde_urlencoded 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9ec5d77e2d4c73717816afac02670d5c4f534ea95ed430442cad02e7a6e32c97" -"checksum serde_yaml 0.8.9 (registry+https://github.com/rust-lang/crates.io-index)" = "38b08a9a90e5260fe01c6480ec7c811606df6d3a660415808c3c3fa8ed95b582" +"checksum serde_yaml 0.8.11 (registry+https://github.com/rust-lang/crates.io-index)" = "691b17f19fc1ec9d94ec0b5864859290dff279dbd7b03f017afda54eb36c3c35" "checksum shell32-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9ee04b46101f57121c9da2b151988283b6beb79b34f5bb29a58ee48cb695122c" "checksum shellexpand 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de7a5b5a9142fd278a10e0209b021a1b85849352e6951f4f914735c976737564" "checksum slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8" -"checksum sluice 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ec70d7c3b17c262d4a18f7291c6ce62bf47170915f3b795434d3c5c49a4e59b7" -"checksum smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)" = 
"ab606a9c5e214920bb66c458cd7be8ef094f813f20fe77a54cc7dbfff220d4b7" +"checksum sluice 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0a7d06dfb3e8743bc19e6de8a302277471d08077d68946b307280496dc5a3531" +"checksum smallvec 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "f7b0758c52e15a8b5e3691eae6cc559f08eee9406e548a4477ba4e67770a82b6" "checksum socket2 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)" = "e8b74de517221a2cb01a53349cf54182acdc31a074727d3079068448c0676d85" "checksum sourcefile 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "4bf77cb82ba8453b42b6ae1d692e4cdc92f9a47beaf89a847c8be83f4e328ad3" -"checksum stackvector 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "1c4725650978235083241fab0fdc8e694c3de37821524e7534a1a9061d1068af" -"checksum starship 0.22.0 (registry+https://github.com/rust-lang/crates.io-index)" = "43070194dc4cd97f37367c50ca51420c3b86303d4b259a9a73db71ee1658028d" +"checksum starship 0.26.4 (registry+https://github.com/rust-lang/crates.io-index)" = "d31191331ef70afd5c8f88515850ce50ae4a7ad5a9d7d1046eba6ceb8e9707d8" "checksum starship_module_config_derive 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "36d147e2f158842551535289789d4f3ef5a37d4043f6dc96f3a461bb253e69a1" -"checksum static_assertions 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b4f8de36da215253eb5f24020bfaa0646613b48bf7ebe36cdfa37c3b3b33b241" +"checksum static_assertions 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "7f3eb36b47e512f8f1c9e3d10c2c1965bc992bd9cdb024fa581e2194501c83d3" "checksum strsim 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" -"checksum sublime_fuzzy 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "97bd7ad698ea493a3a7f60c2ffa117c234f341e09f8cc2d39cef10cdde077acf" +"checksum sublime_fuzzy 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bdac3d983d073c19487ba1f5e16eda43e9c6e50aa895d87110d0febe389b66b9" "checksum subprocess 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)" = "28fc0f40f0c0da73339d347aa7d6d2b90341a95683a47722bc4eebed71ff3c00" -"checksum surf 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "018eed64aede455beb88505d50c5c64882bebbe0996d4b660c272e3d8bb6f883" -"checksum syn 0.15.43 (registry+https://github.com/rust-lang/crates.io-index)" = "ee06ea4b620ab59a2267c6b48be16244a3389f8bfa0986bdd15c35b890b00af3" -"checksum syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "c65d951ab12d976b61a41cf9ed4531fc19735c6e6d84a4bb1453711e762ec731" -"checksum synstructure 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)" = "02353edf96d6e4dc81aea2d8490a7e9db177bf8acb0e951c24940bf866cb313f" +"checksum surf 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "741a8008f8a833ef16f47df94a30754478fb2c2bf822b9c2e6f7f09203b97ace" +"checksum syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "661641ea2aa15845cddeb97dad000d22070bb5c1fb456b96c1cba883ec691e92" +"checksum synstructure 0.12.3 (registry+https://github.com/rust-lang/crates.io-index)" = "67656ea1dc1b41b1451851562ea232ec2e5a80242139f7e679ceccfb5d61f545" "checksum syntect 3.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e80b8831c5a543192ffc3727f01cf0e57579c6ac15558e3048bfb5708892167b" -"checksum sysinfo 0.9.5 (registry+https://github.com/rust-lang/crates.io-index)" = 
"d5bd3b813d94552a8033c650691645f8dd5a63d614dddd62428a95d3931ef7b6" +"checksum sysinfo 0.9.6 (registry+https://github.com/rust-lang/crates.io-index)" = "6f4b2468c629cffba39c0a4425849ab3cdb03d9dfacba69684609aea04d08ff9" "checksum tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9" "checksum term 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "edd106a334b7657c10b7c540a0106114feadeb4dc314513e97df481d5d966f42" "checksum term_size 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9e5b9a66db815dcfd2da92db471106457082577c3c278d4138ab3e3b4e189327" @@ -3412,37 +3300,37 @@ dependencies = [ "checksum tint 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7af24570664a3074673dbbf69a65bdae0ae0b72f2949b1adfbacb736ee4d6896" "checksum tokio-io 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "5090db468dad16e1a7a54c8c67280c5e4b544f3d3e018f0b913b400261f85926" "checksum toml 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)" = "758664fc71a3a69038656bee8b6be6477d2a6c315a6b81f7081f591bffa4111f" -"checksum toml 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c7aabe75941d914b72bf3e5d3932ed92ce0664d49d8432305a8b547c37227724" -"checksum typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "612d636f949607bdf9b123b4a6f6d966dedf3ff669f7f045890d3a4a73948169" -"checksum unicase 2.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a84e5511b2a947f3ae965dcb29b13b7b1691b6e7332cf5dbc1744138d5acb7f6" +"checksum toml 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)" = "01d1404644c8b12b16bfcffa4322403a91a451584daaaa7c28d3152e6cbc98cf" +"checksum trash 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f2f24d31505f49e989b1ee2c03c323251f6763d5907d471b71192dac92e323f8" +"checksum typenum 1.11.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6d2783fe2d6b8c1101136184eb41be8b1ad379e4657050b8aaff0c79ee7575f9" +"checksum unicase 2.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6" "checksum unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5" -"checksum unicode-normalization 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "141339a08b982d942be2ca06ff8b076563cbe223d1befd5450716790d44e2426" -"checksum unicode-segmentation 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1967f4cdfc355b37fd76d2a954fb2ed3871034eb4f26d60537d88795cfc332a9" +"checksum unicode-normalization 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "09c8070a9942f5e7cfccd93f490fdebd230ee3c3c9f107cb25bad5351ef671cf" +"checksum unicode-segmentation 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e83e153d1053cbb5a118eeff7fd5be06ed99153f00dbcd8ae310c5fb2b22edc0" "checksum unicode-width 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "7007dbd421b92cc6e28410fe7362e2e0a2503394908f417b68ec8d1c364c4e20" -"checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" "checksum unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c" -"checksum unreachable 1.0.0 
(registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56" "checksum uom 0.23.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3ef5bbe8385736e498dbb0033361f764ab43a435192513861447b9f7714b3fec" "checksum uom 0.25.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3198c29f199fa8a23d732f4aa21ddc4f4d0a257cb0c2a44afea30145ce2575c1" "checksum url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "75b414f6c464c879d7f9babf951f23bc3743fb7313c081b2e6ca719067ea9d61" "checksum user32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4ef4711d107b21b410a3a974b1204d9accc8b10dad75d8324b5d755de1617d47" "checksum utf8parse 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8772a4ccbb4e89959023bc5b7cb8623a795caa7092d99f3aa9501b9484d4557d" -"checksum uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)" = "90dbc611eb48397705a6b0f6e917da23ae517e4d127123d2cf7674206627d32a" "checksum vcpkg 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "33dd455d0f96e90a75803cfeb7f948768c08d70a6de9a8d2362461935698bf95" "checksum vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "05c78687fb1a80548ae3250346c3db86a80a7cdd77bda190189f2d0a0987c81a" "checksum version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd" +"checksum version_check 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "078775d0255232fb988e6fccf26ddc9d1ac274299aaedcedce21c6f72cc533ce" "checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" "checksum walkdir 2.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "9658c94fa8b940eab2250bd5a457f9c48b748420d71293b165c8cdbe2f55f71e" -"checksum wasm-bindgen 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)" = "dcddca308b16cd93c2b67b126c688e5467e4ef2e28200dc7dfe4ae284f2faefc" -"checksum wasm-bindgen-backend 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)" = "f805d9328b5fc7e5c6399960fd1889271b9b58ae17bdb2417472156cc9fafdd0" -"checksum wasm-bindgen-futures 0.3.25 (registry+https://github.com/rust-lang/crates.io-index)" = "73c25810ee684c909488c214f55abcbc560beb62146d352b9588519e73c2fed9" -"checksum wasm-bindgen-macro 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)" = "3ff88201a482abfc63921621f6cb18eb1efd74f136b05e5841e7f8ca434539e9" -"checksum wasm-bindgen-macro-support 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)" = "6a433d89ecdb9f77d46fcf00c8cf9f3467b7de9954d8710c175f61e2e245bb0e" -"checksum wasm-bindgen-shared 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)" = "d41fc1bc3570cdf8d108c15e014045fd45a95bb5eb36605f96a90461fc34027d" -"checksum wasm-bindgen-webidl 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)" = "be53d289bf2fa7645a089cfd5c7a34bf4fe94221f58cf86ee42a7b4bc854ff14" -"checksum web-sys 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)" = "6435c477200ad486089a7a72c2bd6c9bdf9740bd7fff868806076218076d8c51" +"checksum wasi 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b89c3ce4ce14bdc6fb6beaf9ec7928ca331de5df7e5ea278375642a2f478570d" +"checksum wasm-bindgen 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)" = "c4568ae1b4e07ca907b1a4de41174eaa3e5be4066c024475586b7842725f69a9" +"checksum 
wasm-bindgen-backend 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)" = "5a00cfdce37367770062065fd3abb9278cbae86a0d918cacd0978a7acd51b481" +"checksum wasm-bindgen-futures 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)" = "83420b37346c311b9ed822af41ec2e82839bfe99867ec6c54e2da43b7538771c" +"checksum wasm-bindgen-macro 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)" = "7c568f4d3cf6d7c1d72b165daf778fb0d6e09a24f96ac14fc8c4f66a96e86b72" +"checksum wasm-bindgen-macro-support 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)" = "430d12539ae324d16097b399e9d07a6d5ce0173b2a61a2d02346ca7c198daffe" +"checksum wasm-bindgen-shared 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)" = "8ae7167f0bbffd7fac2b12da0fa1f834c1d84671a1ae3c93ac8bde2e97179c39" +"checksum wasm-bindgen-webidl 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)" = "3021567c515a746a64ad0b269d120d46e687c0c95702a4750623db935ae6b5e7" +"checksum web-sys 0.3.31 (registry+https://github.com/rust-lang/crates.io-index)" = "ce8e893e021539beb87de8f06e77bdb390a3ab0db4cfeb569c4e377b55ed20de" "checksum weedle 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3bb43f70885151e629e2a19ce9e50bd730fd436cfd4b666894c9ce4de9141164" -"checksum which 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b57acb10231b9493c8472b20cb57317d0679a49e0bdbee44b3b803a6473af164" +"checksum which 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5475d47078209a02e60614f7ba5e645ef3ed60f771920ac1906d7c1cc65024c8" "checksum widestring 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "effc0e4ff8085673ea7b9b2e3c73f6bd4d118810c9009ed8f1e16bd96c331db6" "checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" "checksum winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "8093091eeb260906a183e6ae1abdba2ef5ef2257a21801128899c3fc699229c6" @@ -3450,11 +3338,11 @@ dependencies = [ "checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" "checksum winapi-util 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7168bab6e1daee33b4557efd0e95d5ca70a03706d39fa5f3fe7a236f584b03c9" "checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" -"checksum wincolor 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "561ed901ae465d6185fa7864d63fbd5720d0ef718366c9a4dc83cf6170d7e9ba" +"checksum wincolor 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "96f5016b18804d24db43cebf3c77269e7569b8954a8464501c216cc5e070eaa9" "checksum x11 2.18.1 (registry+https://github.com/rust-lang/crates.io-index)" = "39697e3123f715483d311b5826e254b6f3cfebdd83cf7ef3358f579c3d68e235" "checksum x11-clipboard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "89bd49c06c9eb5d98e6ba6536cf64ac9f7ee3a009b2f53996d405b3944f6bcea" "checksum xcb 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "5e917a3f24142e9ff8be2414e36c649d47d6cc2ba81f16201cdef96e533e02de" "checksum xdg 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d089681aa106a86fade1b0128fb5daf07d5867a509ab036d99988dec80429a57" "checksum xml-rs 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = 
"541b12c998c5b56aa2b4e6f18f03664eef9a4fd0a246a55594efae6cc2d964b5" -"checksum xmlparser 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ecec95f00fb0ff019153e64ea520f87d1409769db3e8f4db3ea588638a3e1cee" +"checksum xmlparser 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8110496c5bcc0d966b0b2da38d5a791aa139eeb0b80e7840a7463c2b806921eb" "checksum yaml-rust 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "65923dd1784f44da1d2c3dbbc5e822045628c590ba72123e1c73d3c230c4434d" diff --git a/Cargo.toml b/Cargo.toml index 1f948d4e5d..57110f815c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "nu" -version = "0.3.0" +version = "0.5.1" authors = ["Yehuda Katz ", "Jonathan Turner ", "Andrés N. Robalino "] description = "A shell for the GitHub era" license = "MIT" @@ -14,80 +14,80 @@ documentation = "https://book.nushell.sh" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -rustyline = "5.0.3" +rustyline = "5.0.4" chrono = { version = "0.4.9", features = ["serde"] } derive-new = "0.5.8" prettytable-rs = "0.8.0" -itertools = "0.8.0" +itertools = "0.8.1" ansi_term = "0.12.1" -nom = "5.0.0" +nom = "5.0.1" dunce = "1.0.0" -indexmap = { version = "1.2.0", features = ["serde-1"] } +indexmap = { version = "1.3.0", features = ["serde-1"] } chrono-humanize = "0.0.11" -byte-unit = "3.0.1" -base64 = "0.10.1" -futures-preview = { version = "=0.3.0-alpha.18", features = ["compat", "io-compat"] } -async-stream = "0.1.1" +byte-unit = "3.0.3" +base64 = "0.11" +futures-preview = { version = "=0.3.0-alpha.19", features = ["compat", "io-compat"] } +async-stream = "0.1.2" futures_codec = "0.2.5" num-traits = "0.2.8" term = "0.5.2" bytes = "0.4.12" log = "0.4.8" pretty_env_logger = "0.3.1" -serde = { version = "1.0.100", features = ["derive"] } +serde = { version = "1.0.102", features = ["derive"] } bson = { version = "0.14.0", features = ["decimal128"] } -serde_json = "1.0.40" +serde_json = "1.0.41" serde-hjson = "0.9.1" serde_yaml = "0.8" serde_bytes = "0.11.2" -getset = "0.0.8" -#language-reporting = "0.3.1" -language-reporting = { git = "https://github.com/wycats/language-reporting" } +getset = "0.0.9" +language-reporting = "0.4.0" app_dirs = "1.2.1" csv = "1.1" -toml = "0.5.3" +toml = "0.5.5" clap = "2.33.0" git2 = { version = "0.10.1", default_features = false } dirs = "2.0.2" glob = "0.3.0" ctrlc = "3.1.3" -surf = "1.0.2" +surf = "1.0.3" url = "2.1.0" -roxmltree = "0.7.0" +roxmltree = "0.7.2" nom_locate = "1.0.0" -enum-utils = "0.1.1" +nom-tracable = "0.4.1" unicode-xid = "0.2.0" serde_ini = "0.2.0" subprocess = "0.1.18" mime = "0.3.14" -pretty-hex = "0.1.0" -hex = "0.3.2" +pretty-hex = "0.1.1" +hex = "0.4" tempfile = "3.1.0" semver = "0.9.0" -which = "2.0.1" -uuid = {version = "0.7.4", features = [ "v4", "serde" ]} +which = "3.1" textwrap = {version = "0.11.0", features = ["term_size"]} shellexpand = "1.0.0" -futures-timer = "0.4.0" +futures-timer = "2.0.0" pin-utils = "0.1.0-alpha.4" num-bigint = { version = "0.2.3", features = ["serde"] } bigdecimal = { version = "0.1.0", features = ["serde"] } natural = "0.3.0" serde_urlencoded = "0.6.1" -sublime_fuzzy = "0.5" +sublime_fuzzy = "0.6" +trash = "1.0.0" regex = "1" +cfg-if = "0.1" neso = { version = "0.5.0", optional = true } crossterm = { version = "0.10.2", optional = true } syntect = {version = "3.2.0", optional = true } onig_sys = {version = "=69.1.0", optional = true } -heim = {version = "0.0.8-alpha.1", optional = true } 
+heim = {version = "0.0.8", optional = true } battery = {version = "0.7.4", optional = true } rawkey = {version = "0.1.2", optional = true } clipboard = {version = "0.5", optional = true } -ptree = {version = "0.2", optional = true } +ptree = {version = "0.2" } image = { version = "0.22.2", default_features = false, features = ["png_codec", "jpeg"], optional = true } -starship = { version = "0.22.0", optional = true} +starship = { version = "0.26.4", optional = true} [features] default = ["textview", "sys", "ps"] @@ -97,6 +97,7 @@ binaryview = ["image", "crossterm"] sys = ["heim", "battery"] ps = ["heim"] starship-prompt = ["starship"] +# trace = ["nom-tracable/trace"] [dependencies.rusqlite] version = "0.20.0" @@ -105,6 +106,10 @@ features = ["bundled", "blob"] [dev-dependencies] pretty_assertions = "0.6.1" +[build-dependencies] +toml = "0.5.5" +serde = { version = "1.0.102", features = ["derive"] } + [lib] name = "nu" path = "src/lib.rs" @@ -117,18 +122,30 @@ path = "src/plugins/inc.rs" name = "nu_plugin_sum" path = "src/plugins/sum.rs" +[[bin]] +name = "nu_plugin_average" +path = "src/plugins/average.rs" + [[bin]] name = "nu_plugin_embed" path = "src/plugins/embed.rs" [[bin]] -name = "nu_plugin_add" -path = "src/plugins/add.rs" +name = "nu_plugin_insert" +path = "src/plugins/insert.rs" [[bin]] name = "nu_plugin_edit" path = "src/plugins/edit.rs" +[[bin]] +name = "nu_plugin_format" +path = "src/plugins/format.rs" + +[[bin]] +name = "nu_plugin_parse" +path = "src/plugins/parse.rs" + [[bin]] name = "nu_plugin_str" path = "src/plugins/str.rs" diff --git a/README.md b/README.md index 7df2c92ec9..b1ce4feec1 100644 --- a/README.md +++ b/README.md @@ -32,9 +32,9 @@ Try it in Gitpod. ## Local -Up-to-date installation instructions can be found in the [installation chapter of the book](https://book.nushell.sh/en/installation). +Up-to-date installation instructions can be found in the [installation chapter of the book](https://book.nushell.sh/en/installation). **Windows users**: please note that Nu works on Windows 10 and does not currently have Windows 7/8.1 support. -To build Nu, you will need to use the **nightly** version of the compiler. +To build Nu, you will need to use the **latest stable (1.39 or later)** version of the compiler. 
Required dependencies: @@ -46,16 +46,16 @@ Optional dependencies: * To use Nu with all possible optional features enabled, you'll also need the following: * on Linux (on Debian/Ubuntu): `apt install libxcb-composite0-dev libx11-dev` -To install Nu via cargo (make sure you have installed [rustup](https://rustup.rs/) and the nightly compiler via `rustup install nightly`): +To install Nu via cargo (make sure you have installed [rustup](https://rustup.rs/) and the latest stable compiler via `rustup install stable`): ``` -cargo +nightly install nu +cargo install nu ``` You can also install Nu with all the bells and whistles (be sure to have installed the [dependencies](https://book.nushell.sh/en/installation#dependencies) for your platform): ``` -cargo +nightly install nu --all-features +cargo install nu --all-features ``` ## Docker @@ -173,7 +173,7 @@ We can pipeline this into a command that gets the contents of one of the columns ━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━┯━━━━━━┯━━━━━━━━━ authors │ description │ edition │ license │ name │ version ─────────────────┼────────────────────────────┼─────────┼─────────┼──────┼───────── - [table: 3 rows] │ A shell for the GitHub era │ 2018 │ ISC │ nu │ 0.3.0 + [table: 3 rows] │ A shell for the GitHub era │ 2018 │ MIT │ nu │ 0.5.0 ━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━┷━━━━━━┷━━━━━━━━━ ``` @@ -181,7 +181,7 @@ Finally, we can use commands outside of Nu once we have the data we want: ``` /home/jonathan/Source/nushell(master)> open Cargo.toml | get package.version | echo $it -0.3.0 +0.5.0 ``` Here we use the variable `$it` to refer to the value being piped to the external command. @@ -200,7 +200,7 @@ Nu supports plugins that offer additional functionality to the shell and follow There are a few examples in the `plugins` directory. -Plugins are binaries that are available in your path and follow a "nu_plugin_*" naming convention. These binaries interact with nu via a simple JSON-RPC protocol where the command identifies itself and passes along its configuration, which then makes it available for use. If the plugin is a filter, data streams to it one element at a time, and it can stream data back in return via stdin/stdout. If the plugin is a sink, it is given the full vector of final data and is given free reign over stdin/stdout to use as it pleases. +Plugins are binaries that are available in your path and follow a `nu_plugin_*` naming convention. These binaries interact with nu via a simple JSON-RPC protocol where the command identifies itself and passes along its configuration, which then makes it available for use. If the plugin is a filter, data streams to it one element at a time, and it can stream data back in return via stdin/stdout. If the plugin is a sink, it is given the full vector of final data and is given free reign over stdin/stdout to use as it pleases. # Goals @@ -248,20 +248,27 @@ Nu adheres closely to a set of goals that make up its design philosophy. 
As feat ## Filters on tables (structured data) | command | description | | ------------- | ------------- | -| add column-or-column-path value | Add a new column to the table | +| append row-data | Append a row to the end of the table | +| count | Show the total number of rows | | edit column-or-column-path value | Edit an existing column to have a new value | | embed column | Creates a new table of one column with the given name, and places the current table inside of it | | first amount | Show only the first number of rows | +| format pattern | Format table row data as a string following the given pattern | | get column-or-column-path | Open column and get data from the corresponding cells | +| group-by column | Creates a new table with the data from the table rows grouped by the column given | +| histogram column ...column-names | Creates a new table with a histogram based on the column name passed in, optionally give the frequency column name | inc (column-or-column-path) | Increment a value or version. Optionally use the column of a table | +| insert column-or-column-path value | Insert a new column to the table | | last amount | Show only the last number of rows | | nth row-number | Return only the selected row | | pick ...columns | Down-select table to only these columns | | pivot --header-row | Pivot the tables, making columns into rows and vice versa | +| prepend row-data | Prepend a row to the beginning of the table | | reject ...columns | Remove the given columns from the table | | reverse | Reverses the table. | | skip amount | Skip a number of rows | | skip-while condition | Skips rows while the condition matches. | +| split-by column | Creates a new table with the data from the inner tables splitted by the column given | | sort-by ...columns | Sort by the given columns | | str (column) | Apply string function. Optionally use the column of a table | | sum | Sum a column of values | @@ -284,12 +291,14 @@ Nu adheres closely to a set of goals that make up its design philosophy. 
As feat | from-ini | Parse text as .ini and create table | | from-json | Parse text as .json and create table | | from-sqlite | Parse binary data as sqlite .db and create table | +| from-ssv --minimum-spaces | Parse text as space-separated values and create table | | from-toml | Parse text as .toml and create table | | from-tsv | Parse text as .tsv and create table | | from-url | Parse urlencoded string and create a table | | from-xml | Parse text as .xml and create a table | | from-yaml | Parse text as a .yaml/.yml and create a table | | lines | Split single string into rows, one per line | +| parse pattern | Convert text to a table by matching the given pattern | | size | Gather word count statistics on the text | | split-column sep ...column-names | Split row contents across multiple columns via the separator, optionally give the columns names | | split-row sep | Split row contents over multiple rows via the separator | diff --git a/build.rs b/build.rs new file mode 100644 index 0000000000..44a55f9573 --- /dev/null +++ b/build.rs @@ -0,0 +1,39 @@ +use serde::Deserialize; +use std::collections::HashMap; +use std::collections::HashSet; +use std::env; +use std::path::Path; + +#[derive(Deserialize)] +struct Feature { + #[allow(unused)] + description: String, + enabled: bool, +} + +fn main() -> Result<(), Box> { + let input = env::var("CARGO_MANIFEST_DIR").unwrap(); + let all_on = env::var("NUSHELL_ENABLE_ALL_FLAGS").is_ok(); + let flags: HashSet = env::var("NUSHELL_ENABLE_FLAGS") + .map(|s| s.split(",").map(|s| s.to_string()).collect()) + .unwrap_or_else(|_| HashSet::new()); + + if all_on && !flags.is_empty() { + println!( + "cargo:warning={}", + "Both NUSHELL_ENABLE_ALL_FLAGS and NUSHELL_ENABLE_FLAGS were set. You don't need both." + ); + } + + let path = Path::new(&input).join("features.toml"); + + let toml: HashMap = toml::from_str(&std::fs::read_to_string(path)?)?; + + for (key, value) in toml.iter() { + if value.enabled == true || all_on || flags.contains(key) { + println!("cargo:rustc-cfg={}", key); + } + } + + Ok(()) +} diff --git a/docker/Dockerfile.nu-base b/docker/Dockerfile.nu-base index 1a9e83a11e..3adee88e78 100644 --- a/docker/Dockerfile.nu-base +++ b/docker/Dockerfile.nu-base @@ -11,9 +11,9 @@ RUN apt-get update && apt-get install -y libssl-dev \ ARG RELEASE=false WORKDIR /code -COPY ./rust-toolchain ./rust-toolchain -RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain `cat rust-toolchain` +RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain "stable" ENV PATH=/root/.cargo/bin:$PATH +RUN rustup update COPY . /code RUN echo "##vso[task.prependpath]/root/.cargo/bin" && \ rustc -Vv && \ diff --git a/docker/docker-compose.package.yml b/docker/docker-compose.package.yml index 9be36544eb..a2ad90b6bb 100644 --- a/docker/docker-compose.package.yml +++ b/docker/docker-compose.package.yml @@ -2,7 +2,7 @@ version: '3' services: nushell: - image: ${REGISTRY}/nu:${TAG} + image: ${DOCKER_REGISTRY}/nu:${DOCKER_TAG} build: context: .. dockerfile: docker/Package${PATCH}.Dockerfile diff --git a/docs/commands/append.md b/docs/commands/append.md new file mode 100644 index 0000000000..8bb2cabee3 --- /dev/null +++ b/docs/commands/append.md @@ -0,0 +1,53 @@ +# append +This command allows you to append the given row to the table. + +**Note**: +- `append` does not change a file itself. 
If you want to save your changes, you need to run the `save` command +- if you want to add something containing a whitespace character, you need to put it in quotation marks + +## Examples + +Let's add more cities to this table: + +```shell +> open cities.txt | lines +━━━┯━━━━━━━━━━━━ + # │ +───┼──────────── + 0 │ Canberra + 1 │ London + 2 │ Nairobi + 3 │ Washington +━━━┷━━━━━━━━━━━━ +``` + +You can add a new row by using `append`: + +```shell +> open cities.txt | lines | append Beijing +━━━┯━━━━━━━━━━━━ + # │ +───┼──────────── + 0 │ Canberra + 1 │ London + 2 │ Nairobi + 3 │ Washington + 4 │ Beijing +━━━┷━━━━━━━━━━━━ +``` + +It's not possible to add multiple rows at once, so you'll need to call `append` multiple times: + +```shell +> open cities.txt | lines | append Beijing | append "Buenos Aires" +━━━┯━━━━━━━━━━━━━━ + # │ +───┼────────────── + 0 │ Canberra + 1 │ London + 2 │ Nairobi + 3 │ Washington + 4 │ Beijing + 5 │ Buenos Aires +━━━┷━━━━━━━━━━━━━━ +``` diff --git a/docs/commands/average.md b/docs/commands/average.md new file mode 100644 index 0000000000..d4095e518f --- /dev/null +++ b/docs/commands/average.md @@ -0,0 +1,45 @@ +# average +This command allows you to calculate the average of values in a column. + +## Examples +To get the average of the file sizes in a directory, simply pipe the size column from the ls command to the average command. + +```shell +> ls | get size | average +━━━━━━━━━ + +━━━━━━━━━ +2282.727272727273 +━━━━━━━━━ +``` + +```shell +> pwd | split-row / | size | get chars | average +━━━━━━━━━ + +━━━━━━━━━ +5.250000000000000 +━━━━━━━━━ +``` + +Note that average only works for integer and byte values. If the shell doesn't recognize the values in a column as one of those types, it will return an error. +One way to solve this is to convert each row to an integer when possible and then pipe the result to `average` + +```shell +> open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | average +error: Unrecognized type in stream: Primitive(String("2509000000")) +- shell:1:0 +1 | open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | average + | ^^^^ source +``` + +```shell +> open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | str --to-int | average +━━━━━━━━━━━━━━━━━━━ + +─────────────────── + 3239404444.000000 +━━━━━━━━━━━━━━━━━━━ +``` + + diff --git a/docs/commands/cd.md b/docs/commands/cd.md index 377733ba8f..2e5d933f47 100644 --- a/docs/commands/cd.md +++ b/docs/commands/cd.md @@ -2,7 +2,7 @@ If you didn't already know, the `cd` command is very simple. It stands for 'change directory' and it does exactly that. It changes the current directory to the one specified. If no directory is specified, it takes you to the home directory. Additionally, using `cd ..` takes you to the parent directory. -## Examples - +## Examples ```shell /home/username> cd Desktop @@ -21,5 +21,13 @@ If you didn't already know, the `cd` command is very simple. It stands for 'chan /home/username/Desktop/super/duper/crazy/nested/folders> cd /home/username> cd ../../usr /usr> cd -/home/username> +/home/username> +``` + +Using `cd -` will take you to the previous directory: + +```shell +/home/username/Desktop/super/duper/crazy/nested/folders> cd +/home/username> cd - +/home/username/Desktop/super/duper/crazy/nested/folders> cd ``` diff --git a/docs/commands/count.md b/docs/commands/count.md new file mode 100644 index 0000000000..e330dcc187 --- /dev/null +++ b/docs/commands/count.md @@ -0,0 +1,48 @@ +# count + +This command counts the number of rows in a table. 
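As a minimal sketch before the fuller examples below (reusing the four-city `cities.txt` file from the `append` examples; exact table framing may differ), `count` pairs naturally with `lines`:

```shell
> open cities.txt | lines | count
```

This should report 4, one row per city in the file.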
+ +## Examples - + +```shell +> ls +━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━ + # │ name │ type │ readonly │ size │ created │ accessed │ modified +────┼──────────────────────────────┼───────────┼──────────┼─────────┼──────────────┼──────────────┼────────────── + 0 │ Desktop │ Directory │ │ 4.1 KB │ 2 months ago │ 2 months ago │ 2 months ago + 1 │ aur │ Directory │ │ 4.1 KB │ 4 hours ago │ 4 hours ago │ 4 hours ago +... + 75 │ .emulator_console_auth_token │ File │ │ 16 B │ 2 months ago │ 2 months ago │ 2 months ago + 76 │ bin │ Directory │ │ 4.1 KB │ 2 months ago │ 2 months ago │ 2 months ago +━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━ +> ls | count +━━━━━━━━━ + +───────── + 77 +━━━━━━━━━ +> ls | get name | count +━━━━━━━━━ + +───────── + 77 +━━━━━━━━━ +> ls | where type == File | count +━━━━━━━━━ + +───────── + 29 +━━━━━━━━━ +> ls | where type == Directory | count +━━━━━━━━━ + +───────── + 48 +━━━━━━━━━ +> ls | where size > 2KB | count +━━━━━━━━━ + +───────── + 57 +━━━━━━━━━ +``` diff --git a/docs/commands/from-csv.md b/docs/commands/from-csv.md new file mode 100644 index 0000000000..b72818eefc --- /dev/null +++ b/docs/commands/from-csv.md @@ -0,0 +1,112 @@ +# from-csv + +Converts csv data into table. Use this when nushell cannot dertermine the input file extension. + +## Example + +Let's say we have the following file : + +```shell +> cat pets.txt +animal, name, age +cat, Tom, 7 +dog, Alfred, 10 +chameleon, Linda, 1 +``` + +`pets.txt` is actually a .csv file but it has the .txt extension, `open` is not able to convert it into a table : + +```shell +> open pets.txt +animal, name, age +cat, Tom, 7 +dog, Alfred, 10 +chameleon, Linda, 1 +``` + +To get a table from `pets.txt` we need to use the `from-csv` command : + +```shell +> open pets.txt | from-csv +━━━┯━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━ + # │ animal │ name │ age +───┼───────────┼─────────┼────── + 0 │ cat │ Tom │ 7 + 1 │ dog │ Alfred │ 10 + 2 │ chameleon │ Linda │ 1 +━━━┷━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━ +``` + +To ignore the csv headers use `--headerless` : + +```shell +━━━┯━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━ + # │ Column1 │ Column2 │ Column3 +───┼───────────┼─────────┼───────── + 0 │ dog │ Alfred │ 10 + 1 │ chameleon │ Linda │ 1 +━━━┷━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━ +``` + +To split on a character other than ',' use `--separator` : + +```shell +> open pets.txt +animal; name; age +cat; Tom; 7 +dog; Alfred; 10 +chameleon; Linda; 1 +``` + +```shell +> open pets.txt | from-csv --separator ';' +━━━┯━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━ + # │ animal │ name │ age +───┼───────────┼─────────┼────── + 0 │ cat │ Tom │ 7 + 1 │ dog │ Alfred │ 10 + 2 │ chameleon │ Linda │ 1 +━━━┷━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━ +``` + +To use this command to open a csv with separators other than a comma, use the `--raw` switch of `open` to open the csv, othewise the csv will enter `from-csv` as a table split on commas rather than raw text. 
+ +```shell +> mv pets.txt pets.csv +> open pets.csv | from-csv --separator ';' +error: Expected a string from pipeline +- shell:1:16 +1 | open pets.csv | from-csv --separator ';' + | ^^^^^^^^ requires string input +- shell:1:0 +1 | open pets.csv | from-csv --separator ';' + | value originates from here + +> open pets.csv --raw | from-csv --separator ';' +━━━┯━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━ + # │ animal │ name │ age +───┼───────────┼─────────┼────── + 0 │ cat │ Tom │ 7 + 1 │ dog │ Alfred │ 10 + 2 │ chameleon │ Linda │ 1 +━━━┷━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━ +``` + +Note that separators are currently provided as strings and need to be wrapped in quotes. + +```shell +> open pets.csv --raw | from-csv --separator ; +- shell:1:43 +1 | open pets.csv --raw | from-csv --separator ; + | ^ +``` + +It is also considered an error to use a separator greater than one char : + +```shell +> open pets.txt | from-csv --separator '123' +error: Expected a single separator char from --separator +- shell:1:37 +1 | open pets.txt | from-csv --separator '123' + | ^^^^^ requires a single character string input +``` diff --git a/docs/commands/from-toml.md b/docs/commands/from-toml.md new file mode 100644 index 0000000000..d3f3364c78 --- /dev/null +++ b/docs/commands/from-toml.md @@ -0,0 +1,23 @@ +# from-toml +Converts toml data into table. Use this when nushell cannot dertermine the input file extension. + +## Example +Let's say we have the following Rust .lock file : +```shell +> open Cargo.lock +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. [[package]] name = "adler32" version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" +... +``` + +The "Cargo.lock" file is actually a .toml file, but the file extension isn't .toml. That's okay, we can use the `from-toml` command : + + +```shell +> open Cargo.lock | from-toml +━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━ + metadata │ package +────────────────┼─────────────────── + [table: 1 row] │ [table: 154 rows] +━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━ +``` \ No newline at end of file diff --git a/docs/commands/group-by.md b/docs/commands/group-by.md new file mode 100644 index 0000000000..9628800031 --- /dev/null +++ b/docs/commands/group-by.md @@ -0,0 +1,72 @@ +# group-by + +This command creates a new table with the data from the table rows grouped by the column given. 
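Before the longer walkthrough below, a minimal hedged sketch: assuming the `ls` listings shown in these docs (which include a `type` column), a directory listing could be grouped by entry type like so:

```shell
> ls | group-by type
```

The result should be a table with one column per distinct value (for example `File` and `Directory`), each holding an inner table of the matching rows.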
+ +## Examples + +Let's say we have this table of all countries in the world sorted by their population: + +```shell +> open countries_by_population.json | from-json | first 10 +━━━┯━━━━━━┯━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━┯━━━━━━━━ + # │ rank │ country or area │ UN continental region │ UN statistical region │ population 2018 │ population 2019 │ change +───┼──────┼─────────────────┼───────────────────────┼───────────────────────┼─────────────────┼─────────────────┼──────── + 0 │ 1 │ China │ Asia │ Eastern Asia │ 1,427,647,786 │ 1,433,783,686 │ +0.4% + 1 │ 2 │ India │ Asia │ Southern Asia │ 1,352,642,280 │ 1,366,417,754 │ +1.0% + 2 │ 3 │ United States │ Americas │ Northern America │ 327,096,265 │ 329,064,917 │ +0.6% + 3 │ 4 │ Indonesia │ Asia │ South-eastern Asia │ 267,670,543 │ 270,625,568 │ +1.1% + 4 │ 5 │ Pakistan │ Asia │ Southern Asia │ 212,228,286 │ 216,565,318 │ +2.0% + 5 │ 6 │ Brazil │ Americas │ South America │ 209,469,323 │ 211,049,527 │ +0.8% + 6 │ 7 │ Nigeria │ Africa │ Western Africa │ 195,874,683 │ 200,963,599 │ +2.6% + 7 │ 8 │ Bangladesh │ Asia │ Southern Asia │ 161,376,708 │ 163,046,161 │ +1.0% + 8 │ 9 │ Russia │ Europe │ Eastern Europe │ 145,734,038 │ 145,872,256 │ +0.1% + 9 │ 10 │ Mexico │ Americas │ Central America │ 126,190,788 │ 127,575,529 │ +1.1% +━━━┷━━━━━━┷━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━┷━━━━━━━━ +``` + +Here we have listed only the first 10 lines. In total this table has got 233 rows which is to big to get information easily out of it. + +We can use the `group-by` command on 'UN statistical region' to create a table per continental region. + +```shell +> open countries_by_population.json | from-json | group-by "UN continental region" +━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━ + Asia │ Americas │ Africa │ Europe │ Oceania +──────────────────┼──────────────────┼──────────────────┼──────────────────┼────────────────── + [table: 51 rows] │ [table: 53 rows] │ [table: 58 rows] │ [table: 48 rows] │ [table: 23 rows] +━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━ +``` + +Now we can already get some informations like "which continental regions are there" and "how many countries are in each region". 
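For instance, to answer the second question for a single region (a hedged sketch that combines `group-by` with the `get` and `count` commands documented in this release):

```shell
> open countries_by_population.json | from-json | group-by "UN continental region" | get Oceania | count
```

This should return a one-row table with the number of countries in Oceania (23 in the data shown here).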
+If we want to see only the countries in the continental region of Oceania we can type: + +```shell +> open countries_by_population.json | from-json | group-by "UN continental region" | get Oceania +━━━━┯━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━┯━━━━━━━━ + # │ rank │ country or area │ UN continental region │ UN statistical region │ population 2018 │ population 2019 │ change +────┼──────┼────────────────────────────────┼───────────────────────┼───────────────────────────┼─────────────────┼─────────────────┼──────── + 0 │ 55 │ Australia │ Oceania │ Australia and New Zealand │ 24,898,152 │ 25,203,198 │ +1.2% + 1 │ 98 │ Papua New Guinea │ Oceania │ Melanesia │ 8,606,323 │ 8,776,109 │ +2.0% + 2 │ 125 │ New Zealand │ Oceania │ Australia and New Zealand │ 4,743,131 │ 4,783,063 │ +0.8% + 3 │ 161 │ Fiji │ Oceania │ Melanesia │ 883,483 │ 889,953 │ +0.7% + 4 │ 166 │ Solomon Islands │ Oceania │ Melanesia │ 652,857 │ 669,823 │ +2.6% + 5 │ 181 │ Vanuatu │ Oceania │ Melanesia │ 292,680 │ 299,882 │ +2.5% + 6 │ 183 │ New Caledonia │ Oceania │ Melanesia │ 279,993 │ 282,750 │ +1.0% + 7 │ 185 │ French Polynesia │ Oceania │ Polynesia │ 277,679 │ 279,287 │ +0.6% + 8 │ 188 │ Samoa │ Oceania │ Polynesia │ 196,129 │ 197,097 │ +0.5% + 9 │ 191 │ Guam │ Oceania │ Micronesia │ 165,768 │ 167,294 │ +0.9% + 10 │ 193 │ Kiribati │ Oceania │ Micronesia │ 115,847 │ 117,606 │ +1.5% + 11 │ 194 │ Federated States of Micronesia │ Oceania │ Micronesia │ 112,640 │ 113,815 │ +1.0% + 12 │ 196 │ Tonga │ Oceania │ Polynesia │ 110,589 │ 110,940 │ +0.3% + 13 │ 207 │ Marshall Islands │ Oceania │ Micronesia │ 58,413 │ 58,791 │ +0.6% + 14 │ 209 │ Northern Mariana Islands │ Oceania │ Micronesia │ 56,882 │ 56,188 │ −1.2% + 15 │ 210 │ American Samoa │ Oceania │ Polynesia │ 55,465 │ 55,312 │ −0.3% + 16 │ 221 │ Palau │ Oceania │ Micronesia │ 17,907 │ 18,008 │ +0.6% + 17 │ 222 │ Cook Islands │ Oceania │ Polynesia │ 17,518 │ 17,548 │ +0.2% + 18 │ 224 │ Tuvalu │ Oceania │ Polynesia │ 11,508 │ 11,646 │ +1.2% + 19 │ 225 │ Wallis and Futuna │ Oceania │ Polynesia │ 11,661 │ 11,432 │ −2.0% + 20 │ 226 │ Nauru │ Oceania │ Micronesia │ 10,670 │ 10,756 │ +0.8% + 21 │ 231 │ Niue │ Oceania │ Polynesia │ 1,620 │ 1,615 │ −0.3% + 22 │ 232 │ Tokelau │ Oceania │ Polynesia │ 1,319 │ 1,340 │ +1.6% +━━━━┷━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━┷━━━━━━━━ +``` diff --git a/docs/commands/pick.md b/docs/commands/pick.md new file mode 100644 index 0000000000..e0d0e4c079 --- /dev/null +++ b/docs/commands/pick.md @@ -0,0 +1,53 @@ +# pick + +This command displays only the column names passed on to it. 
+ +## Examples + +```shell +> ls +━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━ + # │ name │ type │ readonly │ size │ created │ accessed │ modified +───┼────────────────────────────┼──────┼──────────┼────────┼─────────────┼─────────────┼───────────── + 0 │ zeusiscrazy.txt │ File │ │ 556 B │ a month ago │ a month ago │ a month ago + 1 │ coww.txt │ File │ │ 24 B │ a month ago │ a month ago │ a month ago + 2 │ randomweirdstuff.txt │ File │ │ 197 B │ a month ago │ a month ago │ a month ago + 3 │ abaracadabra.txt │ File │ │ 401 B │ a month ago │ a month ago │ a month ago + 4 │ youshouldeatmorecereal.txt │ File │ │ 768 B │ a month ago │ a month ago │ a month ago +━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━ +> ls | pick name +━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + # │ name +───┼──────────────────────────── + 0 │ zeusiscrazy.txt + 1 │ coww.txt + 2 │ randomweirdstuff.txt + 3 │ abaracadabra.txt + 4 │ youshouldeatmorecereal.txt +━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +``` + +The order in which you put the column names matters: + +```shell +> ls | pick type name size +━━━┯━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━ + # │ type │ name │ size +───┼──────┼────────────────────────────┼──────── + 0 │ File │ zeusiscrazy.txt │ 556 B + 1 │ File │ coww.txt │ 24 B + 2 │ File │ randomweirdstuff.txt │ 197 B + 3 │ File │ abaracadabra.txt │ 401 B + 4 │ File │ youshouldeatmorecereal.txt │ 768 B +━━━┷━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━ +> ls | pick size type name +━━━┯━━━━━━━━┯━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + # │ size │ type │ name +───┼────────┼──────┼──────────────────────────── + 0 │ 556 B │ File │ zeusiscrazy.txt + 1 │ 24 B │ File │ coww.txt + 2 │ 197 B │ File │ randomweirdstuff.txt + 3 │ 401 B │ File │ abaracadabra.txt + 4 │ 768 B │ File │ youshouldeatmorecereal.txt +━━━┷━━━━━━━━┷━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +``` diff --git a/docs/commands/pivot.md b/docs/commands/pivot.md new file mode 100644 index 0000000000..632dae4e0d --- /dev/null +++ b/docs/commands/pivot.md @@ -0,0 +1,75 @@ +# pivot + +Pivots the table contents so rows become columns and columns become rows. 
+ +## Examples + +```sh +> ls docs +━━━┯━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━ + # │ name │ type │ readonly │ size │ accessed │ modified +───┼────────────────────┼───────────┼──────────┼────────┼─────────────┼───────────── + 0 │ docs/commands │ Directory │ │ 4.1 KB │ an hour ago │ an hour ago + 1 │ docs/docker.md │ File │ │ 7.0 KB │ an hour ago │ a day ago + 2 │ docs/philosophy.md │ File │ │ 896 B │ an hour ago │ a day ago +━━━┷━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━ + +> ls docs | pivot +━━━┯━━━━━━━━━━┯━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━ + # │ Column0 │ Column1 │ Column2 │ Column3 +───┼──────────┼───────────────┼────────────────┼──────────────────── + 0 │ name │ docs/commands │ docs/docker.md │ docs/philosophy.md + 1 │ type │ Directory │ File │ File + 2 │ readonly │ │ │ + 3 │ size │ 4.1 KB │ 7.0 KB │ 896 B + 4 │ accessed │ an hour ago │ an hour ago │ an hour ago + 5 │ modified │ an hour ago │ a day ago │ a day ago +━━━┷━━━━━━━━━━┷━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━ +``` + +Use `--header-row` to treat the first row as column names: + +```shell +> ls docs | pivot --header-row +━━━┯━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━ + # │ docs/commands │ docs/docker.md │ docs/philosophy.md +───┼───────────────┼────────────────┼──────────────────── + 0 │ Directory │ File │ File + 1 │ │ │ + 2 │ 4.1 KB │ 7.0 KB │ 896 B + 3 │ an hour ago │ an hour ago │ an hour ago + 4 │ an hour ago │ a day ago │ a day ago +━━━┷━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━ +``` + +Use `--ignore-titles` to prevent pivoting the column names into values: + +```shell +> ls docs | pivot --ignore-titles +━━━┯━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━ + # │ Column0 │ Column1 │ Column2 +───┼───────────────┼────────────────┼──────────────────── + 0 │ docs/commands │ docs/docker.md │ docs/philosophy.md + 1 │ Directory │ File │ File + 2 │ │ │ + 3 │ 4.1 KB │ 7.0 KB │ 896 B + 4 │ an hour ago │ an hour ago │ an hour ago + 5 │ an hour ago │ a day ago │ a day ago +━━━┷━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━ +``` + +Additional arguments are used as column names: + +```shell +> ls docs | pivot foo bar baz +━━━┯━━━━━━━━━━┯━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━ + # │ foo │ bar │ baz │ Column3 +───┼──────────┼───────────────┼────────────────┼──────────────────── + 0 │ name │ docs/commands │ docs/docker.md │ docs/philosophy.md + 1 │ type │ Directory │ File │ File + 2 │ readonly │ │ │ + 3 │ size │ 4.1 KB │ 7.0 KB │ 896 B + 4 │ accessed │ 2 hours ago │ 2 hours ago │ 2 hours ago + 5 │ modified │ 2 hours ago │ a day ago │ a day ago +━━━┷━━━━━━━━━━┷━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━ +``` diff --git a/docs/commands/prepend.md b/docs/commands/prepend.md new file mode 100644 index 0000000000..86b9249237 --- /dev/null +++ b/docs/commands/prepend.md @@ -0,0 +1,56 @@ +# prepend +This command prepends the given row to the front of the table + +**Note**: +- `prepend` does not change a file itself. 
If you want to save your changes, you need to run the `save` command +- if you want to add something containing a whitespace character, you need to put it in quotation marks + +## Examples + +Let's complete this table with the missing continents: + +```shell +> open continents.txt | lines +━━━┯━━━━━━━━━━━━━━━ + # │ +───┼─────────────── + 0 │ Africa + 1 │ South America + 2 │ Australia + 3 │ Europe + 4 │ Antarctica +━━━┷━━━━━━━━━━━━━━━ +``` + +You can add a new row at the top by using `prepend`: + +```shell +> open continents.txt | lines | prepend Asia +━━━┯━━━━━━━━━━━━━━━ + # │ +───┼─────────────── + 0 │ Asia + 1 │ Africa + 2 │ South America + 3 │ Australia + 4 │ Europe + 5 │ Antarctica +━━━┷━━━━━━━━━━━━━━━ +``` + +It's not possible to add multiple rows at once, so you'll need to call `prepend` multiple times: + +```shell +> open continents.txt | lines | prepend Asia | prepend "North America" +━━━┯━━━━━━━━━━━━━━━ + # │ +───┼─────────────── + 0 │ North America + 1 │ Asia + 2 │ Africa + 3 │ South America + 4 │ Australia + 5 │ Europe + 6 │ Antarctica +━━━┷━━━━━━━━━━━━━━━ +``` diff --git a/docs/commands/reject.md b/docs/commands/reject.md new file mode 100644 index 0000000000..0d474ee863 --- /dev/null +++ b/docs/commands/reject.md @@ -0,0 +1,38 @@ +# reject + +This column removes or rejects the columns passed to it. + +## Examples + +```shell +> ls +━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━ + # │ name │ type │ readonly │ size │ created │ accessed │ modified +───┼────────────────────────────┼──────┼──────────┼────────┼─────────────┼─────────────┼───────────── + 0 │ zeusiscrazy.txt │ File │ │ 556 B │ a month ago │ a month ago │ a month ago + 1 │ coww.txt │ File │ │ 24 B │ a month ago │ a month ago │ a month ago + 2 │ randomweirdstuff.txt │ File │ │ 197 B │ a month ago │ a month ago │ a month ago + 3 │ abaracadabra.txt │ File │ │ 401 B │ a month ago │ a month ago │ a month ago + 4 │ youshouldeatmorecereal.txt │ File │ │ 768 B │ a month ago │ a month ago │ a month ago +━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━ +> ls | reject readonly +━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━ + # │ name │ type │ size │ created │ accessed │ modified +───┼────────────────────────────┼──────┼────────┼─────────────┼─────────────┼───────────── + 0 │ zeusiscrazy.txt │ File │ 556 B │ a month ago │ a month ago │ a month ago + 1 │ coww.txt │ File │ 24 B │ a month ago │ a month ago │ a month ago + 2 │ randomweirdstuff.txt │ File │ 197 B │ a month ago │ a month ago │ a month ago + 3 │ abaracadabra.txt │ File │ 401 B │ a month ago │ a month ago │ a month ago + 4 │ youshouldeatmorecereal.txt │ File │ 768 B │ a month ago │ a month ago │ a month ago +━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━ +> ls | reject readonly accessed +━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━ + # │ name │ type │ size │ created │ modified +───┼────────────────────────────┼──────┼────────┼─────────────┼───────────── + 0 │ zeusiscrazy.txt │ File │ 556 B │ a month ago │ a month ago + 1 │ coww.txt │ File │ 24 B │ a month ago │ a month ago + 2 │ randomweirdstuff.txt │ File │ 197 B │ a month ago │ a month ago + 3 │ abaracadabra.txt │ File │ 401 B │ a month ago │ a month ago + 4 │ youshouldeatmorecereal.txt │ File │ 768 B │ a month ago │ a month ago 
+━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━
+```
diff --git a/docs/commands/size.md b/docs/commands/size.md
new file mode 100644
index 0000000000..02599dcce9
--- /dev/null
+++ b/docs/commands/size.md
@@ -0,0 +1,20 @@
+# size
+
+This command gives word count statistics on any text.
+
+## Examples
+
+```shell
+> open lalala.txt | size
+━━━━━━━┯━━━━━━━┯━━━━━━━┯━━━━━━━━━━━━
+ lines │ words │ chars │ max length
+───────┼───────┼───────┼────────────
+ 4     │ 10    │ 72    │ 72
+━━━━━━━┷━━━━━━━┷━━━━━━━┷━━━━━━━━━━━━
+> open the_mysterious_affair_at_styles.txt | size
+━━━━━━━┯━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━
+ lines │ words │ chars  │ max length
+───────┼───────┼────────┼────────────
+ 8935  │ 62352 │ 349459 │ 361771
+━━━━━━━┷━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━
+```
diff --git a/docs/commands/sort-by.md b/docs/commands/sort-by.md
new file mode 100644
index 0000000000..1f0f3da9ed
--- /dev/null
+++ b/docs/commands/sort-by.md
@@ -0,0 +1,56 @@
+
+# sort-by
+
+The `sort-by` command sorts the table being displayed in the terminal by one or more chosen columns.
+
+`sort-by` takes multiple arguments (the names of columns), sorting by each argument in order.
+
+
+## Examples
+
+```shell
+/home/example> ls | sort-by size
+━━━┯━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━
+ # │ name │ type │ readonly │ size   │ accessed       │ modified
+───┼──────┼──────┼──────────┼────────┼────────────────┼────────────────
+ 0 │ az   │ File │          │ 18 B   │ 4 minutes ago  │ 4 minutes ago
+ 1 │ a    │ File │          │ 18 B   │ 4 minutes ago  │ 38 minutes ago
+ 2 │ ad   │ File │          │ 18 B   │ 4 minutes ago  │ 4 minutes ago
+ 3 │ ac   │ File │          │ 18 B   │ 4 minutes ago  │ 4 minutes ago
+ 4 │ ab   │ File │          │ 18 B   │ 4 minutes ago  │ 4 minutes ago
+ 5 │ c    │ File │          │ 102 B  │ 35 minutes ago │ 35 minutes ago
+ 6 │ d    │ File │          │ 189 B  │ 35 minutes ago │ 34 minutes ago
+ 7 │ b    │ File │          │ 349 B  │ 35 minutes ago │ 35 minutes ago
+━━━┷━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━
+```
+
+```shell
+/home/example> ls | sort-by size name
+━━━┯━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━
+ # │ name │ type │ readonly │ size   │ accessed       │ modified
+───┼──────┼──────┼──────────┼────────┼────────────────┼────────────────
+ 0 │ a    │ File │          │ 18 B   │ 4 minutes ago  │ 39 minutes ago
+ 1 │ ab   │ File │          │ 18 B   │ 4 minutes ago  │ 4 minutes ago
+ 2 │ ac   │ File │          │ 18 B   │ 4 minutes ago  │ 4 minutes ago
+ 3 │ ad   │ File │          │ 18 B   │ 4 minutes ago  │ 4 minutes ago
+ 4 │ az   │ File │          │ 18 B   │ 4 minutes ago  │ 4 minutes ago
+ 5 │ c    │ File │          │ 102 B  │ 36 minutes ago │ 35 minutes ago
+ 6 │ d    │ File │          │ 189 B  │ 35 minutes ago │ 35 minutes ago
+ 7 │ b    │ File │          │ 349 B  │ 36 minutes ago │ 36 minutes ago
+```
+
+```shell
+/home/example> ls | sort-by accessed
+━━━┯━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━
+ # │ name │ type │ readonly │ size   │ accessed       │ modified
+───┼──────┼──────┼──────────┼────────┼────────────────┼────────────────
+ 0 │ b    │ File │          │ 349 B  │ 37 minutes ago │ 37 minutes ago
+ 1 │ c    │ File │          │ 102 B  │ 37 minutes ago │ 37 minutes ago
+ 2 │ d    │ File │          │ 189 B  │ 37 minutes ago │ 36 minutes ago
+ 3 │ a    │ File │          │ 18 B   │ 6 minutes ago  │ 40 minutes ago
+ 4 │ ab   │ File │          │ 18 B   │ 6 minutes ago  │ 6 minutes ago
+ 5 │ ac   │ File │          │ 18 B   │ 6 minutes ago  │ 6 minutes ago
+ 6 │ ad   │ File │          │ 18 B   │ 5 minutes ago  │ 5 minutes ago
+ 7 │ az   │ File │          │ 18 B   │ 5 minutes ago  │ 5 minutes ago
+━━━┷━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━
+```
\ No newline at end of file
diff --git a/docs/commands/str.md
b/docs/commands/str.md
new file mode 100644
index 0000000000..d1ed3edb3d
--- /dev/null
+++ b/docs/commands/str.md
@@ -0,0 +1,50 @@
+# str
+
+Consumes either a single value or a table, converts the provided data to a string, and optionally applies a change.
+
+## Examples
+
+```shell
+> shells
+━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+ # │   │ name       │ path
+───┼───┼────────────┼─────────────────────────────────
+ 0 │ X │ filesystem │ /home/TUX/stuff/expr/stuff
+ 1 │   │ filesystem │ /
+━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+> shells | str path --upcase
+━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+ # │   │ name       │ path
+───┼───┼────────────┼─────────────────────────────────
+ 0 │ X │ filesystem │ /HOME/TUX/STUFF/EXPR/STUFF
+ 1 │   │ filesystem │ /
+━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+> shells | str path --downcase
+━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+ # │   │ name       │ path
+───┼───┼────────────┼─────────────────────────────────
+ 0 │ X │ filesystem │ /home/tux/stuff/expr/stuff
+ 1 │   │ filesystem │ /
+━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+> shells | str # --substring "21, 99"
+━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+ # │   │ name       │ path
+───┼───┼────────────┼─────────────────────────────────
+ 0 │ X │ filesystem │ stuff
+ 1 │   │ filesystem │
+━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+> shells | str # --substring "6,"
+━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+ # │   │ name       │ path
+───┼───┼────────────┼─────────────────────────────────
+ 0 │ X │ filesystem │ TUX/stuff/expr/stuff
+ 1 │   │ filesystem │
+━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+> echo "1, 2, 3" | split-row "," | str --to-int | sum
+━━━━━━━━━
+
+─────────
+ 6
+━━━━━━━━━
+```
diff --git a/docs/commands/sum.md b/docs/commands/sum.md
new file mode 100644
index 0000000000..7482ca0c54
--- /dev/null
+++ b/docs/commands/sum.md
@@ -0,0 +1,44 @@
+# sum
+This command allows you to calculate the sum of values in a column.
+
+## Examples
+To get the sum of the file sizes in a directory, simply pipe the size column from the ls command to the sum command.
+
+```shell
+> ls | get size | sum
+━━━━━━━━━
+ value
+━━━━━━━━━
+ 51.0 MB
+━━━━━━━━━
+```
+
+To get the sum of the characters that make up your present working directory:
+```shell
+> pwd | split-row / | size | get chars | sum
+━━━━━━━━━
+
+━━━━━━━━━
+21
+━━━━━━━━━
+```
+
+Note that sum only works for integer and byte values. If the shell doesn't recognize the values in a column as one of those types, it will return an error.
+One way to solve this is to convert each row to an integer when possible and then pipe the result to `sum`:
+
+```shell
+> open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | sum
+error: Unrecognized type in stream: Primitive(String("2509000000"))
+- shell:1:0
+1 | open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | sum
+  | ^^^^ source
+```
+
+```shell
+> open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | str --to-int | sum
+━━━━━━━━━━━━━
+
+─────────────
+ 29154639996
+━━━━━━━━━━━━━
+```
diff --git a/docs/commands/tags.md b/docs/commands/tags.md
new file mode 100644
index 0000000000..2c80cc19cf
--- /dev/null
+++ b/docs/commands/tags.md
@@ -0,0 +1,47 @@
+# tags
+
+The tags command allows users to access the metadata of the previous value in
+the pipeline. This command may be run on multiple values of input as well.
+
+As of writing this, the only metadata returned includes:
+
+- `span`: the start and end indices of the previous value's substring location
+- `anchor`: the source where data was loaded from; this may not appear if the
+  previous pipeline value didn't actually have a source (like trying to `open` a
+  dir, or running `ls` on a dir)
+
+## Examples
+
+```shell
+> open README.md | tags
+━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+ span           │ anchor
+────────────────┼──────────────────────────────────────────────────
+ [table: 1 row] │ /Users/danielh/Projects/github/nushell/README.md
+━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+```
+
+```shell
+> open README.md | tags | get span
+━━━━━━━┯━━━━━
+ start │ end
+───────┼─────
+ 5     │ 14
+━━━━━━━┷━━━━━
+```
+
+```shell
+> ls | tags | first 3 | get span
+━━━┯━━━━━━━┯━━━━━
+ # │ start │ end
+───┼───────┼─────
+ 0 │ 0     │ 2
+ 1 │ 0     │ 2
+ 2 │ 0     │ 2
+━━━┷━━━━━━━┷━━━━━
+```
+
+## Reference
+
+More useful information on the `tags` command can be found by referencing [The
+Nu Book's entry on Metadata](https://book.nushell.sh/en/metadata)
diff --git a/features.toml b/features.toml
new file mode 100644
index 0000000000..6dd7a26c36
--- /dev/null
+++ b/features.toml
@@ -0,0 +1,21 @@
+[hintsv1]
+
+description = "Adding hints based upon error states in the syntax highlighter"
+enabled = false
+
+[coloring_in_tokens]
+
+description = "Move coloring into the TokensIterator so they can be atomic with the rest of the iterator"
+reason = """
+This is laying the groundwork for merging coloring and parsing. It also makes token_nodes.atomic() naturally
+work with coloring, which is pretty useful on its own.
+"""
+enabled = false
+
+[data_processing_primitives]
+
+description = "Groundwork so tables can be data processed"
+reason = """
+These will allow taking tables and transforming, processing, and exploring them.
+""" +enabled = false diff --git a/rust-toolchain b/rust-toolchain deleted file mode 100644 index c3a3f37794..0000000000 --- a/rust-toolchain +++ /dev/null @@ -1 +0,0 @@ -beta-2019-09-25 diff --git a/src/cli.rs b/src/cli.rs index 3ad6ad9c9b..dc83baf7b8 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -1,4 +1,3 @@ -use crate::commands::autoview; use crate::commands::classified::{ ClassifiedCommand, ClassifiedInputStream, ClassifiedPipeline, ExternalCommand, InternalCommand, StreamNext, @@ -14,18 +13,22 @@ use crate::fuzzysearch::{interactive_fuzzy_search, SelectionResult}; #[cfg(not(feature = "starship-prompt"))] use crate::git::current_branch; use crate::parser::registry::Signature; -use crate::parser::{hir, CallNode, Pipeline, PipelineElement, TokenNode}; +use crate::parser::{ + hir, + hir::syntax_shape::{expand_syntax, ExpandContext, PipelineShape}, + hir::{expand_external_tokens::ExternalTokensShape, tokens_iterator::TokensIterator}, + TokenNode, +}; use crate::prelude::*; -use log::{debug, trace}; +use log::{debug, log_enabled, trace}; use rustyline::error::ReadlineError; use rustyline::{self, config::Configurer, config::EditMode, ColorMode, Config, Editor}; -use std::env; use std::error::Error; use std::io::{BufRead, BufReader, Write}; use std::iter::Iterator; use std::path::PathBuf; -use std::sync::atomic::{AtomicBool, Ordering}; +use std::sync::atomic::Ordering; #[derive(Debug)] pub enum MaybeOwned<'a, T> { @@ -76,7 +79,7 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel let name = params.name.clone(); let fname = fname.to_string(); - if context.has_command(&name) { + if let Some(_) = context.get_command(&name) { trace!("plugin {:?} already loaded.", &name); } else { if params.is_filter { @@ -95,11 +98,17 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel }, Err(e) => { trace!("incompatible plugin {:?}", input); - Err(ShellError::string(format!("Error: {:?}", e))) + Err(ShellError::untagged_runtime_error(format!( + "Error: {:?}", + e + ))) } } } - Err(e) => Err(ShellError::string(format!("Error: {:?}", e))), + Err(e) => Err(ShellError::untagged_runtime_error(format!( + "Error: {:?}", + e + ))), }; let _ = child.wait(); @@ -110,13 +119,6 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel fn search_paths() -> Vec { let mut search_paths = Vec::new(); - match env::var_os("PATH") { - Some(paths) => { - search_paths = env::split_paths(&paths).collect::>(); - } - None => println!("PATH is not defined in the environment."), - } - #[cfg(debug_assertions)] { // Use our debug plugins in debug mode @@ -131,6 +133,15 @@ fn search_paths() -> Vec { #[cfg(not(debug_assertions))] { + use std::env; + + match env::var_os("PATH") { + Some(paths) => { + search_paths = env::split_paths(&paths).collect::>(); + } + None => println!("PATH is not defined in the environment."), + } + // Use our release plugins in release mode let mut path = std::path::PathBuf::from("."); path.push("target"); @@ -154,6 +165,8 @@ fn load_plugins(context: &mut Context) -> Result<(), ShellError> { require_literal_leading_dot: false, }; + set_env_from_config(); + for path in search_paths() { let mut pattern = path.to_path_buf(); @@ -249,13 +262,14 @@ pub async fn cli() -> Result<(), Box> { whole_stream_command(Next), whole_stream_command(Previous), whole_stream_command(Debug), - whole_stream_command(Lines), whole_stream_command(Shells), whole_stream_command(SplitColumn), whole_stream_command(SplitRow), whole_stream_command(Lines), 
whole_stream_command(Reject), whole_stream_command(Reverse), + whole_stream_command(Append), + whole_stream_command(Prepend), whole_stream_command(Trim), whole_stream_command(ToBSON), whole_stream_command(ToCSV), @@ -267,12 +281,15 @@ pub async fn cli() -> Result<(), Box> { whole_stream_command(ToURL), whole_stream_command(ToYAML), whole_stream_command(SortBy), + whole_stream_command(GroupBy), whole_stream_command(Tags), + whole_stream_command(Count), whole_stream_command(First), whole_stream_command(Last), whole_stream_command(Env), whole_stream_command(FromCSV), whole_stream_command(FromTSV), + whole_stream_command(FromSSV), whole_stream_command(FromINI), whole_stream_command(FromBSON), whole_stream_command(FromJSON), @@ -285,6 +302,7 @@ pub async fn cli() -> Result<(), Box> { whole_stream_command(FromYML), whole_stream_command(Pick), whole_stream_command(Get), + whole_stream_command(Histogram), per_item_command(Remove), per_item_command(Fetch), per_item_command(Open), @@ -295,6 +313,7 @@ pub async fn cli() -> Result<(), Box> { whole_stream_command(SkipWhile), per_item_command(Enter), per_item_command(Help), + per_item_command(History), whole_stream_command(Exit), whole_stream_command(Autoview), whole_stream_command(Pivot), @@ -303,11 +322,23 @@ pub async fn cli() -> Result<(), Box> { per_item_command(Mkdir), per_item_command(Move), whole_stream_command(Save), + whole_stream_command(SplitBy), whole_stream_command(Table), whole_stream_command(Version), whole_stream_command(Which), ]); + cfg_if::cfg_if! { + if #[cfg(data_processing_primitives)] { + context.add_commands(vec![ + whole_stream_command(ReduceBy), + whole_stream_command(EvaluateBy), + whole_stream_command(TSortBy), + whole_stream_command(MapMaxBy), + ]); + } + } + #[cfg(feature = "clipboard")] { context.add_commands(vec![whole_stream_command( @@ -315,6 +346,7 @@ pub async fn cli() -> Result<(), Box> { )]); } } + let _ = load_plugins(&mut context); let config = Config::builder().color_mode(ColorMode::Forced).build(); @@ -328,24 +360,21 @@ pub async fn cli() -> Result<(), Box> { // we are ok if history does not exist let _ = rl.load_history(&History::path()); - let ctrl_c = Arc::new(AtomicBool::new(false)); - let cc = ctrl_c.clone(); + let cc = context.ctrl_c.clone(); ctrlc::set_handler(move || { cc.store(true, Ordering::SeqCst); }) .expect("Error setting Ctrl-C handler"); let mut ctrlcbreak = false; loop { - if ctrl_c.load(Ordering::SeqCst) { - ctrl_c.store(false, Ordering::SeqCst); + if context.ctrl_c.load(Ordering::SeqCst) { + context.ctrl_c.store(false, Ordering::SeqCst); continue; } let cwd = context.shell_manager.path(); - rl.set_helper(Some(crate::shell::Helper::new( - context.shell_manager.clone(), - ))); + rl.set_helper(Some(crate::shell::Helper::new(context.clone()))); let edit_mode = config::config(Tag::unknown())? 
.get("edit_mode") @@ -416,6 +445,7 @@ pub async fn cli() -> Result<(), Box> { match process_line(readline, &mut context).await { LineResult::Success(line) => { rl.add_history_entry(line.clone()); + let _ = rl.save_history(&History::path()); } LineResult::CtrlC => { @@ -441,21 +471,12 @@ pub async fn cli() -> Result<(), Box> { } } - LineResult::Error(mut line, err) => { + LineResult::Error(line, err) => { rl.add_history_entry(line.clone()); - let diag = err.to_diagnostic(); + let _ = rl.save_history(&History::path()); + context.with_host(|host| { - let writer = host.err_termcolor(); - line.push_str(" "); - let files = crate::parser::Files::new(line); - let _ = std::panic::catch_unwind(move || { - let _ = language_reporting::emit( - &mut writer.lock(), - &files, - &diag, - &language_reporting::DefaultConfig, - ); - }); + print_err(err, host, &Text::from(line)); }) } @@ -472,6 +493,78 @@ pub async fn cli() -> Result<(), Box> { Ok(()) } +fn chomp_newline(s: &str) -> &str { + if s.ends_with('\n') { + &s[..s.len() - 1] + } else { + s + } +} + +fn set_env_from_config() { + let config = crate::data::config::read(Tag::unknown(), &None).unwrap(); + + if config.contains_key("env") { + // Clear the existing vars, we're about to replace them + for (key, _value) in std::env::vars() { + std::env::remove_var(key); + } + + let value = config.get("env"); + + match value { + Some(Tagged { + item: Value::Row(r), + .. + }) => { + for (k, v) in &r.entries { + match v.as_string() { + Ok(value_string) => { + std::env::set_var(k, value_string); + } + _ => {} + } + } + } + _ => {} + } + } + + if config.contains_key("path") { + // Override the path with what they give us from config + let value = config.get("path"); + + match value { + Some(value) => match value { + Tagged { + item: Value::Table(table), + .. 
+ } => { + let mut paths = vec![]; + for val in table { + let path_str = val.as_string(); + match path_str { + Err(_) => {} + Ok(path_str) => { + paths.push(PathBuf::from(path_str)); + } + } + } + let path_os_string = std::env::join_paths(&paths); + match path_os_string { + Ok(path_os_string) => { + std::env::set_var("PATH", path_os_string); + } + Err(_) => {} + } + } + _ => {} + }, + None => {} + } + } +} + enum LineResult { Success(String), Error(String, ShellError), @@ -484,9 +577,11 @@ async fn process_line(readline: Result, ctx: &mut Context Ok(line) if line.trim() == "" => LineResult::Success(line.clone()), Ok(line) => { - let result = match crate::parser::parse(&line, uuid::Uuid::nil()) { + let line = chomp_newline(line); + + let result = match crate::parser::parse(&line) { Err(err) => { - return LineResult::Error(line.clone(), err); + return LineResult::Error(line.to_string(), err); } Ok(val) => val, @@ -497,28 +592,32 @@ async fn process_line(readline: Result, ctx: &mut Context let mut pipeline = match classify_pipeline(&result, ctx, &Text::from(line)) { Ok(pipeline) => pipeline, - Err(err) => return LineResult::Error(line.clone(), err), + Err(err) => return LineResult::Error(line.to_string(), err), }; match pipeline.commands.last() { Some(ClassifiedCommand::External(_)) => {} _ => pipeline .commands + .item .push(ClassifiedCommand::Internal(InternalCommand { - command: whole_stream_command(autoview::Autoview), + name: "autoview".to_string(), name_tag: Tag::unknown(), args: hir::Call::new( Box::new(hir::Expression::synthetic_string("autoview")), None, None, - ), + ) + .spanned_unknown(), })), } let mut input = ClassifiedInputStream::new(); + let mut iter = pipeline.commands.item.into_iter().peekable(); - let mut iter = pipeline.commands.into_iter().peekable(); - let mut is_first_command = true; + // Check the config to see if we need to update the path + // TODO: make sure config is cached so we don't path this load every call + set_env_from_config(); loop { let item: Option = iter.next(); @@ -527,16 +626,24 @@ async fn process_line(readline: Result, ctx: &mut Context input = match (item, next) { (None, _) => break, + (Some(ClassifiedCommand::Dynamic(_)), _) + | (_, Some(ClassifiedCommand::Dynamic(_))) => { + return LineResult::Error( + line.to_string(), + ShellError::unimplemented("Dynamic commands"), + ) + } + (Some(ClassifiedCommand::Expr(_)), _) => { return LineResult::Error( - line.clone(), + line.to_string(), ShellError::unimplemented("Expression-only commands"), ) } (_, Some(ClassifiedCommand::Expr(_))) => { return LineResult::Error( - line.clone(), + line.to_string(), ShellError::unimplemented("Expression-only commands"), ) } @@ -544,31 +651,46 @@ async fn process_line(readline: Result, ctx: &mut Context ( Some(ClassifiedCommand::Internal(left)), Some(ClassifiedCommand::External(_)), - ) => match left - .run(ctx, input, Text::from(line), is_first_command) - .await - { + ) => match left.run(ctx, input, Text::from(line)) { Ok(val) => ClassifiedInputStream::from_input_stream(val), - Err(err) => return LineResult::Error(line.clone(), err), + Err(err) => return LineResult::Error(line.to_string(), err), }, (Some(ClassifiedCommand::Internal(left)), Some(_)) => { - match left - .run(ctx, input, Text::from(line), is_first_command) - .await - { + match left.run(ctx, input, Text::from(line)) { Ok(val) => ClassifiedInputStream::from_input_stream(val), - Err(err) => return LineResult::Error(line.clone(), err), + Err(err) => return LineResult::Error(line.to_string(), err), } } 
(Some(ClassifiedCommand::Internal(left)), None) => { - match left - .run(ctx, input, Text::from(line), is_first_command) - .await - { - Ok(val) => ClassifiedInputStream::from_input_stream(val), - Err(err) => return LineResult::Error(line.clone(), err), + match left.run(ctx, input, Text::from(line)) { + Ok(val) => { + use futures::stream::TryStreamExt; + + let mut output_stream: OutputStream = val.into(); + loop { + match output_stream.try_next().await { + Ok(Some(ReturnSuccess::Value(Tagged { + item: Value::Error(e), + .. + }))) => { + return LineResult::Error(line.to_string(), e); + } + Ok(Some(_item)) => { + if ctx.ctrl_c.load(Ordering::SeqCst) { + break; + } + } + _ => { + break; + } + } + } + + return LineResult::Success(line.to_string()); + } + Err(err) => return LineResult::Error(line.to_string(), err), } } @@ -577,28 +699,26 @@ async fn process_line(readline: Result, ctx: &mut Context Some(ClassifiedCommand::External(_)), ) => match left.run(ctx, input, StreamNext::External).await { Ok(val) => val, - Err(err) => return LineResult::Error(line.clone(), err), + Err(err) => return LineResult::Error(line.to_string(), err), }, (Some(ClassifiedCommand::External(left)), Some(_)) => { match left.run(ctx, input, StreamNext::Internal).await { Ok(val) => val, - Err(err) => return LineResult::Error(line.clone(), err), + Err(err) => return LineResult::Error(line.to_string(), err), } } (Some(ClassifiedCommand::External(left)), None) => { match left.run(ctx, input, StreamNext::Last).await { Ok(val) => val, - Err(err) => return LineResult::Error(line.clone(), err), + Err(err) => return LineResult::Error(line.to_string(), err), } } }; - - is_first_command = false; } - LineResult::Success(line.clone()) + LineResult::Success(line.to_string()) } Err(ReadlineError::Interrupted) => LineResult::CtrlC, Err(ReadlineError::Eof) => LineResult::Break, @@ -614,95 +734,62 @@ fn classify_pipeline( context: &Context, source: &Text, ) -> Result { - let pipeline = pipeline.as_pipeline()?; + let mut pipeline_list = vec![pipeline.clone()]; + let mut iterator = TokensIterator::all(&mut pipeline_list, pipeline.span()); - let Pipeline { parts, .. 
} = pipeline; + let result = expand_syntax( + &PipelineShape, + &mut iterator, + &context.expand_context(source), + ) + .map_err(|err| err.into()); - let commands: Result, ShellError> = parts - .iter() - .map(|item| classify_command(&item, context, &source)) - .collect(); - - Ok(ClassifiedPipeline { - commands: commands?, - }) -} - -fn classify_command( - command: &PipelineElement, - context: &Context, - source: &Text, -) -> Result { - let call = command.call(); - - match call { - // If the command starts with `^`, treat it as an external command no matter what - call if call.head().is_external() => { - let name_tag = call.head().expect_external(); - let name = name_tag.slice(source); - - Ok(external_command(call, source, name.tagged(name_tag))) - } - - // Otherwise, if the command is a bare word, we'll need to triage it - call if call.head().is_bare() => { - let head = call.head(); - let name = head.source(source); - - match context.has_command(name) { - // if the command is in the registry, it's an internal command - true => { - let command = context.get_command(name); - let config = command.signature(); - - trace!(target: "nu::build_pipeline", "classifying {:?}", config); - - let args: hir::Call = config.parse_args(call, &context, source)?; - - trace!(target: "nu::build_pipeline", "args :: {}", args.debug(source)); - - Ok(ClassifiedCommand::Internal(InternalCommand { - command, - name_tag: head.tag(), - args, - })) - } - - // otherwise, it's an external command - false => Ok(external_command(call, source, name.tagged(head.tag()))), - } - } - - // If the command is something else (like a number or a variable), that is currently unsupported. - // We might support `$somevar` as a curried command in the future. - call => Err(ShellError::invalid_command(call.head().tag())), + if log_enabled!(target: "nu::expand_syntax", log::Level::Debug) { + println!(""); + ptree::print_tree(&iterator.expand_tracer().print(source.clone())).unwrap(); + println!(""); } + + result } // Classify this command as an external command, which doesn't give special meaning // to nu syntactic constructs, and passes all arguments to the external command as // strings. 
-fn external_command( - call: &Tagged, - source: &Text, +pub(crate) fn external_command( + tokens: &mut TokensIterator, + context: &ExpandContext, name: Tagged<&str>, -) -> ClassifiedCommand { - let arg_list_strings: Vec> = match call.children() { - Some(args) => args - .iter() - .filter_map(|i| match i { - TokenNode::Whitespace(_) => None, - other => Some(other.as_external_arg(source).tagged(other.tag())), - }) - .collect(), - None => vec![], - }; +) -> Result { + let Spanned { item, span } = expand_syntax(&ExternalTokensShape, tokens, context)?; - let (name, tag) = name.into_parts(); - - ClassifiedCommand::External(ExternalCommand { + Ok(ClassifiedCommand::External(ExternalCommand { name: name.to_string(), - name_tag: tag, - args: arg_list_strings, - }) + name_tag: name.tag(), + args: item + .iter() + .map(|x| Tagged { + tag: x.span.into(), + item: x.item.clone(), + }) + .collect::>() + .spanned(span), + })) +} + +pub fn print_err(err: ShellError, host: &dyn Host, source: &Text) { + let diag = err.to_diagnostic(); + + let writer = host.err_termcolor(); + let mut source = source.to_string(); + source.push_str(" "); + let files = crate::parser::Files::new(source); + let _ = std::panic::catch_unwind(move || { + let _ = language_reporting::emit( + &mut writer.lock(), + &files, + &diag, + &language_reporting::DefaultConfig, + ); + }); } diff --git a/src/commands.rs b/src/commands.rs index 72c07e38e6..c238b451d8 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -1,6 +1,9 @@ #[macro_use] pub(crate) mod macros; +mod from_structured_data; + +pub(crate) mod append; pub(crate) mod args; pub(crate) mod autoview; pub(crate) mod cd; @@ -8,12 +11,15 @@ pub(crate) mod classified; pub(crate) mod clip; pub(crate) mod command; pub(crate) mod config; +pub(crate) mod count; pub(crate) mod cp; pub(crate) mod date; pub(crate) mod debug; pub(crate) mod echo; pub(crate) mod enter; pub(crate) mod env; +#[allow(unused)] +pub(crate) mod evaluate_by; pub(crate) mod exit; pub(crate) mod fetch; pub(crate) mod first; @@ -22,16 +28,22 @@ pub(crate) mod from_csv; pub(crate) mod from_ini; pub(crate) mod from_json; pub(crate) mod from_sqlite; +pub(crate) mod from_ssv; pub(crate) mod from_toml; pub(crate) mod from_tsv; pub(crate) mod from_url; pub(crate) mod from_xml; pub(crate) mod from_yaml; pub(crate) mod get; +pub(crate) mod group_by; pub(crate) mod help; +pub(crate) mod histogram; +pub(crate) mod history; pub(crate) mod last; pub(crate) mod lines; pub(crate) mod ls; +#[allow(unused)] +pub(crate) mod map_max_by; pub(crate) mod mkdir; pub(crate) mod mv; pub(crate) mod next; @@ -41,8 +53,11 @@ pub(crate) mod pick; pub(crate) mod pivot; pub(crate) mod plugin; pub(crate) mod post; +pub(crate) mod prepend; pub(crate) mod prev; pub(crate) mod pwd; +#[allow(unused)] +pub(crate) mod reduce_by; pub(crate) mod reject; pub(crate) mod reverse; pub(crate) mod rm; @@ -51,8 +66,11 @@ pub(crate) mod shells; pub(crate) mod size; pub(crate) mod skip_while; pub(crate) mod sort_by; +pub(crate) mod split_by; pub(crate) mod split_column; pub(crate) mod split_row; +#[allow(unused)] +pub(crate) mod t_sort_by; pub(crate) mod table; pub(crate) mod tags; pub(crate) mod to_bson; @@ -75,13 +93,18 @@ pub(crate) use command::{ UnevaluatedCallInfo, WholeStreamCommand, }; +pub(crate) use append::Append; +pub(crate) use classified::ClassifiedCommand; pub(crate) use config::Config; +pub(crate) use count::Count; pub(crate) use cp::Cpy; pub(crate) use date::Date; pub(crate) use debug::Debug; pub(crate) use echo::Echo; pub(crate) use enter::Enter; 
pub(crate) use env::Env; +#[allow(unused)] +pub(crate) use evaluate_by::EvaluateBy; pub(crate) use exit::Exit; pub(crate) use fetch::Fetch; pub(crate) use first::First; @@ -91,6 +114,7 @@ pub(crate) use from_ini::FromINI; pub(crate) use from_json::FromJSON; pub(crate) use from_sqlite::FromDB; pub(crate) use from_sqlite::FromSQLite; +pub(crate) use from_ssv::FromSSV; pub(crate) use from_toml::FromTOML; pub(crate) use from_tsv::FromTSV; pub(crate) use from_url::FromURL; @@ -98,10 +122,15 @@ pub(crate) use from_xml::FromXML; pub(crate) use from_yaml::FromYAML; pub(crate) use from_yaml::FromYML; pub(crate) use get::Get; +pub(crate) use group_by::GroupBy; pub(crate) use help::Help; +pub(crate) use histogram::Histogram; +pub(crate) use history::History; pub(crate) use last::Last; pub(crate) use lines::Lines; pub(crate) use ls::LS; +#[allow(unused)] +pub(crate) use map_max_by::MapMaxBy; pub(crate) use mkdir::Mkdir; pub(crate) use mv::Move; pub(crate) use next::Next; @@ -110,8 +139,11 @@ pub(crate) use open::Open; pub(crate) use pick::Pick; pub(crate) use pivot::Pivot; pub(crate) use post::Post; +pub(crate) use prepend::Prepend; pub(crate) use prev::Previous; pub(crate) use pwd::PWD; +#[allow(unused)] +pub(crate) use reduce_by::ReduceBy; pub(crate) use reject::Reject; pub(crate) use reverse::Reverse; pub(crate) use rm::Remove; @@ -120,8 +152,11 @@ pub(crate) use shells::Shells; pub(crate) use size::Size; pub(crate) use skip_while::SkipWhile; pub(crate) use sort_by::SortBy; +pub(crate) use split_by::SplitBy; pub(crate) use split_column::SplitColumn; pub(crate) use split_row::SplitRow; +#[allow(unused)] +pub(crate) use t_sort_by::TSortBy; pub(crate) use table::Table; pub(crate) use tags::Tags; pub(crate) use to_bson::ToBSON; diff --git a/src/commands/append.rs b/src/commands/append.rs new file mode 100644 index 0000000000..fe22c9065e --- /dev/null +++ b/src/commands/append.rs @@ -0,0 +1,47 @@ +use crate::commands::WholeStreamCommand; +use crate::errors::ShellError; +use crate::parser::CommandRegistry; +use crate::prelude::*; + +#[derive(Deserialize)] +struct AppendArgs { + row: Tagged, +} + +pub struct Append; + +impl WholeStreamCommand for Append { + fn name(&self) -> &str { + "append" + } + + fn signature(&self) -> Signature { + Signature::build("append").required( + "row value", + SyntaxShape::Any, + "the value of the row to append to the table", + ) + } + + fn usage(&self) -> &str { + "Append the given row to the table" + } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + args.process(registry, append)?.run() + } +} + +fn append( + AppendArgs { row }: AppendArgs, + RunnableContext { input, .. 
}: RunnableContext, +) -> Result { + let mut after: VecDeque> = VecDeque::new(); + after.push_back(row); + + Ok(OutputStream::from_input(input.values.chain(after))) +} diff --git a/src/commands/autoview.rs b/src/commands/autoview.rs index a0e7e9a8a5..774dfcf88a 100644 --- a/src/commands/autoview.rs +++ b/src/commands/autoview.rs @@ -1,9 +1,14 @@ use crate::commands::{RawCommandArgs, WholeStreamCommand}; use crate::errors::ShellError; +use crate::parser::hir::{Expression, NamedArguments}; use crate::prelude::*; +use futures::stream::TryStreamExt; +use std::sync::atomic::Ordering; pub struct Autoview; +const STREAM_PAGE_SIZE: u64 = 50; + #[derive(Deserialize)] pub struct AutoviewArgs {} @@ -31,61 +36,132 @@ impl WholeStreamCommand for Autoview { pub fn autoview( AutoviewArgs {}: AutoviewArgs, - mut context: RunnableContext, + context: RunnableContext, raw: RawCommandArgs, ) -> Result { - Ok(OutputStream::new(async_stream! { - let input = context.input.drain_vec().await; + let binary = context.get_command("binaryview"); + let text = context.get_command("textview"); + let table = context.get_command("table"); - if input.len() > 0 { - if let Tagged { - item: Value::Primitive(Primitive::Binary(_)), - .. - } = input[0usize] - { - let binary = context.get_command("binaryview"); - if let Some(binary) = binary { - let result = binary.run(raw.with_input(input), &context.commands, false); - result.collect::>().await; - } else { - for i in input { - match i.item { - Value::Primitive(Primitive::Binary(b)) => { - use pretty_hex::*; - println!("{:?}", b.hex_dump()); + Ok(OutputStream::new(async_stream! { + let mut output_stream: OutputStream = context.input.into(); + + match output_stream.try_next().await { + Ok(Some(x)) => { + match output_stream.try_next().await { + Ok(Some(y)) => { + let ctrl_c = context.ctrl_c.clone(); + let stream = async_stream! 
{ + yield Ok(x); + yield Ok(y); + + loop { + match output_stream.try_next().await { + Ok(Some(z)) => { + if ctrl_c.load(Ordering::SeqCst) { + break; + } + yield Ok(z); + } + _ => break, + } + } + }; + if let Some(table) = table { + let mut new_output_stream: OutputStream = stream.to_output_stream(); + let mut finished = false; + let mut current_idx = 0; + loop { + let mut new_input = VecDeque::new(); + + for _ in 0..STREAM_PAGE_SIZE { + match new_output_stream.try_next().await { + + Ok(Some(a)) => { + if let ReturnSuccess::Value(v) = a { + new_input.push_back(v); + } + } + _ => { + finished = true; + break; + } + } + } + + let raw = raw.clone(); + + let mut command_args = raw.with_input(new_input.into()); + let mut named_args = NamedArguments::new(); + named_args.insert_optional("start_number", Some(Expression::number(current_idx, Tag::unknown()))); + command_args.call_info.args.named = Some(named_args); + + let result = table.run(command_args, &context.commands); + result.collect::>().await; + + if finished { + break; + } else { + current_idx += STREAM_PAGE_SIZE; + } } - _ => {} } } - }; - } else if is_single_anchored_text_value(&input) { - let text = context.get_command("textview"); - if let Some(text) = text { - let result = text.run(raw.with_input(input), &context.commands, false); - result.collect::>().await; - } else { - for i in input { - match i.item { - Value::Primitive(Primitive::String(s)) => { - println!("{}", s); + _ => { + if let ReturnSuccess::Value(x) = x { + match x { + Tagged { + item: Value::Primitive(Primitive::String(ref s)), + tag: Tag { anchor, span }, + } if anchor.is_some() => { + if let Some(text) = text { + let mut stream = VecDeque::new(); + stream.push_back(Value::string(s).tagged(Tag { anchor, span })); + let result = text.run(raw.with_input(stream.into()), &context.commands); + result.collect::>().await; + } else { + println!("{}", s); + } + } + Tagged { + item: Value::Primitive(Primitive::String(s)), + .. + } => { + println!("{}", s); + } + + Tagged { item: Value::Primitive(Primitive::Binary(ref b)), .. } => { + if let Some(binary) = binary { + let mut stream = VecDeque::new(); + stream.push_back(x.clone()); + let result = binary.run(raw.with_input(stream.into()), &context.commands); + result.collect::>().await; + } else { + use pretty_hex::*; + println!("{:?}", b.hex_dump()); + } + } + + Tagged { item: Value::Error(e), .. } => { + yield Err(e); + } + Tagged { item: ref item, .. } => { + if let Some(table) = table { + let mut stream = VecDeque::new(); + stream.push_back(x.clone()); + let result = table.run(raw.with_input(stream.into()), &context.commands); + result.collect::>().await; + } else { + println!("{:?}", item); + } + } } - _ => {} } } } - } else if is_single_text_value(&input) { - for i in input { - match i.item { - Value::Primitive(Primitive::String(s)) => { - println!("{}", s); - } - _ => {} - } - } - } else { - let table = context.expect_command("table"); - let result = table.run(raw.with_input(input), &context.commands, false); - result.collect::>().await; + } + _ => { + //println!(""); } } @@ -95,34 +171,3 @@ pub fn autoview( } })) } - -fn is_single_text_value(input: &Vec>) -> bool { - if input.len() != 1 { - return false; - } - if let Tagged { - item: Value::Primitive(Primitive::String(_)), - .. - } = input[0] - { - true - } else { - false - } -} - -fn is_single_anchored_text_value(input: &Vec>) -> bool { - if input.len() != 1 { - return false; - } - - if let Tagged { - item: Value::Primitive(Primitive::String(_)), - tag: Tag { anchor, .. 
}, - } = input[0] - { - anchor != uuid::Uuid::nil() - } else { - false - } -} diff --git a/src/commands/cd.rs b/src/commands/cd.rs index a3f5a8d89b..65cc45231d 100644 --- a/src/commands/cd.rs +++ b/src/commands/cd.rs @@ -10,7 +10,11 @@ impl WholeStreamCommand for CD { } fn signature(&self) -> Signature { - Signature::build("cd").optional("directory", SyntaxShape::Path) + Signature::build("cd").optional( + "directory", + SyntaxShape::Path, + "the directory to change to", + ) } fn usage(&self) -> &str { diff --git a/src/commands/classified.rs b/src/commands/classified.rs index 0e5cd95d8d..0691a68a35 100644 --- a/src/commands/classified.rs +++ b/src/commands/classified.rs @@ -1,12 +1,13 @@ -use crate::commands::Command; use crate::parser::{hir, TokenNode}; use crate::prelude::*; use bytes::{BufMut, BytesMut}; +use derive_new::new; use futures::stream::StreamExt; use futures_codec::{Decoder, Encoder, Framed}; +use itertools::Itertools; use log::{log_enabled, trace}; +use std::fmt; use std::io::{Error, ErrorKind}; -use std::sync::Arc; use subprocess::Exec; /// A simple `Codec` implementation that splits up data into lines. @@ -53,7 +54,7 @@ pub(crate) struct ClassifiedInputStream { impl ClassifiedInputStream { pub(crate) fn new() -> ClassifiedInputStream { ClassifiedInputStream { - objects: VecDeque::new().into(), + objects: vec![Value::nothing().tagged(Tag::unknown())].into(), stdin: None, } } @@ -73,125 +74,212 @@ impl ClassifiedInputStream { } } +#[derive(Debug, Clone)] pub(crate) struct ClassifiedPipeline { - pub(crate) commands: Vec, + pub(crate) commands: Spanned>, } +impl FormatDebug for ClassifiedPipeline { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + f.say_str( + "classified pipeline", + self.commands.iter().map(|c| c.debug(source)).join(" | "), + ) + } +} + +impl HasSpan for ClassifiedPipeline { + fn span(&self) -> Span { + self.commands.span + } +} + +#[derive(Debug, Clone, Eq, PartialEq)] pub(crate) enum ClassifiedCommand { #[allow(unused)] Expr(TokenNode), Internal(InternalCommand), + #[allow(unused)] + Dynamic(Spanned), External(ExternalCommand), } +impl FormatDebug for ClassifiedCommand { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + match self { + ClassifiedCommand::Expr(expr) => expr.fmt_debug(f, source), + ClassifiedCommand::Internal(internal) => internal.fmt_debug(f, source), + ClassifiedCommand::Dynamic(dynamic) => dynamic.fmt_debug(f, source), + ClassifiedCommand::External(external) => external.fmt_debug(f, source), + } + } +} + +impl HasSpan for ClassifiedCommand { + fn span(&self) -> Span { + match self { + ClassifiedCommand::Expr(node) => node.span(), + ClassifiedCommand::Internal(command) => command.span(), + ClassifiedCommand::Dynamic(call) => call.span, + ClassifiedCommand::External(command) => command.span(), + } + } +} + +#[derive(new, Debug, Clone, Eq, PartialEq)] pub(crate) struct InternalCommand { - pub(crate) command: Arc, + pub(crate) name: String, pub(crate) name_tag: Tag, + pub(crate) args: Spanned, +} + +impl HasSpan for InternalCommand { + fn span(&self) -> Span { + let start = self.name_tag.span; + + start.until(self.args.span) + } +} + +impl FormatDebug for InternalCommand { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + f.say("internal", self.args.debug(source)) + } +} + +#[derive(new, Debug, Eq, PartialEq)] +pub(crate) struct DynamicCommand { pub(crate) args: hir::Call, } impl InternalCommand { - pub(crate) async fn run( + pub(crate) fn run( self, context: 
&mut Context, input: ClassifiedInputStream, source: Text, - is_first_command: bool, ) -> Result { if log_enabled!(log::Level::Trace) { trace!(target: "nu::run::internal", "->"); - trace!(target: "nu::run::internal", "{}", self.command.name()); + trace!(target: "nu::run::internal", "{}", self.name); trace!(target: "nu::run::internal", "{}", self.args.debug(&source)); } let objects: InputStream = trace_stream!(target: "nu::trace_stream::internal", "input" = input.objects); - let result = context.run_command( - self.command, - self.name_tag.clone(), - context.source_map.clone(), - self.args, - &source, - objects, - is_first_command, - ); + let command = context.expect_command(&self.name); + + let result = { + context.run_command( + command, + self.name_tag.clone(), + self.args.item, + &source, + objects, + ) + }; let result = trace_out_stream!(target: "nu::trace_stream::internal", source: &source, "output" = result); let mut result = result.values; + let mut context = context.clone(); - let mut stream = VecDeque::new(); - while let Some(item) = result.next().await { - match item? { - ReturnSuccess::Action(action) => match action { - CommandAction::ChangePath(path) => { - context.shell_manager.set_path(path); - } - CommandAction::AddAnchorLocation(uuid, anchor_location) => { - context.add_anchor_location(uuid, anchor_location); - } - CommandAction::Exit => std::process::exit(0), // TODO: save history.txt - CommandAction::EnterHelpShell(value) => { - match value { - Tagged { - item: Value::Primitive(Primitive::String(cmd)), - tag, - } => { - context.shell_manager.insert_at_current(Box::new( - HelpShell::for_command( - Value::string(cmd).tagged(tag), - &context.registry(), - )?, - )); - } - _ => { - context.shell_manager.insert_at_current(Box::new( - HelpShell::index(&context.registry())?, - )); + let stream = async_stream! 
{ + while let Some(item) = result.next().await { + match item { + Ok(ReturnSuccess::Action(action)) => match action { + CommandAction::ChangePath(path) => { + context.shell_manager.set_path(path); + } + CommandAction::Exit => std::process::exit(0), // TODO: save history.txt + CommandAction::EnterHelpShell(value) => { + match value { + Tagged { + item: Value::Primitive(Primitive::String(cmd)), + tag, + } => { + context.shell_manager.insert_at_current(Box::new( + HelpShell::for_command( + Value::string(cmd).tagged(tag), + &context.registry(), + ).unwrap(), + )); + } + _ => { + context.shell_manager.insert_at_current(Box::new( + HelpShell::index(&context.registry()).unwrap(), + )); + } } } - } - CommandAction::EnterValueShell(value) => { - context - .shell_manager - .insert_at_current(Box::new(ValueShell::new(value))); - } - CommandAction::EnterShell(location) => { - context.shell_manager.insert_at_current(Box::new( - FilesystemShell::with_location(location, context.registry().clone())?, - )); - } - CommandAction::PreviousShell => { - context.shell_manager.prev(); - } - CommandAction::NextShell => { - context.shell_manager.next(); - } - CommandAction::LeaveShell => { - context.shell_manager.remove_at_current(); - if context.shell_manager.is_empty() { - std::process::exit(0); // TODO: save history.txt + CommandAction::EnterValueShell(value) => { + context + .shell_manager + .insert_at_current(Box::new(ValueShell::new(value))); } - } - }, + CommandAction::EnterShell(location) => { + context.shell_manager.insert_at_current(Box::new( + FilesystemShell::with_location(location, context.registry().clone()).unwrap(), + )); + } + CommandAction::PreviousShell => { + context.shell_manager.prev(); + } + CommandAction::NextShell => { + context.shell_manager.next(); + } + CommandAction::LeaveShell => { + context.shell_manager.remove_at_current(); + if context.shell_manager.is_empty() { + std::process::exit(0); // TODO: save history.txt + } + } + }, - ReturnSuccess::Value(v) => { - stream.push_back(v); + Ok(ReturnSuccess::Value(v)) => { + yield Ok(v); + } + + Err(x) => { + yield Ok(Value::Error(x).tagged_unknown()); + break; + } } } - } + }; - Ok(stream.into()) + Ok(stream.to_input_stream()) } } +#[derive(Debug, Clone, Eq, PartialEq)] pub(crate) struct ExternalCommand { pub(crate) name: String, pub(crate) name_tag: Tag, - pub(crate) args: Vec>, + pub(crate) args: Spanned>>, } +impl FormatDebug for ExternalCommand { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + write!(f, "{}", self.name)?; + + if self.args.item.len() > 0 { + write!(f, " ")?; + write!(f, "{}", self.args.iter().map(|i| i.debug(source)).join(" "))?; + } + + Ok(()) + } +} + +impl HasSpan for ExternalCommand { + fn span(&self) -> Span { + self.name_tag.span.until(self.args.span) + } +} + +#[derive(Debug)] pub(crate) enum StreamNext { Last, External, @@ -207,58 +295,57 @@ impl ExternalCommand { ) -> Result { let stdin = input.stdin; let inputs: Vec> = input.objects.into_vec().await; - let name_tag = self.name_tag.clone(); trace!(target: "nu::run::external", "-> {}", self.name); trace!(target: "nu::run::external", "inputs = {:?}", inputs); let mut arg_string = format!("{}", self.name); - for arg in &self.args { + for arg in &self.args.item { arg_string.push_str(&arg); } + trace!(target: "nu::run::external", "command = {:?}", self.name); + let mut process; - - process = Exec::cmd(&self.name); - if arg_string.contains("$it") { - let mut first = true; - - for i in &inputs { - if i.as_string().is_err() { - let mut tag = 
None; - for arg in &self.args { - if arg.item.contains("$it") { - tag = Some(arg.tag()); + let input_strings = inputs + .iter() + .map(|i| { + i.as_string().map_err(|_| { + let arg = self.args.iter().find(|arg| arg.item.contains("$it")); + if let Some(arg) = arg { + ShellError::labeled_error( + "External $it needs string data", + "given row instead of string data", + arg.tag(), + ) + } else { + ShellError::labeled_error( + "$it needs string data", + "given something else", + self.name_tag.clone(), + ) } - } - if let Some(tag) = tag { - return Err(ShellError::labeled_error( - "External $it needs string data", - "given row instead of string data", - tag, - )); - } else { - return Err(ShellError::string("Error: $it needs string data")); - } - } - if !first { - process = process.arg("&&"); - process = process.arg(&self.name); - } else { - first = false; - } + }) + }) + .collect::, ShellError>>()?; - for arg in &self.args { + let commands = input_strings.iter().map(|i| { + let args = self.args.iter().filter_map(|arg| { if arg.chars().all(|c| c.is_whitespace()) { - continue; + None + } else { + Some(arg.replace("$it", &i)) } + }); - process = process.arg(&arg.replace("$it", &i.as_string()?)); - } - } + format!("{} {}", self.name, itertools::join(args, " ")) + }); + + process = Exec::shell(itertools::join(commands, " && ")) } else { - for arg in &self.args { + process = Exec::cmd(&self.name); + for arg in &self.args.item { let arg_chars: Vec<_> = arg.chars().collect(); if arg_chars.len() > 1 && arg_chars[0] == '"' @@ -275,6 +362,8 @@ impl ExternalCommand { process = process.cwd(context.shell_manager.path()); + trace!(target: "nu::run::external", "cwd = {:?}", context.shell_manager.path()); + let mut process = match stream_next { StreamNext::Last => process, StreamNext::External | StreamNext::Internal => { @@ -282,43 +371,60 @@ impl ExternalCommand { } }; + trace!(target: "nu::run::external", "set up stdout pipe"); + if let Some(stdin) = stdin { process = process.stdin(stdin); } - let mut popen = process.popen()?; + trace!(target: "nu::run::external", "set up stdin pipe"); + trace!(target: "nu::run::external", "built process {:?}", process); - match stream_next { - StreamNext::Last => { - let _ = popen.detach(); - loop { - match popen.poll() { - None => { - let _ = std::thread::sleep(std::time::Duration::new(0, 100000000)); - } - _ => { - let _ = popen.terminate(); - break; + let popen = process.popen(); + + trace!(target: "nu::run::external", "next = {:?}", stream_next); + + let name_tag = self.name_tag.clone(); + if let Ok(mut popen) = popen { + match stream_next { + StreamNext::Last => { + let _ = popen.detach(); + loop { + match popen.poll() { + None => { + let _ = std::thread::sleep(std::time::Duration::new(0, 100000000)); + } + _ => { + let _ = popen.terminate(); + break; + } } } + Ok(ClassifiedInputStream::new()) + } + StreamNext::External => { + let _ = popen.detach(); + let stdout = popen.stdout.take().unwrap(); + Ok(ClassifiedInputStream::from_stdout(stdout)) + } + StreamNext::Internal => { + let _ = popen.detach(); + let stdout = popen.stdout.take().unwrap(); + let file = futures::io::AllowStdIo::new(stdout); + let stream = Framed::new(file, LinesCodec {}); + let stream = + stream.map(move |line| Value::string(line.unwrap()).tagged(&name_tag)); + Ok(ClassifiedInputStream::from_input_stream( + stream.boxed() as BoxStream<'static, Tagged> + )) } - Ok(ClassifiedInputStream::new()) - } - StreamNext::External => { - let _ = popen.detach(); - let stdout = popen.stdout.take().unwrap(); - 
Ok(ClassifiedInputStream::from_stdout(stdout)) - } - StreamNext::Internal => { - let _ = popen.detach(); - let stdout = popen.stdout.take().unwrap(); - let file = futures::io::AllowStdIo::new(stdout); - let stream = Framed::new(file, LinesCodec {}); - let stream = stream.map(move |line| Value::string(line.unwrap()).tagged(name_tag)); - Ok(ClassifiedInputStream::from_input_stream( - stream.boxed() as BoxStream<'static, Tagged> - )) } + } else { + return Err(ShellError::labeled_error( + "Command not found", + "command not found", + name_tag, + )); } } } diff --git a/src/commands/command.rs b/src/commands/command.rs index 95732abac2..2dc69df9cb 100644 --- a/src/commands/command.rs +++ b/src/commands/command.rs @@ -1,4 +1,3 @@ -use crate::context::{AnchorLocation, SourceMap}; use crate::data::Value; use crate::errors::ShellError; use crate::evaluate::Scope; @@ -11,18 +10,17 @@ use serde::{Deserialize, Serialize}; use std::fmt; use std::ops::Deref; use std::path::PathBuf; -use uuid::Uuid; +use std::sync::atomic::AtomicBool; #[derive(Deserialize, Serialize, Debug, Clone)] pub struct UnevaluatedCallInfo { pub args: hir::Call, pub source: Text, - pub source_map: SourceMap, pub name_tag: Tag, } -impl ToDebug for UnevaluatedCallInfo { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { +impl FormatDebug for UnevaluatedCallInfo { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { self.args.fmt_debug(f, source) } } @@ -37,7 +35,6 @@ impl UnevaluatedCallInfo { Ok(CallInfo { args, - source_map: self.source_map, name_tag: self.name_tag, }) } @@ -46,7 +43,6 @@ impl UnevaluatedCallInfo { #[derive(Deserialize, Serialize, Debug, Clone)] pub struct CallInfo { pub args: registry::EvaluatedArgs, - pub source_map: SourceMap, pub name_tag: Tag, } @@ -62,7 +58,7 @@ impl CallInfo { args: T::deserialize(&mut deserializer)?, context: RunnablePerItemContext { shell_manager: shell_manager.clone(), - name: self.name_tag, + name: self.name_tag.clone(), }, callback, }) @@ -73,6 +69,7 @@ impl CallInfo { #[get = "pub(crate)"] pub struct CommandArgs { pub host: Arc>, + pub ctrl_c: Arc, pub shell_manager: ShellManager, pub call_info: UnevaluatedCallInfo, pub input: InputStream, @@ -82,6 +79,7 @@ pub struct CommandArgs { #[get = "pub(crate)"] pub struct RawCommandArgs { pub host: Arc>, + pub ctrl_c: Arc, pub shell_manager: ShellManager, pub call_info: UnevaluatedCallInfo, } @@ -90,6 +88,7 @@ impl RawCommandArgs { pub fn with_input(self, input: Vec>) -> CommandArgs { CommandArgs { host: self.host, + ctrl_c: self.ctrl_c, shell_manager: self.shell_manager, call_info: self.call_info, input: input.into(), @@ -97,8 +96,14 @@ impl RawCommandArgs { } } -impl ToDebug for CommandArgs { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { +impl std::fmt::Debug for CommandArgs { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.call_info.fmt(f) + } +} + +impl FormatDebug for CommandArgs { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { self.call_info.fmt_debug(f, source) } } @@ -109,12 +114,14 @@ impl CommandArgs { registry: ®istry::CommandRegistry, ) -> Result { let host = self.host.clone(); + let ctrl_c = self.ctrl_c.clone(); let shell_manager = self.shell_manager.clone(); let input = self.input; let call_info = self.call_info.evaluate(registry, &Scope::empty())?; Ok(EvaluatedWholeStreamCommandArgs::new( host, + ctrl_c, shell_manager, call_info, input, @@ -127,12 +134,13 @@ impl CommandArgs { callback: 
fn(T, RunnableContext) -> Result, ) -> Result, ShellError> { let shell_manager = self.shell_manager.clone(); - let source_map = self.call_info.source_map.clone(); let host = self.host.clone(); + let ctrl_c = self.ctrl_c.clone(); let args = self.evaluate_once(registry)?; + let call_info = args.call_info.clone(); let (input, args) = args.split(); let name_tag = args.call_info.name_tag; - let mut deserializer = ConfigDeserializer::from_call_info(args.call_info); + let mut deserializer = ConfigDeserializer::from_call_info(call_info); Ok(RunnableArgs { args: T::deserialize(&mut deserializer)?, @@ -141,8 +149,8 @@ impl CommandArgs { commands: registry.clone(), shell_manager, name: name_tag, - source_map, host, + ctrl_c, }, callback, }) @@ -155,17 +163,20 @@ impl CommandArgs { ) -> Result, ShellError> { let raw_args = RawCommandArgs { host: self.host.clone(), + ctrl_c: self.ctrl_c.clone(), shell_manager: self.shell_manager.clone(), call_info: self.call_info.clone(), }; let shell_manager = self.shell_manager.clone(); - let source_map = self.call_info.source_map.clone(); let host = self.host.clone(); + let ctrl_c = self.ctrl_c.clone(); let args = self.evaluate_once(registry)?; + let call_info = args.call_info.clone(); + let (input, args) = args.split(); let name_tag = args.call_info.name_tag; - let mut deserializer = ConfigDeserializer::from_call_info(args.call_info); + let mut deserializer = ConfigDeserializer::from_call_info(call_info.clone()); Ok(RunnableRawArgs { args: T::deserialize(&mut deserializer)?, @@ -174,8 +185,8 @@ impl CommandArgs { commands: registry.clone(), shell_manager, name: name_tag, - source_map, host, + ctrl_c, }, raw_args, callback, @@ -198,18 +209,12 @@ pub struct RunnableContext { pub input: InputStream, pub shell_manager: ShellManager, pub host: Arc>, + pub ctrl_c: Arc, pub commands: CommandRegistry, - pub source_map: SourceMap, pub name: Tag, } impl RunnableContext { - pub fn expect_command(&self, name: &str) -> Arc { - self.commands - .get_command(name) - .expect(&format!("Expected command {}", name)) - } - pub fn get_command(&self, name: &str) -> Option> { self.commands.get_command(name) } @@ -270,6 +275,7 @@ impl Deref for EvaluatedWholeStreamCommandArgs { impl EvaluatedWholeStreamCommandArgs { pub fn new( host: Arc>, + ctrl_c: Arc, shell_manager: ShellManager, call_info: CallInfo, input: impl Into, @@ -277,6 +283,7 @@ impl EvaluatedWholeStreamCommandArgs { EvaluatedWholeStreamCommandArgs { args: EvaluatedCommandArgs { host, + ctrl_c, shell_manager, call_info, }, @@ -285,7 +292,7 @@ impl EvaluatedWholeStreamCommandArgs { } pub fn name_tag(&self) -> Tag { - self.args.call_info.name_tag + self.args.call_info.name_tag.clone() } pub fn parts(self) -> (InputStream, registry::EvaluatedArgs) { @@ -317,12 +324,14 @@ impl Deref for EvaluatedFilterCommandArgs { impl EvaluatedFilterCommandArgs { pub fn new( host: Arc>, + ctrl_c: Arc, shell_manager: ShellManager, call_info: CallInfo, ) -> EvaluatedFilterCommandArgs { EvaluatedFilterCommandArgs { args: EvaluatedCommandArgs { host, + ctrl_c, shell_manager, call_info, }, @@ -334,6 +343,7 @@ impl EvaluatedFilterCommandArgs { #[get = "pub(crate)"] pub struct EvaluatedCommandArgs { pub host: Arc>, + pub ctrl_c: Arc, pub shell_manager: ShellManager, pub call_info: CallInfo, } @@ -373,10 +383,9 @@ impl EvaluatedCommandArgs { } } -#[derive(Debug, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] pub enum CommandAction { ChangePath(String), - AddAnchorLocation(Uuid, AnchorLocation), Exit, EnterShell(String), 
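    // Hedged illustration (variant names are from this enum; the snippet itself is not
    // part of the patch): a command emits one of these actions through its output
    // stream, e.g.
    //     yield Ok(ReturnSuccess::Action(CommandAction::EnterShell("src".into())));
    // and the dispatch loop earlier in this diff maps each variant onto the
    // ShellManager (set_path, insert_at_current, prev/next, remove_at_current).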
EnterValueShell(Tagged), @@ -386,13 +395,10 @@ pub enum CommandAction { LeaveShell, } -impl ToDebug for CommandAction { - fn fmt_debug(&self, f: &mut fmt::Formatter, _source: &str) -> fmt::Result { +impl FormatDebug for CommandAction { + fn fmt_debug(&self, f: &mut DebugFormatter, _source: &str) -> fmt::Result { match self { CommandAction::ChangePath(s) => write!(f, "action:change-path={}", s), - CommandAction::AddAnchorLocation(u, source) => { - write!(f, "action:add-span-source={}@{:?}", u, source) - } CommandAction::Exit => write!(f, "action:exit"), CommandAction::EnterShell(s) => write!(f, "action:enter-shell={}", s), CommandAction::EnterValueShell(t) => { @@ -408,7 +414,7 @@ impl ToDebug for CommandAction { } } -#[derive(Debug, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] pub enum ReturnSuccess { Value(Tagged), Action(CommandAction), @@ -416,8 +422,8 @@ pub enum ReturnSuccess { pub type ReturnValue = Result; -impl ToDebug for ReturnValue { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { +impl FormatDebug for ReturnValue { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { match self { Err(err) => write!(f, "{}", err.debug(source)), Ok(ReturnSuccess::Value(v)) => write!(f, "{:?}", v.debug()), @@ -507,6 +513,15 @@ pub enum Command { PerItem(Arc), } +impl std::fmt::Debug for Command { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Command::WholeStream(command) => write!(f, "WholeStream({})", command.name()), + Command::PerItem(command) => write!(f, "PerItem({})", command.name()), + } + } +} + impl Command { pub fn name(&self) -> &str { match self { @@ -529,20 +544,13 @@ impl Command { } } - pub fn run( - &self, - args: CommandArgs, - registry: ®istry::CommandRegistry, - is_first_command: bool, - ) -> OutputStream { + pub fn run(&self, args: CommandArgs, registry: ®istry::CommandRegistry) -> OutputStream { match self { Command::WholeStream(command) => match command.run(args, registry) { Ok(stream) => stream, Err(err) => OutputStream::one(Err(err)), }, - Command::PerItem(command) => { - self.run_helper(command.clone(), args, registry.clone(), is_first_command) - } + Command::PerItem(command) => self.run_helper(command.clone(), args, registry.clone()), } } @@ -551,48 +559,31 @@ impl Command { command: Arc, args: CommandArgs, registry: CommandRegistry, - is_first_command: bool, ) -> OutputStream { let raw_args = RawCommandArgs { host: args.host, + ctrl_c: args.ctrl_c, shell_manager: args.shell_manager, call_info: args.call_info, }; - if !is_first_command { - let out = args - .input - .values - .map(move |x| { - let call_info = raw_args - .clone() - .call_info - .evaluate(®istry, &Scope::it_value(x.clone())) - .unwrap(); - match command.run(&call_info, ®istry, &raw_args, x) { - Ok(o) => o, - Err(e) => VecDeque::from(vec![ReturnValue::Err(e)]).to_output_stream(), - } - }) - .flatten(); + let out = args + .input + .values + .map(move |x| { + let call_info = raw_args + .clone() + .call_info + .evaluate(®istry, &Scope::it_value(x.clone())) + .unwrap(); + match command.run(&call_info, ®istry, &raw_args, x) { + Ok(o) => o, + Err(e) => VecDeque::from(vec![ReturnValue::Err(e)]).to_output_stream(), + } + }) + .flatten(); - out.to_output_stream() - } else { - let nothing = Value::nothing().tagged(Tag::unknown()); - let call_info = raw_args - .clone() - .call_info - .evaluate(®istry, &Scope::it_value(nothing.clone())) - .unwrap(); - - match command - .run(&call_info, ®istry, 
&raw_args, nothing) - .into() - { - Ok(o) => o, - Err(e) => OutputStream::one(Err(e)), - } - } + out.to_output_stream() } pub fn is_binary(&self) -> bool { @@ -624,6 +615,7 @@ impl WholeStreamCommand for FnFilterCommand { ) -> Result { let CommandArgs { host, + ctrl_c, shell_manager, call_info, input, @@ -641,8 +633,12 @@ impl WholeStreamCommand for FnFilterCommand { Ok(args) => args, }; - let args = - EvaluatedFilterCommandArgs::new(host.clone(), shell_manager.clone(), call_info); + let args = EvaluatedFilterCommandArgs::new( + host.clone(), + ctrl_c.clone(), + shell_manager.clone(), + call_info, + ); match func(args) { Err(err) => return OutputStream::from(vec![Err(err)]).values, diff --git a/src/commands/config.rs b/src/commands/config.rs index 3b36c88fad..3c41cceae4 100644 --- a/src/commands/config.rs +++ b/src/commands/config.rs @@ -4,7 +4,6 @@ use crate::errors::ShellError; use crate::parser::hir::SyntaxShape; use crate::parser::registry::{self}; use crate::prelude::*; -use std::iter::FromIterator; use std::path::PathBuf; pub struct Config; @@ -13,6 +12,7 @@ pub struct Config; pub struct ConfigArgs { load: Option>, set: Option<(Tagged, Tagged)>, + set_into: Option>, get: Option>, clear: Tagged, remove: Option>, @@ -26,12 +26,25 @@ impl WholeStreamCommand for Config { fn signature(&self) -> Signature { Signature::build("config") - .named("load", SyntaxShape::Path) - .named("set", SyntaxShape::Any) - .named("get", SyntaxShape::Any) - .named("remove", SyntaxShape::Any) - .switch("clear") - .switch("path") + .named( + "load", + SyntaxShape::Path, + "load the config from the path give", + ) + .named( + "set", + SyntaxShape::Any, + "set a value in the config, eg) --set [key value]", + ) + .named( + "set_into", + SyntaxShape::Member, + "sets a variable from values in the pipeline", + ) + .named("get", SyntaxShape::Any, "get a value from the config") + .named("remove", SyntaxShape::Any, "remove a value from the config") + .switch("clear", "clear the config") + .switch("path", "return the path to the config file") } fn usage(&self) -> &str { @@ -51,84 +64,110 @@ pub fn config( ConfigArgs { load, set, + set_into, get, clear, remove, path, }: ConfigArgs, - RunnableContext { name, .. }: RunnableContext, + RunnableContext { name, input, .. }: RunnableContext, ) -> Result { - let name_span = name; + let name_span = name.clone(); - let configuration = if let Some(supplied) = load { - Some(supplied.item().clone()) - } else { - None + let stream = async_stream! { + let configuration = if let Some(supplied) = load { + Some(supplied.item().clone()) + } else { + None + }; + + let mut result = crate::data::config::read(name_span, &configuration)?; + + if let Some(v) = get { + let key = v.to_string(); + let value = result + .get(&key) + .ok_or_else(|| ShellError::labeled_error("Missing key in config", "key", v.tag()))?; + + match value { + Tagged { + item: Value::Table(list), + .. 
+ } => { + for l in list { + yield ReturnSuccess::value(l.clone()); + } + } + x => yield ReturnSuccess::value(x.clone()), + } + } + else if let Some((key, value)) = set { + result.insert(key.to_string(), value.clone()); + + config::write(&result, &configuration)?; + + yield ReturnSuccess::value(Value::Row(result.into()).tagged(value.tag())); + } + else if let Some(v) = set_into { + let rows: Vec> = input.values.collect().await; + let key = v.to_string(); + + if rows.len() == 0 { + yield Err(ShellError::labeled_error("No values given for set_into", "needs value(s) from pipeline", v.tag())); + } else if rows.len() == 1 { + // A single value + let value = &rows[0]; + + result.insert(key.to_string(), value.clone()); + + config::write(&result, &configuration)?; + + yield ReturnSuccess::value(Value::Row(result.into()).tagged(name)); + } else { + // Take in the pipeline as a table + let value = Value::Table(rows).tagged(name.clone()); + + result.insert(key.to_string(), value.clone()); + + config::write(&result, &configuration)?; + + yield ReturnSuccess::value(Value::Row(result.into()).tagged(name)); + } + } + else if let Tagged { item: true, tag } = clear { + result.clear(); + + config::write(&result, &configuration)?; + + yield ReturnSuccess::value(Value::Row(result.into()).tagged(tag)); + + return; + } + else if let Tagged { item: true, tag } = path { + let path = config::default_path_for(&configuration)?; + + yield ReturnSuccess::value(Value::Primitive(Primitive::Path(path)).tagged(tag)); + } + else if let Some(v) = remove { + let key = v.to_string(); + + if result.contains_key(&key) { + result.swap_remove(&key); + config::write(&result, &configuration).unwrap(); + } else { + yield Err(ShellError::labeled_error( + "Key does not exist in config", + "key", + v.tag(), + )); + } + + yield ReturnSuccess::value(Value::Row(result.into()).tagged(v.tag())); + } + else { + yield ReturnSuccess::value(Value::Row(result.into()).tagged(name)); + } }; - let mut result = crate::data::config::read(name_span, &configuration)?; - - if let Some(v) = get { - let key = v.to_string(); - let value = result - .get(&key) - .ok_or_else(|| ShellError::string(&format!("Missing key {} in config", key)))?; - - let mut results = VecDeque::new(); - - match value { - Tagged { - item: Value::Table(list), - .. 
- } => { - for l in list { - results.push_back(ReturnSuccess::value(l.clone())); - } - } - x => results.push_back(ReturnSuccess::value(x.clone())), - } - - return Ok(results.to_output_stream()); - } - - if let Some((key, value)) = set { - result.insert(key.to_string(), value.clone()); - - config::write(&result, &configuration)?; - - return Ok(stream![Value::Row(result.into()).tagged(value.tag())].from_input_stream()); - } - - if let Tagged { item: true, tag } = clear { - result.clear(); - - config::write(&result, &configuration)?; - - return Ok(stream![Value::Row(result.into()).tagged(tag)].from_input_stream()); - } - - if let Tagged { item: true, tag } = path { - let path = config::default_path_for(&configuration)?; - - return Ok(stream![Value::Primitive(Primitive::Path(path)).tagged(tag)].from_input_stream()); - } - - if let Some(v) = remove { - let key = v.to_string(); - - if result.contains_key(&key) { - result.swap_remove(&key); - config::write(&result, &configuration)?; - } else { - return Err(ShellError::string(&format!( - "{} does not exist in config", - key - ))); - } - - let obj = VecDeque::from_iter(vec![Value::Row(result.into()).tagged(v.tag())]); - return Ok(obj.from_input_stream()); - } - - return Ok(vec![Value::Row(result.into()).tagged(name)].into()); + Ok(stream.to_output_stream()) } diff --git a/src/commands/count.rs b/src/commands/count.rs new file mode 100644 index 0000000000..5e44283737 --- /dev/null +++ b/src/commands/count.rs @@ -0,0 +1,46 @@ +use crate::commands::WholeStreamCommand; +use crate::data::Value; +use crate::errors::ShellError; +use crate::parser::CommandRegistry; +use crate::prelude::*; +use futures::stream::StreamExt; + +pub struct Count; + +#[derive(Deserialize)] +pub struct CountArgs {} + +impl WholeStreamCommand for Count { + fn name(&self) -> &str { + "count" + } + + fn signature(&self) -> Signature { + Signature::build("count") + } + + fn usage(&self) -> &str { + "Show the total number of rows." + } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + args.process(registry, count)?.run() + } +} + +pub fn count( + CountArgs {}: CountArgs, + RunnableContext { input, name, .. }: RunnableContext, +) -> Result { + let stream = async_stream! 
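    // The async_stream block below drains the whole input stream first, then yields a
    // single integer row holding the count. Hedged usage illustration (pipeline shown
    // for context only): `ls | count`.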
{ + let rows: Vec> = input.values.collect().await; + + yield ReturnSuccess::value(Value::int(rows.len()).tagged(name)) + }; + + Ok(stream.to_output_stream()) +} diff --git a/src/commands/cp.rs b/src/commands/cp.rs index bf20c74ce9..5ca21adb1e 100644 --- a/src/commands/cp.rs +++ b/src/commands/cp.rs @@ -21,10 +21,9 @@ impl PerItemCommand for Cpy { fn signature(&self) -> Signature { Signature::build("cp") - .required("src", SyntaxShape::Pattern) - .required("dst", SyntaxShape::Path) - .named("file", SyntaxShape::Any) - .switch("recursive") + .required("src", SyntaxShape::Pattern, "the place to copy from") + .required("dst", SyntaxShape::Path, "the place to copy to") + .switch("recursive", "copy recursively through subdirectories") } fn usage(&self) -> &str { diff --git a/src/commands/date.rs b/src/commands/date.rs index 6df9e27209..24ebc876e4 100644 --- a/src/commands/date.rs +++ b/src/commands/date.rs @@ -17,7 +17,9 @@ impl WholeStreamCommand for Date { } fn signature(&self) -> Signature { - Signature::build("date").switch("utc").switch("local") + Signature::build("date") + .switch("utc", "use universal time (UTC)") + .switch("local", "use the local time") } fn usage(&self) -> &str { @@ -39,27 +41,27 @@ where { let mut indexmap = IndexMap::new(); - indexmap.insert("year".to_string(), Value::int(dt.year()).tagged(tag)); - indexmap.insert("month".to_string(), Value::int(dt.month()).tagged(tag)); - indexmap.insert("day".to_string(), Value::int(dt.day()).tagged(tag)); - indexmap.insert("hour".to_string(), Value::int(dt.hour()).tagged(tag)); - indexmap.insert("minute".to_string(), Value::int(dt.minute()).tagged(tag)); - indexmap.insert("second".to_string(), Value::int(dt.second()).tagged(tag)); + indexmap.insert("year".to_string(), Value::int(dt.year()).tagged(&tag)); + indexmap.insert("month".to_string(), Value::int(dt.month()).tagged(&tag)); + indexmap.insert("day".to_string(), Value::int(dt.day()).tagged(&tag)); + indexmap.insert("hour".to_string(), Value::int(dt.hour()).tagged(&tag)); + indexmap.insert("minute".to_string(), Value::int(dt.minute()).tagged(&tag)); + indexmap.insert("second".to_string(), Value::int(dt.second()).tagged(&tag)); let tz = dt.offset(); indexmap.insert( "timezone".to_string(), - Value::string(format!("{}", tz)).tagged(tag), + Value::string(format!("{}", tz)).tagged(&tag), ); - Value::Row(Dictionary::from(indexmap)).tagged(tag) + Value::Row(Dictionary::from(indexmap)).tagged(&tag) } pub fn date(args: CommandArgs, registry: &CommandRegistry) -> Result { let args = args.evaluate_once(registry)?; let mut date_out = VecDeque::new(); - let tag = args.call_info.name_tag; + let tag = args.call_info.name_tag.clone(); let value = if args.has("utc") { let utc: DateTime = Utc::now(); diff --git a/src/commands/echo.rs b/src/commands/echo.rs index 21188f54f2..6e59d51f44 100644 --- a/src/commands/echo.rs +++ b/src/commands/echo.rs @@ -12,7 +12,7 @@ impl PerItemCommand for Echo { } fn signature(&self) -> Signature { - Signature::build("echo").rest(SyntaxShape::Any) + Signature::build("echo").rest(SyntaxShape::Any, "the values to echo") } fn usage(&self) -> &str { @@ -35,38 +35,34 @@ fn run( _registry: &CommandRegistry, _raw_args: &RawCommandArgs, ) -> Result { - let name = call_info.name_tag; - - let mut output = String::new(); - - let mut first = true; + let mut output = vec![]; if let Some(ref positional) = call_info.args.positional { for i in positional { match i.as_string() { Ok(s) => { - if !first { - output.push_str(" "); - } else { - first = false; + 
output.push(Ok(ReturnSuccess::Value( + Value::string(s).tagged(i.tag.clone()), + ))); + } + _ => match i { + Tagged { + item: Value::Table(table), + .. + } => { + for item in table { + output.push(Ok(ReturnSuccess::Value(item.clone()))); + } } - - output.push_str(&s); - } - _ => { - return Err(ShellError::labeled_error( - "Expect a string from pipeline", - "not a string-compatible value", - i.tag(), - )); - } + _ => { + output.push(Ok(ReturnSuccess::Value(i.clone()))); + } + }, } } } - let stream = VecDeque::from(vec![Ok(ReturnSuccess::Value( - Value::string(output).tagged(name), - ))]); + let stream = VecDeque::from(output); Ok(stream.to_output_stream()) } diff --git a/src/commands/enter.rs b/src/commands/enter.rs index 2d96fe865c..2153a0a084 100644 --- a/src/commands/enter.rs +++ b/src/commands/enter.rs @@ -1,7 +1,6 @@ use crate::commands::command::CommandAction; use crate::commands::PerItemCommand; use crate::commands::UnevaluatedCallInfo; -use crate::data::meta::Span; use crate::errors::ShellError; use crate::parser::registry; use crate::prelude::*; @@ -15,7 +14,11 @@ impl PerItemCommand for Enter { } fn signature(&self) -> registry::Signature { - Signature::build("enter").required("location", SyntaxShape::Block) + Signature::build("enter").required( + "location", + SyntaxShape::Path, + "the location to create a new shell from", + ) } fn usage(&self) -> &str { @@ -33,14 +36,16 @@ impl PerItemCommand for Enter { let raw_args = raw_args.clone(); match call_info.args.expect_nth(0)? { Tagged { - item: Value::Primitive(Primitive::String(location)), + item: Value::Primitive(Primitive::Path(location)), + tag, .. } => { - let location = location.to_string(); - let location_clone = location.to_string(); + let location_string = location.display().to_string(); + let location_clone = location_string.clone(); + let tag_clone = tag.clone(); if location.starts_with("help") { - let spec = location.split(":").collect::>(); + let spec = location_string.split(":").collect::>(); let (_, command) = (spec[0], spec[1]); @@ -67,26 +72,16 @@ impl PerItemCommand for Enter { let full_path = std::path::PathBuf::from(cwd); - let (file_extension, contents, contents_tag, anchor_location) = + let (file_extension, contents, contents_tag) = crate::commands::open::fetch( &full_path, &location_clone, - Span::unknown(), - ) - .await.unwrap(); - - if contents_tag.anchor != uuid::Uuid::nil() { - // If we have loaded something, track its source - yield ReturnSuccess::action(CommandAction::AddAnchorLocation( - contents_tag.anchor, - anchor_location, - )); - } - + tag_clone.span, + ).await?; match contents { Value::Primitive(Primitive::String(_)) => { - let tagged_contents = contents.tagged(contents_tag); + let tagged_contents = contents.tagged(&contents_tag); if let Some(extension) = file_extension { let command_name = format!("from-{}", extension); @@ -95,6 +90,7 @@ impl PerItemCommand for Enter { { let new_args = RawCommandArgs { host: raw_args.host, + ctrl_c: raw_args.ctrl_c, shell_manager: raw_args.shell_manager, call_info: UnevaluatedCallInfo { args: crate::parser::hir::Call { @@ -103,14 +99,12 @@ impl PerItemCommand for Enter { named: None, }, source: raw_args.call_info.source, - source_map: raw_args.call_info.source_map, name_tag: raw_args.call_info.name_tag, }, }; let mut result = converter.run( new_args.with_input(vec![tagged_contents]), ®istry, - false ); let result_vec: Vec> = result.drain_vec().await; @@ -123,7 +117,7 @@ impl PerItemCommand for Enter { yield Ok(ReturnSuccess::Action(CommandAction::EnterValueShell( 
Tagged { item, - tag: contents_tag, + tag: contents_tag.clone(), }))); } x => yield x, diff --git a/src/commands/env.rs b/src/commands/env.rs index c0af785557..0572b499c1 100644 --- a/src/commands/env.rs +++ b/src/commands/env.rs @@ -37,22 +37,22 @@ pub fn get_environment(tag: Tag) -> Result, Box Result, Box Result { let args = args.evaluate_once(registry)?; let mut env_out = VecDeque::new(); - let tag = args.call_info.name_tag; + let tag = args.call_info.name_tag.clone(); let value = get_environment(tag)?; env_out.push_back(value); diff --git a/src/commands/evaluate_by.rs b/src/commands/evaluate_by.rs new file mode 100644 index 0000000000..f4925917c4 --- /dev/null +++ b/src/commands/evaluate_by.rs @@ -0,0 +1,260 @@ +use crate::commands::WholeStreamCommand; +use crate::parser::hir::SyntaxShape; +use crate::prelude::*; +pub struct EvaluateBy; + +#[derive(Deserialize)] +pub struct EvaluateByArgs { + evaluate_with: Option>, +} + +impl WholeStreamCommand for EvaluateBy { + fn name(&self) -> &str { + "evaluate-by" + } + + fn signature(&self) -> Signature { + Signature::build("evaluate-by").named( + "evaluate_with", + SyntaxShape::String, + "the name of the column to evaluate by", + ) + } + + fn usage(&self) -> &str { + "Creates a new table with the data from the tables rows evaluated by the column given." + } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + args.process(registry, evaluate_by)?.run() + } +} + +pub fn evaluate_by( + EvaluateByArgs { evaluate_with }: EvaluateByArgs, + RunnableContext { input, name, .. }: RunnableContext, +) -> Result { + let stream = async_stream! { + let values: Vec> = input.values.collect().await; + + + if values.is_empty() { + yield Err(ShellError::labeled_error( + "Expected table from pipeline", + "requires a table input", + name + )) + } else { + + let evaluate_with = if let Some(evaluator) = evaluate_with { + Some(evaluator.item().clone()) + } else { + None + }; + + match evaluate(&values[0], evaluate_with, name) { + Ok(evaluated) => yield ReturnSuccess::value(evaluated), + Err(err) => yield Err(err) + } + } + }; + + Ok(stream.to_output_stream()) +} + +fn fetch( + key: Option, +) -> Box, Tag) -> Option> + 'static> { + Box::new(move |value: Tagged, tag| match key { + Some(ref key_given) => { + if let Some(Tagged { item, .. }) = value.get_data_by_key(&key_given) { + Some(item.clone().tagged(tag)) + } else { + None + } + } + None => Some(Value::int(1).tagged(tag)), + }) +} + +pub fn evaluate( + values: &Tagged, + evaluator: Option, + tag: impl Into, +) -> Result, ShellError> { + let tag = tag.into(); + + let evaluate_with = match evaluator { + Some(keyfn) => fetch(Some(keyfn)), + None => fetch(None), + }; + + let results: Tagged = match values { + Tagged { + item: Value::Table(datasets), + .. + } => { + let datasets: Vec<_> = datasets + .into_iter() + .map(|subsets| match subsets { + Tagged { + item: Value::Table(subsets), + .. + } => { + let subsets: Vec<_> = subsets + .clone() + .into_iter() + .map(|data| match data { + Tagged { + item: Value::Table(data), + .. 
+ } => { + let data: Vec<_> = data + .into_iter() + .map(|x| evaluate_with(x.clone(), tag.clone()).unwrap()) + .collect(); + Value::Table(data).tagged(&tag) + } + _ => Value::Table(vec![]).tagged(&tag), + }) + .collect(); + Value::Table(subsets).tagged(&tag) + } + _ => Value::Table(vec![]).tagged(&tag), + }) + .collect(); + + Value::Table(datasets.clone()).tagged(&tag) + } + _ => Value::Table(vec![]).tagged(&tag), + }; + + Ok(results) +} + +#[cfg(test)] +mod tests { + + use crate::commands::evaluate_by::{evaluate, fetch}; + use crate::commands::group_by::group; + use crate::commands::t_sort_by::t_sort; + use crate::data::meta::*; + use crate::prelude::*; + use crate::Value; + use indexmap::IndexMap; + + fn int(s: impl Into) -> Tagged { + Value::int(s).tagged_unknown() + } + + fn string(input: impl Into) -> Tagged { + Value::string(input.into()).tagged_unknown() + } + + fn row(entries: IndexMap>) -> Tagged { + Value::row(entries).tagged_unknown() + } + + fn table(list: &Vec>) -> Tagged { + Value::table(list).tagged_unknown() + } + + fn nu_releases_sorted_by_date() -> Tagged { + let key = String::from("date"); + + t_sort( + Some(key), + None, + &nu_releases_grouped_by_date(), + Tag::unknown(), + ) + .unwrap() + } + + fn nu_releases_grouped_by_date() -> Tagged { + let key = String::from("date").tagged_unknown(); + group(&key, nu_releases_commiters(), Tag::unknown()).unwrap() + } + + fn nu_releases_commiters() -> Vec> { + vec![ + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("September 24-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("September 24-2019")}, + ), + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("September 24-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}, + ), + ] + } + + #[test] + fn evaluator_fetches_by_column_if_supplied_a_column_name() { + let subject = row(indexmap! { "name".into() => string("andres") }); + + let evaluator = fetch(Some(String::from("name"))); + + assert_eq!(evaluator(subject, Tag::unknown()), Some(string("andres"))); + } + + #[test] + fn evaluator_returns_1_if_no_column_name_given() { + let subject = row(indexmap! 
{ "name".into() => string("andres") }); + let evaluator = fetch(None); + + assert_eq!( + evaluator(subject, Tag::unknown()), + Some(Value::int(1).tagged_unknown()) + ); + } + + #[test] + fn evaluates_the_tables() { + assert_eq!( + evaluate(&nu_releases_sorted_by_date(), None, Tag::unknown()).unwrap(), + table(&vec![table(&vec![ + table(&vec![int(1), int(1), int(1)]), + table(&vec![int(1), int(1), int(1)]), + table(&vec![int(1), int(1), int(1)]), + ]),]) + ); + } + + #[test] + fn evaluates_the_tables_with_custom_evaluator() { + let eval = String::from("name"); + + assert_eq!( + evaluate(&nu_releases_sorted_by_date(), Some(eval), Tag::unknown()).unwrap(), + table(&vec![table(&vec![ + table(&vec![string("AR"), string("JT"), string("YK")]), + table(&vec![string("AR"), string("YK"), string("JT")]), + table(&vec![string("YK"), string("JT"), string("AR")]), + ]),]) + ); + } +} diff --git a/src/commands/exit.rs b/src/commands/exit.rs index 8a382d8b7d..b7db7cc340 100644 --- a/src/commands/exit.rs +++ b/src/commands/exit.rs @@ -11,7 +11,7 @@ impl WholeStreamCommand for Exit { } fn signature(&self) -> Signature { - Signature::build("exit").switch("now") + Signature::build("exit").switch("now", "exit out of the shell immediately") } fn usage(&self) -> &str { diff --git a/src/commands/fetch.rs b/src/commands/fetch.rs index 652ec77eb5..8fe3ca247a 100644 --- a/src/commands/fetch.rs +++ b/src/commands/fetch.rs @@ -10,7 +10,6 @@ use mime::Mime; use std::path::PathBuf; use std::str::FromStr; use surf::mime; -use uuid::Uuid; pub struct Fetch; impl PerItemCommand for Fetch { @@ -20,8 +19,12 @@ impl PerItemCommand for Fetch { fn signature(&self) -> Signature { Signature::build(self.name()) - .required("path", SyntaxShape::Path) - .switch("raw") + .required( + "path", + SyntaxShape::Path, + "the URL to fetch the contents from", + ) + .switch("raw", "fetch contents as text rather than a table") } fn usage(&self) -> &str { @@ -44,16 +47,18 @@ fn run( registry: &CommandRegistry, raw_args: &RawCommandArgs, ) -> Result { - let path = match call_info - .args - .nth(0) - .ok_or_else(|| ShellError::string(&format!("No file or directory specified")))? - { + let path = match call_info.args.nth(0).ok_or_else(|| { + ShellError::labeled_error( + "No file or directory specified", + "for command", + &call_info.name_tag, + ) + })? 
{ file => file, }; let path_buf = path.as_path()?; let path_str = path_buf.display().to_string(); - let path_span = path.span(); + let path_span = path.tag.span; let has_raw = call_info.args.has("raw"); let registry = registry.clone(); let raw_args = raw_args.clone(); @@ -66,7 +71,7 @@ fn run( yield Err(e); return; } - let (file_extension, contents, contents_tag, anchor_location) = result.unwrap(); + let (file_extension, contents, contents_tag) = result.unwrap(); let file_extension = if has_raw { None @@ -76,21 +81,14 @@ fn run( file_extension.or(path_str.split('.').last().map(String::from)) }; - if contents_tag.anchor != uuid::Uuid::nil() { - // If we have loaded something, track its source - yield ReturnSuccess::action(CommandAction::AddAnchorLocation( - contents_tag.anchor, - anchor_location, - )); - } - - let tagged_contents = contents.tagged(contents_tag); + let tagged_contents = contents.tagged(&contents_tag); if let Some(extension) = file_extension { let command_name = format!("from-{}", extension); if let Some(converter) = registry.get_command(&command_name) { let new_args = RawCommandArgs { host: raw_args.host, + ctrl_c: raw_args.ctrl_c, shell_manager: raw_args.shell_manager, call_info: UnevaluatedCallInfo { args: crate::parser::hir::Call { @@ -99,11 +97,10 @@ fn run( named: None }, source: raw_args.call_info.source, - source_map: raw_args.call_info.source_map, name_tag: raw_args.call_info.name_tag, } }; - let mut result = converter.run(new_args.with_input(vec![tagged_contents]), ®istry, false); + let mut result = converter.run(new_args.with_input(vec![tagged_contents]), ®istry); let result_vec: Vec> = result.drain_vec().await; for res in result_vec { match res { @@ -113,7 +110,7 @@ fn run( } } Ok(ReturnSuccess::Value(Tagged { item, .. })) => { - yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag })); + yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() })); } x => yield x, } @@ -129,10 +126,7 @@ fn run( Ok(stream.to_output_stream()) } -pub async fn fetch( - location: &str, - span: Span, -) -> Result<(Option, Value, Tag, AnchorLocation), ShellError> { +pub async fn fetch(location: &str, span: Span) -> Result<(Option, Value, Tag), ShellError> { if let Err(_) = url::Url::parse(location) { return Err(ShellError::labeled_error( "Incomplete or incorrect url", @@ -158,9 +152,8 @@ pub async fn fetch( })?), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::Url(location.to_string())), }, - AnchorLocation::Url(location.to_string()), )), (mime::APPLICATION, mime::JSON) => Ok(( Some("json".to_string()), @@ -173,9 +166,8 @@ pub async fn fetch( })?), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::Url(location.to_string())), }, - AnchorLocation::Url(location.to_string()), )), (mime::APPLICATION, mime::OCTET_STREAM) => { let buf: Vec = r.body_bytes().await.map_err(|_| { @@ -190,9 +182,8 @@ pub async fn fetch( Value::binary(buf), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::Url(location.to_string())), }, - AnchorLocation::Url(location.to_string()), )) } (mime::IMAGE, mime::SVG) => Ok(( @@ -206,9 +197,8 @@ pub async fn fetch( })?), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::Url(location.to_string())), }, - AnchorLocation::Url(location.to_string()), )), (mime::IMAGE, image_ty) => { let buf: Vec = r.body_bytes().await.map_err(|_| { @@ -223,9 +213,8 @@ pub async fn fetch( Value::binary(buf), Tag { span, - anchor: Uuid::new_v4(), + anchor: 
Some(AnchorLocation::Url(location.to_string())), }, - AnchorLocation::Url(location.to_string()), )) } (mime::TEXT, mime::HTML) => Ok(( @@ -239,9 +228,8 @@ pub async fn fetch( })?), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::Url(location.to_string())), }, - AnchorLocation::Url(location.to_string()), )), (mime::TEXT, mime::PLAIN) => { let path_extension = url::Url::parse(location) @@ -266,9 +254,8 @@ pub async fn fetch( })?), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::Url(location.to_string())), }, - AnchorLocation::Url(location.to_string()), )) } (ty, sub_ty) => Ok(( @@ -276,9 +263,8 @@ pub async fn fetch( Value::string(format!("Not yet supported MIME type: {} {}", ty, sub_ty)), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::Url(location.to_string())), }, - AnchorLocation::Url(location.to_string()), )), } } @@ -287,9 +273,8 @@ pub async fn fetch( Value::string(format!("No content type found")), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::Url(location.to_string())), }, - AnchorLocation::Url(location.to_string()), )), }, Err(_) => { diff --git a/src/commands/first.rs b/src/commands/first.rs index e39b5155d0..a9a287978a 100644 --- a/src/commands/first.rs +++ b/src/commands/first.rs @@ -7,7 +7,7 @@ pub struct First; #[derive(Deserialize)] pub struct FirstArgs { - amount: Tagged, + rows: Option>, } impl WholeStreamCommand for First { @@ -16,7 +16,11 @@ impl WholeStreamCommand for First { } fn signature(&self) -> Signature { - Signature::build("first").required("amount", SyntaxShape::Literal) + Signature::build("first").optional( + "rows", + SyntaxShape::Int, + "starting from the front, the number of rows to return", + ) } fn usage(&self) -> &str { @@ -33,8 +37,16 @@ impl WholeStreamCommand for First { } fn first( - FirstArgs { amount }: FirstArgs, + FirstArgs { rows }: FirstArgs, context: RunnableContext, ) -> Result { - Ok(OutputStream::from_input(context.input.values.take(*amount))) + let rows_desired = if let Some(quantity) = rows { + *quantity + } else { + 1 + }; + + Ok(OutputStream::from_input( + context.input.values.take(rows_desired), + )) } diff --git a/src/commands/from_bson.rs b/src/commands/from_bson.rs index 7dd00983fc..469e15f35e 100644 --- a/src/commands/from_bson.rs +++ b/src/commands/from_bson.rs @@ -33,7 +33,7 @@ fn bson_array(input: &Vec, tag: Tag) -> Result>, ShellEr let mut out = vec![]; for value in input { - out.push(convert_bson_value_to_nu_value(value, tag)?); + out.push(convert_bson_value_to_nu_value(value, &tag)?); } Ok(out) @@ -46,100 +46,100 @@ fn convert_bson_value_to_nu_value( let tag = tag.into(); Ok(match v { - Bson::FloatingPoint(n) => Value::Primitive(Primitive::from(*n)).tagged(tag), - Bson::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(tag), - Bson::Array(a) => Value::Table(bson_array(a, tag)?).tagged(tag), + Bson::FloatingPoint(n) => Value::Primitive(Primitive::from(*n)).tagged(&tag), + Bson::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(&tag), + Bson::Array(a) => Value::Table(bson_array(a, tag.clone())?).tagged(&tag), Bson::Document(doc) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(tag.clone()); for (k, v) in doc.iter() { - collected.insert_tagged(k.clone(), convert_bson_value_to_nu_value(v, tag)?); + collected.insert_tagged(k.clone(), convert_bson_value_to_nu_value(v, &tag)?); } collected.into_tagged_value() } - Bson::Boolean(b) => 
Value::Primitive(Primitive::Boolean(*b)).tagged(tag), - Bson::Null => Value::Primitive(Primitive::Nothing).tagged(tag), + Bson::Boolean(b) => Value::Primitive(Primitive::Boolean(*b)).tagged(&tag), + Bson::Null => Value::Primitive(Primitive::Nothing).tagged(&tag), Bson::RegExp(r, opts) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(tag.clone()); collected.insert_tagged( "$regex".to_string(), - Value::Primitive(Primitive::String(String::from(r))).tagged(tag), + Value::Primitive(Primitive::String(String::from(r))).tagged(&tag), ); collected.insert_tagged( "$options".to_string(), - Value::Primitive(Primitive::String(String::from(opts))).tagged(tag), + Value::Primitive(Primitive::String(String::from(opts))).tagged(&tag), ); collected.into_tagged_value() } - Bson::I32(n) => Value::number(n).tagged(tag), - Bson::I64(n) => Value::number(n).tagged(tag), + Bson::I32(n) => Value::number(n).tagged(&tag), + Bson::I64(n) => Value::number(n).tagged(&tag), Bson::Decimal128(n) => { // TODO: this really isn't great, and we should update this to do a higher // fidelity translation let decimal = BigDecimal::from_str(&format!("{}", n)).map_err(|_| { ShellError::range_error( ExpectedRange::BigDecimal, - &n.tagged(tag), + &n.tagged(&tag), format!("converting BSON Decimal128 to BigDecimal"), ) })?; - Value::Primitive(Primitive::Decimal(decimal)).tagged(tag) + Value::Primitive(Primitive::Decimal(decimal)).tagged(&tag) } Bson::JavaScriptCode(js) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(tag.clone()); collected.insert_tagged( "$javascript".to_string(), - Value::Primitive(Primitive::String(String::from(js))).tagged(tag), + Value::Primitive(Primitive::String(String::from(js))).tagged(&tag), ); collected.into_tagged_value() } Bson::JavaScriptCodeWithScope(js, doc) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(tag.clone()); collected.insert_tagged( "$javascript".to_string(), - Value::Primitive(Primitive::String(String::from(js))).tagged(tag), + Value::Primitive(Primitive::String(String::from(js))).tagged(&tag), ); collected.insert_tagged( "$scope".to_string(), - convert_bson_value_to_nu_value(&Bson::Document(doc.to_owned()), tag)?, + convert_bson_value_to_nu_value(&Bson::Document(doc.to_owned()), tag.clone())?, ); collected.into_tagged_value() } Bson::TimeStamp(ts) => { - let mut collected = TaggedDictBuilder::new(tag); - collected.insert_tagged("$timestamp".to_string(), Value::number(ts).tagged(tag)); + let mut collected = TaggedDictBuilder::new(tag.clone()); + collected.insert_tagged("$timestamp".to_string(), Value::number(ts).tagged(&tag)); collected.into_tagged_value() } Bson::Binary(bst, bytes) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(tag.clone()); collected.insert_tagged( "$binary_subtype".to_string(), match bst { BinarySubtype::UserDefined(u) => Value::number(u), _ => Value::Primitive(Primitive::String(binary_subtype_to_string(*bst))), } - .tagged(tag), + .tagged(&tag), ); collected.insert_tagged( "$binary".to_string(), - Value::Primitive(Primitive::Binary(bytes.to_owned())).tagged(tag), + Value::Primitive(Primitive::Binary(bytes.to_owned())).tagged(&tag), ); collected.into_tagged_value() } Bson::ObjectId(obj_id) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(tag.clone()); collected.insert_tagged( "$object_id".to_string(), - 
Value::Primitive(Primitive::String(obj_id.to_hex())).tagged(tag), + Value::Primitive(Primitive::String(obj_id.to_hex())).tagged(&tag), ); collected.into_tagged_value() } - Bson::UtcDatetime(dt) => Value::Primitive(Primitive::Date(*dt)).tagged(tag), + Bson::UtcDatetime(dt) => Value::Primitive(Primitive::Date(*dt)).tagged(&tag), Bson::Symbol(s) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(tag.clone()); collected.insert_tagged( "$symbol".to_string(), - Value::Primitive(Primitive::String(String::from(s))).tagged(tag), + Value::Primitive(Primitive::String(String::from(s))).tagged(&tag), ); collected.into_tagged_value() } @@ -208,13 +208,13 @@ fn from_bson(args: CommandArgs, registry: &CommandRegistry) -> Result - match from_bson_bytes_to_value(vb, tag) { + match from_bson_bytes_to_value(vb, tag.clone()) { Ok(x) => yield ReturnSuccess::value(x), Err(_) => { yield Err(ShellError::labeled_error_with_secondary( "Could not parse as BSON", "input cannot be parsed as BSON", - tag, + tag.clone(), "value originates from here", value_tag, )) @@ -223,7 +223,7 @@ fn from_bson(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - tag, + tag.clone(), "value originates from here", value_tag, )), diff --git a/src/commands/from_csv.rs b/src/commands/from_csv.rs index ea90ab3de1..4bada42dfb 100644 --- a/src/commands/from_csv.rs +++ b/src/commands/from_csv.rs @@ -1,13 +1,14 @@ +use crate::commands::from_structured_data::from_structured_data; use crate::commands::WholeStreamCommand; -use crate::data::{Primitive, TaggedDictBuilder, Value}; +use crate::data::{Primitive, Value}; use crate::prelude::*; -use csv::ReaderBuilder; pub struct FromCSV; #[derive(Deserialize)] pub struct FromCSVArgs { headerless: bool, + separator: Option>, } impl WholeStreamCommand for FromCSV { @@ -16,11 +17,17 @@ impl WholeStreamCommand for FromCSV { } fn signature(&self) -> Signature { - Signature::build("from-csv").switch("headerless") + Signature::build("from-csv") + .named( + "separator", + SyntaxShape::String, + "a character to separate columns, defaults to ','", + ) + .switch("headerless", "don't treat the first row as column names") } fn usage(&self) -> &str { - "Parse text as .csv and create table" + "Parse text as .csv and create table." 
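        // Hedged usage illustration (the file name is hypothetical; the flags come
        // from the signature above):
        //     open data.txt --raw | from-csv --separator ';' --headerless
        // `--separator` must be a single-character string; anything longer is
        // rejected further down in `from_csv`.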
} fn run( @@ -32,107 +39,31 @@ impl WholeStreamCommand for FromCSV { } } -pub fn from_csv_string_to_value( - s: String, - headerless: bool, - tag: impl Into, -) -> Result, csv::Error> { - let mut reader = ReaderBuilder::new() - .has_headers(false) - .from_reader(s.as_bytes()); - let tag = tag.into(); - - let mut fields: VecDeque = VecDeque::new(); - let mut iter = reader.records(); - let mut rows = vec![]; - - if let Some(result) = iter.next() { - let line = result?; - - for (idx, item) in line.iter().enumerate() { - if headerless { - fields.push_back(format!("Column{}", idx + 1)); - } else { - fields.push_back(item.to_string()); - } - } - } - - loop { - if let Some(row_values) = iter.next() { - let row_values = row_values?; - - let mut row = TaggedDictBuilder::new(tag); - - for (idx, entry) in row_values.iter().enumerate() { - row.insert_tagged( - fields.get(idx).unwrap(), - Value::Primitive(Primitive::String(String::from(entry))).tagged(tag), - ); - } - - rows.push(row.into_tagged_value()); - } else { - break; - } - } - - Ok(Tagged::from_item(Value::Table(rows), tag)) -} - fn from_csv( FromCSVArgs { - headerless: skip_headers, + headerless, + separator, }: FromCSVArgs, - RunnableContext { input, name, .. }: RunnableContext, + runnable_context: RunnableContext, ) -> Result { - let name_tag = name; - - let stream = async_stream! { - let values: Vec> = input.values.collect().await; - - let mut concat_string = String::new(); - let mut latest_tag: Option = None; - - for value in values { - let value_tag = value.tag(); - latest_tag = Some(value_tag); - match value.item { - Value::Primitive(Primitive::String(s)) => { - concat_string.push_str(&s); - concat_string.push_str("\n"); - } - _ => yield Err(ShellError::labeled_error_with_secondary( - "Expected a string from pipeline", - "requires string input", - name_tag, - "value originates from here", - value_tag, - )), - - } - } - - match from_csv_string_to_value(concat_string, skip_headers, name_tag) { - Ok(x) => match x { - Tagged { item: Value::Table(list), .. } => { - for l in list { - yield ReturnSuccess::value(l); - } - } - x => yield ReturnSuccess::value(x), - }, - Err(_) => if let Some(last_tag) = latest_tag { - yield Err(ShellError::labeled_error_with_secondary( - "Could not parse as CSV", - "input cannot be parsed as CSV", - name_tag, - "value originates from here", - last_tag, - )) - } , + let sep = match separator { + Some(Tagged { + item: Value::Primitive(Primitive::String(s)), + tag, + .. 
+ }) => { + let vec_s: Vec = s.chars().collect(); + if vec_s.len() != 1 { + return Err(ShellError::labeled_error( + "Expected a single separator char from --separator", + "requires a single character string input", + tag, + )); + }; + vec_s[0] } + _ => ',', }; - Ok(stream.to_output_stream()) + from_structured_data(headerless, sep, "CSV", runnable_context) } diff --git a/src/commands/from_ini.rs b/src/commands/from_ini.rs index d53ad67773..e55bbd45c4 100644 --- a/src/commands/from_ini.rs +++ b/src/commands/from_ini.rs @@ -45,10 +45,13 @@ fn convert_ini_top_to_nu_value( tag: impl Into, ) -> Tagged { let tag = tag.into(); - let mut top_level = TaggedDictBuilder::new(tag); + let mut top_level = TaggedDictBuilder::new(tag.clone()); for (key, value) in v.iter() { - top_level.insert_tagged(key.clone(), convert_ini_second_to_nu_value(value, tag)); + top_level.insert_tagged( + key.clone(), + convert_ini_second_to_nu_value(value, tag.clone()), + ); } top_level.into_tagged_value() @@ -75,7 +78,7 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result { concat_string.push_str(&s); @@ -84,15 +87,15 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - tag, + &tag, "value originates from here", - value_tag, + &value_tag, )), } } - match from_ini_string_to_value(concat_string, tag) { + match from_ini_string_to_value(concat_string, tag.clone()) { Ok(x) => match x { Tagged { item: Value::Table(list), .. } => { for l in list { @@ -105,7 +108,7 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result Signature { - Signature::build("from-json").switch("objects") + Signature::build("from-json").switch("objects", "treat each line as a separate value") } fn usage(&self) -> &str { @@ -35,24 +35,24 @@ fn convert_json_value_to_nu_value(v: &serde_hjson::Value, tag: impl Into) - let tag = tag.into(); match v { - serde_hjson::Value::Null => Value::Primitive(Primitive::Nothing).tagged(tag), - serde_hjson::Value::Bool(b) => Value::boolean(*b).tagged(tag), - serde_hjson::Value::F64(n) => Value::number(n).tagged(tag), - serde_hjson::Value::U64(n) => Value::number(n).tagged(tag), - serde_hjson::Value::I64(n) => Value::number(n).tagged(tag), + serde_hjson::Value::Null => Value::Primitive(Primitive::Nothing).tagged(&tag), + serde_hjson::Value::Bool(b) => Value::boolean(*b).tagged(&tag), + serde_hjson::Value::F64(n) => Value::number(n).tagged(&tag), + serde_hjson::Value::U64(n) => Value::number(n).tagged(&tag), + serde_hjson::Value::I64(n) => Value::number(n).tagged(&tag), serde_hjson::Value::String(s) => { - Value::Primitive(Primitive::String(String::from(s))).tagged(tag) + Value::Primitive(Primitive::String(String::from(s))).tagged(&tag) } serde_hjson::Value::Array(a) => Value::Table( a.iter() - .map(|x| convert_json_value_to_nu_value(x, tag)) + .map(|x| convert_json_value_to_nu_value(x, &tag)) .collect(), ) .tagged(tag), serde_hjson::Value::Object(o) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(&tag); for (k, v) in o.iter() { - collected.insert_tagged(k.clone(), convert_json_value_to_nu_value(v, tag)); + collected.insert_tagged(k.clone(), convert_json_value_to_nu_value(v, &tag)); } collected.into_tagged_value() @@ -82,7 +82,7 @@ fn from_json( for value in values { let value_tag = value.tag(); - latest_tag = Some(value_tag); + latest_tag = Some(value_tag.clone()); match value.item { 
Value::Primitive(Primitive::String(s)) => { concat_string.push_str(&s); @@ -91,9 +91,9 @@ fn from_json( _ => yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - name_tag, + &name_tag, "value originates from here", - value_tag, + &value_tag, )), } @@ -106,15 +106,15 @@ fn from_json( continue; } - match from_json_string_to_value(json_str.to_string(), name_tag) { + match from_json_string_to_value(json_str.to_string(), &name_tag) { Ok(x) => yield ReturnSuccess::value(x), Err(_) => { - if let Some(last_tag) = latest_tag { + if let Some(ref last_tag) = latest_tag { yield Err(ShellError::labeled_error_with_secondary( "Could nnot parse as JSON", "input cannot be parsed as JSON", - name_tag, + &name_tag, "value originates from here", last_tag)) } @@ -122,7 +122,7 @@ fn from_json( } } } else { - match from_json_string_to_value(concat_string, name_tag) { + match from_json_string_to_value(concat_string, name_tag.clone()) { Ok(x) => match x { Tagged { item: Value::Table(list), .. } => { diff --git a/src/commands/from_sqlite.rs b/src/commands/from_sqlite.rs index 20d087bd5c..7b93dc1633 100644 --- a/src/commands/from_sqlite.rs +++ b/src/commands/from_sqlite.rs @@ -138,7 +138,7 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result - match from_sqlite_bytes_to_value(vb, tag) { + match from_sqlite_bytes_to_value(vb, tag.clone()) { Ok(x) => match x { Tagged { item: Value::Table(list), .. } => { for l in list { @@ -151,7 +151,7 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result Result yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - tag, + &tag, "value originates from here", value_tag, )), diff --git a/src/commands/from_ssv.rs b/src/commands/from_ssv.rs new file mode 100644 index 0000000000..37bba215f1 --- /dev/null +++ b/src/commands/from_ssv.rs @@ -0,0 +1,504 @@ +use crate::commands::WholeStreamCommand; +use crate::data::{Primitive, TaggedDictBuilder, Value}; +use crate::prelude::*; + +pub struct FromSSV; + +#[derive(Deserialize)] +pub struct FromSSVArgs { + headerless: bool, + #[serde(rename(deserialize = "aligned-columns"))] + aligned_columns: bool, + #[serde(rename(deserialize = "minimum-spaces"))] + minimum_spaces: Option>, +} + +const STRING_REPRESENTATION: &str = "from-ssv"; +const DEFAULT_MINIMUM_SPACES: usize = 2; + +impl WholeStreamCommand for FromSSV { + fn name(&self) -> &str { + STRING_REPRESENTATION + } + + fn signature(&self) -> Signature { + Signature::build(STRING_REPRESENTATION) + .switch("headerless", "don't treat the first row as column names") + .switch("aligned-columns", "assume columns are aligned") + .named( + "minimum-spaces", + SyntaxShape::Int, + "the mininum spaces to separate columns", + ) + } + + fn usage(&self) -> &str { + "Parse text as space-separated values and create a table. The default minimum number of spaces counted as a separator is 2." 
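        // Hedged usage illustration (the file name is hypothetical; the flags come
        // from the signature above):
        //     open table.txt --raw | from-ssv --aligned-columns --minimum-spaces 1
        // With `--aligned-columns`, column boundaries are taken from where each header
        // starts in the header row (see `parse_aligned_columns` below); without it,
        // rows are split wherever at least `--minimum-spaces` consecutive spaces occur.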
+ } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + args.process(registry, from_ssv)?.run() + } +} + +enum HeaderOptions<'a> { + WithHeaders(&'a str), + WithoutHeaders, +} + +fn parse_aligned_columns<'a>( + lines: impl Iterator, + headers: HeaderOptions, + separator: &str, +) -> Vec> { + fn construct<'a>( + lines: impl Iterator, + headers: Vec<(String, usize)>, + ) -> Vec> { + lines + .map(|l| { + headers + .iter() + .enumerate() + .map(|(i, (header_name, start_position))| { + let val = match headers.get(i + 1) { + Some((_, end)) => { + if *end < l.len() { + l.get(*start_position..*end) + } else { + l.get(*start_position..) + } + } + None => l.get(*start_position..), + } + .unwrap_or("") + .trim() + .into(); + (header_name.clone(), val) + }) + .collect() + }) + .collect() + } + + let find_indices = |line: &str| { + let values = line + .split(&separator) + .map(str::trim) + .filter(|s| !s.is_empty()); + values + .fold( + (0, vec![]), + |(current_pos, mut indices), value| match line[current_pos..].find(value) { + None => (current_pos, indices), + Some(index) => { + let absolute_index = current_pos + index; + indices.push(absolute_index); + (absolute_index + value.len(), indices) + } + }, + ) + .1 + }; + + let parse_with_headers = |lines, headers_raw: &str| { + let indices = find_indices(headers_raw); + let headers = headers_raw + .split(&separator) + .map(str::trim) + .filter(|s| !s.is_empty()) + .map(String::from) + .zip(indices); + + let columns = headers.collect::>(); + + construct(lines, columns) + }; + + let parse_without_headers = |ls: Vec<&str>| { + let mut indices = ls + .iter() + .flat_map(|s| find_indices(*s)) + .collect::>(); + + indices.sort(); + indices.dedup(); + + let headers: Vec<(String, usize)> = indices + .iter() + .enumerate() + .map(|(i, position)| (format!("Column{}", i + 1), *position)) + .collect(); + + construct(ls.iter().map(|s| s.to_owned()), headers) + }; + + match headers { + HeaderOptions::WithHeaders(headers_raw) => parse_with_headers(lines, headers_raw), + HeaderOptions::WithoutHeaders => parse_without_headers(lines.collect()), + } +} + +fn parse_separated_columns<'a>( + lines: impl Iterator, + headers: HeaderOptions, + separator: &str, +) -> Vec> { + fn collect<'a>( + headers: Vec, + rows: impl Iterator, + separator: &str, + ) -> Vec> { + rows.map(|r| { + headers + .iter() + .zip(r.split(separator).map(str::trim).filter(|s| !s.is_empty())) + .map(|(a, b)| (a.to_owned(), b.to_owned())) + .collect() + }) + .collect() + } + + let parse_with_headers = |lines, headers_raw: &str| { + let headers = headers_raw + .split(&separator) + .map(str::trim) + .map(|s| s.to_owned()) + .filter(|s| !s.is_empty()) + .collect(); + collect(headers, lines, separator) + }; + + let parse_without_headers = |ls: Vec<&str>| { + let num_columns = ls.iter().map(|r| r.len()).max().unwrap_or(0); + + let headers = (1..=num_columns) + .map(|i| format!("Column{}", i)) + .collect::>(); + collect(headers, ls.iter().map(|s| s.as_ref()), separator) + }; + + match headers { + HeaderOptions::WithHeaders(headers_raw) => parse_with_headers(lines, headers_raw), + HeaderOptions::WithoutHeaders => parse_without_headers(lines.collect()), + } +} + +fn string_to_table( + s: &str, + headerless: bool, + aligned_columns: bool, + split_at: usize, +) -> Option>> { + let mut lines = s.lines().filter(|l| !l.trim().is_empty()); + let separator = " ".repeat(std::cmp::max(split_at, 1)); + + let (ls, header_options) = if headerless { + (lines, HeaderOptions::WithoutHeaders) + 
} else { + let headers = lines.next()?; + (lines, HeaderOptions::WithHeaders(headers)) + }; + + let f = if aligned_columns { + parse_aligned_columns + } else { + parse_separated_columns + }; + + let parsed = f(ls, header_options, &separator); + match parsed.len() { + 0 => None, + _ => Some(parsed), + } +} + +fn from_ssv_string_to_value( + s: &str, + headerless: bool, + aligned_columns: bool, + split_at: usize, + tag: impl Into, +) -> Option> { + let tag = tag.into(); + let rows = string_to_table(s, headerless, aligned_columns, split_at)? + .iter() + .map(|row| { + let mut tagged_dict = TaggedDictBuilder::new(&tag); + for (col, entry) in row { + tagged_dict.insert_tagged( + col, + Value::Primitive(Primitive::String(String::from(entry))).tagged(&tag), + ) + } + tagged_dict.into_tagged_value() + }) + .collect(); + + Some(Value::Table(rows).tagged(&tag)) +} + +fn from_ssv( + FromSSVArgs { + headerless, + aligned_columns, + minimum_spaces, + }: FromSSVArgs, + RunnableContext { input, name, .. }: RunnableContext, +) -> Result { + let stream = async_stream! { + let values: Vec> = input.values.collect().await; + let mut concat_string = String::new(); + let mut latest_tag: Option = None; + let split_at = match minimum_spaces { + Some(number) => number.item, + None => DEFAULT_MINIMUM_SPACES + }; + + for value in values { + let value_tag = value.tag(); + latest_tag = Some(value_tag.clone()); + match value.item { + Value::Primitive(Primitive::String(s)) => { + concat_string.push_str(&s); + } + _ => yield Err(ShellError::labeled_error_with_secondary ( + "Expected a string from pipeline", + "requires string input", + &name, + "value originates from here", + &value_tag + )), + } + } + + match from_ssv_string_to_value(&concat_string, headerless, aligned_columns, split_at, name.clone()) { + Some(x) => match x { + Tagged { item: Value::Table(list), ..} => { + for l in list { yield ReturnSuccess::value(l) } + } + x => yield ReturnSuccess::value(x) + }, + None => if let Some(tag) = latest_tag { + yield Err(ShellError::labeled_error_with_secondary( + "Could not parse as SSV", + "input cannot be parsed ssv", + &name, + "value originates from here", + &tag, + )) + }, + } + }; + + Ok(stream.to_output_stream()) +} + +#[cfg(test)] +mod tests { + use super::*; + fn owned(x: &str, y: &str) -> (String, String) { + (String::from(x), String::from(y)) + } + + #[test] + fn it_trims_empty_and_whitespace_only_lines() { + let input = r#" + + a b + + 1 2 + + 3 4 + "#; + let result = string_to_table(input, false, true, 1); + assert_eq!( + result, + Some(vec![ + vec![owned("a", "1"), owned("b", "2")], + vec![owned("a", "3"), owned("b", "4")] + ]) + ); + } + + #[test] + fn it_deals_with_single_column_input() { + let input = r#" + a + 1 + 2 + "#; + let result = string_to_table(input, false, true, 1); + assert_eq!( + result, + Some(vec![vec![owned("a", "1")], vec![owned("a", "2")]]) + ); + } + + #[test] + fn it_uses_first_row_as_data_when_headerless() { + let input = r#" + a b + 1 2 + 3 4 + "#; + let result = string_to_table(input, true, true, 1); + assert_eq!( + result, + Some(vec![ + vec![owned("Column1", "a"), owned("Column2", "b")], + vec![owned("Column1", "1"), owned("Column2", "2")], + vec![owned("Column1", "3"), owned("Column2", "4")] + ]) + ); + } + + #[test] + fn it_returns_none_given_an_empty_string() { + let input = ""; + let result = string_to_table(input, true, true, 1); + assert!(result.is_none()); + } + + #[test] + fn it_allows_a_predefined_number_of_spaces() { + let input = r#" + column a column b + entry 1 entry 
number 2 + 3 four + "#; + + let result = string_to_table(input, false, true, 3); + assert_eq!( + result, + Some(vec![ + vec![ + owned("column a", "entry 1"), + owned("column b", "entry number 2") + ], + vec![owned("column a", "3"), owned("column b", "four")] + ]) + ); + } + + #[test] + fn it_trims_remaining_separator_space() { + let input = r#" + colA colB colC + val1 val2 val3 + "#; + + let trimmed = |s: &str| s.trim() == s; + + let result = string_to_table(input, false, true, 2).unwrap(); + assert!(result + .iter() + .all(|row| row.iter().all(|(a, b)| trimmed(a) && trimmed(b)))) + } + + #[test] + fn it_keeps_empty_columns() { + let input = r#" + colA col B col C + val2 val3 + val4 val 5 val 6 + val7 val8 + "#; + + let result = string_to_table(input, false, true, 2).unwrap(); + assert_eq!( + result, + vec![ + vec![ + owned("colA", ""), + owned("col B", "val2"), + owned("col C", "val3") + ], + vec![ + owned("colA", "val4"), + owned("col B", "val 5"), + owned("col C", "val 6") + ], + vec![ + owned("colA", "val7"), + owned("col B", ""), + owned("col C", "val8") + ], + ] + ) + } + + #[test] + fn it_uses_the_full_final_column() { + let input = r#" + colA col B + val1 val2 trailing value that should be included + "#; + + let result = string_to_table(input, false, true, 2).unwrap(); + assert_eq!( + result, + vec![vec![ + owned("colA", "val1"), + owned("col B", "val2 trailing value that should be included"), + ],] + ) + } + + #[test] + fn it_handles_empty_values_when_headerless_and_aligned_columns() { + let input = r#" + a multi-word value b d + 1 3-3 4 + last + "#; + + let result = string_to_table(input, true, true, 2).unwrap(); + assert_eq!( + result, + vec![ + vec![ + owned("Column1", "a multi-word value"), + owned("Column2", "b"), + owned("Column3", ""), + owned("Column4", "d"), + owned("Column5", "") + ], + vec![ + owned("Column1", "1"), + owned("Column2", ""), + owned("Column3", "3-3"), + owned("Column4", "4"), + owned("Column5", "") + ], + vec![ + owned("Column1", ""), + owned("Column2", ""), + owned("Column3", ""), + owned("Column4", ""), + owned("Column5", "last") + ], + ] + ) + } + + #[test] + fn input_is_parsed_correctly_if_either_option_works() { + let input = r#" + docker-registry docker-registry=default docker-registry=default 172.30.78.158 5000/TCP + kubernetes component=apiserver,provider=kubernetes 172.30.0.2 443/TCP + kubernetes-ro component=apiserver,provider=kubernetes 172.30.0.1 80/TCP + "#; + + let aligned_columns_headerless = string_to_table(input, true, true, 2).unwrap(); + let separator_headerless = string_to_table(input, true, false, 2).unwrap(); + let aligned_columns_with_headers = string_to_table(input, false, true, 2).unwrap(); + let separator_with_headers = string_to_table(input, false, false, 2).unwrap(); + assert_eq!(aligned_columns_headerless, separator_headerless); + assert_eq!(aligned_columns_with_headers, separator_with_headers); + } +} diff --git a/src/commands/from_structured_data.rs b/src/commands/from_structured_data.rs new file mode 100644 index 0000000000..4799a40993 --- /dev/null +++ b/src/commands/from_structured_data.rs @@ -0,0 +1,97 @@ +use crate::data::{Primitive, TaggedDictBuilder, Value}; +use crate::prelude::*; +use csv::ReaderBuilder; + +fn from_stuctured_string_to_value( + s: String, + headerless: bool, + separator: char, + tag: impl Into, +) -> Result, csv::Error> { + let mut reader = ReaderBuilder::new() + .has_headers(!headerless) + .delimiter(separator as u8) + .from_reader(s.as_bytes()); + let tag = tag.into(); + + let headers = if 
headerless { + (1..=reader.headers()?.len()) + .map(|i| format!("Column{}", i)) + .collect::>() + } else { + reader.headers()?.iter().map(String::from).collect() + }; + + let mut rows = vec![]; + for row in reader.records() { + let mut tagged_row = TaggedDictBuilder::new(&tag); + for (value, header) in row?.iter().zip(headers.iter()) { + tagged_row.insert_tagged( + header, + Value::Primitive(Primitive::String(String::from(value))).tagged(&tag), + ) + } + rows.push(tagged_row.into_tagged_value()); + } + + Ok(Value::Table(rows).tagged(&tag)) +} + +pub fn from_structured_data( + headerless: bool, + sep: char, + format_name: &'static str, + RunnableContext { input, name, .. }: RunnableContext, +) -> Result { + let name_tag = name; + + let stream = async_stream! { + let values: Vec> = input.values.collect().await; + + let mut concat_string = String::new(); + let mut latest_tag: Option = None; + + for value in values { + let value_tag = value.tag(); + latest_tag = Some(value_tag.clone()); + match value.item { + Value::Primitive(Primitive::String(s)) => { + concat_string.push_str(&s); + concat_string.push_str("\n"); + } + _ => yield Err(ShellError::labeled_error_with_secondary( + "Expected a string from pipeline", + "requires string input", + name_tag.clone(), + "value originates from here", + value_tag.clone(), + )), + + } + } + + match from_stuctured_string_to_value(concat_string, headerless, sep, name_tag.clone()) { + Ok(x) => match x { + Tagged { item: Value::Table(list), .. } => { + for l in list { + yield ReturnSuccess::value(l); + } + } + x => yield ReturnSuccess::value(x), + }, + Err(_) => if let Some(last_tag) = latest_tag { + let line_one = format!("Could not parse as {}", format_name); + let line_two = format!("input cannot be parsed as {}", format_name); + yield Err(ShellError::labeled_error_with_secondary( + line_one, + line_two, + name_tag.clone(), + "value originates from here", + last_tag.clone(), + )) + } , + } + }; + + Ok(stream.to_output_stream()) +} diff --git a/src/commands/from_toml.rs b/src/commands/from_toml.rs index c0098d9267..2cfd059165 100644 --- a/src/commands/from_toml.rs +++ b/src/commands/from_toml.rs @@ -36,7 +36,7 @@ pub fn convert_toml_value_to_nu_value(v: &toml::Value, tag: impl Into) -> T toml::Value::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(tag), toml::Value::Array(a) => Value::Table( a.iter() - .map(|x| convert_toml_value_to_nu_value(x, tag)) + .map(|x| convert_toml_value_to_nu_value(x, &tag)) .collect(), ) .tagged(tag), @@ -44,10 +44,10 @@ pub fn convert_toml_value_to_nu_value(v: &toml::Value, tag: impl Into) -> T Value::Primitive(Primitive::String(dt.to_string())).tagged(tag) } toml::Value::Table(t) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(&tag); for (k, v) in t.iter() { - collected.insert_tagged(k.clone(), convert_toml_value_to_nu_value(v, tag)); + collected.insert_tagged(k.clone(), convert_toml_value_to_nu_value(v, &tag)); } collected.into_tagged_value() @@ -79,7 +79,7 @@ pub fn from_toml( for value in values { let value_tag = value.tag(); - latest_tag = Some(value_tag); + latest_tag = Some(value_tag.clone()); match value.item { Value::Primitive(Primitive::String(s)) => { concat_string.push_str(&s); @@ -88,15 +88,15 @@ pub fn from_toml( _ => yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - tag, + &tag, "value originates from here", - value_tag, + &value_tag, )), } } - match 
from_toml_string_to_value(concat_string, tag) { + match from_toml_string_to_value(concat_string, tag.clone()) { Ok(x) => match x { Tagged { item: Value::Table(list), .. } => { for l in list { @@ -109,7 +109,7 @@ pub fn from_toml( yield Err(ShellError::labeled_error_with_secondary( "Could not parse as TOML", "input cannot be parsed as TOML", - tag, + &tag, "value originates from here", last_tag, )) diff --git a/src/commands/from_tsv.rs b/src/commands/from_tsv.rs index bba532d17b..7931b8ef38 100644 --- a/src/commands/from_tsv.rs +++ b/src/commands/from_tsv.rs @@ -1,7 +1,6 @@ +use crate::commands::from_structured_data::from_structured_data; use crate::commands::WholeStreamCommand; -use crate::data::{Primitive, TaggedDictBuilder, Value}; use crate::prelude::*; -use csv::ReaderBuilder; pub struct FromTSV; @@ -16,7 +15,8 @@ impl WholeStreamCommand for FromTSV { } fn signature(&self) -> Signature { - Signature::build("from-tsv").switch("headerless") + Signature::build("from-tsv") + .switch("headerless", "don't treat the first row as column names") } fn usage(&self) -> &str { @@ -32,108 +32,9 @@ impl WholeStreamCommand for FromTSV { } } -pub fn from_tsv_string_to_value( - s: String, - headerless: bool, - tag: impl Into, -) -> Result, csv::Error> { - let mut reader = ReaderBuilder::new() - .has_headers(false) - .delimiter(b'\t') - .from_reader(s.as_bytes()); - let tag = tag.into(); - - let mut fields: VecDeque = VecDeque::new(); - let mut iter = reader.records(); - let mut rows = vec![]; - - if let Some(result) = iter.next() { - let line = result?; - - for (idx, item) in line.iter().enumerate() { - if headerless { - fields.push_back(format!("Column{}", idx + 1)); - } else { - fields.push_back(item.to_string()); - } - } - } - - loop { - if let Some(row_values) = iter.next() { - let row_values = row_values?; - - let mut row = TaggedDictBuilder::new(tag); - - for (idx, entry) in row_values.iter().enumerate() { - row.insert_tagged( - fields.get(idx).unwrap(), - Value::Primitive(Primitive::String(String::from(entry))).tagged(tag), - ); - } - - rows.push(row.into_tagged_value()); - } else { - break; - } - } - - Ok(Tagged::from_item(Value::Table(rows), tag)) -} - fn from_tsv( - FromTSVArgs { - headerless: skip_headers, - }: FromTSVArgs, - RunnableContext { input, name, .. }: RunnableContext, + FromTSVArgs { headerless }: FromTSVArgs, + runnable_context: RunnableContext, ) -> Result { - let name_tag = name; - - let stream = async_stream! { - let values: Vec> = input.values.collect().await; - - let mut concat_string = String::new(); - let mut latest_tag: Option = None; - - for value in values { - let value_tag = value.tag(); - latest_tag = Some(value_tag); - match value.item { - Value::Primitive(Primitive::String(s)) => { - concat_string.push_str(&s); - concat_string.push_str("\n"); - } - _ => yield Err(ShellError::labeled_error_with_secondary( - "Expected a string from pipeline", - "requires string input", - name_tag, - "value originates from here", - value_tag, - )), - - } - } - - match from_tsv_string_to_value(concat_string, skip_headers, name_tag) { - Ok(x) => match x { - Tagged { item: Value::Table(list), .. 
} => { - for l in list { - yield ReturnSuccess::value(l); - } - } - x => yield ReturnSuccess::value(x), - }, - Err(_) => if let Some(last_tag) = latest_tag { - yield Err(ShellError::labeled_error_with_secondary( - "Could not parse as TSV", - "input cannot be parsed as TSV", - name_tag, - "value originates from here", - last_tag, - )) - } , - } - }; - - Ok(stream.to_output_stream()) + from_structured_data(headerless, '\t', "TSV", runnable_context) } diff --git a/src/commands/from_url.rs b/src/commands/from_url.rs index 662508deb6..ad23ea5b53 100644 --- a/src/commands/from_url.rs +++ b/src/commands/from_url.rs @@ -39,7 +39,7 @@ fn from_url(args: CommandArgs, registry: &CommandRegistry) -> Result { concat_string.push_str(&s); @@ -47,9 +47,9 @@ fn from_url(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - tag, + &tag, "value originates from here", - value_tag, + &value_tag, )), } diff --git a/src/commands/from_xml.rs b/src/commands/from_xml.rs index 5bba67b42a..e99e5664e5 100644 --- a/src/commands/from_xml.rs +++ b/src/commands/from_xml.rs @@ -34,7 +34,7 @@ fn from_node_to_value<'a, 'd>(n: &roxmltree::Node<'a, 'd>, tag: impl Into) let mut children_values = vec![]; for c in n.children() { - children_values.push(from_node_to_value(&c, tag)); + children_values.push(from_node_to_value(&c, &tag)); } let children_values: Vec> = children_values @@ -94,7 +94,7 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result { concat_string.push_str(&s); @@ -103,15 +103,15 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - tag, + &tag, "value originates from here", - value_tag, + &value_tag, )), } } - match from_xml_string_to_value(concat_string, tag) { + match from_xml_string_to_value(concat_string, tag.clone()) { Ok(x) => match x { Tagged { item: Value::Table(list), .. } => { for l in list { @@ -124,9 +124,9 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result Result) -> Tagged { + Value::string(input.into()).tagged_unknown() + } + + fn row(entries: IndexMap>) -> Tagged { + Value::row(entries).tagged_unknown() + } + + fn table(list: &Vec>) -> Tagged { + Value::table(list).tagged_unknown() + } + + fn parse(xml: &str) -> Tagged { + from_xml::from_xml_string_to_value(xml.to_string(), Tag::unknown()).unwrap() + } + + #[test] + fn parses_empty_element() { + let source = ""; + + assert_eq!( + parse(source), + row(indexmap! { + "nu".into() => table(&vec![]) + }) + ); + } + + #[test] + fn parses_element_with_text() { + let source = "La era de los tres caballeros"; + + assert_eq!( + parse(source), + row(indexmap! { + "nu".into() => table(&vec![string("La era de los tres caballeros")]) + }) + ); + } + + #[test] + fn parses_element_with_elements() { + let source = "\ + + Andrés + Jonathan + Yehuda +"; + + assert_eq!( + parse(source), + row(indexmap! { + "nu".into() => table(&vec![ + row(indexmap! {"dev".into() => table(&vec![string("Andrés")])}), + row(indexmap! {"dev".into() => table(&vec![string("Jonathan")])}), + row(indexmap! 
{"dev".into() => table(&vec![string("Yehuda")])}) + ]) + }) + ); + } +} diff --git a/src/commands/from_yaml.rs b/src/commands/from_yaml.rs index 9e156bbc75..3ba8033ddf 100644 --- a/src/commands/from_yaml.rs +++ b/src/commands/from_yaml.rs @@ -64,17 +64,17 @@ fn convert_yaml_value_to_nu_value(v: &serde_yaml::Value, tag: impl Into) -> serde_yaml::Value::String(s) => Value::string(s).tagged(tag), serde_yaml::Value::Sequence(a) => Value::Table( a.iter() - .map(|x| convert_yaml_value_to_nu_value(x, tag)) + .map(|x| convert_yaml_value_to_nu_value(x, &tag)) .collect(), ) .tagged(tag), serde_yaml::Value::Mapping(t) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(&tag); for (k, v) in t.iter() { match k { serde_yaml::Value::String(k) => { - collected.insert_tagged(k.clone(), convert_yaml_value_to_nu_value(v, tag)); + collected.insert_tagged(k.clone(), convert_yaml_value_to_nu_value(v, &tag)); } _ => unimplemented!("Unknown key type"), } @@ -108,7 +108,7 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result { concat_string.push_str(&s); @@ -117,15 +117,15 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - tag, + &tag, "value originates from here", - value_tag, + &value_tag, )), } } - match from_yaml_string_to_value(concat_string, tag) { + match from_yaml_string_to_value(concat_string, tag.clone()) { Ok(x) => match x { Tagged { item: Value::Table(list), .. } => { for l in list { @@ -138,9 +138,9 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result, - rest: Vec>, + member: ColumnPath, + rest: Vec, } impl WholeStreamCommand for Get { @@ -18,8 +20,15 @@ impl WholeStreamCommand for Get { fn signature(&self) -> Signature { Signature::build("get") - .required("member", SyntaxShape::Member) - .rest(SyntaxShape::Member) + .required( + "member", + SyntaxShape::ColumnPath, + "the path to the data to get", + ) + .rest( + SyntaxShape::ColumnPath, + "optionally return additional data by path", + ) } fn usage(&self) -> &str { @@ -35,59 +44,98 @@ impl WholeStreamCommand for Get { } } -fn get_member(path: &Tagged, obj: &Tagged) -> Result, ShellError> { - let mut current = Some(obj); - for p in path.split(".") { - if let Some(obj) = current { - current = match obj.get_data_by_key(p) { - Some(v) => Some(v), - None => - // Before we give up, see if they gave us a path that matches a field name by itself - { - match obj.get_data_by_key(&path.item) { - Some(v) => return Ok(v.clone()), - None => { - let possibilities = obj.data_descriptors(); +pub type ColumnPath = Vec>; - let mut possible_matches: Vec<_> = possibilities - .iter() - .map(|x| { - (natural::distance::levenshtein_distance(x, &path.item), x) - }) - .collect(); +pub fn get_column_path( + path: &ColumnPath, + obj: &Tagged, +) -> Result, ShellError> { + let fields = path.clone(); - possible_matches.sort(); + let value = obj.get_data_by_column_path( + obj.tag(), + path, + Box::new(move |(obj_source, column_path_tried)| { + match obj_source { + Value::Table(rows) => { + let total = rows.len(); + let end_tag = match fields.iter().nth_back(if fields.len() > 2 { 1 } else { 0 }) + { + Some(last_field) => last_field.tag(), + None => column_path_tried.tag(), + }; - if possible_matches.len() > 0 { - return Err(ShellError::labeled_error( - "Unknown column", - format!("did you mean '{}'?", possible_matches[0].1), - path.tag(), - )); + return 
ShellError::labeled_error_with_secondary( + "Row not found", + format!( + "There isn't a row indexed at '{}'", + match &*column_path_tried { + Value::Primitive(primitive) => primitive.format(None), + _ => String::from(""), } - None - } - } + ), + column_path_tried.tag(), + format!("The table only has {} rows (0..{})", total, total - 1), + end_tag, + ); } + _ => {} } - } - } - match current { - Some(v) => Ok(v.clone()), - None => match obj { - // If its None check for certain values. - Tagged { - item: Value::Primitive(Primitive::String(_)), - .. - } => Ok(obj.clone()), - Tagged { - item: Value::Primitive(Primitive::Path(_)), - .. - } => Ok(obj.clone()), - _ => Ok(Value::nothing().tagged(obj.tag)), + match &column_path_tried { + Tagged { + item: Value::Primitive(Primitive::Int(index)), + .. + } => { + return ShellError::labeled_error( + "No rows available", + format!( + "Not a table. Perhaps you meant to get the column '{}' instead?", + index + ), + column_path_tried.tag(), + ) + } + _ => match did_you_mean(&obj_source, &column_path_tried) { + Some(suggestions) => { + return ShellError::labeled_error( + "Unknown column", + format!("did you mean '{}'?", suggestions[0].1), + tag_for_tagged_list(fields.iter().map(|p| p.tag())), + ) + } + None => { + return ShellError::labeled_error( + "Unknown column", + "row does not contain this column", + tag_for_tagged_list(fields.iter().map(|p| p.tag())), + ) + } + }, + } + }), + ); + + let res = match value { + Ok(fetched) => match fetched { + Some(Tagged { item: v, .. }) => Ok((v.clone()).tagged(&obj.tag)), + None => match obj { + // If its None check for certain values. + Tagged { + item: Value::Primitive(Primitive::String(_)), + .. + } => Ok(obj.clone()), + Tagged { + item: Value::Primitive(Primitive::Path(_)), + .. + } => Ok(obj.clone()), + _ => Ok(Value::nothing().tagged(&obj.tag)), + }, }, - } + Err(reason) => Err(reason), + }; + + res } pub fn get( @@ -97,6 +145,8 @@ pub fn get( }: GetArgs, RunnableContext { input, .. }: RunnableContext, ) -> Result { + trace!("get {:?} {:?}", member, fields); + let stream = input .values .map(move |item| { @@ -104,26 +154,36 @@ pub fn get( let member = vec![member.clone()]; - let fields = vec![&member, &fields] + let column_paths = vec![&member, &fields] .into_iter() .flatten() - .collect::>>(); + .collect::>(); - for field in &fields { - match get_member(field, &item) { - Ok(Tagged { - item: Value::Table(l), - .. - }) => { - for item in l { - result.push_back(ReturnSuccess::value(item.clone())); + for path in column_paths { + let res = get_column_path(&path, &item); + + match res { + Ok(got) => match got { + Tagged { + item: Value::Table(rows), + .. 
+ } => { + for row in rows { + result.push_back(ReturnSuccess::value( + Tagged { + item: row.item, + tag: Tag::from(&item.tag), + } + .map_anchored(&item.tag.anchor), + )) + } } - } - Ok(x) => result.push_back(ReturnSuccess::value(x.clone())), - Err(x) => result.push_back(Err(x)), + other => result + .push_back(ReturnSuccess::value((*other).clone().tagged(&item.tag))), + }, + Err(reason) => result.push_back(Err(reason)), } } - result }) .flatten(); diff --git a/src/commands/group_by.rs b/src/commands/group_by.rs new file mode 100644 index 0000000000..07e74841b1 --- /dev/null +++ b/src/commands/group_by.rs @@ -0,0 +1,217 @@ +use crate::commands::WholeStreamCommand; +use crate::data::TaggedDictBuilder; +use crate::errors::ShellError; +use crate::prelude::*; + +pub struct GroupBy; + +#[derive(Deserialize)] +pub struct GroupByArgs { + column_name: Tagged, +} + +impl WholeStreamCommand for GroupBy { + fn name(&self) -> &str { + "group-by" + } + + fn signature(&self) -> Signature { + Signature::build("group-by").required( + "column_name", + SyntaxShape::String, + "the name of the column to group by", + ) + } + + fn usage(&self) -> &str { + "Creates a new table with the data from the table rows grouped by the column given." + } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + args.process(registry, group_by)?.run() + } +} + +pub fn group_by( + GroupByArgs { column_name }: GroupByArgs, + RunnableContext { input, name, .. }: RunnableContext, +) -> Result { + let stream = async_stream! { + let values: Vec> = input.values.collect().await; + + if values.is_empty() { + yield Err(ShellError::labeled_error( + "Expected table from pipeline", + "requires a table input", + column_name.span() + )) + } else { + match group(&column_name, values, name) { + Ok(grouped) => yield ReturnSuccess::value(grouped), + Err(err) => yield Err(err) + } + } + }; + + Ok(stream.to_output_stream()) +} + +pub fn group( + column_name: &Tagged, + values: Vec>, + tag: impl Into, +) -> Result, ShellError> { + let tag = tag.into(); + + let mut groups = indexmap::IndexMap::new(); + + for value in values { + let group_key = value.get_data_by_key(column_name); + + if group_key.is_none() { + let possibilities = value.data_descriptors(); + + let mut possible_matches: Vec<_> = possibilities + .iter() + .map(|x| (natural::distance::levenshtein_distance(x, column_name), x)) + .collect(); + + possible_matches.sort(); + + if possible_matches.len() > 0 { + return Err(ShellError::labeled_error( + "Unknown column", + format!("did you mean '{}'?", possible_matches[0].1), + column_name.tag(), + )); + } else { + return Err(ShellError::labeled_error( + "Unknown column", + "row does not contain this column", + column_name.tag(), + )); + } + } + + let group_key = group_key.unwrap().as_string()?; + let group = groups.entry(group_key).or_insert(vec![]); + group.push(value); + } + + let mut out = TaggedDictBuilder::new(&tag); + + for (k, v) in groups.iter() { + out.insert(k, Value::table(v)); + } + + Ok(out.into_tagged_value()) +} + +#[cfg(test)] +mod tests { + + use crate::commands::group_by::group; + use crate::data::meta::*; + use crate::Value; + use indexmap::IndexMap; + + fn string(input: impl Into) -> Tagged { + Value::string(input.into()).tagged_unknown() + } + + fn row(entries: IndexMap>) -> Tagged { + Value::row(entries).tagged_unknown() + } + + fn table(list: &Vec>) -> Tagged { + Value::table(list).tagged_unknown() + } + + fn nu_releases_commiters() -> Vec> { + vec![ + row( + indexmap! 
{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")}, + ), + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}, + ), + ] + } + + #[test] + fn groups_table_by_date_column() { + let for_key = String::from("date").tagged_unknown(); + + assert_eq!( + group(&for_key, nu_releases_commiters(), Tag::unknown()).unwrap(), + row(indexmap! { + "August 23-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}), + row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}), + row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}) + ]), + "October 10-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}), + row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}), + row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")}) + ]), + "Sept 24-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")}), + row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")}), + row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")}) + ]), + }) + ); + } + + #[test] + fn groups_table_by_country_column() { + let for_key = String::from("country").tagged_unknown(); + + assert_eq!( + group(&for_key, nu_releases_commiters(), Tag::unknown()).unwrap(), + row(indexmap! 
{ + "EC".into() => table(&vec![ + row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}), + row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")}), + row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")}) + ]), + "NZ".into() => table(&vec![ + row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}), + row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}), + row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")}) + ]), + "US".into() => table(&vec![ + row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}), + row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")}), + row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}), + ]), + }) + ); + } +} diff --git a/src/commands/help.rs b/src/commands/help.rs index d780f13459..596bcbf930 100644 --- a/src/commands/help.rs +++ b/src/commands/help.rs @@ -12,7 +12,7 @@ impl PerItemCommand for Help { } fn signature(&self) -> registry::Signature { - Signature::build("help").rest(SyntaxShape::Any) + Signature::build("help").rest(SyntaxShape::Any, "the name of command(s) to get help on") } fn usage(&self) -> &str { @@ -26,7 +26,7 @@ impl PerItemCommand for Help { _raw_args: &RawCommandArgs, _input: Tagged, ) -> Result { - let tag = call_info.name_tag; + let tag = &call_info.name_tag; match call_info.args.nth(0) { Some(Tagged { @@ -61,12 +61,9 @@ impl PerItemCommand for Help { let mut one_liner = String::new(); one_liner.push_str(&signature.name); one_liner.push_str(" "); - if signature.named.len() > 0 { - one_liner.push_str("{flags} "); - } - for positional in signature.positional { - match positional { + for positional in &signature.positional { + match &positional.0 { PositionalType::Mandatory(name, _m) => { one_liner.push_str(&format!("<{}> ", name)); } @@ -77,25 +74,70 @@ impl PerItemCommand for Help { } if signature.rest_positional.is_some() { - one_liner.push_str(" ...args"); + one_liner.push_str(&format!(" ...args",)); } + + if signature.named.len() > 0 { + one_liner.push_str("{flags} "); + } + long_desc.push_str(&format!("\nUsage:\n > {}\n", one_liner)); + if signature.positional.len() > 0 || signature.rest_positional.is_some() { + long_desc.push_str("\nparameters:\n"); + for positional in signature.positional { + match positional.0 { + PositionalType::Mandatory(name, _m) => { + long_desc + .push_str(&format!(" <{}> {}\n", name, positional.1)); + } + PositionalType::Optional(name, _o) => { + long_desc + .push_str(&format!(" ({}) {}\n", name, positional.1)); + } + } + } + if signature.rest_positional.is_some() { + long_desc.push_str(&format!( + " ...args{} {}\n", + if signature.rest_positional.is_some() { + ":" + } else { + "" + }, + signature.rest_positional.unwrap().1 + )); + } + } if signature.named.len() > 0 { long_desc.push_str("\nflags:\n"); for (flag, ty) in signature.named { - match ty { + match ty.0 { NamedType::Switch => { - long_desc.push_str(&format!(" --{}\n", flag)); + long_desc.push_str(&format!( + " --{}{} {}\n", + flag, + if ty.1.len() > 0 { 
":" } else { "" }, + ty.1 + )); } NamedType::Mandatory(m) => { long_desc.push_str(&format!( - " --{} <{}> (required parameter)\n", - flag, m + " --{} <{}> (required parameter){} {}\n", + flag, + m, + if ty.1.len() > 0 { ":" } else { "" }, + ty.1 )); } NamedType::Optional(o) => { - long_desc.push_str(&format!(" --{} <{}>\n", flag, o)); + long_desc.push_str(&format!( + " --{} <{}>{} {}\n", + flag, + o, + if ty.1.len() > 0 { ":" } else { "" }, + ty.1 + )); } } } diff --git a/src/commands/histogram.rs b/src/commands/histogram.rs new file mode 100644 index 0000000000..6933f28a6f --- /dev/null +++ b/src/commands/histogram.rs @@ -0,0 +1,163 @@ +use crate::commands::evaluate_by::evaluate; +use crate::commands::group_by::group; +use crate::commands::map_max_by::map_max; +use crate::commands::reduce_by::reduce; +use crate::commands::t_sort_by::columns_sorted; +use crate::commands::t_sort_by::t_sort; +use crate::commands::WholeStreamCommand; +use crate::data::TaggedDictBuilder; +use crate::errors::ShellError; +use crate::prelude::*; +use num_traits::cast::ToPrimitive; + +pub struct Histogram; + +#[derive(Deserialize)] +pub struct HistogramArgs { + column_name: Tagged, + rest: Vec>, +} + +impl WholeStreamCommand for Histogram { + fn name(&self) -> &str { + "histogram" + } + + fn signature(&self) -> Signature { + Signature::build("histogram") + .required( + "column_name", + SyntaxShape::String, + "the name of the column to graph by", + ) + .rest( + SyntaxShape::Member, + "column name to give the histogram's frequency column", + ) + } + + fn usage(&self) -> &str { + "Creates a new table with a histogram based on the column name passed in." + } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + args.process(registry, histogram)?.run() + } +} + +pub fn histogram( + HistogramArgs { column_name, rest }: HistogramArgs, + RunnableContext { input, name, .. }: RunnableContext, +) -> Result { + let stream = async_stream! { + let values: Vec> = input.values.collect().await; + + let Tagged { item: group_by, .. } = column_name.clone(); + + let groups = group(&column_name, values, &name)?; + let group_labels = columns_sorted(Some(group_by.clone()), &groups, &name); + let sorted = t_sort(Some(group_by.clone()), None, &groups, &name)?; + let evaled = evaluate(&sorted, None, &name)?; + let reduced = reduce(&evaled, None, &name)?; + let maxima = map_max(&reduced, None, &name)?; + let percents = percentages(&reduced, maxima, &name)?; + + match percents { + Tagged { + item: Value::Table(datasets), + .. + } => { + + let mut idx = 0; + + let column_names_supplied: Vec<_> = rest.iter().map(|f| f.item.clone()).collect(); + + let frequency_column_name = if column_names_supplied.is_empty() { + "frecuency".to_string() + } else { + column_names_supplied[0].clone() + }; + + let column = (*column_name).clone(); + + if let Tagged { item: Value::Table(start), .. } = datasets.get(0).unwrap() { + for percentage in start.into_iter() { + + let mut fact = TaggedDictBuilder::new(&name); + fact.insert_tagged(&column, group_labels.get(idx).unwrap().clone()); + + if let Tagged { item: Value::Primitive(Primitive::Int(ref num)), .. 
} = percentage.clone() { + fact.insert(&frequency_column_name, std::iter::repeat("*").take(num.to_i32().unwrap() as usize).collect::()); + } + + idx = idx + 1; + + yield ReturnSuccess::value(fact.into_tagged_value()); + } + } + } + _ => {} + } + }; + + Ok(stream.to_output_stream()) +} + +fn percentages( + values: &Tagged, + max: Tagged, + tag: impl Into, +) -> Result, ShellError> { + let tag = tag.into(); + + let results: Tagged = match values { + Tagged { + item: Value::Table(datasets), + .. + } => { + let datasets: Vec<_> = datasets + .into_iter() + .map(|subsets| match subsets { + Tagged { + item: Value::Table(data), + .. + } => { + let data = data + .into_iter() + .map(|d| match d { + Tagged { + item: Value::Primitive(Primitive::Int(n)), + .. + } => { + let max = match max { + Tagged { + item: Value::Primitive(Primitive::Int(ref maxima)), + .. + } => maxima.to_i32().unwrap(), + _ => 0, + }; + + let n = { n.to_i32().unwrap() * 100 / max }; + + Value::number(n).tagged(&tag) + } + _ => Value::number(0).tagged(&tag), + }) + .collect::>(); + Value::Table(data).tagged(&tag) + } + _ => Value::Table(vec![]).tagged(&tag), + }) + .collect(); + + Value::Table(datasets).tagged(&tag) + } + other => other.clone(), + }; + + Ok(results) +} diff --git a/src/commands/history.rs b/src/commands/history.rs new file mode 100644 index 0000000000..fdc6d655a2 --- /dev/null +++ b/src/commands/history.rs @@ -0,0 +1,49 @@ +use crate::cli::History as HistoryFile; +use crate::commands::PerItemCommand; +use crate::errors::ShellError; +use crate::parser::registry::{self}; +use crate::prelude::*; +use std::fs::File; +use std::io::{BufRead, BufReader}; + +pub struct History; + +impl PerItemCommand for History { + fn name(&self) -> &str { + "history" + } + + fn signature(&self) -> registry::Signature { + Signature::build("history") + } + + fn usage(&self) -> &str { + "Display command history." + } + + fn run( + &self, + call_info: &CallInfo, + _registry: &CommandRegistry, + _raw_args: &RawCommandArgs, + _input: Tagged, + ) -> Result { + let tag = call_info.name_tag.clone(); + + let stream = async_stream! { + let history_path = HistoryFile::path(); + let file = File::open(history_path); + if let Ok(file) = file { + let reader = BufReader::new(file); + for line in reader.lines() { + if let Ok(line) = line { + yield ReturnSuccess::value(Value::string(line).tagged(tag.clone())); + } + } + } else { + yield Err(ShellError::labeled_error("Could not open history", "history file could not be opened", tag.clone())); + } + }; + Ok(stream.to_output_stream()) + } +} diff --git a/src/commands/last.rs b/src/commands/last.rs index 321506846b..abb10f5fce 100644 --- a/src/commands/last.rs +++ b/src/commands/last.rs @@ -7,7 +7,7 @@ pub struct Last; #[derive(Deserialize)] pub struct LastArgs { - amount: Tagged, + rows: Option>, } impl WholeStreamCommand for Last { @@ -16,7 +16,11 @@ impl WholeStreamCommand for Last { } fn signature(&self) -> Signature { - Signature::build("last").required("amount", SyntaxShape::Number) + Signature::build("last").optional( + "rows", + SyntaxShape::Number, + "starting from the back, the number of rows to return", + ) } fn usage(&self) -> &str { @@ -32,13 +36,17 @@ impl WholeStreamCommand for Last { } } -fn last( - LastArgs { amount }: LastArgs, - context: RunnableContext, -) -> Result { +fn last(LastArgs { rows }: LastArgs, context: RunnableContext) -> Result { let stream = async_stream! 
{ let v: Vec<_> = context.input.into_vec().await; - let count = (*amount as usize); + + let rows_desired = if let Some(quantity) = rows { + *quantity + } else { + 1 + }; + + let count = (rows_desired as usize); if count < v.len() { let k = v.len() - count; for x in v[k..].iter() { diff --git a/src/commands/lines.rs b/src/commands/lines.rs index d2a9cdffd1..8375098b70 100644 --- a/src/commands/lines.rs +++ b/src/commands/lines.rs @@ -58,7 +58,7 @@ fn lines(args: CommandArgs, registry: &CommandRegistry) -> Result Signature { - Signature::build("ls").optional("path", SyntaxShape::Pattern) + Signature::build("ls").optional( + "path", + SyntaxShape::Pattern, + "a path to get the directory contents from", + ) } fn usage(&self) -> &str { @@ -34,5 +38,5 @@ impl WholeStreamCommand for LS { } fn ls(LsArgs { path }: LsArgs, context: RunnableContext) -> Result { - context.shell_manager.ls(path, context.name) + context.shell_manager.ls(path, &context) } diff --git a/src/commands/map_max_by.rs b/src/commands/map_max_by.rs new file mode 100644 index 0000000000..ea2fc99219 --- /dev/null +++ b/src/commands/map_max_by.rs @@ -0,0 +1,225 @@ +use crate::commands::WholeStreamCommand; +use crate::parser::hir::SyntaxShape; +use crate::prelude::*; +use num_traits::cast::ToPrimitive; +pub struct MapMaxBy; + +#[derive(Deserialize)] +pub struct MapMaxByArgs { + column_name: Option>, +} + +impl WholeStreamCommand for MapMaxBy { + fn name(&self) -> &str { + "map-max-by" + } + + fn signature(&self) -> Signature { + Signature::build("map-max-by").named( + "column_name", + SyntaxShape::String, + "the name of the column to map-max the table's rows", + ) + } + + fn usage(&self) -> &str { + "Creates a new table with the data from the tables rows maxed by the column given." + } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + args.process(registry, map_max_by)?.run() + } +} + +pub fn map_max_by( + MapMaxByArgs { column_name }: MapMaxByArgs, + RunnableContext { input, name, .. }: RunnableContext, +) -> Result { + let stream = async_stream! { + let values: Vec> = input.values.collect().await; + + + if values.is_empty() { + yield Err(ShellError::labeled_error( + "Expected table from pipeline", + "requires a table input", + name + )) + } else { + + let map_by_column = if let Some(column_to_map) = column_name { + Some(column_to_map.item().clone()) + } else { + None + }; + + match map_max(&values[0], map_by_column, name) { + Ok(table_maxed) => yield ReturnSuccess::value(table_maxed), + Err(err) => yield Err(err) + } + } + }; + + Ok(stream.to_output_stream()) +} + +pub fn map_max( + values: &Tagged, + _map_by_column_name: Option, + tag: impl Into, +) -> Result, ShellError> { + let tag = tag.into(); + + let results: Tagged = match values { + Tagged { + item: Value::Table(datasets), + .. + } => { + let datasets: Vec<_> = datasets + .into_iter() + .map(|subsets| match subsets { + Tagged { + item: Value::Table(data), + .. + } => { + let data = data.into_iter().fold(0, |acc, value| match value { + Tagged { + item: Value::Primitive(Primitive::Int(n)), + .. + } => { + if n.to_i32().unwrap() > acc { + n.to_i32().unwrap() + } else { + acc + } + } + _ => acc, + }); + Value::number(data).tagged(&tag) + } + _ => Value::number(0).tagged(&tag), + }) + .collect(); + + let datasets = datasets.iter().fold(0, |max, value| match value { + Tagged { + item: Value::Primitive(Primitive::Int(n)), + .. 
+ } => { + if n.to_i32().unwrap() > max { + n.to_i32().unwrap() + } else { + max + } + } + _ => max, + }); + Value::number(datasets).tagged(&tag) + } + _ => Value::number(-1).tagged(&tag), + }; + + Ok(results) +} + +#[cfg(test)] +mod tests { + + use crate::commands::evaluate_by::evaluate; + use crate::commands::group_by::group; + use crate::commands::map_max_by::map_max; + use crate::commands::reduce_by::reduce; + use crate::commands::t_sort_by::t_sort; + use crate::data::meta::*; + use crate::prelude::*; + use crate::Value; + use indexmap::IndexMap; + + fn int(s: impl Into) -> Tagged { + Value::int(s).tagged_unknown() + } + + fn string(input: impl Into) -> Tagged { + Value::string(input.into()).tagged_unknown() + } + + fn row(entries: IndexMap>) -> Tagged { + Value::row(entries).tagged_unknown() + } + + fn nu_releases_evaluated_by_default_one() -> Tagged { + evaluate(&nu_releases_sorted_by_date(), None, Tag::unknown()).unwrap() + } + + fn nu_releases_reduced_by_sum() -> Tagged { + reduce( + &nu_releases_evaluated_by_default_one(), + Some(String::from("sum")), + Tag::unknown(), + ) + .unwrap() + } + + fn nu_releases_sorted_by_date() -> Tagged { + let key = String::from("date"); + + t_sort( + Some(key), + None, + &nu_releases_grouped_by_date(), + Tag::unknown(), + ) + .unwrap() + } + + fn nu_releases_grouped_by_date() -> Tagged { + let key = String::from("date").tagged_unknown(); + group(&key, nu_releases_commiters(), Tag::unknown()).unwrap() + } + + fn nu_releases_commiters() -> Vec> { + vec![ + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("September 24-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("September 24-2019")}, + ), + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("September 24-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}, + ), + row( + indexmap! 
{"name".into() => string("JK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}, + ), + ] + } + #[test] + fn maps_and_gets_max_value() { + assert_eq!( + map_max(&nu_releases_reduced_by_sum(), None, Tag::unknown()).unwrap(), + int(4) + ); + } +} diff --git a/src/commands/mkdir.rs b/src/commands/mkdir.rs index 8bf8a97d4a..e801a27530 100644 --- a/src/commands/mkdir.rs +++ b/src/commands/mkdir.rs @@ -17,7 +17,7 @@ impl PerItemCommand for Mkdir { } fn signature(&self) -> Signature { - Signature::build("mkdir").rest(SyntaxShape::Path) + Signature::build("mkdir").rest(SyntaxShape::Path, "the name(s) of the path(s) to create") } fn usage(&self) -> &str { diff --git a/src/commands/mv.rs b/src/commands/mv.rs index 2ace1fa05f..a9a11f5064 100644 --- a/src/commands/mv.rs +++ b/src/commands/mv.rs @@ -20,9 +20,16 @@ impl PerItemCommand for Move { fn signature(&self) -> Signature { Signature::build("mv") - .required("source", SyntaxShape::Pattern) - .required("destination", SyntaxShape::Path) - .named("file", SyntaxShape::Any) + .required( + "source", + SyntaxShape::Pattern, + "the location to move files/directories from", + ) + .required( + "destination", + SyntaxShape::Path, + "the location to move files/directories to", + ) } fn usage(&self) -> &str { diff --git a/src/commands/nth.rs b/src/commands/nth.rs index 18bb6f23af..bcd3057879 100644 --- a/src/commands/nth.rs +++ b/src/commands/nth.rs @@ -16,7 +16,11 @@ impl WholeStreamCommand for Nth { } fn signature(&self) -> Signature { - Signature::build("nth").required("row number", SyntaxShape::Any) + Signature::build("nth").required( + "row number", + SyntaxShape::Any, + "the number of the row to return", + ) } fn usage(&self) -> &str { diff --git a/src/commands/open.rs b/src/commands/open.rs index 254b0bd7b9..3d7e066a26 100644 --- a/src/commands/open.rs +++ b/src/commands/open.rs @@ -7,7 +7,6 @@ use crate::parser::hir::SyntaxShape; use crate::parser::registry::Signature; use crate::prelude::*; use std::path::{Path, PathBuf}; -use uuid::Uuid; pub struct Open; impl PerItemCommand for Open { @@ -17,8 +16,12 @@ impl PerItemCommand for Open { fn signature(&self) -> Signature { Signature::build(self.name()) - .required("path", SyntaxShape::Path) - .switch("raw") + .required( + "path", + SyntaxShape::Path, + "the file path to load values from", + ) + .switch("raw", "load content as a string insead of a table") } fn usage(&self) -> &str { @@ -45,16 +48,18 @@ fn run( let cwd = PathBuf::from(shell_manager.path()); let full_path = PathBuf::from(cwd); - let path = match call_info - .args - .nth(0) - .ok_or_else(|| ShellError::string(&format!("No file or directory specified")))? - { + let path = match call_info.args.nth(0).ok_or_else(|| { + ShellError::labeled_error( + "No file or directory specified", + "for command", + &call_info.name_tag, + ) + })? 
{ file => file, }; let path_buf = path.as_path()?; let path_str = path_buf.display().to_string(); - let path_span = path.span(); + let path_span = path.tag.span; let has_raw = call_info.args.has("raw"); let registry = registry.clone(); let raw_args = raw_args.clone(); @@ -67,7 +72,7 @@ fn run( yield Err(e); return; } - let (file_extension, contents, contents_tag, anchor_location) = result.unwrap(); + let (file_extension, contents, contents_tag) = result.unwrap(); let file_extension = if has_raw { None @@ -77,21 +82,14 @@ fn run( file_extension.or(path_str.split('.').last().map(String::from)) }; - if contents_tag.anchor != uuid::Uuid::nil() { - // If we have loaded something, track its source - yield ReturnSuccess::action(CommandAction::AddAnchorLocation( - contents_tag.anchor, - anchor_location, - )); - } - - let tagged_contents = contents.tagged(contents_tag); + let tagged_contents = contents.tagged(&contents_tag); if let Some(extension) = file_extension { let command_name = format!("from-{}", extension); if let Some(converter) = registry.get_command(&command_name) { let new_args = RawCommandArgs { host: raw_args.host, + ctrl_c: raw_args.ctrl_c, shell_manager: raw_args.shell_manager, call_info: UnevaluatedCallInfo { args: crate::parser::hir::Call { @@ -100,11 +98,10 @@ fn run( named: None }, source: raw_args.call_info.source, - source_map: raw_args.call_info.source_map, name_tag: raw_args.call_info.name_tag, } }; - let mut result = converter.run(new_args.with_input(vec![tagged_contents]), ®istry, false); + let mut result = converter.run(new_args.with_input(vec![tagged_contents]), ®istry); let result_vec: Vec> = result.drain_vec().await; for res in result_vec { match res { @@ -114,7 +111,7 @@ fn run( } } Ok(ReturnSuccess::Value(Tagged { item, .. })) => { - yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag })); + yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() })); } x => yield x, } @@ -134,7 +131,7 @@ pub async fn fetch( cwd: &PathBuf, location: &str, span: Span, -) -> Result<(Option, Value, Tag, AnchorLocation), ShellError> { +) -> Result<(Option, Value, Tag), ShellError> { let mut cwd = cwd.clone(); cwd.push(Path::new(location)); @@ -147,9 +144,8 @@ pub async fn fetch( Value::string(s), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::File(cwd.to_string_lossy().to_string())), }, - AnchorLocation::File(cwd.to_string_lossy().to_string()), )), Err(_) => { //Non utf8 data. 
@@ -166,18 +162,20 @@ pub async fn fetch( Value::string(s), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::File( + cwd.to_string_lossy().to_string(), + )), }, - AnchorLocation::File(cwd.to_string_lossy().to_string()), )), Err(_) => Ok(( None, Value::binary(bytes), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::File( + cwd.to_string_lossy().to_string(), + )), }, - AnchorLocation::File(cwd.to_string_lossy().to_string()), )), } } else { @@ -186,9 +184,10 @@ pub async fn fetch( Value::binary(bytes), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::File( + cwd.to_string_lossy().to_string(), + )), }, - AnchorLocation::File(cwd.to_string_lossy().to_string()), )) } } @@ -204,18 +203,20 @@ pub async fn fetch( Value::string(s), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::File( + cwd.to_string_lossy().to_string(), + )), }, - AnchorLocation::File(cwd.to_string_lossy().to_string()), )), Err(_) => Ok(( None, Value::binary(bytes), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::File( + cwd.to_string_lossy().to_string(), + )), }, - AnchorLocation::File(cwd.to_string_lossy().to_string()), )), } } else { @@ -224,9 +225,10 @@ pub async fn fetch( Value::binary(bytes), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::File( + cwd.to_string_lossy().to_string(), + )), }, - AnchorLocation::File(cwd.to_string_lossy().to_string()), )) } } @@ -235,9 +237,10 @@ pub async fn fetch( Value::binary(bytes), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::File( + cwd.to_string_lossy().to_string(), + )), }, - AnchorLocation::File(cwd.to_string_lossy().to_string()), )), } } diff --git a/src/commands/pick.rs b/src/commands/pick.rs index 605b7f8890..b9c4e53bcc 100644 --- a/src/commands/pick.rs +++ b/src/commands/pick.rs @@ -17,7 +17,7 @@ impl WholeStreamCommand for Pick { } fn signature(&self) -> Signature { - Signature::build("pick").rest(SyntaxShape::Any) + Signature::build("pick").rest(SyntaxShape::Any, "the columns to select from the table") } fn usage(&self) -> &str { diff --git a/src/commands/pivot.rs b/src/commands/pivot.rs index 1a6bb901fb..0556999f2d 100644 --- a/src/commands/pivot.rs +++ b/src/commands/pivot.rs @@ -21,9 +21,12 @@ impl WholeStreamCommand for Pivot { fn signature(&self) -> Signature { Signature::build("pivot") - .switch("header-row") - .switch("ignore-titles") - .rest(SyntaxShape::String) + .switch("header-row", "treat the first row as column names") + .switch("ignore-titles", "don't pivot the column names into values") + .rest( + SyntaxShape::String, + "the names to give columns once pivoted", + ) } fn usage(&self) -> &str { @@ -104,7 +107,7 @@ pub fn pivot(args: PivotArgs, context: RunnableContext) -> Result { let mut result = VecDeque::new(); - result.push_back(Err(ShellError::string(format!( + result.push_back(Err(ShellError::untagged_runtime_error(format!( "Error while processing begin_filter response: {:?} {}", e, input )))); @@ -138,7 +138,7 @@ pub fn filter_plugin( } Err(e) => { let mut result = VecDeque::new(); - result.push_back(Err(ShellError::string(format!( + result.push_back(Err(ShellError::untagged_runtime_error(format!( "Error while reading begin_filter response: {:?}", e )))); @@ -189,7 +189,7 @@ pub fn filter_plugin( }, Err(e) => { let mut result = VecDeque::new(); - result.push_back(Err(ShellError::string(format!( + result.push_back(Err(ShellError::untagged_runtime_error(format!( "Error while processing end_filter response: {:?} {}", e, 
input )))); @@ -199,7 +199,7 @@ pub fn filter_plugin( } Err(e) => { let mut result = VecDeque::new(); - result.push_back(Err(ShellError::string(format!( + result.push_back(Err(ShellError::untagged_runtime_error(format!( "Error while reading end_filter: {:?}", e )))); @@ -236,7 +236,7 @@ pub fn filter_plugin( }, Err(e) => { let mut result = VecDeque::new(); - result.push_back(Err(ShellError::string(format!( + result.push_back(Err(ShellError::untagged_runtime_error(format!( "Error while processing filter response: {:?} {}", e, input )))); @@ -246,7 +246,7 @@ pub fn filter_plugin( } Err(e) => { let mut result = VecDeque::new(); - result.push_back(Err(ShellError::string(format!( + result.push_back(Err(ShellError::untagged_runtime_error(format!( "Error while reading filter response: {:?}", e )))); diff --git a/src/commands/post.rs b/src/commands/post.rs index 5a77afd14b..92e4e8f135 100644 --- a/src/commands/post.rs +++ b/src/commands/post.rs @@ -25,13 +25,25 @@ impl PerItemCommand for Post { fn signature(&self) -> Signature { Signature::build(self.name()) - .required("path", SyntaxShape::Any) - .required("body", SyntaxShape::Any) - .named("user", SyntaxShape::Any) - .named("password", SyntaxShape::Any) - .named("content-type", SyntaxShape::Any) - .named("content-length", SyntaxShape::Any) - .switch("raw") + .required("path", SyntaxShape::Any, "the URL to post to") + .required("body", SyntaxShape::Any, "the contents of the post body") + .named("user", SyntaxShape::Any, "the username when authenticating") + .named( + "password", + SyntaxShape::Any, + "the password when authenticating", + ) + .named( + "content-type", + SyntaxShape::Any, + "the MIME type of content to post", + ) + .named( + "content-length", + SyntaxShape::Any, + "the length of the content being posted", + ) + .switch("raw", "return values as a string instead of a table") } fn usage(&self) -> &str { @@ -54,21 +66,20 @@ fn run( registry: &CommandRegistry, raw_args: &RawCommandArgs, ) -> Result { + let name_tag = call_info.name_tag.clone(); let call_info = call_info.clone(); - let path = match call_info - .args - .nth(0) - .ok_or_else(|| ShellError::string(&format!("No url specified")))? - { - file => file.clone(), - }; - let body = match call_info - .args - .nth(1) - .ok_or_else(|| ShellError::string(&format!("No body specified")))? - { - file => file.clone(), - }; + let path = + match call_info.args.nth(0).ok_or_else(|| { + ShellError::labeled_error("No url specified", "for command", &name_tag) + })? { + file => file.clone(), + }; + let body = + match call_info.args.nth(1).ok_or_else(|| { + ShellError::labeled_error("No body specified", "for command", &name_tag) + })? { + file => file.clone(), + }; let path_str = path.as_string()?; let path_span = path.tag(); let has_raw = call_info.args.has("raw"); @@ -83,7 +94,7 @@ fn run( let headers = get_headers(&call_info)?; let stream = async_stream! 
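// A minimal stand-in sketch (not nushell's real ShellError) of the two error
// constructors this patch standardises on in place of ShellError::string:
// untagged_runtime_error for failures with no useful source position (the
// plugin protocol paths), and labeled_error for failures that can point at a
// span via a tag (the post/save paths). The enum layout and the String tag
// below are simplified assumptions; only the constructor names and argument
// order mirror the patch.
#[derive(Debug)]
enum ShellError {
    UntaggedRuntimeError(String),
    LabeledError { msg: String, label: String, tag: String },
}

impl ShellError {
    fn untagged_runtime_error(msg: impl Into<String>) -> ShellError {
        ShellError::UntaggedRuntimeError(msg.into())
    }

    fn labeled_error(
        msg: impl Into<String>,
        label: impl Into<String>,
        tag: impl Into<String>,
    ) -> ShellError {
        ShellError::LabeledError {
            msg: msg.into(),
            label: label.into(),
            tag: tag.into(),
        }
    }
}

fn main() {
    // Mirrors the plugin.rs call sites: the protocol error carries no span.
    let read_failed = ShellError::untagged_runtime_error(format!(
        "Error while reading begin_filter response: {:?}",
        "broken pipe"
    ));

    // Mirrors the post.rs call sites: the error is pinned to the command's name tag.
    let no_url = ShellError::labeled_error("No url specified", "for command", "post");

    println!("{:?}\n{:?}", read_failed, no_url);
}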
{ - let (file_extension, contents, contents_tag, anchor_location) = + let (file_extension, contents, contents_tag) = post(&path_str, &body, user, password, &headers, path_span, ®istry, &raw_args).await.unwrap(); let file_extension = if has_raw { @@ -94,21 +105,14 @@ fn run( file_extension.or(path_str.split('.').last().map(String::from)) }; - if contents_tag.anchor != uuid::Uuid::nil() { - // If we have loaded something, track its source - yield ReturnSuccess::action(CommandAction::AddAnchorLocation( - contents_tag.anchor, - anchor_location, - )); - } - - let tagged_contents = contents.tagged(contents_tag); + let tagged_contents = contents.tagged(&contents_tag); if let Some(extension) = file_extension { let command_name = format!("from-{}", extension); if let Some(converter) = registry.get_command(&command_name) { let new_args = RawCommandArgs { host: raw_args.host, + ctrl_c: raw_args.ctrl_c, shell_manager: raw_args.shell_manager, call_info: UnevaluatedCallInfo { args: crate::parser::hir::Call { @@ -117,11 +121,10 @@ fn run( named: None }, source: raw_args.call_info.source, - source_map: raw_args.call_info.source_map, name_tag: raw_args.call_info.name_tag, } }; - let mut result = converter.run(new_args.with_input(vec![tagged_contents]), ®istry, false); + let mut result = converter.run(new_args.with_input(vec![tagged_contents]), ®istry); let result_vec: Vec> = result.drain_vec().await; for res in result_vec { match res { @@ -131,7 +134,7 @@ fn run( } } Ok(ReturnSuccess::Value(Tagged { item, .. })) => { - yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag })); + yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() })); } x => yield x, } @@ -211,7 +214,7 @@ pub async fn post( tag: Tag, registry: &CommandRegistry, raw_args: &RawCommandArgs, -) -> Result<(Option, Value, Tag, AnchorLocation), ShellError> { +) -> Result<(Option, Value, Tag), ShellError> { let registry = registry.clone(); let raw_args = raw_args.clone(); if location.starts_with("http:") || location.starts_with("https:") { @@ -252,6 +255,7 @@ pub async fn post( if let Some(converter) = registry.get_command("to-json") { let new_args = RawCommandArgs { host: raw_args.host, + ctrl_c: raw_args.ctrl_c, shell_manager: raw_args.shell_manager, call_info: UnevaluatedCallInfo { args: crate::parser::hir::Call { @@ -260,14 +264,12 @@ pub async fn post( named: None, }, source: raw_args.call_info.source, - source_map: raw_args.call_info.source_map, name_tag: raw_args.call_info.name_tag, }, }; let mut result = converter.run( new_args.with_input(vec![item.clone().tagged(tag.clone())]), ®istry, - false, ); let result_vec: Vec> = result.drain_vec().await; @@ -284,7 +286,7 @@ pub async fn post( return Err(ShellError::labeled_error( "Save could not successfully save", "unexpected data during save", - *tag, + tag, )); } } @@ -300,7 +302,7 @@ pub async fn post( return Err(ShellError::labeled_error( "Could not automatically convert table", "needs manual conversion", - *tag, + tag, )); } } @@ -316,11 +318,13 @@ pub async fn post( ShellError::labeled_error( "Could not load text from remote url", "could not load", - tag, + &tag, ) })?), - tag, - AnchorLocation::Url(location.to_string()), + Tag { + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, + }, )), (mime::APPLICATION, mime::JSON) => Ok(( Some("json".to_string()), @@ -328,25 +332,29 @@ pub async fn post( ShellError::labeled_error( "Could not load text from remote url", "could not load", - tag, + &tag, ) })?), - tag, - 
AnchorLocation::Url(location.to_string()), + Tag { + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, + }, )), (mime::APPLICATION, mime::OCTET_STREAM) => { let buf: Vec = r.body_bytes().await.map_err(|_| { ShellError::labeled_error( "Could not load binary file", "could not load", - tag, + &tag, ) })?; Ok(( None, Value::binary(buf), - tag, - AnchorLocation::Url(location.to_string()), + Tag { + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, + }, )) } (mime::IMAGE, image_ty) => { @@ -354,14 +362,16 @@ pub async fn post( ShellError::labeled_error( "Could not load image file", "could not load", - tag, + &tag, ) })?; Ok(( Some(image_ty.to_string()), Value::binary(buf), - tag, - AnchorLocation::Url(location.to_string()), + Tag { + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, + }, )) } (mime::TEXT, mime::HTML) => Ok(( @@ -370,11 +380,13 @@ pub async fn post( ShellError::labeled_error( "Could not load text from remote url", "could not load", - tag, + &tag, ) })?), - tag, - AnchorLocation::Url(location.to_string()), + Tag { + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, + }, )), (mime::TEXT, mime::PLAIN) => { let path_extension = url::Url::parse(location) @@ -394,11 +406,13 @@ pub async fn post( ShellError::labeled_error( "Could not load text from remote url", "could not load", - tag, + &tag, ) })?), - tag, - AnchorLocation::Url(location.to_string()), + Tag { + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, + }, )) } (ty, sub_ty) => Ok(( @@ -407,16 +421,20 @@ pub async fn post( "Not yet supported MIME type: {} {}", ty, sub_ty )), - tag, - AnchorLocation::Url(location.to_string()), + Tag { + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, + }, )), } } None => Ok(( None, Value::string(format!("No content type found")), - tag, - AnchorLocation::Url(location.to_string()), + Tag { + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, + }, )), }, Err(_) => { diff --git a/src/commands/prepend.rs b/src/commands/prepend.rs new file mode 100644 index 0000000000..b6fa935b0b --- /dev/null +++ b/src/commands/prepend.rs @@ -0,0 +1,47 @@ +use crate::commands::WholeStreamCommand; +use crate::errors::ShellError; +use crate::parser::CommandRegistry; +use crate::prelude::*; + +#[derive(Deserialize)] +struct PrependArgs { + row: Tagged, +} + +pub struct Prepend; + +impl WholeStreamCommand for Prepend { + fn name(&self) -> &str { + "prepend" + } + + fn signature(&self) -> Signature { + Signature::build("prepend").required( + "row value", + SyntaxShape::Any, + "the value of the row to prepend to the table", + ) + } + + fn usage(&self) -> &str { + "Prepend the given row to the front of the table" + } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + args.process(registry, prepend)?.run() + } +} + +fn prepend( + PrependArgs { row }: PrependArgs, + RunnableContext { input, .. 
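// A minimal stand-in sketch (not nushell's real metadata types) of the Tag shape
// this patch moves to: instead of carrying a Uuid and resolving it through a
// separate SourceMap, a tag now holds an Option<AnchorLocation> naming the file
// or URL the value was loaded from, so commands like `tags` and `save` can read
// the origin straight off the value. `Span`, `AnchorLocation` and `Tag` below
// are simplified placeholders; the path and URL are hypothetical examples.
#[derive(Debug, Clone, Copy)]
struct Span {
    start: usize,
    end: usize,
}

#[derive(Debug, Clone)]
enum AnchorLocation {
    File(String),
    Url(String),
}

#[derive(Debug, Clone)]
struct Tag {
    span: Span,
    anchor: Option<AnchorLocation>,
}

fn main() {
    let span = Span { start: 0, end: 0 };

    // Shape produced by the fetch/open path in this patch: anchor the value to
    // the file it came from (hypothetical path).
    let from_file = Tag {
        span,
        anchor: Some(AnchorLocation::File("/tmp/sample.json".to_string())),
    };

    // Shape produced by the fetch/post-over-HTTP path (hypothetical URL).
    let from_url = Tag {
        span,
        anchor: Some(AnchorLocation::Url("https://example.com/data.json".to_string())),
    };

    println!("{:?}\n{:?}", from_file, from_url);
}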
}: RunnableContext, +) -> Result { + let mut prepend: VecDeque> = VecDeque::new(); + prepend.push_back(row); + + Ok(OutputStream::from_input(prepend.chain(input.values))) +} diff --git a/src/commands/reduce_by.rs b/src/commands/reduce_by.rs new file mode 100644 index 0000000000..53ddf7e15d --- /dev/null +++ b/src/commands/reduce_by.rs @@ -0,0 +1,257 @@ +use crate::commands::WholeStreamCommand; +use crate::parser::hir::SyntaxShape; +use crate::prelude::*; +use num_traits::cast::ToPrimitive; +pub struct ReduceBy; + +#[derive(Deserialize)] +pub struct ReduceByArgs { + reduce_with: Option>, +} + +impl WholeStreamCommand for ReduceBy { + fn name(&self) -> &str { + "reduce-by" + } + + fn signature(&self) -> Signature { + Signature::build("reduce-by").named( + "reduce_with", + SyntaxShape::String, + "the command to reduce by with", + ) + } + + fn usage(&self) -> &str { + "Creates a new table with the data from the tables rows reduced by the command given." + } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + args.process(registry, reduce_by)?.run() + } +} + +pub fn reduce_by( + ReduceByArgs { reduce_with }: ReduceByArgs, + RunnableContext { input, name, .. }: RunnableContext, +) -> Result { + let stream = async_stream! { + let values: Vec> = input.values.collect().await; + + if values.is_empty() { + yield Err(ShellError::labeled_error( + "Expected table from pipeline", + "requires a table input", + name + )) + } else { + + let reduce_with = if let Some(reducer) = reduce_with { + Some(reducer.item().clone()) + } else { + None + }; + + match reduce(&values[0], reduce_with, name) { + Ok(reduced) => yield ReturnSuccess::value(reduced), + Err(err) => yield Err(err) + } + } + }; + + Ok(stream.to_output_stream()) +} + +fn sum(data: Vec>) -> i32 { + data.into_iter().fold(0, |acc, value| match value { + Tagged { + item: Value::Primitive(Primitive::Int(n)), + .. + } => acc + n.to_i32().unwrap(), + _ => acc, + }) +} + +fn formula( + acc_begin: i32, + calculator: Box>) -> i32 + 'static>, +) -> Box>) -> i32 + 'static> { + Box::new(move |acc, datax| -> i32 { + let result = acc * acc_begin; + result + calculator(datax) + }) +} + +fn reducer_for(command: Reduce) -> Box>) -> i32 + 'static> { + match command { + Reduce::Sum | Reduce::Default => Box::new(formula(0, Box::new(sum))), + } +} + +pub enum Reduce { + Sum, + Default, +} + +pub fn reduce( + values: &Tagged, + reducer: Option, + tag: impl Into, +) -> Result, ShellError> { + let tag = tag.into(); + + let reduce_with = match reducer { + Some(cmd) if cmd == "sum" => reducer_for(Reduce::Sum), + Some(_) | None => reducer_for(Reduce::Default), + }; + + let results: Tagged = match values { + Tagged { + item: Value::Table(datasets), + .. + } => { + let datasets: Vec<_> = datasets + .into_iter() + .map(|subsets| { + let mut acc = 0; + match subsets { + Tagged { + item: Value::Table(data), + .. + } => { + let data = data + .into_iter() + .map(|d| { + if let Tagged { + item: Value::Table(x), + .. 
+ } = d + { + acc = reduce_with(acc, x.clone()); + Value::number(acc).tagged(&tag) + } else { + Value::number(0).tagged(&tag) + } + }) + .collect::>(); + Value::Table(data).tagged(&tag) + } + _ => Value::Table(vec![]).tagged(&tag), + } + }) + .collect(); + + Value::Table(datasets).tagged(&tag) + } + _ => Value::Table(vec![]).tagged(&tag), + }; + + Ok(results) +} + +#[cfg(test)] +mod tests { + + use crate::commands::evaluate_by::evaluate; + use crate::commands::group_by::group; + use crate::commands::reduce_by::{reduce, reducer_for, Reduce}; + use crate::commands::t_sort_by::t_sort; + use crate::data::meta::*; + use crate::prelude::*; + use crate::Value; + use indexmap::IndexMap; + + fn int(s: impl Into) -> Tagged { + Value::int(s).tagged_unknown() + } + + fn string(input: impl Into) -> Tagged { + Value::string(input.into()).tagged_unknown() + } + + fn row(entries: IndexMap>) -> Tagged { + Value::row(entries).tagged_unknown() + } + + fn table(list: &Vec>) -> Tagged { + Value::table(list).tagged_unknown() + } + + fn nu_releases_sorted_by_date() -> Tagged { + let key = String::from("date"); + + t_sort( + Some(key), + None, + &nu_releases_grouped_by_date(), + Tag::unknown(), + ) + .unwrap() + } + + fn nu_releases_evaluated_by_default_one() -> Tagged { + evaluate(&nu_releases_sorted_by_date(), None, Tag::unknown()).unwrap() + } + + fn nu_releases_grouped_by_date() -> Tagged { + let key = String::from("date").tagged_unknown(); + group(&key, nu_releases_commiters(), Tag::unknown()).unwrap() + } + + fn nu_releases_commiters() -> Vec> { + vec![ + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("September 24-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("September 24-2019")}, + ), + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("September 24-2019")}, + ), + row( + indexmap! 
{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}, + ), + ] + } + + #[test] + fn reducer_computes_given_a_sum_command() { + let subject = vec![int(1), int(1), int(1)]; + + let action = reducer_for(Reduce::Sum); + + assert_eq!(action(0, subject), 3); + } + + #[test] + fn reducer_computes() { + assert_eq!( + reduce( + &nu_releases_evaluated_by_default_one(), + Some(String::from("sum")), + Tag::unknown() + ), + Ok(table(&vec![table(&vec![int(3), int(3), int(3)])])) + ); + } +} diff --git a/src/commands/reject.rs b/src/commands/reject.rs index 3521635233..f02a72aa4c 100644 --- a/src/commands/reject.rs +++ b/src/commands/reject.rs @@ -16,7 +16,7 @@ impl WholeStreamCommand for Reject { } fn signature(&self) -> Signature { - Signature::build("reject").rest(SyntaxShape::Member) + Signature::build("reject").rest(SyntaxShape::Member, "the names of columns to remove") } fn usage(&self) -> &str { diff --git a/src/commands/rm.rs b/src/commands/rm.rs index ac5aeaae7d..76222d2c28 100644 --- a/src/commands/rm.rs +++ b/src/commands/rm.rs @@ -11,6 +11,7 @@ pub struct Remove; pub struct RemoveArgs { pub target: Tagged, pub recursive: Tagged, + pub trash: Tagged, } impl PerItemCommand for Remove { @@ -20,12 +21,16 @@ impl PerItemCommand for Remove { fn signature(&self) -> Signature { Signature::build("rm") - .required("path", SyntaxShape::Pattern) - .switch("recursive") + .required("path", SyntaxShape::Pattern, "the file path to remove") + .switch( + "trash", + "use the platform's recycle bin instead of permanently deleting", + ) + .switch("recursive", "delete subdirectories recursively") } fn usage(&self) -> &str { - "Remove a file, (for removing directory append '--recursive')" + "Remove a file" } fn run( diff --git a/src/commands/save.rs b/src/commands/save.rs index 47f1a17e95..48cfa1acc9 100644 --- a/src/commands/save.rs +++ b/src/commands/save.rs @@ -93,8 +93,11 @@ impl WholeStreamCommand for Save { fn signature(&self) -> Signature { Signature::build("save") - .optional("path", SyntaxShape::Path) - .switch("raw") + .optional("path", SyntaxShape::Path, "the path to save contents to") + .switch( + "raw", + "treat values as-is rather than auto-converting based on file extension", + ) } fn usage(&self) -> &str { @@ -119,49 +122,48 @@ fn save( input, name, shell_manager, - source_map, host, + ctrl_c, commands: registry, .. }: RunnableContext, raw_args: RawCommandArgs, ) -> Result { let mut full_path = PathBuf::from(shell_manager.path()); - let name_tag = name; + let name_tag = name.clone(); - let source_map = source_map.clone(); let stream = async_stream! 
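// A simplified sketch of the reducer plumbing added in reduce_by.rs above, with
// plain i32 values instead of tagged values so it runs standalone. `formula`
// wraps a base accumulator and a per-batch calculator into a single closure,
// as the patch does, and `reducer_for(Reduce::Sum)` is `formula(0, sum)`.
fn sum(data: Vec<i32>) -> i32 {
    data.into_iter().fold(0, |acc, n| acc + n)
}

fn formula(
    acc_begin: i32,
    calculator: Box<dyn Fn(Vec<i32>) -> i32>,
) -> Box<dyn Fn(i32, Vec<i32>) -> i32> {
    Box::new(move |acc, data| acc * acc_begin + calculator(data))
}

enum Reduce {
    Sum,
    Default,
}

fn reducer_for(command: Reduce) -> Box<dyn Fn(i32, Vec<i32>) -> i32> {
    match command {
        Reduce::Sum | Reduce::Default => formula(0, Box::new(sum)),
    }
}

fn main() {
    let action = reducer_for(Reduce::Sum);
    // Matches the reducer_computes_given_a_sum_command test in this patch.
    assert_eq!(action(0, vec![1, 1, 1]), 3);
}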
{ let input: Vec> = input.values.collect().await; if path.is_none() { // If there is no filename, check the metadata for the anchor filename if input.len() > 0 { let anchor = input[0].anchor(); - match source_map.get(&anchor) { + match anchor { Some(path) => match path { AnchorLocation::File(file) => { - full_path.push(Path::new(file)); + full_path.push(Path::new(&file)); } _ => { yield Err(ShellError::labeled_error( - "Save requires a filepath", + "Save requires a filepath (1)", "needs path", - name_tag, + name_tag.clone(), )); } }, None => { yield Err(ShellError::labeled_error( - "Save requires a filepath", + "Save requires a filepath (2)", "needs path", - name_tag, + name_tag.clone(), )); } } } else { yield Err(ShellError::labeled_error( - "Save requires a filepath", + "Save requires a filepath (3)", "needs path", - name_tag, + name_tag.clone(), )); } } else { @@ -179,6 +181,7 @@ fn save( if let Some(converter) = registry.get_command(&command_name) { let new_args = RawCommandArgs { host, + ctrl_c, shell_manager, call_info: UnevaluatedCallInfo { args: crate::parser::hir::Call { @@ -187,11 +190,10 @@ fn save( named: None }, source: raw_args.call_info.source, - source_map: raw_args.call_info.source_map, name_tag: raw_args.call_info.name_tag, } }; - let mut result = converter.run(new_args.with_input(input), ®istry, false); + let mut result = converter.run(new_args.with_input(input), ®istry); let result_vec: Vec> = result.drain_vec().await; if converter.is_binary() { process_binary_return_success!('scope, result_vec, name_tag) @@ -212,9 +214,9 @@ fn save( match content { Ok(save_data) => match std::fs::write(full_path, save_data) { Ok(o) => o, - Err(e) => yield Err(ShellError::string(e.to_string())), + Err(e) => yield Err(ShellError::labeled_error(e.to_string(), "for command", name)), }, - Err(e) => yield Err(ShellError::string(e.to_string())), + Err(e) => yield Err(ShellError::labeled_error(e.to_string(), "for command", name)), } }; diff --git a/src/commands/shells.rs b/src/commands/shells.rs index 2aee2c8564..6058a42032 100644 --- a/src/commands/shells.rs +++ b/src/commands/shells.rs @@ -2,6 +2,7 @@ use crate::commands::WholeStreamCommand; use crate::data::TaggedDictBuilder; use crate::errors::ShellError; use crate::prelude::*; +use std::sync::atomic::Ordering; pub struct Shells; @@ -32,14 +33,14 @@ fn shells(args: CommandArgs, _registry: &CommandRegistry) -> Result Result Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - tag, + &tag, "value originates from here", v.tag(), )), diff --git a/src/commands/skip_while.rs b/src/commands/skip_while.rs index 041caf300e..e8bec7dac2 100644 --- a/src/commands/skip_while.rs +++ b/src/commands/skip_while.rs @@ -1,6 +1,7 @@ use crate::commands::WholeStreamCommand; use crate::errors::ShellError; use crate::prelude::*; +use log::trace; pub struct SkipWhile; @@ -16,7 +17,11 @@ impl WholeStreamCommand for SkipWhile { fn signature(&self) -> Signature { Signature::build("skip-while") - .required("condition", SyntaxShape::Block) + .required( + "condition", + SyntaxShape::Block, + "the condition that must be met to continue skipping", + ) .filter() } @@ -38,7 +43,9 @@ pub fn skip_while( RunnableContext { input, .. 
}: RunnableContext, ) -> Result { let objects = input.values.skip_while(move |item| { + trace!("ITEM = {:?}", item); let result = condition.invoke(&item); + trace!("RESULT = {:?}", result); let return_value = match result { Ok(ref v) if v.is_true() => true, diff --git a/src/commands/sort_by.rs b/src/commands/sort_by.rs index 1e6b87491a..d384207c92 100644 --- a/src/commands/sort_by.rs +++ b/src/commands/sort_by.rs @@ -15,7 +15,7 @@ impl WholeStreamCommand for SortBy { } fn signature(&self) -> Signature { - Signature::build("sort-by").rest(SyntaxShape::String) + Signature::build("sort-by").rest(SyntaxShape::String, "the column(s) to sort by") } fn usage(&self) -> &str { diff --git a/src/commands/split_by.rs b/src/commands/split_by.rs new file mode 100644 index 0000000000..1f972a2c55 --- /dev/null +++ b/src/commands/split_by.rs @@ -0,0 +1,270 @@ +use crate::commands::WholeStreamCommand; +use crate::data::TaggedDictBuilder; +use crate::errors::ShellError; +use crate::prelude::*; + +pub struct SplitBy; + +#[derive(Deserialize)] +pub struct SplitByArgs { + column_name: Tagged, +} + +impl WholeStreamCommand for SplitBy { + fn name(&self) -> &str { + "split-by" + } + + fn signature(&self) -> Signature { + Signature::build("split-by").required( + "column_name", + SyntaxShape::String, + "the name of the column within the nested table to split by", + ) + } + + fn usage(&self) -> &str { + "Creates a new table with the data from the inner tables splitted by the column given." + } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + args.process(registry, split_by)?.run() + } +} + +pub fn split_by( + SplitByArgs { column_name }: SplitByArgs, + RunnableContext { input, name, .. }: RunnableContext, +) -> Result { + let stream = async_stream! { + let values: Vec> = input.values.collect().await; + + if values.len() > 1 || values.is_empty() { + yield Err(ShellError::labeled_error( + "Expected table from pipeline", + "requires a table input", + column_name.span() + )) + } else { + match split(&column_name, &values[0], name) { + Ok(split) => yield ReturnSuccess::value(split), + Err(err) => yield Err(err), + } + } + }; + + Ok(stream.to_output_stream()) +} + +pub fn split( + column_name: &Tagged, + value: &Tagged, + tag: impl Into, +) -> Result, ShellError> { + let origin_tag = tag.into(); + + let mut splits = indexmap::IndexMap::new(); + + match value { + Tagged { + item: Value::Row(group_sets), + .. + } => { + for (group_key, group_value) in group_sets.entries.iter() { + match *group_value { + Tagged { + item: Value::Table(ref dataset), + .. + } => { + let group = crate::commands::group_by::group( + &column_name, + dataset.to_vec(), + &origin_tag, + )?; + + match group { + Tagged { + item: Value::Row(o), + .. 
+ } => { + for (split_label, subset) in o.entries.into_iter() { + match subset { + Tagged { + item: Value::Table(subset), + tag, + } => { + let s = splits + .entry(split_label.clone()) + .or_insert(indexmap::IndexMap::new()); + s.insert( + group_key.clone(), + Value::table(&subset).tagged(tag), + ); + } + other => { + return Err(ShellError::type_error( + "a table value", + other.tagged_type_name(), + )) + } + } + } + } + _ => { + return Err(ShellError::type_error( + "a table value", + group.tagged_type_name(), + )) + } + } + } + ref other => { + return Err(ShellError::type_error( + "a table value", + other.tagged_type_name(), + )) + } + } + } + } + _ => { + return Err(ShellError::type_error( + "a table value", + value.tagged_type_name(), + )) + } + } + + let mut out = TaggedDictBuilder::new(&origin_tag); + + for (k, v) in splits.into_iter() { + out.insert(k, Value::row(v)); + } + + Ok(out.into_tagged_value()) +} +#[cfg(test)] +mod tests { + + use crate::commands::group_by::group; + use crate::commands::split_by::split; + use crate::data::meta::*; + use crate::Value; + use indexmap::IndexMap; + + fn string(input: impl Into) -> Tagged { + Value::string(input.into()).tagged_unknown() + } + + fn row(entries: IndexMap>) -> Tagged { + Value::row(entries).tagged_unknown() + } + + fn table(list: &Vec>) -> Tagged { + Value::table(list).tagged_unknown() + } + + fn nu_releases_grouped_by_date() -> Tagged { + let key = String::from("date").tagged_unknown(); + group(&key, nu_releases_commiters(), Tag::unknown()).unwrap() + } + + fn nu_releases_commiters() -> Vec> { + vec![ + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")}, + ), + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}, + ), + ] + } + + #[test] + fn splits_inner_tables_by_key() { + let for_key = String::from("country").tagged_unknown(); + + assert_eq!( + split(&for_key, &nu_releases_grouped_by_date(), Tag::unknown()).unwrap(), + Value::row(indexmap! { + "EC".into() => row(indexmap! 
{ + "August 23-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}) + ]), + "Sept 24-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")}) + ]), + "October 10-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")}) + ]) + }), + "NZ".into() => row(indexmap! { + "August 23-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}) + ]), + "Sept 24-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")}) + ]), + "October 10-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}) + ]) + }), + "US".into() => row(indexmap! { + "August 23-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}) + ]), + "Sept 24-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")}) + ]), + "October 10-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}) + ]) + }) + }).tagged_unknown() + ); + } + + #[test] + fn errors_if_key_within_some_inner_table_is_missing() { + let for_key = String::from("country").tagged_unknown(); + + let nu_releases = row(indexmap! 
{ + "August 23-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}) + ]), + "Sept 24-2019".into() => table(&vec![ + row(indexmap!{"name".into() => Value::string("JT").tagged(Tag::from(Span::new(5,10))), "date".into() => string("Sept 24-2019")}) + ]), + "October 10-2019".into() => table(&vec![ + row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}) + ]) + }); + + assert!(split(&for_key, &nu_releases, Tag::from(Span::new(5, 10))).is_err()); + } +} diff --git a/src/commands/split_column.rs b/src/commands/split_column.rs index 00e2609f26..fd872d452d 100644 --- a/src/commands/split_column.rs +++ b/src/commands/split_column.rs @@ -21,9 +21,13 @@ impl WholeStreamCommand for SplitColumn { fn signature(&self) -> Signature { Signature::build("split-column") - .required("separator", SyntaxShape::Any) - .switch("collapse-empty") - .rest(SyntaxShape::Member) + .required( + "separator", + SyntaxShape::Any, + "the character that denotes what separates columns", + ) + .switch("collapse-empty", "remove empty columns") + .rest(SyntaxShape::Member, "column names to give the new columns") } fn usage(&self) -> &str { @@ -94,7 +98,7 @@ fn split_column( _ => Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - name, + &name, "value originates from here", v.tag(), )), diff --git a/src/commands/split_row.rs b/src/commands/split_row.rs index e70e5cfa84..6c848c325a 100644 --- a/src/commands/split_row.rs +++ b/src/commands/split_row.rs @@ -17,7 +17,11 @@ impl WholeStreamCommand for SplitRow { } fn signature(&self) -> Signature { - Signature::build("split-row").required("separator", SyntaxShape::Any) + Signature::build("split-row").required( + "separator", + SyntaxShape::Any, + "the character that denotes what separates rows", + ) } fn usage(&self) -> &str { @@ -60,7 +64,7 @@ fn split_row( result.push_back(Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - name, + &name, "value originates from here", v.tag(), ))); diff --git a/src/commands/t_sort_by.rs b/src/commands/t_sort_by.rs new file mode 100644 index 0000000000..1c914dbac3 --- /dev/null +++ b/src/commands/t_sort_by.rs @@ -0,0 +1,358 @@ +use crate::commands::WholeStreamCommand; +use crate::data::{TaggedDictBuilder, TaggedListBuilder}; +use crate::errors::ShellError; +use crate::prelude::*; +use chrono::{DateTime, NaiveDate, Utc}; + +pub struct TSortBy; + +#[derive(Deserialize)] +pub struct TSortByArgs { + #[serde(rename(deserialize = "show-columns"))] + show_columns: bool, + group_by: Option>, + #[allow(unused)] + split_by: Option, +} + +impl WholeStreamCommand for TSortBy { + fn name(&self) -> &str { + "t-sort-by" + } + + fn signature(&self) -> Signature { + Signature::build("t-sort-by") + .switch("show-columns", "Displays the column names sorted") + .named( + "group_by", + SyntaxShape::String, + "the name of the column to group by", + ) + .named( + "split_by", + SyntaxShape::String, + "the name of the column within the grouped by table to split by", + ) + } + + fn usage(&self) -> &str { + "Sort by the given columns." + } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + args.process(registry, t_sort_by)?.run() + } +} + +fn t_sort_by( + TSortByArgs { + show_columns, + group_by, + .. + }: TSortByArgs, + RunnableContext { input, name, .. 
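// A simplified, self-contained sketch of what the new split-by command does:
// rows already grouped under one key (here "date") are re-nested under a second
// key (here "country"), yielding country -> date -> rows. Plain std HashMaps
// and a tiny Row struct stand in for the IndexMap-backed Value::Row and
// Value::Table that the real split() in split_by.rs operates on.
use std::collections::HashMap;

#[derive(Debug, Clone)]
struct Row {
    name: &'static str,
    country: &'static str,
    date: &'static str,
}

fn split_by_country(
    grouped_by_date: HashMap<&'static str, Vec<Row>>,
) -> HashMap<&'static str, HashMap<&'static str, Vec<Row>>> {
    let mut splits: HashMap<&'static str, HashMap<&'static str, Vec<Row>>> = HashMap::new();

    for (date, rows) in grouped_by_date {
        for row in rows {
            splits
                .entry(row.country)
                .or_insert_with(HashMap::new)
                .entry(date)
                .or_insert_with(Vec::new)
                .push(row);
        }
    }

    splits
}

fn main() {
    let mut grouped_by_date = HashMap::new();
    grouped_by_date.insert(
        "August 23-2019",
        vec![
            Row { name: "AR", country: "EC", date: "August 23-2019" },
            Row { name: "JT", country: "NZ", date: "August 23-2019" },
        ],
    );

    // country -> date -> rows, mirroring the nested row the real command yields.
    println!("{:#?}", split_by_country(grouped_by_date));
}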
}: RunnableContext, +) -> Result { + Ok(OutputStream::new(async_stream! { + let values: Vec> = input.values.collect().await; + + let column_grouped_by_name = if let Some(grouped_by) = group_by { + Some(grouped_by.item().clone()) + } else { + None + }; + + if show_columns { + for label in columns_sorted(column_grouped_by_name, &values[0], &name).iter() { + yield ReturnSuccess::value(label.clone()); + } + } else { + match t_sort(column_grouped_by_name, None, &values[0], name) { + Ok(sorted) => yield ReturnSuccess::value(sorted), + Err(err) => yield Err(err) + } + } + })) +} + +pub fn columns_sorted( + _group_by_name: Option, + value: &Tagged, + tag: impl Into, +) -> Vec> { + let origin_tag = tag.into(); + + match value { + Tagged { + item: Value::Row(rows), + .. + } => { + let mut keys: Vec> = + rows.entries + .keys() + .map(|s| s.as_ref()) + .map(|k: &str| { + let date = NaiveDate::parse_from_str(k, "%B %d-%Y"); + + let date = match date { + Ok(parsed) => Value::Primitive(Primitive::Date( + DateTime::::from_utc(parsed.and_hms(12, 34, 56), Utc), + )), + Err(_) => Value::string(k), + }; + + date.tagged_unknown() + }) + .collect(); + + keys.sort(); + + let keys: Vec = keys + .into_iter() + .map(|k| { + Value::string(match k { + Tagged { + item: Value::Primitive(Primitive::Date(d)), + .. + } => format!("{}", d.format("%B %d-%Y")), + _ => k.as_string().unwrap(), + }) + }) + .collect(); + + keys.into_iter().map(|k| k.tagged(&origin_tag)).collect() + } + _ => vec![Value::string("default").tagged(&origin_tag)], + } +} + +pub fn t_sort( + group_by_name: Option, + split_by_name: Option, + value: &Tagged, + tag: impl Into, +) -> Result, ShellError> { + let origin_tag = tag.into(); + + match group_by_name { + Some(column_name) => { + let sorted_labels = columns_sorted(Some(column_name), value, &origin_tag); + + match split_by_name { + None => { + let mut dataset = TaggedDictBuilder::new(&origin_tag); + dataset.insert_tagged("default", value.clone()); + let dataset = dataset.into_tagged_value(); + + let split_labels = match &dataset { + Tagged { + item: Value::Row(rows), + .. + } => { + let mut keys: Vec> = rows + .entries + .keys() + .map(|s| s.as_ref()) + .map(|k: &str| { + let date = NaiveDate::parse_from_str(k, "%B %d-%Y"); + + let date = match date { + Ok(parsed) => Value::Primitive(Primitive::Date( + DateTime::::from_utc( + parsed.and_hms(12, 34, 56), + Utc, + ), + )), + Err(_) => Value::string(k), + }; + + date.tagged_unknown() + }) + .collect(); + + keys.sort(); + + let keys: Vec = keys + .into_iter() + .map(|k| { + Value::string(match k { + Tagged { + item: Value::Primitive(Primitive::Date(d)), + .. + } => format!("{}", d.format("%B %d-%Y")), + _ => k.as_string().unwrap(), + }) + }) + .collect(); + + keys.into_iter().map(|k| k.tagged(&origin_tag)).collect() + } + _ => vec![], + }; + + let results: Vec>> = split_labels + .into_iter() + .map(|split| { + let groups = dataset.get_data_by_key(&split.as_string().unwrap()); + + sorted_labels + .clone() + .into_iter() + .map(|label| { + let label = label.as_string().unwrap(); + + match groups { + Some(Tagged { + item: Value::Row(dict), + .. 
+ }) => dict.get_data_by_key(&label).unwrap().clone(), + _ => Value::Table(vec![]).tagged(&origin_tag), + } + }) + .collect() + }) + .collect(); + + let mut outer = TaggedListBuilder::new(&origin_tag); + + for i in results { + outer.insert_tagged(Value::Table(i).tagged(&origin_tag)); + } + + return Ok(Value::Table(outer.list).tagged(&origin_tag)); + } + Some(_) => return Ok(Value::nothing().tagged(&origin_tag)), + } + } + None => return Ok(Value::nothing().tagged(&origin_tag)), + } +} +#[cfg(test)] +mod tests { + + use crate::commands::group_by::group; + use crate::commands::t_sort_by::{columns_sorted, t_sort}; + use crate::data::meta::*; + use crate::Value; + use indexmap::IndexMap; + + fn string(input: impl Into) -> Tagged { + Value::string(input.into()).tagged_unknown() + } + + fn row(entries: IndexMap>) -> Tagged { + Value::row(entries).tagged_unknown() + } + + fn table(list: &Vec>) -> Tagged { + Value::table(list).tagged_unknown() + } + + fn nu_releases_grouped_by_date() -> Tagged { + let key = String::from("date").tagged_unknown(); + group(&key, nu_releases_commiters(), Tag::unknown()).unwrap() + } + + fn nu_releases_commiters() -> Vec> { + vec![ + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("September 24-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("September 24-2019")}, + ), + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")}, + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("September 24-2019")}, + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}, + ), + ] + } + + #[test] + fn show_columns_sorted_given_a_column_to_sort_by() { + let by_column = String::from("date"); + + assert_eq!( + columns_sorted( + Some(by_column), + &nu_releases_grouped_by_date(), + Tag::unknown() + ), + vec![ + string("August 23-2019"), + string("September 24-2019"), + string("October 10-2019") + ] + ) + } + + #[test] + fn sorts_the_tables() { + let group_by = String::from("date"); + + assert_eq!( + t_sort( + Some(group_by), + None, + &nu_releases_grouped_by_date(), + Tag::unknown() + ) + .unwrap(), + table(&vec![table(&vec![ + table(&vec![ + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")} + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")} + ), + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")} + ) + ]), + table(&vec![ + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("September 24-2019")} + ), + row( + indexmap! 
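// A self-contained sketch of the date-aware label sorting used by t-sort-by in
// this patch: group labels such as "August 23-2019" are parsed with chrono's
// "%B %d-%Y" format, sorted chronologically rather than alphabetically, and
// rendered back with the same format. The real columns_sorted() keeps labels
// that fail to parse as plain strings; this sketch assumes every label parses.
// Requires the chrono crate, which the patch itself depends on.
use chrono::NaiveDate;

fn sort_date_labels(labels: &[&str]) -> Vec<String> {
    let mut dates: Vec<NaiveDate> = labels
        .iter()
        .filter_map(|label| NaiveDate::parse_from_str(label, "%B %d-%Y").ok())
        .collect();

    dates.sort();

    dates
        .into_iter()
        .map(|date| date.format("%B %d-%Y").to_string())
        .collect()
}

fn main() {
    let labels = ["October 10-2019", "August 23-2019", "September 24-2019"];

    // Chronological order, matching the show_columns_sorted_* test in this patch.
    assert_eq!(
        sort_date_labels(&labels),
        vec!["August 23-2019", "September 24-2019", "October 10-2019"]
    );
}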
{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("September 24-2019")} + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("September 24-2019")} + ) + ]), + table(&vec![ + row( + indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")} + ), + row( + indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")} + ), + row( + indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")} + ) + ]), + ]),]) + ); + } +} diff --git a/src/commands/table.rs b/src/commands/table.rs index e9fbe35f2e..f8cdcd13c7 100644 --- a/src/commands/table.rs +++ b/src/commands/table.rs @@ -5,16 +5,17 @@ use crate::prelude::*; pub struct Table; -#[derive(Deserialize)] -pub struct TableArgs {} - impl WholeStreamCommand for Table { fn name(&self) -> &str { "table" } fn signature(&self) -> Signature { - Signature::build("table") + Signature::build("table").named( + "start_number", + SyntaxShape::Number, + "row number to start viewing from", + ) } fn usage(&self) -> &str { @@ -26,16 +27,29 @@ impl WholeStreamCommand for Table { args: CommandArgs, registry: &CommandRegistry, ) -> Result { - args.process(registry, table)?.run() + table(args, registry) } } -pub fn table(_args: TableArgs, context: RunnableContext) -> Result { +fn table(args: CommandArgs, registry: &CommandRegistry) -> Result { + let args = args.evaluate_once(registry)?; + let stream = async_stream! { - let input: Vec> = context.input.into_vec().await; + let host = args.host.clone(); + let start_number = match args.get("start_number") { + Some(Tagged { item: Value::Primitive(Primitive::Int(i)), .. }) => { + i.to_usize().unwrap() + } + _ => { + 0 + } + }; + + let input: Vec> = args.input.into_vec().await; if input.len() > 0 { - let mut host = context.host.lock().unwrap(); - let view = TableView::from_list(&input); + let mut host = host.lock().unwrap(); + let view = TableView::from_list(&input, start_number); + if let Some(view) = view { handle_unexpected(&mut *host, |host| crate::format::print_view(&view, host)); } diff --git a/src/commands/tags.rs b/src/commands/tags.rs index 0cef300b0c..221e8cc303 100644 --- a/src/commands/tags.rs +++ b/src/commands/tags.rs @@ -28,7 +28,6 @@ impl WholeStreamCommand for Tags { } fn tags(args: CommandArgs, _registry: &CommandRegistry) -> Result { - let source_map = args.call_info.source_map.clone(); Ok(args .input .values @@ -38,11 +37,11 @@ fn tags(args: CommandArgs, _registry: &CommandRegistry) -> Result { tags.insert("anchor", Value::string(source)); } diff --git a/src/commands/to_bson.rs b/src/commands/to_bson.rs index a36d99c077..eabf8381ec 100644 --- a/src/commands/to_bson.rs +++ b/src/commands/to_bson.rs @@ -46,7 +46,7 @@ pub fn value_to_bson_value(v: &Tagged) -> Result { Value::Primitive(Primitive::BeginningOfStream) => Bson::Null, Value::Primitive(Primitive::Decimal(d)) => Bson::FloatingPoint(d.to_f64().unwrap()), Value::Primitive(Primitive::Int(i)) => { - Bson::I64(i.tagged(v.tag).coerce_into("converting to BSON")?) + Bson::I64(i.tagged(&v.tag).coerce_into("converting to BSON")?) 
} Value::Primitive(Primitive::Nothing) => Bson::Null, Value::Primitive(Primitive::String(s)) => Bson::String(s.clone()), @@ -58,6 +58,7 @@ pub fn value_to_bson_value(v: &Tagged) -> Result { .collect::>()?, ), Value::Block(_) => Bson::Null, + Value::Error(e) => return Err(e.clone()), Value::Primitive(Primitive::Binary(b)) => Bson::Binary(BinarySubtype::Generic, b.clone()), Value::Row(o) => object_value_to_bson(o)?, }) @@ -170,7 +171,7 @@ fn get_binary_subtype<'a>(tagged_value: &'a Tagged) -> Result unreachable!(), }), Value::Primitive(Primitive::Int(i)) => Ok(BinarySubtype::UserDefined( - i.tagged(tagged_value.tag) + i.tagged(&tagged_value.tag) .coerce_into("converting to BSON binary subtype")?, )), _ => Err(ShellError::type_error( @@ -207,12 +208,12 @@ fn bson_value_to_bytes(bson: Bson, tag: Tag) -> Result, ShellError> { Bson::Array(a) => { for v in a.into_iter() { match v { - Bson::Document(d) => shell_encode_document(&mut out, d, tag)?, + Bson::Document(d) => shell_encode_document(&mut out, d, tag.clone())?, _ => { return Err(ShellError::labeled_error( format!("All top level values must be Documents, got {:?}", v), "requires BSON-compatible document", - tag, + &tag, )) } } @@ -237,7 +238,7 @@ fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result> = args.input.values.collect().await; let to_process_input = if input.len() > 1 { - let tag = input[0].tag; + let tag = input[0].tag.clone(); vec![Tagged { item: Value::Table(input), tag } ] } else if input.len() == 1 { input @@ -248,14 +249,14 @@ fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result { - match bson_value_to_bytes(bson_value, name_tag) { + match bson_value_to_bytes(bson_value, name_tag.clone()) { Ok(x) => yield ReturnSuccess::value( - Value::binary(x).tagged(name_tag), + Value::binary(x).tagged(&name_tag), ), _ => yield Err(ShellError::labeled_error_with_secondary( "Expected a table with BSON-compatible structure.tag() from pipeline", "requires BSON-compatible input", - name_tag, + &name_tag, "originates from here".to_string(), value.tag(), )), @@ -264,7 +265,7 @@ fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error( "Expected a table with BSON-compatible structure from pipeline", "requires BSON-compatible input", - name_tag)) + &name_tag)) } } }; diff --git a/src/commands/to_csv.rs b/src/commands/to_csv.rs index 1897fb86b7..d2b46d9f88 100644 --- a/src/commands/to_csv.rs +++ b/src/commands/to_csv.rs @@ -16,7 +16,10 @@ impl WholeStreamCommand for ToCSV { } fn signature(&self) -> Signature { - Signature::build("to-csv").switch("headerless") + Signature::build("to-csv").switch( + "headerless", + "do not output the columns names as the first row", + ) } fn usage(&self) -> &str { @@ -32,8 +35,8 @@ impl WholeStreamCommand for ToCSV { } } -pub fn value_to_csv_value(v: &Value) -> Value { - match v { +pub fn value_to_csv_value(v: &Tagged) -> Tagged { + match &v.item { Value::Primitive(Primitive::String(s)) => Value::Primitive(Primitive::String(s.clone())), Value::Primitive(Primitive::Nothing) => Value::Primitive(Primitive::Nothing), Value::Primitive(Primitive::Boolean(b)) => Value::Primitive(Primitive::Boolean(b.clone())), @@ -47,10 +50,11 @@ pub fn value_to_csv_value(v: &Value) -> Value { Value::Block(_) => Value::Primitive(Primitive::Nothing), _ => Value::Primitive(Primitive::Nothing), } + .tagged(v.tag.clone()) } -fn to_string_helper(v: &Value) -> Result { - match v { +fn to_string_helper(v: &Tagged) -> Result { + match &v.item { 
Value::Primitive(Primitive::Date(d)) => Ok(d.to_string()), Value::Primitive(Primitive::Bytes(b)) => Ok(format!("{}", b)), Value::Primitive(Primitive::Boolean(_)) => Ok(v.as_string()?), @@ -60,7 +64,13 @@ fn to_string_helper(v: &Value) -> Result { Value::Table(_) => return Ok(String::from("[Table]")), Value::Row(_) => return Ok(String::from("[Row]")), Value::Primitive(Primitive::String(s)) => return Ok(s.to_string()), - _ => return Err(ShellError::string("Unexpected value")), + _ => { + return Err(ShellError::labeled_error( + "Unexpected value", + "", + v.tag.clone(), + )) + } } } @@ -76,7 +86,9 @@ fn merge_descriptors(values: &[Tagged]) -> Vec { ret } -pub fn to_string(v: &Value) -> Result { +pub fn to_string(tagged_value: &Tagged) -> Result { + let v = &tagged_value.item; + match v { Value::Row(o) => { let mut wtr = WriterBuilder::new().from_writer(vec![]); @@ -92,11 +104,20 @@ pub fn to_string(v: &Value) -> Result { wtr.write_record(fields).expect("can not write."); wtr.write_record(values).expect("can not write."); - return Ok(String::from_utf8( - wtr.into_inner() - .map_err(|_| ShellError::string("Could not convert record"))?, - ) - .map_err(|_| ShellError::string("Could not convert record"))?); + return Ok(String::from_utf8(wtr.into_inner().map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + &tagged_value.tag, + ) + })?) + .map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + &tagged_value.tag, + ) + })?); } Value::Table(list) => { let mut wtr = WriterBuilder::new().from_writer(vec![]); @@ -120,13 +141,22 @@ pub fn to_string(v: &Value) -> Result { wtr.write_record(&row).expect("can not write"); } - return Ok(String::from_utf8( - wtr.into_inner() - .map_err(|_| ShellError::string("Could not convert record"))?, - ) - .map_err(|_| ShellError::string("Could not convert record"))?); + return Ok(String::from_utf8(wtr.into_inner().map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + &tagged_value.tag, + ) + })?) 
+ .map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + &tagged_value.tag, + ) + })?); } - _ => return to_string_helper(&v), + _ => return to_string_helper(tagged_value), } } @@ -139,7 +169,7 @@ fn to_csv( let input: Vec> = input.values.collect().await; let to_process_input = if input.len() > 1 { - let tag = input[0].tag; + let tag = input[0].tag.clone(); vec![Tagged { item: Value::Table(input), tag } ] } else if input.len() == 1 { input @@ -148,20 +178,20 @@ fn to_csv( }; for value in to_process_input { - match to_string(&value_to_csv_value(&value.item)) { + match to_string(&value_to_csv_value(&value)) { Ok(x) => { let converted = if headerless { x.lines().skip(1).collect() } else { x }; - yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(name_tag)) + yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(&name_tag)) } _ => { yield Err(ShellError::labeled_error_with_secondary( "Expected a table with CSV-compatible structure.tag() from pipeline", "requires CSV-compatible input", - name_tag, + &name_tag, "originates from here".to_string(), value.tag(), )) diff --git a/src/commands/to_json.rs b/src/commands/to_json.rs index 9c06299aad..40edc5aeb8 100644 --- a/src/commands/to_json.rs +++ b/src/commands/to_json.rs @@ -42,7 +42,7 @@ pub fn value_to_json_value(v: &Tagged) -> Result serde_json::Value::Number(serde_json::Number::from( - CoerceInto::::coerce_into(i.tagged(v.tag), "converting to JSON number")?, + CoerceInto::::coerce_into(i.tagged(&v.tag), "converting to JSON number")?, )), Value::Primitive(Primitive::Nothing) => serde_json::Value::Null, Value::Primitive(Primitive::Pattern(s)) => serde_json::Value::String(s.clone()), @@ -50,6 +50,7 @@ pub fn value_to_json_value(v: &Tagged) -> Result serde_json::Value::String(s.display().to_string()), Value::Table(l) => serde_json::Value::Array(json_list(l)?), + Value::Error(e) => return Err(e.clone()), Value::Block(_) => serde_json::Value::Null, Value::Primitive(Primitive::Binary(b)) => serde_json::Value::Array( b.iter() @@ -85,7 +86,7 @@ fn to_json(args: CommandArgs, registry: &CommandRegistry) -> Result> = args.input.values.collect().await; let to_process_input = if input.len() > 1 { - let tag = input[0].tag; + let tag = input[0].tag.clone(); vec![Tagged { item: Value::Table(input), tag } ] } else if input.len() == 1 { input @@ -98,12 +99,12 @@ fn to_json(args: CommandArgs, registry: &CommandRegistry) -> Result { match serde_json::to_string(&json_value) { Ok(x) => yield ReturnSuccess::value( - Value::Primitive(Primitive::String(x)).tagged(name_tag), + Value::Primitive(Primitive::String(x)).tagged(&name_tag), ), _ => yield Err(ShellError::labeled_error_with_secondary( "Expected a table with JSON-compatible structure.tag() from pipeline", "requires JSON-compatible input", - name_tag, + &name_tag, "originates from here".to_string(), value.tag(), )), @@ -112,7 +113,7 @@ fn to_json(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error( "Expected a table with JSON-compatible structure from pipeline", "requires JSON-compatible input", - name_tag)) + &name_tag)) } } }; diff --git a/src/commands/to_toml.rs b/src/commands/to_toml.rs index 6c8904e0c2..778fdd2561 100644 --- a/src/commands/to_toml.rs +++ b/src/commands/to_toml.rs @@ -38,10 +38,10 @@ pub fn value_to_toml_value(v: &Tagged) -> Result toml::Value::String("".to_string()) } Value::Primitive(Primitive::Decimal(f)) => { - 
toml::Value::Float(f.tagged(v.tag).coerce_into("converting to TOML float")?) + toml::Value::Float(f.tagged(&v.tag).coerce_into("converting to TOML float")?) } Value::Primitive(Primitive::Int(i)) => { - toml::Value::Integer(i.tagged(v.tag).coerce_into("converting to TOML integer")?) + toml::Value::Integer(i.tagged(&v.tag).coerce_into("converting to TOML integer")?) } Value::Primitive(Primitive::Nothing) => toml::Value::String("".to_string()), Value::Primitive(Primitive::Pattern(s)) => toml::Value::String(s.clone()), @@ -49,6 +49,7 @@ pub fn value_to_toml_value(v: &Tagged) -> Result Value::Primitive(Primitive::Path(s)) => toml::Value::String(s.display().to_string()), Value::Table(l) => toml::Value::Array(collect_values(l)?), + Value::Error(e) => return Err(e.clone()), Value::Block(_) => toml::Value::String("".to_string()), Value::Primitive(Primitive::Binary(b)) => { toml::Value::Array(b.iter().map(|x| toml::Value::Integer(*x as i64)).collect()) @@ -80,7 +81,7 @@ fn to_toml(args: CommandArgs, registry: &CommandRegistry) -> Result> = args.input.values.collect().await; let to_process_input = if input.len() > 1 { - let tag = input[0].tag; + let tag = input[0].tag.clone(); vec![Tagged { item: Value::Table(input), tag } ] } else if input.len() == 1 { input @@ -93,12 +94,12 @@ fn to_toml(args: CommandArgs, registry: &CommandRegistry) -> Result { match toml::to_string(&toml_value) { Ok(x) => yield ReturnSuccess::value( - Value::Primitive(Primitive::String(x)).tagged(name_tag), + Value::Primitive(Primitive::String(x)).tagged(&name_tag), ), _ => yield Err(ShellError::labeled_error_with_secondary( "Expected a table with TOML-compatible structure.tag() from pipeline", "requires TOML-compatible input", - name_tag, + &name_tag, "originates from here".to_string(), value.tag(), )), @@ -107,7 +108,7 @@ fn to_toml(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error( "Expected a table with TOML-compatible structure from pipeline", "requires TOML-compatible input", - name_tag)) + &name_tag)) } } }; diff --git a/src/commands/to_tsv.rs b/src/commands/to_tsv.rs index 4edc26face..7857d1eeec 100644 --- a/src/commands/to_tsv.rs +++ b/src/commands/to_tsv.rs @@ -16,7 +16,10 @@ impl WholeStreamCommand for ToTSV { } fn signature(&self) -> Signature { - Signature::build("to-tsv").switch("headerless") + Signature::build("to-tsv").switch( + "headerless", + "do not output the column names as the first row", + ) } fn usage(&self) -> &str { @@ -32,7 +35,9 @@ impl WholeStreamCommand for ToTSV { } } -pub fn value_to_tsv_value(v: &Value) -> Value { +pub fn value_to_tsv_value(tagged_value: &Tagged) -> Tagged { + let v = &tagged_value.item; + match v { Value::Primitive(Primitive::String(s)) => Value::Primitive(Primitive::String(s.clone())), Value::Primitive(Primitive::Nothing) => Value::Primitive(Primitive::Nothing), @@ -47,20 +52,28 @@ pub fn value_to_tsv_value(v: &Value) -> Value { Value::Block(_) => Value::Primitive(Primitive::Nothing), _ => Value::Primitive(Primitive::Nothing), } + .tagged(&tagged_value.tag) } -fn to_string_helper(v: &Value) -> Result { +fn to_string_helper(tagged_value: &Tagged) -> Result { + let v = &tagged_value.item; match v { Value::Primitive(Primitive::Date(d)) => Ok(d.to_string()), Value::Primitive(Primitive::Bytes(b)) => Ok(format!("{}", b)), - Value::Primitive(Primitive::Boolean(_)) => Ok(v.as_string()?), - Value::Primitive(Primitive::Decimal(_)) => Ok(v.as_string()?), - Value::Primitive(Primitive::Int(_)) => Ok(v.as_string()?), - 
Value::Primitive(Primitive::Path(_)) => Ok(v.as_string()?), + Value::Primitive(Primitive::Boolean(_)) => Ok(tagged_value.as_string()?), + Value::Primitive(Primitive::Decimal(_)) => Ok(tagged_value.as_string()?), + Value::Primitive(Primitive::Int(_)) => Ok(tagged_value.as_string()?), + Value::Primitive(Primitive::Path(_)) => Ok(tagged_value.as_string()?), Value::Table(_) => return Ok(String::from("[table]")), Value::Row(_) => return Ok(String::from("[row]")), Value::Primitive(Primitive::String(s)) => return Ok(s.to_string()), - _ => return Err(ShellError::string("Unexpected value")), + _ => { + return Err(ShellError::labeled_error( + "Unexpected value", + "original value", + &tagged_value.tag, + )) + } } } @@ -76,7 +89,9 @@ fn merge_descriptors(values: &[Tagged]) -> Vec { ret } -pub fn to_string(v: &Value) -> Result { +pub fn to_string(tagged_value: &Tagged) -> Result { + let v = &tagged_value.item; + match v { Value::Row(o) => { let mut wtr = WriterBuilder::new().delimiter(b'\t').from_writer(vec![]); @@ -91,11 +106,20 @@ pub fn to_string(v: &Value) -> Result { wtr.write_record(fields).expect("can not write."); wtr.write_record(values).expect("can not write."); - return Ok(String::from_utf8( - wtr.into_inner() - .map_err(|_| ShellError::string("Could not convert record"))?, - ) - .map_err(|_| ShellError::string("Could not convert record"))?); + return Ok(String::from_utf8(wtr.into_inner().map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + &tagged_value.tag, + ) + })?) + .map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + &tagged_value.tag, + ) + })?); } Value::Table(list) => { let mut wtr = WriterBuilder::new().delimiter(b'\t').from_writer(vec![]); @@ -119,13 +143,22 @@ pub fn to_string(v: &Value) -> Result { wtr.write_record(&row).expect("can not write"); } - return Ok(String::from_utf8( - wtr.into_inner() - .map_err(|_| ShellError::string("Could not convert record"))?, - ) - .map_err(|_| ShellError::string("Could not convert record"))?); + return Ok(String::from_utf8(wtr.into_inner().map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + &tagged_value.tag, + ) + })?) 
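// A self-contained sketch of how the to_string() helpers in to_csv.rs and
// to_tsv.rs serialize a single row in this patch: a csv::WriterBuilder writes a
// header record and a value record into an in-memory Vec<u8>, and the only
// difference between the two commands is the delimiter. The patch maps the
// failure points to labeled ShellErrors carrying the value's tag; this sketch
// just collapses them. Requires the csv crate, which the patch already uses.
use csv::WriterBuilder;

fn row_to_delimited(fields: &[&str], values: &[&str], delimiter: u8) -> String {
    let mut wtr = WriterBuilder::new().delimiter(delimiter).from_writer(vec![]);

    wtr.write_record(fields).expect("can not write header");
    wtr.write_record(values).expect("can not write values");

    match wtr.into_inner() {
        Ok(bytes) => String::from_utf8(bytes).unwrap_or_default(),
        Err(_) => String::new(),
    }
}

fn main() {
    let fields = ["name", "country", "date"];
    let values = ["AR", "EC", "August 23-2019"];

    // to-csv style output (comma separated, header row first).
    print!("{}", row_to_delimited(&fields, &values, b','));
    // to-tsv style output (tab separated).
    print!("{}", row_to_delimited(&fields, &values, b'\t'));
}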
+ .map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + &tagged_value.tag, + ) + })?); } - _ => return to_string_helper(&v), + _ => return to_string_helper(tagged_value), } } @@ -138,7 +171,7 @@ fn to_tsv( let input: Vec> = input.values.collect().await; let to_process_input = if input.len() > 1 { - let tag = input[0].tag; + let tag = input[0].tag.clone(); vec![Tagged { item: Value::Table(input), tag } ] } else if input.len() == 1 { input @@ -147,20 +180,20 @@ fn to_tsv( }; for value in to_process_input { - match to_string(&value_to_tsv_value(&value.item)) { + match to_string(&value_to_tsv_value(&value)) { Ok(x) => { let converted = if headerless { x.lines().skip(1).collect() } else { x }; - yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(name_tag)) + yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(&name_tag)) } _ => { yield Err(ShellError::labeled_error_with_secondary( "Expected a table with TSV-compatible structure.tag() from pipeline", "requires TSV-compatible input", - name_tag, + &name_tag, "originates from here".to_string(), value.tag(), )) diff --git a/src/commands/to_url.rs b/src/commands/to_url.rs index dfba5faf4d..8dee0a87d5 100644 --- a/src/commands/to_url.rs +++ b/src/commands/to_url.rs @@ -47,7 +47,7 @@ fn to_url(args: CommandArgs, registry: &CommandRegistry) -> Result Result { - yield ReturnSuccess::value(Value::string(s).tagged(tag)); + yield ReturnSuccess::value(Value::string(s).tagged(&tag)); } _ => { yield Err(ShellError::labeled_error( "Failed to convert to url-encoded", "cannot url-encode", - tag, + &tag, )) } } @@ -72,7 +72,7 @@ fn to_url(args: CommandArgs, registry: &CommandRegistry) -> Result) -> Result serde_yaml::Value::Number(serde_yaml::Number::from( - CoerceInto::::coerce_into(i.tagged(v.tag), "converting to YAML number")?, + CoerceInto::::coerce_into(i.tagged(&v.tag), "converting to YAML number")?, )), Value::Primitive(Primitive::Nothing) => serde_yaml::Value::Null, Value::Primitive(Primitive::Pattern(s)) => serde_yaml::Value::String(s.clone()), @@ -55,6 +55,7 @@ pub fn value_to_yaml_value(v: &Tagged) -> Result return Err(e.clone()), Value::Block(_) => serde_yaml::Value::Null, Value::Primitive(Primitive::Binary(b)) => serde_yaml::Value::Sequence( b.iter() @@ -81,7 +82,7 @@ fn to_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result> = args.input.values.collect().await; let to_process_input = if input.len() > 1 { - let tag = input[0].tag; + let tag = input[0].tag.clone(); vec![Tagged { item: Value::Table(input), tag } ] } else if input.len() == 1 { input @@ -94,12 +95,12 @@ fn to_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result { match serde_yaml::to_string(&yaml_value) { Ok(x) => yield ReturnSuccess::value( - Value::Primitive(Primitive::String(x)).tagged(name_tag), + Value::Primitive(Primitive::String(x)).tagged(&name_tag), ), _ => yield Err(ShellError::labeled_error_with_secondary( "Expected a table with YAML-compatible structure.tag() from pipeline", "requires YAML-compatible input", - name_tag, + &name_tag, "originates from here".to_string(), value.tag(), )), @@ -108,7 +109,7 @@ fn to_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error( "Expected a table with YAML-compatible structure from pipeline", "requires YAML-compatible input", - name_tag)) + &name_tag)) } } }; diff --git a/src/commands/version.rs b/src/commands/version.rs index 01a134929e..11b243f08b 100644 --- 
a/src/commands/version.rs +++ b/src/commands/version.rs @@ -31,14 +31,14 @@ impl WholeStreamCommand for Version { pub fn date(args: CommandArgs, registry: &CommandRegistry) -> Result { let args = args.evaluate_once(registry)?; - let tag = args.call_info.name_tag; + let tag = args.call_info.name_tag.clone(); let mut indexmap = IndexMap::new(); indexmap.insert( "version".to_string(), - Value::string(clap::crate_version!()).tagged(tag), + Value::string(clap::crate_version!()).tagged(&tag), ); - let value = Value::Row(Dictionary::from(indexmap)).tagged(tag); + let value = Value::Row(Dictionary::from(indexmap)).tagged(&tag); Ok(OutputStream::one(value)) } diff --git a/src/commands/where_.rs b/src/commands/where_.rs index 673c6dda84..ce7367b1a6 100644 --- a/src/commands/where_.rs +++ b/src/commands/where_.rs @@ -12,7 +12,11 @@ impl PerItemCommand for Where { } fn signature(&self) -> registry::Signature { - Signature::build("where").required("condition", SyntaxShape::Block) + Signature::build("where").required( + "condition", + SyntaxShape::Block, + "the condition that must match", + ) } fn usage(&self) -> &str { @@ -49,7 +53,7 @@ impl PerItemCommand for Where { return Err(ShellError::labeled_error( "Expected a condition", "where needs a condition", - *tag, + tag, )) } }; diff --git a/src/commands/which_.rs b/src/commands/which_.rs index 905515848c..405efe7dca 100644 --- a/src/commands/which_.rs +++ b/src/commands/which_.rs @@ -13,7 +13,11 @@ impl WholeStreamCommand for Which { } fn signature(&self) -> Signature { - Signature::build("which").required("name", SyntaxShape::Any) + Signature::build("which").required( + "name", + SyntaxShape::Any, + "the name of the command to find the path to", + ) } fn usage(&self) -> &str { @@ -33,7 +37,7 @@ pub fn which(args: CommandArgs, registry: &CommandRegistry) -> Result 0 { @@ -52,7 +56,7 @@ pub fn which(args: CommandArgs, registry: &CommandRegistry) -> Result); - -impl SourceMap { - pub fn insert(&mut self, uuid: Uuid, anchor_location: AnchorLocation) { - self.0.insert(uuid, anchor_location); - } - - pub fn get(&self, uuid: &Uuid) -> Option<&AnchorLocation> { - self.0.get(uuid) - } - - pub fn new() -> SourceMap { - SourceMap(HashMap::new()) - } -} - #[derive(Clone, new)] pub struct CommandRegistry { #[new(value = "Arc::new(Mutex::new(IndexMap::default()))")] @@ -53,13 +34,17 @@ impl CommandRegistry { registry.get(name).map(|c| c.clone()) } + pub(crate) fn expect_command(&self, name: &str) -> Arc { + self.get_command(name).unwrap() + } + pub(crate) fn has(&self, name: &str) -> bool { let registry = self.registry.lock().unwrap(); registry.contains_key(name) } - fn insert(&mut self, name: impl Into, command: Arc) { + pub(crate) fn insert(&mut self, name: impl Into, command: Arc) { let mut registry = self.registry.lock().unwrap(); registry.insert(name.into(), command); } @@ -73,8 +58,8 @@ impl CommandRegistry { #[derive(Clone)] pub struct Context { registry: CommandRegistry, - pub(crate) source_map: SourceMap, host: Arc>, + pub ctrl_c: Arc, pub(crate) shell_manager: ShellManager, } @@ -83,12 +68,19 @@ impl Context { &self.registry } + pub(crate) fn expand_context<'context>( + &'context self, + source: &'context Text, + ) -> ExpandContext<'context> { + ExpandContext::new(&self.registry, source, self.shell_manager.homedir()) + } + pub(crate) fn basic() -> Result> { let registry = CommandRegistry::new(); Ok(Context { registry: registry.clone(), - source_map: SourceMap::new(), host: Arc::new(Mutex::new(crate::env::host::BasicHost)), + ctrl_c: 
Arc::new(AtomicBool::new(false)), shell_manager: ShellManager::basic(registry)?, }) } @@ -105,43 +97,30 @@ impl Context { } } - pub fn add_anchor_location(&mut self, uuid: Uuid, anchor_location: AnchorLocation) { - self.source_map.insert(uuid, anchor_location); + pub(crate) fn get_command(&self, name: &str) -> Option> { + self.registry.get_command(name) } - pub(crate) fn has_command(&self, name: &str) -> bool { - self.registry.has(name) - } - - pub(crate) fn get_command(&self, name: &str) -> Arc { - self.registry.get_command(name).unwrap() + pub(crate) fn expect_command(&self, name: &str) -> Arc { + self.registry.expect_command(name) } pub(crate) fn run_command<'a>( &mut self, command: Arc, name_tag: Tag, - source_map: SourceMap, args: hir::Call, source: &Text, input: InputStream, - is_first_command: bool, ) -> OutputStream { - let command_args = self.command_args(args, input, source, source_map, name_tag); - command.run(command_args, self.registry(), is_first_command) + let command_args = self.command_args(args, input, source, name_tag); + command.run(command_args, self.registry()) } - fn call_info( - &self, - args: hir::Call, - source: &Text, - source_map: SourceMap, - name_tag: Tag, - ) -> UnevaluatedCallInfo { + fn call_info(&self, args: hir::Call, source: &Text, name_tag: Tag) -> UnevaluatedCallInfo { UnevaluatedCallInfo { args, source: source.clone(), - source_map, name_tag, } } @@ -151,13 +130,13 @@ impl Context { args: hir::Call, input: InputStream, source: &Text, - source_map: SourceMap, name_tag: Tag, ) -> CommandArgs { CommandArgs { host: self.host.clone(), + ctrl_c: self.ctrl_c.clone(), shell_manager: self.shell_manager.clone(), - call_info: self.call_info(args, source, source_map, name_tag), + call_info: self.call_info(args, source, name_tag), input, } } diff --git a/src/data/base.rs b/src/data/base.rs index 04465181a3..4baa8f1d67 100644 --- a/src/data/base.rs +++ b/src/data/base.rs @@ -8,6 +8,8 @@ use crate::Text; use chrono::{DateTime, Utc}; use chrono_humanize::Humanize; use derive_new::new; +use indexmap::IndexMap; +use log::trace; use serde::{Deserialize, Serialize}; use std::fmt; use std::path::PathBuf; @@ -212,11 +214,19 @@ impl Block { let scope = Scope::new(value.clone()); if self.expressions.len() == 0 { - return Ok(Value::nothing().tagged(self.tag)); + return Ok(Value::nothing().tagged(&self.tag)); } let mut last = None; + trace!( + "EXPRS = {:?}", + self.expressions + .iter() + .map(|e| format!("{}", e)) + .collect::>() + ); + for expr in self.expressions.iter() { last = Some(evaluate_baseline_expr( &expr, @@ -236,6 +246,9 @@ pub enum Value { Row(crate::data::Dictionary), Table(Vec>), + // Errors are a type of value too + Error(ShellError), + Block(Block), } @@ -284,14 +297,15 @@ impl fmt::Debug for ValueDebug<'_> { Value::Row(o) => o.debug(f), Value::Table(l) => debug_list(l).fmt(f), Value::Block(_) => write!(f, "[[block]]"), + Value::Error(_) => write!(f, "[[error]]"), } } } impl Tagged { - pub(crate) fn tagged_type_name(&self) -> Tagged { + pub fn tagged_type_name(&self) -> Tagged { let name = self.type_name(); - Tagged::from_item(name, self.tag()) + name.tagged(self.tag()) } } @@ -303,7 +317,7 @@ impl std::convert::TryFrom<&Tagged> for Block { Value::Block(block) => Ok(block.clone()), v => Err(ShellError::type_error( "Block", - value.copy_tag(v.type_name()), + v.type_name().tagged(value.tag()), )), } } @@ -315,11 +329,11 @@ impl std::convert::TryFrom<&Tagged> for i64 { fn try_from(value: &Tagged) -> Result { match value.item() { 
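The Context changes above drop the SourceMap plumbing and add a shared ctrl_c: Arc<AtomicBool> that gets cloned into CommandArgs. A minimal std-only sketch of how such a flag is typically shared and polled; the spawned thread here stands in for the real Ctrl-C handler:

use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread;
use std::time::Duration;

fn main() {
    // One flag, cloned into everything that needs to observe interruption.
    let ctrl_c = Arc::new(AtomicBool::new(false));

    let setter = Arc::clone(&ctrl_c);
    thread::spawn(move || {
        thread::sleep(Duration::from_millis(50));
        setter.store(true, Ordering::SeqCst);
    });

    // A long-running command loop checks the flag between units of work and
    // bails out early once it has been set.
    let mut processed = 0u32;
    while !ctrl_c.load(Ordering::SeqCst) {
        processed += 1;
        thread::sleep(Duration::from_millis(5));
    }
    println!("stopped after {} iterations", processed);
}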
Value::Primitive(Primitive::Int(int)) => { - int.tagged(value.tag).coerce_into("converting to i64") + int.tagged(&value.tag).coerce_into("converting to i64") } v => Err(ShellError::type_error( "Integer", - value.copy_tag(v.type_name()), + v.type_name().tagged(value.tag()), )), } } @@ -333,7 +347,7 @@ impl std::convert::TryFrom<&Tagged> for String { Value::Primitive(Primitive::String(s)) => Ok(s.clone()), v => Err(ShellError::type_error( "String", - value.copy_tag(v.type_name()), + v.type_name().tagged(value.tag()), )), } } @@ -347,7 +361,7 @@ impl std::convert::TryFrom<&Tagged> for Vec { Value::Primitive(Primitive::Binary(b)) => Ok(b.clone()), v => Err(ShellError::type_error( "Binary", - value.copy_tag(v.type_name()), + v.type_name().tagged(value.tag()), )), } } @@ -361,7 +375,7 @@ impl<'a> std::convert::TryFrom<&'a Tagged> for &'a crate::data::Dictionar Value::Row(d) => Ok(d), v => Err(ShellError::type_error( "Dictionary", - value.copy_tag(v.type_name()), + v.type_name().tagged(value.tag()), )), } } @@ -383,7 +397,7 @@ impl std::convert::TryFrom>> for Switch { Value::Primitive(Primitive::Boolean(true)) => Ok(Switch::Present), v => Err(ShellError::type_error( "Boolean", - value.copy_tag(v.type_name()), + v.type_name().tagged(value.tag()), )), }, } @@ -394,23 +408,54 @@ impl Tagged { pub(crate) fn debug(&self) -> ValueDebug<'_> { ValueDebug { value: self } } -} -impl Value { - pub(crate) fn type_name(&self) -> String { - match self { - Value::Primitive(p) => p.type_name(), - Value::Row(_) => format!("object"), - Value::Table(_) => format!("list"), - Value::Block(_) => format!("block"), + pub fn as_column_path(&self) -> Result>>, ShellError> { + match &self.item { + Value::Primitive(Primitive::String(s)) => { + Ok(vec![Value::string(s).tagged(&self.tag)].tagged(&self.tag)) + } + Value::Table(table) => Ok(table.to_vec().tagged(&self.tag)), + other => Err(ShellError::type_error( + "column name", + other.type_name().tagged(&self.tag), + )), + } + } + + pub fn as_string(&self) -> Result { + match &self.item { + Value::Primitive(Primitive::String(s)) => Ok(s.clone()), + Value::Primitive(Primitive::Boolean(x)) => Ok(format!("{}", x)), + Value::Primitive(Primitive::Decimal(x)) => Ok(format!("{}", x)), + Value::Primitive(Primitive::Int(x)) => Ok(format!("{}", x)), + Value::Primitive(Primitive::Bytes(x)) => Ok(format!("{}", x)), + Value::Primitive(Primitive::Path(x)) => Ok(format!("{}", x.display())), + Value::Primitive(Primitive::Date(x)) => Ok(format!("{}", x.to_rfc3339())), + // TODO: this should definitely be more general with better errors + other => Err(ShellError::labeled_error( + "Expected string", + other.type_name(), + &self.tag, + )), + } + } +} + +impl Value { + pub fn type_name(&self) -> String { + match self { + Value::Primitive(p) => p.type_name(), + Value::Row(_) => format!("row"), + Value::Table(_) => format!("table"), + Value::Block(_) => format!("block"), + Value::Error(_) => format!("error"), } } - // TODO: This is basically a legacy construct, I think pub fn data_descriptors(&self) -> Vec { match self { Value::Primitive(_) => vec![], - Value::Row(o) => o + Value::Row(columns) => columns .entries .keys() .into_iter() @@ -418,6 +463,14 @@ impl Value { .collect(), Value::Block(_) => vec![], Value::Table(_) => vec![], + Value::Error(_) => vec![], + } + } + + pub(crate) fn get_data_by_index(&self, idx: usize) -> Option<&Tagged> { + match self { + Value::Table(value_set) => value_set.get(idx), + _ => None, } } @@ -443,27 +496,88 @@ impl Value { } } - pub fn get_data_by_path(&self, tag: 
Tag, path: &str) -> Option> { + pub(crate) fn get_mut_data_by_key(&mut self, name: &str) -> Option<&mut Tagged> { + match self { + Value::Row(ref mut o) => o.get_mut_data_by_key(name), + Value::Table(ref mut l) => { + for item in l { + match item { + Tagged { + item: Value::Row(ref mut o), + .. + } => match o.get_mut_data_by_key(name) { + Some(v) => return Some(v), + None => {} + }, + _ => {} + } + } + None + } + _ => None, + } + } + + pub fn get_data_by_column_path( + &self, + tag: Tag, + path: &Vec>, + callback: Box)) -> ShellError>, + ) -> Result>, ShellError> { + let mut column_path = vec![]; + + for value in path { + column_path.push( + Value::string(value.as_string().unwrap_or("".to_string())).tagged(&value.tag), + ); + } + + let path = column_path; + let mut current = self; - for p in path.split(".") { - match current.get_data_by_key(p) { + + for p in path { + let value = p.as_string().unwrap_or("".to_string()); + let value = match value.parse::() { + Ok(number) => match current { + Value::Table(_) => current.get_data_by_index(number), + Value::Row(_) => current.get_data_by_key(&value), + _ => None, + }, + Err(_) => match self { + Value::Table(_) | Value::Row(_) => current.get_data_by_key(&value), + _ => None, + }, + }; + + match value { Some(v) => current = v, - None => return None, + None => return Err(callback((current.clone(), p.clone()))), } } - Some(Tagged::from_item(current, tag)) + Ok(Some(current.tagged(tag))) } - pub fn insert_data_at_path( + pub fn insert_data_at_column_path( &self, tag: Tag, - path: &str, + split_path: &Vec>, new_value: Value, ) -> Option> { - let mut new_obj = self.clone(); + let split_path = split_path + .into_iter() + .map(|p| match p { + Tagged { + item: Value::Primitive(Primitive::String(s)), + tag, + } => Ok(s.clone().tagged(tag)), + o => Err(o), + }) + .filter_map(Result::ok) + .collect::>>(); - let split_path: Vec<_> = path.split(".").collect(); + let mut new_obj = self.clone(); if let Value::Row(ref mut o) = new_obj { let mut current = o; @@ -472,25 +586,25 @@ impl Value { // Special case for inserting at the top level current .entries - .insert(path.to_string(), Tagged::from_item(new_value, tag)); - return Some(Tagged::from_item(new_obj, tag)); + .insert(split_path[0].item.clone(), new_value.tagged(&tag)); + return Some(new_obj.tagged(&tag)); } for idx in 0..split_path.len() { - match current.entries.get_mut(split_path[idx]) { + match current.entries.get_mut(&split_path[idx].item) { Some(next) => { if idx == (split_path.len() - 2) { match &mut next.item { Value::Row(o) => { o.entries.insert( split_path[idx + 1].to_string(), - Tagged::from_item(new_value, tag), + new_value.tagged(&tag), ); } _ => {} } - return Some(Tagged::from_item(new_obj, tag)); + return Some(new_obj.tagged(&tag)); } else { match next.item { Value::Row(ref mut o) => { @@ -508,34 +622,39 @@ impl Value { None } - pub fn replace_data_at_path( + pub fn replace_data_at_column_path( &self, tag: Tag, - path: &str, + split_path: &Vec>, replaced_value: Value, ) -> Option> { + let split_path = split_path + .into_iter() + .map(|p| match p { + Tagged { + item: Value::Primitive(Primitive::String(s)), + tag, + } => Ok(s.clone().tagged(tag)), + o => Err(o), + }) + .filter_map(Result::ok) + .collect::>>(); + let mut new_obj = self.clone(); + let mut current = &mut new_obj; - let split_path: Vec<_> = path.split(".").collect(); - - if let Value::Row(ref mut o) = new_obj { - let mut current = o; - for idx in 0..split_path.len() { - match current.entries.get_mut(split_path[idx]) { - Some(next) 
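get_data_by_column_path above walks a parsed column path, treating members that parse as numbers as table indices and everything else as row keys. A self-contained sketch of the same traversal over a simplified Value, with BTreeMap and Vec standing in for Dictionary and Table and error handling reduced to Option:

use std::collections::BTreeMap;

#[derive(Debug, Clone)]
enum Value {
    Row(BTreeMap<String, Value>),
    Table(Vec<Value>),
    String(String),
}

// Members that parse as numbers index into tables; anything else is looked up
// as a key of the current row.
fn get_by_column_path<'a>(mut current: &'a Value, path: &[&str]) -> Option<&'a Value> {
    for member in path {
        current = match (member.parse::<usize>(), current) {
            (Ok(idx), Value::Table(rows)) => rows.get(idx)?,
            (_, Value::Row(entries)) => entries.get(*member)?,
            _ => return None,
        };
    }
    Some(current)
}

fn main() {
    let mut author = BTreeMap::new();
    author.insert("name".to_string(), Value::String("nu author".to_string()));

    let mut package = BTreeMap::new();
    package.insert("authors".to_string(), Value::Table(vec![Value::Row(author)]));

    let mut root = BTreeMap::new();
    root.insert("package".to_string(), Value::Row(package));

    let value = Value::Row(root);
    let found = get_by_column_path(&value, &["package", "authors", "0", "name"]);
    println!("{:?}", found);
}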
=> { - if idx == (split_path.len() - 1) { - *next = Tagged::from_item(replaced_value, tag); - return Some(Tagged::from_item(new_obj, tag)); - } else { - match next.item { - Value::Row(ref mut o) => { - current = o; - } - _ => return None, - } - } + for idx in 0..split_path.len() { + match current.get_mut_data_by_key(&split_path[idx].item) { + Some(next) => { + if idx == (split_path.len() - 1) { + *next = replaced_value.tagged(&tag); + return Some(new_obj.tagged(&tag)); + } else { + current = &mut next.item; } - _ => return None, + } + None => { + return None; } } } @@ -549,6 +668,7 @@ impl Value { Value::Row(o) => o.get_data(desc), Value::Block(_) => MaybeOwned::Owned(Value::nothing()), Value::Table(_) => MaybeOwned::Owned(Value::nothing()), + Value::Error(_) => MaybeOwned::Owned(Value::nothing()), } } @@ -558,7 +678,7 @@ impl Value { Value::Block(b) => itertools::join( b.expressions .iter() - .map(|e| e.source(&b.source).to_string()), + .map(|e| e.span.slice(&b.source).to_string()), "; ", ), Value::Row(_) => format!("[table: 1 row]"), @@ -567,6 +687,7 @@ impl Value { l.len(), if l.len() == 1 { "row" } else { "rows" } ), + Value::Error(_) => format!("[error]"), } } @@ -607,22 +728,6 @@ impl Value { } } - pub(crate) fn as_string(&self) -> Result { - match self { - Value::Primitive(Primitive::String(s)) => Ok(s.clone()), - Value::Primitive(Primitive::Boolean(x)) => Ok(format!("{}", x)), - Value::Primitive(Primitive::Decimal(x)) => Ok(format!("{}", x)), - Value::Primitive(Primitive::Int(x)) => Ok(format!("{}", x)), - Value::Primitive(Primitive::Bytes(x)) => Ok(format!("{}", x)), - Value::Primitive(Primitive::Path(x)) => Ok(format!("{}", x.display())), - // TODO: this should definitely be more general with better errors - other => Err(ShellError::string(format!( - "Expected string, got {:?}", - other - ))), - } - } - pub(crate) fn is_true(&self) -> bool { match self { Value::Primitive(Primitive::Boolean(true)) => true, @@ -630,6 +735,15 @@ impl Value { } } + #[allow(unused)] + pub fn row(entries: IndexMap>) -> Value { + Value::Row(entries.into()) + } + + pub fn table(list: &Vec>) -> Value { + Value::Table(list.to_vec()) + } + pub fn string(s: impl Into) -> Value { Value::Primitive(Primitive::String(s.into())) } @@ -675,9 +789,14 @@ impl Value { Value::Primitive(Primitive::Date(s.into())) } - pub fn date_from_str(s: &str) -> Result { - let date = DateTime::parse_from_rfc3339(s) - .map_err(|err| ShellError::string(&format!("Date parse error: {}", err)))?; + pub fn date_from_str(s: Tagged<&str>) -> Result { + let date = DateTime::parse_from_rfc3339(s.item).map_err(|err| { + ShellError::labeled_error( + &format!("Date parse error: {}", err), + "original value", + s.tag, + ) + })?; let date = date.with_timezone(&chrono::offset::Utc); @@ -693,6 +812,7 @@ impl Tagged { pub(crate) fn as_path(&self) -> Result { match self.item() { Value::Primitive(Primitive::Path(path)) => Ok(path.clone()), + Value::Primitive(Primitive::String(path_str)) => Ok(PathBuf::from(&path_str).clone()), other => Err(ShellError::type_error( "Path", other.type_name().tagged(self.tag()), @@ -736,6 +856,7 @@ enum CompareValues { Ints(BigInt, BigInt), Decimals(BigDecimal, BigDecimal), String(String, String), + Date(DateTime, DateTime), } impl CompareValues { @@ -744,6 +865,7 @@ impl CompareValues { CompareValues::Ints(left, right) => left.cmp(right), CompareValues::Decimals(left, right) => left.cmp(right), CompareValues::String(left, right) => left.cmp(right), + CompareValues::Date(left, right) => right.cmp(left), } } } @@ -780,6 
+902,256 @@ fn coerce_compare_primitive( CompareValues::Decimals(BigDecimal::from(*left), right.clone()) } (String(left), String(right)) => CompareValues::String(left.clone(), right.clone()), + (Date(left), Date(right)) => CompareValues::Date(left.clone(), right.clone()), _ => return Err((left.type_name(), right.type_name())), }) } +#[cfg(test)] +mod tests { + + use crate::data::meta::*; + use crate::ShellError; + use crate::Value; + use indexmap::IndexMap; + + fn string(input: impl Into) -> Tagged { + Value::string(input.into()).tagged_unknown() + } + + fn number(n: i64) -> Tagged { + Value::number(n).tagged_unknown() + } + + fn row(entries: IndexMap>) -> Tagged { + Value::row(entries).tagged_unknown() + } + + fn table(list: &Vec>) -> Tagged { + Value::table(list).tagged_unknown() + } + + fn error_callback() -> impl FnOnce((Value, Tagged)) -> ShellError { + move |(_obj_source, _column_path_tried)| ShellError::unimplemented("will never be called.") + } + + fn column_path(paths: &Vec>) -> Vec> { + table(paths).as_column_path().unwrap().item + } + + #[test] + fn gets_matching_field_from_a_row() { + let row = Value::row(indexmap! { + "amigos".into() => table(&vec![string("andres"),string("jonathan"),string("yehuda")]) + }); + + assert_eq!( + *row.get_data_by_key("amigos").unwrap(), + table(&vec![ + string("andres"), + string("jonathan"), + string("yehuda") + ]) + ); + } + + #[test] + fn gets_matching_field_from_nested_rows_inside_a_row() { + let field_path = column_path(&vec![string("package"), string("version")]); + + let (version, tag) = string("0.4.0").into_parts(); + + let value = Value::row(indexmap! { + "package".into() => + row(indexmap! { + "name".into() => string("nu"), + "version".into() => string("0.4.0") + }) + }); + + assert_eq!( + **value + .get_data_by_column_path(tag, &field_path, Box::new(error_callback())) + .unwrap() + .unwrap(), + version + ) + } + + #[test] + fn column_path_that_contains_just_a_number_gets_a_row_from_a_table() { + let field_path = column_path(&vec![string("package"), string("authors"), number(0)]); + + let (_, tag) = string("Andrés N. Robalino").into_parts(); + + let value = Value::row(indexmap! { + "package".into() => row(indexmap! { + "name".into() => string("nu"), + "version".into() => string("0.4.0"), + "authors".into() => table(&vec![ + row(indexmap!{"name".into() => string("Andrés N. Robalino")}), + row(indexmap!{"name".into() => string("Jonathan Turner")}), + row(indexmap!{"name".into() => string("Yehuda Katz")}) + ]) + }) + }); + + assert_eq!( + **value + .get_data_by_column_path(tag, &field_path, Box::new(error_callback())) + .unwrap() + .unwrap(), + Value::row(indexmap! { + "name".into() => string("Andrés N. Robalino") + }) + ); + } + + #[test] + fn column_path_that_contains_just_a_number_gets_a_row_from_a_row() { + let field_path = column_path(&vec![string("package"), string("authors"), string("0")]); + + let (_, tag) = string("Andrés N. Robalino").into_parts(); + + let value = Value::row(indexmap! { + "package".into() => row(indexmap! { + "name".into() => string("nu"), + "version".into() => string("0.4.0"), + "authors".into() => row(indexmap! { + "0".into() => row(indexmap!{"name".into() => string("Andrés N. Robalino")}), + "1".into() => row(indexmap!{"name".into() => string("Jonathan Turner")}), + "2".into() => row(indexmap!{"name".into() => string("Yehuda Katz")}), + }) + }) + }); + + assert_eq!( + **value + .get_data_by_column_path(tag, &field_path, Box::new(error_callback())) + .unwrap() + .unwrap(), + Value::row(indexmap! 
{ + "name".into() => string("Andrés N. Robalino") + }) + ); + } + + #[test] + fn replaces_matching_field_from_a_row() { + let field_path = column_path(&vec![string("amigos")]); + + let sample = Value::row(indexmap! { + "amigos".into() => table(&vec![ + string("andres"), + string("jonathan"), + string("yehuda"), + ]), + }); + + let (replacement, tag) = string("jonas").into_parts(); + + let actual = sample + .replace_data_at_column_path(tag, &field_path, replacement) + .unwrap(); + + assert_eq!(actual, row(indexmap! {"amigos".into() => string("jonas")})); + } + + #[test] + fn replaces_matching_field_from_nested_rows_inside_a_row() { + let field_path = column_path(&vec![ + string("package"), + string("authors"), + string("los.3.caballeros"), + ]); + + let sample = Value::row(indexmap! { + "package".into() => row(indexmap! { + "authors".into() => row(indexmap! { + "los.3.mosqueteros".into() => table(&vec![string("andres::yehuda::jonathan")]), + "los.3.amigos".into() => table(&vec![string("andres::yehuda::jonathan")]), + "los.3.caballeros".into() => table(&vec![string("andres::yehuda::jonathan")]) + }) + }) + }); + + let (replacement, tag) = table(&vec![string("yehuda::jonathan::andres")]).into_parts(); + + let actual = sample + .replace_data_at_column_path(tag.clone(), &field_path, replacement.clone()) + .unwrap(); + + assert_eq!( + actual, + Value::row(indexmap! { + "package".into() => row(indexmap! { + "authors".into() => row(indexmap! { + "los.3.mosqueteros".into() => table(&vec![string("andres::yehuda::jonathan")]), + "los.3.amigos".into() => table(&vec![string("andres::yehuda::jonathan")]), + "los.3.caballeros".into() => replacement.tagged(&tag)})})}) + .tagged(tag) + ); + } + #[test] + fn replaces_matching_field_from_rows_inside_a_table() { + let field_path = column_path(&vec![ + string("shell_policy"), + string("releases"), + string("nu.version.arepa"), + ]); + + let sample = Value::row(indexmap! { + "shell_policy".into() => row(indexmap! { + "releases".into() => table(&vec![ + row(indexmap! { + "nu.version.arepa".into() => row(indexmap! { + "code".into() => string("0.4.0"), "tag_line".into() => string("GitHub-era") + }) + }), + row(indexmap! { + "nu.version.taco".into() => row(indexmap! { + "code".into() => string("0.3.0"), "tag_line".into() => string("GitHub-era") + }) + }), + row(indexmap! { + "nu.version.stable".into() => row(indexmap! { + "code".into() => string("0.2.0"), "tag_line".into() => string("GitHub-era") + }) + }) + ]) + }) + }); + + let (replacement, tag) = row(indexmap! { + "code".into() => string("0.5.0"), + "tag_line".into() => string("CABALLEROS") + }) + .into_parts(); + + let actual = sample + .replace_data_at_column_path(tag.clone(), &field_path, replacement.clone()) + .unwrap(); + + assert_eq!( + actual, + Value::row(indexmap! { + "shell_policy".into() => row(indexmap! { + "releases".into() => table(&vec![ + row(indexmap! { + "nu.version.arepa".into() => replacement.tagged(&tag) + }), + row(indexmap! { + "nu.version.taco".into() => row(indexmap! { + "code".into() => string("0.3.0"), "tag_line".into() => string("GitHub-era") + }) + }), + row(indexmap! { + "nu.version.stable".into() => row(indexmap! 
{ + "code".into() => string("0.2.0"), "tag_line".into() => string("GitHub-era") + }) + }) + ]) + }) + }).tagged(&tag) + ); + } +} diff --git a/src/data/command.rs b/src/data/command.rs index a2046aa7aa..5993dda6f5 100644 --- a/src/data/command.rs +++ b/src/data/command.rs @@ -7,7 +7,7 @@ use std::ops::Deref; pub(crate) fn command_dict(command: Arc, tag: impl Into) -> Tagged { let tag = tag.into(); - let mut cmd_dict = TaggedDictBuilder::new(tag); + let mut cmd_dict = TaggedDictBuilder::new(&tag); cmd_dict.insert("name", Value::string(command.name())); @@ -42,27 +42,27 @@ fn for_spec(name: &str, ty: &str, required: bool, tag: impl Into) -> Tagged fn signature_dict(signature: Signature, tag: impl Into) -> Tagged { let tag = tag.into(); - let mut sig = TaggedListBuilder::new(tag); + let mut sig = TaggedListBuilder::new(&tag); for arg in signature.positional.iter() { - let is_required = match arg { + let is_required = match arg.0 { PositionalType::Mandatory(_, _) => true, PositionalType::Optional(_, _) => false, }; - sig.insert_tagged(for_spec(arg.name(), "argument", is_required, tag)); + sig.insert_tagged(for_spec(arg.0.name(), "argument", is_required, &tag)); } if let Some(_) = signature.rest_positional { let is_required = false; - sig.insert_tagged(for_spec("rest", "argument", is_required, tag)); + sig.insert_tagged(for_spec("rest", "argument", is_required, &tag)); } for (name, ty) in signature.named.iter() { - match ty { - NamedType::Mandatory(_) => sig.insert_tagged(for_spec(name, "flag", true, tag)), - NamedType::Optional(_) => sig.insert_tagged(for_spec(name, "flag", false, tag)), - NamedType::Switch => sig.insert_tagged(for_spec(name, "switch", false, tag)), + match ty.0 { + NamedType::Mandatory(_) => sig.insert_tagged(for_spec(name, "flag", true, &tag)), + NamedType::Optional(_) => sig.insert_tagged(for_spec(name, "flag", false, &tag)), + NamedType::Switch => sig.insert_tagged(for_spec(name, "switch", false, &tag)), } } diff --git a/src/data/config.rs b/src/data/config.rs index 1cb4533d8e..26e3e3c7d5 100644 --- a/src/data/config.rs +++ b/src/data/config.rs @@ -51,8 +51,9 @@ pub fn user_data() -> Result { } pub fn app_path(app_data_type: AppDataType, display: &str) -> Result { - let path = app_root(app_data_type, &APP_INFO) - .map_err(|err| ShellError::string(&format!("Couldn't open {} path:\n{}", display, err)))?; + let path = app_root(app_data_type, &APP_INFO).map_err(|err| { + ShellError::untagged_runtime_error(&format!("Couldn't open {} path:\n{}", display, err)) + })?; Ok(path) } @@ -74,11 +75,22 @@ pub fn read( let tag = tag.into(); let contents = fs::read_to_string(filename) - .map(|v| v.tagged(tag)) - .map_err(|err| ShellError::string(&format!("Couldn't read config file:\n{}", err)))?; + .map(|v| v.tagged(&tag)) + .map_err(|err| { + ShellError::labeled_error( + &format!("Couldn't read config file:\n{}", err), + "file name", + &tag, + ) + })?; - let parsed: toml::Value = toml::from_str(&contents) - .map_err(|err| ShellError::string(&format!("Couldn't parse config file:\n{}", err)))?; + let parsed: toml::Value = toml::from_str(&contents).map_err(|err| { + ShellError::labeled_error( + &format!("Couldn't parse config file:\n{}", err), + "file name", + &tag, + ) + })?; let value = convert_toml_value_to_nu_value(&parsed, tag); let tag = value.tag(); @@ -86,7 +98,7 @@ pub fn read( Value::Row(Dictionary { entries }) => Ok(entries), other => Err(ShellError::type_error( "Dictionary", - other.type_name().tagged(tag), + other.type_name().tagged(&tag), )), } } diff --git a/src/data/dict.rs 
b/src/data/dict.rs index c14c86dd90..32393f0a0d 100644 --- a/src/data/dict.rs +++ b/src/data/dict.rs @@ -89,6 +89,17 @@ impl Dictionary { } } + pub(crate) fn get_mut_data_by_key(&mut self, name: &str) -> Option<&mut Tagged> { + match self + .entries + .iter_mut() + .find(|(desc_name, _)| *desc_name == name) + { + Some((_, v)) => Some(v), + None => None, + } + } + pub(crate) fn debug(&self, f: &mut fmt::Formatter) -> fmt::Result { let mut debug = f.debug_struct("Dictionary"); @@ -103,7 +114,7 @@ impl Dictionary { #[derive(Debug)] pub struct TaggedListBuilder { tag: Tag, - list: Vec>, + pub list: Vec>, } impl TaggedListBuilder { @@ -115,7 +126,7 @@ impl TaggedListBuilder { } pub fn push(&mut self, value: impl Into) { - self.list.push(value.into().tagged(self.tag)); + self.list.push(value.into().tagged(&self.tag)); } pub fn insert_tagged(&mut self, value: impl Into>) { @@ -155,7 +166,7 @@ impl TaggedDictBuilder { } pub fn insert(&mut self, key: impl Into, value: impl Into) { - self.dict.insert(key.into(), value.into().tagged(self.tag)); + self.dict.insert(key.into(), value.into().tagged(&self.tag)); } pub fn insert_tagged(&mut self, key: impl Into, value: impl Into>) { diff --git a/src/data/meta.rs b/src/data/meta.rs index 0a56198e6c..6c9294c573 100644 --- a/src/data/meta.rs +++ b/src/data/meta.rs @@ -1,14 +1,53 @@ -use crate::context::{AnchorLocation, SourceMap}; +use crate::context::AnchorLocation; +use crate::parser::parse::parser::TracableContext; use crate::prelude::*; -use crate::Text; use derive_new::new; use getset::Getters; use serde::Deserialize; use serde::Serialize; +use std::fmt; use std::path::{Path, PathBuf}; -use uuid::Uuid; #[derive(new, Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)] +pub struct Spanned { + pub span: Span, + pub item: T, +} + +impl Spanned { + pub fn map(self, input: impl FnOnce(T) -> U) -> Spanned { + let span = self.span; + + let mapped = input(self.item); + mapped.spanned(span) + } +} + +pub trait SpannedItem: Sized { + fn spanned(self, span: impl Into) -> Spanned { + Spanned { + item: self, + span: span.into(), + } + } + + fn spanned_unknown(self) -> Spanned { + Spanned { + item: self, + span: Span::unknown(), + } + } +} +impl SpannedItem for T {} + +impl std::ops::Deref for Spanned { + type Target = T; + + fn deref(&self) -> &T { + &self.item + } +} +#[derive(new, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)] pub struct Tagged { pub tag: Tag, pub item: T, @@ -16,7 +55,7 @@ pub struct Tagged { impl HasTag for Tagged { fn tag(&self) -> Tag { - self.tag + self.tag.clone() } } @@ -28,20 +67,23 @@ impl AsRef for Tagged { pub trait TaggedItem: Sized { fn tagged(self, tag: impl Into) -> Tagged { - Tagged::from_item(self, tag.into()) + Tagged { + item: self, + tag: tag.into(), + } } // For now, this is a temporary facility. In many cases, there are other useful spans that we // could be using, such as the original source spans of JSON or Toml files, but we don't yet // have the infrastructure to make that work. 
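meta.rs now distinguishes Spanned<T> (just a byte range) from Tagged<T> (a range plus an optional anchor such as a file or URL). A reduced sketch of the Spanned half and its blanket .spanned(..) helper, to show how call sites read; the types here are simplified stand-ins:

#[derive(Debug, Clone, Copy, PartialEq)]
struct Span { start: usize, end: usize }

#[derive(Debug, Clone)]
struct Spanned<T> { span: Span, item: T }

// Blanket helper so any value can be annotated with a span, mirroring the
// SpannedItem trait in the hunk above.
trait SpannedItem: Sized {
    fn spanned(self, span: Span) -> Spanned<Self> {
        Spanned { span, item: self }
    }
}
impl<T> SpannedItem for T {}

impl<T> std::ops::Deref for Spanned<T> {
    type Target = T;
    fn deref(&self) -> &T { &self.item }
}

fn main() {
    let word = "ls".to_string().spanned(Span { start: 0, end: 2 });
    // Deref lets callers use the inner value directly...
    assert_eq!(word.len(), 2);
    // ...while the span remains available for diagnostics.
    println!("{:?} covers bytes {}..{}", word.item, word.span.start, word.span.end);
}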
fn tagged_unknown(self) -> Tagged { - Tagged::from_item( - self, - Tag { + Tagged { + item: self, + tag: Tag { span: Span::unknown(), - anchor: uuid::Uuid::nil(), + anchor: None, }, - ) + } } } @@ -56,48 +98,40 @@ impl std::ops::Deref for Tagged { } impl Tagged { - pub fn with_tag(self, tag: impl Into) -> Tagged { - Tagged::from_item(self.item, tag) - } - - pub fn from_item(item: T, tag: impl Into) -> Tagged { - Tagged { - item, - tag: tag.into(), - } - } - pub fn map(self, input: impl FnOnce(T) -> U) -> Tagged { let tag = self.tag(); let mapped = input(self.item); - Tagged::from_item(mapped, tag) + mapped.tagged(tag) } - pub(crate) fn copy_tag(&self, output: U) -> Tagged { - Tagged::from_item(output, self.tag()) - } + pub fn map_anchored(self, anchor: &Option) -> Tagged { + let mut tag = self.tag; - pub fn source(&self, source: &Text) -> Text { - Text::from(self.tag().slice(source)) + tag.anchor = anchor.clone(); + + Tagged { + item: self.item, + tag: tag, + } } pub fn tag(&self) -> Tag { - self.tag + self.tag.clone() } pub fn span(&self) -> Span { self.tag.span } - pub fn anchor(&self) -> uuid::Uuid { - self.tag.anchor + pub fn anchor(&self) -> Option { + self.tag.anchor.clone() } - pub fn anchor_name(&self, source_map: &SourceMap) -> Option { - match source_map.get(&self.tag.anchor) { - Some(AnchorLocation::File(file)) => Some(file.clone()), - Some(AnchorLocation::Url(url)) => Some(url.clone()), + pub fn anchor_name(&self) -> Option { + match self.tag.anchor { + Some(AnchorLocation::File(ref file)) => Some(file.clone()), + Some(AnchorLocation::Url(ref url)) => Some(url.clone()), _ => None, } } @@ -113,29 +147,32 @@ impl Tagged { impl From<&Tag> for Tag { fn from(input: &Tag) -> Tag { - *input + input.clone() } } -impl From> for Span { - fn from(input: nom_locate::LocatedSpanEx<&str, Uuid>) -> Span { - Span { - start: input.offset, - end: input.offset + input.fragment.len(), - } +impl From> for Span { + fn from(input: nom_locate::LocatedSpanEx<&str, TracableContext>) -> Span { + Span::new(input.offset, input.offset + input.fragment.len()) + } +} + +impl From> for Span { + fn from(input: nom_locate::LocatedSpanEx<&str, u64>) -> Span { + Span::new(input.offset, input.offset + input.fragment.len()) } } impl From<( - nom_locate::LocatedSpanEx, - nom_locate::LocatedSpanEx, + nom_locate::LocatedSpanEx, + nom_locate::LocatedSpanEx, )> for Span { fn from( input: ( - nom_locate::LocatedSpanEx, - nom_locate::LocatedSpanEx, + nom_locate::LocatedSpanEx, + nom_locate::LocatedSpanEx, ), ) -> Span { Span { @@ -147,10 +184,7 @@ impl impl From<(usize, usize)> for Span { fn from(input: (usize, usize)) -> Span { - Span { - start: input.0, - end: input.1, - } + Span::new(input.0, input.1) } } @@ -164,61 +198,60 @@ impl From<&std::ops::Range> for Span { } #[derive( - Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash, Getters, + Debug, Clone, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash, Getters, new, )] pub struct Tag { - pub anchor: Uuid, + pub anchor: Option, pub span: Span, } impl From for Tag { fn from(span: Span) -> Self { - Tag { - anchor: uuid::Uuid::nil(), - span, - } + Tag { anchor: None, span } } } impl From<&Span> for Tag { fn from(span: &Span) -> Self { Tag { - anchor: uuid::Uuid::nil(), + anchor: None, span: *span, } } } -impl From<(usize, usize, Uuid)> for Tag { - fn from((start, end, anchor): (usize, usize, Uuid)) -> Self { +impl From<(usize, usize, TracableContext)> for Tag { + fn from((start, end, _context): (usize, usize, TracableContext)) -> 
Self { + Tag { + anchor: None, + span: Span::new(start, end), + } + } +} + +impl From<(usize, usize, AnchorLocation)> for Tag { + fn from((start, end, anchor): (usize, usize, AnchorLocation)) -> Self { + Tag { + anchor: Some(anchor), + span: Span::new(start, end), + } + } +} + +impl From<(usize, usize, Option)> for Tag { + fn from((start, end, anchor): (usize, usize, Option)) -> Self { Tag { anchor, - span: Span { start, end }, + span: Span::new(start, end), } } } -impl From<(usize, usize, Option)> for Tag { - fn from((start, end, anchor): (usize, usize, Option)) -> Self { +impl From> for Tag { + fn from(input: nom_locate::LocatedSpanEx<&str, TracableContext>) -> Tag { Tag { - anchor: if let Some(uuid) = anchor { - uuid - } else { - uuid::Uuid::nil() - }, - span: Span { start, end }, - } - } -} - -impl From> for Tag { - fn from(input: nom_locate::LocatedSpanEx<&str, Uuid>) -> Tag { - Tag { - anchor: input.extra, - span: Span { - start: input.offset, - end: input.offset + input.fragment.len(), - }, + anchor: None, + span: Span::new(input.offset, input.offset + input.fragment.len()), } } } @@ -237,22 +270,29 @@ impl From<&Tag> for Span { impl Tag { pub fn unknown_anchor(span: Span) -> Tag { + Tag { anchor: None, span } + } + + pub fn for_char(pos: usize, anchor: AnchorLocation) -> Tag { Tag { - anchor: uuid::Uuid::nil(), - span, + anchor: Some(anchor), + span: Span { + start: pos, + end: pos + 1, + }, } } - pub fn unknown_span(anchor: Uuid) -> Tag { + pub fn unknown_span(anchor: AnchorLocation) -> Tag { Tag { - anchor, + anchor: Some(anchor), span: Span::unknown(), } } pub fn unknown() -> Tag { Tag { - anchor: uuid::Uuid::nil(), + anchor: None, span: Span::unknown(), } } @@ -265,29 +305,73 @@ impl Tag { ); Tag { - span: Span { - start: self.span.start, - end: other.span.end, - }, - anchor: self.anchor, + span: Span::new(self.span.start, other.span.end), + anchor: self.anchor.clone(), + } + } + + pub fn until_option(&self, other: Option>) -> Tag { + match other { + Some(other) => { + let other = other.into(); + debug_assert!( + self.anchor == other.anchor, + "Can only merge two tags with the same anchor" + ); + + Tag { + span: Span::new(self.span.start, other.span.end), + anchor: self.anchor.clone(), + } + } + None => self.clone(), } } pub fn slice<'a>(&self, source: &'a str) -> &'a str { self.span.slice(source) } + + pub fn string<'a>(&self, source: &'a str) -> String { + self.span.slice(source).to_string() + } + + pub fn tagged_slice<'a>(&self, source: &'a str) -> Tagged<&'a str> { + self.span.slice(source).tagged(self) + } + + pub fn tagged_string<'a>(&self, source: &'a str) -> Tagged { + self.span.slice(source).to_string().tagged(self) + } +} + +#[allow(unused)] +pub fn tag_for_tagged_list(mut iter: impl Iterator) -> Tag { + let first = iter.next(); + + let first = match first { + None => return Tag::unknown(), + Some(first) => first, + }; + + let last = iter.last(); + + match last { + None => first, + Some(last) => first.until(last), + } } #[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash)] pub struct Span { - pub(crate) start: usize, - pub(crate) end: usize, + start: usize, + end: usize, } impl From> for Span { fn from(input: Option) -> Span { match input { - None => Span { start: 0, end: 0 }, + None => Span::new(0, 0), Some(span) => span, } } @@ -295,18 +379,63 @@ impl From> for Span { impl Span { pub fn unknown() -> Span { - Span { start: 0, end: 0 } + Span::new(0, 0) } - /* - pub fn unknown_with_uuid(uuid: Uuid) -> Span { + pub fn new(start: 
usize, end: usize) -> Span { + assert!( + end >= start, + "Can't create a Span whose end < start, start={}, end={}", + start, + end + ); + + Span { start, end } + } + + pub fn for_char(pos: usize) -> Span { Span { - start: 0, - end: 0, - source: Some(uuid), + start: pos, + end: pos + 1, } } - */ + + pub fn until(&self, other: impl Into) -> Span { + let other = other.into(); + + Span::new(self.start, other.end) + } + + pub fn until_option(&self, other: Option>) -> Span { + match other { + Some(other) => { + let other = other.into(); + + Span::new(self.start, other.end) + } + None => *self, + } + } + + pub fn string<'a>(&self, source: &'a str) -> String { + self.slice(source).to_string() + } + + pub fn spanned_slice<'a>(&self, source: &'a str) -> Spanned<&'a str> { + self.slice(source).spanned(*self) + } + + pub fn spanned_string<'a>(&self, source: &'a str) -> Spanned { + self.slice(source).to_string().spanned(*self) + } + + pub fn start(&self) -> usize { + self.start + } + + pub fn end(&self) -> usize { + self.end + } pub fn is_unknown(&self) -> bool { self.start == 0 && self.end == 0 @@ -319,17 +448,11 @@ impl Span { impl language_reporting::ReportingSpan for Span { fn with_start(&self, start: usize) -> Self { - Span { - start, - end: self.end, - } + Span::new(start, self.end) } fn with_end(&self, end: usize) -> Self { - Span { - start: self.start, - end, - } + Span::new(self.start, end) } fn start(&self) -> usize { @@ -341,32 +464,133 @@ impl language_reporting::ReportingSpan for Span { } } -impl language_reporting::ReportingSpan for Tag { - fn with_start(&self, start: usize) -> Self { - Tag { - span: Span { - start, - end: self.span.end, - }, - anchor: self.anchor, - } - } +pub trait HasSpan: ToDebug { + fn span(&self) -> Span; +} - fn with_end(&self, end: usize) -> Self { - Tag { - span: Span { - start: self.span.start, - end, - }, - anchor: self.anchor, - } - } +pub trait HasFallibleSpan: ToDebug { + fn maybe_span(&self) -> Option; +} - fn start(&self) -> usize { - self.span.start - } - - fn end(&self) -> usize { - self.span.end +impl HasFallibleSpan for T { + fn maybe_span(&self) -> Option { + Some(HasSpan::span(self)) + } +} + +impl HasSpan for Spanned +where + Spanned: ToDebug, +{ + fn span(&self) -> Span { + self.span + } +} + +impl HasFallibleSpan for Option { + fn maybe_span(&self) -> Option { + *self + } +} + +impl FormatDebug for Option { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + match self { + Option::None => write!(f, "no span"), + Option::Some(span) => FormatDebug::fmt_debug(span, f, source), + } + } +} + +impl FormatDebug for Span { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + write!(f, "{:?}", self.slice(source)) + } +} + +impl HasSpan for Span { + fn span(&self) -> Span { + *self + } +} + +impl FormatDebug for Option> +where + Spanned: ToDebug, +{ + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + match self { + Option::None => write!(f, "nothing"), + Option::Some(spanned) => FormatDebug::fmt_debug(spanned, f, source), + } + } +} + +impl HasFallibleSpan for Option> +where + Spanned: ToDebug, +{ + fn maybe_span(&self) -> Option { + match self { + None => None, + Some(value) => Some(value.span), + } + } +} + +impl FormatDebug for Option> +where + Tagged: ToDebug, +{ + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + match self { + Option::None => write!(f, "nothing"), + Option::Some(item) => FormatDebug::fmt_debug(item, f, source), + } + } +} + 
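Span is now built only through Span::new, which asserts end >= start, and adjacent spans are merged with until(). A small sketch of those two invariants and of how slice() recovers the underlying source text; the Span here is a simplified stand-in:

#[derive(Debug, Clone, Copy, PartialEq)]
struct Span { start: usize, end: usize }

impl Span {
    fn new(start: usize, end: usize) -> Span {
        // Same invariant the diff enforces: a span can never end before it starts.
        assert!(end >= start, "end ({}) must not precede start ({})", end, start);
        Span { start, end }
    }

    // Keep our start, extend to the other span's end (cf. Span::until above).
    fn until(self, other: Span) -> Span {
        Span::new(self.start, other.end)
    }

    fn slice<'a>(&self, source: &'a str) -> &'a str {
        &source[self.start..self.end]
    }
}

fn main() {
    let source = "echo \"hello world\"";
    let command = Span::new(0, 4);
    let argument = Span::new(5, 18);

    let whole = command.until(argument);
    assert_eq!(whole.slice(source), source);
    println!("command = {:?}", command.slice(source));
}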
+impl HasFallibleSpan for Option> +where + Tagged: ToDebug, +{ + fn maybe_span(&self) -> Option { + match self { + None => None, + Some(value) => Some(value.tag.span), + } + } +} + +impl HasSpan for Tagged +where + Tagged: ToDebug, +{ + fn span(&self) -> Span { + self.tag.span + } +} + +impl FormatDebug for Vec { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + write!(f, "[ ")?; + write!( + f, + "{}", + self.iter().map(|item| item.debug(source)).join(" ") + )?; + write!(f, " ]") + } +} + +impl FormatDebug for String { + fn fmt_debug(&self, f: &mut DebugFormatter, _source: &str) -> fmt::Result { + write!(f, "{}", self) + } +} + +impl FormatDebug for Spanned { + fn fmt_debug(&self, f: &mut DebugFormatter, _source: &str) -> fmt::Result { + write!(f, "{}", self.item) } } diff --git a/src/data/types.rs b/src/data/types.rs index 8dca43d878..b4ff545deb 100644 --- a/src/data/types.rs +++ b/src/data/types.rs @@ -54,7 +54,7 @@ impl ExtractType for i64 { &Tagged { item: Value::Primitive(Primitive::Int(int)), .. - } => Ok(int.tagged(value.tag).coerce_into("converting to i64")?), + } => Ok(int.tagged(&value.tag).coerce_into("converting to i64")?), other => Err(ShellError::type_error("Integer", other.tagged_type_name())), } } @@ -68,7 +68,7 @@ impl ExtractType for u64 { &Tagged { item: Value::Primitive(Primitive::Int(int)), .. - } => Ok(int.tagged(value.tag).coerce_into("converting to u64")?), + } => Ok(int.tagged(&value.tag).coerce_into("converting to u64")?), other => Err(ShellError::type_error("Integer", other.tagged_type_name())), } } diff --git a/src/errors.rs b/src/errors.rs index 7e9c14b239..c28658028c 100644 --- a/src/errors.rs +++ b/src/errors.rs @@ -1,5 +1,6 @@ use crate::prelude::*; +use crate::parser::parse::parser::TracableContext; use ansi_term::Color; use derive_new::new; use language_reporting::{Diagnostic, Label, Severity}; @@ -13,12 +14,96 @@ pub enum Description { } impl Description { - fn into_label(self) -> Result, String> { + fn into_label(self) -> Result, String> { match self { - Description::Source(s) => Ok(Label::new_primary(s.tag()).with_message(s.item)), + Description::Source(s) => Ok(Label::new_primary(s.span()).with_message(s.item)), Description::Synthetic(s) => Err(s), } } + + #[allow(unused)] + fn tag(&self) -> Tag { + match self { + Description::Source(tagged) => tagged.tag.clone(), + Description::Synthetic(_) => Tag::unknown(), + } + } +} + +#[derive(Debug, Clone)] +pub enum ParseErrorReason { + Eof { + expected: &'static str, + }, + Mismatch { + expected: &'static str, + actual: Tagged, + }, + ArgumentError { + command: String, + error: ArgumentError, + tag: Tag, + }, +} + +#[derive(Debug, Clone)] +pub struct ParseError { + reason: ParseErrorReason, + tag: Tag, +} + +impl ParseError { + pub fn unexpected_eof(expected: &'static str, span: Span) -> ParseError { + ParseError { + reason: ParseErrorReason::Eof { expected }, + tag: span.into(), + } + } + + pub fn mismatch(expected: &'static str, actual: Tagged>) -> ParseError { + let Tagged { tag, item } = actual; + + ParseError { + reason: ParseErrorReason::Mismatch { + expected, + actual: item.into().tagged(tag.clone()), + }, + tag, + } + } + + pub fn argument_error( + command: impl Into, + kind: ArgumentError, + tag: impl Into, + ) -> ParseError { + let tag = tag.into(); + + ParseError { + reason: ParseErrorReason::ArgumentError { + command: command.into(), + error: kind, + tag: tag.clone(), + }, + tag: tag.clone(), + } + } +} + +impl From for ShellError { + fn from(error: ParseError) -> 
ShellError { + match error.reason { + ParseErrorReason::Eof { expected } => ShellError::unexpected_eof(expected, error.tag), + ParseErrorReason::Mismatch { actual, expected } => { + ShellError::type_error(expected, actual.clone()) + } + ParseErrorReason::ArgumentError { + command, + error, + tag, + } => ShellError::argument_error(command, error, tag), + } + } } #[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Serialize, Deserialize)] @@ -35,8 +120,15 @@ pub struct ShellError { cause: Option>, } -impl ToDebug for ShellError { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { +impl ShellError { + #[allow(unused)] + pub(crate) fn tag(&self) -> Option { + self.error.tag() + } +} + +impl FormatDebug for ShellError { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { self.error.fmt_debug(f, source) } } @@ -46,12 +138,12 @@ impl serde::de::Error for ShellError { where T: std::fmt::Display, { - ShellError::string(msg.to_string()) + ShellError::untagged_runtime_error(msg.to_string()) } } impl ShellError { - pub(crate) fn type_error( + pub fn type_error( expected: impl Into, actual: Tagged>, ) -> ShellError { @@ -62,6 +154,21 @@ impl ShellError { .start() } + pub fn untagged_runtime_error(error: impl Into) -> ShellError { + ProximateShellError::UntaggedRuntimeError { + reason: error.into(), + } + .start() + } + + pub(crate) fn unexpected_eof(expected: impl Into, tag: impl Into) -> ShellError { + ProximateShellError::UnexpectedEof { + expected: expected.into(), + tag: tag.into(), + } + .start() + } + pub(crate) fn range_error( expected: impl Into, actual: &Tagged, @@ -69,7 +176,7 @@ impl ShellError { ) -> ShellError { ProximateShellError::RangeError { kind: expected.into(), - actual_kind: actual.copy_tag(format!("{:?}", actual.item)), + actual_kind: format!("{:?}", actual.item).tagged(actual.tag()), operation, } .start() @@ -82,6 +189,7 @@ impl ShellError { .start() } + #[allow(unused)] pub(crate) fn invalid_command(problem: impl Into) -> ShellError { ProximateShellError::InvalidCommand { command: problem.into(), @@ -111,29 +219,19 @@ impl ShellError { pub(crate) fn argument_error( command: impl Into, kind: ArgumentError, - tag: Tag, + tag: impl Into, ) -> ShellError { ProximateShellError::ArgumentError { command: command.into(), error: kind, - tag, - } - .start() - } - - pub(crate) fn invalid_external_word(tag: Tag) -> ShellError { - ProximateShellError::ArgumentError { - command: "Invalid argument to Nu command (did you mean to call an external command?)" - .into(), - error: ArgumentError::InvalidExternalWord, - tag, + tag: tag.into(), } .start() } pub(crate) fn parse_error( error: nom::Err<( - nom_locate::LocatedSpanEx<&str, uuid::Uuid>, + nom_locate::LocatedSpanEx<&str, TracableContext>, nom::error::ErrorKind, )>, ) -> ShellError { @@ -151,25 +249,22 @@ impl ShellError { } nom::Err::Failure(span) | nom::Err::Error(span) => { let diagnostic = Diagnostic::new(Severity::Error, format!("Parse Error")) - .with_label(Label::new_primary(Tag::from(span.0))); + .with_label(Label::new_primary(Span::from(span.0))); ShellError::diagnostic(diagnostic) } } } - pub(crate) fn diagnostic(diagnostic: Diagnostic) -> ShellError { + pub(crate) fn diagnostic(diagnostic: Diagnostic) -> ShellError { ProximateShellError::Diagnostic(ShellDiagnostic { diagnostic }).start() } - pub(crate) fn to_diagnostic(self) -> Diagnostic { + pub(crate) fn to_diagnostic(self) -> Diagnostic { match self.error { - ProximateShellError::String(StringError { title, .. 
}) => { - Diagnostic::new(Severity::Error, title) - } ProximateShellError::InvalidCommand { command } => { Diagnostic::new(Severity::Error, "Invalid command") - .with_label(Label::new_primary(command)) + .with_label(Label::new_primary(command.span)) } ProximateShellError::MissingValue { tag, reason } => { let mut d = Diagnostic::new( @@ -178,7 +273,7 @@ impl ShellError { ); if let Some(tag) = tag { - d = d.with_label(Label::new_primary(tag)); + d = d.with_label(Label::new_primary(tag.span)); } d @@ -191,7 +286,7 @@ impl ShellError { ArgumentError::InvalidExternalWord => Diagnostic::new( Severity::Error, format!("Invalid bare word for Nu command (did you intend to invoke an external command?)")) - .with_label(Label::new_primary(tag)), + .with_label(Label::new_primary(tag.span)), ArgumentError::MissingMandatoryFlag(name) => Diagnostic::new( Severity::Error, format!( @@ -201,7 +296,7 @@ impl ShellError { Color::Black.bold().paint(name) ), ) - .with_label(Label::new_primary(tag)), + .with_label(Label::new_primary(tag.span)), ArgumentError::MissingMandatoryPositional(name) => Diagnostic::new( Severity::Error, format!( @@ -211,7 +306,7 @@ impl ShellError { ), ) .with_label( - Label::new_primary(tag).with_message(format!("requires {} parameter", name)), + Label::new_primary(tag.span).with_message(format!("requires {} parameter", name)), ), ArgumentError::MissingValueForName(name) => Diagnostic::new( Severity::Error, @@ -222,7 +317,7 @@ impl ShellError { Color::Black.bold().paint(name) ), ) - .with_label(Label::new_primary(tag)), + .with_label(Label::new_primary(tag.span)), }, ProximateShellError::TypeError { expected, @@ -232,10 +327,9 @@ impl ShellError { tag, }, } => Diagnostic::new(Severity::Error, "Type Error").with_label( - Label::new_primary(tag) + Label::new_primary(tag.span) .with_message(format!("Expected {}, found {}", expected, actual)), ), - ProximateShellError::TypeError { expected, actual: @@ -244,7 +338,12 @@ impl ShellError { tag }, } => Diagnostic::new(Severity::Error, "Type Error") - .with_label(Label::new_primary(tag).with_message(expected)), + .with_label(Label::new_primary(tag.span).with_message(expected)), + + ProximateShellError::UnexpectedEof { + expected, tag + } => Diagnostic::new(Severity::Error, format!("Unexpected end of input")) + .with_label(Label::new_primary(tag.span).with_message(format!("Expected {}", expected))), ProximateShellError::RangeError { kind, @@ -255,7 +354,7 @@ impl ShellError { tag }, } => Diagnostic::new(Severity::Error, "Range Error").with_label( - Label::new_primary(tag).with_message(format!( + Label::new_primary(tag.span).with_message(format!( "Expected to convert {} to {} while {}, but it was out of range", item, kind.desc(), @@ -267,12 +366,12 @@ impl ShellError { problem: Tagged { tag, - .. + item }, } => Diagnostic::new(Severity::Error, "Syntax Error") - .with_label(Label::new_primary(tag).with_message("Unexpected external command")), + .with_label(Label::new_primary(tag.span).with_message(item)), - ProximateShellError::MissingProperty { subpath, expr } => { + ProximateShellError::MissingProperty { subpath, expr, .. 
} => { let subpath = subpath.into_label(); let expr = expr.into_label(); @@ -293,9 +392,11 @@ impl ShellError { ProximateShellError::Diagnostic(diag) => diag.diagnostic, ProximateShellError::CoerceError { left, right } => { Diagnostic::new(Severity::Error, "Coercion error") - .with_label(Label::new_primary(left.tag()).with_message(left.item)) - .with_label(Label::new_secondary(right.tag()).with_message(right.item)) + .with_label(Label::new_primary(left.tag().span).with_message(left.item)) + .with_label(Label::new_secondary(right.tag().span).with_message(right.item)) } + + ProximateShellError::UntaggedRuntimeError { reason } => Diagnostic::new(Severity::Error, format!("Error: {}", reason)) } } @@ -306,7 +407,7 @@ impl ShellError { ) -> ShellError { ShellError::diagnostic( Diagnostic::new(Severity::Error, msg.into()) - .with_label(Label::new_primary(tag.into()).with_message(label.into())), + .with_label(Label::new_primary(tag.into().span).with_message(label.into())), ) } @@ -320,25 +421,29 @@ impl ShellError { ShellError::diagnostic( Diagnostic::new_error(msg.into()) .with_label( - Label::new_primary(primary_span.into()).with_message(primary_label.into()), + Label::new_primary(primary_span.into().span).with_message(primary_label.into()), ) .with_label( - Label::new_secondary(secondary_span.into()) + Label::new_secondary(secondary_span.into().span) .with_message(secondary_label.into()), ), ) } - pub fn string(title: impl Into) -> ShellError { - ProximateShellError::String(StringError::new(title.into(), Value::nothing())).start() - } + // pub fn string(title: impl Into) -> ShellError { + // ProximateShellError::String(StringError::new(title.into(), String::new())).start() + // } + // + // pub(crate) fn unreachable(title: impl Into) -> ShellError { + // ShellError::untagged_runtime_error(&format!("BUG: Unreachable: {}", title.into())) + // } pub(crate) fn unimplemented(title: impl Into) -> ShellError { - ShellError::string(&format!("Unimplemented: {}", title.into())) + ShellError::untagged_runtime_error(&format!("Unimplemented: {}", title.into())) } pub(crate) fn unexpected(title: impl Into) -> ShellError { - ShellError::string(&format!("Unexpected: {}", title.into())) + ShellError::untagged_runtime_error(&format!("Unexpected: {}", title.into())) } } @@ -383,10 +488,13 @@ impl ExpectedRange { #[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Serialize, Deserialize)] pub enum ProximateShellError { - String(StringError), SyntaxError { problem: Tagged, }, + UnexpectedEof { + expected: String, + tag: Tag, + }, InvalidCommand { command: Tag, }, @@ -397,6 +505,7 @@ pub enum ProximateShellError { MissingProperty { subpath: Description, expr: Description, + tag: Tag, }, MissingValue { tag: Option, @@ -417,6 +526,9 @@ pub enum ProximateShellError { left: Tagged, right: Tagged, }, + UntaggedRuntimeError { + reason: String, + }, } impl ProximateShellError { @@ -426,10 +538,26 @@ impl ProximateShellError { error: self, } } + + pub(crate) fn tag(&self) -> Option { + Some(match self { + ProximateShellError::SyntaxError { problem } => problem.tag(), + ProximateShellError::UnexpectedEof { tag, .. } => tag.clone(), + ProximateShellError::InvalidCommand { command } => command.clone(), + ProximateShellError::TypeError { actual, .. } => actual.tag.clone(), + ProximateShellError::MissingProperty { tag, .. } => tag.clone(), + ProximateShellError::MissingValue { tag, .. } => return tag.clone(), + ProximateShellError::ArgumentError { tag, .. } => tag.clone(), + ProximateShellError::RangeError { actual_kind, .. 
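The diagnostic arms above now label a Span rather than a whole Tag, which is what lets the reporter underline the exact range in the offending line. A toy, std-only renderer showing that effect; the real output comes from the language-reporting crate, so this is only an illustration of the idea:

fn render(source_line: &str, start: usize, end: usize, message: &str) -> String {
    // Print the offending line, then a caret underline covering [start, end).
    let width = end.saturating_sub(start).max(1);
    format!(
        "{}\n{}{} {}",
        source_line,
        " ".repeat(start),
        "^".repeat(width),
        message
    )
}

fn main() {
    println!(
        "{}",
        render("where size > ten", 13, 16, "Expected Integer, found String")
    );
}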
} => actual_kind.tag.clone(), + ProximateShellError::Diagnostic(..) => return None, + ProximateShellError::UntaggedRuntimeError { .. } => return None, + ProximateShellError::CoerceError { left, right } => left.tag.until(&right.tag), + }) + } } -impl ToDebug for ProximateShellError { - fn fmt_debug(&self, f: &mut fmt::Formatter, _source: &str) -> fmt::Result { +impl FormatDebug for ProximateShellError { + fn fmt_debug(&self, f: &mut DebugFormatter, _source: &str) -> fmt::Result { // TODO: Custom debug for inner spans write!(f, "{:?}", self) } @@ -437,7 +565,7 @@ impl ToDebug for ProximateShellError { #[derive(Debug, Clone, Serialize, Deserialize)] pub struct ShellDiagnostic { - pub(crate) diagnostic: Diagnostic, + pub(crate) diagnostic: Diagnostic, } impl PartialEq for ShellDiagnostic { @@ -463,22 +591,23 @@ impl std::cmp::Ord for ShellDiagnostic { #[derive(Debug, Ord, PartialOrd, Eq, PartialEq, new, Clone, Serialize, Deserialize)] pub struct StringError { title: String, - error: Value, + error: String, } impl std::fmt::Display for ShellError { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match &self.error { - ProximateShellError::String(s) => write!(f, "{}", &s.title), ProximateShellError::MissingValue { .. } => write!(f, "MissingValue"), ProximateShellError::InvalidCommand { .. } => write!(f, "InvalidCommand"), ProximateShellError::TypeError { .. } => write!(f, "TypeError"), + ProximateShellError::UnexpectedEof { .. } => write!(f, "UnexpectedEof"), ProximateShellError::RangeError { .. } => write!(f, "RangeError"), ProximateShellError::SyntaxError { .. } => write!(f, "SyntaxError"), ProximateShellError::MissingProperty { .. } => write!(f, "MissingProperty"), ProximateShellError::ArgumentError { .. } => write!(f, "ArgumentError"), ProximateShellError::Diagnostic(_) => write!(f, ""), ProximateShellError::CoerceError { .. } => write!(f, "CoerceError"), + ProximateShellError::UntaggedRuntimeError { .. 
} => write!(f, "UntaggedRuntimeError"), } } } @@ -487,71 +616,43 @@ impl std::error::Error for ShellError {} impl std::convert::From> for ShellError { fn from(input: Box) -> ShellError { - ProximateShellError::String(StringError { - title: format!("{}", input), - error: Value::nothing(), - }) - .start() + ShellError::untagged_runtime_error(format!("{}", input)) } } impl std::convert::From for ShellError { fn from(input: std::io::Error) -> ShellError { - ProximateShellError::String(StringError { - title: format!("{}", input), - error: Value::nothing(), - }) - .start() + ShellError::untagged_runtime_error(format!("{}", input)) } } impl std::convert::From for ShellError { fn from(input: subprocess::PopenError) -> ShellError { - ProximateShellError::String(StringError { - title: format!("{}", input), - error: Value::nothing(), - }) - .start() + ShellError::untagged_runtime_error(format!("{}", input)) } } impl std::convert::From for ShellError { fn from(input: serde_yaml::Error) -> ShellError { - ProximateShellError::String(StringError { - title: format!("{:?}", input), - error: Value::nothing(), - }) - .start() + ShellError::untagged_runtime_error(format!("{:?}", input)) } } impl std::convert::From for ShellError { fn from(input: toml::ser::Error) -> ShellError { - ProximateShellError::String(StringError { - title: format!("{:?}", input), - error: Value::nothing(), - }) - .start() + ShellError::untagged_runtime_error(format!("{:?}", input)) } } impl std::convert::From for ShellError { fn from(input: serde_json::Error) -> ShellError { - ProximateShellError::String(StringError { - title: format!("{:?}", input), - error: Value::nothing(), - }) - .start() + ShellError::untagged_runtime_error(format!("{:?}", input)) } } impl std::convert::From> for ShellError { fn from(input: Box) -> ShellError { - ProximateShellError::String(StringError { - title: format!("{:?}", input), - error: Value::nothing(), - }) - .start() + ShellError::untagged_runtime_error(format!("{:?}", input)) } } @@ -567,7 +668,6 @@ impl ShellErrorUtils> for Option> { } } } - pub trait CoerceInto { fn coerce_into(self, operation: impl Into) -> Result; } diff --git a/src/evaluate/evaluator.rs b/src/evaluate/evaluator.rs index a111d3964d..ceb555c339 100644 --- a/src/evaluate/evaluator.rs +++ b/src/evaluate/evaluator.rs @@ -5,8 +5,11 @@ use crate::parser::{ CommandRegistry, Text, }; use crate::prelude::*; +use crate::TaggedDictBuilder; use derive_new::new; use indexmap::IndexMap; +use log::trace; +use std::fmt; #[derive(new)] pub struct Scope { @@ -15,6 +18,15 @@ pub struct Scope { vars: IndexMap>, } +impl fmt::Display for Scope { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_map() + .entry(&"$it", &format!("{:?}", self.it.item)) + .entries(self.vars.iter().map(|(k, v)| (k, &v.item))) + .finish() + } +} + impl Scope { pub(crate) fn empty() -> Scope { Scope { @@ -37,28 +49,41 @@ pub(crate) fn evaluate_baseline_expr( scope: &Scope, source: &Text, ) -> Result, ShellError> { + let tag = Tag { + span: expr.span, + anchor: None, + }; match &expr.item { - RawExpression::Literal(literal) => Ok(evaluate_literal(expr.copy_tag(literal), source)), + RawExpression::Literal(literal) => Ok(evaluate_literal(literal.tagged(tag), source)), RawExpression::ExternalWord => Err(ShellError::argument_error( "Invalid external word", ArgumentError::InvalidExternalWord, - expr.tag(), + tag, )), - RawExpression::FilePath(path) => Ok(Value::path(path.clone()).tagged(expr.tag())), + RawExpression::FilePath(path) => 
Ok(Value::path(path.clone()).tagged(tag)), RawExpression::Synthetic(hir::Synthetic::String(s)) => { Ok(Value::string(s).tagged_unknown()) } - RawExpression::Variable(var) => evaluate_reference(var, scope, source), + RawExpression::Variable(var) => evaluate_reference(var, scope, source, tag), + RawExpression::Command(_) => evaluate_command(tag, scope, source), RawExpression::ExternalCommand(external) => evaluate_external(external, scope, source), RawExpression::Binary(binary) => { let left = evaluate_baseline_expr(binary.left(), registry, scope, source)?; let right = evaluate_baseline_expr(binary.right(), registry, scope, source)?; + trace!("left={:?} right={:?}", left.item, right.item); + match left.compare(binary.op(), &*right) { - Ok(result) => Ok(Value::boolean(result).tagged(expr.tag())), + Ok(result) => Ok(Value::boolean(result).tagged(tag)), Err((left_type, right_type)) => Err(ShellError::coerce_error( - binary.left().copy_tag(left_type), - binary.right().copy_tag(right_type), + left_type.tagged(Tag { + span: binary.left().span, + anchor: None, + }), + right_type.tagged(Tag { + span: binary.right().span, + anchor: None, + }), )), } } @@ -70,13 +95,10 @@ pub(crate) fn evaluate_baseline_expr( exprs.push(expr); } - Ok(Value::Table(exprs).tagged(expr.tag())) + Ok(Value::Table(exprs).tagged(tag)) } RawExpression::Block(block) => { - Ok( - Value::Block(Block::new(block.clone(), source.clone(), expr.tag())) - .tagged(expr.tag()), - ) + Ok(Value::Block(Block::new(block.clone(), source.clone(), tag.clone())).tagged(&tag)) } RawExpression::Path(path) => { let value = evaluate_baseline_expr(path.head(), registry, scope, source)?; @@ -96,19 +118,27 @@ pub(crate) fn evaluate_baseline_expr( possible_matches.sort(); - return Err(ShellError::labeled_error( - "Unknown column", - format!("did you mean '{}'?", possible_matches[0].1), - expr.tag(), - )); + if possible_matches.len() > 0 { + return Err(ShellError::labeled_error( + "Unknown column", + format!("did you mean '{}'?", possible_matches[0].1), + &tag, + )); + } else { + return Err(ShellError::labeled_error( + "Unknown column", + "row does not have this column", + &tag, + )); + } } Some(next) => { - item = next.clone().item.tagged(expr.tag()); + item = next.clone().item.tagged(&tag); } }; } - Ok(item.item().clone().tagged(expr.tag())) + Ok(item.item().clone().tagged(tag)) } RawExpression::Boolean(_boolean) => unimplemented!(), } @@ -119,7 +149,7 @@ fn evaluate_literal(literal: Tagged<&hir::Literal>, source: &Text) -> Tagged int.into(), hir::Literal::Size(int, unit) => unit.compute(int), hir::Literal::String(tag) => Value::string(tag.slice(source)), - hir::Literal::GlobPattern => Value::pattern(literal.tag().slice(source)), + hir::Literal::GlobPattern(pattern) => Value::pattern(pattern), hir::Literal::Bare => Value::string(literal.tag().slice(source)), }; @@ -130,14 +160,43 @@ fn evaluate_reference( name: &hir::Variable, scope: &Scope, source: &Text, + tag: Tag, ) -> Result, ShellError> { + trace!("Evaluating {} with Scope {}", name, scope); match name { - hir::Variable::It(tag) => Ok(scope.it.item.clone().tagged(*tag)), - hir::Variable::Other(tag) => Ok(scope - .vars - .get(tag.slice(source)) - .map(|v| v.clone()) - .unwrap_or_else(|| Value::nothing().tagged(*tag))), + hir::Variable::It(_) => Ok(scope.it.item.clone().tagged(tag)), + hir::Variable::Other(inner) => match inner.slice(source) { + x if x == "nu:env" => { + let mut dict = TaggedDictBuilder::new(&tag); + for v in std::env::vars() { + if v.0 != "PATH" && v.0 != "Path" { + 
dict.insert(v.0, Value::string(v.1)); + } + } + Ok(dict.into_tagged_value()) + } + x if x == "nu:config" => { + let config = crate::data::config::read(tag.clone(), &None)?; + Ok(Value::row(config).tagged(tag)) + } + x if x == "nu:path" => { + let mut table = vec![]; + match std::env::var_os("PATH") { + Some(paths) => { + for path in std::env::split_paths(&paths) { + table.push(Value::path(path).tagged(&tag)); + } + } + _ => {} + } + Ok(Value::table(&table).tagged(tag)) + } + x => Ok(scope + .vars + .get(x) + .map(|v| v.clone()) + .unwrap_or_else(|| Value::nothing().tagged(tag))), + }, } } @@ -150,3 +209,7 @@ fn evaluate_external( "Unexpected external command".tagged(*external.name()), )) } + +fn evaluate_command(tag: Tag, _scope: &Scope, _source: &Text) -> Result, ShellError> { + Err(ShellError::syntax_error("Unexpected command".tagged(tag))) +} diff --git a/src/format/generic.rs b/src/format/generic.rs index b6f9e29f26..fd058f31fc 100644 --- a/src/format/generic.rs +++ b/src/format/generic.rs @@ -14,7 +14,7 @@ impl RenderView for GenericView<'_> { match self.value { Value::Primitive(p) => Ok(host.stdout(&p.format(None))), Value::Table(l) => { - let view = TableView::from_list(l); + let view = TableView::from_list(l, 0); if let Some(view) = view { view.render_view(host)?; @@ -35,6 +35,8 @@ impl RenderView for GenericView<'_> { view.render_view(host)?; Ok(()) } + + Value::Error(e) => Err(e.clone()), } } } diff --git a/src/format/table.rs b/src/format/table.rs index 286be222c3..3ed5937b4b 100644 --- a/src/format/table.rs +++ b/src/format/table.rs @@ -23,18 +23,27 @@ enum TableMode { impl TableView { fn merge_descriptors(values: &[Tagged]) -> Vec { - let mut ret = vec![]; + let mut ret: Vec = vec![]; + let value_column = "".to_string(); for value in values { - for desc in value.data_descriptors() { - if !ret.contains(&desc) { - ret.push(desc); + let descs = value.data_descriptors(); + + if descs.len() == 0 { + if !ret.contains(&value_column) { + ret.push("".to_string()); + } + } else { + for desc in value.data_descriptors() { + if !ret.contains(&desc) { + ret.push(desc); + } } } } ret } - pub fn from_list(values: &[Tagged]) -> Option { + pub fn from_list(values: &[Tagged], starting_idx: usize) -> Option { if values.len() == 0 { return None; } @@ -42,33 +51,69 @@ impl TableView { let mut headers = TableView::merge_descriptors(values); if headers.len() == 0 { - headers.push("value".to_string()); + headers.push("".to_string()); } let mut entries = vec![]; for (idx, value) in values.iter().enumerate() { - let mut row: Vec<(String, &'static str)> = match value { - Tagged { - item: Value::Row(..), - .. - } => headers - .iter() - .enumerate() - .map(|(i, d)| { - let data = value.get_data(d); - return ( - data.borrow().format_leaf(Some(&headers[i])), - data.borrow().style_leaf(), - ); - }) - .collect(), - x => vec![(x.format_leaf(None), x.style_leaf())], - }; + // let mut row: Vec<(String, &'static str)> = match value { + // Tagged { + // item: Value::Row(..), + // .. + // } => headers + // .iter() + // .enumerate() + // .map(|(i, d)| { + // let data = value.get_data(d); + // return ( + // data.borrow().format_leaf(Some(&headers[i])), + // data.borrow().style_leaf(), + // ); + // }) + // .collect(), + // x => vec![(x.format_leaf(None), x.style_leaf())], + // }; + + let mut row: Vec<(String, &'static str)> = headers + .iter() + .enumerate() + .map(|(i, d)| { + if d == "" { + match value { + Tagged { + item: Value::Row(..), + .. 
+ } => ( + Value::nothing().format_leaf(None), + Value::nothing().style_leaf(), + ), + _ => (value.format_leaf(None), value.style_leaf()), + } + } else { + match value { + Tagged { + item: Value::Row(..), + .. + } => { + let data = value.get_data(d); + ( + data.borrow().format_leaf(Some(&headers[i])), + data.borrow().style_leaf(), + ) + } + _ => ( + Value::nothing().format_leaf(None), + Value::nothing().style_leaf(), + ), + } + } + }) + .collect(); if values.len() > 1 { // Indices are black, bold, right-aligned: - row.insert(0, (format!("{}", idx.to_string()), "Fdbr")); + row.insert(0, (format!("{}", (starting_idx + idx).to_string()), "Fdbr")); } entries.push(row); diff --git a/src/fuzzysearch.rs b/src/fuzzysearch.rs index 5cb08dd3f5..c7d58ed632 100644 --- a/src/fuzzysearch.rs +++ b/src/fuzzysearch.rs @@ -73,9 +73,7 @@ pub fn interactive_fuzzy_search(lines: &Vec<&str>, max_results: usize) -> Select searchinput.pop(); selected = 0; } - _ => { - // println!("OTHER InputEvent: {:?}", k); - } + _ => {} }, _ => {} } diff --git a/src/lib.rs b/src/lib.rs index e8e09aacdd..38f770dc21 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,4 +1,8 @@ -#![recursion_limit = "512"] +#![recursion_limit = "1024"] + +#[cfg(test)] +#[macro_use] +extern crate indexmap; #[macro_use] mod prelude; @@ -21,17 +25,21 @@ mod traits; mod utils; pub use crate::commands::command::{CallInfo, ReturnSuccess, ReturnValue}; -pub use crate::context::{AnchorLocation, SourceMap}; +pub use crate::context::AnchorLocation; pub use crate::env::host::BasicHost; pub use crate::parser::hir::SyntaxShape; pub use crate::parser::parse::token_tree_builder::TokenTreeBuilder; pub use crate::plugin::{serve_plugin, Plugin}; -pub use crate::utils::{AbsoluteFile, AbsolutePath, RelativePath}; +pub use crate::traits::{DebugFormatter, FormatDebug, ToDebug}; +pub use crate::utils::{did_you_mean, AbsoluteFile, AbsolutePath, RelativePath}; pub use cli::cli; pub use data::base::{Primitive, Value}; pub use data::config::{config_path, APP_INFO}; pub use data::dict::{Dictionary, TaggedDictBuilder}; -pub use data::meta::{Tag, Tagged, TaggedItem}; +pub use data::meta::{ + tag_for_tagged_list, HasFallibleSpan, HasSpan, Span, Spanned, SpannedItem, Tag, Tagged, + TaggedItem, +}; pub use errors::{CoerceInto, ShellError}; pub use num_traits::cast::ToPrimitive; pub use parser::parse::text::Text; diff --git a/src/main.rs b/src/main.rs index 7f82808e74..e31c983f60 100644 --- a/src/main.rs +++ b/src/main.rs @@ -19,6 +19,12 @@ fn main() -> Result<(), Box> { .multiple(true) .takes_value(true), ) + .arg( + Arg::with_name("debug") + .long("debug") + .multiple(true) + .takes_value(true), + ) .get_matches(); let loglevel = match matches.value_of("loglevel") { @@ -48,6 +54,15 @@ fn main() -> Result<(), Box> { } } + match matches.values_of("debug") { + None => {} + Some(values) => { + for item in values { + builder.filter_module(&format!("nu::{}", item), LevelFilter::Debug); + } + } + } + builder.try_init()?; futures::executor::block_on(nu::cli())?; diff --git a/src/parser.rs b/src/parser.rs index 138125769b..7acdf6e6bf 100644 --- a/src/parser.rs +++ b/src/parser.rs @@ -7,24 +7,23 @@ pub(crate) mod registry; use crate::errors::ShellError; pub(crate) use deserializer::ConfigDeserializer; -pub(crate) use hir::baseline_parse_tokens::baseline_parse_tokens; +pub(crate) use hir::syntax_shape::flat_shape::FlatShape; +pub(crate) use hir::TokensIterator; pub(crate) use parse::call_node::CallNode; pub(crate) use parse::files::Files; -pub(crate) use parse::flag::Flag; +pub(crate) use 
parse::flag::{Flag, FlagKind}; pub(crate) use parse::operator::Operator; pub(crate) use parse::parser::{nom_input, pipeline}; -pub(crate) use parse::pipeline::{Pipeline, PipelineElement}; pub(crate) use parse::text::Text; -pub(crate) use parse::token_tree::{DelimitedNode, Delimiter, PathNode, TokenNode}; -pub(crate) use parse::tokens::{RawToken, Token}; +pub(crate) use parse::token_tree::{DelimitedNode, Delimiter, TokenNode}; +pub(crate) use parse::tokens::{RawNumber, RawToken}; pub(crate) use parse::unit::Unit; -pub(crate) use parse_command::parse_command; pub(crate) use registry::CommandRegistry; -pub fn parse(input: &str, anchor: uuid::Uuid) -> Result { +pub fn parse(input: &str) -> Result { let _ = pretty_env_logger::try_init(); - match pipeline(nom_input(input, anchor)) { + match pipeline(nom_input(input)) { Ok((_rest, val)) => Ok(val), Err(err) => Err(ShellError::parse_error(err)), } diff --git a/src/parser/deserializer.rs b/src/parser/deserializer.rs index f9b9146e50..c9436ab29e 100644 --- a/src/parser/deserializer.rs +++ b/src/parser/deserializer.rs @@ -52,7 +52,7 @@ impl<'de> ConfigDeserializer<'de> { self.stack.push(DeserializerItem { key_struct_field: Some((name.to_string(), name)), - val: value.unwrap_or_else(|| Value::nothing().tagged(self.call.name_tag)), + val: value.unwrap_or_else(|| Value::nothing().tagged(&self.call.name_tag)), }); Ok(()) @@ -61,7 +61,7 @@ impl<'de> ConfigDeserializer<'de> { pub fn top(&mut self) -> &DeserializerItem { let value = self.stack.last(); trace!("inspecting top value :: {:?}", value); - value.expect("Can't get top elemant of an empty stack") + value.expect("Can't get top element of an empty stack") } pub fn pop(&mut self) -> DeserializerItem { @@ -310,9 +310,10 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut ConfigDeserializer<'de> { return Ok(r); } trace!( - "deserializing struct {:?} {:?} (stack={:?})", + "deserializing struct {:?} {:?} (saw_root={} stack={:?})", name, fields, + self.saw_root, self.stack ); @@ -326,6 +327,12 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut ConfigDeserializer<'de> { let type_name = std::any::type_name::(); let tagged_val_name = std::any::type_name::>(); + trace!( + "type_name={} tagged_val_name={}", + type_name, + tagged_val_name + ); + if type_name == tagged_val_name { return visit::, _>(value.val, name, fields, visitor); } @@ -479,8 +486,8 @@ mod tests { // is unspecified and change is likely. // This test makes sure that such change is detected // by this test failing, and not things silently breaking. - // Specifically, we rely on this behaviour further above - // in the file to special case Tagged parsing. + // Specifically, we rely on this behavior further above + // in the file for the Tagged special case parsing. 
let tuple = type_name::<()>(); let tagged_tuple = type_name::>(); let tagged_value = type_name::>(); diff --git a/src/parser/hir.rs b/src/parser/hir.rs index 96eb7272a6..28b8a21a03 100644 --- a/src/parser/hir.rs +++ b/src/parser/hir.rs @@ -1,11 +1,13 @@ pub(crate) mod baseline_parse; -pub(crate) mod baseline_parse_tokens; pub(crate) mod binary; +pub(crate) mod expand_external_tokens; pub(crate) mod external_command; pub(crate) mod named; pub(crate) mod path; +pub(crate) mod syntax_shape; +pub(crate) mod tokens_iterator; -use crate::parser::{registry, Unit}; +use crate::parser::{registry, Operator, Unit}; use crate::prelude::*; use derive_new::new; use getset::Getters; @@ -14,27 +16,17 @@ use std::fmt; use std::path::PathBuf; use crate::evaluate::Scope; +use crate::parser::parse::tokens::RawNumber; +use crate::traits::ToDebug; -pub(crate) use self::baseline_parse::{ - baseline_parse_single_token, baseline_parse_token_as_number, baseline_parse_token_as_path, - baseline_parse_token_as_pattern, baseline_parse_token_as_string, -}; -pub(crate) use self::baseline_parse_tokens::{baseline_parse_next_expr, TokensIterator}; pub(crate) use self::binary::Binary; pub(crate) use self::external_command::ExternalCommand; pub(crate) use self::named::NamedArguments; pub(crate) use self::path::Path; +pub(crate) use self::syntax_shape::ExpandContext; +pub(crate) use self::tokens_iterator::TokensIterator; -pub use self::baseline_parse_tokens::SyntaxShape; - -pub fn path(head: impl Into, tail: Vec>>) -> Path { - Path::new( - head.into(), - tail.into_iter() - .map(|item| item.map(|string| string.into())) - .collect(), - ) -} +pub use self::syntax_shape::SyntaxShape; #[derive(Debug, Clone, Eq, PartialEq, Getters, Serialize, Deserialize, new)] pub struct Call { @@ -57,8 +49,8 @@ impl Call { } } -impl ToDebug for Call { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { +impl FormatDebug for Call { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { write!(f, "({}", self.head.debug(source))?; if let Some(positional) = &self.positional { @@ -93,6 +85,7 @@ pub enum RawExpression { FilePath(PathBuf), ExternalCommand(ExternalCommand), + Command(Span), Boolean(bool), } @@ -115,79 +108,158 @@ impl RawExpression { match self { RawExpression::Literal(literal) => literal.type_name(), RawExpression::Synthetic(synthetic) => synthetic.type_name(), - RawExpression::ExternalWord => "externalword", - RawExpression::FilePath(..) => "filepath", + RawExpression::Command(..) => "command", + RawExpression::ExternalWord => "external word", + RawExpression::FilePath(..) => "file path", RawExpression::Variable(..) => "variable", RawExpression::List(..) => "list", RawExpression::Binary(..) => "binary", RawExpression::Block(..) => "block", - RawExpression::Path(..) => "path", + RawExpression::Path(..) => "variable path", RawExpression::Boolean(..) => "boolean", RawExpression::ExternalCommand(..) 
=> "external", } } } -pub type Expression = Tagged; +pub type Expression = Spanned; + +impl std::fmt::Display for Expression { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let span = self.span; + + match &self.item { + RawExpression::Literal(literal) => write!(f, "{}", literal.tagged(self.span)), + RawExpression::Synthetic(Synthetic::String(s)) => write!(f, "{}", s), + RawExpression::Command(_) => write!(f, "Command{{ {}..{} }}", span.start(), span.end()), + RawExpression::ExternalWord => { + write!(f, "ExternalWord{{ {}..{} }}", span.start(), span.end()) + } + RawExpression::FilePath(file) => write!(f, "Path{{ {} }}", file.display()), + RawExpression::Variable(variable) => write!(f, "{}", variable), + RawExpression::List(list) => f + .debug_list() + .entries(list.iter().map(|e| format!("{}", e))) + .finish(), + RawExpression::Binary(binary) => write!(f, "{}", binary), + RawExpression::Block(items) => { + write!(f, "Block")?; + f.debug_set() + .entries(items.iter().map(|i| format!("{}", i))) + .finish() + } + RawExpression::Path(path) => write!(f, "{}", path), + RawExpression::Boolean(b) => write!(f, "${}", b), + RawExpression::ExternalCommand(..) => { + write!(f, "ExternalComment{{ {}..{} }}", span.start(), span.end()) + } + } + } +} impl Expression { - pub(crate) fn number(i: impl Into, tag: impl Into) -> Expression { - RawExpression::Literal(Literal::Number(i.into())).tagged(tag.into()) + pub(crate) fn number(i: impl Into, span: impl Into) -> Expression { + RawExpression::Literal(Literal::Number(i.into())).spanned(span.into()) } pub(crate) fn size( i: impl Into, unit: impl Into, - tag: impl Into, + span: impl Into, ) -> Expression { - RawExpression::Literal(Literal::Size(i.into(), unit.into())).tagged(tag.into()) + RawExpression::Literal(Literal::Size(i.into(), unit.into())).spanned(span.into()) } pub(crate) fn synthetic_string(s: impl Into) -> Expression { - RawExpression::Synthetic(Synthetic::String(s.into())).tagged_unknown() + RawExpression::Synthetic(Synthetic::String(s.into())).spanned_unknown() } - pub(crate) fn string(inner: impl Into, outer: impl Into) -> Expression { - RawExpression::Literal(Literal::String(inner.into())).tagged(outer.into()) + pub(crate) fn string(inner: impl Into, outer: impl Into) -> Expression { + RawExpression::Literal(Literal::String(inner.into())).spanned(outer.into()) } - pub(crate) fn file_path(path: impl Into, outer: impl Into) -> Expression { - RawExpression::FilePath(path.into()).tagged(outer) + pub(crate) fn path( + head: Expression, + tail: Vec>>, + span: impl Into, + ) -> Expression { + let tail = tail.into_iter().map(|t| t.map(|s| s.into())).collect(); + RawExpression::Path(Box::new(Path::new(head, tail))).spanned(span.into()) } - pub(crate) fn bare(tag: impl Into) -> Expression { - RawExpression::Literal(Literal::Bare).tagged(tag) + pub(crate) fn dot_member(head: Expression, next: Spanned>) -> Expression { + let Spanned { item, span } = head; + let new_span = head.span.until(next.span); + + match item { + RawExpression::Path(path) => { + let (head, mut tail) = path.parts(); + + tail.push(next.map(|i| i.into())); + Expression::path(head, tail, new_span) + } + + other => Expression::path(other.spanned(span), vec![next], new_span), + } } - pub(crate) fn pattern(tag: impl Into) -> Expression { - RawExpression::Literal(Literal::GlobPattern).tagged(tag.into()) + pub(crate) fn infix( + left: Expression, + op: Spanned>, + right: Expression, + ) -> Expression { + let new_span = left.span.until(right.span); + + 
RawExpression::Binary(Box::new(Binary::new(left, op.map(|o| o.into()), right))) + .spanned(new_span) } - pub(crate) fn variable(inner: impl Into, outer: impl Into) -> Expression { - RawExpression::Variable(Variable::Other(inner.into())).tagged(outer) + pub(crate) fn file_path(path: impl Into, outer: impl Into) -> Expression { + RawExpression::FilePath(path.into()).spanned(outer) } - pub(crate) fn external_command(inner: impl Into, outer: impl Into) -> Expression { - RawExpression::ExternalCommand(ExternalCommand::new(inner.into())).tagged(outer) + pub(crate) fn list(list: Vec, span: impl Into) -> Expression { + RawExpression::List(list).spanned(span) } - pub(crate) fn it_variable(inner: impl Into, outer: impl Into) -> Expression { - RawExpression::Variable(Variable::It(inner.into())).tagged(outer) + pub(crate) fn bare(span: impl Into) -> Expression { + RawExpression::Literal(Literal::Bare).spanned(span) + } + + pub(crate) fn pattern(inner: impl Into, outer: impl Into) -> Expression { + RawExpression::Literal(Literal::GlobPattern(inner.into())).spanned(outer.into()) + } + + pub(crate) fn variable(inner: impl Into, outer: impl Into) -> Expression { + RawExpression::Variable(Variable::Other(inner.into())).spanned(outer) + } + + pub(crate) fn external_command(inner: impl Into, outer: impl Into) -> Expression { + RawExpression::ExternalCommand(ExternalCommand::new(inner.into())).spanned(outer) + } + + pub(crate) fn it_variable(inner: impl Into, outer: impl Into) -> Expression { + RawExpression::Variable(Variable::It(inner.into())).spanned(outer) + } + + pub(crate) fn tagged_type_name(&self) -> Tagged<&'static str> { + self.item.type_name().tagged(self.span) } } -impl ToDebug for Expression { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { - match self.item() { - RawExpression::Literal(l) => l.tagged(self.tag()).fmt_debug(f, source), +impl FormatDebug for Spanned { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + match &self.item { + RawExpression::Literal(l) => l.spanned(self.span).fmt_debug(f, source), RawExpression::FilePath(p) => write!(f, "{}", p.display()), - RawExpression::ExternalWord => write!(f, "{}", self.tag().slice(source)), + RawExpression::ExternalWord => write!(f, "{}", self.span.slice(source)), + RawExpression::Command(tag) => write!(f, "{}", tag.slice(source)), RawExpression::Synthetic(Synthetic::String(s)) => write!(f, "{:?}", s), RawExpression::Variable(Variable::It(_)) => write!(f, "$it"), RawExpression::Variable(Variable::Other(s)) => write!(f, "${}", s.slice(source)), RawExpression::Binary(b) => write!(f, "{}", b.debug(source)), RawExpression::ExternalCommand(c) => write!(f, "^{}", c.name().slice(source)), - RawExpression::Block(exprs) => { + RawExpression::Block(exprs) => f.say_block("block", |f| { write!(f, "{{ ")?; for expr in exprs { @@ -195,8 +267,8 @@ impl ToDebug for Expression { } write!(f, "}}") - } - RawExpression::List(exprs) => { + }), + RawExpression::List(exprs) => f.say_block("list", |f| { write!(f, "[ ")?; for expr in exprs { @@ -204,7 +276,7 @@ impl ToDebug for Expression { } write!(f, "]") - } + }), RawExpression::Path(p) => write!(f, "{}", p.debug(source)), RawExpression::Boolean(true) => write!(f, "$yes"), RawExpression::Boolean(false) => write!(f, "$no"), @@ -212,8 +284,8 @@ impl ToDebug for Expression { } } -impl From> for Expression { - fn from(path: Tagged) -> Expression { +impl From> for Expression { + fn from(path: Spanned) -> Expression { path.map(|p| RawExpression::Path(Box::new(p))) } } 
@@ -227,19 +299,39 @@ impl From> for Expression { pub enum Literal { Number(Number), Size(Number, Unit), - String(Tag), - GlobPattern, + String(Span), + GlobPattern(String), Bare, } -impl ToDebug for Tagged<&Literal> { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { - match self.item() { - Literal::Number(number) => write!(f, "{:?}", *number), - Literal::Size(number, unit) => write!(f, "{:?}{:?}", *number, unit), - Literal::String(tag) => write!(f, "{}", tag.slice(source)), - Literal::GlobPattern => write!(f, "{}", self.tag().slice(source)), - Literal::Bare => write!(f, "{}", self.tag().slice(source)), +impl std::fmt::Display for Tagged { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", Tagged::new(self.tag.clone(), &self.item)) + } +} + +impl std::fmt::Display for Tagged<&Literal> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let span = self.tag.span; + + match &self.item { + Literal::Number(number) => write!(f, "{}", number), + Literal::Size(number, unit) => write!(f, "{}{}", number, unit.as_str()), + Literal::String(_) => write!(f, "String{{ {}..{} }}", span.start(), span.end()), + Literal::GlobPattern(_) => write!(f, "Glob{{ {}..{} }}", span.start(), span.end()), + Literal::Bare => write!(f, "Bare{{ {}..{} }}", span.start(), span.end()), + } + } +} + +impl FormatDebug for Spanned<&Literal> { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + match self.item { + Literal::Number(..) => f.say_str("number", self.span.slice(source)), + Literal::Size(..) => f.say_str("size", self.span.slice(source)), + Literal::String(..) => f.say_str("string", self.span.slice(source)), + Literal::GlobPattern(..) => f.say_str("glob", self.span.slice(source)), + Literal::Bare => f.say_str("word", self.span.slice(source)), } } } @@ -251,13 +343,28 @@ impl Literal { Literal::Size(..) => "size", Literal::String(..) 
=> "string", Literal::Bare => "string", - Literal::GlobPattern => "pattern", + Literal::GlobPattern(_) => "pattern", } } } #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum Variable { - It(Tag), - Other(Tag), + It(Span), + Other(Span), +} + +impl std::fmt::Display for Variable { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Variable::It(_) => write!(f, "$it"), + Variable::Other(span) => write!(f, "${{ {}..{} }}", span.start(), span.end()), + } + } +} + +impl FormatDebug for Spanned { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + write!(f, "{}", self.span.slice(source)) + } } diff --git a/src/parser/hir/baseline_parse.rs b/src/parser/hir/baseline_parse.rs index 267494f27c..87c2771955 100644 --- a/src/parser/hir/baseline_parse.rs +++ b/src/parser/hir/baseline_parse.rs @@ -1,140 +1,2 @@ -use crate::context::Context; -use crate::errors::ShellError; -use crate::parser::{hir, RawToken, Token}; -use crate::TaggedItem; -use crate::Text; -use std::path::PathBuf; - -pub fn baseline_parse_single_token( - token: &Token, - source: &Text, -) -> Result { - Ok(match *token.item() { - RawToken::Number(number) => hir::Expression::number(number.to_number(source), token.tag()), - RawToken::Size(int, unit) => { - hir::Expression::size(int.to_number(source), unit, token.tag()) - } - RawToken::String(tag) => hir::Expression::string(tag, token.tag()), - RawToken::Variable(tag) if tag.slice(source) == "it" => { - hir::Expression::it_variable(tag, token.tag()) - } - RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()), - RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()), - RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())), - RawToken::GlobPattern => hir::Expression::pattern(token.tag()), - RawToken::Bare => hir::Expression::bare(token.tag()), - }) -} - -pub fn baseline_parse_token_as_number( - token: &Token, - source: &Text, -) -> Result { - Ok(match *token.item() { - RawToken::Variable(tag) if tag.slice(source) == "it" => { - hir::Expression::it_variable(tag, token.tag()) - } - RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()), - RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())), - RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()), - RawToken::Number(number) => hir::Expression::number(number.to_number(source), token.tag()), - RawToken::Size(number, unit) => { - hir::Expression::size(number.to_number(source), unit, token.tag()) - } - RawToken::Bare => hir::Expression::bare(token.tag()), - RawToken::GlobPattern => { - return Err(ShellError::type_error( - "Number", - "glob pattern".to_string().tagged(token.tag()), - )) - } - RawToken::String(tag) => hir::Expression::string(tag, token.tag()), - }) -} - -pub fn baseline_parse_token_as_string( - token: &Token, - source: &Text, -) -> Result { - Ok(match *token.item() { - RawToken::Variable(tag) if tag.slice(source) == "it" => { - hir::Expression::it_variable(tag, token.tag()) - } - RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()), - RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())), - RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()), - RawToken::Number(_) => hir::Expression::bare(token.tag()), - RawToken::Size(_, _) => hir::Expression::bare(token.tag()), - RawToken::Bare => 
hir::Expression::bare(token.tag()), - RawToken::GlobPattern => { - return Err(ShellError::type_error( - "String", - "glob pattern".tagged(token.tag()), - )) - } - RawToken::String(tag) => hir::Expression::string(tag, token.tag()), - }) -} - -pub fn baseline_parse_token_as_path( - token: &Token, - context: &Context, - source: &Text, -) -> Result { - Ok(match *token.item() { - RawToken::Variable(tag) if tag.slice(source) == "it" => { - hir::Expression::it_variable(tag, token.tag()) - } - RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()), - RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())), - RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()), - RawToken::Number(_) => hir::Expression::bare(token.tag()), - RawToken::Size(_, _) => hir::Expression::bare(token.tag()), - RawToken::Bare => { - hir::Expression::file_path(expand_path(token.tag().slice(source), context), token.tag()) - } - RawToken::GlobPattern => { - return Err(ShellError::type_error( - "Path", - "glob pattern".tagged(token.tag()), - )) - } - RawToken::String(tag) => { - hir::Expression::file_path(expand_path(tag.slice(source), context), token.tag()) - } - }) -} - -pub fn baseline_parse_token_as_pattern( - token: &Token, - context: &Context, - source: &Text, -) -> Result { - Ok(match *token.item() { - RawToken::Variable(tag) if tag.slice(source) == "it" => { - hir::Expression::it_variable(tag, token.tag()) - } - RawToken::ExternalCommand(_) => { - return Err(ShellError::syntax_error( - "Invalid external command".to_string().tagged(token.tag()), - )) - } - RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())), - RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()), - RawToken::Number(_) => hir::Expression::bare(token.tag()), - RawToken::Size(_, _) => hir::Expression::bare(token.tag()), - RawToken::GlobPattern => hir::Expression::pattern(token.tag()), - RawToken::Bare => { - hir::Expression::file_path(expand_path(token.tag().slice(source), context), token.tag()) - } - RawToken::String(tag) => { - hir::Expression::file_path(expand_path(tag.slice(source), context), token.tag()) - } - }) -} - -pub fn expand_path(string: &str, context: &Context) -> PathBuf { - let expanded = shellexpand::tilde_with_context(string, || context.shell_manager.homedir()); - - PathBuf::from(expanded.as_ref()) -} +#[cfg(test)] +mod tests; diff --git a/src/parser/hir/baseline_parse/tests.rs b/src/parser/hir/baseline_parse/tests.rs new file mode 100644 index 0000000000..c930fbe56c --- /dev/null +++ b/src/parser/hir/baseline_parse/tests.rs @@ -0,0 +1,120 @@ +use crate::commands::classified::InternalCommand; +use crate::commands::ClassifiedCommand; +use crate::env::host::BasicHost; +use crate::parser::hir; +use crate::parser::hir::syntax_shape::*; +use crate::parser::hir::TokensIterator; +use crate::parser::parse::token_tree_builder::{CurriedToken, TokenTreeBuilder as b}; +use crate::parser::TokenNode; +use crate::{HasSpan, Span, SpannedItem, Tag, Text}; +use pretty_assertions::assert_eq; +use std::fmt::Debug; + +#[test] +fn test_parse_string() { + parse_tokens(StringShape, vec![b::string("hello")], |tokens| { + hir::Expression::string(inner_string_span(tokens[0].span()), tokens[0].span()) + }); +} + +#[test] +fn test_parse_path() { + parse_tokens( + VariablePathShape, + vec![b::var("it"), b::op("."), b::bare("cpu")], + |tokens| { + let (outer_var, inner_var) = tokens[0].expect_var(); + let bare = tokens[2].expect_bare(); + 
hir::Expression::path( + hir::Expression::it_variable(inner_var, outer_var), + vec!["cpu".spanned(bare)], + outer_var.until(bare), + ) + }, + ); + + parse_tokens( + VariablePathShape, + vec![ + b::var("cpu"), + b::op("."), + b::bare("amount"), + b::op("."), + b::string("max ghz"), + ], + |tokens| { + let (outer_var, inner_var) = tokens[0].expect_var(); + let amount = tokens[2].expect_bare(); + let (outer_max_ghz, _) = tokens[4].expect_string(); + + hir::Expression::path( + hir::Expression::variable(inner_var, outer_var), + vec!["amount".spanned(amount), "max ghz".spanned(outer_max_ghz)], + outer_var.until(outer_max_ghz), + ) + }, + ); +} + +#[test] +fn test_parse_command() { + parse_tokens( + ClassifiedCommandShape, + vec![b::bare("ls"), b::sp(), b::pattern("*.txt")], + |tokens| { + let bare = tokens[0].expect_bare(); + let pat = tokens[2].expect_pattern(); + + eprintln!("{:?} {:?} {:?}", bare, pat, bare.until(pat)); + + ClassifiedCommand::Internal(InternalCommand::new( + "ls".to_string(), + Tag { + span: bare, + anchor: None, + }, + hir::Call { + head: Box::new(hir::RawExpression::Command(bare).spanned(bare)), + positional: Some(vec![hir::Expression::pattern("*.txt", pat)]), + named: None, + } + .spanned(bare.until(pat)), + )) + // hir::Expression::path( + // hir::Expression::variable(inner_var, outer_var), + // vec!["cpu".tagged(bare)], + // outer_var.until(bare), + // ) + }, + ); +} + +fn parse_tokens( + shape: impl ExpandSyntax, + tokens: Vec, + expected: impl FnOnce(&[TokenNode]) -> T, +) { + let tokens = b::token_list(tokens); + let (tokens, source) = b::build(tokens); + + ExpandContext::with_empty(&Text::from(source), |context| { + let tokens = tokens.expect_list(); + let mut iterator = TokensIterator::all(tokens.item, tokens.span); + + let expr = expand_syntax(&shape, &mut iterator, &context); + + let expr = match expr { + Ok(expr) => expr, + Err(err) => { + crate::cli::print_err(err.into(), &BasicHost, context.source().clone()); + panic!("Parse failed"); + } + }; + + assert_eq!(expr, expected(tokens.item)); + }) +} + +fn inner_string_span(span: Span) -> Span { + Span::new(span.start() + 1, span.end() - 1) +} diff --git a/src/parser/hir/baseline_parse_tokens.rs b/src/parser/hir/baseline_parse_tokens.rs deleted file mode 100644 index 8413bd07e1..0000000000 --- a/src/parser/hir/baseline_parse_tokens.rs +++ /dev/null @@ -1,459 +0,0 @@ -use crate::context::Context; -use crate::errors::ShellError; -use crate::parser::{ - hir, - hir::{ - baseline_parse_single_token, baseline_parse_token_as_number, baseline_parse_token_as_path, - baseline_parse_token_as_pattern, baseline_parse_token_as_string, - }, - DelimitedNode, Delimiter, PathNode, RawToken, TokenNode, -}; -use crate::{Tag, Tagged, TaggedItem, Text}; -use derive_new::new; -use log::trace; -use serde::{Deserialize, Serialize}; - -pub fn baseline_parse_tokens( - token_nodes: &mut TokensIterator<'_>, - context: &Context, - source: &Text, - syntax_type: SyntaxShape, -) -> Result, ShellError> { - let mut exprs: Vec = vec![]; - - loop { - if token_nodes.at_end() { - break; - } - - let expr = baseline_parse_next_expr(token_nodes, context, source, syntax_type)?; - exprs.push(expr); - } - - Ok(exprs) -} - -#[derive(Debug, Copy, Clone, Serialize, Deserialize)] -pub enum SyntaxShape { - Any, - List, - Literal, - String, - Member, - Variable, - Number, - Path, - Pattern, - Binary, - Block, - Boolean, -} - -impl std::fmt::Display for SyntaxShape { - fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { - match self { - SyntaxShape::Any 
=> write!(f, "Any"), - SyntaxShape::List => write!(f, "List"), - SyntaxShape::Literal => write!(f, "Literal"), - SyntaxShape::String => write!(f, "String"), - SyntaxShape::Member => write!(f, "Member"), - SyntaxShape::Variable => write!(f, "Variable"), - SyntaxShape::Number => write!(f, "Number"), - SyntaxShape::Path => write!(f, "Path"), - SyntaxShape::Pattern => write!(f, "Pattern"), - SyntaxShape::Binary => write!(f, "Binary"), - SyntaxShape::Block => write!(f, "Block"), - SyntaxShape::Boolean => write!(f, "Boolean"), - } - } -} - -pub fn baseline_parse_next_expr( - tokens: &mut TokensIterator, - context: &Context, - source: &Text, - syntax_type: SyntaxShape, -) -> Result { - let next = tokens - .next() - .ok_or_else(|| ShellError::string("Expected token, found none"))?; - - trace!(target: "nu::parser::parse_one_expr", "syntax_type={:?}, token={:?}", syntax_type, next); - - match (syntax_type, next) { - (SyntaxShape::Path, TokenNode::Token(token)) => { - return baseline_parse_token_as_path(token, context, source) - } - - (SyntaxShape::Path, token) => { - return Err(ShellError::type_error( - "Path", - token.type_name().tagged(token.tag()), - )) - } - - (SyntaxShape::Pattern, TokenNode::Token(token)) => { - return baseline_parse_token_as_pattern(token, context, source) - } - - (SyntaxShape::Pattern, token) => { - return Err(ShellError::type_error( - "Path", - token.type_name().tagged(token.tag()), - )) - } - - (SyntaxShape::String, TokenNode::Token(token)) => { - return baseline_parse_token_as_string(token, source); - } - - (SyntaxShape::String, token) => { - return Err(ShellError::type_error( - "String", - token.type_name().tagged(token.tag()), - )) - } - - (SyntaxShape::Number, TokenNode::Token(token)) => { - return Ok(baseline_parse_token_as_number(token, source)?); - } - - (SyntaxShape::Number, token) => { - return Err(ShellError::type_error( - "Numeric", - token.type_name().tagged(token.tag()), - )) - } - - // TODO: More legit member processing - (SyntaxShape::Member, TokenNode::Token(token)) => { - return baseline_parse_token_as_string(token, source); - } - - (SyntaxShape::Member, token) => { - return Err(ShellError::type_error( - "member", - token.type_name().tagged(token.tag()), - )) - } - - (SyntaxShape::Any, _) => {} - (SyntaxShape::List, _) => {} - (SyntaxShape::Literal, _) => {} - (SyntaxShape::Variable, _) => {} - (SyntaxShape::Binary, _) => {} - (SyntaxShape::Block, _) => {} - (SyntaxShape::Boolean, _) => {} - }; - - let first = baseline_parse_semantic_token(next, context, source)?; - - let possible_op = tokens.peek(); - - let op = match possible_op { - Some(TokenNode::Operator(op)) => op.clone(), - _ => return Ok(first), - }; - - tokens.next(); - - let second = match tokens.next() { - None => { - return Err(ShellError::labeled_error( - "Expected something after an operator", - "operator", - op.tag(), - )) - } - Some(token) => baseline_parse_semantic_token(token, context, source)?, - }; - - // We definitely have a binary expression here -- let's see if we should coerce it into a block - - match syntax_type { - SyntaxShape::Any => { - let tag = first.tag().until(second.tag()); - let binary = hir::Binary::new(first, op, second); - let binary = hir::RawExpression::Binary(Box::new(binary)); - let binary = binary.tagged(tag); - - Ok(binary) - } - - SyntaxShape::Block => { - let tag = first.tag().until(second.tag()); - - let path: Tagged = match first { - Tagged { - item: hir::RawExpression::Literal(hir::Literal::Bare), - tag, - } => { - let string = 
tag.slice(source).to_string().tagged(tag); - let path = hir::Path::new( - // TODO: Deal with synthetic nodes that have no representation at all in source - hir::RawExpression::Variable(hir::Variable::It(Tag::unknown())) - .tagged(Tag::unknown()), - vec![string], - ); - let path = hir::RawExpression::Path(Box::new(path)); - path.tagged(first.tag()) - } - Tagged { - item: hir::RawExpression::Literal(hir::Literal::String(inner)), - tag, - } => { - let string = inner.slice(source).to_string().tagged(tag); - let path = hir::Path::new( - // TODO: Deal with synthetic nodes that have no representation at all in source - hir::RawExpression::Variable(hir::Variable::It(Tag::unknown())) - .tagged_unknown(), - vec![string], - ); - let path = hir::RawExpression::Path(Box::new(path)); - path.tagged(first.tag()) - } - Tagged { - item: hir::RawExpression::Variable(..), - .. - } => first, - Tagged { tag, item } => { - return Err(ShellError::labeled_error( - "The first part of an un-braced block must be a column name", - item.type_name(), - tag, - )) - } - }; - - let binary = hir::Binary::new(path, op, second); - let binary = hir::RawExpression::Binary(Box::new(binary)); - let binary = binary.tagged(tag); - - let block = hir::RawExpression::Block(vec![binary]); - let block = block.tagged(tag); - - Ok(block) - } - - other => Err(ShellError::unimplemented(format!( - "coerce hint {:?}", - other - ))), - } -} - -pub fn baseline_parse_semantic_token( - token: &TokenNode, - context: &Context, - source: &Text, -) -> Result { - match token { - TokenNode::Token(token) => baseline_parse_single_token(token, source), - TokenNode::Call(_call) => unimplemented!(), - TokenNode::Delimited(delimited) => baseline_parse_delimited(delimited, context, source), - TokenNode::Pipeline(_pipeline) => unimplemented!(), - TokenNode::Operator(op) => Err(ShellError::syntax_error( - "Unexpected operator".tagged(op.tag), - )), - TokenNode::Flag(flag) => Err(ShellError::syntax_error("Unexpected flag".tagged(flag.tag))), - TokenNode::Member(tag) => Err(ShellError::syntax_error( - "BUG: Top-level member".tagged(*tag), - )), - TokenNode::Whitespace(tag) => Err(ShellError::syntax_error( - "BUG: Whitespace found during parse".tagged(*tag), - )), - TokenNode::Error(error) => Err(*error.item.clone()), - TokenNode::Path(path) => baseline_parse_path(path, context, source), - } -} - -pub fn baseline_parse_delimited( - token: &Tagged, - context: &Context, - source: &Text, -) -> Result { - match token.delimiter() { - Delimiter::Brace => { - let children = token.children(); - let exprs = baseline_parse_tokens( - &mut TokensIterator::new(children), - context, - source, - SyntaxShape::Any, - )?; - - let expr = hir::RawExpression::Block(exprs); - Ok(expr.tagged(token.tag())) - } - Delimiter::Paren => unimplemented!(), - Delimiter::Square => { - let children = token.children(); - let exprs = baseline_parse_tokens( - &mut TokensIterator::new(children), - context, - source, - SyntaxShape::Any, - )?; - - let expr = hir::RawExpression::List(exprs); - Ok(expr.tagged(token.tag())) - } - } -} - -pub fn baseline_parse_path( - token: &Tagged, - context: &Context, - source: &Text, -) -> Result { - let head = baseline_parse_semantic_token(token.head(), context, source)?; - - let mut tail = vec![]; - - for part in token.tail() { - let string = match part { - TokenNode::Token(token) => match token.item() { - RawToken::Bare => token.tag().slice(source), - RawToken::String(tag) => tag.slice(source), - RawToken::Number(_) - | RawToken::Size(..) 
- | RawToken::Variable(_) - | RawToken::ExternalCommand(_) - | RawToken::GlobPattern - | RawToken::ExternalWord => { - return Err(ShellError::type_error( - "String", - token.type_name().tagged(part.tag()), - )) - } - }, - - TokenNode::Member(tag) => tag.slice(source), - - // TODO: Make this impossible - other => { - return Err(ShellError::syntax_error( - format!("{} in path", other.type_name()).tagged(other.tag()), - )) - } - } - .to_string(); - - tail.push(string.tagged(part.tag())); - } - - Ok(hir::path(head, tail).tagged(token.tag()).into()) -} - -#[derive(Debug, new)] -pub struct TokensIterator<'a> { - tokens: &'a [TokenNode], - #[new(default)] - index: usize, - #[new(default)] - seen: indexmap::IndexSet, -} - -impl TokensIterator<'_> { - pub fn remove(&mut self, position: usize) { - self.seen.insert(position); - } - - pub fn len(&self) -> usize { - self.tokens.len() - } - - pub fn at_end(&self) -> bool { - for index in self.index..self.tokens.len() { - if !self.seen.contains(&index) { - return false; - } - } - - true - } - - pub fn advance(&mut self) { - self.seen.insert(self.index); - self.index += 1; - } - - pub fn extract(&mut self, f: impl Fn(&TokenNode) -> Option) -> Option<(usize, T)> { - for (i, item) in self.tokens.iter().enumerate() { - if self.seen.contains(&i) { - continue; - } - - match f(item) { - None => { - continue; - } - Some(value) => { - self.seen.insert(i); - return Some((i, value)); - } - } - } - - None - } - - pub fn move_to(&mut self, pos: usize) { - self.index = pos; - } - - pub fn restart(&mut self) { - self.index = 0; - } - - pub fn clone(&self) -> TokensIterator { - TokensIterator { - tokens: self.tokens, - index: self.index, - seen: self.seen.clone(), - } - } - - pub fn peek(&self) -> Option<&TokenNode> { - let mut tokens = self.clone(); - - tokens.next() - } - - pub fn debug_remaining(&self) -> Vec { - let mut tokens = self.clone(); - tokens.restart(); - tokens.cloned().collect() - } -} - -impl<'a> Iterator for TokensIterator<'a> { - type Item = &'a TokenNode; - - fn next(&mut self) -> Option<&'a TokenNode> { - loop { - if self.index >= self.tokens.len() { - return None; - } - - if self.seen.contains(&self.index) { - self.advance(); - continue; - } - - if self.index >= self.tokens.len() { - return None; - } - - match &self.tokens[self.index] { - TokenNode::Whitespace(_) => { - self.advance(); - } - other => { - self.advance(); - return Some(other); - } - } - } - } -} diff --git a/src/parser/hir/binary.rs b/src/parser/hir/binary.rs index 02a4d416e4..ee90e284e9 100644 --- a/src/parser/hir/binary.rs +++ b/src/parser/hir/binary.rs @@ -1,6 +1,6 @@ use crate::parser::{hir::Expression, Operator}; use crate::prelude::*; -use crate::Tagged; + use derive_new::new; use getset::Getters; use serde::{Deserialize, Serialize}; @@ -12,12 +12,18 @@ use std::fmt; #[get = "pub(crate)"] pub struct Binary { left: Expression, - op: Tagged, + op: Spanned, right: Expression, } -impl ToDebug for Binary { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { +impl fmt::Display for Binary { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "({} {} {})", self.op.as_str(), self.left, self.right) + } +} + +impl FormatDebug for Binary { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { write!(f, "{}", self.left.debug(source))?; write!(f, " {} ", self.op.debug(source))?; write!(f, "{}", self.right.debug(source))?; diff --git a/src/parser/hir/expand_external_tokens.rs b/src/parser/hir/expand_external_tokens.rs new 
file mode 100644 index 0000000000..e99147c228 --- /dev/null +++ b/src/parser/hir/expand_external_tokens.rs @@ -0,0 +1,374 @@ +use crate::errors::ParseError; +#[cfg(not(coloring_in_tokens))] +use crate::parser::hir::syntax_shape::FlatShape; +use crate::parser::{ + hir::syntax_shape::{ + color_syntax, expand_atom, expand_expr, expand_syntax, AtomicToken, ColorSyntax, + ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, MaybeSpaceShape, + }, + hir::Expression, + TokensIterator, +}; +use crate::{DebugFormatter, FormatDebug, Span, Spanned, SpannedItem}; +use std::fmt; + +#[derive(Debug, Copy, Clone)] +pub struct ExternalTokensShape; + +impl FormatDebug for Spanned>> { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + FormatDebug::fmt_debug(&self.item, f, source) + } +} + +impl ExpandSyntax for ExternalTokensShape { + type Output = Spanned>>; + + fn name(&self) -> &'static str { + "external command" + } + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + let mut out: Vec> = vec![]; + + let start = token_nodes.span_at_cursor(); + + loop { + match expand_syntax(&ExternalExpressionShape, token_nodes, context) { + Err(_) | Ok(None) => break, + Ok(Some(span)) => out.push(span.spanned_string(context.source())), + } + } + + let end = token_nodes.span_at_cursor(); + + Ok(out.spanned(start.until(end))) + } +} + +#[cfg(not(coloring_in_tokens))] +impl ColorSyntax for ExternalTokensShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Self::Info { + loop { + // Allow a space + color_syntax(&MaybeSpaceShape, token_nodes, context, shapes); + + // Process an external expression. External expressions are mostly words, with a + // few exceptions (like $variables and path expansion rules) + match color_syntax(&ExternalExpression, token_nodes, context, shapes).1 { + ExternalExpressionResult::Eof => break, + ExternalExpressionResult::Processed => continue, + } + } + } +} + +#[cfg(coloring_in_tokens)] +impl ColorSyntax for ExternalTokensShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "ExternalTokensShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Self::Info { + loop { + // Allow a space + color_syntax(&MaybeSpaceShape, token_nodes, context); + + // Process an external expression. External expressions are mostly words, with a + // few exceptions (like $variables and path expansion rules) + match color_syntax(&ExternalExpression, token_nodes, context).1 { + ExternalExpressionResult::Eof => break, + ExternalExpressionResult::Processed => continue, + } + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct ExternalExpressionShape; + +impl ExpandSyntax for ExternalExpressionShape { + type Output = Option; + + fn name(&self) -> &'static str { + "external expression" + } + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + expand_syntax(&MaybeSpaceShape, token_nodes, context)?; + + let first = expand_atom( + token_nodes, + "external command", + context, + ExpansionRule::new().allow_external_command(), + )? 
+ .span; + + let mut last = first; + + loop { + let continuation = expand_expr(&ExternalContinuationShape, token_nodes, context); + + if let Ok(continuation) = continuation { + last = continuation.span; + } else { + break; + } + } + + Ok(Some(first.until(last))) + } +} + +#[derive(Debug, Copy, Clone)] +struct ExternalExpression; + +impl ExpandSyntax for ExternalExpression { + type Output = Option; + + fn name(&self) -> &'static str { + "external expression" + } + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + expand_syntax(&MaybeSpaceShape, token_nodes, context)?; + + let first = expand_syntax(&ExternalHeadShape, token_nodes, context)?.span; + let mut last = first; + + loop { + let continuation = expand_syntax(&ExternalContinuationShape, token_nodes, context); + + if let Ok(continuation) = continuation { + last = continuation.span; + } else { + break; + } + } + + Ok(Some(first.until(last))) + } +} + +#[derive(Debug, Copy, Clone)] +struct ExternalHeadShape; + +impl ExpandExpression for ExternalHeadShape { + fn name(&self) -> &'static str { + "external argument" + } + + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + match expand_atom( + token_nodes, + "external argument", + context, + ExpansionRule::new() + .allow_external_word() + .treat_size_as_word(), + )? { + atom => match &atom { + Spanned { item, span } => Ok(match item { + AtomicToken::Eof { .. } => unreachable!("ExpansionRule doesn't allow EOF"), + AtomicToken::Error { .. } => unreachable!("ExpansionRule doesn't allow Error"), + AtomicToken::Size { .. } => unreachable!("ExpansionRule treats size as word"), + AtomicToken::Whitespace { .. } => { + unreachable!("ExpansionRule doesn't allow Whitespace") + } + AtomicToken::ShorthandFlag { .. } + | AtomicToken::LonghandFlag { .. } + | AtomicToken::SquareDelimited { .. } + | AtomicToken::ParenDelimited { .. } + | AtomicToken::BraceDelimited { .. } + | AtomicToken::Pipeline { .. } => { + return Err(ParseError::mismatch( + "external command name", + atom.tagged_type_name(), + )) + } + AtomicToken::ExternalCommand { command } => { + Expression::external_command(*command, *span) + } + AtomicToken::Number { number } => { + Expression::number(number.to_number(context.source()), *span) + } + AtomicToken::String { body } => Expression::string(*body, *span), + AtomicToken::ItVariable { name } => Expression::it_variable(*name, *span), + AtomicToken::Variable { name } => Expression::variable(*name, *span), + AtomicToken::ExternalWord { .. } + | AtomicToken::GlobPattern { .. } + | AtomicToken::FilePath { .. } + | AtomicToken::Word { .. } + | AtomicToken::Dot { .. } + | AtomicToken::Operator { .. } => Expression::external_command(*span, *span), + }), + }, + } + } +} + +#[derive(Debug, Copy, Clone)] +struct ExternalContinuationShape; + +impl ExpandExpression for ExternalContinuationShape { + fn name(&self) -> &'static str { + "external argument" + } + + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + match expand_atom( + token_nodes, + "external argument", + context, + ExpansionRule::new() + .allow_external_word() + .treat_size_as_word(), + )? { + atom => match &atom { + Spanned { item, span } => Ok(match item { + AtomicToken::Eof { .. } => unreachable!("ExpansionRule doesn't allow EOF"), + AtomicToken::Error { .. 
} => unreachable!("ExpansionRule doesn't allow Error"), + AtomicToken::Number { number } => { + Expression::number(number.to_number(context.source()), *span) + } + AtomicToken::Size { .. } => unreachable!("ExpansionRule treats size as word"), + AtomicToken::ExternalCommand { .. } => { + unreachable!("ExpansionRule doesn't allow ExternalCommand") + } + AtomicToken::Whitespace { .. } => { + unreachable!("ExpansionRule doesn't allow Whitespace") + } + AtomicToken::String { body } => Expression::string(*body, *span), + AtomicToken::ItVariable { name } => Expression::it_variable(*name, *span), + AtomicToken::Variable { name } => Expression::variable(*name, *span), + AtomicToken::ExternalWord { .. } + | AtomicToken::GlobPattern { .. } + | AtomicToken::FilePath { .. } + | AtomicToken::Word { .. } + | AtomicToken::ShorthandFlag { .. } + | AtomicToken::LonghandFlag { .. } + | AtomicToken::Dot { .. } + | AtomicToken::Operator { .. } => Expression::bare(*span), + AtomicToken::SquareDelimited { .. } + | AtomicToken::ParenDelimited { .. } + | AtomicToken::BraceDelimited { .. } + | AtomicToken::Pipeline { .. } => { + return Err(ParseError::mismatch( + "external argument", + atom.tagged_type_name(), + )) + } + }), + }, + } + } +} + +#[cfg(coloring_in_tokens)] +impl ColorSyntax for ExternalExpression { + type Info = ExternalExpressionResult; + type Input = (); + + fn name(&self) -> &'static str { + "ExternalExpression" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> ExternalExpressionResult { + let atom = match expand_atom( + token_nodes, + "external word", + context, + ExpansionRule::permissive(), + ) { + Err(_) => unreachable!("TODO: separate infallible expand_atom"), + Ok(Spanned { + item: AtomicToken::Eof { .. }, + .. + }) => return ExternalExpressionResult::Eof, + Ok(atom) => atom, + }; + + token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)); + return ExternalExpressionResult::Processed; + } +} + +#[must_use] +enum ExternalExpressionResult { + Eof, + Processed, +} + +#[cfg(not(coloring_in_tokens))] +impl ColorSyntax for ExternalExpression { + type Info = ExternalExpressionResult; + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> ExternalExpressionResult { + let atom = match expand_atom( + token_nodes, + "external word", + context, + ExpansionRule::permissive(), + ) { + Err(_) => unreachable!("TODO: separate infallible expand_atom"), + Ok(Spanned { + item: AtomicToken::Eof { .. }, + .. 
+ }) => return ExternalExpressionResult::Eof, + Ok(atom) => atom, + }; + + atom.color_tokens(shapes); + return ExternalExpressionResult::Processed; + } +} diff --git a/src/parser/hir/external_command.rs b/src/parser/hir/external_command.rs index 28865330d5..af207c458e 100644 --- a/src/parser/hir/external_command.rs +++ b/src/parser/hir/external_command.rs @@ -9,11 +9,11 @@ use std::fmt; )] #[get = "pub(crate)"] pub struct ExternalCommand { - name: Tag, + pub(crate) name: Span, } -impl ToDebug for ExternalCommand { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { +impl FormatDebug for ExternalCommand { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { write!(f, "{}", self.name.slice(source))?; Ok(()) diff --git a/src/parser/hir/named.rs b/src/parser/hir/named.rs index 838f643be5..152525f0aa 100644 --- a/src/parser/hir/named.rs +++ b/src/parser/hir/named.rs @@ -21,8 +21,8 @@ pub struct NamedArguments { pub(crate) named: IndexMap, } -impl ToDebug for NamedArguments { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { +impl FormatDebug for NamedArguments { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { for (name, value) in &self.named { match value { NamedValue::AbsentSwitch => continue, @@ -43,9 +43,13 @@ impl NamedArguments { match switch { None => self.named.insert(name.into(), NamedValue::AbsentSwitch), - Some(flag) => self - .named - .insert(name, NamedValue::PresentSwitch(*flag.name())), + Some(flag) => self.named.insert( + name, + NamedValue::PresentSwitch(Tag { + span: *flag.name(), + anchor: None, + }), + ), }; } diff --git a/src/parser/hir/path.rs b/src/parser/hir/path.rs index f43edf1762..4a9907475b 100644 --- a/src/parser/hir/path.rs +++ b/src/parser/hir/path.rs @@ -1,26 +1,55 @@ use crate::parser::hir::Expression; use crate::prelude::*; -use crate::Tagged; use derive_new::new; -use getset::Getters; +use getset::{Getters, MutGetters}; use serde::{Deserialize, Serialize}; use std::fmt; #[derive( - Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Getters, Serialize, Deserialize, new, + Debug, + Clone, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + Getters, + MutGetters, + Serialize, + Deserialize, + new, )] #[get = "pub(crate)"] pub struct Path { head: Expression, - tail: Vec>, + #[get_mut = "pub(crate)"] + tail: Vec>, } -impl ToDebug for Path { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { - write!(f, "{}", self.head.debug(source))?; +impl fmt::Display for Path { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.head)?; - for part in &self.tail { - write!(f, ".{}", part.item())?; + for entry in &self.tail { + write!(f, ".{}", entry.item)?; + } + + Ok(()) + } +} + +impl Path { + pub(crate) fn parts(self) -> (Expression, Vec>) { + (self.head, self.tail) + } +} + +impl FormatDebug for Path { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + write!(f, "{}", self.head.debug(source))?; + + for part in &self.tail { + write!(f, ".{}", part.item)?; } Ok(()) diff --git a/src/parser/hir/syntax_shape.rs b/src/parser/hir/syntax_shape.rs new file mode 100644 index 0000000000..32c54bc4e3 --- /dev/null +++ b/src/parser/hir/syntax_shape.rs @@ -0,0 +1,1736 @@ +mod block; +mod expression; +pub(crate) mod flat_shape; + +use crate::cli::external_command; +use crate::commands::{ + classified::{ClassifiedPipeline, InternalCommand}, + ClassifiedCommand, Command, +}; +use 
crate::parser::hir::expand_external_tokens::ExternalTokensShape; +use crate::parser::hir::syntax_shape::block::AnyBlockShape; +use crate::parser::hir::tokens_iterator::Peeked; +use crate::parser::parse_command::{parse_command_tail, CommandTailShape}; +use crate::parser::{hir, hir::TokensIterator, Operator, RawToken, TokenNode}; +use crate::prelude::*; +use derive_new::new; +use getset::Getters; +use serde::{Deserialize, Serialize}; +use std::fmt; +use std::path::{Path, PathBuf}; + +pub(crate) use self::expression::atom::{expand_atom, AtomicToken, ExpansionRule}; +pub(crate) use self::expression::delimited::{ + color_delimited_square, expand_delimited_square, DelimitedShape, +}; +pub(crate) use self::expression::file_path::FilePathShape; +pub(crate) use self::expression::list::{BackoffColoringMode, ExpressionListShape}; +pub(crate) use self::expression::number::{IntShape, NumberShape}; +pub(crate) use self::expression::pattern::{BarePatternShape, PatternShape}; +pub(crate) use self::expression::string::StringShape; +pub(crate) use self::expression::unit::UnitShape; +pub(crate) use self::expression::variable_path::{ + ColorableDotShape, ColumnPathShape, DotShape, ExpressionContinuation, + ExpressionContinuationShape, MemberShape, PathTailShape, VariablePathShape, +}; +pub(crate) use self::expression::{continue_expression, AnyExpressionShape}; +pub(crate) use self::flat_shape::FlatShape; + +#[cfg(not(coloring_in_tokens))] +use crate::parser::hir::tokens_iterator::debug::debug_tokens; +#[cfg(not(coloring_in_tokens))] +use crate::parser::parse::pipeline::Pipeline; +#[cfg(not(coloring_in_tokens))] +use log::{log_enabled, trace}; + +#[derive(Debug, Copy, Clone, Serialize, Deserialize)] +pub enum SyntaxShape { + Any, + String, + Member, + ColumnPath, + Number, + Int, + Path, + Pattern, + Block, +} + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for SyntaxShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + match self { + SyntaxShape::Any => { + color_fallible_syntax(&AnyExpressionShape, token_nodes, context, shapes) + } + SyntaxShape::Int => color_fallible_syntax(&IntShape, token_nodes, context, shapes), + SyntaxShape::String => color_fallible_syntax_with( + &StringShape, + &FlatShape::String, + token_nodes, + context, + shapes, + ), + SyntaxShape::Member => { + color_fallible_syntax(&MemberShape, token_nodes, context, shapes) + } + SyntaxShape::ColumnPath => { + color_fallible_syntax(&ColumnPathShape, token_nodes, context, shapes) + } + SyntaxShape::Number => { + color_fallible_syntax(&NumberShape, token_nodes, context, shapes) + } + SyntaxShape::Path => { + color_fallible_syntax(&FilePathShape, token_nodes, context, shapes) + } + SyntaxShape::Pattern => { + color_fallible_syntax(&PatternShape, token_nodes, context, shapes) + } + SyntaxShape::Block => { + color_fallible_syntax(&AnyBlockShape, token_nodes, context, shapes) + } + } + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for SyntaxShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "SyntaxShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + match self { + SyntaxShape::Any => color_fallible_syntax(&AnyExpressionShape, token_nodes, context), + SyntaxShape::Int => color_fallible_syntax(&IntShape, 
token_nodes, context), + SyntaxShape::String => { + color_fallible_syntax_with(&StringShape, &FlatShape::String, token_nodes, context) + } + SyntaxShape::Member => color_fallible_syntax(&MemberShape, token_nodes, context), + SyntaxShape::ColumnPath => { + color_fallible_syntax(&ColumnPathShape, token_nodes, context) + } + SyntaxShape::Number => color_fallible_syntax(&NumberShape, token_nodes, context), + SyntaxShape::Path => color_fallible_syntax(&FilePathShape, token_nodes, context), + SyntaxShape::Pattern => color_fallible_syntax(&PatternShape, token_nodes, context), + SyntaxShape::Block => color_fallible_syntax(&AnyBlockShape, token_nodes, context), + } + } +} + +impl ExpandExpression for SyntaxShape { + fn name(&self) -> &'static str { + match self { + SyntaxShape::Any => "any", + SyntaxShape::Int => "integer", + SyntaxShape::String => "string", + SyntaxShape::Member => "column name", + SyntaxShape::ColumnPath => "column path", + SyntaxShape::Number => "number", + SyntaxShape::Path => "file path", + SyntaxShape::Pattern => "glob pattern", + SyntaxShape::Block => "block", + } + } + + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + match self { + SyntaxShape::Any => expand_expr(&AnyExpressionShape, token_nodes, context), + SyntaxShape::Int => expand_expr(&IntShape, token_nodes, context), + SyntaxShape::String => expand_expr(&StringShape, token_nodes, context), + SyntaxShape::Member => { + let syntax = expand_syntax(&MemberShape, token_nodes, context)?; + Ok(syntax.to_expr()) + } + SyntaxShape::ColumnPath => { + let column_path = expand_syntax(&ColumnPathShape, token_nodes, context)?; + + let Tagged { item: members, tag } = column_path.path(); + + Ok(hir::Expression::list( + members.into_iter().map(|s| s.to_expr()).collect(), + tag, + )) + } + SyntaxShape::Number => expand_expr(&NumberShape, token_nodes, context), + SyntaxShape::Path => expand_expr(&FilePathShape, token_nodes, context), + SyntaxShape::Pattern => expand_expr(&PatternShape, token_nodes, context), + SyntaxShape::Block => expand_expr(&AnyBlockShape, token_nodes, context), + } + } +} + +impl std::fmt::Display for SyntaxShape { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + match self { + SyntaxShape::Any => write!(f, "Any"), + SyntaxShape::String => write!(f, "String"), + SyntaxShape::Int => write!(f, "Integer"), + SyntaxShape::Member => write!(f, "Member"), + SyntaxShape::ColumnPath => write!(f, "ColumnPath"), + SyntaxShape::Number => write!(f, "Number"), + SyntaxShape::Path => write!(f, "Path"), + SyntaxShape::Pattern => write!(f, "Pattern"), + SyntaxShape::Block => write!(f, "Block"), + } + } +} + +#[derive(Getters, new)] +pub struct ExpandContext<'context> { + #[get = "pub(crate)"] + registry: &'context CommandRegistry, + #[get = "pub(crate)"] + source: &'context Text, + homedir: Option, +} + +impl<'context> ExpandContext<'context> { + pub(crate) fn homedir(&self) -> Option<&Path> { + self.homedir.as_ref().map(|h| h.as_path()) + } + + #[cfg(test)] + pub fn with_empty(source: &Text, callback: impl FnOnce(ExpandContext)) { + let mut registry = CommandRegistry::new(); + registry.insert( + "ls", + crate::commands::whole_stream_command(crate::commands::LS), + ); + + callback(ExpandContext { + registry: ®istry, + source, + homedir: None, + }) + } +} + +pub trait TestSyntax: std::fmt::Debug + Copy { + fn test<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Option>; +} + +pub trait 
ExpandExpression: std::fmt::Debug + Copy { + fn name(&self) -> &'static str; + + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result; +} + +#[cfg(coloring_in_tokens)] +pub trait FallibleColorSyntax: std::fmt::Debug + Copy { + type Info; + type Input; + + fn name(&self) -> &'static str; + + fn color_syntax<'a, 'b>( + &self, + input: &Self::Input, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result; +} + +#[cfg(not(coloring_in_tokens))] +pub trait FallibleColorSyntax: std::fmt::Debug + Copy { + type Info; + type Input; + + fn color_syntax<'a, 'b>( + &self, + input: &Self::Input, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result; +} + +#[cfg(not(coloring_in_tokens))] +pub trait ColorSyntax: std::fmt::Debug + Copy { + type Info; + type Input; + + fn color_syntax<'a, 'b>( + &self, + input: &Self::Input, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Self::Info; +} + +#[cfg(coloring_in_tokens)] +pub trait ColorSyntax: std::fmt::Debug + Copy { + type Info; + type Input; + + fn name(&self) -> &'static str; + + fn color_syntax<'a, 'b>( + &self, + input: &Self::Input, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Self::Info; +} + +pub(crate) trait ExpandSyntax: std::fmt::Debug + Copy { + type Output: HasFallibleSpan + Clone + std::fmt::Debug + 'static; + + fn name(&self) -> &'static str; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result; +} + +pub(crate) fn expand_syntax<'a, 'b, T: ExpandSyntax>( + shape: &T, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, +) -> Result { + token_nodes.expand_frame(shape.name(), |token_nodes| { + shape.expand_syntax(token_nodes, context) + }) +} + +pub(crate) fn expand_expr<'a, 'b, T: ExpandExpression>( + shape: &T, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, +) -> Result { + token_nodes.expand_expr_frame(shape.name(), |token_nodes| { + shape.expand_expr(token_nodes, context) + }) +} + +#[cfg(coloring_in_tokens)] +pub fn color_syntax<'a, 'b, T: ColorSyntax, U>( + shape: &T, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, +) -> ((), U) { + ( + (), + token_nodes.color_frame(shape.name(), |token_nodes| { + shape.color_syntax(&(), token_nodes, context) + }), + ) +} + +#[cfg(not(coloring_in_tokens))] +pub fn color_syntax<'a, 'b, T: ColorSyntax, U>( + shape: &T, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, +) -> ((), U) { + trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes.state(), context.source)); + + let len = shapes.len(); + let result = shape.color_syntax(&(), token_nodes, context, shapes); + + trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes.state(), context.source)); + + if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { + trace!(target: "nu::color_syntax", "after {}", std::any::type_name::()); + + if len < shapes.len() { + for i in len..(shapes.len()) { + trace!(target: "nu::color_syntax", "new shape :: {:?}", shapes[i]); + } + } else { + trace!(target: "nu::color_syntax", "no new shapes"); + } + } + + ((), result) +} + +#[cfg(not(coloring_in_tokens))] +pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax, U>( + shape: &T, + token_nodes: &'b mut 
TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, +) -> Result { + trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes.state(), context.source)); + + if token_nodes.at_end() { + trace!(target: "nu::color_syntax", "at eof"); + return Err(ShellError::unexpected_eof("coloring", Tag::unknown())); + } + + let len = shapes.len(); + let result = shape.color_syntax(&(), token_nodes, context, shapes); + + trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes.state(), context.source)); + + if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { + trace!(target: "nu::color_syntax", "after {}", std::any::type_name::()); + + if len < shapes.len() { + for i in len..(shapes.len()) { + trace!(target: "nu::color_syntax", "new shape :: {:?}", shapes[i]); + } + } else { + trace!(target: "nu::color_syntax", "no new shapes"); + } + } + + result +} + +#[cfg(coloring_in_tokens)] +pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax, U>( + shape: &T, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, +) -> Result { + token_nodes.color_fallible_frame(shape.name(), |token_nodes| { + shape.color_syntax(&(), token_nodes, context) + }) +} + +#[cfg(not(coloring_in_tokens))] +pub fn color_syntax_with<'a, 'b, T: ColorSyntax, U, I>( + shape: &T, + input: &I, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, +) -> ((), U) { + trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes.state(), context.source)); + + let len = shapes.len(); + let result = shape.color_syntax(input, token_nodes, context, shapes); + + trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes.state(), context.source)); + + if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { + trace!(target: "nu::color_syntax", "after {}", std::any::type_name::()); + + if len < shapes.len() { + for i in len..(shapes.len()) { + trace!(target: "nu::color_syntax", "new shape :: {:?}", shapes[i]); + } + } else { + trace!(target: "nu::color_syntax", "no new shapes"); + } + } + + ((), result) +} + +#[cfg(coloring_in_tokens)] +pub fn color_syntax_with<'a, 'b, T: ColorSyntax, U, I>( + shape: &T, + input: &I, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, +) -> ((), U) { + ( + (), + token_nodes.color_frame(shape.name(), |token_nodes| { + shape.color_syntax(input, token_nodes, context) + }), + ) +} + +#[cfg(not(coloring_in_tokens))] +pub fn color_fallible_syntax_with<'a, 'b, T: FallibleColorSyntax, U, I>( + shape: &T, + input: &I, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, +) -> Result { + token_nodes.color_fallible_frame(std::any::type_name::(), |token_nodes| { + shape.color_syntax(input, token_nodes, context, shapes) + }) +} + +#[cfg(coloring_in_tokens)] +pub fn color_fallible_syntax_with<'a, 'b, T: FallibleColorSyntax, U, I>( + shape: &T, + input: &I, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, +) -> Result { + token_nodes.color_fallible_frame(shape.name(), |token_nodes| { + shape.color_syntax(input, token_nodes, context) + }) +} + +impl ExpandSyntax for T { + type Output = hir::Expression; + + fn name(&self) -> &'static str { + ExpandExpression::name(self) + } + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + ExpandExpression::expand_expr(self, token_nodes, 
context) + } +} + +pub trait SkipSyntax: std::fmt::Debug + Copy { + fn skip<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError>; +} + +enum BarePathState { + Initial, + Seen(Span, Span), + Error(ParseError), +} + +impl BarePathState { + pub fn seen(self, span: Span) -> BarePathState { + match self { + BarePathState::Initial => BarePathState::Seen(span, span), + BarePathState::Seen(start, _) => BarePathState::Seen(start, span), + BarePathState::Error(err) => BarePathState::Error(err), + } + } + + pub fn end(self, peeked: Peeked, reason: &'static str) -> BarePathState { + match self { + BarePathState::Initial => BarePathState::Error(peeked.type_error(reason)), + BarePathState::Seen(start, end) => BarePathState::Seen(start, end), + BarePathState::Error(err) => BarePathState::Error(err), + } + } + + pub fn into_bare(self) -> Result { + match self { + BarePathState::Initial => unreachable!("into_bare in initial state"), + BarePathState::Seen(start, end) => Ok(start.until(end)), + BarePathState::Error(err) => Err(err), + } + } +} + +pub fn expand_bare<'a, 'b>( + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + predicate: impl Fn(&TokenNode) -> bool, +) -> Result { + let mut state = BarePathState::Initial; + + loop { + // Whitespace ends a word + let mut peeked = token_nodes.peek_any(); + + match peeked.node { + None => { + state = state.end(peeked, "word"); + break; + } + Some(node) => { + if predicate(node) { + state = state.seen(node.span()); + peeked.commit(); + } else { + state = state.end(peeked, "word"); + break; + } + } + } + } + + state.into_bare() +} + +#[derive(Debug, Copy, Clone)] +pub struct BarePathShape; + +impl ExpandSyntax for BarePathShape { + type Output = Span; + + fn name(&self) -> &'static str { + "shorthand path" + } + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + expand_bare(token_nodes, context, |token| match token { + TokenNode::Token(Spanned { + item: RawToken::Bare, + .. + }) + | TokenNode::Token(Spanned { + item: RawToken::Operator(Operator::Dot), + .. 
+ }) => true, + + _ => false, + }) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct BareShape; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for BareShape { + type Info = (); + type Input = FlatShape; + + fn color_syntax<'a, 'b>( + &self, + input: &FlatShape, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + token_nodes + .peek_any_token("word", |token| match token { + // If it's a bare token, color it + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => { + shapes.push((*input).spanned(*span)); + Ok(()) + } + + // otherwise, fail + other => Err(ParseError::mismatch("word", other.tagged_type_name())), + }) + .map_err(|err| err.into()) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for BareShape { + type Info = (); + type Input = FlatShape; + + fn name(&self) -> &'static str { + "BareShape" + } + + fn color_syntax<'a, 'b>( + &self, + input: &FlatShape, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Result<(), ShellError> { + let span = token_nodes.peek_any_token("word", |token| match token { + // If it's a bare token, color it + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => Ok(span), + + // otherwise, fail + other => Err(ParseError::mismatch("word", other.tagged_type_name())), + })?; + + token_nodes.color_shape((*input).spanned(*span)); + + Ok(()) + } +} + +impl ExpandSyntax for BareShape { + type Output = Spanned; + + fn name(&self) -> &'static str { + "word" + } + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + let peeked = token_nodes.peek_any().not_eof("word")?; + + match peeked.node { + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => { + peeked.commit(); + Ok(span.spanned_string(context.source)) + } + + other => Err(ParseError::mismatch("word", other.tagged_type_name())), + } + } +} + +impl TestSyntax for BareShape { + fn test<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Option> { + let peeked = token_nodes.peek_any(); + + match peeked.node { + Some(token) if token.is_bare() => Some(peeked), + _ => None, + } + } +} + +#[derive(Debug, Clone)] +pub enum CommandSignature { + Internal(Spanned>), + LiteralExternal { outer: Span, inner: Span }, + External(Span), + Expression(hir::Expression), +} + +impl FormatDebug for CommandSignature { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + match self { + CommandSignature::Internal(internal) => { + f.say_str("internal", internal.span.slice(source)) + } + CommandSignature::LiteralExternal { outer, .. } => { + f.say_str("external", outer.slice(source)) + } + CommandSignature::External(external) => { + write!(f, "external:{}", external.slice(source)) + } + CommandSignature::Expression(expr) => expr.fmt_debug(f, source), + } + } +} + +impl HasSpan for CommandSignature { + fn span(&self) -> Span { + match self { + CommandSignature::Internal(spanned) => spanned.span, + CommandSignature::LiteralExternal { outer, .. 
} => *outer, + CommandSignature::External(span) => *span, + CommandSignature::Expression(expr) => expr.span, + } + } +} + +impl CommandSignature { + pub fn to_expression(&self) -> hir::Expression { + match self { + CommandSignature::Internal(command) => { + let span = command.span; + hir::RawExpression::Command(span).spanned(span) + } + CommandSignature::LiteralExternal { outer, inner } => { + hir::RawExpression::ExternalCommand(hir::ExternalCommand::new(*inner)) + .spanned(*outer) + } + CommandSignature::External(span) => { + hir::RawExpression::ExternalCommand(hir::ExternalCommand::new(*span)).spanned(*span) + } + CommandSignature::Expression(expr) => expr.clone(), + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct PipelineShape; + +#[cfg(not(coloring_in_tokens))] +// The failure mode is if the head of the token stream is not a pipeline +impl FallibleColorSyntax for PipelineShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + // Make sure we're looking at a pipeline + let Pipeline { parts, .. } = + token_nodes.peek_any_token("pipeline", |node| node.as_pipeline())?; + + // Enumerate the pipeline parts + for part in parts { + // If the pipeline part has a prefix `|`, emit a pipe to color + if let Some(pipe) = part.pipe { + shapes.push(FlatShape::Pipe.spanned(pipe)); + } + + // Create a new iterator containing the tokens in the pipeline part to color + let mut token_nodes = TokensIterator::new(&part.tokens.item, part.span, false); + + color_syntax(&MaybeSpaceShape, &mut token_nodes, context, shapes); + color_syntax(&CommandShape, &mut token_nodes, context, shapes); + } + + Ok(()) + } +} + +#[cfg(coloring_in_tokens)] +// The failure mode is if the head of the token stream is not a pipeline +impl FallibleColorSyntax for PipelineShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "PipelineShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + // Make sure we're looking at a pipeline + let pipeline = token_nodes.peek_any_token("pipeline", |node| node.as_pipeline())?; + + let parts = &pipeline.parts[..]; + + // Enumerate the pipeline parts + for part in parts { + // If the pipeline part has a prefix `|`, emit a pipe to color + if let Some(pipe) = part.pipe { + token_nodes.color_shape(FlatShape::Pipe.spanned(pipe)) + } + + let tokens: Spanned<&[TokenNode]> = (&part.item.tokens[..]).spanned(part.span); + + token_nodes.child(tokens, move |token_nodes| { + color_syntax(&MaybeSpaceShape, token_nodes, context); + color_syntax(&CommandShape, token_nodes, context); + }); + } + + Ok(()) + } +} + +#[cfg(coloring_in_tokens)] +impl ExpandSyntax for PipelineShape { + type Output = ClassifiedPipeline; + + fn name(&self) -> &'static str { + "pipeline" + } + + fn expand_syntax<'content, 'me>( + &self, + iterator: &'me mut TokensIterator<'content>, + context: &ExpandContext, + ) -> Result { + let start = iterator.span_at_cursor(); + + let peeked = iterator.peek_any().not_eof("pipeline")?; + let pipeline = peeked.commit().as_pipeline()?; + + let parts = &pipeline.parts[..]; + + let mut out = vec![]; + + for part in parts { + let tokens: Spanned<&[TokenNode]> = (&part.item.tokens[..]).spanned(part.span); + + let classified = iterator.child(tokens, move |token_nodes| { + 
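+                // Each pipeline part is classified inside its own child
+                // iterator, scoped to just that part's tokens.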
expand_syntax(&ClassifiedCommandShape, token_nodes, context) + })?; + + out.push(classified); + } + + let end = iterator.span_at_cursor(); + + Ok(ClassifiedPipeline { + commands: out.spanned(start.until(end)), + }) + } +} + +#[cfg(not(coloring_in_tokens))] +impl ExpandSyntax for PipelineShape { + type Output = ClassifiedPipeline; + + fn name(&self) -> &'static str { + "pipeline" + } + + fn expand_syntax<'content, 'me>( + &self, + iterator: &'me mut TokensIterator<'content>, + context: &ExpandContext, + ) -> Result { + let start = iterator.span_at_cursor(); + + let peeked = iterator.peek_any().not_eof("pipeline")?; + let pipeline = peeked.commit().as_pipeline()?; + + let parts = &pipeline.parts[..]; + + let mut out = vec![]; + + for part in parts { + let tokens: Spanned<&[TokenNode]> = (&part.item.tokens[..]).spanned(part.span); + + let classified = iterator.child(tokens, move |token_nodes| { + expand_syntax(&ClassifiedCommandShape, token_nodes, context) + })?; + + out.push(classified); + } + + let end = iterator.span_at_cursor(); + + Ok(ClassifiedPipeline { + commands: out.spanned(start.until(end)), + }) + } +} + +pub enum CommandHeadKind { + External, + Internal(Signature), +} + +#[derive(Debug, Copy, Clone)] +pub struct CommandHeadShape; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for CommandHeadShape { + type Info = CommandHeadKind; + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result { + // If we don't ultimately find a token, roll back + token_nodes.atomic(|token_nodes| { + // First, take a look at the next token + let atom = expand_atom( + token_nodes, + "command head", + context, + ExpansionRule::permissive(), + )?; + + match atom.item { + // If the head is an explicit external command (^cmd), color it as an external command + AtomicToken::ExternalCommand { .. } => { + shapes.push(FlatShape::ExternalCommand.spanned(atom.span)); + Ok(CommandHeadKind::External) + } + + // If the head is a word, it depends on whether it matches a registered internal command + AtomicToken::Word { text } => { + let name = text.slice(context.source); + + if context.registry.has(name) { + // If the registry has the command, color it as an internal command + shapes.push(FlatShape::InternalCommand.spanned(text)); + let command = context.registry.expect_command(name); + Ok(CommandHeadKind::Internal(command.signature())) + } else { + // Otherwise, color it as an external command + shapes.push(FlatShape::ExternalCommand.spanned(text)); + Ok(CommandHeadKind::External) + } + } + + // Otherwise, we're not actually looking at a command + _ => Err(ShellError::syntax_error( + "No command at the head".tagged(atom.span), + )), + } + }) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for CommandHeadShape { + type Info = CommandHeadKind; + type Input = (); + + fn name(&self) -> &'static str { + "CommandHeadShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + // If we don't ultimately find a token, roll back + token_nodes.atomic(|token_nodes| { + // First, take a look at the next token + let atom = expand_atom( + token_nodes, + "command head", + context, + ExpansionRule::permissive(), + )?; + + match atom.item { + // If the head is an explicit external command (^cmd), color it as an external command + AtomicToken::ExternalCommand { .. 
} => { + token_nodes.color_shape(FlatShape::ExternalCommand.spanned(atom.span)); + Ok(CommandHeadKind::External) + } + + // If the head is a word, it depends on whether it matches a registered internal command + AtomicToken::Word { text } => { + let name = text.slice(context.source); + + if context.registry.has(name) { + // If the registry has the command, color it as an internal command + token_nodes.color_shape(FlatShape::InternalCommand.spanned(text)); + let command = context.registry.expect_command(name); + Ok(CommandHeadKind::Internal(command.signature())) + } else { + // Otherwise, color it as an external command + token_nodes.color_shape(FlatShape::ExternalCommand.spanned(text)); + Ok(CommandHeadKind::External) + } + } + + // Otherwise, we're not actually looking at a command + _ => Err(ShellError::syntax_error( + "No command at the head".tagged(atom.span), + )), + } + }) + } +} + +impl ExpandSyntax for CommandHeadShape { + type Output = CommandSignature; + + fn name(&self) -> &'static str { + "command head" + } + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + let node = + parse_single_node_skipping_ws(token_nodes, "command head1", |token, token_span, _| { + Ok(match token { + RawToken::ExternalCommand(span) => CommandSignature::LiteralExternal { + outer: token_span, + inner: span, + }, + RawToken::Bare => { + let name = token_span.slice(context.source); + if context.registry.has(name) { + let command = context.registry.expect_command(name); + CommandSignature::Internal(command.spanned(token_span)) + } else { + CommandSignature::External(token_span) + } + } + _ => { + return Err(ShellError::type_error( + "command head2", + token.type_name().tagged(token_span), + )) + } + }) + }); + + match node { + Ok(expr) => return Ok(expr), + Err(_) => match expand_expr(&AnyExpressionShape, token_nodes, context) { + Ok(expr) => return Ok(CommandSignature::Expression(expr)), + Err(_) => Err(token_nodes.peek_non_ws().type_error("command head3")), + }, + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct ClassifiedCommandShape; + +impl ExpandSyntax for ClassifiedCommandShape { + type Output = ClassifiedCommand; + + fn name(&self) -> &'static str { + "classified command" + } + + fn expand_syntax<'a, 'b>( + &self, + iterator: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + let start = iterator.span_at_cursor(); + let head = expand_syntax(&CommandHeadShape, iterator, context)?; + + match &head { + CommandSignature::Expression(expr) => { + Err(ParseError::mismatch("command", expr.tagged_type_name())) + } + + // If the command starts with `^`, treat it as an external command no matter what + CommandSignature::External(name) => { + let name_str = name.slice(&context.source); + + external_command(iterator, context, name_str.tagged(name)) + } + + CommandSignature::LiteralExternal { outer, inner } => { + let name_str = inner.slice(&context.source); + + external_command(iterator, context, name_str.tagged(outer)) + } + + CommandSignature::Internal(command) => { + let tail = + parse_command_tail(&command.signature(), &context, iterator, command.span)?; + + let (positional, named) = match tail { + None => (None, None), + Some((positional, named)) => (positional, named), + }; + + let end = iterator.span_at_cursor(); + + let call = hir::Call { + head: Box::new(head.to_expression()), + positional, + named, + } + .spanned(start.until(end)); + + Ok(ClassifiedCommand::Internal(InternalCommand::new( + 
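+                    // Arguments to InternalCommand::new: the command's name,
+                    // a Tag built from the head's span (no anchor), and the
+                    // parsed call (head expression plus positional/named args).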
command.item.name().to_string(), + Tag { + span: command.span, + anchor: None, + }, + call, + ))) + } + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct InternalCommandHeadShape; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for InternalCommandHeadShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let peeked_head = token_nodes.peek_non_ws().not_eof("command head4"); + + let peeked_head = match peeked_head { + Err(_) => return Ok(()), + Ok(peeked_head) => peeked_head, + }; + + let _expr = match peeked_head.node { + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => shapes.push(FlatShape::Word.spanned(*span)), + + TokenNode::Token(Spanned { + item: RawToken::String(_inner_tag), + span, + }) => shapes.push(FlatShape::String.spanned(*span)), + + _node => shapes.push(FlatShape::Error.spanned(peeked_head.node.span())), + }; + + peeked_head.commit(); + + Ok(()) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for InternalCommandHeadShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "InternalCommandHeadShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Result<(), ShellError> { + let peeked_head = token_nodes.peek_non_ws().not_eof("command head4"); + + let peeked_head = match peeked_head { + Err(_) => return Ok(()), + Ok(peeked_head) => peeked_head, + }; + + let node = peeked_head.commit(); + + let _expr = match node { + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => token_nodes.color_shape(FlatShape::Word.spanned(*span)), + + TokenNode::Token(Spanned { + item: RawToken::String(_inner_tag), + span, + }) => token_nodes.color_shape(FlatShape::String.spanned(*span)), + + _node => token_nodes.color_shape(FlatShape::Error.spanned(node.span())), + }; + + Ok(()) + } +} + +impl ExpandExpression for InternalCommandHeadShape { + fn name(&self) -> &'static str { + "internal command head" + } + + fn expand_expr( + &self, + token_nodes: &mut TokensIterator<'_>, + _context: &ExpandContext, + ) -> Result { + let peeked_head = token_nodes.peek_non_ws().not_eof("command head")?; + + let expr = match peeked_head.node { + TokenNode::Token( + spanned @ Spanned { + item: RawToken::Bare, + .. 
+ }, + ) => spanned.map(|_| hir::RawExpression::Literal(hir::Literal::Bare)), + + TokenNode::Token(Spanned { + item: RawToken::String(inner_span), + span, + }) => hir::RawExpression::Literal(hir::Literal::String(*inner_span)).spanned(*span), + + node => { + return Err(ParseError::mismatch( + "command head", + node.tagged_type_name(), + )) + } + }; + + peeked_head.commit(); + + Ok(expr) + } +} + +pub(crate) struct SingleError<'token> { + expected: &'static str, + node: &'token Spanned, +} + +impl<'token> SingleError<'token> { + pub(crate) fn error(&self) -> ParseError { + ParseError::mismatch(self.expected, self.node.type_name().tagged(self.node.span)) + } +} + +fn parse_single_node<'a, 'b, T>( + token_nodes: &'b mut TokensIterator<'a>, + expected: &'static str, + callback: impl FnOnce(RawToken, Span, SingleError) -> Result, +) -> Result { + token_nodes.peek_any_token(expected, |node| match node { + TokenNode::Token(token) => callback( + token.item, + token.span, + SingleError { + expected, + node: token, + }, + ), + + other => Err(ParseError::mismatch(expected, other.tagged_type_name())), + }) +} + +fn parse_single_node_skipping_ws<'a, 'b, T>( + token_nodes: &'b mut TokensIterator<'a>, + expected: &'static str, + callback: impl FnOnce(RawToken, Span, SingleError) -> Result, +) -> Result { + let peeked = token_nodes.peek_non_ws().not_eof(expected)?; + + let expr = match peeked.node { + TokenNode::Token(token) => callback( + token.item, + token.span, + SingleError { + expected, + node: token, + }, + )?, + + other => return Err(ShellError::type_error(expected, other.tagged_type_name())), + }; + + peeked.commit(); + + Ok(expr) +} + +#[derive(Debug, Copy, Clone)] +pub struct WhitespaceShape; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for WhitespaceShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let peeked = token_nodes.peek_any().not_eof("whitespace"); + + let peeked = match peeked { + Err(_) => return Ok(()), + Ok(peeked) => peeked, + }; + + let _tag = match peeked.node { + TokenNode::Whitespace(span) => shapes.push(FlatShape::Whitespace.spanned(*span)), + + _other => return Ok(()), + }; + + peeked.commit(); + + Ok(()) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for WhitespaceShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "WhitespaceShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Result<(), ShellError> { + let peeked = token_nodes.peek_any().not_eof("whitespace"); + + let peeked = match peeked { + Err(_) => return Ok(()), + Ok(peeked) => peeked, + }; + + let node = peeked.commit(); + + let _ = match node { + TokenNode::Whitespace(span) => { + token_nodes.color_shape(FlatShape::Whitespace.spanned(*span)) + } + + _other => return Ok(()), + }; + + Ok(()) + } +} + +impl ExpandSyntax for WhitespaceShape { + type Output = Span; + + fn name(&self) -> &'static str { + "whitespace" + } + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Result { + let peeked = token_nodes.peek_any().not_eof("whitespace")?; + + let span = match peeked.node { + TokenNode::Whitespace(tag) => *tag, + + other => return Err(ParseError::mismatch("whitespace", other.tagged_type_name())), + }; + + peeked.commit(); + + Ok(span) 
+ } +} + +#[derive(Debug, Copy, Clone)] +pub struct SpacedExpression { + inner: T, +} + +impl ExpandExpression for SpacedExpression { + fn name(&self) -> &'static str { + "spaced expression" + } + + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + // TODO: Make the name part of the trait + let peeked = token_nodes.peek_any().not_eof("whitespace")?; + + match peeked.node { + TokenNode::Whitespace(_) => { + peeked.commit(); + expand_expr(&self.inner, token_nodes, context) + } + + other => Err(ParseError::mismatch("whitespace", other.tagged_type_name())), + } + } +} + +pub fn maybe_spaced(inner: T) -> MaybeSpacedExpression { + MaybeSpacedExpression { inner } +} + +#[derive(Debug, Copy, Clone)] +pub struct MaybeSpacedExpression { + inner: T, +} + +#[derive(Debug, Copy, Clone)] +pub struct MaybeSpaceShape; + +impl ExpandSyntax for MaybeSpaceShape { + type Output = Option; + + fn name(&self) -> &'static str { + "maybe space" + } + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Result { + let peeked = token_nodes.peek_any().not_eof("whitespace"); + + let span = match peeked { + Err(_) => None, + Ok(peeked) => { + if let TokenNode::Whitespace(..) = peeked.node { + let node = peeked.commit(); + Some(node.span()) + } else { + None + } + } + }; + + Ok(span) + } +} + +#[cfg(not(coloring_in_tokens))] +impl ColorSyntax for MaybeSpaceShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Self::Info { + let peeked = token_nodes.peek_any().not_eof("whitespace"); + + let peeked = match peeked { + Err(_) => return, + Ok(peeked) => peeked, + }; + + if let TokenNode::Whitespace(span) = peeked.node { + peeked.commit(); + shapes.push(FlatShape::Whitespace.spanned(*span)); + } + } +} + +#[cfg(coloring_in_tokens)] +impl ColorSyntax for MaybeSpaceShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "MaybeSpaceShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Self::Info { + let peeked = token_nodes.peek_any().not_eof("whitespace"); + + let peeked = match peeked { + Err(_) => return, + Ok(peeked) => peeked, + }; + + if let TokenNode::Whitespace(span) = peeked.node { + peeked.commit(); + token_nodes.color_shape(FlatShape::Whitespace.spanned(*span)); + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct SpaceShape; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for SpaceShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let peeked = token_nodes.peek_any().not_eof("whitespace")?; + + match peeked.node { + TokenNode::Whitespace(span) => { + peeked.commit(); + shapes.push(FlatShape::Whitespace.spanned(*span)); + Ok(()) + } + + other => Err(ShellError::type_error( + "whitespace", + other.tagged_type_name(), + )), + } + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for SpaceShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "SpaceShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Result<(), ShellError> { 
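+        // Unlike MaybeSpaceShape, SpaceShape requires whitespace here:
+        // anything other than a whitespace token is surfaced as an error
+        // instead of being skipped.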
+ let peeked = token_nodes.peek_any().not_eof("whitespace")?; + + match peeked.node { + TokenNode::Whitespace(span) => { + peeked.commit(); + token_nodes.color_shape(FlatShape::Whitespace.spanned(*span)); + Ok(()) + } + + other => Err(ShellError::type_error( + "whitespace", + other.tagged_type_name(), + )), + } + } +} + +impl ExpandExpression for MaybeSpacedExpression { + fn name(&self) -> &'static str { + "maybe space" + } + + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + // TODO: Make the name part of the trait + let peeked = token_nodes.peek_any().not_eof("whitespace")?; + + match peeked.node { + TokenNode::Whitespace(_) => { + peeked.commit(); + expand_expr(&self.inner, token_nodes, context) + } + + _ => { + peeked.rollback(); + expand_expr(&self.inner, token_nodes, context) + } + } + } +} + +pub fn spaced(inner: T) -> SpacedExpression { + SpacedExpression { inner } +} + +fn expand_variable(span: Span, token_span: Span, source: &Text) -> hir::Expression { + if span.slice(source) == "it" { + hir::Expression::it_variable(span, token_span) + } else { + hir::Expression::variable(span, token_span) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct CommandShape; + +#[cfg(not(coloring_in_tokens))] +impl ColorSyntax for CommandShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) { + let kind = color_fallible_syntax(&CommandHeadShape, token_nodes, context, shapes); + + match kind { + Err(_) => { + // We didn't find a command, so we'll have to fall back to parsing this pipeline part + // as a blob of undifferentiated expressions + color_syntax(&ExpressionListShape, token_nodes, context, shapes); + } + + Ok(CommandHeadKind::External) => { + color_syntax(&ExternalTokensShape, token_nodes, context, shapes); + } + Ok(CommandHeadKind::Internal(signature)) => { + color_syntax_with(&CommandTailShape, &signature, token_nodes, context, shapes); + } + }; + } +} + +#[cfg(coloring_in_tokens)] +impl ColorSyntax for CommandShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "CommandShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) { + let kind = color_fallible_syntax(&CommandHeadShape, token_nodes, context); + + match kind { + Err(_) => { + // We didn't find a command, so we'll have to fall back to parsing this pipeline part + // as a blob of undifferentiated expressions + color_syntax(&ExpressionListShape, token_nodes, context); + } + + Ok(CommandHeadKind::External) => { + color_syntax(&ExternalTokensShape, token_nodes, context); + } + Ok(CommandHeadKind::Internal(signature)) => { + color_syntax_with(&CommandTailShape, &signature, token_nodes, context); + } + }; + } +} diff --git a/src/parser/hir/syntax_shape/block.rs b/src/parser/hir/syntax_shape/block.rs new file mode 100644 index 0000000000..b5059bcb7b --- /dev/null +++ b/src/parser/hir/syntax_shape/block.rs @@ -0,0 +1,526 @@ +use crate::errors::ShellError; +#[cfg(not(coloring_in_tokens))] +use crate::parser::hir::syntax_shape::FlatShape; +use crate::parser::{ + hir, + hir::syntax_shape::{ + color_fallible_syntax, color_syntax_with, continue_expression, expand_expr, expand_syntax, + DelimitedShape, ExpandContext, ExpandExpression, ExpressionContinuationShape, + ExpressionListShape, FallibleColorSyntax, MemberShape, 
ParseError, PathTailShape, + VariablePathShape, + }, + hir::tokens_iterator::TokensIterator, + parse::token_tree::Delimiter, + RawToken, TokenNode, +}; +use crate::{Span, Spanned, SpannedItem}; + +#[derive(Debug, Copy, Clone)] +pub struct AnyBlockShape; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for AnyBlockShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let block = token_nodes.peek_non_ws().not_eof("block"); + + let block = match block { + Err(_) => return Ok(()), + Ok(block) => block, + }; + + // is it just a block? + let block = block.node.as_block(); + + match block { + // If so, color it as a block + Some((children, spans)) => { + let mut token_nodes = TokensIterator::new(children.item, children.span, false); + color_syntax_with( + &DelimitedShape, + &(Delimiter::Brace, spans.0, spans.1), + &mut token_nodes, + context, + shapes, + ); + + return Ok(()); + } + _ => {} + } + + // Otherwise, look for a shorthand block. If none found, fail + color_fallible_syntax(&ShorthandBlock, token_nodes, context, shapes) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for AnyBlockShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "AnyBlockShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let block = token_nodes.peek_non_ws().not_eof("block"); + + let block = match block { + Err(_) => return Ok(()), + Ok(block) => block, + }; + + // is it just a block? + let block = block.node.as_block(); + + match block { + // If so, color it as a block + Some((children, spans)) => { + token_nodes.child(children, |token_nodes| { + color_syntax_with( + &DelimitedShape, + &(Delimiter::Brace, spans.0, spans.1), + token_nodes, + context, + ); + }); + + return Ok(()); + } + _ => {} + } + + // Otherwise, look for a shorthand block. If none found, fail + color_fallible_syntax(&ShorthandBlock, token_nodes, context) + } +} + +impl ExpandExpression for AnyBlockShape { + fn name(&self) -> &'static str { + "any block" + } + + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + let block = token_nodes.peek_non_ws().not_eof("block")?; + + // is it just a block? + let block = block.node.as_block(); + + match block { + Some((block, _tags)) => { + let mut iterator = TokensIterator::new(&block.item, block.span, false); + + let exprs = expand_syntax(&ExpressionListShape, &mut iterator, context)?; + + return Ok(hir::RawExpression::Block(exprs.item).spanned(block.span)); + } + _ => {} + } + + expand_syntax(&ShorthandBlock, token_nodes, context) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct ShorthandBlock; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for ShorthandBlock { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + // Try to find a shorthand head. 
If none found, fail + color_fallible_syntax(&ShorthandPath, token_nodes, context, shapes)?; + + loop { + // Check to see whether there's any continuation after the head expression + let result = + color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes); + + match result { + // if no continuation was found, we're done + Err(_) => break, + // if a continuation was found, look for another one + Ok(_) => continue, + } + } + + Ok(()) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for ShorthandBlock { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "ShorthandBlock" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + // Try to find a shorthand head. If none found, fail + color_fallible_syntax(&ShorthandPath, token_nodes, context)?; + + loop { + // Check to see whether there's any continuation after the head expression + let result = color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context); + + match result { + // if no continuation was found, we're done + Err(_) => break, + // if a continuation was found, look for another one + Ok(_) => continue, + } + } + + Ok(()) + } +} + +impl ExpandExpression for ShorthandBlock { + fn name(&self) -> &'static str { + "shorthand block" + } + + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + let path = expand_expr(&ShorthandPath, token_nodes, context)?; + let start = path.span; + let expr = continue_expression(path, token_nodes, context); + let end = expr.span; + let block = hir::RawExpression::Block(vec![expr]).spanned(start.until(end)); + + Ok(block) + } +} + +/// A shorthand for `$it.foo."bar"`, used inside of a shorthand block +#[derive(Debug, Copy, Clone)] +pub struct ShorthandPath; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for ShorthandPath { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + token_nodes.atomic(|token_nodes| { + let variable = color_fallible_syntax(&VariablePathShape, token_nodes, context, shapes); + + match variable { + Ok(_) => { + // if it's a variable path, that's the head part + return Ok(()); + } + + Err(_) => { + // otherwise, we'll try to find a member path + } + } + + // look for a member (`` -> `$it.`) + color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?; + + // Now that we've synthesized the head, of the path, proceed to expand the tail of the path + // like any other path. 
+ let tail = color_fallible_syntax(&PathTailShape, token_nodes, context, shapes); + + match tail { + Ok(_) => {} + Err(_) => { + // It's ok if there's no path tail; a single member is sufficient + } + } + + Ok(()) + }) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for ShorthandPath { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "ShorthandPath" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + token_nodes.atomic(|token_nodes| { + let variable = color_fallible_syntax(&VariablePathShape, token_nodes, context); + + match variable { + Ok(_) => { + // if it's a variable path, that's the head part + return Ok(()); + } + + Err(_) => { + // otherwise, we'll try to find a member path + } + } + + // look for a member (`` -> `$it.`) + color_fallible_syntax(&MemberShape, token_nodes, context)?; + + // Now that we've synthesized the head, of the path, proceed to expand the tail of the path + // like any other path. + let tail = color_fallible_syntax(&PathTailShape, token_nodes, context); + + match tail { + Ok(_) => {} + Err(_) => { + // It's ok if there's no path tail; a single member is sufficient + } + } + + Ok(()) + }) + } +} + +impl ExpandExpression for ShorthandPath { + fn name(&self) -> &'static str { + "shorthand path" + } + + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + // if it's a variable path, that's the head part + let path = expand_expr(&VariablePathShape, token_nodes, context); + + match path { + Ok(path) => return Ok(path), + Err(_) => {} + } + + // Synthesize the head of the shorthand path (`` -> `$it.`) + let mut head = expand_expr(&ShorthandHeadShape, token_nodes, context)?; + + // Now that we've synthesized the head, of the path, proceed to expand the tail of the path + // like any other path. + let tail = expand_syntax(&PathTailShape, token_nodes, context); + + match tail { + Err(_) => return Ok(head), + Ok(Spanned { item: tail, .. 
}) => { + // For each member that `PathTailShape` expanded, join it onto the existing expression + // to form a new path + for member in tail { + head = hir::Expression::dot_member(head, member); + } + + Ok(head) + } + } + } +} + +/// A shorthand for `$it.foo."bar"`, used inside of a shorthand block +#[derive(Debug, Copy, Clone)] +pub struct ShorthandHeadShape; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for ShorthandHeadShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + // A shorthand path must not be at EOF + let peeked = token_nodes.peek_non_ws().not_eof("shorthand path")?; + + match peeked.node { + // If the head of a shorthand path is a bare token, it expands to `$it.bare` + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => { + peeked.commit(); + shapes.push(FlatShape::BareMember.spanned(*span)); + Ok(()) + } + + // If the head of a shorthand path is a string, it expands to `$it."some string"` + TokenNode::Token(Spanned { + item: RawToken::String(_), + span: outer, + }) => { + peeked.commit(); + shapes.push(FlatShape::StringMember.spanned(*outer)); + Ok(()) + } + + other => Err(ShellError::type_error( + "shorthand head", + other.tagged_type_name(), + )), + } + } +} + +#[cfg(coloring_in_tokens)] +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for ShorthandHeadShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + // A shorthand path must not be at EOF + let peeked = token_nodes.peek_non_ws().not_eof("shorthand path")?; + + match peeked.node { + // If the head of a shorthand path is a bare token, it expands to `$it.bare` + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => { + peeked.commit(); + shapes.push(FlatShape::BareMember.spanned(*span)); + Ok(()) + } + + // If the head of a shorthand path is a string, it expands to `$it."some string"` + TokenNode::Token(Spanned { + item: RawToken::String(_), + span: outer, + }) => { + peeked.commit(); + shapes.push(FlatShape::StringMember.spanned(*outer)); + Ok(()) + } + + other => Err(ShellError::type_error( + "shorthand head", + other.tagged_type_name(), + )), + } + } +} + +impl ExpandExpression for ShorthandHeadShape { + fn name(&self) -> &'static str { + "shorthand head" + } + + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + // A shorthand path must not be at EOF + let peeked = token_nodes.peek_non_ws().not_eof("shorthand path")?; + + match peeked.node { + // If the head of a shorthand path is a bare token, it expands to `$it.bare` + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => { + // Commit the peeked token + peeked.commit(); + + // Synthesize an `$it` expression + let it = synthetic_it(); + + // Make a path out of `$it` and the bare token as a member + Ok(hir::Expression::path( + it, + vec![span.spanned_string(context.source)], + *span, + )) + } + + // If the head of a shorthand path is a string, it expands to `$it."some string"` + TokenNode::Token(Spanned { + item: RawToken::String(inner), + span: outer, + }) => { + // Commit the peeked token + peeked.commit(); + + // Synthesize an `$it` expression + let it = synthetic_it(); + + // Make a path out 
of `$it` and the bare token as a member + Ok(hir::Expression::path( + it, + vec![inner.string(context.source).spanned(*outer)], + *outer, + )) + } + + // Any other token is not a valid bare head + other => { + return Err(ParseError::mismatch( + "shorthand path", + other.tagged_type_name(), + )) + } + } + } +} + +fn synthetic_it() -> hir::Expression { + hir::Expression::it_variable(Span::unknown(), Span::unknown()) +} diff --git a/src/parser/hir/syntax_shape/expression.rs b/src/parser/hir/syntax_shape/expression.rs new file mode 100644 index 0000000000..6429ab57c3 --- /dev/null +++ b/src/parser/hir/syntax_shape/expression.rs @@ -0,0 +1,495 @@ +pub(crate) mod atom; +pub(crate) mod delimited; +pub(crate) mod file_path; +pub(crate) mod list; +pub(crate) mod number; +pub(crate) mod pattern; +pub(crate) mod string; +pub(crate) mod unit; +pub(crate) mod variable_path; + +use crate::parser::hir::syntax_shape::{ + color_delimited_square, color_fallible_syntax, color_fallible_syntax_with, expand_atom, + expand_delimited_square, expand_expr, expand_syntax, AtomicToken, BareShape, ColorableDotShape, + DotShape, ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, ExpressionContinuation, + ExpressionContinuationShape, FallibleColorSyntax, FlatShape, ParseError, +}; +use crate::parser::{ + hir, + hir::{Expression, TokensIterator}, +}; +use crate::prelude::*; +use std::path::PathBuf; + +#[derive(Debug, Copy, Clone)] +pub struct AnyExpressionShape; + +impl ExpandExpression for AnyExpressionShape { + fn name(&self) -> &'static str { + "any expression" + } + + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + // Look for an expression at the cursor + let head = expand_expr(&AnyExpressionStartShape, token_nodes, context)?; + + Ok(continue_expression(head, token_nodes, context)) + } +} + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for AnyExpressionShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + // Look for an expression at the cursor + color_fallible_syntax(&AnyExpressionStartShape, token_nodes, context, shapes)?; + + match continue_coloring_expression(token_nodes, context, shapes) { + Err(_) => { + // it's fine for there to be no continuation + } + + Ok(()) => {} + } + + Ok(()) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for AnyExpressionShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "AnyExpressionShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + // Look for an expression at the cursor + color_fallible_syntax(&AnyExpressionStartShape, token_nodes, context)?; + + match continue_coloring_expression(token_nodes, context) { + Err(_) => { + // it's fine for there to be no continuation + } + + Ok(()) => {} + } + + Ok(()) + } +} + +pub(crate) fn continue_expression( + mut head: hir::Expression, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, +) -> hir::Expression { + loop { + // Check to see whether there's any continuation after the head expression + let continuation = expand_syntax(&ExpressionContinuationShape, token_nodes, context); + + match continuation { + // If there's no continuation, return the head + Err(_) => return head, + // Otherwise, form a new 
expression by combining the head with the continuation + Ok(continuation) => match continuation { + // If the continuation is a `.member`, form a path with the new member + ExpressionContinuation::DotSuffix(_dot, member) => { + head = Expression::dot_member(head, member); + } + + // Otherwise, if the continuation is an infix suffix, form an infix expression + ExpressionContinuation::InfixSuffix(op, expr) => { + head = Expression::infix(head, op, expr); + } + }, + } + } +} + +#[cfg(not(coloring_in_tokens))] +pub(crate) fn continue_coloring_expression( + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + shapes: &mut Vec>, +) -> Result<(), ShellError> { + // if there's not even one expression continuation, fail + color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes)?; + + loop { + // Check to see whether there's any continuation after the head expression + let result = + color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes); + + match result { + Err(_) => { + // We already saw one continuation, so just return + return Ok(()); + } + + Ok(_) => {} + } + } +} + +#[cfg(coloring_in_tokens)] +pub(crate) fn continue_coloring_expression( + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, +) -> Result<(), ShellError> { + // if there's not even one expression continuation, fail + color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context)?; + + loop { + // Check to see whether there's any continuation after the head expression + let result = color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context); + + match result { + Err(_) => { + // We already saw one continuation, so just return + return Ok(()); + } + + Ok(_) => {} + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct AnyExpressionStartShape; + +impl ExpandExpression for AnyExpressionStartShape { + fn name(&self) -> &'static str { + "any expression start" + } + + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + let atom = expand_atom(token_nodes, "expression", context, ExpansionRule::new())?; + + match atom.item { + AtomicToken::Size { number, unit } => { + return Ok(hir::Expression::size( + number.to_number(context.source), + unit.item, + Tag { + span: atom.span, + anchor: None, + }, + )) + } + + AtomicToken::SquareDelimited { nodes, .. } => { + expand_delimited_square(&nodes, atom.span.into(), context) + } + + AtomicToken::Word { .. } | AtomicToken::Dot { .. } => { + let end = expand_syntax(&BareTailShape, token_nodes, context)?; + Ok(hir::Expression::bare(atom.span.until_option(end))) + } + + other => return other.spanned(atom.span).into_hir(context, "expression"), + } + } +} + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for AnyExpressionStartShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let atom = token_nodes.spanned(|token_nodes| { + expand_atom( + token_nodes, + "expression", + context, + ExpansionRule::permissive(), + ) + }); + + let atom = match atom { + Spanned { + item: Err(_err), + span, + } => { + shapes.push(FlatShape::Error.spanned(span)); + return Ok(()); + } + + Spanned { + item: Ok(value), .. 
+ } => value, + }; + + match atom.item { + AtomicToken::Size { number, unit } => shapes.push( + FlatShape::Size { + number: number.span.into(), + unit: unit.span.into(), + } + .spanned(atom.span), + ), + + AtomicToken::SquareDelimited { nodes, spans } => { + color_delimited_square(spans, &nodes, atom.span.into(), context, shapes) + } + + AtomicToken::Word { .. } | AtomicToken::Dot { .. } => { + shapes.push(FlatShape::Word.spanned(atom.span)); + } + + _ => atom.color_tokens(shapes), + } + + Ok(()) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for AnyExpressionStartShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "AnyExpressionStartShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let atom = token_nodes.spanned(|token_nodes| { + expand_atom( + token_nodes, + "expression", + context, + ExpansionRule::permissive(), + ) + }); + + let atom = match atom { + Spanned { + item: Err(_err), + span, + } => { + token_nodes.color_shape(FlatShape::Error.spanned(span)); + return Ok(()); + } + + Spanned { + item: Ok(value), .. + } => value, + }; + + match atom.item { + AtomicToken::Size { number, unit } => token_nodes.color_shape( + FlatShape::Size { + number: number.span.into(), + unit: unit.span.into(), + } + .spanned(atom.span), + ), + + AtomicToken::SquareDelimited { nodes, spans } => { + token_nodes.child((&nodes[..]).spanned(atom.span), |tokens| { + color_delimited_square(spans, tokens, atom.span.into(), context); + }); + } + + AtomicToken::Word { .. } | AtomicToken::Dot { .. } => { + token_nodes.color_shape(FlatShape::Word.spanned(atom.span)); + } + + _ => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)), + } + + Ok(()) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct BareTailShape; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for BareTailShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let len = shapes.len(); + + loop { + let word = color_fallible_syntax_with( + &BareShape, + &FlatShape::Word, + token_nodes, + context, + shapes, + ); + + match word { + // if a word was found, continue + Ok(_) => continue, + // if a word wasn't found, try to find a dot + Err(_) => {} + } + + // try to find a dot + let dot = color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Word, + token_nodes, + context, + shapes, + ); + + match dot { + // if a dot was found, try to find another word + Ok(_) => continue, + // otherwise, we're done + Err(_) => break, + } + } + + if shapes.len() > len { + Ok(()) + } else { + Err(ShellError::syntax_error( + "No tokens matched BareTailShape".tagged_unknown(), + )) + } + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for BareTailShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "BareTailShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let len = token_nodes.state().shapes().len(); + + loop { + let word = + color_fallible_syntax_with(&BareShape, &FlatShape::Word, token_nodes, context); + + match word { + // if a word was found, continue + Ok(_) => continue, + // if a word wasn't found, try to find a dot + Err(_) => {} + } + + // try to find a 
dot + let dot = color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Word, + token_nodes, + context, + ); + + match dot { + // if a dot was found, try to find another word + Ok(_) => continue, + // otherwise, we're done + Err(_) => break, + } + } + + if token_nodes.state().shapes().len() > len { + Ok(()) + } else { + Err(ShellError::syntax_error( + "No tokens matched BareTailShape".tagged_unknown(), + )) + } + } +} + +impl ExpandSyntax for BareTailShape { + fn name(&self) -> &'static str { + "word continuation" + } + + type Output = Option; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result, ParseError> { + let mut end: Option = None; + + loop { + match expand_syntax(&BareShape, token_nodes, context) { + Ok(bare) => { + end = Some(bare.span); + continue; + } + + Err(_) => match expand_syntax(&DotShape, token_nodes, context) { + Ok(dot) => { + end = Some(dot); + continue; + } + + Err(_) => break, + }, + } + } + + Ok(end) + } +} + +pub fn expand_file_path(string: &str, context: &ExpandContext) -> PathBuf { + let expanded = shellexpand::tilde_with_context(string, || context.homedir()); + + PathBuf::from(expanded.as_ref()) +} diff --git a/src/parser/hir/syntax_shape/expression/atom.rs b/src/parser/hir/syntax_shape/expression/atom.rs new file mode 100644 index 0000000000..09583c728b --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/atom.rs @@ -0,0 +1,587 @@ +use crate::parser::hir::syntax_shape::{ + expand_syntax, expression::expand_file_path, parse_single_node, BarePathShape, + BarePatternShape, ExpandContext, UnitShape, +}; +use crate::parser::{ + hir, + hir::{Expression, RawNumber, TokensIterator}, + parse::flag::{Flag, FlagKind}, + DelimitedNode, Delimiter, FlatShape, RawToken, TokenNode, Unit, +}; +use crate::prelude::*; +use crate::{Span, Spanned}; + +#[derive(Debug)] +pub enum AtomicToken<'tokens> { + Eof { + span: Span, + }, + Error { + error: Spanned, + }, + Number { + number: RawNumber, + }, + Size { + number: Spanned, + unit: Spanned, + }, + String { + body: Span, + }, + ItVariable { + name: Span, + }, + Variable { + name: Span, + }, + ExternalCommand { + command: Span, + }, + ExternalWord { + text: Span, + }, + GlobPattern { + pattern: Span, + }, + FilePath { + path: Span, + }, + Word { + text: Span, + }, + SquareDelimited { + spans: (Span, Span), + nodes: &'tokens Vec, + }, + ParenDelimited { + span: (Span, Span), + nodes: &'tokens Vec, + }, + BraceDelimited { + spans: (Span, Span), + nodes: &'tokens Vec, + }, + Pipeline { + pipe: Option, + elements: Spanned<&'tokens Vec>, + }, + ShorthandFlag { + name: Span, + }, + LonghandFlag { + name: Span, + }, + Dot { + text: Span, + }, + Operator { + text: Span, + }, + Whitespace { + text: Span, + }, +} + +pub type SpannedAtomicToken<'tokens> = Spanned>; + +impl<'tokens> SpannedAtomicToken<'tokens> { + pub fn into_hir( + &self, + context: &ExpandContext, + expected: &'static str, + ) -> Result { + Ok(match &self.item { + AtomicToken::Eof { .. } => { + return Err(ParseError::mismatch( + expected, + "eof atomic token".tagged(self.span), + )) + } + AtomicToken::Error { .. } => { + return Err(ParseError::mismatch( + expected, + "eof atomic token".tagged(self.span), + )) + } + AtomicToken::Operator { .. } => { + return Err(ParseError::mismatch(expected, "operator".tagged(self.span))) + } + AtomicToken::ShorthandFlag { .. } => { + return Err(ParseError::mismatch( + expected, + "shorthand flag".tagged(self.span), + )) + } + AtomicToken::LonghandFlag { .. 
} => { + return Err(ParseError::mismatch(expected, "flag".tagged(self.span))) + } + AtomicToken::Whitespace { .. } => { + return Err(ParseError::mismatch( + expected, + "whitespace".tagged(self.span), + )) + } + AtomicToken::Dot { .. } => { + return Err(ParseError::mismatch(expected, "dot".tagged(self.span))) + } + AtomicToken::Number { number } => { + Expression::number(number.to_number(context.source), self.span) + } + AtomicToken::FilePath { path } => Expression::file_path( + expand_file_path(path.slice(context.source), context), + self.span, + ), + AtomicToken::Size { number, unit } => { + Expression::size(number.to_number(context.source), **unit, self.span) + } + AtomicToken::String { body } => Expression::string(*body, self.span), + AtomicToken::ItVariable { name } => Expression::it_variable(*name, self.span), + AtomicToken::Variable { name } => Expression::variable(*name, self.span), + AtomicToken::ExternalCommand { command } => { + Expression::external_command(*command, self.span) + } + AtomicToken::ExternalWord { text } => Expression::string(*text, self.span), + AtomicToken::GlobPattern { pattern } => Expression::pattern( + expand_file_path(pattern.slice(context.source), context).to_string_lossy(), + self.span, + ), + AtomicToken::Word { text } => Expression::string(*text, *text), + AtomicToken::SquareDelimited { .. } => unimplemented!("into_hir"), + AtomicToken::ParenDelimited { .. } => unimplemented!("into_hir"), + AtomicToken::BraceDelimited { .. } => unimplemented!("into_hir"), + AtomicToken::Pipeline { .. } => unimplemented!("into_hir"), + }) + } + + pub fn spanned_type_name(&self) -> Spanned<&'static str> { + match &self.item { + AtomicToken::Eof { .. } => "eof", + AtomicToken::Error { .. } => "error", + AtomicToken::Operator { .. } => "operator", + AtomicToken::ShorthandFlag { .. } => "shorthand flag", + AtomicToken::LonghandFlag { .. } => "flag", + AtomicToken::Whitespace { .. } => "whitespace", + AtomicToken::Dot { .. } => "dot", + AtomicToken::Number { .. } => "number", + AtomicToken::FilePath { .. } => "file path", + AtomicToken::Size { .. } => "size", + AtomicToken::String { .. } => "string", + AtomicToken::ItVariable { .. } => "$it", + AtomicToken::Variable { .. } => "variable", + AtomicToken::ExternalCommand { .. } => "external command", + AtomicToken::ExternalWord { .. } => "external word", + AtomicToken::GlobPattern { .. } => "file pattern", + AtomicToken::Word { .. } => "word", + AtomicToken::SquareDelimited { .. } => "array literal", + AtomicToken::ParenDelimited { .. } => "parenthesized expression", + AtomicToken::BraceDelimited { .. } => "block", + AtomicToken::Pipeline { .. } => "pipeline", + } + .spanned(self.span) + } + + pub fn tagged_type_name(&self) -> Tagged<&'static str> { + match &self.item { + AtomicToken::Eof { .. } => "eof", + AtomicToken::Error { .. } => "error", + AtomicToken::Operator { .. } => "operator", + AtomicToken::ShorthandFlag { .. } => "shorthand flag", + AtomicToken::LonghandFlag { .. } => "flag", + AtomicToken::Whitespace { .. } => "whitespace", + AtomicToken::Dot { .. } => "dot", + AtomicToken::Number { .. } => "number", + AtomicToken::FilePath { .. } => "file path", + AtomicToken::Size { .. } => "size", + AtomicToken::String { .. } => "string", + AtomicToken::ItVariable { .. } => "$it", + AtomicToken::Variable { .. } => "variable", + AtomicToken::ExternalCommand { .. } => "external command", + AtomicToken::ExternalWord { .. } => "external word", + AtomicToken::GlobPattern { .. } => "file pattern", + AtomicToken::Word { .. 
} => "word", + AtomicToken::SquareDelimited { .. } => "array literal", + AtomicToken::ParenDelimited { .. } => "parenthesized expression", + AtomicToken::BraceDelimited { .. } => "block", + AtomicToken::Pipeline { .. } => "pipeline", + } + .tagged(self.span) + } + + pub(crate) fn color_tokens(&self, shapes: &mut Vec>) { + match &self.item { + AtomicToken::Eof { .. } => {} + AtomicToken::Error { .. } => return shapes.push(FlatShape::Error.spanned(self.span)), + AtomicToken::Operator { .. } => { + return shapes.push(FlatShape::Operator.spanned(self.span)); + } + AtomicToken::ShorthandFlag { .. } => { + return shapes.push(FlatShape::ShorthandFlag.spanned(self.span)); + } + AtomicToken::LonghandFlag { .. } => { + return shapes.push(FlatShape::Flag.spanned(self.span)); + } + AtomicToken::Whitespace { .. } => { + return shapes.push(FlatShape::Whitespace.spanned(self.span)); + } + AtomicToken::FilePath { .. } => return shapes.push(FlatShape::Path.spanned(self.span)), + AtomicToken::Dot { .. } => return shapes.push(FlatShape::Dot.spanned(self.span)), + AtomicToken::Number { + number: RawNumber::Decimal(_), + } => { + return shapes.push(FlatShape::Decimal.spanned(self.span)); + } + AtomicToken::Number { + number: RawNumber::Int(_), + } => { + return shapes.push(FlatShape::Int.spanned(self.span)); + } + AtomicToken::Size { number, unit } => { + return shapes.push( + FlatShape::Size { + number: number.span, + unit: unit.span, + } + .spanned(self.span), + ); + } + AtomicToken::String { .. } => return shapes.push(FlatShape::String.spanned(self.span)), + AtomicToken::ItVariable { .. } => { + return shapes.push(FlatShape::ItVariable.spanned(self.span)) + } + AtomicToken::Variable { .. } => { + return shapes.push(FlatShape::Variable.spanned(self.span)) + } + AtomicToken::ExternalCommand { .. } => { + return shapes.push(FlatShape::ExternalCommand.spanned(self.span)); + } + AtomicToken::ExternalWord { .. } => { + return shapes.push(FlatShape::ExternalWord.spanned(self.span)) + } + AtomicToken::GlobPattern { .. } => { + return shapes.push(FlatShape::GlobPattern.spanned(self.span)) + } + AtomicToken::Word { .. } => return shapes.push(FlatShape::Word.spanned(self.span)), + _ => return shapes.push(FlatShape::Error.spanned(self.span)), + } + } +} + +#[derive(Debug)] +pub enum WhitespaceHandling { + #[allow(unused)] + AllowWhitespace, + RejectWhitespace, +} + +#[derive(Debug)] +pub struct ExpansionRule { + pub(crate) allow_external_command: bool, + pub(crate) allow_external_word: bool, + pub(crate) allow_operator: bool, + pub(crate) allow_eof: bool, + pub(crate) treat_size_as_word: bool, + pub(crate) commit_errors: bool, + pub(crate) whitespace: WhitespaceHandling, +} + +impl ExpansionRule { + pub fn new() -> ExpansionRule { + ExpansionRule { + allow_external_command: false, + allow_external_word: false, + allow_operator: false, + allow_eof: false, + treat_size_as_word: false, + commit_errors: false, + whitespace: WhitespaceHandling::RejectWhitespace, + } + } + + /// The intent of permissive mode is to return an atomic token for every possible + /// input token. This is important for error-correcting parsing, such as the + /// syntax highlighter. 
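Editor's note: `ExpansionRule::new` above and the chainable setters that follow use the consuming-builder idiom: each method takes the rule by value, flips one flag, and returns it, so a caller can assemble a rule inline, e.g. `ExpansionRule::new().allow_external_word().commit_errors()`. Here is a tiny stand-alone sketch of the same idiom; the `Rule` type is made up for illustration and is not the real `ExpansionRule`.

#[derive(Debug, Clone, Copy)]
struct Rule {
    allow_eof: bool,
    commit_errors: bool,
}

impl Rule {
    fn new() -> Rule {
        Rule {
            allow_eof: false,
            commit_errors: false,
        }
    }

    // Each setter consumes the rule, flips one flag, and hands the rule back,
    // which is what makes single-expression chaining possible.
    fn allow_eof(mut self) -> Rule {
        self.allow_eof = true;
        self
    }

    fn commit_errors(mut self) -> Rule {
        self.commit_errors = true;
        self
    }
}

fn main() {
    let strict = Rule::new();
    let permissive = Rule::new().allow_eof().commit_errors();

    assert!(!strict.allow_eof);
    assert!(permissive.allow_eof && permissive.commit_errors);
}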
+ pub fn permissive() -> ExpansionRule { + ExpansionRule { + allow_external_command: true, + allow_external_word: true, + allow_operator: true, + allow_eof: true, + treat_size_as_word: false, + commit_errors: true, + whitespace: WhitespaceHandling::AllowWhitespace, + } + } + + #[allow(unused)] + pub fn allow_external_command(mut self) -> ExpansionRule { + self.allow_external_command = true; + self + } + + #[allow(unused)] + pub fn allow_operator(mut self) -> ExpansionRule { + self.allow_operator = true; + self + } + + #[allow(unused)] + pub fn no_operator(mut self) -> ExpansionRule { + self.allow_operator = false; + self + } + + #[allow(unused)] + pub fn no_external_command(mut self) -> ExpansionRule { + self.allow_external_command = false; + self + } + + #[allow(unused)] + pub fn allow_external_word(mut self) -> ExpansionRule { + self.allow_external_word = true; + self + } + + #[allow(unused)] + pub fn no_external_word(mut self) -> ExpansionRule { + self.allow_external_word = false; + self + } + + #[allow(unused)] + pub fn treat_size_as_word(mut self) -> ExpansionRule { + self.treat_size_as_word = true; + self + } + + #[allow(unused)] + pub fn commit_errors(mut self) -> ExpansionRule { + self.commit_errors = true; + self + } + + #[allow(unused)] + pub fn allow_whitespace(mut self) -> ExpansionRule { + self.whitespace = WhitespaceHandling::AllowWhitespace; + self + } + + #[allow(unused)] + pub fn reject_whitespace(mut self) -> ExpansionRule { + self.whitespace = WhitespaceHandling::RejectWhitespace; + self + } +} + +/// If the caller of expand_atom throws away the returned atomic token returned, it +/// must use a checkpoint to roll it back. +pub fn expand_atom<'me, 'content>( + token_nodes: &'me mut TokensIterator<'content>, + expected: &'static str, + context: &ExpandContext, + rule: ExpansionRule, +) -> Result, ParseError> { + if token_nodes.at_end() { + match rule.allow_eof { + true => { + return Ok(AtomicToken::Eof { + span: Span::unknown(), + } + .spanned(Span::unknown())) + } + false => return Err(ParseError::unexpected_eof("anything", Span::unknown())), + } + } + + // First, we'll need to handle the situation where more than one token corresponds + // to a single atomic token + + // If treat_size_as_word, don't try to parse the head of the token stream + // as a size. + match rule.treat_size_as_word { + true => {} + false => match expand_syntax(&UnitShape, token_nodes, context) { + // If the head of the stream isn't a valid unit, we'll try to parse + // it again next as a word + Err(_) => {} + + // But if it was a valid unit, we're done here + Ok(Spanned { + item: (number, unit), + span, + }) => return Ok(AtomicToken::Size { number, unit }.spanned(span)), + }, + } + + // Try to parse the head of the stream as a bare path. A bare path includes + // words as well as `.`s, connected together without whitespace. + match expand_syntax(&BarePathShape, token_nodes, context) { + // If we didn't find a bare path + Err(_) => {} + Ok(span) => { + let next = token_nodes.peek_any(); + + match next.node { + Some(token) if token.is_pattern() => { + // if the very next token is a pattern, we're looking at a glob, not a + // word, and we should try to parse it as a glob next + } + + _ => return Ok(AtomicToken::Word { text: span }.spanned(span)), + } + } + } + + // Try to parse the head of the stream as a pattern. A pattern includes + // words, words with `*` as well as `.`s, connected together without whitespace. 
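Editor's note: `expand_atom` works through its candidates in priority order: a unit-suffixed size, then a bare word (unless the peeked token says it is really a glob), then a pattern, before falling back to single-token handling. The sketch below shows only that fall-through structure on plain strings; `Atom`, `try_size`, `try_glob`, and `classify` are invented helpers, and the real rules (peeking, `ExpansionRule` flags, spans) are richer than this.

#[derive(Debug, PartialEq)]
enum Atom<'a> {
    Size(&'a str),
    Glob(&'a str),
    Word(&'a str),
}

// "10kb"-style tokens: one or more digits followed only by letters.
fn try_size(text: &str) -> Option<Atom<'_>> {
    let digits = text.chars().take_while(|c| c.is_ascii_digit()).count();
    if digits > 0 && digits < text.len() && text[digits..].chars().all(|c| c.is_ascii_alphabetic()) {
        Some(Atom::Size(text))
    } else {
        None
    }
}

// Anything containing a `*` is treated as a glob pattern.
fn try_glob(text: &str) -> Option<Atom<'_>> {
    if text.contains('*') {
        Some(Atom::Glob(text))
    } else {
        None
    }
}

// Try the alternatives in priority order; the first success wins and the
// later alternatives are never consulted, much like the chain of matches above.
fn classify(text: &str) -> Atom<'_> {
    try_size(text)
        .or_else(|| try_glob(text))
        .unwrap_or(Atom::Word(text))
}

fn main() {
    assert_eq!(classify("10kb"), Atom::Size("10kb"));
    assert_eq!(classify("*.txt"), Atom::Glob("*.txt"));
    assert_eq!(classify("readme"), Atom::Word("readme"));
}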
+ match expand_syntax(&BarePatternShape, token_nodes, context) { + // If we didn't find a bare path + Err(_) => {} + Ok(span) => return Ok(AtomicToken::GlobPattern { pattern: span }.spanned(span)), + } + + // The next token corresponds to at most one atomic token + + // We need to `peek` because `parse_single_node` doesn't cover all of the + // cases that `expand_atom` covers. We should probably collapse the two + // if possible. + let peeked = token_nodes.peek_any().not_eof(expected)?; + + match peeked.node { + TokenNode::Token(_) => { + // handle this next + } + + TokenNode::Error(error) => { + peeked.commit(); + return Ok(AtomicToken::Error { + error: error.clone(), + } + .spanned(error.span)); + } + + // [ ... ] + TokenNode::Delimited(Spanned { + item: + DelimitedNode { + delimiter: Delimiter::Square, + spans, + children, + }, + span, + }) => { + peeked.commit(); + let span = *span; + return Ok(AtomicToken::SquareDelimited { + nodes: children, + spans: *spans, + } + .spanned(span)); + } + + TokenNode::Flag(Spanned { + item: + Flag { + kind: FlagKind::Shorthand, + name, + }, + span, + }) => { + peeked.commit(); + return Ok(AtomicToken::ShorthandFlag { name: *name }.spanned(*span)); + } + + TokenNode::Flag(Spanned { + item: + Flag { + kind: FlagKind::Longhand, + name, + }, + span, + }) => { + peeked.commit(); + return Ok(AtomicToken::ShorthandFlag { name: *name }.spanned(*span)); + } + + // If we see whitespace, process the whitespace according to the whitespace + // handling rules + TokenNode::Whitespace(span) => match rule.whitespace { + // if whitespace is allowed, return a whitespace token + WhitespaceHandling::AllowWhitespace => { + peeked.commit(); + return Ok(AtomicToken::Whitespace { text: *span }.spanned(*span)); + } + + // if whitespace is disallowed, return an error + WhitespaceHandling::RejectWhitespace => { + return Err(ParseError::mismatch( + expected, + "whitespace".tagged(Tag { + span: *span, + anchor: None, + }), + )) + } + }, + + other => { + let span = peeked.node.span(); + + peeked.commit(); + return Ok(AtomicToken::Error { + error: ShellError::type_error("token", other.tagged_type_name()).spanned(span), + } + .spanned(span)); + } + } + + parse_single_node(token_nodes, expected, |token, token_span, err| { + Ok(match token { + // First, the error cases. 
Each error case corresponds to a expansion rule + // flag that can be used to allow the case + + // rule.allow_operator + RawToken::Operator(_) if !rule.allow_operator => return Err(err.error()), + // rule.allow_external_command + RawToken::ExternalCommand(_) if !rule.allow_external_command => { + return Err(ParseError::mismatch( + expected, + token.type_name().tagged(Tag { + span: token_span, + anchor: None, + }), + )) + } + // rule.allow_external_word + RawToken::ExternalWord if !rule.allow_external_word => { + return Err(ParseError::mismatch( + expected, + "external word".tagged(Tag { + span: token_span, + anchor: None, + }), + )) + } + + RawToken::Number(number) => AtomicToken::Number { number }.spanned(token_span), + RawToken::Operator(_) => AtomicToken::Operator { text: token_span }.spanned(token_span), + RawToken::String(body) => AtomicToken::String { body }.spanned(token_span), + RawToken::Variable(name) if name.slice(context.source) == "it" => { + AtomicToken::ItVariable { name }.spanned(token_span) + } + RawToken::Variable(name) => AtomicToken::Variable { name }.spanned(token_span), + RawToken::ExternalCommand(command) => { + AtomicToken::ExternalCommand { command }.spanned(token_span) + } + RawToken::ExternalWord => { + AtomicToken::ExternalWord { text: token_span }.spanned(token_span) + } + RawToken::GlobPattern => AtomicToken::GlobPattern { + pattern: token_span, + } + .spanned(token_span), + RawToken::Bare => AtomicToken::Word { text: token_span }.spanned(token_span), + }) + }) +} diff --git a/src/parser/hir/syntax_shape/expression/delimited.rs b/src/parser/hir/syntax_shape/expression/delimited.rs new file mode 100644 index 0000000000..02b61e4730 --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/delimited.rs @@ -0,0 +1,87 @@ +use crate::parser::hir::syntax_shape::{ + color_syntax, expand_syntax, ColorSyntax, ExpandContext, ExpressionListShape, TokenNode, +}; +use crate::parser::{hir, hir::TokensIterator, Delimiter, FlatShape}; +use crate::prelude::*; + +pub fn expand_delimited_square( + children: &Vec, + span: Span, + context: &ExpandContext, +) -> Result { + let mut tokens = TokensIterator::new(&children, span, false); + + let list = expand_syntax(&ExpressionListShape, &mut tokens, context); + + Ok(hir::Expression::list( + list?.item, + Tag { span, anchor: None }, + )) +} + +#[cfg(not(coloring_in_tokens))] +pub fn color_delimited_square( + (open, close): (Span, Span), + children: &Vec, + span: Span, + context: &ExpandContext, + shapes: &mut Vec>, +) { + shapes.push(FlatShape::OpenDelimiter(Delimiter::Square).spanned(open)); + let mut tokens = TokensIterator::new(&children, span, false); + let _list = color_syntax(&ExpressionListShape, &mut tokens, context, shapes); + shapes.push(FlatShape::CloseDelimiter(Delimiter::Square).spanned(close)); +} + +#[cfg(coloring_in_tokens)] +pub fn color_delimited_square( + (open, close): (Span, Span), + token_nodes: &mut TokensIterator, + _span: Span, + context: &ExpandContext, +) { + token_nodes.color_shape(FlatShape::OpenDelimiter(Delimiter::Square).spanned(open)); + let _list = color_syntax(&ExpressionListShape, token_nodes, context); + token_nodes.color_shape(FlatShape::CloseDelimiter(Delimiter::Square).spanned(close)); +} + +#[derive(Debug, Copy, Clone)] +pub struct DelimitedShape; + +#[cfg(not(coloring_in_tokens))] +impl ColorSyntax for DelimitedShape { + type Info = (); + type Input = (Delimiter, Span, Span); + fn color_syntax<'a, 'b>( + &self, + (delimiter, open, close): &(Delimiter, Span, Span), + token_nodes: &'b mut 
TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Self::Info { + shapes.push(FlatShape::OpenDelimiter(*delimiter).spanned(*open)); + color_syntax(&ExpressionListShape, token_nodes, context, shapes); + shapes.push(FlatShape::CloseDelimiter(*delimiter).spanned(*close)); + } +} + +#[cfg(coloring_in_tokens)] +impl ColorSyntax for DelimitedShape { + type Info = (); + type Input = (Delimiter, Span, Span); + + fn name(&self) -> &'static str { + "DelimitedShape" + } + + fn color_syntax<'a, 'b>( + &self, + (delimiter, open, close): &(Delimiter, Span, Span), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Self::Info { + token_nodes.color_shape(FlatShape::OpenDelimiter(*delimiter).spanned(*open)); + color_syntax(&ExpressionListShape, token_nodes, context); + token_nodes.color_shape(FlatShape::CloseDelimiter(*delimiter).spanned(*close)); + } +} diff --git a/src/parser/hir/syntax_shape/expression/file_path.rs b/src/parser/hir/syntax_shape/expression/file_path.rs new file mode 100644 index 0000000000..4b7caf9f3e --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/file_path.rs @@ -0,0 +1,119 @@ +use crate::parser::hir::syntax_shape::expression::atom::{expand_atom, AtomicToken, ExpansionRule}; +use crate::parser::hir::syntax_shape::{ + expression::expand_file_path, ExpandContext, ExpandExpression, FallibleColorSyntax, FlatShape, + ParseError, +}; +use crate::parser::{hir, hir::TokensIterator}; +use crate::prelude::*; + +#[derive(Debug, Copy, Clone)] +pub struct FilePathShape; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for FilePathShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let atom = expand_atom( + token_nodes, + "file path", + context, + ExpansionRule::permissive(), + ); + + let atom = match atom { + Err(_) => return Ok(()), + Ok(atom) => atom, + }; + + match atom.item { + AtomicToken::Word { .. } + | AtomicToken::String { .. } + | AtomicToken::Number { .. } + | AtomicToken::Size { .. } => { + shapes.push(FlatShape::Path.spanned(atom.span)); + } + + _ => atom.color_tokens(shapes), + } + + Ok(()) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for FilePathShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "FilePathShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let atom = expand_atom( + token_nodes, + "file path", + context, + ExpansionRule::permissive(), + ); + + let atom = match atom { + Err(_) => return Ok(()), + Ok(atom) => atom, + }; + + match atom.item { + AtomicToken::Word { .. } + | AtomicToken::String { .. } + | AtomicToken::Number { .. } + | AtomicToken::Size { .. 
} => { + token_nodes.color_shape(FlatShape::Path.spanned(atom.span)); + } + + _ => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)), + } + + Ok(()) + } +} + +impl ExpandExpression for FilePathShape { + fn name(&self) -> &'static str { + "file path" + } + + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + let atom = expand_atom(token_nodes, "file path", context, ExpansionRule::new())?; + + match atom.item { + AtomicToken::Word { text: body } | AtomicToken::String { body } => { + let path = expand_file_path(body.slice(context.source), context); + return Ok(hir::Expression::file_path(path, atom.span)); + } + + AtomicToken::Number { .. } | AtomicToken::Size { .. } => { + let path = atom.span.slice(context.source); + return Ok(hir::Expression::file_path(path, atom.span)); + } + + _ => return atom.into_hir(context, "file path"), + } + } +} diff --git a/src/parser/hir/syntax_shape/expression/list.rs b/src/parser/hir/syntax_shape/expression/list.rs new file mode 100644 index 0000000000..fa6b5864a1 --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/list.rs @@ -0,0 +1,327 @@ +use crate::errors::ParseError; +#[cfg(not(coloring_in_tokens))] +use crate::parser::hir::syntax_shape::FlatShape; +use crate::parser::{ + hir, + hir::syntax_shape::{ + color_fallible_syntax, color_syntax, expand_atom, expand_expr, maybe_spaced, spaced, + AnyExpressionShape, ColorSyntax, ExpandContext, ExpandSyntax, ExpansionRule, + MaybeSpaceShape, SpaceShape, + }, + hir::TokensIterator, +}; +use crate::{DebugFormatter, FormatDebug, Spanned, SpannedItem}; +use std::fmt; + +#[derive(Debug, Copy, Clone)] +pub struct ExpressionListShape; + +impl FormatDebug for Spanned> { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + FormatDebug::fmt_debug(&self.item, f, source) + } +} + +impl ExpandSyntax for ExpressionListShape { + type Output = Spanned>; + + fn name(&self) -> &'static str { + "expression list" + } + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result>, ParseError> { + let mut exprs = vec![]; + + let start = token_nodes.span_at_cursor(); + + if token_nodes.at_end_possible_ws() { + return Ok(exprs.spanned(start)); + } + + let expr = expand_expr(&maybe_spaced(AnyExpressionShape), token_nodes, context)?; + + exprs.push(expr); + + loop { + if token_nodes.at_end_possible_ws() { + let end = token_nodes.span_at_cursor(); + return Ok(exprs.spanned(start.until(end))); + } + + let expr = expand_expr(&spaced(AnyExpressionShape), token_nodes, context)?; + + exprs.push(expr); + } + } +} + +#[cfg(not(coloring_in_tokens))] +impl ColorSyntax for ExpressionListShape { + type Info = (); + type Input = (); + + /// The intent of this method is to fully color an expression list shape infallibly. + /// This means that if we can't expand a token into an expression, we fall back to + /// a simpler coloring strategy. + /// + /// This would apply to something like `where x >`, which includes an incomplete + /// binary operator. Since we will fail to process it as a binary operator, we'll + /// fall back to a simpler coloring and move on. 
+ fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) { + // We encountered a parsing error and will continue with simpler coloring ("backoff + // coloring mode") + let mut backoff = false; + + // Consume any leading whitespace + color_syntax(&MaybeSpaceShape, token_nodes, context, shapes); + + loop { + // If we reached the very end of the token stream, we're done + if token_nodes.at_end() { + return; + } + + if backoff { + let len = shapes.len(); + + // If we previously encountered a parsing error, use backoff coloring mode + color_syntax(&SimplestExpression, token_nodes, context, shapes); + + if len == shapes.len() && !token_nodes.at_end() { + // This should never happen, but if it does, a panic is better than an infinite loop + panic!("Unexpected tokens left that couldn't be colored even with SimplestExpression") + } + } else { + // Try to color the head of the stream as an expression + match color_fallible_syntax(&AnyExpressionShape, token_nodes, context, shapes) { + // If no expression was found, switch to backoff coloring mode + Err(_) => { + backoff = true; + continue; + } + Ok(_) => {} + } + + // If an expression was found, consume a space + match color_fallible_syntax(&SpaceShape, token_nodes, context, shapes) { + Err(_) => { + // If no space was found, we're either at the end or there's an error. + // Either way, switch to backoff coloring mode. If we're at the end + // it won't have any consequences. + backoff = true; + } + Ok(_) => { + // Otherwise, move on to the next expression + } + } + } + } + } +} + +#[cfg(coloring_in_tokens)] +impl ColorSyntax for ExpressionListShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "ExpressionListShape" + } + + /// The intent of this method is to fully color an expression list shape infallibly. + /// This means that if we can't expand a token into an expression, we fall back to + /// a simpler coloring strategy. + /// + /// This would apply to something like `where x >`, which includes an incomplete + /// binary operator. Since we will fail to process it as a binary operator, we'll + /// fall back to a simpler coloring and move on. 
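Editor's note: both variants of this method implement the same "backoff" strategy: color greedily as full expressions until a parse fails, then drop permanently into the infallible `SimplestExpression` fallback so every remaining token still gets some shape. The model below is deliberately tiny and invented (plain strings instead of `TokensIterator`, a two-variant `Shape` instead of `FlatShape`), and it omits the real code's space handling; it only shows the mode switch.

#[derive(Debug, PartialEq)]
enum Shape {
    Expression,
    Garbage,
}

// A toy "expression" parser: succeeds only for purely alphabetic tokens.
fn color_expression(token: &str, shapes: &mut Vec<Shape>) -> Result<(), ()> {
    if !token.is_empty() && token.chars().all(|c| c.is_ascii_alphabetic()) {
        shapes.push(Shape::Expression);
        Ok(())
    } else {
        Err(())
    }
}

// The infallible fallback: always emits a shape, so the loop always makes progress.
fn color_simplest(_token: &str, shapes: &mut Vec<Shape>) {
    shapes.push(Shape::Garbage);
}

fn color_list(tokens: &[&str]) -> Vec<Shape> {
    let mut shapes = vec![];
    let mut backoff = false;

    for &token in tokens {
        if backoff {
            // Once one parse has failed, stay in backoff mode for the rest of the list.
            color_simplest(token, &mut shapes);
        } else if color_expression(token, &mut shapes).is_err() {
            // Switch modes and still consume the token that caused the failure.
            backoff = true;
            color_simplest(token, &mut shapes);
        }
    }

    shapes
}

fn main() {
    // A `where x >`-style input: the dangling operator flips us into backoff mode.
    let shapes = color_list(&["where", "x", ">", "10"]);
    assert_eq!(
        shapes,
        vec![
            Shape::Expression,
            Shape::Expression,
            Shape::Garbage,
            Shape::Garbage
        ]
    );
}

In the real implementation, progress is checked by comparing the length of the shape list (`shapes.len()` or `token_nodes.state().shapes().len()`) before and after a backoff pass, and panicking if nothing was consumed, which is what prevents an infinite loop.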
+ fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) { + // We encountered a parsing error and will continue with simpler coloring ("backoff + // coloring mode") + let mut backoff = false; + + // Consume any leading whitespace + color_syntax(&MaybeSpaceShape, token_nodes, context); + + loop { + // If we reached the very end of the token stream, we're done + if token_nodes.at_end() { + return; + } + + if backoff { + let len = token_nodes.state().shapes().len(); + + // If we previously encountered a parsing error, use backoff coloring mode + color_syntax(&SimplestExpression, token_nodes, context); + + if len == token_nodes.state().shapes().len() && !token_nodes.at_end() { + // This should never happen, but if it does, a panic is better than an infinite loop + panic!("Unexpected tokens left that couldn't be colored even with SimplestExpression") + } + } else { + // Try to color the head of the stream as an expression + match color_fallible_syntax(&AnyExpressionShape, token_nodes, context) { + // If no expression was found, switch to backoff coloring mode + Err(_) => { + backoff = true; + continue; + } + Ok(_) => {} + } + + // If an expression was found, consume a space + match color_fallible_syntax(&SpaceShape, token_nodes, context) { + Err(_) => { + // If no space was found, we're either at the end or there's an error. + // Either way, switch to backoff coloring mode. If we're at the end + // it won't have any consequences. + backoff = true; + } + Ok(_) => { + // Otherwise, move on to the next expression + } + } + } + } + } +} + +/// BackoffColoringMode consumes all of the remaining tokens in an infallible way +#[derive(Debug, Copy, Clone)] +pub struct BackoffColoringMode; + +#[cfg(not(coloring_in_tokens))] +impl ColorSyntax for BackoffColoringMode { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &Self::Input, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Self::Info { + loop { + if token_nodes.at_end() { + break; + } + + let len = shapes.len(); + color_syntax(&SimplestExpression, token_nodes, context, shapes); + + if len == shapes.len() && !token_nodes.at_end() { + // This shouldn't happen, but if it does, a panic is better than an infinite loop + panic!("SimplestExpression failed to consume any tokens, but it's not at the end. This is unexpected\n== token nodes==\n{:#?}\n\n== shapes ==\n{:#?}", token_nodes, shapes); + } + } + } +} + +#[cfg(coloring_in_tokens)] +impl ColorSyntax for BackoffColoringMode { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "BackoffColoringMode" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &Self::Input, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Self::Info { + loop { + if token_nodes.at_end() { + break; + } + + let len = token_nodes.state().shapes().len(); + color_syntax(&SimplestExpression, token_nodes, context); + + if len == token_nodes.state().shapes().len() && !token_nodes.at_end() { + // This shouldn't happen, but if it does, a panic is better than an infinite loop + panic!("SimplestExpression failed to consume any tokens, but it's not at the end. This is unexpected\n== token nodes==\n{:#?}\n\n== shapes ==\n{:#?}", token_nodes, token_nodes.state().shapes()); + } + } + } +} + +/// The point of `SimplestExpression` is to serve as an infallible base case for coloring. 
+/// As a last ditch effort, if we can't find any way to parse the head of the stream as an +/// expression, fall back to simple coloring. +#[derive(Debug, Copy, Clone)] +pub struct SimplestExpression; + +#[cfg(not(coloring_in_tokens))] +impl ColorSyntax for SimplestExpression { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) { + let atom = expand_atom( + token_nodes, + "any token", + context, + ExpansionRule::permissive(), + ); + + match atom { + Err(_) => {} + Ok(atom) => atom.color_tokens(shapes), + } + } +} + +#[cfg(coloring_in_tokens)] +impl ColorSyntax for SimplestExpression { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "SimplestExpression" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) { + let atom = expand_atom( + token_nodes, + "any token", + context, + ExpansionRule::permissive(), + ); + + match atom { + Err(_) => {} + Ok(atom) => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)), + } + } +} diff --git a/src/parser/hir/syntax_shape/expression/number.rs b/src/parser/hir/syntax_shape/expression/number.rs new file mode 100644 index 0000000000..492a29202b --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/number.rs @@ -0,0 +1,230 @@ +use crate::parser::hir::syntax_shape::{ + expand_atom, parse_single_node, ExpandContext, ExpandExpression, ExpansionRule, + FallibleColorSyntax, FlatShape, ParseError, TestSyntax, +}; +use crate::parser::hir::tokens_iterator::Peeked; +use crate::parser::{ + hir, + hir::{RawNumber, TokensIterator}, + RawToken, +}; +use crate::prelude::*; + +#[derive(Debug, Copy, Clone)] +pub struct NumberShape; + +impl ExpandExpression for NumberShape { + fn name(&self) -> &'static str { + "number" + } + + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + parse_single_node(token_nodes, "Number", |token, token_span, err| { + Ok(match token { + RawToken::GlobPattern | RawToken::Operator(..) => return Err(err.error()), + RawToken::Variable(tag) if tag.slice(context.source) == "it" => { + hir::Expression::it_variable(tag, token_span) + } + RawToken::ExternalCommand(tag) => { + hir::Expression::external_command(tag, token_span) + } + RawToken::ExternalWord => { + return Err(ParseError::mismatch( + "number", + "syntax error".tagged(Tag { + span: token_span, + anchor: None, + }), + )) + } + RawToken::Variable(tag) => hir::Expression::variable(tag, token_span), + RawToken::Number(number) => { + hir::Expression::number(number.to_number(context.source), token_span) + } + RawToken::Bare => hir::Expression::bare(token_span), + RawToken::String(tag) => hir::Expression::string(tag, token_span), + }) + }) + } +} + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for NumberShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let atom = token_nodes.spanned(|token_nodes| { + expand_atom(token_nodes, "number", context, ExpansionRule::permissive()) + }); + + let atom = match atom { + Spanned { item: Err(_), span } => { + shapes.push(FlatShape::Error.spanned(span)); + return Ok(()); + } + Spanned { item: Ok(atom), .. 
} => atom, + }; + + atom.color_tokens(shapes); + + Ok(()) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for NumberShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "NumberShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let atom = token_nodes.spanned(|token_nodes| { + expand_atom(token_nodes, "number", context, ExpansionRule::permissive()) + }); + + let atom = match atom { + Spanned { item: Err(_), span } => { + token_nodes.color_shape(FlatShape::Error.spanned(span)); + return Ok(()); + } + Spanned { item: Ok(atom), .. } => atom, + }; + + token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)); + + Ok(()) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct IntShape; + +impl ExpandExpression for IntShape { + fn name(&self) -> &'static str { + "integer" + } + + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + parse_single_node(token_nodes, "Integer", |token, token_span, err| { + Ok(match token { + RawToken::GlobPattern | RawToken::Operator(..) | RawToken::ExternalWord => { + return Err(err.error()) + } + RawToken::Variable(span) if span.slice(context.source) == "it" => { + hir::Expression::it_variable(span, token_span) + } + RawToken::ExternalCommand(span) => { + hir::Expression::external_command(span, token_span) + } + RawToken::Variable(span) => hir::Expression::variable(span, token_span), + RawToken::Number(number @ RawNumber::Int(_)) => { + hir::Expression::number(number.to_number(context.source), token_span) + } + RawToken::Number(_) => return Err(err.error()), + RawToken::Bare => hir::Expression::bare(token_span), + RawToken::String(span) => hir::Expression::string(span, token_span), + }) + }) + } +} + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for IntShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let atom = token_nodes.spanned(|token_nodes| { + expand_atom(token_nodes, "integer", context, ExpansionRule::permissive()) + }); + + let atom = match atom { + Spanned { item: Err(_), span } => { + shapes.push(FlatShape::Error.spanned(span)); + return Ok(()); + } + Spanned { item: Ok(atom), .. } => atom, + }; + + atom.color_tokens(shapes); + + Ok(()) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for IntShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "IntShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let atom = token_nodes.spanned(|token_nodes| { + expand_atom(token_nodes, "integer", context, ExpansionRule::permissive()) + }); + + let atom = match atom { + Spanned { item: Err(_), span } => { + token_nodes.color_shape(FlatShape::Error.spanned(span)); + return Ok(()); + } + Spanned { item: Ok(atom), .. 
} => atom, + }; + + token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)); + + Ok(()) + } +} + +impl TestSyntax for NumberShape { + fn test<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Option> { + let peeked = token_nodes.peek_any(); + + match peeked.node { + Some(token) if token.is_number() => Some(peeked), + _ => None, + } + } +} diff --git a/src/parser/hir/syntax_shape/expression/pattern.rs b/src/parser/hir/syntax_shape/expression/pattern.rs new file mode 100644 index 0000000000..2ccd5a4f0d --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/pattern.rs @@ -0,0 +1,124 @@ +use crate::parser::hir::syntax_shape::{ + expand_atom, expand_bare, expression::expand_file_path, AtomicToken, ExpandContext, + ExpandExpression, ExpandSyntax, ExpansionRule, FallibleColorSyntax, FlatShape, ParseError, +}; +use crate::parser::{hir, hir::TokensIterator, Operator, RawToken, TokenNode}; +use crate::prelude::*; + +#[derive(Debug, Copy, Clone)] +pub struct PatternShape; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for PatternShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + token_nodes.atomic(|token_nodes| { + let atom = expand_atom(token_nodes, "pattern", context, ExpansionRule::permissive())?; + + match &atom.item { + AtomicToken::GlobPattern { .. } | AtomicToken::Word { .. } => { + shapes.push(FlatShape::GlobPattern.spanned(atom.span)); + Ok(()) + } + + _ => Err(ShellError::type_error("pattern", atom.tagged_type_name())), + } + }) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for PatternShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "PatternShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + token_nodes.atomic(|token_nodes| { + let atom = expand_atom(token_nodes, "pattern", context, ExpansionRule::permissive())?; + + match &atom.item { + AtomicToken::GlobPattern { .. } | AtomicToken::Word { .. } => { + token_nodes.color_shape(FlatShape::GlobPattern.spanned(atom.span)); + Ok(()) + } + + _ => Err(ShellError::type_error("pattern", atom.tagged_type_name())), + } + }) + } +} + +impl ExpandExpression for PatternShape { + fn name(&self) -> &'static str { + "glob pattern" + } + + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + let atom = expand_atom(token_nodes, "pattern", context, ExpansionRule::new())?; + + match atom.item { + AtomicToken::Word { text: body } + | AtomicToken::String { body } + | AtomicToken::GlobPattern { pattern: body } => { + let path = expand_file_path(body.slice(context.source), context); + return Ok(hir::Expression::pattern(path.to_string_lossy(), atom.span)); + } + _ => return atom.into_hir(context, "pattern"), + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct BarePatternShape; + +impl ExpandSyntax for BarePatternShape { + type Output = Span; + + fn name(&self) -> &'static str { + "bare pattern" + } + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + expand_bare(token_nodes, context, |token| match token { + TokenNode::Token(Spanned { + item: RawToken::Bare, + .. 
+ }) + | TokenNode::Token(Spanned { + item: RawToken::Operator(Operator::Dot), + .. + }) + | TokenNode::Token(Spanned { + item: RawToken::GlobPattern, + .. + }) => true, + + _ => false, + }) + } +} diff --git a/src/parser/hir/syntax_shape/expression/string.rs b/src/parser/hir/syntax_shape/expression/string.rs new file mode 100644 index 0000000000..454cb9f46d --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/string.rs @@ -0,0 +1,117 @@ +use crate::parser::hir::syntax_shape::{ + expand_atom, expand_variable, parse_single_node, AtomicToken, ExpandContext, ExpandExpression, + ExpansionRule, FallibleColorSyntax, FlatShape, ParseError, TestSyntax, +}; +use crate::parser::hir::tokens_iterator::Peeked; +use crate::parser::{hir, hir::TokensIterator, RawToken}; +use crate::prelude::*; + +#[derive(Debug, Copy, Clone)] +pub struct StringShape; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for StringShape { + type Info = (); + type Input = FlatShape; + + fn color_syntax<'a, 'b>( + &self, + input: &FlatShape, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let atom = expand_atom(token_nodes, "string", context, ExpansionRule::permissive()); + + let atom = match atom { + Err(_) => return Ok(()), + Ok(atom) => atom, + }; + + match atom { + Spanned { + item: AtomicToken::String { .. }, + span, + } => shapes.push((*input).spanned(span)), + other => other.color_tokens(shapes), + } + + Ok(()) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for StringShape { + type Info = (); + type Input = FlatShape; + + fn name(&self) -> &'static str { + "StringShape" + } + + fn color_syntax<'a, 'b>( + &self, + input: &FlatShape, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let atom = expand_atom(token_nodes, "string", context, ExpansionRule::permissive()); + + let atom = match atom { + Err(_) => return Ok(()), + Ok(atom) => atom, + }; + + match atom { + Spanned { + item: AtomicToken::String { .. }, + span, + } => token_nodes.color_shape((*input).spanned(span)), + atom => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)), + } + + Ok(()) + } +} + +impl ExpandExpression for StringShape { + fn name(&self) -> &'static str { + "string" + } + + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + parse_single_node(token_nodes, "String", |token, token_span, err| { + Ok(match token { + RawToken::GlobPattern | RawToken::Operator(..) 
| RawToken::ExternalWord => { + return Err(err.error()) + } + RawToken::Variable(span) => expand_variable(span, token_span, &context.source), + RawToken::ExternalCommand(span) => { + hir::Expression::external_command(span, token_span) + } + RawToken::Number(_) => hir::Expression::bare(token_span), + RawToken::Bare => hir::Expression::bare(token_span), + RawToken::String(span) => hir::Expression::string(span, token_span), + }) + }) + } +} + +impl TestSyntax for StringShape { + fn test<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Option> { + let peeked = token_nodes.peek_any(); + + match peeked.node { + Some(token) if token.is_string() => Some(peeked), + _ => None, + } + } +} diff --git a/src/parser/hir/syntax_shape/expression/unit.rs b/src/parser/hir/syntax_shape/expression/unit.rs new file mode 100644 index 0000000000..901b86e8d6 --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/unit.rs @@ -0,0 +1,110 @@ +use crate::data::meta::Span; +use crate::parser::hir::syntax_shape::{ExpandContext, ExpandSyntax, ParseError}; +use crate::parser::parse::tokens::RawNumber; +use crate::parser::parse::unit::Unit; +use crate::parser::{hir::TokensIterator, RawToken, TokenNode}; +use crate::prelude::*; +use nom::branch::alt; +use nom::bytes::complete::tag; +use nom::character::complete::digit1; +use nom::combinator::{all_consuming, opt, value}; +use nom::IResult; +use std::fmt; + +#[derive(Debug, Copy, Clone)] +pub struct UnitShape; + +impl FormatDebug for Spanned<(Spanned, Spanned)> { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + let dict = indexmap::indexmap! { + "number" => format!("{}", self.item.0.item.debug(source)), + "unit" => format!("{}", self.item.1.debug(source)), + }; + + f.say_dict("unit", dict) + } +} + +impl ExpandSyntax for UnitShape { + type Output = Spanned<(Spanned, Spanned)>; + + fn name(&self) -> &'static str { + "unit" + } + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result, Spanned)>, ParseError> { + let peeked = token_nodes.peek_any().not_eof("unit")?; + + let span = match peeked.node { + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => span, + _ => return Err(peeked.type_error("unit")), + }; + + let unit = unit_size(span.slice(context.source), *span); + + let (_, (number, unit)) = match unit { + Err(_) => return Err(ParseError::mismatch("unit", "word".tagged(Tag::unknown()))), + Ok((number, unit)) => (number, unit), + }; + + peeked.commit(); + Ok((number, unit).spanned(*span)) + } +} + +fn unit_size(input: &str, bare_span: Span) -> IResult<&str, (Spanned, Spanned)> { + let (input, digits) = digit1(input)?; + + let (input, dot) = opt(tag("."))(input)?; + + let (input, number) = match dot { + Some(dot) => { + let (input, rest) = digit1(input)?; + ( + input, + RawNumber::decimal(Span::new( + bare_span.start(), + bare_span.start() + digits.len() + dot.len() + rest.len(), + )), + ) + } + + None => ( + input, + RawNumber::int(Span::new( + bare_span.start(), + bare_span.start() + digits.len(), + )), + ), + }; + + let (input, unit) = all_consuming(alt(( + value(Unit::Byte, alt((tag("B"), tag("b")))), + value(Unit::Kilobyte, alt((tag("KB"), tag("kb"), tag("Kb")))), + value(Unit::Megabyte, alt((tag("MB"), tag("mb"), tag("Mb")))), + value(Unit::Gigabyte, alt((tag("GB"), tag("gb"), tag("Gb")))), + value(Unit::Terabyte, alt((tag("TB"), tag("tb"), tag("Tb")))), + value(Unit::Petabyte, alt((tag("PB"), tag("pb"), 
tag("Pb")))), + value(Unit::Second, tag("s")), + value(Unit::Minute, tag("m")), + value(Unit::Hour, tag("h")), + value(Unit::Day, tag("d")), + value(Unit::Week, tag("w")), + value(Unit::Month, tag("M")), + value(Unit::Year, tag("y")), + )))(input)?; + + let start_span = number.span.end(); + + Ok(( + input, + (number, unit.spanned(Span::new(start_span, bare_span.end()))), + )) +} diff --git a/src/parser/hir/syntax_shape/expression/variable_path.rs b/src/parser/hir/syntax_shape/expression/variable_path.rs new file mode 100644 index 0000000000..f6b4c1931c --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/variable_path.rs @@ -0,0 +1,1197 @@ +use crate::parser::hir::syntax_shape::{ + color_fallible_syntax, color_fallible_syntax_with, expand_atom, expand_expr, expand_syntax, + parse_single_node, AnyExpressionShape, AtomicToken, BareShape, ExpandContext, ExpandExpression, + ExpandSyntax, ExpansionRule, FallibleColorSyntax, FlatShape, ParseError, Peeked, SkipSyntax, + StringShape, TestSyntax, WhitespaceShape, +}; +use crate::parser::{hir, hir::Expression, hir::TokensIterator, Operator, RawToken}; +use crate::prelude::*; +use derive_new::new; +use getset::Getters; +use std::fmt; + +#[derive(Debug, Copy, Clone)] +pub struct VariablePathShape; + +impl ExpandExpression for VariablePathShape { + fn name(&self) -> &'static str { + "variable path" + } + + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + // 1. let the head be the first token, expecting a variable + // 2. let the tail be an empty list of members + // 2. while the next token (excluding ws) is a dot: + // 1. consume the dot + // 2. consume the next token as a member and push it onto tail + + let head = expand_expr(&VariableShape, token_nodes, context)?; + let start = head.span; + let mut end = start; + let mut tail: Vec> = vec![]; + + loop { + match DotShape.skip(token_nodes, context) { + Err(_) => break, + Ok(_) => {} + } + + let syntax = expand_syntax(&MemberShape, token_nodes, context)?; + let member = syntax.to_spanned_string(context.source); + + end = member.span; + tail.push(member); + } + + Ok(hir::Expression::path(head, tail, start.until(end))) + } +} + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for VariablePathShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + token_nodes.atomic(|token_nodes| { + // If the head of the token stream is not a variable, fail + color_fallible_syntax(&VariableShape, token_nodes, context, shapes)?; + + loop { + // look for a dot at the head of a stream + let dot = color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Dot, + token_nodes, + context, + shapes, + ); + + // if there's no dot, we're done + match dot { + Err(_) => break, + Ok(_) => {} + } + + // otherwise, look for a member, and if you don't find one, fail + color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?; + } + + Ok(()) + }) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for VariablePathShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "VariablePathShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + token_nodes.atomic(|token_nodes| { + // If the head of the token stream is not a variable, fail 
+ color_fallible_syntax(&VariableShape, token_nodes, context)?; + + loop { + // look for a dot at the head of a stream + let dot = color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Dot, + token_nodes, + context, + ); + + // if there's no dot, we're done + match dot { + Err(_) => break, + Ok(_) => {} + } + + // otherwise, look for a member, and if you don't find one, fail + color_fallible_syntax(&MemberShape, token_nodes, context)?; + } + + Ok(()) + }) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct PathTailShape; + +#[cfg(not(coloring_in_tokens))] +/// The failure mode of `PathTailShape` is a dot followed by a non-member +impl FallibleColorSyntax for PathTailShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + token_nodes.atomic(|token_nodes| loop { + let result = color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Dot, + token_nodes, + context, + shapes, + ); + + match result { + Err(_) => return Ok(()), + Ok(_) => {} + } + + // If we've seen a dot but not a member, fail + color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?; + }) + } +} + +#[cfg(coloring_in_tokens)] +/// The failure mode of `PathTailShape` is a dot followed by a non-member +impl FallibleColorSyntax for PathTailShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "PathTailShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + token_nodes.atomic(|token_nodes| loop { + let result = color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Dot, + token_nodes, + context, + ); + + match result { + Err(_) => return Ok(()), + Ok(_) => {} + } + + // If we've seen a dot but not a member, fail + color_fallible_syntax(&MemberShape, token_nodes, context)?; + }) + } +} + +impl ExpandSyntax for PathTailShape { + type Output = Spanned>>; + + fn name(&self) -> &'static str { + "path continuation" + } + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + let mut end: Option = None; + let mut tail = vec![]; + + loop { + match DotShape.skip(token_nodes, context) { + Err(_) => break, + Ok(_) => {} + } + + let syntax = expand_syntax(&MemberShape, token_nodes, context)?; + let member = syntax.to_spanned_string(context.source); + end = Some(member.span); + tail.push(member); + } + + match end { + None => { + return Err(ParseError::mismatch("path tail", { + let typed_span = token_nodes.typed_span_at_cursor(); + + Tagged { + tag: typed_span.span.into(), + item: typed_span.item, + } + })) + } + + Some(end) => Ok(tail.spanned(end)), + } + } +} + +#[derive(Debug, Clone)] +pub enum ExpressionContinuation { + DotSuffix(Span, Spanned), + InfixSuffix(Spanned, Expression), +} + +impl FormatDebug for ExpressionContinuation { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + match self { + ExpressionContinuation::DotSuffix(dot, rest) => { + f.say_str("dot suffix", dot.until(rest.span).slice(source)) + } + ExpressionContinuation::InfixSuffix(operator, expr) => { + f.say_str("infix suffix", operator.span.until(expr.span).slice(source)) + } + } + } +} + +impl HasSpan for ExpressionContinuation { + fn span(&self) -> Span { + match self { + ExpressionContinuation::DotSuffix(dot, column) => 
dot.until(column.span), + ExpressionContinuation::InfixSuffix(operator, expression) => { + operator.span.until(expression.span) + } + } + } +} + +/// An expression continuation +#[derive(Debug, Copy, Clone)] +pub struct ExpressionContinuationShape; + +impl ExpandSyntax for ExpressionContinuationShape { + type Output = ExpressionContinuation; + + fn name(&self) -> &'static str { + "expression continuation" + } + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + // Try to expand a `.` + let dot = expand_syntax(&DotShape, token_nodes, context); + + match dot { + // If a `.` was matched, it's a `Path`, and we expect a `Member` next + Ok(dot) => { + let syntax = expand_syntax(&MemberShape, token_nodes, context)?; + let member = syntax.to_spanned_string(context.source); + + Ok(ExpressionContinuation::DotSuffix(dot, member)) + } + + // Otherwise, we expect an infix operator and an expression next + Err(_) => { + let (_, op, _) = expand_syntax(&InfixShape, token_nodes, context)?.item; + let next = expand_expr(&AnyExpressionShape, token_nodes, context)?; + + Ok(ExpressionContinuation::InfixSuffix(op, next)) + } + } + } +} + +pub enum ContinuationInfo { + Dot, + Infix, +} + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for ExpressionContinuationShape { + type Info = ContinuationInfo; + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result { + token_nodes.atomic(|token_nodes| { + // Try to expand a `.` + let dot = color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Dot, + token_nodes, + context, + shapes, + ); + + match dot { + Ok(_) => { + // we found a dot, so let's keep looking for a member; if no member was found, fail + color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?; + + Ok(ContinuationInfo::Dot) + } + Err(_) => { + let mut new_shapes = vec![]; + let result = token_nodes.atomic(|token_nodes| { + // we didn't find a dot, so let's see if we're looking at an infix. If not found, fail + color_fallible_syntax(&InfixShape, token_nodes, context, &mut new_shapes)?; + + // now that we've seen an infix shape, look for any expression. If not found, fail + color_fallible_syntax( + &AnyExpressionShape, + token_nodes, + context, + &mut new_shapes, + )?; + + Ok(ContinuationInfo::Infix) + })?; + shapes.extend(new_shapes); + Ok(result) + } + } + }) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for ExpressionContinuationShape { + type Info = ContinuationInfo; + type Input = (); + + fn name(&self) -> &'static str { + "ExpressionContinuationShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + token_nodes.atomic(|token_nodes| { + // Try to expand a `.` + let dot = color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Dot, + token_nodes, + context, + ); + + match dot { + Ok(_) => { + // we found a dot, so let's keep looking for a member; if no member was found, fail + color_fallible_syntax(&MemberShape, token_nodes, context)?; + + Ok(ContinuationInfo::Dot) + } + Err(_) => { + let result = token_nodes.atomic(|token_nodes| { + // we didn't find a dot, so let's see if we're looking at an infix. If not found, fail + color_fallible_syntax(&InfixShape, token_nodes, context)?; + + // now that we've seen an infix shape, look for any expression. 
If not found, fail + color_fallible_syntax(&AnyExpressionShape, token_nodes, context)?; + + Ok(ContinuationInfo::Infix) + })?; + + Ok(result) + } + } + }) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct VariableShape; + +impl ExpandExpression for VariableShape { + fn name(&self) -> &'static str { + "variable" + } + + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + parse_single_node(token_nodes, "variable", |token, token_tag, err| { + Ok(match token { + RawToken::Variable(tag) => { + if tag.slice(context.source) == "it" { + hir::Expression::it_variable(tag, token_tag) + } else { + hir::Expression::variable(tag, token_tag) + } + } + _ => return Err(err.error()), + }) + }) + } +} + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for VariableShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let atom = expand_atom( + token_nodes, + "variable", + context, + ExpansionRule::permissive(), + ); + + let atom = match atom { + Err(err) => return Err(err.into()), + Ok(atom) => atom, + }; + + match &atom.item { + AtomicToken::Variable { .. } => { + shapes.push(FlatShape::Variable.spanned(atom.span)); + Ok(()) + } + AtomicToken::ItVariable { .. } => { + shapes.push(FlatShape::ItVariable.spanned(atom.span)); + Ok(()) + } + _ => Err(ShellError::type_error("variable", atom.tagged_type_name())), + } + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for VariableShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "VariableShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let atom = expand_atom( + token_nodes, + "variable", + context, + ExpansionRule::permissive(), + ); + + let atom = match atom { + Err(err) => return Err(err.into()), + Ok(atom) => atom, + }; + + match &atom.item { + AtomicToken::Variable { .. } => { + token_nodes.color_shape(FlatShape::Variable.spanned(atom.span)); + Ok(()) + } + AtomicToken::ItVariable { .. } => { + token_nodes.color_shape(FlatShape::ItVariable.spanned(atom.span)); + Ok(()) + } + _ => Err(ParseError::mismatch("variable", atom.tagged_type_name()).into()), + } + } +} + +#[derive(Debug, Clone, Copy)] +pub enum Member { + String(/* outer */ Span, /* inner */ Span), + Bare(Span), +} + +impl FormatDebug for Member { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + match self { + Member::String(outer, _) => write!(f, "member ({})", outer.slice(source)), + Member::Bare(bare) => write!(f, "member ({})", bare.slice(source)), + } + } +} + +impl HasSpan for Member { + fn span(&self) -> Span { + match self { + Member::String(outer, ..) 
=> *outer,
+            Member::Bare(name) => *name,
+        }
+    }
+}
+
+impl Member {
+    pub(crate) fn to_expr(&self) -> hir::Expression {
+        match self {
+            Member::String(outer, inner) => hir::Expression::string(*inner, *outer),
+            Member::Bare(span) => hir::Expression::string(*span, *span),
+        }
+    }
+
+    pub(crate) fn span(&self) -> Span {
+        match self {
+            Member::String(outer, _inner) => *outer,
+            Member::Bare(span) => *span,
+        }
+    }
+
+    pub(crate) fn to_spanned_string(&self, source: &str) -> Spanned<String> {
+        match self {
+            Member::String(outer, inner) => inner.string(source).spanned(*outer),
+            Member::Bare(span) => span.spanned_string(source),
+        }
+    }
+
+    pub(crate) fn tagged_type_name(&self) -> Tagged<&'static str> {
+        match self {
+            Member::String(outer, _inner) => "string".tagged(outer),
+            Member::Bare(span) => "word".tagged(Tag {
+                span: *span,
+                anchor: None,
+            }),
+        }
+    }
+}
+
+enum ColumnPathState {
+    Initial,
+    LeadingDot(Span),
+    Dot(Span, Vec<Member>, Span),
+    Member(Span, Vec<Member>),
+    Error(ParseError),
+}
+
+impl ColumnPathState {
+    pub fn dot(self, dot: Span) -> ColumnPathState {
+        match self {
+            ColumnPathState::Initial => ColumnPathState::LeadingDot(dot),
+            ColumnPathState::LeadingDot(_) => {
+                ColumnPathState::Error(ParseError::mismatch("column", "dot".tagged(dot)))
+            }
+            ColumnPathState::Dot(..) => {
+                ColumnPathState::Error(ParseError::mismatch("column", "dot".tagged(dot)))
+            }
+            ColumnPathState::Member(tag, members) => ColumnPathState::Dot(tag, members, dot),
+            ColumnPathState::Error(err) => ColumnPathState::Error(err),
+        }
+    }
+
+    pub fn member(self, member: Member) -> ColumnPathState {
+        match self {
+            ColumnPathState::Initial => ColumnPathState::Member(member.span(), vec![member]),
+            ColumnPathState::LeadingDot(tag) => {
+                ColumnPathState::Member(tag.until(member.span()), vec![member])
+            }
+
+            ColumnPathState::Dot(tag, mut tags, _) => {
+                ColumnPathState::Member(tag.until(member.span()), {
+                    tags.push(member);
+                    tags
+                })
+            }
+            ColumnPathState::Member(..)
=> { + ColumnPathState::Error(ParseError::mismatch("column", member.tagged_type_name())) + } + ColumnPathState::Error(err) => ColumnPathState::Error(err), + } + } + + pub fn into_path(self, next: Peeked) -> Result>, ParseError> { + match self { + ColumnPathState::Initial => Err(next.type_error("column path")), + ColumnPathState::LeadingDot(dot) => { + Err(ParseError::mismatch("column", "dot".tagged(dot))) + } + ColumnPathState::Dot(_tag, _members, dot) => { + Err(ParseError::mismatch("column", "dot".tagged(dot))) + } + ColumnPathState::Member(tag, tags) => Ok(tags.tagged(tag)), + ColumnPathState::Error(err) => Err(err), + } + } +} + +pub fn expand_column_path<'a, 'b>( + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, +) -> Result>, ParseError> { + let mut state = ColumnPathState::Initial; + + loop { + let member = MemberShape.expand_syntax(token_nodes, context); + + match member { + Err(_) => break, + Ok(member) => state = state.member(member), + } + + let dot = DotShape.expand_syntax(token_nodes, context); + + match dot { + Err(_) => break, + Ok(dot) => state = state.dot(dot), + } + } + + state.into_path(token_nodes.peek_non_ws()) +} + +#[derive(Debug, Copy, Clone)] +pub struct ColumnPathShape; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for ColumnPathShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + // If there's not even one member shape, fail + color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?; + + loop { + let checkpoint = token_nodes.checkpoint(); + + match color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Dot, + checkpoint.iterator, + context, + shapes, + ) { + Err(_) => { + // we already saw at least one member shape, so return successfully + return Ok(()); + } + + Ok(_) => { + match color_fallible_syntax(&MemberShape, checkpoint.iterator, context, shapes) + { + Err(_) => { + // we saw a dot but not a member (but we saw at least one member), + // so don't commit the dot but return successfully + return Ok(()); + } + + Ok(_) => { + // we saw a dot and a member, so commit it and continue on + checkpoint.commit(); + } + } + } + } + } + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for ColumnPathShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "ColumnPathShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + // If there's not even one member shape, fail + color_fallible_syntax(&MemberShape, token_nodes, context)?; + + loop { + let checkpoint = token_nodes.checkpoint(); + + match color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Dot, + checkpoint.iterator, + context, + ) { + Err(_) => { + // we already saw at least one member shape, so return successfully + return Ok(()); + } + + Ok(_) => { + match color_fallible_syntax(&MemberShape, checkpoint.iterator, context) { + Err(_) => { + // we saw a dot but not a member (but we saw at least one member), + // so don't commit the dot but return successfully + return Ok(()); + } + + Ok(_) => { + // we saw a dot and a member, so commit it and continue on + checkpoint.commit(); + } + } + } + } + } + } +} + +impl FormatDebug for Tagged> { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + self.item.fmt_debug(f, 
source) + } +} + +#[derive(Debug, Clone, Getters, new)] +pub struct ColumnPath { + #[get = "pub"] + path: Tagged>, +} + +impl HasSpan for ColumnPath { + fn span(&self) -> Span { + self.path.tag.span + } +} + +impl FormatDebug for ColumnPath { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + f.say("column path", self.path.item.debug(source)) + } +} + +impl ExpandSyntax for ColumnPathShape { + type Output = ColumnPath; + + fn name(&self) -> &'static str { + "column path" + } + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + Ok(ColumnPath::new(expand_column_path(token_nodes, context)?)) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct MemberShape; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for MemberShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let bare = color_fallible_syntax_with( + &BareShape, + &FlatShape::BareMember, + token_nodes, + context, + shapes, + ); + + match bare { + Ok(_) => return Ok(()), + Err(_) => { + // If we don't have a bare word, we'll look for a string + } + } + + // Look for a string token. If we don't find one, fail + color_fallible_syntax_with( + &StringShape, + &FlatShape::StringMember, + token_nodes, + context, + shapes, + ) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for MemberShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "MemberShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let bare = + color_fallible_syntax_with(&BareShape, &FlatShape::BareMember, token_nodes, context); + + match bare { + Ok(_) => return Ok(()), + Err(_) => { + // If we don't have a bare word, we'll look for a string + } + } + + // Look for a string token. 
If we don't find one, fail + color_fallible_syntax_with(&StringShape, &FlatShape::StringMember, token_nodes, context) + } +} + +impl ExpandSyntax for MemberShape { + type Output = Member; + + fn name(&self) -> &'static str { + "column" + } + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + let bare = BareShape.test(token_nodes, context); + if let Some(peeked) = bare { + let node = peeked.not_eof("column")?.commit(); + return Ok(Member::Bare(node.span())); + } + + /* KATZ */ + /* let number = NumberShape.test(token_nodes, context); + + if let Some(peeked) = number { + let node = peeked.not_eof("column")?.commit(); + let (n, span) = node.as_number().unwrap(); + + return Ok(Member::Number(n, span)) + }*/ + + let string = StringShape.test(token_nodes, context); + + if let Some(peeked) = string { + let node = peeked.not_eof("column")?.commit(); + let (outer, inner) = node.as_string().unwrap(); + + return Ok(Member::String(outer, inner)); + } + + Err(token_nodes.peek_any().type_error("column")) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct DotShape; + +#[derive(Debug, Copy, Clone)] +pub struct ColorableDotShape; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for ColorableDotShape { + type Info = (); + type Input = FlatShape; + + fn color_syntax<'a, 'b>( + &self, + input: &FlatShape, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let peeked = token_nodes.peek_any().not_eof("dot")?; + + match peeked.node { + node if node.is_dot() => { + peeked.commit(); + shapes.push((*input).spanned(node.span())); + Ok(()) + } + + other => Err(ShellError::type_error("dot", other.tagged_type_name())), + } + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for ColorableDotShape { + type Info = (); + type Input = FlatShape; + + fn name(&self) -> &'static str { + "ColorableDotShape" + } + + fn color_syntax<'a, 'b>( + &self, + input: &FlatShape, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Result<(), ShellError> { + let peeked = token_nodes.peek_any().not_eof("dot")?; + + match peeked.node { + node if node.is_dot() => { + peeked.commit(); + token_nodes.color_shape((*input).spanned(node.span())); + Ok(()) + } + + other => Err(ShellError::type_error("dot", other.tagged_type_name())), + } + } +} + +impl SkipSyntax for DotShape { + fn skip<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + expand_syntax(self, token_nodes, context)?; + + Ok(()) + } +} + +impl ExpandSyntax for DotShape { + type Output = Span; + + fn name(&self) -> &'static str { + "dot" + } + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Result { + parse_single_node(token_nodes, "dot", |token, token_span, _| { + Ok(match token { + RawToken::Operator(Operator::Dot) => token_span, + _ => { + return Err(ParseError::mismatch( + "dot", + token.type_name().tagged(token_span), + )) + } + }) + }) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct InfixShape; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for InfixShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + outer_shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let checkpoint = token_nodes.checkpoint(); + let mut shapes = 
vec![]; + + // An infix operator must be prefixed by whitespace. If no whitespace was found, fail + color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context, &mut shapes)?; + + // Parse the next TokenNode after the whitespace + parse_single_node( + checkpoint.iterator, + "infix operator", + |token, token_span, err| { + match token { + // If it's an operator (and not `.`), it's a match + RawToken::Operator(operator) if operator != Operator::Dot => { + shapes.push(FlatShape::Operator.spanned(token_span)); + Ok(()) + } + + // Otherwise, it's not a match + _ => Err(err.error()), + } + }, + )?; + + // An infix operator must be followed by whitespace. If no whitespace was found, fail + color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context, &mut shapes)?; + + outer_shapes.extend(shapes); + checkpoint.commit(); + Ok(()) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for InfixShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "InfixShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let checkpoint = token_nodes.checkpoint(); + + // An infix operator must be prefixed by whitespace. If no whitespace was found, fail + color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context)?; + + // Parse the next TokenNode after the whitespace + let operator_span = parse_single_node( + checkpoint.iterator, + "infix operator", + |token, token_span, _| { + match token { + // If it's an operator (and not `.`), it's a match + RawToken::Operator(operator) if operator != Operator::Dot => Ok(token_span), + + // Otherwise, it's not a match + _ => Err(ParseError::mismatch( + "infix operator", + token.type_name().tagged(token_span), + )), + } + }, + )?; + + checkpoint + .iterator + .color_shape(FlatShape::Operator.spanned(operator_span)); + + // An infix operator must be followed by whitespace. 
If no whitespace was found, fail + color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context)?; + + checkpoint.commit(); + Ok(()) + } +} + +impl FormatDebug for Spanned<(Span, Spanned, Span)> { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + f.say_str("operator", self.item.1.span.slice(source)) + } +} + +impl ExpandSyntax for InfixShape { + type Output = Spanned<(Span, Spanned, Span)>; + + fn name(&self) -> &'static str { + "infix operator" + } + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + let mut checkpoint = token_nodes.checkpoint(); + + // An infix operator must be prefixed by whitespace + let start = expand_syntax(&WhitespaceShape, checkpoint.iterator, context)?; + + // Parse the next TokenNode after the whitespace + let operator = expand_syntax(&InfixInnerShape, &mut checkpoint.iterator, context)?; + + // An infix operator must be followed by whitespace + let end = expand_syntax(&WhitespaceShape, checkpoint.iterator, context)?; + + checkpoint.commit(); + + Ok((start, operator, end).spanned(start.until(end))) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct InfixInnerShape; + +impl FormatDebug for Spanned { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + f.say_str("operator", self.span.slice(source)) + } +} + +impl ExpandSyntax for InfixInnerShape { + type Output = Spanned; + + fn name(&self) -> &'static str { + "infix inner" + } + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Result { + parse_single_node(token_nodes, "infix operator", |token, token_span, err| { + Ok(match token { + // If it's an operator (and not `.`), it's a match + RawToken::Operator(operator) if operator != Operator::Dot => { + operator.spanned(token_span) + } + + // Otherwise, it's not a match + _ => return Err(err.error()), + }) + }) + } +} diff --git a/src/parser/hir/syntax_shape/flat_shape.rs b/src/parser/hir/syntax_shape/flat_shape.rs new file mode 100644 index 0000000000..48cb5b72cc --- /dev/null +++ b/src/parser/hir/syntax_shape/flat_shape.rs @@ -0,0 +1,97 @@ +use crate::parser::{Delimiter, Flag, FlagKind, Operator, RawNumber, RawToken, TokenNode}; +use crate::{HasSpan, Span, Spanned, SpannedItem, Text}; + +#[derive(Debug, Copy, Clone)] +pub enum FlatShape { + OpenDelimiter(Delimiter), + CloseDelimiter(Delimiter), + ItVariable, + Variable, + Operator, + Dot, + InternalCommand, + ExternalCommand, + ExternalWord, + BareMember, + StringMember, + String, + Path, + Word, + Pipe, + GlobPattern, + Flag, + ShorthandFlag, + Int, + Decimal, + Whitespace, + Error, + Size { number: Span, unit: Span }, +} + +impl FlatShape { + pub fn from(token: &TokenNode, source: &Text, shapes: &mut Vec>) -> () { + match token { + TokenNode::Token(token) => match token.item { + RawToken::Number(RawNumber::Int(_)) => { + shapes.push(FlatShape::Int.spanned(token.span)) + } + RawToken::Number(RawNumber::Decimal(_)) => { + shapes.push(FlatShape::Decimal.spanned(token.span)) + } + RawToken::Operator(Operator::Dot) => { + shapes.push(FlatShape::Dot.spanned(token.span)) + } + RawToken::Operator(_) => shapes.push(FlatShape::Operator.spanned(token.span)), + RawToken::String(_) => shapes.push(FlatShape::String.spanned(token.span)), + RawToken::Variable(v) if v.slice(source) == "it" => { + shapes.push(FlatShape::ItVariable.spanned(token.span)) + } + RawToken::Variable(_) => 
shapes.push(FlatShape::Variable.spanned(token.span)), + RawToken::ExternalCommand(_) => { + shapes.push(FlatShape::ExternalCommand.spanned(token.span)) + } + RawToken::ExternalWord => shapes.push(FlatShape::ExternalWord.spanned(token.span)), + RawToken::GlobPattern => shapes.push(FlatShape::GlobPattern.spanned(token.span)), + RawToken::Bare => shapes.push(FlatShape::Word.spanned(token.span)), + }, + TokenNode::Call(_) => unimplemented!(), + TokenNode::Nodes(nodes) => { + for node in &nodes.item { + FlatShape::from(node, source, shapes); + } + } + TokenNode::Delimited(v) => { + shapes.push(FlatShape::OpenDelimiter(v.item.delimiter).spanned(v.item.spans.0)); + for token in &v.item.children { + FlatShape::from(token, source, shapes); + } + shapes.push(FlatShape::CloseDelimiter(v.item.delimiter).spanned(v.item.spans.1)); + } + TokenNode::Pipeline(pipeline) => { + for part in &pipeline.parts { + if let Some(_) = part.pipe { + shapes.push(FlatShape::Pipe.spanned(part.span)); + } + } + } + TokenNode::Flag(Spanned { + item: + Flag { + kind: FlagKind::Longhand, + .. + }, + span, + }) => shapes.push(FlatShape::Flag.spanned(*span)), + TokenNode::Flag(Spanned { + item: + Flag { + kind: FlagKind::Shorthand, + .. + }, + span, + }) => shapes.push(FlatShape::ShorthandFlag.spanned(*span)), + TokenNode::Whitespace(_) => shapes.push(FlatShape::Whitespace.spanned(token.span())), + TokenNode::Error(v) => shapes.push(FlatShape::Error.spanned(v.span)), + } + } +} diff --git a/src/parser/hir/tokens_iterator.rs b/src/parser/hir/tokens_iterator.rs new file mode 100644 index 0000000000..bba5ff1352 --- /dev/null +++ b/src/parser/hir/tokens_iterator.rs @@ -0,0 +1,793 @@ +pub(crate) mod debug; + +use self::debug::{ColorTracer, ExpandTracer}; +use crate::errors::ShellError; +#[cfg(coloring_in_tokens)] +use crate::parser::hir::syntax_shape::FlatShape; +use crate::parser::hir::Expression; +use crate::parser::TokenNode; +use crate::prelude::*; +use crate::{Span, Spanned, SpannedItem}; +#[allow(unused)] +use getset::{Getters, MutGetters}; + +cfg_if::cfg_if! 
{ + if #[cfg(coloring_in_tokens)] { + #[derive(Getters, Debug)] + pub struct TokensIteratorState<'content> { + tokens: &'content [TokenNode], + span: Span, + skip_ws: bool, + index: usize, + seen: indexmap::IndexSet, + #[get = "pub"] + shapes: Vec>, + } + } else { + #[derive(Getters, Debug)] + pub struct TokensIteratorState<'content> { + tokens: &'content [TokenNode], + span: Span, + skip_ws: bool, + index: usize, + seen: indexmap::IndexSet, + } + } +} + +#[derive(Getters, MutGetters, Debug)] +pub struct TokensIterator<'content> { + #[get = "pub"] + #[get_mut = "pub"] + state: TokensIteratorState<'content>, + #[get = "pub"] + #[get_mut = "pub"] + color_tracer: ColorTracer, + #[get = "pub"] + #[get_mut = "pub"] + expand_tracer: ExpandTracer, +} + +#[derive(Debug)] +pub struct Checkpoint<'content, 'me> { + pub(crate) iterator: &'me mut TokensIterator<'content>, + index: usize, + seen: indexmap::IndexSet, + #[cfg(coloring_in_tokens)] + shape_start: usize, + committed: bool, +} + +impl<'content, 'me> Checkpoint<'content, 'me> { + pub(crate) fn commit(mut self) { + self.committed = true; + } +} + +impl<'content, 'me> std::ops::Drop for Checkpoint<'content, 'me> { + fn drop(&mut self) { + if !self.committed { + let state = &mut self.iterator.state; + + state.index = self.index; + state.seen = self.seen.clone(); + #[cfg(coloring_in_tokens)] + state.shapes.truncate(self.shape_start); + } + } +} + +#[derive(Debug)] +pub struct Peeked<'content, 'me> { + pub(crate) node: Option<&'content TokenNode>, + iterator: &'me mut TokensIterator<'content>, + from: usize, + to: usize, +} + +impl<'content, 'me> Peeked<'content, 'me> { + pub fn commit(&mut self) -> Option<&'content TokenNode> { + let Peeked { + node, + iterator, + from, + to, + } = self; + + let node = (*node)?; + iterator.commit(*from, *to); + Some(node) + } + + pub fn not_eof(self, expected: &'static str) -> Result, ParseError> { + match self.node { + None => Err(ParseError::unexpected_eof( + expected, + self.iterator.eof_span(), + )), + Some(node) => Ok(PeekedNode { + node, + iterator: self.iterator, + from: self.from, + to: self.to, + }), + } + } + + pub fn type_error(&self, expected: &'static str) -> ParseError { + peek_error(&self.node, self.iterator.eof_span(), expected) + } +} + +#[derive(Debug)] +pub struct PeekedNode<'content, 'me> { + pub(crate) node: &'content TokenNode, + iterator: &'me mut TokensIterator<'content>, + from: usize, + to: usize, +} + +impl<'content, 'me> PeekedNode<'content, 'me> { + pub fn commit(self) -> &'content TokenNode { + let PeekedNode { + node, + iterator, + from, + to, + } = self; + + iterator.commit(from, to); + node + } + + pub fn rollback(self) {} + + pub fn type_error(&self, expected: &'static str) -> ParseError { + peek_error(&Some(self.node), self.iterator.eof_span(), expected) + } +} + +pub fn peek_error(node: &Option<&TokenNode>, eof_span: Span, expected: &'static str) -> ParseError { + match node { + None => ParseError::unexpected_eof(expected, eof_span), + Some(node) => ParseError::mismatch(expected, node.tagged_type_name()), + } +} + +impl<'content> TokensIterator<'content> { + pub fn new( + items: &'content [TokenNode], + span: Span, + skip_ws: bool, + ) -> TokensIterator<'content> { + TokensIterator { + state: TokensIteratorState { + tokens: items, + span, + skip_ws, + index: 0, + seen: indexmap::IndexSet::new(), + #[cfg(coloring_in_tokens)] + shapes: vec![], + }, + color_tracer: ColorTracer::new(), + expand_tracer: ExpandTracer::new(), + } + } + + pub fn all(tokens: &'content [TokenNode], span: 
Span) -> TokensIterator<'content> { + TokensIterator::new(tokens, span, false) + } + + pub fn len(&self) -> usize { + self.state.tokens.len() + } + + pub fn spanned( + &mut self, + block: impl FnOnce(&mut TokensIterator<'content>) -> T, + ) -> Spanned { + let start = self.span_at_cursor(); + + let result = block(self); + + let end = self.span_at_cursor(); + + result.spanned(start.until(end)) + } + + #[cfg(coloring_in_tokens)] + pub fn color_shape(&mut self, shape: Spanned) { + self.with_color_tracer(|_, tracer| tracer.add_shape(shape)); + self.state.shapes.push(shape); + } + + #[cfg(coloring_in_tokens)] + pub fn mutate_shapes(&mut self, block: impl FnOnce(&mut Vec>)) { + let new_shapes: Vec> = { + let shapes = &mut self.state.shapes; + let len = shapes.len(); + block(shapes); + (len..(shapes.len())).map(|i| shapes[i]).collect() + }; + + self.with_color_tracer(|_, tracer| { + for shape in new_shapes { + tracer.add_shape(shape) + } + }); + } + + #[cfg(coloring_in_tokens)] + pub fn silently_mutate_shapes(&mut self, block: impl FnOnce(&mut Vec>)) { + let shapes = &mut self.state.shapes; + block(shapes); + } + + #[cfg(coloring_in_tokens)] + pub fn sort_shapes(&mut self) { + // This is pretty dubious, but it works. We should look into a better algorithm that doesn't end up requiring + // this solution. + + self.state + .shapes + .sort_by(|a, b| a.span.start().cmp(&b.span.start())); + } + + #[cfg(coloring_in_tokens)] + pub fn child<'me, T>( + &'me mut self, + tokens: Spanned<&'me [TokenNode]>, + block: impl FnOnce(&mut TokensIterator<'me>) -> T, + ) -> T { + let mut shapes = vec![]; + std::mem::swap(&mut shapes, &mut self.state.shapes); + + let mut color_tracer = ColorTracer::new(); + std::mem::swap(&mut color_tracer, &mut self.color_tracer); + + let mut expand_tracer = ExpandTracer::new(); + std::mem::swap(&mut expand_tracer, &mut self.expand_tracer); + + let mut iterator = TokensIterator { + state: TokensIteratorState { + tokens: tokens.item, + span: tokens.span, + skip_ws: false, + index: 0, + seen: indexmap::IndexSet::new(), + shapes, + }, + color_tracer, + expand_tracer, + }; + + let result = block(&mut iterator); + + std::mem::swap(&mut iterator.state.shapes, &mut self.state.shapes); + std::mem::swap(&mut iterator.color_tracer, &mut self.color_tracer); + std::mem::swap(&mut iterator.expand_tracer, &mut self.expand_tracer); + + result + } + + #[cfg(not(coloring_in_tokens))] + pub fn child<'me, T>( + &'me mut self, + tokens: Spanned<&'me [TokenNode]>, + block: impl FnOnce(&mut TokensIterator<'me>) -> T, + ) -> T { + let mut color_tracer = ColorTracer::new(); + std::mem::swap(&mut color_tracer, &mut self.color_tracer); + + let mut expand_tracer = ExpandTracer::new(); + std::mem::swap(&mut expand_tracer, &mut self.expand_tracer); + + let mut iterator = TokensIterator { + state: TokensIteratorState { + tokens: tokens.item, + span: tokens.span, + skip_ws: false, + index: 0, + seen: indexmap::IndexSet::new(), + }, + color_tracer, + expand_tracer, + }; + + let result = block(&mut iterator); + + std::mem::swap(&mut iterator.color_tracer, &mut self.color_tracer); + std::mem::swap(&mut iterator.expand_tracer, &mut self.expand_tracer); + + result + } + + pub fn with_color_tracer( + &mut self, + block: impl FnOnce(&mut TokensIteratorState, &mut ColorTracer), + ) { + let state = &mut self.state; + let color_tracer = &mut self.color_tracer; + + block(state, color_tracer) + } + + pub fn with_expand_tracer( + &mut self, + block: impl FnOnce(&mut TokensIteratorState, &mut ExpandTracer), + ) { + let state = 
&mut self.state; + let tracer = &mut self.expand_tracer; + + block(state, tracer) + } + + #[cfg(coloring_in_tokens)] + pub fn color_frame( + &mut self, + desc: &'static str, + block: impl FnOnce(&mut TokensIterator) -> T, + ) -> T { + self.with_color_tracer(|_, tracer| tracer.start(desc)); + + let result = block(self); + + self.with_color_tracer(|_, tracer| { + tracer.success(); + }); + + result + } + + pub fn expand_frame( + &mut self, + desc: &'static str, + block: impl FnOnce(&mut TokensIterator) -> Result, + ) -> Result + where + T: std::fmt::Debug + FormatDebug + Clone + HasFallibleSpan + 'static, + { + self.with_expand_tracer(|_, tracer| tracer.start(desc)); + + let result = block(self); + + self.with_expand_tracer(|_, tracer| match &result { + Ok(result) => { + tracer.add_result(Box::new(result.clone())); + tracer.success(); + } + + Err(err) => tracer.failed(err), + }); + + result + } + + pub fn expand_expr_frame( + &mut self, + desc: &'static str, + block: impl FnOnce(&mut TokensIterator) -> Result, + ) -> Result { + self.with_expand_tracer(|_, tracer| tracer.start(desc)); + + let result = block(self); + + self.with_expand_tracer(|_, tracer| match &result { + Ok(expr) => { + tracer.add_expr(expr.clone()); + tracer.success() + } + + Err(err) => tracer.failed(err), + }); + + result + } + + pub fn color_fallible_frame( + &mut self, + desc: &'static str, + block: impl FnOnce(&mut TokensIterator) -> Result, + ) -> Result { + self.with_color_tracer(|_, tracer| tracer.start(desc)); + + if self.at_end() { + self.with_color_tracer(|_, tracer| tracer.eof_frame()); + return Err(ShellError::unexpected_eof("coloring", Tag::unknown())); + } + + let result = block(self); + + self.with_color_tracer(|_, tracer| match &result { + Ok(_) => { + tracer.success(); + } + + Err(err) => tracer.failed(err), + }); + + result + } + + /// Use a checkpoint when you need to peek more than one token ahead, but can't be sure + /// that you'll succeed. + pub fn checkpoint<'me>(&'me mut self) -> Checkpoint<'content, 'me> { + let state = &mut self.state; + + let index = state.index; + #[cfg(coloring_in_tokens)] + let shape_start = state.shapes.len(); + let seen = state.seen.clone(); + + Checkpoint { + iterator: self, + index, + seen, + committed: false, + #[cfg(coloring_in_tokens)] + shape_start, + } + } + + /// Use a checkpoint when you need to peek more than one token ahead, but can't be sure + /// that you'll succeed. + pub fn atomic<'me, T>( + &'me mut self, + block: impl FnOnce(&mut TokensIterator<'content>) -> Result, + ) -> Result { + let state = &mut self.state; + + let index = state.index; + #[cfg(coloring_in_tokens)] + let shape_start = state.shapes.len(); + let seen = state.seen.clone(); + + let checkpoint = Checkpoint { + iterator: self, + index, + seen, + committed: false, + #[cfg(coloring_in_tokens)] + shape_start, + }; + + let value = block(checkpoint.iterator)?; + + checkpoint.commit(); + return Ok(value); + } + + #[cfg(coloring_in_tokens)] + /// Use a checkpoint when you need to peek more than one token ahead, but can't be sure + /// that you'll succeed. 
+ pub fn atomic_returning_shapes<'me, T>( + &'me mut self, + block: impl FnOnce(&mut TokensIterator<'content>) -> Result, + ) -> (Result, Vec>) { + let index = self.state.index; + let mut shapes = vec![]; + + let seen = self.state.seen.clone(); + std::mem::swap(&mut self.state.shapes, &mut shapes); + + let checkpoint = Checkpoint { + iterator: self, + index, + seen, + committed: false, + shape_start: 0, + }; + + let value = block(checkpoint.iterator); + + let value = match value { + Err(err) => { + drop(checkpoint); + std::mem::swap(&mut self.state.shapes, &mut shapes); + return (Err(err), vec![]); + } + + Ok(value) => value, + }; + + checkpoint.commit(); + std::mem::swap(&mut self.state.shapes, &mut shapes); + return (Ok(value), shapes); + } + + fn eof_span(&self) -> Span { + Span::new(self.state.span.end(), self.state.span.end()) + } + + pub fn typed_span_at_cursor(&mut self) -> Spanned<&'static str> { + let next = self.peek_any(); + + match next.node { + None => "end".spanned(self.eof_span()), + Some(node) => node.spanned_type_name(), + } + } + + pub fn span_at_cursor(&mut self) -> Span { + let next = self.peek_any(); + + match next.node { + None => self.eof_span(), + Some(node) => node.span(), + } + } + + pub fn remove(&mut self, position: usize) { + self.state.seen.insert(position); + } + + pub fn at_end(&self) -> bool { + peek(self, self.state.skip_ws).is_none() + } + + pub fn at_end_possible_ws(&self) -> bool { + peek(self, true).is_none() + } + + pub fn advance(&mut self) { + self.state.seen.insert(self.state.index); + self.state.index += 1; + } + + pub fn extract(&mut self, f: impl Fn(&TokenNode) -> Option) -> Option<(usize, T)> { + let state = &mut self.state; + + for (i, item) in state.tokens.iter().enumerate() { + if state.seen.contains(&i) { + continue; + } + + match f(item) { + None => { + continue; + } + Some(value) => { + state.seen.insert(i); + return Some((i, value)); + } + } + } + + None + } + + pub fn move_to(&mut self, pos: usize) { + self.state.index = pos; + } + + pub fn restart(&mut self) { + self.state.index = 0; + } + + // pub fn clone(&self) -> TokensIterator<'content> { + // let state = &self.state; + // TokensIterator { + // state: TokensIteratorState { + // tokens: state.tokens, + // span: state.span, + // index: state.index, + // seen: state.seen.clone(), + // skip_ws: state.skip_ws, + // #[cfg(coloring_in_tokens)] + // shapes: state.shapes.clone(), + // }, + // color_tracer: self.color_tracer.clone(), + // expand_tracer: self.expand_tracer.clone(), + // } + // } + + // Peek the next token, not including whitespace + pub fn peek_non_ws<'me>(&'me mut self) -> Peeked<'content, 'me> { + start_next(self, true) + } + + // Peek the next token, including whitespace + pub fn peek_any<'me>(&'me mut self) -> Peeked<'content, 'me> { + start_next(self, false) + } + + // Peek the next token, including whitespace, but not EOF + pub fn peek_any_token<'me, T>( + &'me mut self, + expected: &'static str, + block: impl FnOnce(&'content TokenNode) -> Result, + ) -> Result { + let peeked = start_next(self, false); + let peeked = peeked.not_eof(expected); + + match peeked { + Err(err) => return Err(err), + Ok(peeked) => match block(peeked.node) { + Err(err) => return Err(err), + Ok(val) => { + peeked.commit(); + return Ok(val); + } + }, + } + } + + fn commit(&mut self, from: usize, to: usize) { + for index in from..to { + self.state.seen.insert(index); + } + + self.state.index = to; + } + + pub fn pos(&self, skip_ws: bool) -> Option { + peek_pos(self, skip_ws) + } + + pub fn 
debug_remaining(&self) -> Vec { + // TODO: TODO: TODO: Clean up + vec![] + // let mut tokens = self.clone(); + // tokens.restart(); + // tokens.cloned().collect() + } +} + +impl<'content> Iterator for TokensIterator<'content> { + type Item = &'content TokenNode; + + fn next(&mut self) -> Option { + next(self, self.state.skip_ws) + } +} + +fn peek<'content, 'me>( + iterator: &'me TokensIterator<'content>, + skip_ws: bool, +) -> Option<&'me TokenNode> { + let state = iterator.state(); + + let mut to = state.index; + + loop { + if to >= state.tokens.len() { + return None; + } + + if state.seen.contains(&to) { + to += 1; + continue; + } + + if to >= state.tokens.len() { + return None; + } + + let node = &state.tokens[to]; + + match node { + TokenNode::Whitespace(_) if skip_ws => { + to += 1; + } + _ => { + return Some(node); + } + } + } +} + +fn peek_pos<'content, 'me>( + iterator: &'me TokensIterator<'content>, + skip_ws: bool, +) -> Option { + let state = iterator.state(); + + let mut to = state.index; + + loop { + if to >= state.tokens.len() { + return None; + } + + if state.seen.contains(&to) { + to += 1; + continue; + } + + if to >= state.tokens.len() { + return None; + } + + let node = &state.tokens[to]; + + match node { + TokenNode::Whitespace(_) if skip_ws => { + to += 1; + } + _ => return Some(to), + } + } +} + +fn start_next<'content, 'me>( + iterator: &'me mut TokensIterator<'content>, + skip_ws: bool, +) -> Peeked<'content, 'me> { + let state = iterator.state(); + + let from = state.index; + let mut to = state.index; + + loop { + if to >= state.tokens.len() { + return Peeked { + node: None, + iterator, + from, + to, + }; + } + + if state.seen.contains(&to) { + to += 1; + continue; + } + + if to >= state.tokens.len() { + return Peeked { + node: None, + iterator, + from, + to, + }; + } + + let node = &state.tokens[to]; + + match node { + TokenNode::Whitespace(_) if skip_ws => { + to += 1; + } + _ => { + to += 1; + return Peeked { + node: Some(node), + iterator, + from, + to, + }; + } + } + } +} + +fn next<'me, 'content>( + iterator: &'me mut TokensIterator<'content>, + skip_ws: bool, +) -> Option<&'content TokenNode> { + loop { + if iterator.state().index >= iterator.state().tokens.len() { + return None; + } + + if iterator.state().seen.contains(&iterator.state().index) { + iterator.advance(); + continue; + } + + if iterator.state().index >= iterator.state().tokens.len() { + return None; + } + + match &iterator.state().tokens[iterator.state().index] { + TokenNode::Whitespace(_) if skip_ws => { + iterator.advance(); + } + other => { + iterator.advance(); + return Some(other); + } + } + } +} diff --git a/src/parser/hir/tokens_iterator/debug.rs b/src/parser/hir/tokens_iterator/debug.rs new file mode 100644 index 0000000000..6e2d7082b2 --- /dev/null +++ b/src/parser/hir/tokens_iterator/debug.rs @@ -0,0 +1,38 @@ +#![allow(unused)] + +pub(crate) mod color_trace; +pub(crate) mod expand_trace; + +pub(crate) use self::color_trace::*; +pub(crate) use self::expand_trace::*; + +use crate::parser::hir::tokens_iterator::TokensIteratorState; +use crate::traits::ToDebug; + +#[derive(Debug)] +pub(crate) enum DebugIteratorToken { + Seen(String), + Unseen(String), + Cursor, +} + +pub(crate) fn debug_tokens(state: &TokensIteratorState, source: &str) -> Vec { + let mut out = vec![]; + + for (i, token) in state.tokens.iter().enumerate() { + if state.index == i { + out.push(DebugIteratorToken::Cursor); + } + + if state.seen.contains(&i) { + out.push(DebugIteratorToken::Seen(format!("{}", 
token.debug(source)))); + } else { + out.push(DebugIteratorToken::Unseen(format!( + "{}", + token.debug(source) + ))); + } + } + + out +} diff --git a/src/parser/hir/tokens_iterator/debug/color_trace.rs b/src/parser/hir/tokens_iterator/debug/color_trace.rs new file mode 100644 index 0000000000..bbb9d856c4 --- /dev/null +++ b/src/parser/hir/tokens_iterator/debug/color_trace.rs @@ -0,0 +1,351 @@ +use crate::errors::ShellError; +use crate::parser::hir::syntax_shape::FlatShape; +use crate::prelude::*; +use ansi_term::Color; +use log::trace; +use ptree::*; +use std::borrow::Cow; +use std::io; + +#[derive(Debug, Clone)] +pub enum FrameChild { + #[allow(unused)] + Shape(Spanned), + Frame(ColorFrame), +} + +impl FrameChild { + fn colored_leaf_description(&self, text: &Text, f: &mut impl io::Write) -> io::Result<()> { + match self { + FrameChild::Shape(shape) => write!( + f, + "{} {:?}", + Color::White + .bold() + .on(Color::Green) + .paint(format!("{:?}", shape.item)), + shape.span.slice(text) + ), + + FrameChild::Frame(frame) => frame.colored_leaf_description(f), + } + } + + fn into_tree_child(self, text: &Text) -> TreeChild { + match self { + FrameChild::Shape(shape) => TreeChild::Shape(shape, text.clone()), + FrameChild::Frame(frame) => TreeChild::Frame(frame, text.clone()), + } + } +} + +#[derive(Debug, Clone)] +pub struct ColorFrame { + description: &'static str, + children: Vec, + error: Option, +} + +impl ColorFrame { + fn colored_leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> { + if self.has_only_error_descendents() { + if self.children.len() == 0 { + write!( + f, + "{}", + Color::White.bold().on(Color::Red).paint(self.description) + ) + } else { + write!(f, "{}", Color::Red.normal().paint(self.description)) + } + } else if self.has_descendent_shapes() { + write!(f, "{}", Color::Green.normal().paint(self.description)) + } else { + write!(f, "{}", Color::Yellow.bold().paint(self.description)) + } + } + + fn colored_description(&self, text: &Text, f: &mut impl io::Write) -> io::Result<()> { + if self.children.len() == 1 { + let child = &self.children[0]; + + self.colored_leaf_description(f)?; + write!(f, " -> ")?; + child.colored_leaf_description(text, f) + } else { + self.colored_leaf_description(f) + } + } + + fn children_for_formatting(&self, text: &Text) -> Vec { + if self.children.len() == 1 { + let child = &self.children[0]; + + match child { + FrameChild::Shape(_) => vec![], + FrameChild::Frame(frame) => frame.tree_children(text), + } + } else { + self.tree_children(text) + } + } + + fn tree_children(&self, text: &Text) -> Vec { + self.children + .clone() + .into_iter() + .map(|c| c.into_tree_child(text)) + .collect() + } + + #[allow(unused)] + fn add_shape(&mut self, shape: Spanned) { + self.children.push(FrameChild::Shape(shape)) + } + + fn has_child_shapes(&self) -> bool { + self.any_child_shape(|_| true) + } + + fn any_child_shape(&self, predicate: impl Fn(Spanned) -> bool) -> bool { + for item in &self.children { + match item { + FrameChild::Shape(shape) => { + if predicate(*shape) { + return true; + } + } + + _ => {} + } + } + + false + } + + fn any_child_frame(&self, predicate: impl Fn(&ColorFrame) -> bool) -> bool { + for item in &self.children { + match item { + FrameChild::Frame(frame) => { + if predicate(frame) { + return true; + } + } + + _ => {} + } + } + + false + } + + fn has_descendent_shapes(&self) -> bool { + if self.has_child_shapes() { + true + } else { + self.any_child_frame(|frame| frame.has_descendent_shapes()) + } + } + + fn 
has_only_error_descendents(&self) -> bool { + if self.children.len() == 0 { + // if this frame has no children at all, it has only error descendents if this frame + // is an error + self.error.is_some() + } else { + // otherwise, it has only error descendents if all of its children terminate in an + // error (transitively) + + let mut seen_error = false; + + for child in &self.children { + match child { + // if this frame has at least one child shape, this frame has non-error descendents + FrameChild::Shape(_) => return false, + FrameChild::Frame(frame) => { + // if the chi + if frame.has_only_error_descendents() { + seen_error = true; + } else { + return false; + } + } + } + } + + seen_error + } + } +} + +#[derive(Debug, Clone)] +pub enum TreeChild { + Shape(Spanned, Text), + Frame(ColorFrame, Text), +} + +impl TreeChild { + fn colored_leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> { + match self { + TreeChild::Shape(shape, text) => write!( + f, + "{} {:?}", + Color::White + .bold() + .on(Color::Green) + .paint(format!("{:?}", shape.item)), + shape.span.slice(text) + ), + + TreeChild::Frame(frame, _) => frame.colored_leaf_description(f), + } + } +} + +impl TreeItem for TreeChild { + type Child = TreeChild; + + fn write_self(&self, f: &mut W, _style: &Style) -> io::Result<()> { + match self { + shape @ TreeChild::Shape(..) => shape.colored_leaf_description(f), + + TreeChild::Frame(frame, text) => frame.colored_description(text, f), + } + } + + fn children(&self) -> Cow<[Self::Child]> { + match self { + TreeChild::Shape(..) => Cow::Borrowed(&[]), + TreeChild::Frame(frame, text) => Cow::Owned(frame.children_for_formatting(text)), + } + } +} + +#[derive(Debug, Clone)] +pub struct ColorTracer { + frame_stack: Vec, +} + +impl ColorTracer { + pub fn print(self, source: Text) -> PrintTracer { + PrintTracer { + tracer: self, + source, + } + } + + pub fn new() -> ColorTracer { + let root = ColorFrame { + description: "Trace", + children: vec![], + error: None, + }; + + ColorTracer { + frame_stack: vec![root], + } + } + + fn current_frame(&mut self) -> &mut ColorFrame { + let frames = &mut self.frame_stack; + let last = frames.len() - 1; + &mut frames[last] + } + + fn pop_frame(&mut self) -> ColorFrame { + trace!(target: "nu::color_syntax", "Popping {:#?}", self); + + let result = self.frame_stack.pop().expect("Can't pop root tracer frame"); + + if self.frame_stack.len() == 0 { + panic!("Can't pop root tracer frame {:#?}", self); + } + + self.debug(); + + result + } + + pub fn start(&mut self, description: &'static str) { + let frame = ColorFrame { + description, + children: vec![], + error: None, + }; + + self.frame_stack.push(frame); + self.debug(); + } + + pub fn eof_frame(&mut self) { + let current = self.pop_frame(); + self.current_frame() + .children + .push(FrameChild::Frame(current)); + } + + #[allow(unused)] + pub fn finish(&mut self) { + loop { + if self.frame_stack.len() == 1 { + break; + } + + let frame = self.pop_frame(); + self.current_frame().children.push(FrameChild::Frame(frame)); + } + } + + #[allow(unused)] + pub fn add_shape(&mut self, shape: Spanned) { + self.current_frame().add_shape(shape); + } + + pub fn success(&mut self) { + let current = self.pop_frame(); + self.current_frame() + .children + .push(FrameChild::Frame(current)); + } + + pub fn failed(&mut self, error: &ShellError) { + let mut current = self.pop_frame(); + current.error = Some(error.clone()); + self.current_frame() + .children + .push(FrameChild::Frame(current)); + } + + fn debug(&self) { + 
+        trace!(target: "nu::color_syntax",
+            "frames = {:?}",
+            self.frame_stack
+                .iter()
+                .map(|f| f.description)
+                .collect::<Vec<_>>()
+        );
+
+        trace!(target: "nu::color_syntax", "{:#?}", self);
+    }
+}
+
+#[derive(Debug, Clone)]
+pub struct PrintTracer {
+    tracer: ColorTracer,
+    source: Text,
+}
+
+impl TreeItem for PrintTracer {
+    type Child = TreeChild;
+
+    fn write_self<W: io::Write>(&self, f: &mut W, style: &Style) -> io::Result<()> {
+        write!(f, "{}", style.paint("Color Trace"))
+    }
+
+    fn children(&self) -> Cow<[Self::Child]> {
+        Cow::Owned(vec![TreeChild::Frame(
+            self.tracer.frame_stack[0].clone(),
+            self.source.clone(),
+        )])
+    }
+}
diff --git a/src/parser/hir/tokens_iterator/debug/expand_trace.rs b/src/parser/hir/tokens_iterator/debug/expand_trace.rs
new file mode 100644
index 0000000000..11b705b9d8
--- /dev/null
+++ b/src/parser/hir/tokens_iterator/debug/expand_trace.rs
@@ -0,0 +1,365 @@
+use crate::parser::hir::Expression;
+use crate::prelude::*;
+use ansi_term::Color;
+use log::trace;
+use ptree::*;
+use std::borrow::Cow;
+use std::io;
+
+#[derive(Debug)]
+pub enum FrameChild {
+    Expr(Expression),
+    Frame(ExprFrame),
+    Result(Box),
+}
+
+impl FrameChild {
+    fn get_error_leaf(&self) -> Option<&'static str> {
+        match self {
+            FrameChild::Frame(frame) if frame.error.is_some() => {
+                if frame.children.len() == 0 {
+                    Some(frame.description)
+                } else {
+                    None
+                }
+            }
+            _ => None,
+        }
+    }
+
+    fn to_tree_child(&self, text: &Text) -> TreeChild {
+        match self {
+            FrameChild::Expr(expr) => TreeChild::OkExpr(expr.clone(), text.clone()),
+            FrameChild::Result(result) => {
+                let result = format!("{}", result.debug(text));
+                TreeChild::OkNonExpr(result)
+            }
+            FrameChild::Frame(frame) => {
+                if frame.error.is_some() {
+                    if frame.children.len() == 0 {
+                        TreeChild::ErrorLeaf(vec![frame.description])
+                    } else {
+                        TreeChild::ErrorFrame(frame.to_tree_frame(text), text.clone())
+                    }
+                } else {
+                    TreeChild::OkFrame(frame.to_tree_frame(text), text.clone())
+                }
+            }
+        }
+    }
+}
+
+#[derive(Debug)]
+pub struct ExprFrame {
+    description: &'static str,
+    children: Vec<FrameChild>,
+    error: Option<ParseError>,
+}
+
+impl ExprFrame {
+    fn to_tree_frame(&self, text: &Text) -> TreeFrame {
+        let mut children = vec![];
+        let mut errors = vec![];
+
+        for child in &self.children {
+            if let Some(error_leaf) = child.get_error_leaf() {
+                errors.push(error_leaf);
+                continue;
+            } else if errors.len() > 0 {
+                children.push(TreeChild::ErrorLeaf(errors));
+                errors = vec![];
+            }
+
+            children.push(child.to_tree_child(text));
+        }
+
+        if errors.len() > 0 {
+            children.push(TreeChild::ErrorLeaf(errors));
+        }
+
+        TreeFrame {
+            description: self.description,
+            children,
+            error: self.error.clone(),
+        }
+    }
+
+    fn add_expr(&mut self, expr: Expression) {
+        self.children.push(FrameChild::Expr(expr))
+    }
+
+    fn add_result(&mut self, result: Box) {
+        self.children.push(FrameChild::Result(result))
+    }
+}
+
+#[derive(Debug, Clone)]
+pub struct TreeFrame {
+    description: &'static str,
+    children: Vec<TreeChild>,
+    error: Option<ParseError>,
+}
+
+impl TreeFrame {
+    fn leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> {
+        if self.children.len() == 1 {
+            if self.error.is_some() {
+                write!(f, "{}", Color::Red.normal().paint(self.description))?;
+            } else if self.has_descendent_green() {
+                write!(f, "{}", Color::Green.normal().paint(self.description))?;
+            } else {
+                write!(f, "{}", Color::Yellow.bold().paint(self.description))?;
+            }
+
+            write!(f, " -> ")?;
+            self.children[0].leaf_description(f)
+        } else {
+            if self.error.is_some() {
+                if self.children.len() == 0 {
+                    write!(
+                        f,
+                        "{}",
Color::White.bold().on(Color::Red).paint(self.description) + ) + } else { + write!(f, "{}", Color::Red.normal().paint(self.description)) + } + } else if self.has_descendent_green() { + write!(f, "{}", Color::Green.normal().paint(self.description)) + } else { + write!(f, "{}", Color::Yellow.bold().paint(self.description)) + } + } + } + + fn has_child_green(&self) -> bool { + self.children.iter().any(|item| match item { + TreeChild::OkFrame(..) | TreeChild::ErrorFrame(..) | TreeChild::ErrorLeaf(..) => false, + TreeChild::OkExpr(..) | TreeChild::OkNonExpr(..) => true, + }) + } + + fn any_child_frame(&self, predicate: impl Fn(&TreeFrame) -> bool) -> bool { + for item in &self.children { + match item { + TreeChild::OkFrame(frame, ..) => { + if predicate(frame) { + return true; + } + } + + _ => {} + } + } + + false + } + + fn has_descendent_green(&self) -> bool { + if self.has_child_green() { + true + } else { + self.any_child_frame(|frame| frame.has_child_green()) + } + } + + fn children_for_formatting(&self, text: &Text) -> Vec { + if self.children.len() == 1 { + let child: &TreeChild = &self.children[0]; + match child { + TreeChild::OkExpr(..) | TreeChild::OkNonExpr(..) | TreeChild::ErrorLeaf(..) => { + vec![] + } + TreeChild::OkFrame(frame, _) | TreeChild::ErrorFrame(frame, _) => { + frame.children_for_formatting(text) + } + } + } else { + self.children.clone() + } + } +} + +#[derive(Debug, Clone)] +pub enum TreeChild { + OkNonExpr(String), + OkExpr(Expression, Text), + OkFrame(TreeFrame, Text), + ErrorFrame(TreeFrame, Text), + ErrorLeaf(Vec<&'static str>), +} + +impl TreeChild { + fn leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> { + match self { + TreeChild::OkExpr(expr, text) => write!( + f, + "{} {} {}", + Color::Cyan.normal().paint("returns"), + Color::White.bold().on(Color::Green).paint(expr.type_name()), + expr.span.slice(text) + ), + + TreeChild::OkNonExpr(result) => write!( + f, + "{} {}", + Color::Cyan.normal().paint("returns"), + Color::White + .bold() + .on(Color::Green) + .paint(format!("{}", result)) + ), + + TreeChild::ErrorLeaf(desc) => { + let last = desc.len() - 1; + + for (i, item) in desc.iter().enumerate() { + write!(f, "{}", Color::White.bold().on(Color::Red).paint(*item))?; + + if i != last { + write!(f, "{}", Color::White.normal().paint(", "))?; + } + } + + Ok(()) + } + + TreeChild::ErrorFrame(frame, _) | TreeChild::OkFrame(frame, _) => { + frame.leaf_description(f) + } + } + } +} + +impl TreeItem for TreeChild { + type Child = TreeChild; + + fn write_self(&self, f: &mut W, _style: &Style) -> io::Result<()> { + self.leaf_description(f) + } + + fn children(&self) -> Cow<[Self::Child]> { + match self { + TreeChild::OkExpr(..) | TreeChild::OkNonExpr(..) | TreeChild::ErrorLeaf(..) 
=> { + Cow::Borrowed(&[]) + } + TreeChild::OkFrame(frame, text) | TreeChild::ErrorFrame(frame, text) => { + Cow::Owned(frame.children_for_formatting(text)) + } + } + } +} + +#[derive(Debug)] +pub struct ExpandTracer { + frame_stack: Vec, +} + +impl ExpandTracer { + pub fn print(&self, source: Text) -> PrintTracer { + let root = self + .frame_stack + .iter() + .nth(0) + .unwrap() + .to_tree_frame(&source); + + PrintTracer { root, source } + } + + pub fn new() -> ExpandTracer { + let root = ExprFrame { + description: "Trace", + children: vec![], + error: None, + }; + + ExpandTracer { + frame_stack: vec![root], + } + } + + fn current_frame(&mut self) -> &mut ExprFrame { + let frames = &mut self.frame_stack; + let last = frames.len() - 1; + &mut frames[last] + } + + fn pop_frame(&mut self) -> ExprFrame { + let result = self.frame_stack.pop().expect("Can't pop root tracer frame"); + + if self.frame_stack.len() == 0 { + panic!("Can't pop root tracer frame"); + } + + self.debug(); + + result + } + + pub fn start(&mut self, description: &'static str) { + let frame = ExprFrame { + description, + children: vec![], + error: None, + }; + + self.frame_stack.push(frame); + self.debug(); + } + + pub fn add_expr(&mut self, shape: Expression) { + self.current_frame().add_expr(shape); + } + + pub fn add_result(&mut self, result: Box) { + self.current_frame().add_result(result); + } + + pub fn success(&mut self) { + trace!(target: "parser::expand_syntax", "success {:#?}", self); + + let current = self.pop_frame(); + self.current_frame() + .children + .push(FrameChild::Frame(current)); + } + + pub fn failed(&mut self, error: &ParseError) { + let mut current = self.pop_frame(); + current.error = Some(error.clone()); + self.current_frame() + .children + .push(FrameChild::Frame(current)); + } + + fn debug(&self) { + trace!(target: "nu::parser::expand", + "frames = {:?}", + self.frame_stack + .iter() + .map(|f| f.description) + .collect::>() + ); + + trace!(target: "nu::parser::expand", "{:#?}", self); + } +} + +#[derive(Debug, Clone)] +pub struct PrintTracer { + root: TreeFrame, + source: Text, +} + +impl TreeItem for PrintTracer { + type Child = TreeChild; + + fn write_self(&self, f: &mut W, style: &Style) -> io::Result<()> { + write!(f, "{}", style.paint("Expansion Trace")) + } + + fn children(&self) -> Cow<[Self::Child]> { + Cow::Borrowed(&self.root.children) + } +} diff --git a/src/parser/hir/tokens_iterator/tests.rs b/src/parser/hir/tokens_iterator/tests.rs new file mode 100644 index 0000000000..23f8889786 --- /dev/null +++ b/src/parser/hir/tokens_iterator/tests.rs @@ -0,0 +1,16 @@ +use crate::parser::hir::TokensIterator; +use crate::parser::parse::token_tree_builder::TokenTreeBuilder as b; +use crate::Span; + +#[test] +fn supplies_tokens() { + let tokens = b::token_list(vec![b::var("it"), b::op("."), b::bare("cpu")]); + let (tokens, _) = b::build(tokens); + + let tokens = tokens.expect_list(); + let mut iterator = TokensIterator::all(tokens, Span::unknown()); + + iterator.next().unwrap().expect_var(); + iterator.next().unwrap().expect_dot(); + iterator.next().unwrap().expect_bare(); +} diff --git a/src/parser/parse/call_node.rs b/src/parser/parse/call_node.rs index eb715cd376..57d7fa9ad4 100644 --- a/src/parser/parse/call_node.rs +++ b/src/parser/parse/call_node.rs @@ -1,7 +1,7 @@ use crate::parser::TokenNode; -use crate::traits::ToDebug; +use crate::traits::{DebugFormatter, FormatDebug, ToDebug}; use getset::Getters; -use std::fmt; +use std::fmt::{self, Write}; #[derive(Debug, Clone, Eq, PartialEq, Ord, 
PartialOrd, Getters)] pub struct CallNode { @@ -27,8 +27,8 @@ impl CallNode { } } -impl ToDebug for CallNode { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { +impl FormatDebug for CallNode { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { write!(f, "{}", self.head.debug(source))?; if let Some(children) = &self.children { diff --git a/src/parser/parse/files.rs b/src/parser/parse/files.rs index afe75ddb27..8a2d3c90eb 100644 --- a/src/parser/parse/files.rs +++ b/src/parser/parse/files.rs @@ -1,7 +1,7 @@ -use crate::Tag; +use crate::Span; use derive_new::new; use language_reporting::{FileName, Location}; -use uuid::Uuid; +use log::trace; #[derive(new, Debug, Clone)] pub struct Files { @@ -9,20 +9,20 @@ pub struct Files { } impl language_reporting::ReportingFiles for Files { - type Span = Tag; - type FileId = Uuid; + type Span = Span; + type FileId = usize; fn byte_span( &self, - file: Self::FileId, + _file: Self::FileId, from_index: usize, to_index: usize, ) -> Option { - Some(Tag::from((from_index, to_index, file))) + Some(Span::new(from_index, to_index)) } - fn file_id(&self, tag: Self::Span) -> Self::FileId { - tag.anchor + fn file_id(&self, _tag: Self::Span) -> Self::FileId { + 0 } fn file_name(&self, _file: Self::FileId) -> FileName { @@ -38,8 +38,18 @@ impl language_reporting::ReportingFiles for Files { let mut seen_lines = 0; let mut seen_bytes = 0; - for (pos, _) in source.match_indices('\n') { - if pos > byte_index { + for (pos, slice) in source.match_indices('\n') { + trace!( + "SEARCH={} SEEN={} POS={} SLICE={:?} LEN={} ALL={:?}", + byte_index, + seen_bytes, + pos, + slice, + source.len(), + source + ); + + if pos >= byte_index { return Some(language_reporting::Location::new( seen_lines, byte_index - seen_bytes, @@ -53,18 +63,18 @@ impl language_reporting::ReportingFiles for Files { if seen_lines == 0 { Some(language_reporting::Location::new(0, byte_index)) } else { - None + panic!("byte index {} wasn't valid", byte_index); } } - fn line_span(&self, file: Self::FileId, lineno: usize) -> Option { + fn line_span(&self, _file: Self::FileId, lineno: usize) -> Option { let source = &self.snippet; let mut seen_lines = 0; let mut seen_bytes = 0; for (pos, _) in source.match_indices('\n') { if seen_lines == lineno { - return Some(Tag::from((seen_bytes, pos, file))); + return Some(Span::new(seen_bytes, pos + 1)); } else { seen_lines += 1; seen_bytes = pos + 1; @@ -72,18 +82,20 @@ impl language_reporting::ReportingFiles for Files { } if seen_lines == 0 { - Some(Tag::from((0, self.snippet.len() - 1, file))) + Some(Span::new(0, self.snippet.len() - 1)) } else { None } } - fn source(&self, tag: Self::Span) -> Option { - if tag.span.start > tag.span.end { + fn source(&self, span: Self::Span) -> Option { + trace!("source(tag={:?}) snippet={:?}", span, self.snippet); + + if span.start() > span.end() { return None; - } else if tag.span.end >= self.snippet.len() { + } else if span.end() > self.snippet.len() { return None; } - Some(tag.slice(&self.snippet).to_string()) + Some(span.slice(&self.snippet).to_string()) } } diff --git a/src/parser/parse/flag.rs b/src/parser/parse/flag.rs index 09d1e86337..28b6749f1c 100644 --- a/src/parser/parse/flag.rs +++ b/src/parser/parse/flag.rs @@ -1,4 +1,5 @@ -use crate::Tag; +use crate::parser::hir::syntax_shape::flat_shape::FlatShape; +use crate::{Span, Spanned, SpannedItem}; use derive_new::new; use getset::Getters; use serde::{Deserialize, Serialize}; @@ -12,6 +13,15 @@ pub enum FlagKind { #[derive(Debug, 
Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Getters, new)] #[get = "pub(crate)"] pub struct Flag { - kind: FlagKind, - name: Tag, + pub(crate) kind: FlagKind, + pub(crate) name: Span, +} + +impl Spanned { + pub fn color(&self) -> Spanned { + match self.item.kind { + FlagKind::Longhand => FlatShape::Flag.spanned(self.span), + FlagKind::Shorthand => FlatShape::ShorthandFlag.spanned(self.span), + } + } } diff --git a/src/parser/parse/operator.rs b/src/parser/parse/operator.rs index 82a04ed796..47c63075af 100644 --- a/src/parser/parse/operator.rs +++ b/src/parser/parse/operator.rs @@ -11,10 +11,11 @@ pub enum Operator { GreaterThan, LessThanOrEqual, GreaterThanOrEqual, + Dot, } -impl ToDebug for Operator { - fn fmt_debug(&self, f: &mut fmt::Formatter, _source: &str) -> fmt::Result { +impl FormatDebug for Operator { + fn fmt_debug(&self, f: &mut DebugFormatter, _source: &str) -> fmt::Result { write!(f, "{}", self.as_str()) } } @@ -32,6 +33,7 @@ impl Operator { Operator::GreaterThan => ">", Operator::LessThanOrEqual => "<=", Operator::GreaterThanOrEqual => ">=", + Operator::Dot => ".", } } } @@ -52,6 +54,7 @@ impl FromStr for Operator { ">" => Ok(Operator::GreaterThan), "<=" => Ok(Operator::LessThanOrEqual), ">=" => Ok(Operator::GreaterThanOrEqual), + "." => Ok(Operator::Dot), _ => Err(()), } } diff --git a/src/parser/parse/parser.rs b/src/parser/parse/parser.rs index 33903ba37c..0dd1bc8566 100644 --- a/src/parser/parse/parser.rs +++ b/src/parser/parse/parser.rs @@ -14,24 +14,49 @@ use nom::combinator::*; use nom::multi::*; use nom::sequence::*; +use derive_new::new; use log::trace; use nom::dbg; use nom::*; use nom::{AsBytes, FindSubstring, IResult, InputLength, InputTake, Slice}; use nom_locate::{position, LocatedSpanEx}; +use nom_tracable::{tracable_parser, HasTracableInfo, TracableInfo}; use serde::{Deserialize, Serialize}; use std::fmt::Debug; use std::str::FromStr; -use uuid::Uuid; -pub type NomSpan<'a> = LocatedSpanEx<&'a str, Uuid>; +pub type NomSpan<'a> = LocatedSpanEx<&'a str, TracableContext>; -pub fn nom_input(s: &str, anchor: Uuid) -> NomSpan<'_> { - LocatedSpanEx::new_extra(s, anchor) +#[derive(Debug, Clone, Copy, PartialEq, new)] +pub struct TracableContext { + pub(crate) info: TracableInfo, +} + +impl HasTracableInfo for TracableContext { + fn get_tracable_info(&self) -> TracableInfo { + self.info + } + + fn set_tracable_info(mut self, info: TracableInfo) -> Self { + TracableContext { info } + } +} + +impl std::ops::Deref for TracableContext { + type Target = TracableInfo; + + fn deref(&self) -> &TracableInfo { + &self.info + } +} + +pub fn nom_input(s: &str) -> NomSpan<'_> { + LocatedSpanEx::new_extra(s, TracableContext::new(TracableInfo::new())) } macro_rules! operator { ($name:tt : $token:tt ) => { + #[tracable_parser] pub fn $name(input: NomSpan) -> IResult { let start = input.offset; let (input, tag) = tag(stringify!($token))(input)?; @@ -39,7 +64,7 @@ macro_rules! operator { Ok(( input, - TokenTreeBuilder::tagged_op(tag.fragment, (start, end, input.extra)), + TokenTreeBuilder::spanned_op(tag.fragment, Span::new(start, end)), )) } }; @@ -51,25 +76,7 @@ operator! { gte: >= } operator! { lte: <= } operator! { eq: == } operator! 
{ neq: != } - -fn trace_step<'a, T: Debug>( - input: NomSpan<'a>, - name: &str, - block: impl FnOnce(NomSpan<'a>) -> IResult, T>, -) -> IResult, T> { - trace!(target: "nu::lite_parse", "+ before {} @ {:?}", name, input); - match block(input) { - Ok((input, result)) => { - trace!(target: "nu::lite_parse", "after {} @ {:?} -> {:?}", name, input, result); - Ok((input, result)) - } - - Err(e) => { - trace!(target: "nu::lite_parse", "- failed {} :: {:?}", name, e); - Err(e) - } - } -} +operator! { dot: . } #[derive(Debug, Clone, Eq, PartialEq, Hash, Ord, PartialOrd, Serialize, Deserialize)] pub enum Number { @@ -77,6 +84,15 @@ pub enum Number { Decimal(BigDecimal), } +impl std::fmt::Display for Number { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Number::Int(int) => write!(f, "{}", int), + Number::Decimal(decimal) => write!(f, "{}", decimal), + } + } +} + macro_rules! primitive_int { ($($ty:ty)*) => { $( @@ -148,540 +164,473 @@ impl Into for BigDecimal { } } -pub fn raw_number(input: NomSpan) -> IResult> { +#[tracable_parser] +pub fn number(input: NomSpan) -> IResult { + let (input, number) = raw_number(input)?; + + Ok(( + input, + TokenTreeBuilder::spanned_number(number.item, number.span), + )) +} + +#[tracable_parser] +pub fn raw_number(input: NomSpan) -> IResult> { let anchoral = input; let start = input.offset; - trace_step(input, "raw_decimal", move |input| { - let (input, neg) = opt(tag("-"))(input)?; - let (input, head) = digit1(input)?; - let dot: IResult = tag(".")(input); + let (input, neg) = opt(tag("-"))(input)?; + let (input, head) = digit1(input)?; - let input = match dot { - Ok((input, dot)) => input, + match input.fragment.chars().next() { + None => return Ok((input, RawNumber::int(Span::new(start, input.offset)))), + Some('.') => (), + other if is_boundary(other) => { + return Ok((input, RawNumber::int(Span::new(start, input.offset)))) + } + _ => { + return Err(nom::Err::Error(nom::error::make_error( + input, + nom::error::ErrorKind::Tag, + ))) + } + } - // it's just an integer - Err(_) => return Ok((input, RawNumber::int((start, input.offset, input.extra)))), - }; + let dot: IResult = tag(".")(input); - let (input, tail) = digit1(input)?; + let input = match dot { + Ok((input, dot)) => input, - let end = input.offset; + // it's just an integer + Err(_) => return Ok((input, RawNumber::int(Span::new(start, input.offset)))), + }; - Ok((input, RawNumber::decimal((start, end, input.extra)))) - }) + let (input, tail) = digit1(input)?; + + let end = input.offset; + + let next = input.fragment.chars().next(); + + if is_boundary(next) { + Ok((input, RawNumber::decimal(Span::new(start, end)))) + } else { + Err(nom::Err::Error(nom::error::make_error( + input, + nom::error::ErrorKind::Tag, + ))) + } } +#[tracable_parser] pub fn operator(input: NomSpan) -> IResult { - trace_step(input, "operator", |input| { - let (input, operator) = alt((gte, lte, neq, gt, lt, eq))(input)?; + let (input, operator) = alt((gte, lte, neq, gt, lt, eq))(input)?; - Ok((input, operator)) - }) + Ok((input, operator)) } +#[tracable_parser] pub fn dq_string(input: NomSpan) -> IResult { - trace_step(input, "dq_string", |input| { - let start = input.offset; - let (input, _) = char('"')(input)?; - let start1 = input.offset; - let (input, _) = many0(none_of("\""))(input)?; - let end1 = input.offset; - let (input, _) = char('"')(input)?; - let end = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_string((start1, end1, input.extra), (start, end, input.extra)), - )) - }) 
+ let start = input.offset; + let (input, _) = char('"')(input)?; + let start1 = input.offset; + let (input, _) = many0(none_of("\""))(input)?; + let end1 = input.offset; + let (input, _) = char('"')(input)?; + let end = input.offset; + Ok(( + input, + TokenTreeBuilder::spanned_string(Span::new(start1, end1), Span::new(start, end)), + )) } +#[tracable_parser] pub fn sq_string(input: NomSpan) -> IResult { - trace_step(input, "sq_string", move |input| { - let start = input.offset; - let (input, _) = char('\'')(input)?; - let start1 = input.offset; - let (input, _) = many0(none_of("\'"))(input)?; - let end1 = input.offset; - let (input, _) = char('\'')(input)?; - let end = input.offset; + let start = input.offset; + let (input, _) = char('\'')(input)?; + let start1 = input.offset; + let (input, _) = many0(none_of("\'"))(input)?; + let end1 = input.offset; + let (input, _) = char('\'')(input)?; + let end = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_string((start1, end1, input.extra), (start, end, input.extra)), - )) - }) + Ok(( + input, + TokenTreeBuilder::spanned_string(Span::new(start1, end1), Span::new(start, end)), + )) } +#[tracable_parser] pub fn string(input: NomSpan) -> IResult { - trace_step(input, "string", move |input| { - alt((sq_string, dq_string))(input) - }) + alt((sq_string, dq_string))(input) } +#[tracable_parser] pub fn external(input: NomSpan) -> IResult { - trace_step(input, "external", move |input| { - let start = input.offset; - let (input, _) = tag("^")(input)?; - let (input, bare) = take_while(is_bare_char)(input)?; - let end = input.offset; + let start = input.offset; + let (input, _) = tag("^")(input)?; + let (input, bare) = take_while(is_bare_char)(input)?; + let end = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_external(bare, (start, end, input.extra)), - )) - }) + Ok(( + input, + TokenTreeBuilder::spanned_external_command(bare, Span::new(start, end)), + )) } +#[tracable_parser] pub fn pattern(input: NomSpan) -> IResult { - trace_step(input, "bare", move |input| { - let start = input.offset; - let (input, _) = take_while1(is_start_glob_char)(input)?; - let (input, _) = take_while(is_glob_char)(input)?; + let start = input.offset; + let (input, _) = take_while1(is_start_glob_char)(input)?; + let (input, _) = take_while(is_glob_char)(input)?; - let next_char = &input.fragment.chars().nth(0); + let next_char = &input.fragment.chars().nth(0); - if let Some(next_char) = next_char { - if is_external_word_char(*next_char) { - return Err(nom::Err::Error(nom::error::make_error( - input, - nom::error::ErrorKind::TakeWhile1, - ))); - } + if let Some(next_char) = next_char { + if is_external_word_char(*next_char) { + return Err(nom::Err::Error(nom::error::make_error( + input, + nom::error::ErrorKind::TakeWhile1, + ))); } + } - let end = input.offset; + let end = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_pattern((start, end, input.extra)), - )) - }) + Ok(( + input, + TokenTreeBuilder::spanned_pattern(Span::new(start, end)), + )) } +#[tracable_parser] pub fn bare(input: NomSpan) -> IResult { - trace_step(input, "bare", move |input| { - let start = input.offset; - let (input, _) = take_while1(is_start_bare_char)(input)?; - let (input, _) = take_while(is_bare_char)(input)?; + let start = input.offset; + let (input, _) = take_while1(is_start_bare_char)(input)?; + let (input, last) = take_while(is_bare_char)(input)?; - let next_char = &input.fragment.chars().nth(0); + let next_char = &input.fragment.chars().nth(0); + let prev_char = 
last.fragment.chars().nth(0); - if let Some(next_char) = next_char { - if is_external_word_char(*next_char) || is_glob_specific_char(*next_char) { - return Err(nom::Err::Error(nom::error::make_error( - input, - nom::error::ErrorKind::TakeWhile1, - ))); - } + if let Some(next_char) = next_char { + if is_external_word_char(*next_char) || is_glob_specific_char(*next_char) { + return Err(nom::Err::Error(nom::error::make_error( + input, + nom::error::ErrorKind::TakeWhile1, + ))); } + } - let end = input.offset; + let end = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_bare((start, end, input.extra)), - )) - }) + Ok((input, TokenTreeBuilder::spanned_bare(Span::new(start, end)))) } +#[tracable_parser] pub fn external_word(input: NomSpan) -> IResult { - trace_step(input, "bare", move |input| { - let start = input.offset; - let (input, _) = take_while1(is_external_word_char)(input)?; - let end = input.offset; + let start = input.offset; + let (input, _) = take_while1(is_external_word_char)(input)?; + let end = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_external_word((start, end, input.extra)), - )) - }) + Ok(( + input, + TokenTreeBuilder::spanned_external_word(Span::new(start, end)), + )) } +#[tracable_parser] pub fn var(input: NomSpan) -> IResult { - trace_step(input, "var", move |input| { - let start = input.offset; - let (input, _) = tag("$")(input)?; - let (input, bare) = member(input)?; - let end = input.offset; + let start = input.offset; + let (input, _) = tag("$")(input)?; + let (input, bare) = ident(input)?; + let end = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_var(bare.tag(), (start, end, input.extra)), - )) - }) + Ok(( + input, + TokenTreeBuilder::spanned_var(bare, Span::new(start, end)), + )) } -pub fn member(input: NomSpan) -> IResult { - trace_step(input, "identifier", move |input| { - let start = input.offset; - let (input, _) = take_while1(is_id_start)(input)?; - let (input, _) = take_while(is_id_continue)(input)?; +#[tracable_parser] +pub fn ident(input: NomSpan) -> IResult { + let start = input.offset; + let (input, _) = take_while1(is_start_bare_char)(input)?; + let (input, _) = take_while(is_bare_char)(input)?; + let end = input.offset; - let end = input.offset; - - Ok(( - input, - TokenTreeBuilder::tagged_member((start, end, input.extra)), - )) - }) + Ok((input, Tag::from((start, end, None)))) } +#[tracable_parser] pub fn flag(input: NomSpan) -> IResult { - trace_step(input, "flag", move |input| { - let start = input.offset; - let (input, _) = tag("--")(input)?; - let (input, bare) = bare(input)?; - let end = input.offset; + let start = input.offset; + let (input, _) = tag("--")(input)?; + let (input, bare) = bare(input)?; + let end = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_flag(bare.tag(), (start, end, input.extra)), - )) - }) + Ok(( + input, + TokenTreeBuilder::spanned_flag(bare.span(), Span::new(start, end)), + )) } +#[tracable_parser] pub fn shorthand(input: NomSpan) -> IResult { - trace_step(input, "shorthand", move |input| { - let start = input.offset; - let (input, _) = tag("-")(input)?; - let (input, bare) = bare(input)?; - let end = input.offset; + let start = input.offset; + let (input, _) = tag("-")(input)?; + let (input, bare) = bare(input)?; + let end = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_shorthand(bare.tag(), (start, end, input.extra)), - )) - }) -} - -pub fn raw_unit(input: NomSpan) -> IResult> { - trace_step(input, "raw_unit", move |input| { - let start = input.offset; - let (input, 
unit) = alt(( - tag("B"), - tag("b"), - tag("KB"), - tag("kb"), - tag("Kb"), - tag("K"), - tag("k"), - tag("MB"), - tag("mb"), - tag("Mb"), - tag("GB"), - tag("gb"), - tag("Gb"), - tag("TB"), - tag("tb"), - tag("Tb"), - tag("PB"), - tag("pb"), - tag("Pb"), - ))(input)?; - let end = input.offset; - - Ok(( - input, - Unit::from(unit.fragment).tagged((start, end, input.extra)), - )) - }) -} - -pub fn size(input: NomSpan) -> IResult { - trace_step(input, "size", move |input| { - let mut is_size = false; - let start = input.offset; - let (input, number) = raw_number(input)?; - if let Ok((input, Some(size))) = opt(raw_unit)(input) { - let end = input.offset; - - // Check to make sure there is no trailing parseable characters - if let Ok((input, Some(extra))) = opt(bare)(input) { - return Err(nom::Err::Error((input, nom::error::ErrorKind::Char))); - } - - Ok(( - input, - TokenTreeBuilder::tagged_size((number.item, *size), (start, end, input.extra)), - )) - } else { - let end = input.offset; - - // Check to make sure there is no trailing parseable characters - if let Ok((input, Some(extra))) = opt(bare)(input) { - return Err(nom::Err::Error((input, nom::error::ErrorKind::Char))); - } - - Ok(( - input, - TokenTreeBuilder::tagged_number(number.item, number.tag), - )) - } - }) + Ok(( + input, + TokenTreeBuilder::spanned_shorthand(bare.span(), Span::new(start, end)), + )) } +#[tracable_parser] pub fn leaf(input: NomSpan) -> IResult { - trace_step(input, "leaf", move |input| { - let (input, node) = alt(( - size, - string, - operator, - flag, - shorthand, - var, - external, - bare, - pattern, - external_word, - ))(input)?; + let (input, node) = alt((number, string, operator, flag, shorthand, var, external))(input)?; - Ok((input, node)) - }) + Ok((input, node)) } -pub fn token_list(input: NomSpan) -> IResult> { - trace_step(input, "token_list", move |input| { - let (input, first) = node(input)?; - let (input, list) = many0(pair(space1, node))(input)?; +#[tracable_parser] +pub fn token_list(input: NomSpan) -> IResult>> { + let start = input.offset; + let (input, first) = node(input)?; - Ok((input, make_token_list(None, first, list, None))) - }) + let (input, mut list) = many0(pair(alt((whitespace, dot)), node))(input)?; + + let end = input.offset; + + Ok(( + input, + make_token_list(first, list, None).spanned(Span::new(start, end)), + )) } -pub fn spaced_token_list(input: NomSpan) -> IResult> { - trace_step(input, "spaced_token_list", move |input| { - let (input, sp_left) = opt(space1)(input)?; - let (input, first) = node(input)?; - let (input, list) = many0(pair(space1, node))(input)?; - let (input, sp_right) = opt(space1)(input)?; +#[tracable_parser] +pub fn spaced_token_list(input: NomSpan) -> IResult>> { + let start = input.offset; + let (input, pre_ws) = opt(whitespace)(input)?; + let (input, items) = token_list(input)?; + let (input, post_ws) = opt(whitespace)(input)?; + let end = input.offset; - Ok((input, make_token_list(sp_left, first, list, sp_right))) - }) + let mut out = vec![]; + + out.extend(pre_ws); + out.extend(items.item); + out.extend(post_ws); + + Ok((input, out.spanned(Span::new(start, end)))) } fn make_token_list( - sp_left: Option, - first: TokenNode, - list: Vec<(NomSpan, TokenNode)>, - sp_right: Option, + first: Vec, + list: Vec<(TokenNode, Vec)>, + sp_right: Option, ) -> Vec { let mut nodes = vec![]; - if let Some(sp_left) = sp_left { - nodes.push(TokenNode::Whitespace(Tag::from(sp_left))); - } + nodes.extend(first); - nodes.push(first); - - for (ws, token) in list { - 
nodes.push(TokenNode::Whitespace(Tag::from(ws))); - nodes.push(token); + for (left, right) in list { + nodes.push(left); + nodes.extend(right); } if let Some(sp_right) = sp_right { - nodes.push(TokenNode::Whitespace(Tag::from(sp_right))); + nodes.push(sp_right); } nodes } +#[tracable_parser] pub fn whitespace(input: NomSpan) -> IResult { - trace_step(input, "whitespace", move |input| { - let left = input.offset; - let (input, ws1) = space1(input)?; - let right = input.offset; + let left = input.offset; + let (input, ws1) = space1(input)?; + let right = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_ws((left, right, input.extra)), - )) - }) + Ok((input, TokenTreeBuilder::spanned_ws(Span::new(left, right)))) } -pub fn delimited_paren(input: NomSpan) -> IResult { - trace_step(input, "delimited_paren", move |input| { - let left = input.offset; - let (input, _) = char('(')(input)?; - let (input, ws1) = opt(whitespace)(input)?; - let (input, inner_items) = opt(token_list)(input)?; - let (input, ws2) = opt(whitespace)(input)?; - let (input, _) = char(')')(input)?; - let right = input.offset; +pub fn delimited( + input: NomSpan, + delimiter: Delimiter, +) -> IResult>)> { + let left = input.offset; + let (input, open_span) = tag(delimiter.open())(input)?; + let (input, inner_items) = opt(spaced_token_list)(input)?; + let (input, close_span) = tag(delimiter.close())(input)?; + let right = input.offset; - let mut items = vec![]; + let mut items = vec![]; - if let Some(space) = ws1 { - items.push(space); - } + if let Some(inner_items) = inner_items { + items.extend(inner_items.item); + } - if let Some(inner_items) = inner_items { - items.extend(inner_items); - } - - if let Some(space) = ws2 { - items.push(space); - } - - Ok(( - input, - TokenTreeBuilder::tagged_parens(items, (left, right, input.extra)), - )) - }) -} - -pub fn delimited_square(input: NomSpan) -> IResult { - trace_step(input, "delimited_paren", move |input| { - let left = input.offset; - let (input, _) = char('[')(input)?; - let (input, ws1) = opt(whitespace)(input)?; - let (input, inner_items) = opt(token_list)(input)?; - let (input, ws2) = opt(whitespace)(input)?; - let (input, _) = char(']')(input)?; - let right = input.offset; - - let mut items = vec![]; - - if let Some(space) = ws1 { - items.push(space); - } - - if let Some(inner_items) = inner_items { - items.extend(inner_items); - } - - if let Some(space) = ws2 { - items.push(space); - } - - Ok(( - input, - TokenTreeBuilder::tagged_square(items, (left, right, input.extra)), - )) - }) -} - -pub fn delimited_brace(input: NomSpan) -> IResult { - trace_step(input, "delimited_brace", move |input| { - let left = input.offset; - let (input, _) = char('{')(input)?; - let (input, _) = opt(space1)(input)?; - let (input, items) = opt(token_list)(input)?; - let (input, _) = opt(space1)(input)?; - let (input, _) = char('}')(input)?; - let right = input.offset; - - Ok(( - input, - TokenTreeBuilder::tagged_brace( - items.unwrap_or_else(|| vec![]), - (left, right, input.extra), - ), - )) - }) -} - -pub fn raw_call(input: NomSpan) -> IResult> { - trace_step(input, "raw_call", move |input| { - let left = input.offset; - let (input, items) = token_list(input)?; - let right = input.offset; - - Ok(( - input, - TokenTreeBuilder::tagged_call(items, (left, right, input.extra)), - )) - }) -} - -pub fn path(input: NomSpan) -> IResult { - trace_step(input, "path", move |input| { - let left = input.offset; - let (input, head) = node1(input)?; - let (input, _) = tag(".")(input)?; - let (input, 
tail) = separated_list(tag("."), alt((member, string)))(input)?; - let right = input.offset; - - Ok(( - input, - TokenTreeBuilder::tagged_path((head, tail), (left, right, input.extra)), - )) - }) -} - -pub fn node1(input: NomSpan) -> IResult { - trace_step(input, "node1", alt((leaf, delimited_paren))) -} - -pub fn node(input: NomSpan) -> IResult { - trace_step( + Ok(( input, - "node", - alt(( - path, - leaf, - delimited_paren, - delimited_brace, - delimited_square, - )), - ) + ( + Span::from(open_span), + Span::from(close_span), + items.spanned(Span::new(left, right)), + ), + )) } +#[tracable_parser] +pub fn delimited_paren(input: NomSpan) -> IResult { + let (input, (left, right, tokens)) = delimited(input, Delimiter::Paren)?; + + Ok(( + input, + TokenTreeBuilder::spanned_parens(tokens.item, (left, right), tokens.span), + )) +} + +#[tracable_parser] +pub fn delimited_square(input: NomSpan) -> IResult { + let (input, (left, right, tokens)) = delimited(input, Delimiter::Square)?; + + Ok(( + input, + TokenTreeBuilder::spanned_square(tokens.item, (left, right), tokens.span), + )) +} + +#[tracable_parser] +pub fn delimited_brace(input: NomSpan) -> IResult { + let (input, (left, right, tokens)) = delimited(input, Delimiter::Brace)?; + + Ok(( + input, + TokenTreeBuilder::spanned_square(tokens.item, (left, right), tokens.span), + )) +} + +#[tracable_parser] +pub fn raw_call(input: NomSpan) -> IResult> { + let left = input.offset; + let (input, items) = token_list(input)?; + let right = input.offset; + + Ok(( + input, + TokenTreeBuilder::spanned_call(items.item, Span::new(left, right)), + )) +} + +#[tracable_parser] +pub fn bare_path(input: NomSpan) -> IResult> { + let (input, head) = alt((bare, dot))(input)?; + + let (input, tail) = many0(alt((bare, dot, string)))(input)?; + + let next_char = &input.fragment.chars().nth(0); + + if is_boundary(*next_char) { + let mut result = vec![head]; + result.extend(tail); + + Ok((input, result)) + } else { + Err(nom::Err::Error(nom::error::make_error( + input, + nom::error::ErrorKind::Many0, + ))) + } +} + +#[tracable_parser] +pub fn pattern_path(input: NomSpan) -> IResult> { + let (input, head) = alt((pattern, dot))(input)?; + + let (input, tail) = many0(alt((pattern, dot, string)))(input)?; + + let next_char = &input.fragment.chars().nth(0); + + if is_boundary(*next_char) { + let mut result = vec![head]; + result.extend(tail); + + Ok((input, result)) + } else { + Err(nom::Err::Error(nom::error::make_error( + input, + nom::error::ErrorKind::Many0, + ))) + } +} + +#[tracable_parser] +pub fn node1(input: NomSpan) -> IResult { + alt((leaf, bare, pattern, external_word, delimited_paren))(input) +} + +#[tracable_parser] +pub fn node(input: NomSpan) -> IResult> { + alt(( + to_list(leaf), + bare_path, + pattern_path, + to_list(external_word), + to_list(delimited_paren), + to_list(delimited_brace), + to_list(delimited_square), + ))(input) +} + +fn to_list( + parser: impl Fn(NomSpan) -> IResult, +) -> impl Fn(NomSpan) -> IResult> { + move |input| { + let (input, next) = parser(input)?; + + Ok((input, vec![next])) + } +} + +#[tracable_parser] +pub fn nodes(input: NomSpan) -> IResult { + let (input, tokens) = token_list(input)?; + + Ok(( + input, + TokenTreeBuilder::spanned_token_list(tokens.item, tokens.span), + )) +} + +#[tracable_parser] pub fn pipeline(input: NomSpan) -> IResult { - trace_step(input, "pipeline", |input| { - let start = input.offset; - let (input, head) = opt(tuple((opt(space1), raw_call, opt(space1))))(input)?; - let (input, items) = trace_step( + 
let start = input.offset; + let (input, head) = spaced_token_list(input)?; + let (input, items) = many0(tuple((tag("|"), spaced_token_list)))(input)?; + + if input.input_len() != 0 { + return Err(Err::Error(error_position!( input, - "many0", - many0(tuple((tag("|"), opt(space1), raw_call, opt(space1)))), - )?; - - let (input, tail) = opt(space1)(input)?; - let (input, newline) = opt(multispace1)(input)?; - - if input.input_len() != 0 { - return Err(Err::Error(error_position!( - input, - nom::error::ErrorKind::Eof - ))); - } - - let end = input.offset; - - Ok(( - input, - TokenTreeBuilder::tagged_pipeline( - (make_call_list(head, items), tail.map(Tag::from)), - (start, end, input.extra), - ), - )) - }) -} - -fn make_call_list( - head: Option<(Option, Tagged, Option)>, - items: Vec<(NomSpan, Option, Tagged, Option)>, -) -> Vec { - let mut out = vec![]; - - if let Some(head) = head { - let el = PipelineElement::new(None, head.0.map(Tag::from), head.1, head.2.map(Tag::from)); - - out.push(el); + nom::error::ErrorKind::Eof + ))); } - for (pipe, ws1, call, ws2) in items { - let el = PipelineElement::new( - Some(pipe).map(Tag::from), - ws1.map(Tag::from), - call, - ws2.map(Tag::from), - ); + let end = input.offset; - out.push(el); - } + let head_span = head.span; + let mut all_items: Vec> = + vec![PipelineElement::new(None, head).spanned(head_span)]; - out + all_items.extend(items.into_iter().map(|(pipe, items)| { + let items_span = items.span; + PipelineElement::new(Some(Span::from(pipe)), items) + .spanned(Span::from(pipe).until(items_span)) + })); + + Ok(( + input, + TokenTreeBuilder::spanned_pipeline(all_items, Span::new(start, end)), + )) } fn int(frag: &str, neg: Option) -> i64 { @@ -693,9 +642,19 @@ fn int(frag: &str, neg: Option) -> i64 { } } +fn is_boundary(c: Option) -> bool { + match c { + None => true, + Some(')') | Some(']') | Some('}') => true, + Some(c) if c.is_whitespace() => true, + _ => false, + } +} + fn is_external_word_char(c: char) -> bool { match c { - ';' | '|' | '#' | '-' | '"' | '\'' | '$' | '(' | ')' | '[' | ']' | '{' | '}' | '`' => false, + ';' | '|' | '#' | '-' | '"' | '\'' | '$' | '(' | ')' | '[' | ']' | '{' | '}' | '`' + | '.' => false, other if other.is_whitespace() => false, _ => true, } @@ -707,7 +666,7 @@ fn is_glob_specific_char(c: char) -> bool { } fn is_start_glob_char(c: char) -> bool { - is_start_bare_char(c) || is_glob_specific_char(c) + is_start_bare_char(c) || is_glob_specific_char(c) || c == '.' } fn is_glob_char(c: char) -> bool { @@ -717,8 +676,7 @@ fn is_glob_char(c: char) -> bool { fn is_start_bare_char(c: char) -> bool { match c { '+' => false, - _ if c.is_alphabetic() => true, - '.' => true, + _ if c.is_alphanumeric() => true, '\\' => true, '/' => true, '_' => true, @@ -730,9 +688,8 @@ fn is_start_bare_char(c: char) -> bool { fn is_bare_char(c: char) -> bool { match c { - '+' => false, + '+' => true, _ if c.is_alphanumeric() => true, - '.' => true, '\\' => true, '/' => true, '_' => true, @@ -759,6 +716,16 @@ fn is_id_continue(c: char) -> bool { } } +fn is_member_start(c: char) -> bool { + match c { + '"' | '\'' => true, + '1'..='9' => true, + + other if is_id_start(other) => true, + _ => false, + } +} + #[cfg(test)] mod tests { use super::*; @@ -768,45 +735,10 @@ mod tests { pub type CurriedNode = Box T + 'static>; - macro_rules! assert_leaf { - (parsers [ $($name:tt)* ] $input:tt -> $left:tt .. 
$right:tt { $kind:tt $parens:tt } ) => { - $( - assert_eq!( - apply($name, stringify!($name), $input), - token(RawToken::$kind $parens, $left, $right) - ); - )* - - assert_eq!( - apply(leaf, "leaf", $input), - token(RawToken::$kind $parens, $left, $right) - ); - - assert_eq!( - apply(leaf, "leaf", $input), - token(RawToken::$kind $parens, $left, $right) - ); - - assert_eq!( - apply(node, "node", $input), - token(RawToken::$kind $parens, $left, $right) - ); - }; - - (parsers [ $($name:tt)* ] $input:tt -> $left:tt .. $right:tt { $kind:tt } ) => { - $( - assert_eq!( - apply($name, stringify!($name), $input), - token(RawToken::$kind, $left, $right) - ); - )* - } - } - macro_rules! equal_tokens { ($source:tt -> $tokens:expr) => { let result = apply(pipeline, "pipeline", $source); - let (expected_tree, expected_source) = TokenTreeBuilder::build(uuid::Uuid::nil(), $tokens); + let (expected_tree, expected_source) = TokenTreeBuilder::build($tokens); if result != expected_tree { let debug_result = format!("{}", result.debug($source)); @@ -823,356 +755,346 @@ mod tests { assert_eq!(debug_result, debug_expected) } } - - // apply(pipeline, "pipeline", r#"cargo +nightly run"#), - // build_token(b::pipeline(vec![( - // None, - // b::call( - // b::bare("cargo"), - // vec![ - // b::sp(), - // b::external_word("+nightly"), - // b::sp(), - // b::bare("run") - // ] - // ), - // None - // )])) }; + + (<$parser:tt> $source:tt -> $tokens:expr) => { + let result = apply($parser, stringify!($parser), $source); + let (expected_tree, expected_source) = TokenTreeBuilder::build($tokens); + + if result != expected_tree { + let debug_result = format!("{}", result.debug($source)); + let debug_expected = format!("{}", expected_tree.debug(&expected_source)); + + if debug_result == debug_expected { + assert_eq!( + result, expected_tree, + "NOTE: actual and expected had equivalent debug serializations, source={:?}, debug_expected={:?}", + $source, + debug_expected + ) + } else { + assert_eq!(debug_result, debug_expected) + } + } + }; + } #[test] fn test_integer() { - assert_leaf! { - parsers [ size ] - "123" -> 0..3 { Number(RawNumber::int((0, 3, test_uuid())).item) } + equal_tokens! { + + "123" -> b::token_list(vec![b::int(123)]) } - assert_leaf! { - parsers [ size ] - "-123" -> 0..4 { Number(RawNumber::int((0, 4, test_uuid())).item) } - } - } - - #[test] - fn test_size() { - assert_leaf! { - parsers [ size ] - "123MB" -> 0..5 { Size(RawNumber::int((0, 3, test_uuid())).item, Unit::MB) } - } - - assert_leaf! { - parsers [ size ] - "10GB" -> 0..4 { Size(RawNumber::int((0, 2, test_uuid())).item, Unit::GB) } + equal_tokens! { + + "-123" -> b::token_list(vec![b::int(-123)]) } } #[test] fn test_operator() { - assert_eq!(apply(node, "node", ">"), build_token(b::op(">"))); + equal_tokens! { + + ">" -> b::token_list(vec![b::op(">")]) + } - // assert_leaf! { - // parsers [ operator ] - // ">=" -> 0..2 { Operator(Operator::GreaterThanOrEqual) } - // } + equal_tokens! { + + ">=" -> b::token_list(vec![b::op(">=")]) + } - // assert_leaf! { - // parsers [ operator ] - // "<" -> 0..1 { Operator(Operator::LessThan) } - // } + equal_tokens! { + + "<" -> b::token_list(vec![b::op("<")]) + } - // assert_leaf! { - // parsers [ operator ] - // "<=" -> 0..2 { Operator(Operator::LessThanOrEqual) } - // } + equal_tokens! { + + "<=" -> b::token_list(vec![b::op("<=")]) + } - // assert_leaf! { - // parsers [ operator ] - // "==" -> 0..2 { Operator(Operator::Equal) } - // } + equal_tokens! 
{ + + "==" -> b::token_list(vec![b::op("==")]) + } - // assert_leaf! { - // parsers [ operator ] - // "!=" -> 0..2 { Operator(Operator::NotEqual) } - // } + equal_tokens! { + + "!=" -> b::token_list(vec![b::op("!=")]) + } } #[test] fn test_string() { - assert_leaf! { - parsers [ string dq_string ] - r#""hello world""# -> 0..13 { String(tag(1, 12)) } + equal_tokens! { + + r#""hello world""# -> b::token_list(vec![b::string("hello world")]) } - assert_leaf! { - parsers [ string sq_string ] - r"'hello world'" -> 0..13 { String(tag(1, 12)) } + equal_tokens! { + + r#"'hello world'"# -> b::token_list(vec![b::string("hello world")]) } } #[test] fn test_bare() { - assert_leaf! { - parsers [ bare ] - "hello" -> 0..5 { Bare } + equal_tokens! { + + "hello" -> b::token_list(vec![b::bare("hello")]) + } + } + + #[test] + fn test_unit_sizes() { + equal_tokens! { + + "450MB" -> b::token_list(vec![b::bare("450MB")]) + } + } + #[test] + fn test_simple_path() { + equal_tokens! { + + "chrome.exe" -> b::token_list(vec![b::bare("chrome"), b::op(Operator::Dot), b::bare("exe")]) } - assert_leaf! { - parsers [ bare ] - "chrome.exe" -> 0..10 { Bare } + equal_tokens! { + + ".azure" -> b::token_list(vec![b::op(Operator::Dot), b::bare("azure")]) } - assert_leaf! { - parsers [ bare ] - r"C:\windows\system.dll" -> 0..21 { Bare } + equal_tokens! { + + r"C:\windows\system.dll" -> b::token_list(vec![b::bare(r"C:\windows\system"), b::op(Operator::Dot), b::bare("dll")]) } - assert_leaf! { - parsers [ bare ] - r"C:\Code\-testing\my_tests.js" -> 0..28 { Bare } + equal_tokens! { + + r"C:\Code\-testing\my_tests.js" -> b::token_list(vec![b::bare(r"C:\Code\-testing\my_tests"), b::op(Operator::Dot), b::bare("js")]) } } #[test] fn test_flag() { - // assert_leaf! { - // parsers [ flag ] - // "--hello" -> 0..7 { Flag(Tagged::from_item(FlagKind::Longhand, span(2, 7))) } - // } + equal_tokens! { + + "--amigos" -> b::token_list(vec![b::flag("arepas")]) + } - // assert_leaf! { - // parsers [ flag ] - // "--hello-world" -> 0..13 { Flag(Tagged::from_item(FlagKind::Longhand, span(2, 13))) } - // } + equal_tokens! { + + "--all-amigos" -> b::token_list(vec![b::flag("all-amigos")]) + } } #[test] - fn test_shorthand() { - // assert_leaf! { - // parsers [ shorthand ] - // "-alt" -> 0..4 { Flag(Tagged::from_item(FlagKind::Shorthand, span(1, 4))) } - // } + fn test_shorthand_flag() { + equal_tokens! { + + "-katz" -> b::token_list(vec![b::shorthand("katz")]) + } } #[test] fn test_variable() { - assert_leaf! { - parsers [ var ] - "$it" -> 0..3 { Variable(tag(1, 3)) } + equal_tokens! { + + "$it" -> b::token_list(vec![b::var("it")]) } - assert_leaf! { - parsers [ var ] - "$name" -> 0..5 { Variable(tag(1, 5)) } + equal_tokens! { + + "$name" -> b::token_list(vec![b::var("name")]) } } #[test] fn test_external() { - assert_leaf! { - parsers [ external ] - "^ls" -> 0..3 { ExternalCommand(tag(1, 3)) } + equal_tokens! { + + "^ls" -> b::token_list(vec![b::external_command("ls")]) + } + } + + #[test] + fn test_dot_prefixed_name() { + equal_tokens! { + + ".azure" -> b::token_list(vec![b::op("."), b::bare("azure")]) } } #[test] fn test_delimited_paren() { - assert_eq!( - apply(node, "node", "(abc)"), - build_token(b::parens(vec![b::bare("abc")])) - ); + equal_tokens! { + + "(abc)" -> b::token_list(vec![b::parens(vec![b::bare("abc")])]) + } - assert_eq!( - apply(node, "node", "( abc )"), - build_token(b::parens(vec![b::ws(" "), b::bare("abc"), b::ws(" ")])) - ); + equal_tokens! 
{ + + "( abc )" -> b::token_list(vec![b::parens(vec![b::ws(" "), b::bare("abc"), b::ws(" ")])]) + } - assert_eq!( - apply(node, "node", "( abc def )"), - build_token(b::parens(vec![ - b::ws(" "), - b::bare("abc"), - b::sp(), - b::bare("def"), - b::sp() - ])) - ); + equal_tokens! { + + "( abc def )" -> b::token_list(vec![b::parens(vec![b::ws(" "), b::bare("abc"), b::sp(), b::bare("def"), b::sp()])]) + } - assert_eq!( - apply(node, "node", "( abc def 123 456GB )"), - build_token(b::parens(vec![ - b::ws(" "), - b::bare("abc"), - b::sp(), - b::bare("def"), - b::sp(), - b::int(123), - b::sp(), - b::size(456, "GB"), - b::sp() - ])) - ); + equal_tokens! { + + "( abc def 123 456GB )" -> b::token_list(vec![b::parens(vec![ + b::ws(" "), b::bare("abc"), b::sp(), b::bare("def"), b::sp(), b::int(123), b::sp(), b::bare("456GB"), b::sp() + ])]) + } } #[test] fn test_delimited_square() { - assert_eq!( - apply(node, "node", "[abc]"), - build_token(b::square(vec![b::bare("abc")])) - ); + equal_tokens! { + + "[abc]" -> b::token_list(vec![b::square(vec![b::bare("abc")])]) + } - assert_eq!( - apply(node, "node", "[ abc ]"), - build_token(b::square(vec![b::ws(" "), b::bare("abc"), b::ws(" ")])) - ); + equal_tokens! { + + "[ abc ]" -> b::token_list(vec![b::square(vec![b::ws(" "), b::bare("abc"), b::ws(" ")])]) + } - assert_eq!( - apply(node, "node", "[ abc def ]"), - build_token(b::square(vec![ - b::ws(" "), - b::bare("abc"), - b::sp(), - b::bare("def"), - b::sp() - ])) - ); + equal_tokens! { + + "[ abc def ]" -> b::token_list(vec![b::square(vec![b::ws(" "), b::bare("abc"), b::sp(), b::bare("def"), b::sp()])]) + } - assert_eq!( - apply(node, "node", "[ abc def 123 456GB ]"), - build_token(b::square(vec![ - b::ws(" "), - b::bare("abc"), - b::sp(), - b::bare("def"), - b::sp(), - b::int(123), - b::sp(), - b::size(456, "GB"), - b::sp() - ])) - ); + equal_tokens! { + + "[ abc def 123 456GB ]" -> b::token_list(vec![b::square(vec![ + b::ws(" "), b::bare("abc"), b::sp(), b::bare("def"), b::sp(), b::int(123), b::sp(), b::bare("456GB"), b::sp() + ])]) + } } #[test] fn test_path() { let _ = pretty_env_logger::try_init(); - assert_eq!( - apply(node, "node", "$it.print"), - build_token(b::path(b::var("it"), vec![b::member("print")])) - ); - assert_eq!( - apply(node, "node", "$head.part1.part2"), - build_token(b::path( - b::var("head"), - vec![b::member("part1"), b::member("part2")] - )) - ); + equal_tokens! { + + "$it.print" -> b::token_list(vec![b::var("it"), b::op("."), b::bare("print")]) + } - assert_eq!( - apply(node, "node", "( hello ).world"), - build_token(b::path( - b::parens(vec![b::sp(), b::bare("hello"), b::sp()]), - vec![b::member("world")] - )) - ); + equal_tokens! { + + "$it.0" -> b::token_list(vec![b::var("it"), b::op("."), b::int(0)]) + } - assert_eq!( - apply(node, "node", "( hello ).\"world\""), - build_token(b::path( - b::parens(vec![b::sp(), b::bare("hello"), b::sp()],), - vec![b::string("world")] - )) - ); + equal_tokens! { + + "$head.part1.part2" -> b::token_list(vec![b::var("head"), b::op("."), b::bare("part1"), b::op("."), b::bare("part2")]) + } + + equal_tokens! { + + "( hello ).world" -> b::token_list(vec![b::parens(vec![b::sp(), b::bare("hello"), b::sp()]), b::op("."), b::bare("world")]) + } + + equal_tokens! 
{ + + r#"( hello )."world""# -> b::token_list(vec![b::parens(vec![b::sp(), b::bare("hello"), b::sp()]), b::op("."), b::string("world")]) + } } #[test] fn test_nested_path() { - assert_eq!( - apply( - node, - "node", - "( $it.is.\"great news\".right yep $yep ).\"world\"" - ), - build_token(b::path( - b::parens(vec![ - b::sp(), - b::path( + equal_tokens! { + + r#"( $it.is."great news".right yep $yep )."world""# -> b::token_list( + vec![ + b::parens(vec![ + b::sp(), b::var("it"), - vec![b::member("is"), b::string("great news"), b::member("right")] - ), - b::sp(), - b::bare("yep"), - b::sp(), - b::var("yep"), - b::sp() - ]), - vec![b::string("world")] - )) - ) + b::op("."), + b::bare("is"), + b::op("."), + b::string("great news"), + b::op("."), + b::bare("right"), + b::sp(), + b::bare("yep"), + b::sp(), + b::var("yep"), + b::sp() + ]), + b::op("."), b::string("world")] + ) + } + + equal_tokens! { + + r#"$it."are PAS".0"# -> b::token_list( + vec![ + b::var("it"), + b::op("."), + b::string("are PAS"), + b::op("."), + b::int(0), + ] + ) + } } #[test] fn test_smoke_single_command() { - assert_eq!( - apply(raw_call, "raw_call", "git add ."), - build(b::call( - b::bare("git"), - vec![b::sp(), b::bare("add"), b::sp(), b::bare(".")] - )) - ); + equal_tokens! { + + "git add ." -> b::token_list(vec![b::bare("git"), b::sp(), b::bare("add"), b::sp(), b::op(".")]) + } - assert_eq!( - apply(raw_call, "raw_call", "open Cargo.toml"), - build(b::call( - b::bare("open"), - vec![b::sp(), b::bare("Cargo.toml")] - )) - ); + equal_tokens! { + + "open Cargo.toml" -> b::token_list(vec![b::bare("open"), b::sp(), b::bare("Cargo"), b::op("."), b::bare("toml")]) + } - assert_eq!( - apply(raw_call, "raw_call", "select package.version"), - build(b::call( - b::bare("select"), - vec![b::sp(), b::bare("package.version")] - )) - ); + equal_tokens! { + + "select package.version" -> b::token_list(vec![b::bare("select"), b::sp(), b::bare("package"), b::op("."), b::bare("version")]) + } - assert_eq!( - apply(raw_call, "raw_call", "echo $it"), - build(b::call(b::bare("echo"), vec![b::sp(), b::var("it")])) - ); + equal_tokens! { + + "echo $it" -> b::token_list(vec![b::bare("echo"), b::sp(), b::var("it")]) + } - assert_eq!( - apply(raw_call, "raw_call", "open Cargo.toml --raw"), - build(b::call( - b::bare("open"), - vec![b::sp(), b::bare("Cargo.toml"), b::sp(), b::flag("raw")] - )) - ); + equal_tokens! { + + "open Cargo.toml --raw" -> b::token_list(vec![b::bare("open"), b::sp(), b::bare("Cargo"), b::op("."), b::bare("toml"), b::sp(), b::flag("raw")]) + } - assert_eq!( - apply(raw_call, "raw_call", "open Cargo.toml -r"), - build(b::call( - b::bare("open"), - vec![b::sp(), b::bare("Cargo.toml"), b::sp(), b::shorthand("r")] - )) - ); + equal_tokens! { + + "open Cargo.toml -r" -> b::token_list(vec![b::bare("open"), b::sp(), b::bare("Cargo"), b::op("."), b::bare("toml"), b::sp(), b::shorthand("r")]) + } - assert_eq!( - apply(raw_call, "raw_call", "config --set tabs 2"), - build(b::call( - b::bare("config"), + equal_tokens! { + + "config --set tabs 2" -> b::token_list(vec![b::bare("config"), b::sp(), b::flag("set"), b::sp(), b::bare("tabs"), b::sp(), b::int(2)]) + } + + equal_tokens! 
{ + + "inc --patch package.version" -> b::token_list( vec![ + b::bare("inc"), b::sp(), - b::flag("set"), + b::flag("patch"), b::sp(), - b::bare("tabs"), - b::sp(), - b::int(2) + b::bare("package"), b::op("."), b::bare("version") ] - )) - ); + ) + } } #[test] @@ -1181,162 +1103,187 @@ mod tests { equal_tokens!( "cargo +nightly run" -> - b::pipeline(vec![( - None, - b::call( - b::bare("cargo"), - vec![ - b::sp(), - b::external_word("+nightly"), - b::sp(), - b::bare("run") - ] - ), - None - )]) + b::pipeline(vec![vec![ + b::bare("cargo"), + b::sp(), + b::external_word("+nightly"), + b::sp(), + b::bare("run") + ]]) ); equal_tokens!( "rm foo%bar" -> - b::pipeline(vec![( - None, - b::call(b::bare("rm"), vec![b::sp(), b::external_word("foo%bar"),]), - None - )]) + b::pipeline(vec![vec![ + b::bare("rm"), b::sp(), b::external_word("foo%bar") + ]]) ); equal_tokens!( "rm foo%bar" -> - b::pipeline(vec![( - None, - b::call(b::bare("rm"), vec![b::sp(), b::external_word("foo%bar"),]), - None - )]) + b::pipeline(vec![vec![ + b::bare("rm"), b::sp(), b::external_word("foo%bar"), + ]]) ); } #[test] - fn test_smoke_pipeline() { + fn test_pipeline() { let _ = pretty_env_logger::try_init(); - assert_eq!( - apply( - pipeline, - "pipeline", - r#"git branch --merged | split-row "`n" | where $it != "* master""# - ), - build_token(b::pipeline(vec![ - ( - None, - b::call( - b::bare("git"), - vec![b::sp(), b::bare("branch"), b::sp(), b::flag("merged")] - ), - Some(" ") - ), - ( - Some(" "), - b::call(b::bare("split-row"), vec![b::sp(), b::string("`n")]), - Some(" ") - ), - ( - Some(" "), - b::call( - b::bare("where"), - vec![ - b::sp(), - b::var("it"), - b::sp(), - b::op("!="), - b::sp(), - b::string("* master") - ] - ), - None - ) - ])) - ); - - assert_eq!( - apply(pipeline, "pipeline", "ls | where { $it.size > 100 }"), - build_token(b::pipeline(vec![ - (None, b::call(b::bare("ls"), vec![]), Some(" ")), - ( - Some(" "), - b::call( - b::bare("where"), - vec![ - b::sp(), - b::braced(vec![ - b::path(b::var("it"), vec![b::member("size")]), - b::sp(), - b::op(">"), - b::sp(), - b::int(100) - ]) - ] - ), - None - ) - ])) - ) - } - - fn apply( - f: impl Fn(NomSpan) -> Result<(NomSpan, T), nom::Err<(NomSpan, nom::error::ErrorKind)>>, - desc: &str, - string: &str, - ) -> T { - match f(NomSpan::new_extra(string, uuid::Uuid::nil())) { - Ok(v) => v.1, - Err(other) => { - println!("{:?}", other); - println!("for {} @ {}", string, desc); - panic!("No dice"); - } + equal_tokens! { + "sys | echo" -> b::pipeline(vec![ + vec![ + b::bare("sys"), b::sp() + ], + vec![ + b::sp(), b::bare("echo") + ] + ]) } } - fn tag(left: usize, right: usize) -> Tag { - Tag::from((left, right, uuid::Uuid::nil())) + #[test] + fn test_patterns() { + equal_tokens! { + + "cp ../formats/*" -> b::pipeline(vec![vec![b::bare("cp"), b::sp(), b::pattern("../formats/*")]]) + } + + equal_tokens! 
{ + + "cp * /dev/null" -> b::pipeline(vec![vec![b::bare("cp"), b::sp(), b::pattern("*"), b::sp(), b::bare("/dev/null")]]) + } + } + + #[test] + fn test_pseudo_paths() { + let _ = pretty_env_logger::try_init(); + + equal_tokens!( + + r#"sys | where cpu."max ghz" > 1"# -> b::pipeline(vec![ + vec![ + b::bare("sys"), b::sp() + ], + vec![ + b::sp(), + b::bare("where"), + b::sp(), + b::bare("cpu"), + b::op("."), + b::string("max ghz"), + b::sp(), + b::op(">"), + b::sp(), + b::int(1) + ]]) + ); + } + + // #[test] + // fn test_smoke_pipeline() { + // let _ = pretty_env_logger::try_init(); + + // assert_eq!( + // apply( + // pipeline, + // "pipeline", + // r#"git branch --merged | split-row "`n" | where $it != "* master""# + // ), + // build_token(b::pipeline(vec![ + // ( + // None, + // b::call( + // b::bare("git"), + // vec![b::sp(), b::bare("branch"), b::sp(), b::flag("merged")] + // ), + // Some(" ") + // ), + // ( + // Some(" "), + // b::call(b::bare("split-row"), vec![b::sp(), b::string("`n")]), + // Some(" ") + // ), + // ( + // Some(" "), + // b::call( + // b::bare("where"), + // vec![ + // b::sp(), + // b::var("it"), + // b::sp(), + // b::op("!="), + // b::sp(), + // b::string("* master") + // ] + // ), + // None + // ) + // ])) + // ); + + // assert_eq!( + // apply(pipeline, "pipeline", "ls | where { $it.size > 100 }"), + // build_token(b::pipeline(vec![ + // (None, b::call(b::bare("ls"), vec![]), Some(" ")), + // ( + // Some(" "), + // b::call( + // b::bare("where"), + // vec![ + // b::sp(), + // b::braced(vec![ + // b::path(b::var("it"), vec![b::member("size")]), + // b::sp(), + // b::op(">"), + // b::sp(), + // b::int(100) + // ]) + // ] + // ), + // None + // ) + // ])) + // ) + // } + + fn apply( + f: impl Fn(NomSpan) -> Result<(NomSpan, TokenNode), nom::Err<(NomSpan, nom::error::ErrorKind)>>, + desc: &str, + string: &str, + ) -> TokenNode { + f(nom_input(string)).unwrap().1 + } + + fn span((left, right): (usize, usize)) -> Span { + Span::new(left, right) } fn delimited( - delimiter: Tagged, + delimiter: Spanned, children: Vec, left: usize, right: usize, ) -> TokenNode { - let node = DelimitedNode::new(*delimiter, children); - let spanned = node.tagged((left, right, delimiter.tag.anchor)); + let start = Span::for_char(left); + let end = Span::for_char(right); + + let node = DelimitedNode::new(delimiter.item, (start, end), children); + let spanned = node.spanned(Span::new(left, right)); TokenNode::Delimited(spanned) } - fn path(head: TokenNode, tail: Vec, left: usize, right: usize) -> TokenNode { - let tag = head.tag(); - - let node = PathNode::new( - Box::new(head), - tail.into_iter().map(TokenNode::Token).collect(), - ); - let spanned = node.tagged((left, right, tag.anchor)); - TokenNode::Path(spanned) - } - fn token(token: RawToken, left: usize, right: usize) -> TokenNode { - TokenNode::Token(token.tagged((left, right, uuid::Uuid::nil()))) + TokenNode::Token(token.spanned(Span::new(left, right))) } fn build(block: CurriedNode) -> T { - let mut builder = TokenTreeBuilder::new(uuid::Uuid::nil()); + let mut builder = TokenTreeBuilder::new(); block(&mut builder) } fn build_token(block: CurriedToken) -> TokenNode { - TokenTreeBuilder::build(uuid::Uuid::nil(), block).0 - } - - fn test_uuid() -> uuid::Uuid { - uuid::Uuid::nil() + TokenTreeBuilder::build(block).0 } } diff --git a/src/parser/parse/pipeline.rs b/src/parser/parse/pipeline.rs index 42bbe23a18..c14f3745df 100644 --- a/src/parser/parse/pipeline.rs +++ b/src/parser/parse/pipeline.rs @@ -1,53 +1,40 @@ -use crate::parser::CallNode; 
-use crate::traits::ToDebug; -use crate::{Tag, Tagged}; +use crate::parser::TokenNode; +use crate::{DebugFormatter, FormatDebug, Span, Spanned, ToDebug}; use derive_new::new; use getset::Getters; -use std::fmt; +use itertools::Itertools; +use std::fmt::{self, Write}; -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, new)] +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Getters, new)] pub struct Pipeline { - pub(crate) parts: Vec, - pub(crate) post_ws: Option, + #[get = "pub"] + pub(crate) parts: Vec>, } -impl ToDebug for Pipeline { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { - for part in &self.parts { - write!(f, "{}", part.debug(source))?; - } - - if let Some(post_ws) = self.post_ws { - write!(f, "{}", post_ws.slice(source))? - } - - Ok(()) +impl FormatDebug for Pipeline { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + f.say_str( + "pipeline", + self.parts.iter().map(|p| p.debug(source)).join(" "), + ) } } #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)] pub struct PipelineElement { - pub pipe: Option, - pub pre_ws: Option, - #[get = "pub(crate)"] - call: Tagged, - pub post_ws: Option, + pub pipe: Option, + #[get = "pub"] + pub tokens: Spanned>, } -impl ToDebug for PipelineElement { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { +impl FormatDebug for PipelineElement { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { if let Some(pipe) = self.pipe { write!(f, "{}", pipe.slice(source))?; } - if let Some(pre_ws) = self.pre_ws { - write!(f, "{}", pre_ws.slice(source))?; - } - - write!(f, "{}", self.call.debug(source))?; - - if let Some(post_ws) = self.post_ws { - write!(f, "{}", post_ws.slice(source))?; + for token in &self.tokens.item { + write!(f, "{}", token.debug(source))?; } Ok(()) diff --git a/src/parser/parse/token_tree.rs b/src/parser/parse/token_tree.rs index e0072360e8..137b22be7d 100644 --- a/src/parser/parse/token_tree.rs +++ b/src/parser/parse/token_tree.rs @@ -1,9 +1,9 @@ -use crate::errors::ShellError; +use crate::errors::{ParseError, ShellError}; use crate::parser::parse::{call_node::*, flag::*, operator::*, pipeline::*, tokens::*}; +use crate::prelude::*; use crate::traits::ToDebug; -use crate::{Tag, Tagged, Text}; +use crate::{Tagged, Text}; use derive_new::new; -use enum_utils::FromStr; use getset::Getters; use std::fmt; @@ -11,20 +11,24 @@ use std::fmt; pub enum TokenNode { Token(Token), - Call(Tagged), - Delimited(Tagged), - Pipeline(Tagged), - Operator(Tagged), - Flag(Tagged), - Member(Tag), - Whitespace(Tag), + Call(Spanned), + Nodes(Spanned>), + Delimited(Spanned), + Pipeline(Spanned), + Flag(Spanned), + Whitespace(Span), - Error(Tagged>), - Path(Tagged), + Error(Spanned), } -impl ToDebug for TokenNode { - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { +impl HasSpan for TokenNode { + fn span(&self) -> Span { + self.get_span() + } +} + +impl FormatDebug for TokenNode { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { write!(f, "{:?}", self.old_debug(&Text::from(source))) } } @@ -78,48 +82,51 @@ impl fmt::Debug for DebugTokenNode<'_> { ) } TokenNode::Pipeline(pipeline) => write!(f, "{}", pipeline.debug(self.source)), - TokenNode::Error(s) => write!(f, " for {:?}", s.tag().slice(self.source)), - rest => write!(f, "{}", rest.tag().slice(self.source)), + TokenNode::Error(_) => write!(f, ""), + rest => write!(f, "{}", rest.span().slice(self.source)), } } } -impl 
From<&TokenNode> for Tag { - fn from(token: &TokenNode) -> Tag { - token.tag() +impl From<&TokenNode> for Span { + fn from(token: &TokenNode) -> Span { + token.get_span() } } impl TokenNode { - pub fn tag(&self) -> Tag { + pub fn get_span(&self) -> Span { match self { - TokenNode::Token(t) => t.tag(), - TokenNode::Call(s) => s.tag(), - TokenNode::Delimited(s) => s.tag(), - TokenNode::Pipeline(s) => s.tag(), - TokenNode::Operator(s) => s.tag(), - TokenNode::Flag(s) => s.tag(), - TokenNode::Member(s) => *s, + TokenNode::Token(t) => t.span, + TokenNode::Nodes(t) => t.span, + TokenNode::Call(s) => s.span, + TokenNode::Delimited(s) => s.span, + TokenNode::Pipeline(s) => s.span, + TokenNode::Flag(s) => s.span, TokenNode::Whitespace(s) => *s, - TokenNode::Error(s) => s.tag(), - TokenNode::Path(s) => s.tag(), + TokenNode::Error(s) => s.span, } } - pub fn type_name(&self) -> String { + pub fn type_name(&self) -> &'static str { match self { TokenNode::Token(t) => t.type_name(), + TokenNode::Nodes(_) => "nodes", TokenNode::Call(_) => "command", TokenNode::Delimited(d) => d.type_name(), TokenNode::Pipeline(_) => "pipeline", - TokenNode::Operator(_) => "operator", TokenNode::Flag(_) => "flag", - TokenNode::Member(_) => "member", TokenNode::Whitespace(_) => "whitespace", TokenNode::Error(_) => "error", - TokenNode::Path(_) => "path", } - .to_string() + } + + pub fn spanned_type_name(&self) -> Spanned<&'static str> { + self.type_name().spanned(self.span()) + } + + pub fn tagged_type_name(&self) -> Tagged<&'static str> { + self.type_name().tagged(self.span()) } pub fn old_debug<'a>(&'a self, source: &'a Text) -> DebugTokenNode<'a> { @@ -127,16 +134,26 @@ impl TokenNode { } pub fn as_external_arg(&self, source: &Text) -> String { - self.tag().slice(source).to_string() + self.span().slice(source).to_string() } pub fn source<'a>(&self, source: &'a Text) -> &'a str { - self.tag().slice(source) + self.span().slice(source) + } + + pub fn get_variable(&self) -> Result<(Span, Span), ShellError> { + match self { + TokenNode::Token(Spanned { + item: RawToken::Variable(inner_span), + span: outer_span, + }) => Ok((*outer_span, *inner_span)), + _ => Err(ShellError::type_error("variable", self.tagged_type_name())), + } } pub fn is_bare(&self) -> bool { match self { - TokenNode::Token(Tagged { + TokenNode::Token(Spanned { item: RawToken::Bare, .. }) => true, @@ -144,9 +161,74 @@ impl TokenNode { } } + pub fn is_string(&self) -> bool { + match self { + TokenNode::Token(Spanned { + item: RawToken::String(_), + .. + }) => true, + _ => false, + } + } + + pub fn is_number(&self) -> bool { + match self { + TokenNode::Token(Spanned { + item: RawToken::Number(_), + .. + }) => true, + _ => false, + } + } + + pub fn as_string(&self) -> Option<(Span, Span)> { + match self { + TokenNode::Token(Spanned { + item: RawToken::String(inner_span), + span: outer_span, + }) => Some((*outer_span, *inner_span)), + _ => None, + } + } + + pub fn is_pattern(&self) -> bool { + match self { + TokenNode::Token(Spanned { + item: RawToken::GlobPattern, + .. + }) => true, + _ => false, + } + } + + pub fn is_dot(&self) -> bool { + match self { + TokenNode::Token(Spanned { + item: RawToken::Operator(Operator::Dot), + .. 
+ }) => true, + _ => false, + } + } + + pub fn as_block(&self) -> Option<(Spanned<&[TokenNode]>, (Span, Span))> { + match self { + TokenNode::Delimited(Spanned { + item: + DelimitedNode { + delimiter, + children, + spans, + }, + span, + }) if *delimiter == Delimiter::Brace => Some(((&children[..]).spanned(*span), *spans)), + _ => None, + } + } + pub fn is_external(&self) -> bool { match self { - TokenNode::Token(Tagged { + TokenNode::Token(Spanned { item: RawToken::ExternalCommand(..), .. }) => true, @@ -154,20 +236,10 @@ impl TokenNode { } } - pub fn expect_external(&self) -> Tag { - match self { - TokenNode::Token(Tagged { - item: RawToken::ExternalCommand(tag), - .. - }) => *tag, - _ => panic!("Only call expect_external if you checked is_external first"), - } - } - - pub(crate) fn as_flag(&self, value: &str, source: &Text) -> Option> { + pub(crate) fn as_flag(&self, value: &str, source: &Text) -> Option> { match self { TokenNode::Flag( - flag @ Tagged { + flag @ Spanned { item: Flag { .. }, .. }, ) if value == flag.name().slice(source) => Some(*flag), @@ -175,10 +247,17 @@ impl TokenNode { } } - pub fn as_pipeline(&self) -> Result { + pub fn as_pipeline(&self) -> Result { match self { - TokenNode::Pipeline(Tagged { item, .. }) => Ok(item.clone()), - _ => Err(ShellError::string("unimplemented")), + TokenNode::Pipeline(Spanned { item, .. }) => Ok(item.clone()), + other => Err(ParseError::mismatch("pipeline", other.tagged_type_name())), + } + } + + pub fn is_whitespace(&self) -> bool { + match self { + TokenNode::Whitespace(_) => true, + _ => false, } } } @@ -186,8 +265,9 @@ impl TokenNode { #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)] #[get = "pub(crate)"] pub struct DelimitedNode { - delimiter: Delimiter, - children: Vec, + pub(crate) delimiter: Delimiter, + pub(crate) spans: (Span, Span), + pub(crate) children: Vec, } impl DelimitedNode { @@ -200,16 +280,107 @@ impl DelimitedNode { } } -#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, FromStr)] +#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)] pub enum Delimiter { Paren, Brace, Square, } +impl Delimiter { + pub(crate) fn open(&self) -> &'static str { + match self { + Delimiter::Paren => "(", + Delimiter::Brace => "{", + Delimiter::Square => "[", + } + } + + pub(crate) fn close(&self) -> &'static str { + match self { + Delimiter::Paren => ")", + Delimiter::Brace => "}", + Delimiter::Square => "]", + } + } +} + #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)] #[get = "pub(crate)"] pub struct PathNode { head: Box, tail: Vec, } + +#[cfg(test)] +impl TokenNode { + pub fn expect_external(&self) -> Span { + match self { + TokenNode::Token(Spanned { + item: RawToken::ExternalCommand(span), + .. 
+ }) => *span, + other => panic!( + "Only call expect_external if you checked is_external first, found {:?}", + other + ), + } + } + + pub fn expect_string(&self) -> (Span, Span) { + match self { + TokenNode::Token(Spanned { + item: RawToken::String(inner_span), + span: outer_span, + }) => (*outer_span, *inner_span), + other => panic!("Expected string, found {:?}", other), + } + } + + pub fn expect_list(&self) -> Spanned<&[TokenNode]> { + match self { + TokenNode::Nodes(token_nodes) => token_nodes[..].spanned(token_nodes.span), + other => panic!("Expected list, found {:?}", other), + } + } + + pub fn expect_pattern(&self) -> Span { + match self { + TokenNode::Token(Spanned { + item: RawToken::GlobPattern, + span: outer_span, + }) => *outer_span, + other => panic!("Expected pattern, found {:?}", other), + } + } + + pub fn expect_var(&self) -> (Span, Span) { + match self { + TokenNode::Token(Spanned { + item: RawToken::Variable(inner_span), + span: outer_span, + }) => (*outer_span, *inner_span), + other => panic!("Expected var, found {:?}", other), + } + } + + pub fn expect_dot(&self) -> Span { + match self { + TokenNode::Token(Spanned { + item: RawToken::Operator(Operator::Dot), + span, + }) => *span, + other => panic!("Expected dot, found {:?}", other), + } + } + + pub fn expect_bare(&self) -> Span { + match self { + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => *span, + other => panic!("Expected bare, found {:?}", other), + } + } +} diff --git a/src/parser/parse/token_tree_builder.rs b/src/parser/parse/token_tree_builder.rs index 9a2e6ab721..7146a3c201 100644 --- a/src/parser/parse/token_tree_builder.rs +++ b/src/parser/parse/token_tree_builder.rs @@ -3,12 +3,10 @@ use crate::prelude::*; use crate::parser::parse::flag::{Flag, FlagKind}; use crate::parser::parse::operator::Operator; use crate::parser::parse::pipeline::{Pipeline, PipelineElement}; -use crate::parser::parse::token_tree::{DelimitedNode, Delimiter, PathNode, TokenNode}; +use crate::parser::parse::token_tree::{DelimitedNode, Delimiter, TokenNode}; use crate::parser::parse::tokens::{RawNumber, RawToken}; -use crate::parser::parse::unit::Unit; use crate::parser::CallNode; use derive_new::new; -use uuid::Uuid; #[derive(new)] pub struct TokenTreeBuilder { @@ -17,74 +15,86 @@ pub struct TokenTreeBuilder { #[new(default)] output: String, - - anchor: Uuid, } pub type CurriedToken = Box TokenNode + 'static>; -pub type CurriedCall = Box Tagged + 'static>; +pub type CurriedCall = Box Spanned + 'static>; impl TokenTreeBuilder { - pub fn build(anchor: Uuid, block: impl FnOnce(&mut Self) -> TokenNode) -> (TokenNode, String) { - let mut builder = TokenTreeBuilder::new(anchor); + pub fn build(block: impl FnOnce(&mut Self) -> TokenNode) -> (TokenNode, String) { + let mut builder = TokenTreeBuilder::new(); let node = block(&mut builder); (node, builder.output) } - pub fn pipeline(input: Vec<(Option<&str>, CurriedCall, Option<&str>)>) -> CurriedToken { - let input: Vec<(Option, CurriedCall, Option)> = input - .into_iter() - .map(|(pre, call, post)| { - ( - pre.map(|s| s.to_string()), - call, - post.map(|s| s.to_string()), - ) - }) - .collect(); + fn build_spanned( + &mut self, + callback: impl FnOnce(&mut TokenTreeBuilder) -> T, + ) -> Spanned { + let start = self.pos; + let ret = callback(self); + let end = self.pos; + ret.spanned(Span::new(start, end)) + } + + pub fn pipeline(input: Vec>) -> CurriedToken { Box::new(move |b| { let start = b.pos; - let mut out: Vec = vec![]; + let mut out: Vec> = vec![]; let mut input = 
input.into_iter().peekable(); - let (pre, call, post) = input + let head = input .next() .expect("A pipeline must contain at least one element"); let pipe = None; - let pre_tag = pre.map(|pre| b.consume_tag(&pre)); - let call = call(b); - let post_tag = post.map(|post| b.consume_tag(&post)); + let head = b.build_spanned(|b| head.into_iter().map(|node| node(b)).collect()); - out.push(PipelineElement::new(pipe, pre_tag, call, post_tag)); + let head_span: Span = head.span; + out.push(PipelineElement::new(pipe, head).spanned(head_span)); loop { match input.next() { None => break, - Some((pre, call, post)) => { - let pipe = Some(b.consume_tag("|")); - let pre_span = pre.map(|pre| b.consume_tag(&pre)); - let call = call(b); - let post_span = post.map(|post| b.consume_tag(&post)); + Some(node) => { + let start = b.pos; + let pipe = Some(b.consume_span("|")); + let node = + b.build_spanned(|b| node.into_iter().map(|node| node(b)).collect()); + let end = b.pos; - out.push(PipelineElement::new(pipe, pre_span, call, post_span)); + out.push(PipelineElement::new(pipe, node).spanned(Span::new(start, end))); } } } let end = b.pos; - TokenTreeBuilder::tagged_pipeline((out, None), (start, end, b.anchor)) + TokenTreeBuilder::spanned_pipeline(out, Span::new(start, end)) }) } - pub fn tagged_pipeline( - input: (Vec, Option), - tag: impl Into, + pub fn spanned_pipeline( + input: Vec>, + span: impl Into, ) -> TokenNode { - TokenNode::Pipeline(Pipeline::new(input.0, input.1.into()).tagged(tag.into())) + TokenNode::Pipeline(Pipeline::new(input).spanned(span)) + } + + pub fn token_list(input: Vec) -> CurriedToken { + Box::new(move |b| { + let start = b.pos; + let tokens = input.into_iter().map(|i| i(b)).collect(); + let end = b.pos; + + TokenTreeBuilder::spanned_token_list(tokens, Span::new(start, end)) + }) + } + + pub fn spanned_token_list(input: Vec, span: impl Into) -> TokenNode { + TokenNode::Nodes(input.spanned(span.into())) } pub fn op(input: impl Into) -> CurriedToken { @@ -95,12 +105,12 @@ impl TokenTreeBuilder { b.pos = end; - TokenTreeBuilder::tagged_op(input, (start, end, b.anchor)) + TokenTreeBuilder::spanned_op(input, Span::new(start, end)) }) } - pub fn tagged_op(input: impl Into, tag: impl Into) -> TokenNode { - TokenNode::Operator(input.into().tagged(tag.into())) + pub fn spanned_op(input: impl Into, span: impl Into) -> TokenNode { + TokenNode::Token(RawToken::Operator(input.into()).spanned(span.into())) } pub fn string(input: impl Into) -> CurriedToken { @@ -112,15 +122,15 @@ impl TokenTreeBuilder { let (_, end) = b.consume("\""); b.pos = end; - TokenTreeBuilder::tagged_string( - (inner_start, inner_end, b.anchor), - (start, end, b.anchor), + TokenTreeBuilder::spanned_string( + Span::new(inner_start, inner_end), + Span::new(start, end), ) }) } - pub fn tagged_string(input: impl Into, tag: impl Into) -> TokenNode { - TokenNode::Token(RawToken::String(input.into()).tagged(tag.into())) + pub fn spanned_string(input: impl Into, span: impl Into) -> TokenNode { + TokenNode::Token(RawToken::String(input.into()).spanned(span.into())) } pub fn bare(input: impl Into) -> CurriedToken { @@ -130,12 +140,12 @@ impl TokenTreeBuilder { let (start, end) = b.consume(&input); b.pos = end; - TokenTreeBuilder::tagged_bare((start, end, b.anchor)) + TokenTreeBuilder::spanned_bare(Span::new(start, end)) }) } - pub fn tagged_bare(tag: impl Into) -> TokenNode { - TokenNode::Token(RawToken::Bare.tagged(tag.into())) + pub fn spanned_bare(span: impl Into) -> TokenNode { + TokenNode::Token(RawToken::Bare.spanned(span)) } pub 
fn pattern(input: impl Into) -> CurriedToken { @@ -145,12 +155,12 @@ impl TokenTreeBuilder { let (start, end) = b.consume(&input); b.pos = end; - TokenTreeBuilder::tagged_pattern((start, end, b.anchor)) + TokenTreeBuilder::spanned_pattern(Span::new(start, end)) }) } - pub fn tagged_pattern(input: impl Into) -> TokenNode { - TokenNode::Token(RawToken::GlobPattern.tagged(input.into())) + pub fn spanned_pattern(input: impl Into) -> TokenNode { + TokenNode::Token(RawToken::GlobPattern.spanned(input.into())) } pub fn external_word(input: impl Into) -> CurriedToken { @@ -160,16 +170,31 @@ impl TokenTreeBuilder { let (start, end) = b.consume(&input); b.pos = end; - TokenTreeBuilder::tagged_external_word((start, end, b.anchor)) + TokenTreeBuilder::spanned_external_word(Span::new(start, end)) }) } - pub fn tagged_external_word(input: impl Into) -> TokenNode { - TokenNode::Token(RawToken::ExternalWord.tagged(input.into())) + pub fn spanned_external_word(input: impl Into) -> TokenNode { + TokenNode::Token(RawToken::ExternalWord.spanned(input.into())) } - pub fn tagged_external(input: impl Into, tag: impl Into) -> TokenNode { - TokenNode::Token(RawToken::ExternalCommand(input.into()).tagged(tag.into())) + pub fn external_command(input: impl Into) -> CurriedToken { + let input = input.into(); + + Box::new(move |b| { + let (outer_start, _) = b.consume("^"); + let (inner_start, end) = b.consume(&input); + b.pos = end; + + TokenTreeBuilder::spanned_external_command( + Span::new(inner_start, end), + Span::new(outer_start, end), + ) + }) + } + + pub fn spanned_external_command(inner: impl Into, outer: impl Into) -> TokenNode { + TokenNode::Token(RawToken::ExternalCommand(inner.into()).spanned(outer.into())) } pub fn int(input: impl Into) -> CurriedToken { @@ -179,9 +204,9 @@ impl TokenTreeBuilder { let (start, end) = b.consume(&int.to_string()); b.pos = end; - TokenTreeBuilder::tagged_number( - RawNumber::Int((start, end, b.anchor).into()), - (start, end, b.anchor), + TokenTreeBuilder::spanned_number( + RawNumber::Int(Span::new(start, end)), + Span::new(start, end), ) }) } @@ -193,63 +218,15 @@ impl TokenTreeBuilder { let (start, end) = b.consume(&decimal.to_string()); b.pos = end; - TokenTreeBuilder::tagged_number( - RawNumber::Decimal((start, end, b.anchor).into()), - (start, end, b.anchor), + TokenTreeBuilder::spanned_number( + RawNumber::Decimal(Span::new(start, end)), + Span::new(start, end), ) }) } - pub fn tagged_number(input: impl Into, tag: impl Into) -> TokenNode { - TokenNode::Token(RawToken::Number(input.into()).tagged(tag.into())) - } - - pub fn size(int: impl Into, unit: impl Into) -> CurriedToken { - let int = int.into(); - let unit = unit.into(); - - Box::new(move |b| { - let (start_int, end_int) = b.consume(&int.to_string()); - let (_, end_unit) = b.consume(unit.as_str()); - b.pos = end_unit; - - TokenTreeBuilder::tagged_size( - (RawNumber::Int((start_int, end_int, b.anchor).into()), unit), - (start_int, end_unit, b.anchor), - ) - }) - } - - pub fn tagged_size( - input: (impl Into, impl Into), - tag: impl Into, - ) -> TokenNode { - let (int, unit) = (input.0.into(), input.1.into()); - - TokenNode::Token(RawToken::Size(int, unit).tagged(tag.into())) - } - - pub fn path(head: CurriedToken, tail: Vec) -> CurriedToken { - Box::new(move |b| { - let start = b.pos; - let head = head(b); - - let mut output = vec![]; - - for item in tail { - b.consume("."); - - output.push(item(b)); - } - - let end = b.pos; - - TokenTreeBuilder::tagged_path((head, output), (start, end, b.anchor)) - }) - } - - pub fn 
tagged_path(input: (TokenNode, Vec), tag: impl Into) -> TokenNode { - TokenNode::Path(PathNode::new(Box::new(input.0), input.1).tagged(tag.into())) + pub fn spanned_number(input: impl Into, span: impl Into) -> TokenNode { + TokenNode::Token(RawToken::Number(input.into()).spanned(span.into())) } pub fn var(input: impl Into) -> CurriedToken { @@ -259,12 +236,12 @@ impl TokenTreeBuilder { let (start, _) = b.consume("$"); let (inner_start, end) = b.consume(&input); - TokenTreeBuilder::tagged_var((inner_start, end, b.anchor), (start, end, b.anchor)) + TokenTreeBuilder::spanned_var(Span::new(inner_start, end), Span::new(start, end)) }) } - pub fn tagged_var(input: impl Into, tag: impl Into) -> TokenNode { - TokenNode::Token(RawToken::Variable(input.into()).tagged(tag.into())) + pub fn spanned_var(input: impl Into, span: impl Into) -> TokenNode { + TokenNode::Token(RawToken::Variable(input.into()).spanned(span.into())) } pub fn flag(input: impl Into) -> CurriedToken { @@ -274,12 +251,12 @@ impl TokenTreeBuilder { let (start, _) = b.consume("--"); let (inner_start, end) = b.consume(&input); - TokenTreeBuilder::tagged_flag((inner_start, end, b.anchor), (start, end, b.anchor)) + TokenTreeBuilder::spanned_flag(Span::new(inner_start, end), Span::new(start, end)) }) } - pub fn tagged_flag(input: impl Into, tag: impl Into) -> TokenNode { - TokenNode::Flag(Flag::new(FlagKind::Longhand, input.into()).tagged(tag.into())) + pub fn spanned_flag(input: impl Into, span: impl Into) -> TokenNode { + TokenNode::Flag(Flag::new(FlagKind::Longhand, input.into()).spanned(span.into())) } pub fn shorthand(input: impl Into) -> CurriedToken { @@ -289,25 +266,12 @@ impl TokenTreeBuilder { let (start, _) = b.consume("-"); let (inner_start, end) = b.consume(&input); - TokenTreeBuilder::tagged_shorthand((inner_start, end, b.anchor), (start, end, b.anchor)) + TokenTreeBuilder::spanned_shorthand((inner_start, end), (start, end)) }) } - pub fn tagged_shorthand(input: impl Into, tag: impl Into) -> TokenNode { - TokenNode::Flag(Flag::new(FlagKind::Shorthand, input.into()).tagged(tag.into())) - } - - pub fn member(input: impl Into) -> CurriedToken { - let input = input.into(); - - Box::new(move |b| { - let (start, end) = b.consume(&input); - TokenTreeBuilder::tagged_member((start, end, b.anchor)) - }) - } - - pub fn tagged_member(tag: impl Into) -> TokenNode { - TokenNode::Member(tag.into()) + pub fn spanned_shorthand(input: impl Into, span: impl Into) -> TokenNode { + TokenNode::Flag(Flag::new(FlagKind::Shorthand, input.into()).spanned(span.into())) } pub fn call(head: CurriedToken, input: Vec) -> CurriedCall { @@ -323,11 +287,11 @@ impl TokenTreeBuilder { let end = b.pos; - TokenTreeBuilder::tagged_call(nodes, (start, end, b.anchor)) + TokenTreeBuilder::spanned_call(nodes, Span::new(start, end)) }) } - pub fn tagged_call(input: Vec, tag: impl Into) -> Tagged { + pub fn spanned_call(input: Vec, span: impl Into) -> Spanned { if input.len() == 0 { panic!("BUG: spanned call (TODO)") } @@ -337,67 +301,88 @@ impl TokenTreeBuilder { let head = input.next().unwrap(); let tail = input.collect(); - CallNode::new(Box::new(head), tail).tagged(tag.into()) + CallNode::new(Box::new(head), tail).spanned(span.into()) + } + + fn consume_delimiter( + &mut self, + input: Vec, + _open: &str, + _close: &str, + ) -> (Span, Span, Span, Vec) { + let (start_open_paren, end_open_paren) = self.consume("("); + let mut output = vec![]; + for item in input { + output.push(item(self)); + } + + let (start_close_paren, end_close_paren) = self.consume(")"); + + 
let open = Span::new(start_open_paren, end_open_paren); + let close = Span::new(start_close_paren, end_close_paren); + let whole = Span::new(start_open_paren, end_close_paren); + + (open, close, whole, output) } pub fn parens(input: Vec) -> CurriedToken { Box::new(move |b| { - let (start, _) = b.consume("("); - let mut output = vec![]; - for item in input { - output.push(item(b)); - } + let (open, close, whole, output) = b.consume_delimiter(input, "(", ")"); - let (_, end) = b.consume(")"); - - TokenTreeBuilder::tagged_parens(output, (start, end, b.anchor)) + TokenTreeBuilder::spanned_parens(output, (open, close), whole) }) } - pub fn tagged_parens(input: impl Into>, tag: impl Into) -> TokenNode { - TokenNode::Delimited(DelimitedNode::new(Delimiter::Paren, input.into()).tagged(tag.into())) + pub fn spanned_parens( + input: impl Into>, + spans: (Span, Span), + span: impl Into, + ) -> TokenNode { + TokenNode::Delimited( + DelimitedNode::new(Delimiter::Paren, spans, input.into()).spanned(span.into()), + ) } pub fn square(input: Vec) -> CurriedToken { Box::new(move |b| { - let (start, _) = b.consume("["); - let mut output = vec![]; - for item in input { - output.push(item(b)); - } + let (open, close, whole, tokens) = b.consume_delimiter(input, "[", "]"); - let (_, end) = b.consume("]"); - - TokenTreeBuilder::tagged_square(output, (start, end, b.anchor)) + TokenTreeBuilder::spanned_square(tokens, (open, close), whole) }) } - pub fn tagged_square(input: impl Into>, tag: impl Into) -> TokenNode { - TokenNode::Delimited(DelimitedNode::new(Delimiter::Square, input.into()).tagged(tag.into())) + pub fn spanned_square( + input: impl Into>, + spans: (Span, Span), + span: impl Into, + ) -> TokenNode { + TokenNode::Delimited( + DelimitedNode::new(Delimiter::Square, spans, input.into()).spanned(span.into()), + ) } pub fn braced(input: Vec) -> CurriedToken { Box::new(move |b| { - let (start, _) = b.consume("{ "); - let mut output = vec![]; - for item in input { - output.push(item(b)); - } + let (open, close, whole, tokens) = b.consume_delimiter(input, "{", "}"); - let (_, end) = b.consume(" }"); - - TokenTreeBuilder::tagged_brace(output, (start, end, b.anchor)) + TokenTreeBuilder::spanned_brace(tokens, (open, close), whole) }) } - pub fn tagged_brace(input: impl Into>, tag: impl Into) -> TokenNode { - TokenNode::Delimited(DelimitedNode::new(Delimiter::Brace, input.into()).tagged(tag.into())) + pub fn spanned_brace( + input: impl Into>, + spans: (Span, Span), + span: impl Into, + ) -> TokenNode { + TokenNode::Delimited( + DelimitedNode::new(Delimiter::Brace, spans, input.into()).spanned(span.into()), + ) } pub fn sp() -> CurriedToken { Box::new(|b| { let (start, end) = b.consume(" "); - TokenNode::Whitespace(Tag::from((start, end, b.anchor))) + TokenNode::Whitespace(Span::new(start, end)) }) } @@ -406,12 +391,12 @@ impl TokenTreeBuilder { Box::new(move |b| { let (start, end) = b.consume(&input); - TokenTreeBuilder::tagged_ws((start, end, b.anchor)) + TokenTreeBuilder::spanned_ws(Span::new(start, end)) }) } - pub fn tagged_ws(tag: impl Into) -> TokenNode { - TokenNode::Whitespace(tag.into()) + pub fn spanned_ws(span: impl Into) -> TokenNode { + TokenNode::Whitespace(span.into()) } fn consume(&mut self, input: &str) -> (usize, usize) { @@ -421,10 +406,10 @@ impl TokenTreeBuilder { (start, self.pos) } - fn consume_tag(&mut self, input: &str) -> Tag { + fn consume_span(&mut self, input: &str) -> Span { let start = self.pos; self.pos += input.len(); self.output.push_str(input); - (start, self.pos, 
self.anchor).into() + Span::new(start, self.pos) } } diff --git a/src/parser/parse/tokens.rs b/src/parser/parse/tokens.rs index d796a8fcb7..43ce7f405b 100644 --- a/src/parser/parse/tokens.rs +++ b/src/parser/parse/tokens.rs @@ -1,38 +1,62 @@ -use crate::parser::parse::unit::*; +use crate::parser::Operator; use crate::prelude::*; -use crate::{Tagged, Text}; +use crate::Text; use std::fmt; use std::str::FromStr; #[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] pub enum RawToken { Number(RawNumber), - Size(RawNumber, Unit), - String(Tag), - Variable(Tag), - ExternalCommand(Tag), + Operator(Operator), + String(Span), + Variable(Span), + ExternalCommand(Span), ExternalWord, GlobPattern, Bare, } +impl RawToken { + pub fn type_name(&self) -> &'static str { + match self { + RawToken::Number(_) => "number", + RawToken::Operator(..) => "operator", + RawToken::String(_) => "string", + RawToken::Variable(_) => "variable", + RawToken::ExternalCommand(_) => "syntax error", + RawToken::ExternalWord => "syntax error", + RawToken::GlobPattern => "glob pattern", + RawToken::Bare => "string", + } + } +} + #[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] pub enum RawNumber { - Int(Tag), - Decimal(Tag), + Int(Span), + Decimal(Span), +} + +impl FormatDebug for RawNumber { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + match self { + RawNumber::Int(span) => f.say_str("int", span.slice(source)), + RawNumber::Decimal(span) => f.say_str("decimal", span.slice(source)), + } + } } impl RawNumber { - pub fn int(tag: impl Into) -> Tagged { - let tag = tag.into(); + pub fn int(span: impl Into) -> Spanned { + let span = span.into(); - RawNumber::Int(tag).tagged(tag) + RawNumber::Int(span).spanned(span) } - pub fn decimal(tag: impl Into) -> Tagged { - let tag = tag.into(); + pub fn decimal(span: impl Into) -> Spanned { + let span = span.into(); - RawNumber::Decimal(tag).tagged(tag) + RawNumber::Decimal(span).spanned(span) } pub(crate) fn to_number(self, source: &Text) -> Number { @@ -45,22 +69,7 @@ impl RawNumber { } } -impl RawToken { - pub fn type_name(&self) -> &'static str { - match self { - RawToken::Number(_) => "Number", - RawToken::Size(..) 
=> "Size", - RawToken::String(_) => "String", - RawToken::Variable(_) => "Variable", - RawToken::ExternalCommand(_) => "ExternalCommand", - RawToken::ExternalWord => "ExternalWord", - RawToken::GlobPattern => "GlobPattern", - RawToken::Bare => "String", - } - } -} - -pub type Token = Tagged; +pub type Token = Spanned; impl Token { pub fn debug<'a>(&self, source: &'a Text) -> DebugToken<'a> { @@ -69,6 +78,76 @@ impl Token { source, } } + + pub fn extract_number(&self) -> Option> { + match self.item { + RawToken::Number(number) => Some(number.spanned(self.span)), + _ => None, + } + } + + pub fn extract_int(&self) -> Option<(Span, Span)> { + match self.item { + RawToken::Number(RawNumber::Int(int)) => Some((int, self.span)), + _ => None, + } + } + + pub fn extract_decimal(&self) -> Option<(Span, Span)> { + match self.item { + RawToken::Number(RawNumber::Decimal(decimal)) => Some((decimal, self.span)), + _ => None, + } + } + + pub fn extract_operator(&self) -> Option> { + match self.item { + RawToken::Operator(operator) => Some(operator.spanned(self.span)), + _ => None, + } + } + + pub fn extract_string(&self) -> Option<(Span, Span)> { + match self.item { + RawToken::String(span) => Some((span, self.span)), + _ => None, + } + } + + pub fn extract_variable(&self) -> Option<(Span, Span)> { + match self.item { + RawToken::Variable(span) => Some((span, self.span)), + _ => None, + } + } + + pub fn extract_external_command(&self) -> Option<(Span, Span)> { + match self.item { + RawToken::ExternalCommand(span) => Some((span, self.span)), + _ => None, + } + } + + pub fn extract_external_word(&self) -> Option { + match self.item { + RawToken::ExternalWord => Some(self.span), + _ => None, + } + } + + pub fn extract_glob_pattern(&self) -> Option { + match self.item { + RawToken::GlobPattern => Some(self.span), + _ => None, + } + } + + pub fn extract_bare(&self) -> Option { + match self.item { + RawToken::Bare => Some(self.span), + _ => None, + } + } } pub struct DebugToken<'a> { @@ -78,6 +157,6 @@ pub struct DebugToken<'a> { impl fmt::Debug for DebugToken<'_> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.node.tag().slice(self.source)) + write!(f, "{}", self.node.span.slice(self.source)) } } diff --git a/src/parser/parse/unit.rs b/src/parser/parse/unit.rs index aa19580ac2..e98a5cdbcb 100644 --- a/src/parser/parse/unit.rs +++ b/src/parser/parse/unit.rs @@ -1,47 +1,99 @@ use crate::data::base::Value; use crate::prelude::*; use serde::{Deserialize, Serialize}; +use std::fmt; use std::str::FromStr; +use std::time::Duration; +use std::time::SystemTime; #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)] pub enum Unit { - B, - KB, - MB, - GB, - TB, - PB, + // Filesize units + Byte, + Kilobyte, + Megabyte, + Gigabyte, + Terabyte, + Petabyte, + + // Duration units + Second, + Minute, + Hour, + Day, + Week, + Month, + Year, +} + +fn convert_number_to_u64(number: &Number) -> u64 { + match number { + Number::Int(big_int) => big_int.to_u64().unwrap(), + Number::Decimal(big_decimal) => big_decimal.to_u64().unwrap(), + } +} + +impl FormatDebug for Spanned { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + write!(f, "{}", self.span.slice(source)) + } } impl Unit { pub fn as_str(&self) -> &str { match *self { - Unit::B => "B", - Unit::KB => "KB", - Unit::MB => "MB", - Unit::GB => "GB", - Unit::TB => "TB", - Unit::PB => "PB", + Unit::Byte => "B", + Unit::Kilobyte => "KB", + Unit::Megabyte => "MB", + Unit::Gigabyte => 
"GB", + Unit::Terabyte => "TB", + Unit::Petabyte => "PB", + Unit::Second => "s", + Unit::Minute => "m", + Unit::Hour => "h", + Unit::Day => "d", + Unit::Week => "w", + Unit::Month => "M", + Unit::Year => "y", } } pub(crate) fn compute(&self, size: &Number) -> Value { let size = size.clone(); - Value::number(match self { - Unit::B => size, - Unit::KB => size * 1024, - Unit::MB => size * 1024 * 1024, - Unit::GB => size * 1024 * 1024 * 1024, - Unit::TB => size * 1024 * 1024 * 1024 * 1024, - Unit::PB => size * 1024 * 1024 * 1024 * 1024 * 1024, - }) - } -} - -impl From<&str> for Unit { - fn from(input: &str) -> Unit { - Unit::from_str(input).unwrap() + match self { + Unit::Byte => Value::number(size), + Unit::Kilobyte => Value::number(size * 1024), + Unit::Megabyte => Value::number(size * 1024 * 1024), + Unit::Gigabyte => Value::number(size * 1024 * 1024 * 1024), + Unit::Terabyte => Value::number(size * 1024 * 1024 * 1024 * 1024), + Unit::Petabyte => Value::number(size * 1024 * 1024 * 1024 * 1024 * 1024), + Unit::Second => Value::system_date( + SystemTime::now() - Duration::from_secs(convert_number_to_u64(&size)), + ), + Unit::Minute => Value::system_date( + SystemTime::now() - Duration::from_secs(60 * convert_number_to_u64(&size)), + ), + Unit::Hour => Value::system_date( + SystemTime::now() - Duration::from_secs(60 * 60 * convert_number_to_u64(&size)), + ), + Unit::Day => Value::system_date( + SystemTime::now() + - Duration::from_secs(24 * 60 * 60 * convert_number_to_u64(&size)), + ), + Unit::Week => Value::system_date( + SystemTime::now() + - Duration::from_secs(7 * 24 * 60 * 60 * convert_number_to_u64(&size)), + ), + Unit::Month => Value::system_date( + SystemTime::now() + - Duration::from_secs(30 * 24 * 60 * 60 * convert_number_to_u64(&size)), + ), + Unit::Year => Value::system_date( + SystemTime::now() + - Duration::from_secs(365 * 24 * 60 * 60 * convert_number_to_u64(&size)), + ), + } } } @@ -49,12 +101,19 @@ impl FromStr for Unit { type Err = (); fn from_str(input: &str) -> Result::Err> { match input { - "B" | "b" => Ok(Unit::B), - "KB" | "kb" | "Kb" | "K" | "k" => Ok(Unit::KB), - "MB" | "mb" | "Mb" => Ok(Unit::MB), - "GB" | "gb" | "Gb" => Ok(Unit::GB), - "TB" | "tb" | "Tb" => Ok(Unit::TB), - "PB" | "pb" | "Pb" => Ok(Unit::PB), + "B" | "b" => Ok(Unit::Byte), + "KB" | "kb" | "Kb" | "K" | "k" => Ok(Unit::Kilobyte), + "MB" | "mb" | "Mb" => Ok(Unit::Megabyte), + "GB" | "gb" | "Gb" => Ok(Unit::Gigabyte), + "TB" | "tb" | "Tb" => Ok(Unit::Terabyte), + "PB" | "pb" | "Pb" => Ok(Unit::Petabyte), + "s" => Ok(Unit::Second), + "m" => Ok(Unit::Minute), + "h" => Ok(Unit::Hour), + "d" => Ok(Unit::Day), + "w" => Ok(Unit::Week), + "M" => Ok(Unit::Month), + "y" => Ok(Unit::Year), _ => Err(()), } } diff --git a/src/parser/parse_command.rs b/src/parser/parse_command.rs index 36ba82f8e5..cb5111179b 100644 --- a/src/parser/parse_command.rs +++ b/src/parser/parse_command.rs @@ -1,179 +1,144 @@ -use crate::context::Context; -use crate::errors::{ArgumentError, ShellError}; +use crate::errors::{ArgumentError, ParseError}; +use crate::parser::hir::syntax_shape::{ + color_fallible_syntax, color_syntax, expand_expr, flat_shape::FlatShape, spaced, + BackoffColoringMode, ColorSyntax, MaybeSpaceShape, +}; use crate::parser::registry::{NamedType, PositionalType, Signature}; -use crate::parser::{baseline_parse_tokens, CallNode}; +use crate::parser::TokensIterator; use crate::parser::{ - hir::{self, NamedArguments}, - Flag, RawToken, TokenNode, + hir::{self, ExpandContext, NamedArguments}, + Flag, }; use 
crate::traits::ToDebug; -use crate::{Tag, Tagged, TaggedItem, Text}; +use crate::{Span, Spanned, Tag, Text}; use log::trace; -pub fn parse_command( +pub fn parse_command_tail( config: &Signature, - context: &Context, - call: &Tagged, - source: &Text, -) -> Result { - let Tagged { item: raw_call, .. } = call; - - trace!("Processing {:?}", config); - - let head = parse_command_head(call.head())?; - - let children: Option> = raw_call.children().as_ref().map(|nodes| { - nodes - .iter() - .cloned() - .filter(|node| match node { - TokenNode::Whitespace(_) => false, - _ => true, - }) - .collect() - }); - - match parse_command_tail(&config, context, children, source, call.tag())? { - None => Ok(hir::Call::new(Box::new(head), None, None)), - Some((positional, named)) => Ok(hir::Call::new(Box::new(head), positional, named)), - } -} - -fn parse_command_head(head: &TokenNode) -> Result { - match head { - TokenNode::Token( - spanned @ Tagged { - item: RawToken::Bare, - .. - }, - ) => Ok(spanned.map(|_| hir::RawExpression::Literal(hir::Literal::Bare))), - - TokenNode::Token(Tagged { - item: RawToken::String(inner_tag), - tag, - }) => Ok(hir::RawExpression::Literal(hir::Literal::String(*inner_tag)).tagged(*tag)), - - other => Err(ShellError::unexpected(&format!( - "command head -> {:?}", - other - ))), - } -} - -fn parse_command_tail( - config: &Signature, - context: &Context, - tail: Option>, - source: &Text, - command_tag: Tag, -) -> Result>, Option)>, ShellError> { - let tail = &mut match &tail { - None => hir::TokensIterator::new(&[]), - Some(tail) => hir::TokensIterator::new(tail), - }; - + context: &ExpandContext, + tail: &mut TokensIterator, + command_span: Span, +) -> Result>, Option)>, ParseError> { let mut named = NamedArguments::new(); - - trace_remaining("nodes", tail.clone(), source); + trace_remaining("nodes", &tail, context.source()); for (name, kind) in &config.named { trace!(target: "nu::parse", "looking for {} : {:?}", name, kind); - match kind { + match &kind.0 { NamedType::Switch => { - let flag = extract_switch(name, tail, source); + let flag = extract_switch(name, tail, context.source()); named.insert_switch(name, flag); } NamedType::Mandatory(syntax_type) => { - match extract_mandatory(config, name, tail, source, command_tag) { + match extract_mandatory(config, name, tail, context.source(), command_span) { Err(err) => return Err(err), // produce a correct diagnostic Ok((pos, flag)) => { tail.move_to(pos); if tail.at_end() { - return Err(ShellError::argument_error( + return Err(ParseError::argument_error( config.name.clone(), ArgumentError::MissingValueForName(name.to_string()), - flag.tag(), + flag.span, )); } - let expr = - hir::baseline_parse_next_expr(tail, context, source, *syntax_type)?; + let expr = expand_expr(&spaced(*syntax_type), tail, context)?; tail.restart(); named.insert_mandatory(name, expr); } } } - NamedType::Optional(syntax_type) => match extract_optional(name, tail, source) { - Err(err) => return Err(err), // produce a correct diagnostic - Ok(Some((pos, flag))) => { - tail.move_to(pos); + NamedType::Optional(syntax_type) => { + match extract_optional(name, tail, context.source()) { + Err(err) => return Err(err), // produce a correct diagnostic + Ok(Some((pos, flag))) => { + tail.move_to(pos); - if tail.at_end() { - return Err(ShellError::argument_error( - config.name.clone(), - ArgumentError::MissingValueForName(name.to_string()), - flag.tag(), - )); + if tail.at_end() { + return Err(ParseError::argument_error( + config.name.clone(), + 
ArgumentError::MissingValueForName(name.to_string()), + flag.span, + )); + } + + let expr = expand_expr(&spaced(*syntax_type), tail, context); + + match expr { + Err(_) => named.insert_optional(name, None), + Ok(expr) => named.insert_optional(name, Some(expr)), + } + + tail.restart(); } - let expr = hir::baseline_parse_next_expr(tail, context, source, *syntax_type)?; - - tail.restart(); - named.insert_optional(name, Some(expr)); + Ok(None) => { + tail.restart(); + named.insert_optional(name, None); + } } - - Ok(None) => { - tail.restart(); - named.insert_optional(name, None); - } - }, + } }; } - trace_remaining("after named", tail.clone(), source); + trace_remaining("after named", &tail, context.source()); let mut positional = vec![]; for arg in &config.positional { - trace!("Processing positional {:?}", arg); + trace!(target: "nu::parse", "Processing positional {:?}", arg); - match arg { + match &arg.0 { PositionalType::Mandatory(..) => { - if tail.len() == 0 { - return Err(ShellError::argument_error( + if tail.at_end_possible_ws() { + return Err(ParseError::argument_error( config.name.clone(), - ArgumentError::MissingMandatoryPositional(arg.name().to_string()), - command_tag, + ArgumentError::MissingMandatoryPositional(arg.0.name().to_string()), + Tag { + span: command_span, + anchor: None, + }, )); } } PositionalType::Optional(..) => { - if tail.len() == 0 { + if tail.at_end_possible_ws() { break; } } } - let result = hir::baseline_parse_next_expr(tail, context, source, arg.syntax_type())?; + let result = expand_expr(&spaced(arg.0.syntax_type()), tail, context)?; positional.push(result); } - trace_remaining("after positional", tail.clone(), source); + trace_remaining("after positional", &tail, context.source()); - if let Some(syntax_type) = config.rest_positional { - let remainder = baseline_parse_tokens(tail, context, source, syntax_type)?; - positional.extend(remainder); + if let Some((syntax_type, _)) = config.rest_positional { + let mut out = vec![]; + + loop { + if tail.at_end_possible_ws() { + break; + } + + let next = expand_expr(&spaced(syntax_type), tail, context)?; + + out.push(next); + } + + positional.extend(out); } - trace_remaining("after rest", tail.clone(), source); + trace_remaining("after rest", &tail, context.source()); - trace!("Constructed positional={:?} named={:?}", positional, named); + trace!(target: "nu::parse", "Constructed positional={:?} named={:?}", positional, named); let positional = if positional.len() == 0 { None @@ -189,11 +154,428 @@ fn parse_command_tail( Some(named) }; - trace!("Normalized positional={:?} named={:?}", positional, named); + trace!(target: "nu::parse", "Normalized positional={:?} named={:?}", positional, named); Ok(Some((positional, named))) } +#[derive(Debug)] +struct ColoringArgs { + vec: Vec>>>, +} + +impl ColoringArgs { + fn new(len: usize) -> ColoringArgs { + let vec = vec![None; len]; + ColoringArgs { vec } + } + + fn insert(&mut self, pos: usize, shapes: Vec>) { + self.vec[pos] = Some(shapes); + } + + fn spread_shapes(self, shapes: &mut Vec>) { + for item in self.vec { + match item { + None => {} + Some(vec) => { + shapes.extend(vec); + } + } + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct CommandTailShape; + +#[cfg(not(coloring_in_tokens))] +impl ColorSyntax for CommandTailShape { + type Info = (); + type Input = Signature; + + fn color_syntax<'a, 'b>( + &self, + signature: &Signature, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Self::Info { + let mut args = 
ColoringArgs::new(token_nodes.len()); + for (name, kind) in &signature.named { + trace!(target: "nu::color_syntax", "looking for {} : {:?}", name, kind); + + match &kind.0 { + NamedType::Switch => { + match token_nodes.extract(|t| t.as_flag(name, context.source())) { + Some((pos, flag)) => args.insert(pos, vec![flag.color()]), + None => {} + } + } + NamedType::Mandatory(syntax_type) => { + match extract_mandatory( + signature, + name, + token_nodes, + context.source(), + Span::unknown(), + ) { + Err(_) => { + // The mandatory flag didn't exist at all, so there's nothing to color + } + Ok((pos, flag)) => { + let mut shapes = vec![flag.color()]; + token_nodes.move_to(pos); + + if token_nodes.at_end() { + args.insert(pos, shapes); + token_nodes.restart(); + continue; + } + + // We can live with unmatched syntax after a mandatory flag + let _ = token_nodes.atomic(|token_nodes| { + color_syntax(&MaybeSpaceShape, token_nodes, context, &mut shapes); + + // If the part after a mandatory flag isn't present, that's ok, but we + // should roll back any whitespace we chomped + color_fallible_syntax( + syntax_type, + token_nodes, + context, + &mut shapes, + ) + }); + + args.insert(pos, shapes); + token_nodes.restart(); + } + } + } + NamedType::Optional(syntax_type) => { + match extract_optional(name, token_nodes, context.source()) { + Err(_) => { + // The optional flag didn't exist at all, so there's nothing to color + } + Ok(Some((pos, flag))) => { + let mut shapes = vec![flag.color()]; + token_nodes.move_to(pos); + + if token_nodes.at_end() { + args.insert(pos, shapes); + token_nodes.restart(); + continue; + } + + // We can live with unmatched syntax after an optional flag + let _ = token_nodes.atomic(|token_nodes| { + color_syntax(&MaybeSpaceShape, token_nodes, context, &mut shapes); + + // If the part after a mandatory flag isn't present, that's ok, but we + // should roll back any whitespace we chomped + color_fallible_syntax( + syntax_type, + token_nodes, + context, + &mut shapes, + ) + }); + + args.insert(pos, shapes); + token_nodes.restart(); + } + + Ok(None) => { + token_nodes.restart(); + } + } + } + }; + } + + for arg in &signature.positional { + trace!("Processing positional {:?}", arg); + + match arg.0 { + PositionalType::Mandatory(..) => { + if token_nodes.at_end() { + break; + } + } + + PositionalType::Optional(..) => { + if token_nodes.at_end() { + break; + } + } + } + + let mut shapes = vec![]; + let pos = token_nodes.pos(false); + + match pos { + None => break, + Some(pos) => { + // We can live with an unmatched positional argument. 
Hopefully it will be + // matched by a future token + let _ = token_nodes.atomic(|token_nodes| { + color_syntax(&MaybeSpaceShape, token_nodes, context, &mut shapes); + + // If no match, we should roll back any whitespace we chomped + color_fallible_syntax( + &arg.0.syntax_type(), + token_nodes, + context, + &mut shapes, + )?; + + args.insert(pos, shapes); + + Ok(()) + }); + } + } + } + + if let Some((syntax_type, _)) = signature.rest_positional { + loop { + if token_nodes.at_end_possible_ws() { + break; + } + + let pos = token_nodes.pos(false); + + match pos { + None => break, + Some(pos) => { + let mut shapes = vec![]; + + // If any arguments don't match, we'll fall back to backoff coloring mode + let result = token_nodes.atomic(|token_nodes| { + color_syntax(&MaybeSpaceShape, token_nodes, context, &mut shapes); + + // If no match, we should roll back any whitespace we chomped + color_fallible_syntax(&syntax_type, token_nodes, context, &mut shapes)?; + + args.insert(pos, shapes); + + Ok(()) + }); + + match result { + Err(_) => break, + Ok(_) => continue, + } + } + } + } + } + + args.spread_shapes(shapes); + + // Consume any remaining tokens with backoff coloring mode + color_syntax(&BackoffColoringMode, token_nodes, context, shapes); + + shapes.sort_by(|a, b| a.span.start().cmp(&b.span.start())); + } +} + +#[cfg(coloring_in_tokens)] +impl ColorSyntax for CommandTailShape { + type Info = (); + type Input = Signature; + + fn name(&self) -> &'static str { + "CommandTailShape" + } + + fn color_syntax<'a, 'b>( + &self, + signature: &Signature, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Self::Info { + let mut args = ColoringArgs::new(token_nodes.len()); + trace_remaining("nodes", &token_nodes, context.source()); + + for (name, kind) in &signature.named { + trace!(target: "nu::color_syntax", "looking for {} : {:?}", name, kind); + + match &kind.0 { + NamedType::Switch => { + match token_nodes.extract(|t| t.as_flag(name, context.source())) { + Some((pos, flag)) => args.insert(pos, vec![flag.color()]), + None => {} + } + } + NamedType::Mandatory(syntax_type) => { + match extract_mandatory( + signature, + name, + token_nodes, + context.source(), + Span::unknown(), + ) { + Err(_) => { + // The mandatory flag didn't exist at all, so there's nothing to color + } + Ok((pos, flag)) => { + let (_, shapes) = token_nodes.atomic_returning_shapes(|token_nodes| { + token_nodes.color_shape(flag.color()); + token_nodes.move_to(pos); + + if token_nodes.at_end() { + return Ok(()); + } + + // We still want to color the flag even if the following tokens don't match, so don't + // propagate the error to the parent atomic block if it fails + let _ = token_nodes.atomic(|token_nodes| { + // We can live with unmatched syntax after a mandatory flag + color_syntax(&MaybeSpaceShape, token_nodes, context); + + // If the part after a mandatory flag isn't present, that's ok, but we + // should roll back any whitespace we chomped + color_fallible_syntax(syntax_type, token_nodes, context)?; + + Ok(()) + }); + + Ok(()) + }); + + args.insert(pos, shapes); + token_nodes.restart(); + } + } + } + NamedType::Optional(syntax_type) => { + match extract_optional(name, token_nodes, context.source()) { + Err(_) => { + // The optional flag didn't exist at all, so there's nothing to color + } + Ok(Some((pos, flag))) => { + let (_, shapes) = token_nodes.atomic_returning_shapes(|token_nodes| { + token_nodes.color_shape(flag.color()); + token_nodes.move_to(pos); + + if token_nodes.at_end() { + return Ok(()); + 
} + + // We still want to color the flag even if the following tokens don't match, so don't + // propagate the error to the parent atomic block if it fails + let _ = token_nodes.atomic(|token_nodes| { + // We can live with unmatched syntax after a mandatory flag + color_syntax(&MaybeSpaceShape, token_nodes, context); + + // If the part after a mandatory flag isn't present, that's ok, but we + // should roll back any whitespace we chomped + color_fallible_syntax(syntax_type, token_nodes, context)?; + + Ok(()) + }); + + Ok(()) + }); + + args.insert(pos, shapes); + token_nodes.restart(); + } + + Ok(None) => { + token_nodes.restart(); + } + } + } + }; + } + + trace_remaining("after named", &token_nodes, context.source()); + + for arg in &signature.positional { + trace!("Processing positional {:?}", arg); + + match &arg.0 { + PositionalType::Mandatory(..) => { + if token_nodes.at_end() { + break; + } + } + + PositionalType::Optional(..) => { + if token_nodes.at_end() { + break; + } + } + } + + let pos = token_nodes.pos(false); + + match pos { + None => break, + Some(pos) => { + // We can live with an unmatched positional argument. Hopefully it will be + // matched by a future token + let (_, shapes) = token_nodes.atomic_returning_shapes(|token_nodes| { + color_syntax(&MaybeSpaceShape, token_nodes, context); + + // If no match, we should roll back any whitespace we chomped + color_fallible_syntax(&arg.0.syntax_type(), token_nodes, context)?; + + Ok(()) + }); + + args.insert(pos, shapes); + } + } + } + + trace_remaining("after positional", &token_nodes, context.source()); + + if let Some((syntax_type, _)) = signature.rest_positional { + loop { + if token_nodes.at_end_possible_ws() { + break; + } + + let pos = token_nodes.pos(false); + + match pos { + None => break, + Some(pos) => { + // If any arguments don't match, we'll fall back to backoff coloring mode + let (result, shapes) = token_nodes.atomic_returning_shapes(|token_nodes| { + color_syntax(&MaybeSpaceShape, token_nodes, context); + + // If no match, we should roll back any whitespace we chomped + color_fallible_syntax(&syntax_type, token_nodes, context)?; + + Ok(()) + }); + + args.insert(pos, shapes); + + match result { + Err(_) => break, + Ok(_) => continue, + } + } + } + } + } + + token_nodes.silently_mutate_shapes(|shapes| args.spread_shapes(shapes)); + + // Consume any remaining tokens with backoff coloring mode + color_syntax(&BackoffColoringMode, token_nodes, context); + + // This is pretty dubious, but it works. We should look into a better algorithm that doesn't end up requiring + // this solution. 
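        // For comparison, the #[cfg(not(coloring_in_tokens))] implementation
        // above ends with an explicit
        //     shapes.sort_by(|a, b| a.span.start().cmp(&b.span.start()));
        // after spreading the per-position shapes and running backoff coloring;
        // `sort_shapes()` below is assumed to perform the equivalent
        // span-ordering pass on the shapes the iterator has accumulated.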
+ token_nodes.sort_shapes() + } +} + fn extract_switch(name: &str, tokens: &mut hir::TokensIterator<'_>, source: &Text) -> Option { tokens .extract(|t| t.as_flag(name, source)) @@ -205,15 +587,15 @@ fn extract_mandatory( name: &str, tokens: &mut hir::TokensIterator<'_>, source: &Text, - tag: Tag, -) -> Result<(usize, Tagged), ShellError> { + span: Span, +) -> Result<(usize, Spanned), ParseError> { let flag = tokens.extract(|t| t.as_flag(name, source)); match flag { - None => Err(ShellError::argument_error( + None => Err(ParseError::argument_error( config.name.clone(), ArgumentError::MissingMandatoryFlag(name.to_string()), - tag, + span, )), Some((pos, flag)) => { @@ -227,7 +609,7 @@ fn extract_optional( name: &str, tokens: &mut hir::TokensIterator<'_>, source: &Text, -) -> Result<(Option<(usize, Tagged)>), ShellError> { +) -> Result)>, ParseError> { let flag = tokens.extract(|t| t.as_flag(name, source)); match flag { @@ -239,8 +621,9 @@ fn extract_optional( } } -pub fn trace_remaining(desc: &'static str, tail: hir::TokensIterator<'_>, source: &Text) { +pub fn trace_remaining(desc: &'static str, tail: &hir::TokensIterator<'_>, source: &Text) { trace!( + target: "nu::parse", "{} = {:?}", desc, itertools::join( diff --git a/src/parser/registry.rs b/src/parser/registry.rs index 955a1a04c9..ff0a98ae85 100644 --- a/src/parser/registry.rs +++ b/src/parser/registry.rs @@ -1,11 +1,11 @@ // TODO: Temporary redirect pub(crate) use crate::context::CommandRegistry; use crate::evaluate::{evaluate_baseline_expr, Scope}; -use crate::parser::{hir, hir::SyntaxShape, parse_command, CallNode}; +use crate::parser::{hir, hir::SyntaxShape}; use crate::prelude::*; use derive_new::new; use indexmap::IndexMap; -use log::trace; + use serde::{Deserialize, Serialize}; use std::fmt; @@ -58,17 +58,19 @@ impl PositionalType { } } +type Description = String; + #[derive(Debug, Serialize, Deserialize, Clone, new)] pub struct Signature { pub name: String, #[new(default)] pub usage: String, #[new(default)] - pub positional: Vec, + pub positional: Vec<(PositionalType, Description)>, #[new(value = "None")] - pub rest_positional: Option, + pub rest_positional: Option<(SyntaxShape, Description)>, #[new(default)] - pub named: IndexMap, + pub named: IndexMap, #[new(value = "false")] pub is_filter: bool, } @@ -83,23 +85,42 @@ impl Signature { self } - pub fn required(mut self, name: impl Into, ty: impl Into) -> Signature { - self.positional - .push(PositionalType::Mandatory(name.into(), ty.into())); + pub fn required( + mut self, + name: impl Into, + ty: impl Into, + desc: impl Into, + ) -> Signature { + self.positional.push(( + PositionalType::Mandatory(name.into(), ty.into()), + desc.into(), + )); self } - pub fn optional(mut self, name: impl Into, ty: impl Into) -> Signature { - self.positional - .push(PositionalType::Optional(name.into(), ty.into())); + pub fn optional( + mut self, + name: impl Into, + ty: impl Into, + desc: impl Into, + ) -> Signature { + self.positional.push(( + PositionalType::Optional(name.into(), ty.into()), + desc.into(), + )); self } - pub fn named(mut self, name: impl Into, ty: impl Into) -> Signature { + pub fn named( + mut self, + name: impl Into, + ty: impl Into, + desc: impl Into, + ) -> Signature { self.named - .insert(name.into(), NamedType::Optional(ty.into())); + .insert(name.into(), (NamedType::Optional(ty.into()), desc.into())); self } @@ -108,15 +129,17 @@ impl Signature { mut self, name: impl Into, ty: impl Into, + desc: impl Into, ) -> Signature { self.named - .insert(name.into(), 
NamedType::Mandatory(ty.into())); + .insert(name.into(), (NamedType::Mandatory(ty.into()), desc.into())); self } - pub fn switch(mut self, name: impl Into) -> Signature { - self.named.insert(name.into(), NamedType::Switch); + pub fn switch(mut self, name: impl Into, desc: impl Into) -> Signature { + self.named + .insert(name.into(), (NamedType::Switch, desc.into())); self } @@ -126,8 +149,8 @@ impl Signature { self } - pub fn rest(mut self, ty: SyntaxShape) -> Signature { - self.rest_positional = Some(ty); + pub fn rest(mut self, ty: SyntaxShape, desc: impl Into) -> Signature { + self.rest_positional = Some((ty, desc.into())); self } } @@ -271,21 +294,6 @@ impl<'a> Iterator for PositionalIter<'a> { } } -impl Signature { - pub(crate) fn parse_args( - &self, - call: &Tagged, - context: &Context, - source: &Text, - ) -> Result { - let args = parse_command(self, context, call, source)?; - - trace!("parsed args: {:?}", args); - - Ok(args) - } -} - pub(crate) fn evaluate_args( call: &hir::Call, registry: &CommandRegistry, @@ -313,7 +321,7 @@ pub(crate) fn evaluate_args( for (name, value) in n.named.iter() { match value { hir::named::NamedValue::PresentSwitch(tag) => { - results.insert(name.clone(), Value::boolean(true).tagged(*tag)); + results.insert(name.clone(), Value::boolean(true).tagged(tag)); } hir::named::NamedValue::Value(expr) => { results.insert( diff --git a/src/plugin.rs b/src/plugin.rs index afd9871108..004e937fe8 100644 --- a/src/plugin.rs +++ b/src/plugin.rs @@ -32,7 +32,7 @@ pub fn serve_plugin(plugin: &mut dyn Plugin) { let input = match input { Some(arg) => std::fs::read_to_string(arg), None => { - send_response(ShellError::string(format!("No input given."))); + send_response(ShellError::untagged_runtime_error("No input given.")); return; } }; @@ -64,7 +64,7 @@ pub fn serve_plugin(plugin: &mut dyn Plugin) { return; } e => { - send_response(ShellError::string(format!( + send_response(ShellError::untagged_runtime_error(format!( "Could not handle plugin message: {} {:?}", input, e ))); @@ -102,7 +102,7 @@ pub fn serve_plugin(plugin: &mut dyn Plugin) { break; } e => { - send_response(ShellError::string(format!( + send_response(ShellError::untagged_runtime_error(format!( "Could not handle plugin message: {} {:?}", input, e ))); @@ -111,7 +111,7 @@ pub fn serve_plugin(plugin: &mut dyn Plugin) { } } e => { - send_response(ShellError::string(format!( + send_response(ShellError::untagged_runtime_error(format!( "Could not handle plugin message: {:?}", e, ))); diff --git a/src/plugins/add.rs b/src/plugins/add.rs deleted file mode 100644 index 03e1d42828..0000000000 --- a/src/plugins/add.rs +++ /dev/null @@ -1,86 +0,0 @@ -use nu::{ - serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature, - SyntaxShape, Tagged, Value, -}; - -struct Add { - field: Option, - value: Option, -} -impl Add { - fn new() -> Add { - Add { - field: None, - value: None, - } - } - - fn add(&self, value: Tagged) -> Result, ShellError> { - let value_tag = value.tag(); - match (value.item, self.value.clone()) { - (obj @ Value::Row(_), Some(v)) => match &self.field { - Some(f) => match obj.insert_data_at_path(value_tag, &f, v) { - Some(v) => return Ok(v), - None => { - return Err(ShellError::string(format!( - "add could not find place to insert field {:?} {}", - obj, f - ))) - } - }, - None => Err(ShellError::string( - "add needs a column name when adding a value to a table", - )), - }, - x => Err(ShellError::string(format!( - "Unrecognized type in stream: {:?}", - x - ))), - } - } -} 
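// A hedged illustration, not part of the patch: with the Description parameter now
// threaded through required, optional, named, switch and rest above, a plugin
// signature reads roughly like this. The command name and help strings here are
// made up for the example.

use nu::{Signature, SyntaxShape};

fn example_signature() -> Signature {
    Signature::build("example")
        .desc("Demonstrate the builder methods with per-argument descriptions.")
        .required("column", SyntaxShape::ColumnPath, "the column to operate on")
        .optional("value", SyntaxShape::String, "an optional replacement value")
        .named("limit", SyntaxShape::Number, "only touch this many rows")
        .switch("dry-run", "report what would change without changing anything")
        .rest(SyntaxShape::String, "any remaining free-form arguments")
        .filter()
}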
- -impl Plugin for Add { - fn config(&mut self) -> Result { - Ok(Signature::build("add") - .desc("Add a new field to the table.") - .required("Field", SyntaxShape::String) - .required("Value", SyntaxShape::String) - .rest(SyntaxShape::String) - .filter()) - } - - fn begin_filter(&mut self, call_info: CallInfo) -> Result, ShellError> { - if let Some(args) = call_info.args.positional { - match &args[0] { - Tagged { - item: Value::Primitive(Primitive::String(s)), - .. - } => { - self.field = Some(s.clone()); - } - _ => { - return Err(ShellError::string(format!( - "Unrecognized type in params: {:?}", - args[0] - ))) - } - } - match &args[1] { - Tagged { item: v, .. } => { - self.value = Some(v.clone()); - } - } - } - - Ok(vec![]) - } - - fn filter(&mut self, input: Tagged) -> Result, ShellError> { - Ok(vec![ReturnSuccess::value(self.add(input)?)]) - } -} - -fn main() { - serve_plugin(&mut Add::new()); -} diff --git a/src/plugins/average.rs b/src/plugins/average.rs new file mode 100644 index 0000000000..f78078450a --- /dev/null +++ b/src/plugins/average.rs @@ -0,0 +1,115 @@ +use nu::{ + serve_plugin, CallInfo, CoerceInto, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, + Signature, Tagged, TaggedItem, Value, +}; + +#[derive(Debug)] +struct Average { + total: Option>, + count: u64, +} + +impl Average { + fn new() -> Average { + Average { + total: None, + count: 0, + } + } + + fn average(&mut self, value: Tagged) -> Result<(), ShellError> { + match value.item() { + Value::Primitive(Primitive::Nothing) => Ok(()), + Value::Primitive(Primitive::Int(i)) => match &self.total { + Some(Tagged { + item: Value::Primitive(Primitive::Int(j)), + tag, + }) => { + self.total = Some(Value::int(i + j).tagged(tag)); + self.count += 1; + Ok(()) + } + None => { + self.total = Some(value.clone()); + self.count += 1; + Ok(()) + } + _ => Err(ShellError::labeled_error( + "Could calculate average of non-integer or unrelated types", + "source", + value.tag, + )), + }, + Value::Primitive(Primitive::Bytes(b)) => match &self.total { + Some(Tagged { + item: Value::Primitive(Primitive::Bytes(j)), + tag, + }) => { + self.total = Some(Value::bytes(b + j).tagged(tag)); + self.count += 1; + Ok(()) + } + None => { + self.total = Some(value); + self.count += 1; + Ok(()) + } + _ => Err(ShellError::labeled_error( + "Could calculate average of non-integer or unrelated types", + "source", + value.tag, + )), + }, + x => Err(ShellError::labeled_error( + format!("Unrecognized type in stream: {:?}", x), + "source", + value.tag, + )), + } + } +} + +impl Plugin for Average { + fn config(&mut self) -> Result { + Ok(Signature::build("average") + .desc("Compute the average of a column of numerical values.") + .filter()) + } + + fn begin_filter(&mut self, _: CallInfo) -> Result, ShellError> { + Ok(vec![]) + } + + fn filter(&mut self, input: Tagged) -> Result, ShellError> { + self.average(input)?; + Ok(vec![]) + } + + fn end_filter(&mut self) -> Result, ShellError> { + match self.total { + None => Ok(vec![]), + Some(ref inner) => match inner.item() { + Value::Primitive(Primitive::Int(i)) => { + let total: u64 = i + .tagged(inner.tag.clone()) + .coerce_into("converting for average")?; + let avg = total as f64 / self.count as f64; + let primitive_value: Value = Primitive::from(avg).into(); + let tagged_value = primitive_value.tagged(inner.tag.clone()); + Ok(vec![ReturnSuccess::value(tagged_value)]) + } + Value::Primitive(Primitive::Bytes(bytes)) => { + let avg = *bytes as f64 / self.count as f64; + let primitive_value: Value = 
Primitive::from(avg).into(); + let tagged_value = primitive_value.tagged(inner.tag.clone()); + Ok(vec![ReturnSuccess::value(tagged_value)]) + } + _ => Ok(vec![]), + }, + } + } +} + +fn main() { + serve_plugin(&mut Average::new()); +} diff --git a/src/plugins/binaryview.rs b/src/plugins/binaryview.rs index d5488d3241..4e563beabe 100644 --- a/src/plugins/binaryview.rs +++ b/src/plugins/binaryview.rs @@ -16,7 +16,7 @@ impl Plugin for BinaryView { fn config(&mut self) -> Result { Ok(Signature::build("binaryview") .desc("Autoview of binary data.") - .switch("lores")) + .switch("lores", "use low resolution output mode")) } fn sink(&mut self, call_info: CallInfo, input: Vec>) { @@ -24,8 +24,7 @@ impl Plugin for BinaryView { let value_anchor = v.anchor(); match v.item { Value::Primitive(Primitive::Binary(b)) => { - let source = call_info.source_map.get(&value_anchor); - let _ = view_binary(&b, source, call_info.args.has("lores")); + let _ = view_binary(&b, value_anchor.as_ref(), call_info.args.has("lores")); } _ => {} } @@ -212,7 +211,7 @@ struct RawImageBuffer { buffer: Vec, } -fn load_from_png_buffer(buffer: &[u8]) -> Option<(RawImageBuffer)> { +fn load_from_png_buffer(buffer: &[u8]) -> Option { use image::ImageDecoder; let decoder = image::png::PNGDecoder::new(buffer); @@ -232,7 +231,7 @@ fn load_from_png_buffer(buffer: &[u8]) -> Option<(RawImageBuffer)> { }) } -fn load_from_jpg_buffer(buffer: &[u8]) -> Option<(RawImageBuffer)> { +fn load_from_jpg_buffer(buffer: &[u8]) -> Option { use image::ImageDecoder; let decoder = image::jpeg::JPEGDecoder::new(buffer); diff --git a/src/plugins/docker.rs b/src/plugins/docker.rs index 9cb8a52e80..e0a06ab3d4 100644 --- a/src/plugins/docker.rs +++ b/src/plugins/docker.rs @@ -21,8 +21,8 @@ async fn docker(sub_command: &String, name: Tag) -> Result>, S "images" => docker_images(name), _ => Err(ShellError::labeled_error( "Unsupported Docker command", - format!("'{}'?", sub_command), - name.span, + "unknown docker command", + name, )), } } @@ -46,7 +46,7 @@ fn process_docker_output(cmd_output: &str, tag: Tag) -> Result .filter(|s| s.trim() != "") .collect(); - let mut dict = TaggedDictBuilder::new(tag); + let mut dict = TaggedDictBuilder::new(&tag); for (i, v) in values.iter().enumerate() { dict.insert(header[i].to_string(), Value::string(v.trim().to_string())); } @@ -92,18 +92,13 @@ impl Plugin for Docker { if let Some(args) = callinfo.args.positional { match &args[0] { Tagged { - item: Value::Primitive(Primitive::String(s)), + item: Value::Primitive(Primitive::String(command)), .. 
- } => match block_on(docker(&s, callinfo.name_tag)) { + } => match block_on(docker(&command, args[0].tag())) { Ok(v) => return Ok(v.into_iter().map(ReturnSuccess::value).collect()), Err(e) => return Err(e), }, - _ => { - return Err(ShellError::string(format!( - "Unrecognized type in params: {:?}", - args[0] - ))) - } + _ => return Err(ShellError::type_error("string", args[0].tagged_type_name())), } } diff --git a/src/plugins/edit.rs b/src/plugins/edit.rs index db116fedf5..fb0ac48ede 100644 --- a/src/plugins/edit.rs +++ b/src/plugins/edit.rs @@ -1,10 +1,12 @@ use nu::{ - serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature, - SyntaxShape, Tagged, Value, + serve_plugin, CallInfo, Plugin, ReturnSuccess, ReturnValue, ShellError, Signature, SyntaxShape, + Tagged, Value, }; +pub type ColumnPath = Tagged>>; + struct Edit { - field: Option, + field: Option, value: Option, } impl Edit { @@ -19,22 +21,25 @@ impl Edit { let value_tag = value.tag(); match (value.item, self.value.clone()) { (obj @ Value::Row(_), Some(v)) => match &self.field { - Some(f) => match obj.replace_data_at_path(value_tag, &f, v) { + Some(f) => match obj.replace_data_at_column_path(value_tag, &f, v) { Some(v) => return Ok(v), None => { - return Err(ShellError::string( + return Err(ShellError::labeled_error( "edit could not find place to insert column", + "column name", + &f.tag, )) } }, - None => Err(ShellError::string( + None => Err(ShellError::untagged_runtime_error( "edit needs a column when changing a value in a table", )), }, - x => Err(ShellError::string(format!( - "Unrecognized type in stream: {:?}", - x - ))), + _ => Err(ShellError::labeled_error( + "Unrecognized type in stream", + "original value", + value_tag, + )), } } } @@ -43,26 +48,29 @@ impl Plugin for Edit { fn config(&mut self) -> Result { Ok(Signature::build("edit") .desc("Edit an existing column to have a new value.") - .required("Field", SyntaxShape::String) - .required("Value", SyntaxShape::String) + .required( + "Field", + SyntaxShape::ColumnPath, + "the name of the column to edit", + ) + .required( + "Value", + SyntaxShape::String, + "the new value to give the cell(s)", + ) .filter()) } fn begin_filter(&mut self, call_info: CallInfo) -> Result, ShellError> { if let Some(args) = call_info.args.positional { match &args[0] { - Tagged { - item: Value::Primitive(Primitive::String(s)), + table @ Tagged { + item: Value::Table(_), .. } => { - self.field = Some(s.clone()); - } - _ => { - return Err(ShellError::string(format!( - "Unrecognized type in params: {:?}", - args[0] - ))) + self.field = Some(table.as_column_path()?); } + value => return Err(ShellError::type_error("table", value.tagged_type_name())), } match &args[1] { Tagged { item: v, .. 
} => { diff --git a/src/plugins/embed.rs b/src/plugins/embed.rs index 646db80918..6dc539d107 100644 --- a/src/plugins/embed.rs +++ b/src/plugins/embed.rs @@ -1,6 +1,9 @@ +#[macro_use] +extern crate indexmap; + use nu::{ serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature, - SyntaxShape, Tag, Tagged, TaggedDictBuilder, Value, + SyntaxShape, Tag, Tagged, TaggedItem, Value, }; struct Embed { @@ -16,20 +19,8 @@ impl Embed { } fn embed(&mut self, value: Tagged) -> Result<(), ShellError> { - match value { - Tagged { item, tag } => match &self.field { - Some(_) => { - self.values.push(Tagged { - item: item, - tag: tag, - }); - Ok(()) - } - None => Err(ShellError::string( - "embed needs a field when embedding a value", - )), - }, - } + self.values.push(value); + Ok(()) } } @@ -37,8 +28,7 @@ impl Plugin for Embed { fn config(&mut self) -> Result { Ok(Signature::build("embed") .desc("Embeds a new field to the table.") - .required("Field", SyntaxShape::String) - .rest(SyntaxShape::String) + .optional("field", SyntaxShape::String, "the name of the new column") .filter()) } @@ -52,12 +42,7 @@ impl Plugin for Embed { self.field = Some(s.clone()); self.values = Vec::new(); } - _ => { - return Err(ShellError::string(format!( - "Unrecognized type in params: {:?}", - args[0] - ))) - } + value => return Err(ShellError::type_error("string", value.tagged_type_name())), } } @@ -70,15 +55,15 @@ impl Plugin for Embed { } fn end_filter(&mut self) -> Result, ShellError> { - let mut root = TaggedDictBuilder::new(Tag::unknown()); - root.insert_tagged( - self.field.as_ref().unwrap(), - Tagged { - item: Value::Table(self.values.clone()), - tag: Tag::unknown(), - }, - ); - Ok(vec![ReturnSuccess::value(root.into_tagged_value())]) + let row = Value::row(indexmap! { + match &self.field { + Some(key) => key.clone(), + None => "root".into(), + } => Value::table(&self.values).tagged(Tag::unknown()), + }) + .tagged(Tag::unknown()); + + Ok(vec![ReturnSuccess::value(row)]) } } diff --git a/src/plugins/format.rs b/src/plugins/format.rs new file mode 100644 index 0000000000..7ed33f964c --- /dev/null +++ b/src/plugins/format.rs @@ -0,0 +1,128 @@ +use nu::{ + serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature, + SyntaxShape, Tagged, TaggedItem, Value, +}; + +use nom::{ + bytes::complete::{tag, take_while}, + IResult, +}; + +#[derive(Debug)] +enum FormatCommand { + Text(String), + Column(String), +} + +fn format(input: &str) -> IResult<&str, Vec> { + let mut output = vec![]; + + let mut loop_input = input; + loop { + let (input, before) = take_while(|c| c != '{')(loop_input)?; + if before.len() > 0 { + output.push(FormatCommand::Text(before.to_string())); + } + if input != "" { + // Look for column as we're now at one + let (input, _) = tag("{")(input)?; + let (input, column) = take_while(|c| c != '}')(input)?; + let (input, _) = tag("}")(input)?; + + output.push(FormatCommand::Column(column.to_string())); + loop_input = input; + } else { + loop_input = input; + } + if loop_input == "" { + break; + } + } + + Ok((loop_input, output)) +} + +struct Format { + commands: Vec, +} + +impl Format { + fn new() -> Self { + Format { commands: vec![] } + } +} + +impl Plugin for Format { + fn config(&mut self) -> Result { + Ok(Signature::build("format") + .desc("Format columns into a string using a simple pattern") + .required( + "pattern", + SyntaxShape::Any, + "the pattern to match. 
Eg) \"{foo}: {bar}\"", + ) + .filter()) + } + fn begin_filter(&mut self, call_info: CallInfo) -> Result, ShellError> { + if let Some(args) = call_info.args.positional { + match &args[0] { + Tagged { + item: Value::Primitive(Primitive::String(pattern)), + .. + } => { + let format_pattern = format(&pattern).unwrap(); + self.commands = format_pattern.1 + } + Tagged { tag, .. } => { + return Err(ShellError::labeled_error( + "Unrecognized type in params", + "expected a string", + tag, + )); + } + } + } + Ok(vec![]) + } + + fn filter(&mut self, input: Tagged) -> Result, ShellError> { + match &input { + Tagged { + item: Value::Row(dict), + .. + } => { + let mut output = String::new(); + + for command in &self.commands { + match command { + FormatCommand::Text(s) => { + output.push_str(s); + } + FormatCommand::Column(c) => { + match dict.entries.get(c) { + Some(c) => match c.as_string() { + Ok(v) => output.push_str(&v), + _ => return Ok(vec![]), + }, + None => { + // This row doesn't match, so don't emit anything + return Ok(vec![]); + } + } + } + } + } + + return Ok(vec![ReturnSuccess::value( + Value::string(output).tagged_unknown(), + )]); + } + _ => {} + } + Ok(vec![]) + } +} + +fn main() { + serve_plugin(&mut Format::new()); +} diff --git a/src/plugins/inc.rs b/src/plugins/inc.rs index ecab03dc97..b7b24025e3 100644 --- a/src/plugins/inc.rs +++ b/src/plugins/inc.rs @@ -1,6 +1,6 @@ use nu::{ - serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature, - SyntaxShape, Tagged, TaggedItem, Value, + did_you_mean, serve_plugin, tag_for_tagged_list, CallInfo, Plugin, Primitive, ReturnSuccess, + ReturnValue, ShellError, Signature, SyntaxShape, Tagged, TaggedItem, Value, }; enum Action { @@ -14,8 +14,10 @@ pub enum SemVerAction { Patch, } +pub type ColumnPath = Tagged>>; + struct Inc { - field: Option, + field: Option, error: Option, action: Option, } @@ -80,35 +82,82 @@ impl Inc { Value::Primitive(Primitive::Bytes(b)) => { Ok(Value::bytes(b + 1 as u64).tagged(value.tag())) } - Value::Primitive(Primitive::String(ref s)) => { - Ok(Tagged::from_item(self.apply(&s)?, value.tag())) + Value::Primitive(Primitive::String(ref s)) => Ok(self.apply(&s)?.tagged(value.tag())), + Value::Table(values) => { + if values.len() == 1 { + return Ok(Value::Table(vec![self.inc(values[0].clone())?]).tagged(value.tag())); + } else { + return Err(ShellError::type_error( + "incrementable value", + value.tagged_type_name(), + )); + } } + Value::Row(_) => match self.field { Some(ref f) => { - let replacement = match value.item.get_data_by_path(value.tag(), f) { - Some(result) => self.inc(result.map(|x| x.clone()))?, - None => { - return Err(ShellError::string("inc could not find field to replace")) - } + let fields = f.clone(); + + let replace_for = value.item.get_data_by_column_path( + value.tag(), + f, + Box::new(move |(obj_source, column_path_tried)| { + match did_you_mean(&obj_source, &column_path_tried) { + Some(suggestions) => { + return ShellError::labeled_error( + "Unknown column", + format!("did you mean '{}'?", suggestions[0].1), + tag_for_tagged_list(fields.iter().map(|p| p.tag())), + ) + } + None => { + return ShellError::labeled_error( + "Unknown column", + "row does not contain this column", + tag_for_tagged_list(fields.iter().map(|p| p.tag())), + ) + } + } + }), + ); + + let replacement = match replace_for { + Ok(got) => match got { + Some(result) => self.inc(result.map(|x| x.clone()))?, + None => { + return Err(ShellError::labeled_error( + "inc could not find field to replace", + 
"column name", + value.tag(), + )) + } + }, + Err(reason) => return Err(reason), }; - match value - .item - .replace_data_at_path(value.tag(), f, replacement.item.clone()) - { + + match value.item.replace_data_at_column_path( + value.tag(), + &f, + replacement.item.clone(), + ) { Some(v) => return Ok(v), None => { - return Err(ShellError::string("inc could not find field to replace")) + return Err(ShellError::labeled_error( + "inc could not find field to replace", + "column name", + value.tag(), + )) } } } - None => Err(ShellError::string( + None => Err(ShellError::untagged_runtime_error( "inc needs a field when incrementing a column in a table", )), }, - x => Err(ShellError::string(format!( - "Unrecognized type in stream: {:?}", - x - ))), + _ => Err(ShellError::type_error( + "incrementable value", + value.tagged_type_name(), + )), } } } @@ -117,10 +166,10 @@ impl Plugin for Inc { fn config(&mut self) -> Result { Ok(Signature::build("inc") .desc("Increment a value or version. Optionally use the column of a table.") - .switch("major") - .switch("minor") - .switch("patch") - .rest(SyntaxShape::String) + .switch("major", "increment the major version (eg 1.2.1 -> 2.0.0)") + .switch("minor", "increment the minor version (eg 1.2.1 -> 1.3.0)") + .switch("patch", "increment the patch version (eg 1.2.1 -> 1.2.2)") + .rest(SyntaxShape::ColumnPath, "the column(s) to update") .filter()) } @@ -138,18 +187,13 @@ impl Plugin for Inc { if let Some(args) = call_info.args.positional { for arg in args { match arg { - Tagged { - item: Value::Primitive(Primitive::String(s)), + table @ Tagged { + item: Value::Table(_), .. } => { - self.field = Some(s); - } - _ => { - return Err(ShellError::string(format!( - "Unrecognized type in params: {:?}", - arg - ))) + self.field = Some(table.as_column_path()?); } + value => return Err(ShellError::type_error("table", value.tagged_type_name())), } } } @@ -160,7 +204,11 @@ impl Plugin for Inc { match &self.error { Some(reason) => { - return Err(ShellError::string(format!("{}: {}", reason, Inc::usage()))) + return Err(ShellError::untagged_runtime_error(format!( + "{}: {}", + reason, + Inc::usage() + ))) } None => Ok(vec![]), } @@ -181,20 +229,18 @@ mod tests { use super::{Inc, SemVerAction}; use indexmap::IndexMap; use nu::{ - CallInfo, EvaluatedArgs, Plugin, ReturnSuccess, SourceMap, Tag, Tagged, TaggedDictBuilder, + CallInfo, EvaluatedArgs, Plugin, Primitive, ReturnSuccess, Tag, Tagged, TaggedDictBuilder, TaggedItem, Value, }; struct CallStub { - anchor: uuid::Uuid, positionals: Vec>, flags: IndexMap>, } impl CallStub { - fn new(anchor: uuid::Uuid) -> CallStub { + fn new() -> CallStub { CallStub { - anchor, positionals: vec![], flags: indexmap::IndexMap::new(), } @@ -209,16 +255,20 @@ mod tests { } fn with_parameter(&mut self, name: &str) -> &mut Self { + let fields: Vec> = name + .split(".") + .map(|s| Value::string(s.to_string()).tagged(Tag::unknown())) + .collect(); + self.positionals - .push(Value::string(name.to_string()).tagged(Tag::unknown_span(self.anchor))); + .push(Value::Table(fields).tagged(Tag::unknown())); self } fn create(&self) -> CallInfo { CallInfo { args: EvaluatedArgs::new(Some(self.positionals.clone()), Some(self.flags.clone())), - source_map: SourceMap::new(), - name_tag: Tag::unknown_span(self.anchor), + name_tag: Tag::unknown(), } } } @@ -245,7 +295,7 @@ mod tests { let mut plugin = Inc::new(); assert!(plugin - .begin_filter(CallStub::new(test_uuid()).with_long_flag("major").create()) + .begin_filter(CallStub::new().with_long_flag("major").create()) 
.is_ok()); assert!(plugin.action.is_some()); } @@ -255,7 +305,7 @@ mod tests { let mut plugin = Inc::new(); assert!(plugin - .begin_filter(CallStub::new(test_uuid()).with_long_flag("minor").create()) + .begin_filter(CallStub::new().with_long_flag("minor").create()) .is_ok()); assert!(plugin.action.is_some()); } @@ -265,7 +315,7 @@ mod tests { let mut plugin = Inc::new(); assert!(plugin - .begin_filter(CallStub::new(test_uuid()).with_long_flag("patch").create()) + .begin_filter(CallStub::new().with_long_flag("patch").create()) .is_ok()); assert!(plugin.action.is_some()); } @@ -276,7 +326,7 @@ mod tests { assert!(plugin .begin_filter( - CallStub::new(test_uuid()) + CallStub::new() .with_long_flag("major") .with_long_flag("minor") .create(), @@ -290,14 +340,19 @@ mod tests { let mut plugin = Inc::new(); assert!(plugin - .begin_filter( - CallStub::new(test_uuid()) - .with_parameter("package.version") - .create() - ) + .begin_filter(CallStub::new().with_parameter("package.version").create()) .is_ok()); - assert_eq!(plugin.field, Some("package.version".to_string())); + assert_eq!( + plugin.field.map(|f| f + .iter() + .map(|f| match &f.item { + Value::Primitive(Primitive::String(s)) => s.clone(), + _ => panic!(""), + }) + .collect()), + Some(vec!["package".to_string(), "version".to_string()]) + ); } #[test] @@ -327,7 +382,7 @@ mod tests { assert!(plugin .begin_filter( - CallStub::new(test_uuid()) + CallStub::new() .with_long_flag("major") .with_parameter("version") .create() @@ -355,7 +410,7 @@ mod tests { assert!(plugin .begin_filter( - CallStub::new(test_uuid()) + CallStub::new() .with_long_flag("minor") .with_parameter("version") .create() @@ -384,7 +439,7 @@ mod tests { assert!(plugin .begin_filter( - CallStub::new(test_uuid()) + CallStub::new() .with_long_flag("patch") .with_parameter(&field) .create() @@ -405,8 +460,4 @@ mod tests { _ => {} } } - - fn test_uuid() -> uuid::Uuid { - uuid::Uuid::nil() - } } diff --git a/src/plugins/insert.rs b/src/plugins/insert.rs new file mode 100644 index 0000000000..cfe3a27b5d --- /dev/null +++ b/src/plugins/insert.rs @@ -0,0 +1,106 @@ +use itertools::Itertools; +use nu::{ + serve_plugin, CallInfo, Plugin, ReturnSuccess, ReturnValue, ShellError, Signature, SyntaxShape, + Tagged, TaggedItem, Value, +}; + +pub type ColumnPath = Vec>; + +struct Insert { + field: Option, + value: Option, +} +impl Insert { + fn new() -> Insert { + Insert { + field: None, + value: None, + } + } + + fn insert(&self, value: Tagged) -> Result, ShellError> { + let value_tag = value.tag(); + match (value.item, self.value.clone()) { + (obj @ Value::Row(_), Some(v)) => match &self.field { + Some(f) => match obj.insert_data_at_column_path(value_tag.clone(), f, v) { + Some(v) => return Ok(v), + None => { + return Err(ShellError::labeled_error( + format!( + "add could not find place to insert field {:?} {}", + obj, + f.iter() + .map(|i| { + match &i.item { + Value::Primitive(primitive) => primitive.format(None), + _ => String::from(""), + } + }) + .join(".") + ), + "column name", + &value_tag, + )) + } + }, + None => Err(ShellError::labeled_error( + "add needs a column name when adding a value to a table", + "column name", + value_tag, + )), + }, + (value, _) => Err(ShellError::type_error( + "row", + value.type_name().tagged(value_tag), + )), + } + } +} + +impl Plugin for Insert { + fn config(&mut self) -> Result { + Ok(Signature::build("insert") + .desc("Insert a new column to the table.") + .required( + "column", + SyntaxShape::ColumnPath, + "the column name to insert", + ) + .required( 
+ "value", + SyntaxShape::String, + "the value to give the cell(s)", + ) + .filter()) + } + + fn begin_filter(&mut self, call_info: CallInfo) -> Result, ShellError> { + if let Some(args) = call_info.args.positional { + match &args[0] { + table @ Tagged { + item: Value::Table(_), + .. + } => { + self.field = Some(table.as_column_path()?.item); + } + + value => return Err(ShellError::type_error("table", value.tagged_type_name())), + } + match &args[1] { + Tagged { item: v, .. } => { + self.value = Some(v.clone()); + } + } + } + + Ok(vec![]) + } + + fn filter(&mut self, input: Tagged) -> Result, ShellError> { + Ok(vec![ReturnSuccess::value(self.insert(input)?)]) + } +} + +fn main() { + serve_plugin(&mut Insert::new()); +} diff --git a/src/plugins/match.rs b/src/plugins/match.rs index 1f2aad83fc..eefbf10632 100644 --- a/src/plugins/match.rs +++ b/src/plugins/match.rs @@ -22,8 +22,8 @@ impl Plugin for Match { fn config(&mut self) -> Result { Ok(Signature::build("match") .desc("filter rows by regex") - .required("member", SyntaxShape::Member) - .required("regex", SyntaxShape::String) + .required("member", SyntaxShape::Member, "the column name to match") + .required("regex", SyntaxShape::String, "the regex to match with") .filter()) } fn begin_filter(&mut self, call_info: CallInfo) -> Result, ShellError> { @@ -35,11 +35,12 @@ impl Plugin for Match { } => { self.column = s.clone(); } - _ => { - return Err(ShellError::string(format!( - "Unrecognized type in params: {:?}", - args[0] - ))); + Tagged { tag, .. } => { + return Err(ShellError::labeled_error( + "Unrecognized type in params", + "value", + tag, + )); } } match &args[1] { @@ -49,11 +50,12 @@ impl Plugin for Match { } => { self.regex = Regex::new(s).unwrap(); } - _ => { - return Err(ShellError::string(format!( - "Unrecognized type in params: {:?}", - args[1] - ))); + Tagged { tag, .. } => { + return Err(ShellError::labeled_error( + "Unrecognized type in params", + "value", + tag, + )); } } } @@ -65,7 +67,7 @@ impl Plugin for Match { match &input { Tagged { item: Value::Row(dict), - .. + tag, } => { if let Some(val) = dict.entries.get(&self.column) { match val { @@ -75,22 +77,20 @@ impl Plugin for Match { } => { flag = self.regex.is_match(s); } - _ => { - return Err(ShellError::string(format!( - "value is not a string! {:?}", - &val - ))); + Tagged { tag, .. } => { + return Err(ShellError::labeled_error("expected string", "value", tag)); } } } else { - return Err(ShellError::string(format!( - "column not in row! {:?} {:?}", - &self.column, dict - ))); + return Err(ShellError::labeled_error( + format!("column not in row! {:?} {:?}", &self.column, dict), + "row", + tag, + )); } } - _ => { - return Err(ShellError::string(format!("Not a row! {:?}", &input))); + Tagged { tag, .. 
} => { + return Err(ShellError::labeled_error("Expected row", "value", tag)); } } if flag { diff --git a/src/plugins/parse.rs b/src/plugins/parse.rs new file mode 100644 index 0000000000..97ece0693d --- /dev/null +++ b/src/plugins/parse.rs @@ -0,0 +1,156 @@ +use nu::{ + serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature, + SyntaxShape, Tagged, TaggedDictBuilder, Value, +}; + +use nom::{ + bytes::complete::{tag, take_while}, + IResult, +}; +use regex::Regex; + +#[derive(Debug)] +enum ParseCommand { + Text(String), + Column(String), +} + +fn parse(input: &str) -> IResult<&str, Vec> { + let mut output = vec![]; + + let mut loop_input = input; + loop { + let (input, before) = take_while(|c| c != '{')(loop_input)?; + if before.len() > 0 { + output.push(ParseCommand::Text(before.to_string())); + } + if input != "" { + // Look for column as we're now at one + let (input, _) = tag("{")(input)?; + let (input, column) = take_while(|c| c != '}')(input)?; + let (input, _) = tag("}")(input)?; + + output.push(ParseCommand::Column(column.to_string())); + loop_input = input; + } else { + loop_input = input; + } + if loop_input == "" { + break; + } + } + + Ok((loop_input, output)) +} + +fn column_names(commands: &[ParseCommand]) -> Vec { + let mut output = vec![]; + + for command in commands { + match command { + ParseCommand::Column(c) => { + output.push(c.clone()); + } + _ => {} + } + } + + output +} + +fn build_regex(commands: &[ParseCommand]) -> String { + let mut output = String::new(); + + for command in commands { + match command { + ParseCommand::Text(s) => { + output.push_str(&s.replace("(", "\\(")); + } + ParseCommand::Column(_) => { + output.push_str("(.*)"); + } + } + } + + return output; +} +struct Parse { + regex: Regex, + column_names: Vec, +} + +impl Parse { + fn new() -> Self { + Parse { + regex: Regex::new("").unwrap(), + column_names: vec![], + } + } +} + +impl Plugin for Parse { + fn config(&mut self) -> Result { + Ok(Signature::build("parse") + .desc("Parse columns from string data using a simple pattern") + .required( + "pattern", + SyntaxShape::Any, + "the pattern to match. Eg) \"{foo}: {bar}\"", + ) + .filter()) + } + fn begin_filter(&mut self, call_info: CallInfo) -> Result, ShellError> { + if let Some(args) = call_info.args.positional { + match &args[0] { + Tagged { + item: Value::Primitive(Primitive::String(pattern)), + .. + } => { + //self.pattern = s.clone(); + let parse_pattern = parse(&pattern).unwrap(); + let parse_regex = build_regex(&parse_pattern.1); + + self.column_names = column_names(&parse_pattern.1); + + self.regex = Regex::new(&parse_regex).unwrap(); + } + Tagged { tag, .. 
} => { + return Err(ShellError::labeled_error( + "Unrecognized type in params", + "expected a string", + tag, + )); + } + } + } + Ok(vec![]) + } + + fn filter(&mut self, input: Tagged) -> Result, ShellError> { + let mut results = vec![]; + match &input { + Tagged { + tag, + item: Value::Primitive(Primitive::String(s)), + } => { + //self.full_input.push_str(&s); + + for cap in self.regex.captures_iter(&s) { + let mut dict = TaggedDictBuilder::new(tag); + + for (idx, column_name) in self.column_names.iter().enumerate() { + dict.insert(column_name, Value::string(&cap[idx + 1].to_string())); + } + + results.push(ReturnSuccess::value(dict.into_tagged_value())); + } + } + _ => {} + } + Ok(results) + } +} + +fn main() { + serve_plugin(&mut Parse::new()); +} diff --git a/src/plugins/ps.rs b/src/plugins/ps.rs index 1ae9938d34..69c7fa1b43 100644 --- a/src/plugins/ps.rs +++ b/src/plugins/ps.rs @@ -20,7 +20,7 @@ impl Ps { async fn usage(process: Process) -> ProcessResult<(process::Process, Ratio)> { let usage_1 = process.cpu_usage().await?; - futures_timer::Delay::new(Duration::from_millis(100)).await?; + futures_timer::Delay::new(Duration::from_millis(100)).await; let usage_2 = process.cpu_usage().await?; Ok((process, usage_2 - usage_1)) @@ -40,7 +40,7 @@ async fn ps(tag: Tag) -> Vec> { let mut output = vec![]; while let Some(res) = processes.next().await { if let Ok((process, usage)) = res { - let mut dict = TaggedDictBuilder::new(tag); + let mut dict = TaggedDictBuilder::new(&tag); dict.insert("pid", Value::int(process.pid())); if let Ok(name) = process.name().await { dict.insert("name", Value::string(name)); diff --git a/src/plugins/skip.rs b/src/plugins/skip.rs index efd3231525..5ec290fe04 100644 --- a/src/plugins/skip.rs +++ b/src/plugins/skip.rs @@ -17,7 +17,7 @@ impl Plugin for Skip { fn config(&mut self) -> Result { Ok(Signature::build("skip") .desc("Skip a number of rows") - .rest(SyntaxShape::Number) + .rest(SyntaxShape::Number, "the number of rows to skip") .filter()) } fn begin_filter(&mut self, call_info: CallInfo) -> Result, ShellError> { diff --git a/src/plugins/str.rs b/src/plugins/str.rs index 4b74914f09..5741897fd4 100644 --- a/src/plugins/str.rs +++ b/src/plugins/str.rs @@ -1,17 +1,21 @@ use nu::{ - serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature, - SyntaxShape, Tagged, Value, + did_you_mean, serve_plugin, tag_for_tagged_list, CallInfo, Plugin, Primitive, ReturnSuccess, + ReturnValue, ShellError, Signature, SyntaxShape, Tagged, TaggedItem, Value, }; +use std::cmp; #[derive(Debug, Eq, PartialEq)] enum Action { Downcase, Upcase, ToInteger, + Substring(usize, usize), } +pub type ColumnPath = Tagged>>; + struct Str { - field: Option, + field: Option, params: Option>, error: Option, action: Option, @@ -31,6 +35,21 @@ impl Str { let applied = match self.action.as_ref() { Some(Action::Downcase) => Value::string(input.to_ascii_lowercase()), Some(Action::Upcase) => Value::string(input.to_ascii_uppercase()), + Some(Action::Substring(s, e)) => { + let end: usize = cmp::min(*e, input.len()); + let start: usize = *s; + if start > input.len() - 1 { + Value::string("") + } else { + Value::string( + &input + .chars() + .skip(start) + .take(end - start) + .collect::(), + ) + } + } Some(Action::ToInteger) => match input.trim() { other => match other.parse::() { Ok(v) => Value::int(v), @@ -43,8 +62,8 @@ impl Str { Ok(applied) } - fn for_field(&mut self, field: &str) { - self.field = Some(String::from(field)); + fn for_field(&mut self, column_path: 
ColumnPath) { + self.field = Some(column_path); } fn permit(&mut self) -> bool { @@ -79,43 +98,100 @@ impl Str { } } + fn for_substring(&mut self, s: String) { + let v: Vec<&str> = s.split(',').collect(); + let start: usize = match v[0] { + "" => 0, + _ => v[0].trim().parse().unwrap(), + }; + let end: usize = match v[1] { + "" => usize::max_value().clone(), + _ => v[1].trim().parse().unwrap(), + }; + if start > end { + self.log_error("End must be greater than or equal to Start"); + } else if self.permit() { + self.action = Some(Action::Substring(start, end)); + } else { + self.log_error("can only apply one"); + } + } + pub fn usage() -> &'static str { - "Usage: str field [--downcase|--upcase|--to-int]" + "Usage: str field [--downcase|--upcase|--to-int|--substring \"start,end\"]" } } impl Str { fn strutils(&self, value: Tagged) -> Result, ShellError> { match value.item { - Value::Primitive(Primitive::String(ref s)) => { - Ok(Tagged::from_item(self.apply(&s)?, value.tag())) - } + Value::Primitive(Primitive::String(ref s)) => Ok(self.apply(&s)?.tagged(value.tag())), Value::Row(_) => match self.field { Some(ref f) => { - let replacement = match value.item.get_data_by_path(value.tag(), f) { - Some(result) => self.strutils(result.map(|x| x.clone()))?, - None => return Ok(Tagged::from_item(Value::nothing(), value.tag)), + let fields = f.clone(); + + let replace_for = value.item.get_data_by_column_path( + value.tag(), + f, + Box::new(move |(obj_source, column_path_tried)| { + match did_you_mean(&obj_source, &column_path_tried) { + Some(suggestions) => { + return ShellError::labeled_error( + "Unknown column", + format!("did you mean '{}'?", suggestions[0].1), + tag_for_tagged_list(fields.iter().map(|p| p.tag())), + ) + } + None => { + return ShellError::labeled_error( + "Unknown column", + "row does not contain this column", + tag_for_tagged_list(fields.iter().map(|p| p.tag())), + ) + } + } + }), + ); + + let replacement = match replace_for { + Ok(got) => match got { + Some(result) => self.strutils(result.map(|x| x.clone()))?, + None => { + return Err(ShellError::labeled_error( + "inc could not find field to replace", + "column name", + value.tag(), + )) + } + }, + Err(reason) => return Err(reason), }; - match value - .item - .replace_data_at_path(value.tag(), f, replacement.item.clone()) - { + + match value.item.replace_data_at_column_path( + value.tag(), + f, + replacement.item.clone(), + ) { Some(v) => return Ok(v), None => { - return Err(ShellError::string("str could not find field to replace")) + return Err(ShellError::type_error( + "column name", + value.tagged_type_name(), + )) } } } - None => Err(ShellError::string(format!( + None => Err(ShellError::untagged_runtime_error(format!( "{}: {}", "str needs a column when applied to a value in a row", Str::usage() ))), }, - x => Err(ShellError::string(format!( - "Unrecognized type in stream: {:?}", - x - ))), + _ => Err(ShellError::labeled_error( + "Unrecognized type in stream", + value.type_name(), + value.tag, + )), } } } @@ -123,11 +199,16 @@ impl Str { impl Plugin for Str { fn config(&mut self) -> Result { Ok(Signature::build("str") - .desc("Apply string function. Optional use the field of a table") - .switch("downcase") - .switch("upcase") - .switch("to-int") - .rest(SyntaxShape::Member) + .desc("Apply string function. 
Optional use the column of a table") + .switch("downcase", "convert string to lowercase") + .switch("upcase", "convert string to uppercase") + .switch("to-int", "convert string to integer") + .named( + "substring", + SyntaxShape::String, + "convert string to portion of original, requires \"start,end\"", + ) + .rest(SyntaxShape::ColumnPath, "the column(s) to convert") .filter()) } @@ -143,29 +224,49 @@ impl Plugin for Str { if args.has("to-int") { self.for_to_int(); } - - if let Some(possible_field) = args.nth(0) { - match possible_field { - Tagged { - item: Value::Primitive(Primitive::String(s)), - .. - } => match self.action { - Some(Action::Downcase) - | Some(Action::Upcase) - | Some(Action::ToInteger) - | None => { - self.for_field(&s); + if args.has("substring") { + if let Some(start_end) = args.get("substring") { + match start_end { + Tagged { + item: Value::Primitive(Primitive::String(s)), + .. + } => { + self.for_substring(s.to_string()); + } + _ => { + return Err(ShellError::labeled_error( + "Unrecognized type in params", + start_end.type_name(), + &start_end.tag, + )) } - }, - _ => { - return Err(ShellError::string(format!( - "Unrecognized type in params: {:?}", - possible_field - ))) } } } + if let Some(possible_field) = args.nth(0) { + match possible_field { + string @ Tagged { + item: Value::Primitive(Primitive::String(_)), + .. + } => { + self.for_field(string.as_column_path()?); + } + table @ Tagged { + item: Value::Table(_), + .. + } => { + self.field = Some(table.as_column_path()?); + } + _ => { + return Err(ShellError::labeled_error( + "Unrecognized type in params", + possible_field.type_name(), + &possible_field.tag, + )) + } + } + } for param in args.positional_iter() { match param { Tagged { @@ -178,7 +279,11 @@ impl Plugin for Str { match &self.error { Some(reason) => { - return Err(ShellError::string(format!("{}: {}", reason, Str::usage()))) + return Err(ShellError::untagged_runtime_error(format!( + "{}: {}", + reason, + Str::usage() + ))) } None => Ok(vec![]), } @@ -198,13 +303,12 @@ mod tests { use super::{Action, Str}; use indexmap::IndexMap; use nu::{ - CallInfo, EvaluatedArgs, Plugin, Primitive, ReturnSuccess, SourceMap, Tag, Tagged, - TaggedDictBuilder, TaggedItem, Value, + CallInfo, EvaluatedArgs, Plugin, Primitive, ReturnSuccess, Tag, Tagged, TaggedDictBuilder, + TaggedItem, Value, }; use num_bigint::BigInt; struct CallStub { - anchor: uuid::Uuid, positionals: Vec>, flags: IndexMap>, } @@ -212,12 +316,19 @@ mod tests { impl CallStub { fn new() -> CallStub { CallStub { - anchor: uuid::Uuid::nil(), positionals: vec![], flags: indexmap::IndexMap::new(), } } + fn with_named_parameter(&mut self, name: &str, value: &str) -> &mut Self { + self.flags.insert( + name.to_string(), + Value::string(value).tagged(Tag::unknown()), + ); + self + } + fn with_long_flag(&mut self, name: &str) -> &mut Self { self.flags.insert( name.to_string(), @@ -227,16 +338,20 @@ mod tests { } fn with_parameter(&mut self, name: &str) -> &mut Self { + let fields: Vec> = name + .split(".") + .map(|s| Value::string(s.to_string()).tagged(Tag::unknown())) + .collect(); + self.positionals - .push(Value::string(name.to_string()).tagged(Tag::unknown())); + .push(Value::Table(fields).tagged(Tag::unknown())); self } fn create(&self) -> CallInfo { CallInfo { args: EvaluatedArgs::new(Some(self.positionals.clone()), Some(self.flags.clone())), - source_map: SourceMap::new(), - name_tag: Tag::unknown_span(self.anchor), + name_tag: Tag::unknown(), } } } @@ -248,7 +363,7 @@ mod tests { } fn 
unstructured_sample_record(value: &str) -> Tagged { - Tagged::from_item(Value::string(value), Tag::unknown()) + Value::string(value).tagged(Tag::unknown()) } #[test] @@ -303,7 +418,16 @@ mod tests { ) .is_ok()); - assert_eq!(plugin.field, Some("package.description".to_string())); + assert_eq!( + plugin.field.map(|f| f + .iter() + .map(|f| match &f.item { + Value::Primitive(Primitive::String(s)) => s.clone(), + _ => panic!(""), + }) + .collect()), + Some(vec!["package".to_string(), "description".to_string()]) + ) } #[test] @@ -316,6 +440,7 @@ mod tests { .with_long_flag("upcase") .with_long_flag("downcase") .with_long_flag("to-int") + .with_long_flag("substring") .create(), ) .is_err()); @@ -486,4 +611,141 @@ mod tests { _ => {} } } + + #[test] + fn str_plugin_applies_substring_without_field() { + let mut plugin = Str::new(); + + assert!(plugin + .begin_filter( + CallStub::new() + .with_named_parameter("substring", "0,1") + .create() + ) + .is_ok()); + + let subject = unstructured_sample_record("0123456789"); + let output = plugin.filter(subject).unwrap(); + + match output[0].as_ref().unwrap() { + ReturnSuccess::Value(Tagged { + item: Value::Primitive(Primitive::String(s)), + .. + }) => assert_eq!(*s, String::from("0")), + _ => {} + } + } + + #[test] + fn str_plugin_applies_substring_exceeding_string_length() { + let mut plugin = Str::new(); + + assert!(plugin + .begin_filter( + CallStub::new() + .with_named_parameter("substring", "0,11") + .create() + ) + .is_ok()); + + let subject = unstructured_sample_record("0123456789"); + let output = plugin.filter(subject).unwrap(); + + match output[0].as_ref().unwrap() { + ReturnSuccess::Value(Tagged { + item: Value::Primitive(Primitive::String(s)), + .. + }) => assert_eq!(*s, String::from("0123456789")), + _ => {} + } + } + + #[test] + fn str_plugin_applies_substring_returns_blank_if_start_exceeds_length() { + let mut plugin = Str::new(); + + assert!(plugin + .begin_filter( + CallStub::new() + .with_named_parameter("substring", "20,30") + .create() + ) + .is_ok()); + + let subject = unstructured_sample_record("0123456789"); + let output = plugin.filter(subject).unwrap(); + + match output[0].as_ref().unwrap() { + ReturnSuccess::Value(Tagged { + item: Value::Primitive(Primitive::String(s)), + .. + }) => assert_eq!(*s, String::from("")), + _ => {} + } + } + + #[test] + fn str_plugin_applies_substring_treats_blank_start_as_zero() { + let mut plugin = Str::new(); + + assert!(plugin + .begin_filter( + CallStub::new() + .with_named_parameter("substring", ",5") + .create() + ) + .is_ok()); + + let subject = unstructured_sample_record("0123456789"); + let output = plugin.filter(subject).unwrap(); + + match output[0].as_ref().unwrap() { + ReturnSuccess::Value(Tagged { + item: Value::Primitive(Primitive::String(s)), + .. + }) => assert_eq!(*s, String::from("01234")), + _ => {} + } + } + + #[test] + fn str_plugin_applies_substring_treats_blank_end_as_length() { + let mut plugin = Str::new(); + + assert!(plugin + .begin_filter( + CallStub::new() + .with_named_parameter("substring", "2,") + .create() + ) + .is_ok()); + + let subject = unstructured_sample_record("0123456789"); + let output = plugin.filter(subject).unwrap(); + + match output[0].as_ref().unwrap() { + ReturnSuccess::Value(Tagged { + item: Value::Primitive(Primitive::String(s)), + .. 
+ }) => assert_eq!(*s, String::from("23456789")), + _ => {} + } + } + + #[test] + fn str_plugin_applies_substring_returns_error_if_start_exceeds_end() { + let mut plugin = Str::new(); + + assert!(plugin + .begin_filter( + CallStub::new() + .with_named_parameter("substring", "3,1") + .create() + ) + .is_err()); + assert_eq!( + plugin.error, + Some("End must be greater than or equal to Start".to_string()) + ); + } } diff --git a/src/plugins/sum.rs b/src/plugins/sum.rs index ffb39cb90b..d08d45713d 100644 --- a/src/plugins/sum.rs +++ b/src/plugins/sum.rs @@ -21,20 +21,22 @@ impl Sum { tag, }) => { //TODO: handle overflow - self.total = Some(Value::int(i + j).tagged(*tag)); + self.total = Some(Value::int(i + j).tagged(tag)); Ok(()) } None => { self.total = Some(value.clone()); Ok(()) } - _ => Err(ShellError::string(format!( - "Could not sum non-integer or unrelated types" - ))), + _ => Err(ShellError::labeled_error( + "Could not sum non-integer or unrelated types", + "source", + value.tag, + )), } } Value::Primitive(Primitive::Bytes(b)) => { - match self.total { + match &self.total { Some(Tagged { item: Value::Primitive(Primitive::Bytes(j)), tag, @@ -47,15 +49,18 @@ impl Sum { self.total = Some(value); Ok(()) } - _ => Err(ShellError::string(format!( - "Could not sum non-integer or unrelated types" - ))), + _ => Err(ShellError::labeled_error( + "Could not sum non-integer or unrelated types", + "source", + value.tag, + )), } } - x => Err(ShellError::string(format!( - "Unrecognized type in stream: {:?}", - x - ))), + x => Err(ShellError::labeled_error( + format!("Unrecognized type in stream: {:?}", x), + "source", + value.tag, + )), } } } diff --git a/src/plugins/sys.rs b/src/plugins/sys.rs index 1f86b51d7e..55bf5028bf 100644 --- a/src/plugins/sys.rs +++ b/src/plugins/sys.rs @@ -80,7 +80,7 @@ async fn mem(tag: Tag) -> Tagged { } async fn host(tag: Tag) -> Tagged { - let mut dict = TaggedDictBuilder::with_capacity(tag, 6); + let mut dict = TaggedDictBuilder::with_capacity(&tag, 6); let (platform_result, uptime_result) = futures::future::join(host::platform(), host::uptime()).await; @@ -95,7 +95,7 @@ async fn host(tag: Tag) -> Tagged { // Uptime if let Ok(uptime) = uptime_result { - let mut uptime_dict = TaggedDictBuilder::with_capacity(tag, 4); + let mut uptime_dict = TaggedDictBuilder::with_capacity(&tag, 4); let uptime = uptime.get::().round() as i64; let days = uptime / (60 * 60 * 24); @@ -116,7 +116,10 @@ async fn host(tag: Tag) -> Tagged { let mut user_vec = vec![]; while let Some(user) = users.next().await { if let Ok(user) = user { - user_vec.push(Tagged::from_item(Value::string(user.username()), tag)); + user_vec.push(Tagged { + item: Value::string(user.username()), + tag: tag.clone(), + }); } } let user_list = Value::Table(user_vec); @@ -130,7 +133,7 @@ async fn disks(tag: Tag) -> Option { let mut partitions = disk::partitions_physical(); while let Some(part) = partitions.next().await { if let Ok(part) = part { - let mut dict = TaggedDictBuilder::with_capacity(tag, 6); + let mut dict = TaggedDictBuilder::with_capacity(&tag, 6); dict.insert( "device", Value::string( @@ -176,7 +179,7 @@ async fn battery(tag: Tag) -> Option { if let Ok(batteries) = manager.batteries() { for battery in batteries { if let Ok(battery) = battery { - let mut dict = TaggedDictBuilder::new(tag); + let mut dict = TaggedDictBuilder::new(&tag); if let Some(vendor) = battery.vendor() { dict.insert("vendor", Value::string(vendor)); } @@ -217,7 +220,7 @@ async fn temp(tag: Tag) -> Option { let mut sensors = 
sensors::temperatures(); while let Some(sensor) = sensors.next().await { if let Ok(sensor) = sensor { - let mut dict = TaggedDictBuilder::new(tag); + let mut dict = TaggedDictBuilder::new(&tag); dict.insert("unit", Value::string(sensor.unit())); if let Some(label) = sensor.label() { dict.insert("label", Value::string(label)); @@ -259,7 +262,7 @@ async fn net(tag: Tag) -> Option { let mut io_counters = net::io_counters(); while let Some(nic) = io_counters.next().await { if let Ok(nic) = nic { - let mut network_idx = TaggedDictBuilder::with_capacity(tag, 3); + let mut network_idx = TaggedDictBuilder::with_capacity(&tag, 3); network_idx.insert("name", Value::string(nic.interface())); network_idx.insert( "sent", @@ -280,11 +283,17 @@ async fn net(tag: Tag) -> Option { } async fn sysinfo(tag: Tag) -> Vec> { - let mut sysinfo = TaggedDictBuilder::with_capacity(tag, 7); + let mut sysinfo = TaggedDictBuilder::with_capacity(&tag, 7); - let (host, cpu, disks, memory, temp) = - futures::future::join5(host(tag), cpu(tag), disks(tag), mem(tag), temp(tag)).await; - let (net, battery) = futures::future::join(net(tag), battery(tag)).await; + let (host, cpu, disks, memory, temp) = futures::future::join5( + host(tag.clone()), + cpu(tag.clone()), + disks(tag.clone()), + mem(tag.clone()), + temp(tag.clone()), + ) + .await; + let (net, battery) = futures::future::join(net(tag.clone()), battery(tag.clone())).await; sysinfo.insert_tagged("host", host); if let Some(cpu) = cpu { diff --git a/src/plugins/textview.rs b/src/plugins/textview.rs index cce8bd7084..88507183e0 100644 --- a/src/plugins/textview.rs +++ b/src/plugins/textview.rs @@ -1,8 +1,7 @@ use crossterm::{cursor, terminal, RawScreen}; use crossterm::{InputEvent, KeyEvent}; use nu::{ - serve_plugin, AnchorLocation, CallInfo, Plugin, Primitive, ShellError, Signature, SourceMap, - Tagged, Value, + serve_plugin, AnchorLocation, CallInfo, Plugin, Primitive, ShellError, Signature, Tagged, Value, }; use syntect::easy::HighlightLines; @@ -29,8 +28,8 @@ impl Plugin for TextView { Ok(Signature::build("textview").desc("Autoview of text data.")) } - fn sink(&mut self, call_info: CallInfo, input: Vec>) { - view_text_value(&input[0], &call_info.source_map); + fn sink(&mut self, _call_info: CallInfo, input: Vec>) { + view_text_value(&input[0]); } } @@ -215,20 +214,18 @@ fn scroll_view(s: &str) { scroll_view_lines_if_needed(v, false); } -fn view_text_value(value: &Tagged, source_map: &SourceMap) { +fn view_text_value(value: &Tagged) { let value_anchor = value.anchor(); match value.item { Value::Primitive(Primitive::String(ref s)) => { - let source = source_map.get(&value_anchor); - - if let Some(source) = source { + if let Some(source) = value_anchor { let extension: Option = match source { AnchorLocation::File(file) => { - let path = Path::new(file); + let path = Path::new(&file); path.extension().map(|x| x.to_string_lossy().to_string()) } AnchorLocation::Url(url) => { - let url = url::Url::parse(url); + let url = url::Url::parse(&url); if let Ok(url) = url { let url = url.clone(); if let Some(mut segments) = url.path_segments() { diff --git a/src/prelude.rs b/src/prelude.rs index eabd778717..1a87da2866 100644 --- a/src/prelude.rs +++ b/src/prelude.rs @@ -1,3 +1,13 @@ +#[macro_export] +macro_rules! return_err { + ($expr:expr) => { + match $expr { + Err(_) => return, + Ok(expr) => expr, + }; + }; +} + #[macro_export] macro_rules! 
stream { ($($expr:expr),*) => {{ @@ -56,12 +66,15 @@ pub(crate) use crate::commands::RawCommandArgs; pub(crate) use crate::context::CommandRegistry; pub(crate) use crate::context::{AnchorLocation, Context}; pub(crate) use crate::data::base as value; -pub(crate) use crate::data::meta::{Tag, Tagged, TaggedItem}; +pub(crate) use crate::data::meta::{ + tag_for_tagged_list, HasFallibleSpan, HasSpan, Span, Spanned, SpannedItem, Tag, Tagged, + TaggedItem, +}; pub(crate) use crate::data::types::ExtractType; pub(crate) use crate::data::{Primitive, Value}; pub(crate) use crate::env::host::handle_unexpected; pub(crate) use crate::env::Host; -pub(crate) use crate::errors::{CoerceInto, ShellError}; +pub(crate) use crate::errors::{CoerceInto, ParseError, ShellError}; pub(crate) use crate::parser::hir::SyntaxShape; pub(crate) use crate::parser::parse::parser::Number; pub(crate) use crate::parser::registry::Signature; @@ -70,7 +83,7 @@ pub(crate) use crate::shell::help_shell::HelpShell; pub(crate) use crate::shell::shell_manager::ShellManager; pub(crate) use crate::shell::value_shell::ValueShell; pub(crate) use crate::stream::{InputStream, OutputStream}; -pub(crate) use crate::traits::{HasTag, ToDebug}; +pub(crate) use crate::traits::{DebugFormatter, FormatDebug, HasTag, ToDebug}; pub(crate) use crate::Text; pub(crate) use async_stream::stream as async_stream; pub(crate) use bigdecimal::BigDecimal; @@ -81,9 +94,12 @@ pub(crate) use num_traits::cast::{FromPrimitive, ToPrimitive}; pub(crate) use num_traits::identities::Zero; pub(crate) use serde::Deserialize; pub(crate) use std::collections::VecDeque; +pub(crate) use std::fmt::Write; pub(crate) use std::future::Future; pub(crate) use std::sync::{Arc, Mutex}; +pub(crate) use itertools::Itertools; + pub trait FromInputStream { fn from_input_stream(self) -> OutputStream; } @@ -99,6 +115,22 @@ where } } +pub trait ToInputStream { + fn to_input_stream(self) -> InputStream; +} + +impl ToInputStream for T +where + T: Stream + Send + 'static, + U: Into, ShellError>>, +{ + fn to_input_stream(self) -> InputStream { + InputStream { + values: self.map(|item| item.into().unwrap()).boxed(), + } + } +} + pub trait ToOutputStream { fn to_output_stream(self) -> OutputStream; } diff --git a/src/shell/filesystem_shell.rs b/src/shell/filesystem_shell.rs index 3c1ae79ea3..60d5135d11 100644 --- a/src/shell/filesystem_shell.rs +++ b/src/shell/filesystem_shell.rs @@ -3,7 +3,6 @@ use crate::commands::cp::CopyArgs; use crate::commands::mkdir::MkdirArgs; use crate::commands::mv::MoveArgs; use crate::commands::rm::RemoveArgs; -use crate::context::SourceMap; use crate::data::dir_entry_dict; use crate::prelude::*; use crate::shell::completer::NuCompleter; @@ -12,6 +11,8 @@ use crate::utils::FileStructure; use rustyline::completion::FilenameCompleter; use rustyline::hint::{Hinter, HistoryHinter}; use std::path::{Path, PathBuf}; +use std::sync::atomic::Ordering; +use trash as SendToTrash; pub struct FilesystemShell { pub(crate) path: String, @@ -73,7 +74,7 @@ impl FilesystemShell { } impl Shell for FilesystemShell { - fn name(&self, _source_map: &SourceMap) -> String { + fn name(&self) -> String { "filesystem".to_string() } @@ -84,7 +85,7 @@ impl Shell for FilesystemShell { fn ls( &self, pattern: Option>, - command_tag: Tag, + context: &RunnableContext, ) -> Result { let cwd = self.path(); let mut full_path = PathBuf::from(self.path()); @@ -94,7 +95,8 @@ impl Shell for FilesystemShell { _ => {} } - let mut shell_entries = VecDeque::new(); + let ctrl_c = context.ctrl_c.clone(); + let 
name_tag = context.name.clone(); //If it's not a glob, try to display the contents of the entry if it's a directory let lossy_path = full_path.to_string_lossy(); @@ -114,24 +116,33 @@ impl Shell for FilesystemShell { return Err(ShellError::labeled_error( e.to_string(), e.to_string(), - command_tag, + name_tag, )); } } Ok(o) => o, }; - for entry in entries { - let entry = entry?; - let filepath = entry.path(); - let filename = if let Ok(fname) = filepath.strip_prefix(&cwd) { - fname - } else { - Path::new(&filepath) - }; - let value = dir_entry_dict(filename, &entry.metadata()?, command_tag)?; - shell_entries.push_back(ReturnSuccess::value(value)) - } - return Ok(shell_entries.to_output_stream()); + let stream = async_stream! { + for entry in entries { + if ctrl_c.load(Ordering::SeqCst) { + break; + } + if let Ok(entry) = entry { + let filepath = entry.path(); + if let Ok(metadata) = std::fs::symlink_metadata(&filepath) { + let filename = if let Ok(fname) = filepath.strip_prefix(&cwd) { + fname + } else { + Path::new(&filepath) + }; + + let value = dir_entry_dict(filename, &metadata, &name_tag)?; + yield ReturnSuccess::value(value); + } + } + } + }; + return Ok(stream.to_output_stream()); } } @@ -145,26 +156,33 @@ impl Shell for FilesystemShell { source.tag(), )); } else { - return Err(ShellError::string("Invalid pattern.")); + return Err(ShellError::untagged_runtime_error("Invalid pattern.")); } } }; // Enumerate the entries from the glob and add each - for entry in entries { - if let Ok(entry) = entry { - let filename = if let Ok(fname) = entry.strip_prefix(&cwd) { - fname - } else { - Path::new(&entry) - }; - let metadata = std::fs::metadata(&entry)?; - let value = dir_entry_dict(filename, &metadata, command_tag)?; - shell_entries.push_back(ReturnSuccess::value(value)) - } - } + let stream = async_stream! { + for entry in entries { + if ctrl_c.load(Ordering::SeqCst) { + break; + } + if let Ok(entry) = entry { + if let Ok(metadata) = std::fs::symlink_metadata(&entry) { + let filename = if let Ok(fname) = entry.strip_prefix(&cwd) { + fname + } else { + Path::new(&entry) + }; - Ok(shell_entries.to_output_stream()) + if let Ok(value) = dir_entry_dict(filename, &metadata, &name_tag) { + yield ReturnSuccess::value(value); + } + } + } + } + }; + Ok(stream.to_output_stream()) } fn cd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result { @@ -175,7 +193,7 @@ impl Shell for FilesystemShell { return Err(ShellError::labeled_error( "Can not change to home directory", "can not go to home", - args.call_info.name_tag, + &args.call_info.name_tag, )) } }, @@ -605,26 +623,71 @@ impl Shell for FilesystemShell { } if entry.is_file() { - match std::fs::rename(&entry, &destination) { - Err(e) => { - return Err(ShellError::labeled_error( - format!( - "Rename {:?} to {:?} aborted. {:}", - entry_file_name, - destination_file_name, - e.to_string(), - ), - format!( - "Rename {:?} to {:?} aborted. {:}", - entry_file_name, - destination_file_name, - e.to_string(), - ), - name_tag, - )); - } - Ok(o) => o, - }; + #[cfg(not(windows))] + { + match std::fs::rename(&entry, &destination) { + Err(e) => { + return Err(ShellError::labeled_error( + format!( + "Rename {:?} to {:?} aborted. {:}", + entry_file_name, + destination_file_name, + e.to_string(), + ), + format!( + "Rename {:?} to {:?} aborted. 
{:}", + entry_file_name, + destination_file_name, + e.to_string(), + ), + name_tag, + )); + } + Ok(o) => o, + }; + } + #[cfg(windows)] + { + match std::fs::copy(&entry, &destination) { + Err(e) => { + return Err(ShellError::labeled_error( + format!( + "Rename {:?} to {:?} aborted. {:}", + entry_file_name, + destination_file_name, + e.to_string(), + ), + format!( + "Rename {:?} to {:?} aborted. {:}", + entry_file_name, + destination_file_name, + e.to_string(), + ), + name_tag, + )); + } + Ok(_) => match std::fs::remove_file(&entry) { + Err(e) => { + return Err(ShellError::labeled_error( + format!( + "Rename {:?} to {:?} aborted. {:}", + entry_file_name, + destination_file_name, + e.to_string(), + ), + format!( + "Rename {:?} to {:?} aborted. {:}", + entry_file_name, + destination_file_name, + e.to_string(), + ), + name_tag, + )); + } + Ok(o) => o, + }, + }; + } } if entry.is_dir() { @@ -727,26 +790,45 @@ impl Shell for FilesystemShell { } if src.is_file() { - match std::fs::rename(src, dst) { + match std::fs::copy(&src, &dst) { Err(e) => { return Err(ShellError::labeled_error( format!( "Rename {:?} to {:?} aborted. {:}", - entry_file_name, + src, destination_file_name, e.to_string(), ), format!( "Rename {:?} to {:?} aborted. {:}", - entry_file_name, + src, destination_file_name, e.to_string(), ), name_tag, )); } - Ok(o) => o, - } + Ok(_) => match std::fs::remove_file(&src) { + Err(e) => { + return Err(ShellError::labeled_error( + format!( + "Rename {:?} to {:?} aborted. {:}", + entry_file_name, + destination_file_name, + e.to_string(), + ), + format!( + "Rename {:?} to {:?} aborted. {:}", + entry_file_name, + destination_file_name, + e.to_string(), + ), + name_tag, + )); + } + Ok(o) => o, + }, + }; } } @@ -806,26 +888,71 @@ impl Shell for FilesystemShell { to.push(entry_file_name); if entry.is_file() { - match std::fs::rename(&entry, &to) { - Err(e) => { - return Err(ShellError::labeled_error( - format!( - "Rename {:?} to {:?} aborted. {:}", - entry_file_name, - destination_file_name, - e.to_string(), - ), - format!( - "Rename {:?} to {:?} aborted. {:}", - entry_file_name, - destination_file_name, - e.to_string(), - ), - name_tag, - )); - } - Ok(o) => o, - }; + #[cfg(not(windows))] + { + match std::fs::rename(&entry, &to) { + Err(e) => { + return Err(ShellError::labeled_error( + format!( + "Rename {:?} to {:?} aborted. {:}", + entry_file_name, + destination_file_name, + e.to_string(), + ), + format!( + "Rename {:?} to {:?} aborted. {:}", + entry_file_name, + destination_file_name, + e.to_string(), + ), + name_tag, + )); + } + Ok(o) => o, + }; + } + #[cfg(windows)] + { + match std::fs::copy(&entry, &to) { + Err(e) => { + return Err(ShellError::labeled_error( + format!( + "Rename {:?} to {:?} aborted. {:}", + entry_file_name, + destination_file_name, + e.to_string(), + ), + format!( + "Rename {:?} to {:?} aborted. {:}", + entry_file_name, + destination_file_name, + e.to_string(), + ), + name_tag, + )); + } + Ok(_) => match std::fs::remove_file(&entry) { + Err(e) => { + return Err(ShellError::labeled_error( + format!( + "Remove {:?} to {:?} aborted. {:}", + entry_file_name, + destination_file_name, + e.to_string(), + ), + format!( + "Remove {:?} to {:?} aborted. 
{:}", + entry_file_name, + destination_file_name, + e.to_string(), + ), + name_tag, + )); + } + Ok(o) => o, + }, + }; + } } } } @@ -843,7 +970,11 @@ impl Shell for FilesystemShell { fn rm( &self, - RemoveArgs { target, recursive }: RemoveArgs, + RemoveArgs { + target, + recursive, + trash, + }: RemoveArgs, name: Tag, path: &str, ) -> Result { @@ -926,7 +1057,9 @@ impl Shell for FilesystemShell { )); } - if path.is_dir() { + if trash.item { + SendToTrash::remove(path).unwrap(); + } else if path.is_dir() { std::fs::remove_dir_all(&path)?; } else if path.is_file() { std::fs::remove_file(&path)?; @@ -957,7 +1090,7 @@ impl Shell for FilesystemShell { return Err(ShellError::labeled_error( "unable to show current directory", "pwd command failed", - args.call_info.name_tag, + &args.call_info.name_tag, )); } }; @@ -965,7 +1098,7 @@ impl Shell for FilesystemShell { let mut stream = VecDeque::new(); stream.push_back(ReturnSuccess::value( Value::Primitive(Primitive::String(p.to_string_lossy().to_string())) - .tagged(args.call_info.name_tag), + .tagged(&args.call_info.name_tag), )); Ok(stream.into()) diff --git a/src/shell/help_shell.rs b/src/shell/help_shell.rs index 0fedd9ad79..7c0e74bde4 100644 --- a/src/shell/help_shell.rs +++ b/src/shell/help_shell.rs @@ -3,7 +3,6 @@ use crate::commands::cp::CopyArgs; use crate::commands::mkdir::MkdirArgs; use crate::commands::mv::MoveArgs; use crate::commands::rm::RemoveArgs; -use crate::context::SourceMap; use crate::data::{command_dict, TaggedDictBuilder}; use crate::prelude::*; use crate::shell::shell::Shell; @@ -98,8 +97,8 @@ impl HelpShell { } impl Shell for HelpShell { - fn name(&self, source_map: &SourceMap) -> String { - let anchor_name = self.value.anchor_name(source_map); + fn name(&self) -> String { + let anchor_name = self.value.anchor_name(); format!( "{}", match anchor_name { @@ -129,7 +128,7 @@ impl Shell for HelpShell { fn ls( &self, _pattern: Option>, - _command_tag: Tag, + _context: &RunnableContext, ) -> Result { Ok(self .commands() diff --git a/src/shell/helper.rs b/src/shell/helper.rs index fa6c0bb67c..b12c176afb 100644 --- a/src/shell/helper.rs +++ b/src/shell/helper.rs @@ -1,10 +1,11 @@ +use crate::context::Context; +use crate::parser::hir::syntax_shape::{color_fallible_syntax, FlatShape, PipelineShape}; +use crate::parser::hir::TokensIterator; use crate::parser::nom_input; use crate::parser::parse::token_tree::TokenNode; -use crate::parser::parse::tokens::RawToken; -use crate::parser::{Pipeline, PipelineElement}; -use crate::shell::shell_manager::ShellManager; -use crate::Tagged; +use crate::{HasSpan, Spanned, SpannedItem, Tag, Tagged, Text}; use ansi_term::Color; +use log::{log_enabled, trace}; use rustyline::completion::Completer; use rustyline::error::ReadlineError; use rustyline::highlight::Highlighter; @@ -12,12 +13,12 @@ use rustyline::hint::Hinter; use std::borrow::Cow::{self, Owned}; pub(crate) struct Helper { - helper: ShellManager, + context: Context, } impl Helper { - pub(crate) fn new(helper: ShellManager) -> Helper { - Helper { helper } + pub(crate) fn new(context: Context) -> Helper { + Helper { context } } } @@ -29,30 +30,13 @@ impl Completer for Helper { pos: usize, ctx: &rustyline::Context<'_>, ) -> Result<(usize, Vec), ReadlineError> { - self.helper.complete(line, pos, ctx) + self.context.shell_manager.complete(line, pos, ctx) } } -/* -impl Completer for Helper { - type Candidate = rustyline::completion::Pair; - - fn complete( - &self, - line: &str, - pos: usize, - ctx: &rustyline::Context<'_>, - ) -> Result<(usize, Vec), 
ReadlineError> { - let result = self.helper.complete(line, pos, ctx); - - result.map(|(x, y)| (x, y.iter().map(|z| z.into()).collect())) - } -} -*/ - impl Hinter for Helper { fn hint(&self, line: &str, pos: usize, ctx: &rustyline::Context<'_>) -> Option { - self.helper.hint(line, pos, ctx) + self.context.shell_manager.hint(line, pos, ctx) } } @@ -66,7 +50,7 @@ impl Highlighter for Helper { } fn highlight<'l>(&self, line: &'l str, _pos: usize) -> Cow<'l, str> { - let tokens = crate::parser::pipeline(nom_input(line, uuid::Uuid::nil())); + let tokens = crate::parser::pipeline(nom_input(line)); match tokens { Err(_) => Cow::Borrowed(line), @@ -77,24 +61,49 @@ impl Highlighter for Helper { Ok(v) => v, }; - let Pipeline { parts, post_ws } = pipeline; - let mut iter = parts.into_iter(); + let tokens = vec![TokenNode::Pipeline(pipeline.clone().spanned(v.span()))]; + let mut tokens = TokensIterator::all(&tokens[..], v.span()); - loop { - match iter.next() { - None => { - if let Some(ws) = post_ws { - out.push_str(ws.slice(line)); - } + let text = Text::from(line); + let expand_context = self.context.expand_context(&text); - return Cow::Owned(out); - } - Some(token) => { - let styled = paint_pipeline_element(&token, line); - out.push_str(&styled.to_string()); - } - } + #[cfg(not(coloring_in_tokens))] + let shapes = { + let mut shapes = vec![]; + color_fallible_syntax( + &PipelineShape, + &mut tokens, + &expand_context, + &mut shapes, + ) + .unwrap(); + shapes + }; + + #[cfg(coloring_in_tokens)] + let shapes = { + // We just constructed a token list that only contains a pipeline, so it can't fail + color_fallible_syntax(&PipelineShape, &mut tokens, &expand_context).unwrap(); + tokens.with_color_tracer(|_, tracer| tracer.finish()); + + tokens.state().shapes() + }; + + trace!(target: "nu::color_syntax", "{:#?}", tokens.color_tracer()); + + if log_enabled!(target: "nu::color_syntax", log::Level::Debug) { + println!(""); + ptree::print_tree(&tokens.color_tracer().clone().print(Text::from(line))) + .unwrap(); + println!(""); } + + for shape in shapes { + let styled = paint_flat_shape(&shape, line); + out.push_str(&styled); + } + + Cow::Owned(out) } } } @@ -104,83 +113,55 @@ impl Highlighter for Helper { } } -fn paint_token_node(token_node: &TokenNode, line: &str) -> String { - let styled = match token_node { - TokenNode::Call(..) => Color::Cyan.bold().paint(token_node.tag().slice(line)), - TokenNode::Whitespace(..) => Color::White.normal().paint(token_node.tag().slice(line)), - TokenNode::Flag(..) => Color::Black.bold().paint(token_node.tag().slice(line)), - TokenNode::Member(..) => Color::Yellow.bold().paint(token_node.tag().slice(line)), - TokenNode::Path(..) => Color::Green.bold().paint(token_node.tag().slice(line)), - TokenNode::Error(..) => Color::Red.bold().paint(token_node.tag().slice(line)), - TokenNode::Delimited(..) => Color::White.paint(token_node.tag().slice(line)), - TokenNode::Operator(..) => Color::White.normal().paint(token_node.tag().slice(line)), - TokenNode::Pipeline(..) => Color::Blue.normal().paint(token_node.tag().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::Number(..), - .. - }) => Color::Purple.bold().paint(token_node.tag().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::Size(..), - .. - }) => Color::Purple.bold().paint(token_node.tag().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::GlobPattern, - .. - }) => Color::Cyan.normal().paint(token_node.tag().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::String(..), - .. 
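// A sketch of what the new paint_flat_shape below does in place of the
// per-token match removed above: each shape carries a span into the original
// line, and that slice is wrapped in an ansi_term style chosen by shape kind.
// ShapeKind and Shape are simplified placeholders for FlatShape and
// Spanned<FlatShape>.
use ansi_term::Color;

enum ShapeKind {
    InternalCommand,
    String,
    Whitespace,
}

struct Shape {
    kind: ShapeKind,
    start: usize,
    end: usize,
}

fn paint_line(line: &str, shapes: &[Shape]) -> String {
    let mut out = String::new();
    for shape in shapes {
        let style = match shape.kind {
            ShapeKind::InternalCommand => Color::Cyan.bold(),
            ShapeKind::String => Color::Green.normal(),
            ShapeKind::Whitespace => Color::White.normal(),
        };
        out.push_str(&style.paint(&line[shape.start..shape.end]).to_string());
    }
    out
}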
- }) => Color::Green.normal().paint(token_node.tag().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::Variable(..), - .. - }) => Color::Yellow.bold().paint(token_node.tag().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::Bare, - .. - }) => Color::Green.normal().paint(token_node.tag().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::ExternalCommand(..), - .. - }) => Color::Cyan.bold().paint(token_node.tag().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::ExternalWord, - .. - }) => Color::Black.bold().paint(token_node.tag().slice(line)), - }; +#[allow(unused)] +fn vec_tag(input: Vec>) -> Option { + let mut iter = input.iter(); + let first = iter.next()?.tag.clone(); + let last = iter.last(); - styled.to_string() + Some(match last { + None => first, + Some(last) => first.until(&last.tag), + }) } -fn paint_pipeline_element(pipeline_element: &PipelineElement, line: &str) -> String { - let mut styled = String::new(); - - if let Some(_) = pipeline_element.pipe { - styled.push_str(&Color::Purple.paint("|")); - } - - if let Some(ws) = pipeline_element.pre_ws { - styled.push_str(&Color::White.normal().paint(ws.slice(line))); - } - - styled.push_str( - &Color::Cyan - .bold() - .paint(pipeline_element.call().head().tag().slice(line)) - .to_string(), - ); - - if let Some(children) = pipeline_element.call().children() { - for child in children { - styled.push_str(&paint_token_node(child, line)); +fn paint_flat_shape(flat_shape: &Spanned, line: &str) -> String { + let style = match &flat_shape.item { + FlatShape::OpenDelimiter(_) => Color::White.normal(), + FlatShape::CloseDelimiter(_) => Color::White.normal(), + FlatShape::ItVariable => Color::Purple.bold(), + FlatShape::Variable => Color::Purple.normal(), + FlatShape::Operator => Color::Yellow.normal(), + FlatShape::Dot => Color::White.normal(), + FlatShape::InternalCommand => Color::Cyan.bold(), + FlatShape::ExternalCommand => Color::Cyan.normal(), + FlatShape::ExternalWord => Color::Black.bold(), + FlatShape::BareMember => Color::Yellow.bold(), + FlatShape::StringMember => Color::Yellow.bold(), + FlatShape::String => Color::Green.normal(), + FlatShape::Path => Color::Cyan.normal(), + FlatShape::GlobPattern => Color::Cyan.bold(), + FlatShape::Word => Color::Green.normal(), + FlatShape::Pipe => Color::Purple.bold(), + FlatShape::Flag => Color::Black.bold(), + FlatShape::ShorthandFlag => Color::Black.bold(), + FlatShape::Int => Color::Purple.bold(), + FlatShape::Decimal => Color::Purple.bold(), + FlatShape::Whitespace => Color::White.normal(), + FlatShape::Error => Color::Red.bold(), + FlatShape::Size { number, unit } => { + let number = number.slice(line); + let unit = unit.slice(line); + return format!( + "{}{}", + Color::Purple.bold().paint(number), + Color::Cyan.bold().paint(unit) + ); } - } + }; - if let Some(ws) = pipeline_element.post_ws { - styled.push_str(&Color::White.normal().paint(ws.slice(line))); - } - - styled.to_string() + let body = flat_shape.span.slice(line); + style.paint(body).to_string() } impl rustyline::Helper for Helper {} diff --git a/src/shell/shell.rs b/src/shell/shell.rs index c567e474a3..507fc0517b 100644 --- a/src/shell/shell.rs +++ b/src/shell/shell.rs @@ -3,20 +3,19 @@ use crate::commands::cp::CopyArgs; use crate::commands::mkdir::MkdirArgs; use crate::commands::mv::MoveArgs; use crate::commands::rm::RemoveArgs; -use crate::context::SourceMap; use crate::errors::ShellError; use crate::prelude::*; use crate::stream::OutputStream; use std::path::PathBuf; pub trait Shell: 
std::fmt::Debug { - fn name(&self, source_map: &SourceMap) -> String; + fn name(&self) -> String; fn homedir(&self) -> Option; fn ls( &self, pattern: Option>, - command_tag: Tag, + context: &RunnableContext, ) -> Result; fn cd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result; fn cp(&self, args: CopyArgs, name: Tag, path: &str) -> Result; diff --git a/src/shell/shell_manager.rs b/src/shell/shell_manager.rs index c4c42367ed..149fdd58d1 100644 --- a/src/shell/shell_manager.rs +++ b/src/shell/shell_manager.rs @@ -10,18 +10,19 @@ use crate::shell::shell::Shell; use crate::stream::OutputStream; use std::error::Error; use std::path::PathBuf; +use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::{Arc, Mutex}; #[derive(Clone, Debug)] pub struct ShellManager { - pub(crate) current_shell: usize, + pub(crate) current_shell: Arc, pub(crate) shells: Arc>>>, } impl ShellManager { pub fn basic(commands: CommandRegistry) -> Result> { Ok(ShellManager { - current_shell: 0, + current_shell: Arc::new(AtomicUsize::new(0)), shells: Arc::new(Mutex::new(vec![Box::new(FilesystemShell::basic( commands, )?)])), @@ -30,24 +31,29 @@ impl ShellManager { pub fn insert_at_current(&mut self, shell: Box) { self.shells.lock().unwrap().push(shell); - self.current_shell = self.shells.lock().unwrap().len() - 1; + self.current_shell + .store(self.shells.lock().unwrap().len() - 1, Ordering::SeqCst); self.set_path(self.path()); } + pub fn current_shell(&self) -> usize { + self.current_shell.load(Ordering::SeqCst) + } + pub fn remove_at_current(&mut self) { { let mut shells = self.shells.lock().unwrap(); if shells.len() > 0 { - if self.current_shell == shells.len() - 1 { + if self.current_shell() == shells.len() - 1 { shells.pop(); let new_len = shells.len(); if new_len > 0 { - self.current_shell = new_len - 1; + self.current_shell.store(new_len - 1, Ordering::SeqCst); } else { return; } } else { - shells.remove(self.current_shell); + shells.remove(self.current_shell()); } } } @@ -59,17 +65,17 @@ impl ShellManager { } pub fn path(&self) -> String { - self.shells.lock().unwrap()[self.current_shell].path() + self.shells.lock().unwrap()[self.current_shell()].path() } pub fn pwd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result { let env = self.shells.lock().unwrap(); - env[self.current_shell].pwd(args) + env[self.current_shell()].pwd(args) } pub fn set_path(&mut self, path: String) { - self.shells.lock().unwrap()[self.current_shell].set_path(path) + self.shells.lock().unwrap()[self.current_shell()].set_path(path) } pub fn complete( @@ -78,20 +84,21 @@ impl ShellManager { pos: usize, ctx: &rustyline::Context<'_>, ) -> Result<(usize, Vec), rustyline::error::ReadlineError> { - self.shells.lock().unwrap()[self.current_shell].complete(line, pos, ctx) + self.shells.lock().unwrap()[self.current_shell()].complete(line, pos, ctx) } pub fn hint(&self, line: &str, pos: usize, ctx: &rustyline::Context<'_>) -> Option { - self.shells.lock().unwrap()[self.current_shell].hint(line, pos, ctx) + self.shells.lock().unwrap()[self.current_shell()].hint(line, pos, ctx) } pub fn next(&mut self) { { let shell_len = self.shells.lock().unwrap().len(); - if self.current_shell == (shell_len - 1) { - self.current_shell = 0; + if self.current_shell() == (shell_len - 1) { + self.current_shell.store(0, Ordering::SeqCst); } else { - self.current_shell += 1; + self.current_shell + .store(self.current_shell() + 1, Ordering::SeqCst); } } self.set_path(self.path()); @@ -100,10 +107,11 @@ impl ShellManager { pub fn prev(&mut self) { { let shell_len 
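// A sketch of the concurrency change in ShellManager above: the current-shell
// index becomes an Arc<AtomicUsize>, so cloned managers share one cursor and
// can advance it without holding the shells lock. `len` stands in for
// shells.lock().unwrap().len(); ShellCursor is a placeholder type.
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;

#[derive(Clone)]
struct ShellCursor {
    current: Arc<AtomicUsize>,
}

impl ShellCursor {
    fn current(&self) -> usize {
        self.current.load(Ordering::SeqCst)
    }

    // Mirrors ShellManager::next(): wrap around to the first shell at the end.
    fn next(&self, len: usize) {
        let next = if self.current() == len - 1 { 0 } else { self.current() + 1 };
        self.current.store(next, Ordering::SeqCst);
    }
}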
= self.shells.lock().unwrap().len(); - if self.current_shell == 0 { - self.current_shell = shell_len - 1; + if self.current_shell() == 0 { + self.current_shell.store(shell_len - 1, Ordering::SeqCst); } else { - self.current_shell -= 1; + self.current_shell + .store(self.current_shell() - 1, Ordering::SeqCst); } } self.set_path(self.path()); @@ -112,23 +120,23 @@ impl ShellManager { pub fn homedir(&self) -> Option { let env = self.shells.lock().unwrap(); - env[self.current_shell].homedir() + env[self.current_shell()].homedir() } pub fn ls( &self, path: Option>, - command_tag: Tag, + context: &RunnableContext, ) -> Result { let env = self.shells.lock().unwrap(); - env[self.current_shell].ls(path, command_tag) + env[self.current_shell()].ls(path, context) } pub fn cd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result { let env = self.shells.lock().unwrap(); - env[self.current_shell].cd(args) + env[self.current_shell()].cd(args) } pub fn cp( @@ -140,13 +148,13 @@ impl ShellManager { match env { Ok(x) => { - let path = x[self.current_shell].path(); - x[self.current_shell].cp(args, context.name, &path) + let path = x[self.current_shell()].path(); + x[self.current_shell()].cp(args, context.name.clone(), &path) } Err(e) => Err(ShellError::labeled_error( format!("Internal error: could not lock {}", e), "Internal error: could not lock", - context.name, + &context.name, )), } } @@ -160,13 +168,13 @@ impl ShellManager { match env { Ok(x) => { - let path = x[self.current_shell].path(); - x[self.current_shell].rm(args, context.name, &path) + let path = x[self.current_shell()].path(); + x[self.current_shell()].rm(args, context.name.clone(), &path) } Err(e) => Err(ShellError::labeled_error( format!("Internal error: could not lock {}", e), "Internal error: could not lock", - context.name, + &context.name, )), } } @@ -180,13 +188,13 @@ impl ShellManager { match env { Ok(x) => { - let path = x[self.current_shell].path(); - x[self.current_shell].mkdir(args, context.name, &path) + let path = x[self.current_shell()].path(); + x[self.current_shell()].mkdir(args, context.name.clone(), &path) } Err(e) => Err(ShellError::labeled_error( format!("Internal error: could not lock {}", e), "Internal error: could not lock", - context.name, + &context.name, )), } } @@ -200,13 +208,13 @@ impl ShellManager { match env { Ok(x) => { - let path = x[self.current_shell].path(); - x[self.current_shell].mv(args, context.name, &path) + let path = x[self.current_shell()].path(); + x[self.current_shell()].mv(args, context.name.clone(), &path) } Err(e) => Err(ShellError::labeled_error( format!("Internal error: could not lock {}", e), "Internal error: could not lock", - context.name, + &context.name, )), } } diff --git a/src/shell/value_shell.rs b/src/shell/value_shell.rs index d95d07cb97..0aa9e341bb 100644 --- a/src/shell/value_shell.rs +++ b/src/shell/value_shell.rs @@ -3,7 +3,6 @@ use crate::commands::cp::CopyArgs; use crate::commands::mkdir::MkdirArgs; use crate::commands::mv::MoveArgs; use crate::commands::rm::RemoveArgs; -use crate::context::SourceMap; use crate::prelude::*; use crate::shell::shell::Shell; use crate::utils::ValueStructure; @@ -72,8 +71,8 @@ impl ValueShell { } impl Shell for ValueShell { - fn name(&self, source_map: &SourceMap) -> String { - let anchor_name = self.value.anchor_name(source_map); + fn name(&self) -> String { + let anchor_name = self.value.anchor_name(); format!( "{}", match anchor_name { @@ -90,9 +89,10 @@ impl Shell for ValueShell { fn ls( &self, target: Option>, - command_name: Tag, + 
context: &RunnableContext, ) -> Result { let mut full_path = PathBuf::from(self.path()); + let name_tag = context.name.clone(); match &target { Some(value) => full_path.push(value.as_ref()), @@ -114,7 +114,7 @@ impl Shell for ValueShell { return Err(ShellError::labeled_error( "Can not list entries inside", "No such path exists", - command_name, + name_tag, )); } @@ -166,7 +166,7 @@ impl Shell for ValueShell { return Err(ShellError::labeled_error( "Can not change to path inside", "No such path exists", - args.call_info.name_tag, + &args.call_info.name_tag, )); } @@ -213,10 +213,9 @@ impl Shell for ValueShell { fn pwd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result { let mut stream = VecDeque::new(); - stream.push_back(ReturnSuccess::value(Tagged::from_item( - Value::string(self.path()), - args.call_info.name_tag, - ))); + stream.push_back(ReturnSuccess::value( + Value::string(self.path()).tagged(&args.call_info.name_tag), + )); Ok(stream.into()) } diff --git a/src/stream.rs b/src/stream.rs index 066acb74a1..f6f2d5e2e1 100644 --- a/src/stream.rs +++ b/src/stream.rs @@ -23,6 +23,17 @@ impl InputStream { } } +impl Stream for InputStream { + type Item = Tagged; + + fn poll_next( + mut self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> core::task::Poll> { + Stream::poll_next(std::pin::Pin::new(&mut self.values), cx) + } +} + impl From>> for InputStream { fn from(input: BoxStream<'static, Tagged>) -> InputStream { InputStream { values: input } diff --git a/src/traits.rs b/src/traits.rs index 677d019ad8..a33453ab23 100644 --- a/src/traits.rs +++ b/src/traits.rs @@ -1,14 +1,28 @@ use crate::prelude::*; -use std::fmt; +use derive_new::new; +use std::fmt::{self, Write}; -pub struct Debuggable<'a, T: ToDebug> { +pub struct Debuggable<'a, T: FormatDebug> { inner: &'a T, source: &'a str, } +impl FormatDebug for str { + fn fmt_debug(&self, f: &mut DebugFormatter, _source: &str) -> fmt::Result { + write!(f, "{}", self) + } +} + impl fmt::Display for Debuggable<'_, T> { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.inner.fmt_debug(f, self.source) + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.inner.fmt_debug( + &mut DebugFormatter::new( + f, + ansi_term::Color::White.bold(), + ansi_term::Color::Black.bold(), + ), + self.source, + ) } } @@ -16,13 +30,109 @@ pub trait HasTag { fn tag(&self) -> Tag; } -pub trait ToDebug: Sized { +#[derive(new)] +pub struct DebugFormatter<'me, 'args> { + formatter: &'me mut std::fmt::Formatter<'args>, + style: ansi_term::Style, + default_style: ansi_term::Style, +} + +impl<'me, 'args> DebugFormatter<'me, 'args> { + pub fn say<'debuggable>( + &mut self, + kind: &str, + debuggable: Debuggable<'debuggable, impl FormatDebug>, + ) -> std::fmt::Result { + write!(self, "{}", self.style.paint(kind))?; + write!(self, "{}", self.default_style.paint(" "))?; + write!( + self, + "{}", + self.default_style.paint(format!("{}", debuggable)) + ) + } + + pub fn say_str<'debuggable>( + &mut self, + kind: &str, + string: impl AsRef, + ) -> std::fmt::Result { + write!(self, "{}", self.style.paint(kind))?; + write!(self, "{}", self.default_style.paint(" "))?; + write!(self, "{}", self.default_style.paint(string.as_ref())) + } + + pub fn say_block( + &mut self, + kind: &str, + block: impl FnOnce(&mut Self) -> std::fmt::Result, + ) -> std::fmt::Result { + write!(self, "{}", self.style.paint(kind))?; + write!(self, "{}", self.default_style.paint(" "))?; + block(self) + } + + pub fn say_dict<'debuggable>( + &mut self, + kind: &str, + 
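// A sketch of the new `impl Stream for InputStream` above: the wrapper simply
// forwards poll_next to its boxed inner stream. `Wrapped` and the u64 item
// type are placeholders for InputStream and Tagged<Value>.
use futures::stream::{BoxStream, Stream, StreamExt};
use std::pin::Pin;
use std::task::{Context, Poll};

struct Wrapped {
    values: BoxStream<'static, u64>,
}

impl Stream for Wrapped {
    type Item = u64;

    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<u64>> {
        // BoxStream is Unpin, so re-pinning a borrow of the field is fine.
        Stream::poll_next(Pin::new(&mut self.values), cx)
    }
}

fn wrapped_of(items: Vec<u64>) -> Wrapped {
    Wrapped { values: futures::stream::iter(items).boxed() }
}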
dict: indexmap::IndexMap<&str, String>, + ) -> std::fmt::Result { + write!(self, "{}", self.style.paint(kind))?; + write!(self, "{}", self.default_style.paint(" "))?; + + let last = dict.len() - 1; + + for (i, (key, value)) in dict.into_iter().enumerate() { + write!(self, "{}", self.default_style.paint(key))?; + write!(self, "{}", self.default_style.paint("=["))?; + write!(self, "{}", self.style.paint(value))?; + write!(self, "{}", self.default_style.paint("]"))?; + + if i != last { + write!(self, "{}", self.default_style.paint(" "))?; + } + } + + Ok(()) + } +} + +impl<'a, 'b> std::fmt::Write for DebugFormatter<'a, 'b> { + fn write_str(&mut self, s: &str) -> std::fmt::Result { + self.formatter.write_str(s) + } + + fn write_char(&mut self, c: char) -> std::fmt::Result { + self.formatter.write_char(c) + } + + fn write_fmt(self: &mut Self, args: std::fmt::Arguments<'_>) -> std::fmt::Result { + self.formatter.write_fmt(args) + } +} + +pub trait FormatDebug: std::fmt::Debug { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result; +} + +pub trait ToDebug: Sized + FormatDebug { + fn debug<'a>(&'a self, source: &'a str) -> Debuggable<'a, Self>; +} + +impl FormatDebug for Box { + fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result { + (&**self).fmt_debug(f, source) + } +} + +impl ToDebug for T +where + T: FormatDebug + Sized, +{ fn debug<'a>(&'a self, source: &'a str) -> Debuggable<'a, Self> { Debuggable { inner: self, source, } } - - fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result; } diff --git a/src/utils.rs b/src/utils.rs index 6b1318f9e8..cef4aad193 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -5,6 +5,32 @@ use std::fmt; use std::ops::Div; use std::path::{Component, Path, PathBuf}; +pub fn did_you_mean( + obj_source: &Value, + field_tried: &Tagged, +) -> Option> { + let field_tried = field_tried.as_string().unwrap(); + + let possibilities = obj_source.data_descriptors(); + + let mut possible_matches: Vec<_> = possibilities + .into_iter() + .map(|x| { + let word = x.clone(); + let distance = natural::distance::levenshtein_distance(&word, &field_tried); + + (distance, word) + }) + .collect(); + + if possible_matches.len() > 0 { + possible_matches.sort(); + return Some(possible_matches); + } + + None +} + pub struct AbsoluteFile { inner: PathBuf, } @@ -448,6 +474,10 @@ mod tests { loc: fixtures().join("cargo_sample.toml"), at: 0 }, + Res { + loc: fixtures().join("fileA.txt"), + at: 0 + }, Res { loc: fixtures().join("jonathan.xml"), at: 0 diff --git a/tests/command_config_test.rs b/tests/command_config_test.rs index dd0f4e0ebb..8a45be47c5 100644 --- a/tests/command_config_test.rs +++ b/tests/command_config_test.rs @@ -86,30 +86,30 @@ fn sets_configuration_value() { h::delete_file_at(nu::config_path().unwrap().join("test_4.toml")); } -#[test] -fn removes_configuration_value() { - Playground::setup("config_test_5", |dirs, sandbox| { - sandbox.with_files(vec![FileWithContent( - "test_5.toml", - r#" - caballeros = [1, 1, 1] - podershell = [1, 1, 1] - "#, - )]); +// #[test] +// fn removes_configuration_value() { +// Playground::setup("config_test_5", |dirs, sandbox| { +// sandbox.with_files(vec![FileWithContent( +// "test_5.toml", +// r#" +// caballeros = [1, 1, 1] +// podershell = [1, 1, 1] +// "#, +// )]); - nu!( - cwd: dirs.test(), - "config --load test_5.toml --remove podershell" - ); +// nu!( +// cwd: dirs.test(), +// "config --load test_5.toml --remove podershell" +// ); - let actual = nu_error!( - cwd: dirs.root(), - r#"open 
"{}/test_5.toml" | get podershell | echo $it"#, - dirs.config_path() - ); +// let actual = nu_error!( +// cwd: dirs.root(), +// r#"open "{}/test_5.toml" | get podershell | echo $it"#, +// dirs.config_path() +// ); - assert!(actual.contains("Unknown column")); - }); +// assert!(actual.contains("Unknown column")); +// }); - h::delete_file_at(nu::config_path().unwrap().join("test_5.toml")); -} +// h::delete_file_at(nu::config_path().unwrap().join("test_5.toml")); +// } diff --git a/tests/command_enter_test.rs b/tests/command_enter_test.rs index fe22b56dbe..fc4a437a23 100644 --- a/tests/command_enter_test.rs +++ b/tests/command_enter_test.rs @@ -73,3 +73,16 @@ fn knows_the_filesystems_entered() { )); }) } + +#[test] +fn errors_if_file_not_found() { + Playground::setup("enter_test_2", |dirs, _| { + let actual = nu_error!( + cwd: dirs.test(), + "enter i_dont_exist.csv" + ); + + assert!(actual.contains("File could not be opened")); + assert!(actual.contains("file not found")); + }) +} diff --git a/tests/command_get_tests.rs b/tests/command_get_tests.rs new file mode 100644 index 0000000000..71fde763a9 --- /dev/null +++ b/tests/command_get_tests.rs @@ -0,0 +1,223 @@ +mod helpers; + +use helpers as h; +use helpers::{Playground, Stub::*}; + +#[test] +fn get() { + Playground::setup("get_test_1", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" + nu_party_venue = "zion" + "#, + )]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + open sample.toml + | get nu_party_venue + | echo $it + "# + )); + + assert_eq!(actual, "zion"); + }) +} + +#[test] +fn fetches_by_index() { + Playground::setup("get_test_2", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" + [package] + name = "nu" + version = "0.4.1" + authors = ["Yehuda Katz ", "Jonathan Turner ", "Andrés N. Robalino "] + description = "When arepas shells are tasty and fun." + "#, + )]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + open sample.toml + | get package.authors.2 + | echo $it + "# + )); + + assert_eq!(actual, "Andrés N. Robalino "); + }) +} +#[test] +fn fetches_by_column_path() { + Playground::setup("get_test_3", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" + [package] + name = "nu" + "#, + )]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + open sample.toml + | get package.name + | echo $it + "# + )); + + assert_eq!(actual, "nu"); + }) +} + +#[test] +fn column_paths_are_either_double_quoted_or_regular_unquoted_words_separated_by_dot() { + Playground::setup("get_test_4", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" + [package] + 9999 = ["Yehuda Katz ", "Jonathan Turner ", "Andrés N. Robalino "] + description = "When arepas shells are tasty and fun." + "#, + )]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + open sample.toml + | get package."9999" + | count + | echo $it + "# + )); + + assert_eq!(actual, "3"); + }) +} + +#[test] +fn fetches_more_than_one_column_path() { + Playground::setup("get_test_5", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" + [[fortune_tellers]] + name = "Andrés N. 
Robalino" + arepas = 1 + + [[fortune_tellers]] + name = "Jonathan Turner" + arepas = 1 + + [[fortune_tellers]] + name = "Yehuda Katz" + arepas = 1 + "#, + )]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + open sample.toml + | get fortune_tellers.2.name fortune_tellers.0.name fortune_tellers.1.name + | nth 2 + | echo $it + "# + )); + + assert_eq!(actual, "Jonathan Turner"); + }) +} + +#[test] +fn errors_fetching_by_column_not_present() { + Playground::setup("get_test_6", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" + [taconushell] + sentence_words = ["Yo", "quiero", "taconushell"] + "#, + )]); + + let actual = nu_error!( + cwd: dirs.test(), h::pipeline( + r#" + open sample.toml + | get taco + "# + )); + + assert!(actual.contains("Unknown column")); + assert!(actual.contains("did you mean 'taconushell'?")); + }) +} + +#[test] +#[should_panic] +fn errors_fetching_by_column_using_a_number() { + Playground::setup("get_test_7", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" + [spanish_lesson] + 0 = "can only be fetched with 0 double quoted." + "#, + )]); + + let actual = nu_error!( + cwd: dirs.test(), h::pipeline( + r#" + open sample.toml + | get spanish_lesson.9 + "# + )); + + assert!(actual.contains("No rows available")); + assert!(actual.contains(r#"Not a table. Perhaps you meant to get the column "0" instead?"#)) + }) +} +#[test] +fn errors_fetching_by_index_out_of_bounds() { + Playground::setup("get_test_8", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" + [spanish_lesson] + sentence_words = ["Yo", "quiero", "taconushell"] + "#, + )]); + + let actual = nu_error!( + cwd: dirs.test(), h::pipeline( + r#" + open sample.toml + | get spanish_lesson.sentence_words.3 + "# + )); + + assert!(actual.contains("Row not found")); + assert!(actual.contains("There isn't a row indexed at '3'")); + assert!(actual.contains("The table only has 3 rows (0..2)")) + }) +} + +#[test] +fn requires_at_least_one_column_member_path() { + Playground::setup("get_test_9", |dirs, sandbox| { + sandbox.with_files(vec![EmptyFile("andres.txt")]); + + let actual = nu_error!( + cwd: dirs.test(), "ls | get" + ); + + assert!(actual.contains("requires member parameter")); + }) +} diff --git a/tests/command_ls_tests.rs b/tests/command_ls_tests.rs index f6f5f39f86..a0ae959e12 100644 --- a/tests/command_ls_tests.rs +++ b/tests/command_ls_tests.rs @@ -7,26 +7,21 @@ use helpers::{Playground, Stub::*}; fn ls_lists_regular_files() { Playground::setup("ls_test_1", |dirs, sandbox| { sandbox.with_files(vec![ - EmptyFile("yehuda.10.txt"), - EmptyFile("jonathan.10.txt"), - EmptyFile("andres.10.txt"), + EmptyFile("yehuda.txt"), + EmptyFile("jonathan.txt"), + EmptyFile("andres.txt"), ]); let actual = nu!( cwd: dirs.test(), h::pipeline( r#" ls - | get name - | lines - | split-column "." - | get Column2 - | str --to-int - | sum + | count | echo $it "# )); - assert_eq!(actual, "30"); + assert_eq!(actual, "3"); }) } @@ -34,22 +29,17 @@ fn ls_lists_regular_files() { fn ls_lists_regular_files_using_asterisk_wildcard() { Playground::setup("ls_test_2", |dirs, sandbox| { sandbox.with_files(vec![ - EmptyFile("los.1.txt"), - EmptyFile("tres.1.txt"), - EmptyFile("amigos.1.txt"), - EmptyFile("arepas.1.clu"), + EmptyFile("los.txt"), + EmptyFile("tres.txt"), + EmptyFile("amigos.txt"), + EmptyFile("arepas.clu"), ]); let actual = nu!( cwd: dirs.test(), h::pipeline( r#" ls *.txt - | get name - | lines - | split-column "." 
- | get Column2 - | str --to-int - | sum + | count | echo $it "# )); @@ -72,16 +62,11 @@ fn ls_lists_regular_files_using_question_mark_wildcard() { cwd: dirs.test(), h::pipeline( r#" ls *.??.txt - | get name - | lines - | split-column "." - | get Column2 - | str --to-int - | sum + | count | echo $it "# )); - assert_eq!(actual, "30"); + assert_eq!(actual, "3"); }) } diff --git a/tests/command_open_tests.rs b/tests/command_open_tests.rs index 54dc7ad54d..48f438f3d6 100644 --- a/tests/command_open_tests.rs +++ b/tests/command_open_tests.rs @@ -212,7 +212,7 @@ fn open_can_parse_ini() { fn open_can_parse_utf16_ini() { let actual = nu!( cwd: "tests/fixtures/formats", - "open utf16.ini | get .ShellClassInfo | get IconIndex | echo $it" + "open utf16.ini | get '.ShellClassInfo' | get IconIndex | echo $it" ); assert_eq!(actual, "-236") @@ -222,8 +222,9 @@ fn open_can_parse_utf16_ini() { fn errors_if_file_not_found() { let actual = nu_error!( cwd: "tests/fixtures/formats", - "open i_dont_exist.txt | echo $it" + "open i_dont_exist.txt" ); assert!(actual.contains("File could not be opened")); + assert!(actual.contains("file not found")); } diff --git a/tests/commands_test.rs b/tests/commands_test.rs index 30636eafca..661d14023e 100644 --- a/tests/commands_test.rs +++ b/tests/commands_test.rs @@ -4,25 +4,25 @@ use helpers as h; use helpers::{Playground, Stub::*}; #[test] -fn first_gets_first_rows_by_amount() { - Playground::setup("first_test_1", |dirs, sandbox| { - sandbox.with_files(vec![ - EmptyFile("los.1.txt"), - EmptyFile("tres.1.txt"), - EmptyFile("amigos.1.txt"), - EmptyFile("arepas.1.clu"), - ]); +fn group_by() { + Playground::setup("group_by_test_1", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContentToBeTrimmed( + "los_tres_caballeros.csv", + r#" + first_name,last_name,rusty_at,type + Andrés,Robalino,10/11/2013,A + Jonathan,Turner,10/12/2013,B + Yehuda,Katz,10/11/2013,A + "#, + )]); let actual = nu!( cwd: dirs.test(), h::pipeline( r#" - ls - | get name - | first 2 - | split-column "." 
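// The group_by test introduced around this point pipes rows through
// `group-by rusty_at` and expects two rows under "10/11/2013". A minimal
// sketch of that grouping step over (name, key) pairs:
use std::collections::BTreeMap;

fn group_by_key<'a>(rows: &[(&'a str, &'a str)]) -> BTreeMap<&'a str, Vec<&'a str>> {
    let mut groups: BTreeMap<&'a str, Vec<&'a str>> = BTreeMap::new();
    for &(name, key) in rows {
        groups.entry(key).or_default().push(name);
    }
    groups
}

// group_by_key(&[("Andrés", "10/11/2013"), ("Jonathan", "10/12/2013"),
//               ("Yehuda", "10/11/2013")])["10/11/2013"].len() == 2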
- | get Column2 - | str --to-int - | sum + open los_tres_caballeros.csv + | group-by rusty_at + | get "10/11/2013" + | count | echo $it "# )); @@ -32,87 +32,218 @@ fn first_gets_first_rows_by_amount() { } #[test] -fn first_requires_an_amount() { - Playground::setup("first_test_2", |dirs, _| { +fn histogram() { + Playground::setup("histogram_test_1", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContentToBeTrimmed( + "los_tres_caballeros.csv", + r#" + first_name,last_name,rusty_at + Andrés,Robalino,Ecuador + Jonathan,Turner,Estados Unidos + Yehuda,Katz,Estados Unidos + "#, + )]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + open los_tres_caballeros.csv + | histogram rusty_at countries + | where rusty_at == "Ecuador" + | get countries + | echo $it + "# + )); + + assert_eq!(actual, "**************************************************"); + // 50% + }) +} + +#[test] +fn group_by_errors_if_unknown_column_name() { + Playground::setup("group_by_test_2", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContentToBeTrimmed( + "los_tres_caballeros.csv", + r#" + first_name,last_name,rusty_at,type + Andrés,Robalino,10/11/2013,A + Jonathan,Turner,10/12/2013,B + Yehuda,Katz,10/11/2013,A + "#, + )]); + let actual = nu_error!( - cwd: dirs.test(), "ls | first" - ); + cwd: dirs.test(), h::pipeline( + r#" + open los_tres_caballeros.csv + | group-by ttype + "# + )); - assert!(actual.contains("requires amount parameter")); + assert!(actual.contains("Unknown column")); }) } #[test] -fn get() { - Playground::setup("get_test_1", |dirs, sandbox| { - sandbox.with_files(vec![FileWithContent( - "sample.toml", +fn split_by() { + Playground::setup("split_by_test_1", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContentToBeTrimmed( + "los_tres_caballeros.csv", r#" - nu_party_venue = "zion" + first_name,last_name,rusty_at,type + Andrés,Robalino,10/11/2013,A + Jonathan,Turner,10/12/2013,B + Yehuda,Katz,10/11/2013,A "#, )]); let actual = nu!( cwd: dirs.test(), h::pipeline( r#" - open sample.toml - | get nu_party_venue + open los_tres_caballeros.csv + | group-by rusty_at + | split-by type + | get A."10/11/2013" + | count | echo $it "# )); - assert_eq!(actual, "zion"); + assert_eq!(actual, "2"); }) } #[test] -fn get_more_than_one_member() { - Playground::setup("get_test_2", |dirs, sandbox| { - sandbox.with_files(vec![FileWithContent( - "sample.toml", +fn split_by_errors_if_no_table_given_as_input() { + Playground::setup("split_by_test_2", |dirs, sandbox| { + sandbox.with_files(vec![ + EmptyFile("los.txt"), + EmptyFile("tres.txt"), + EmptyFile("amigos.txt"), + EmptyFile("arepas.clu"), + ]); + + let actual = nu_error!( + cwd: dirs.test(), h::pipeline( r#" - [[fortune_tellers]] - name = "Andrés N. 
Robalino" - arepas = 1 - broken_builds = 0 + ls + | get name + | split-by type + "# + )); - [[fortune_tellers]] - name = "Jonathan Turner" - arepas = 1 - broken_builds = 1 + assert!(actual.contains("Expected table from pipeline")); + }) +} - [[fortune_tellers]] - name = "Yehuda Katz" - arepas = 1 - broken_builds = 1 - "#, - )]); +#[test] +fn first_gets_first_rows_by_amount() { + Playground::setup("first_test_1", |dirs, sandbox| { + sandbox.with_files(vec![ + EmptyFile("los.txt"), + EmptyFile("tres.txt"), + EmptyFile("amigos.txt"), + EmptyFile("arepas.clu"), + ]); let actual = nu!( cwd: dirs.test(), h::pipeline( r#" - open sample.toml - | get fortune_tellers - | get arepas broken_builds - | sum + ls + | first 3 + | count | echo $it "# )); - assert_eq!(actual, "5"); + assert_eq!(actual, "3"); }) } #[test] -fn get_requires_at_least_one_member() { +fn first_gets_all_rows_if_amount_higher_than_all_rows() { + Playground::setup("first_test_2", |dirs, sandbox| { + sandbox.with_files(vec![ + EmptyFile("los.txt"), + EmptyFile("tres.txt"), + EmptyFile("amigos.txt"), + EmptyFile("arepas.clu"), + ]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + ls + | first 99 + | count + | echo $it + "# + )); + + assert_eq!(actual, "4"); + }) +} + +#[test] +fn first_gets_first_row_when_no_amount_given() { Playground::setup("first_test_3", |dirs, sandbox| { - sandbox.with_files(vec![EmptyFile("andres.txt")]); + sandbox.with_files(vec![EmptyFile("caballeros.txt"), EmptyFile("arepas.clu")]); - let actual = nu_error!( - cwd: dirs.test(), "ls | get" - ); + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + ls + | first + | count + | echo $it + "# + )); - assert!(actual.contains("requires member parameter")); + assert_eq!(actual, "1"); + }) +} + +#[test] +fn last_gets_last_rows_by_amount() { + Playground::setup("last_test_1", |dirs, sandbox| { + sandbox.with_files(vec![ + EmptyFile("los.txt"), + EmptyFile("tres.txt"), + EmptyFile("amigos.txt"), + EmptyFile("arepas.clu"), + ]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + ls + | last 3 + | count + | echo $it + "# + )); + + assert_eq!(actual, "3"); + }) +} + +#[test] +fn last_gets_last_row_when_no_amount_given() { + Playground::setup("last_test_2", |dirs, sandbox| { + sandbox.with_files(vec![EmptyFile("caballeros.txt"), EmptyFile("arepas.clu")]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + ls + | last + | count + | echo $it + "# + )); + + assert_eq!(actual, "1"); }) } @@ -164,6 +295,33 @@ fn save_figures_out_intelligently_where_to_write_out_with_metadata() { }) } +#[test] +fn it_arg_works_with_many_inputs_to_external_command() { + Playground::setup("it_arg_works_with_many_inputs", |dirs, sandbox| { + sandbox.with_files(vec![ + FileWithContent("file1", "text"), + FileWithContent("file2", " and more text"), + ]); + + let (stdout, stderr) = nu_combined!( + cwd: dirs.test(), h::pipeline( + r#" + echo hello world + | split-row " " + | ^echo $it + "# + )); + + #[cfg(windows)] + assert_eq!("hello world", stdout); + + #[cfg(not(windows))] + assert_eq!("helloworld", stdout); + + assert!(!stderr.contains("No such file or directory")); + }) +} + #[test] fn save_can_write_out_csv() { Playground::setup("save_test_2", |dirs, _| { diff --git a/tests/filters_test.rs b/tests/filters_test.rs index f994fa4494..e18f20be67 100644 --- a/tests/filters_test.rs +++ b/tests/filters_test.rs @@ -100,13 +100,41 @@ fn converts_from_csv_text_to_structured_table() { }) } +#[test] +fn converts_from_csv_text_with_separator_to_structured_table() { 
+ Playground::setup("filter_from_csv_test_1", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContentToBeTrimmed( + "los_tres_caballeros.txt", + r#" + first_name;last_name;rusty_luck + Andrés;Robalino;1 + Jonathan;Turner;1 + Yehuda;Katz;1 + "#, + )]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + open los_tres_caballeros.txt + | from-csv --separator ';' + | get rusty_luck + | str --to-int + | sum + | echo $it + "# + )); + + assert_eq!(actual, "3"); + }) +} + #[test] fn converts_from_csv_text_skipping_headers_to_structured_table() { Playground::setup("filter_from_csv_test_2", |dirs, sandbox| { sandbox.with_files(vec![FileWithContentToBeTrimmed( "los_tres_amigos.txt", r#" - first_name,last_name,rusty_luck Andrés,Robalino,1 Jonathan,Turner,1 Yehuda,Katz,1 @@ -332,7 +360,6 @@ fn converts_from_tsv_text_skipping_headers_to_structured_table() { sandbox.with_files(vec![FileWithContentToBeTrimmed( "los_tres_amigos.txt", r#" - first Name Last Name rusty_luck Andrés Robalino 1 Jonathan Turner 1 Yehuda Katz 1 @@ -355,6 +382,101 @@ fn converts_from_tsv_text_skipping_headers_to_structured_table() { }) } +#[test] +fn converts_from_ssv_text_to_structured_table() { + Playground::setup("filter_from_ssv_test_1", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContentToBeTrimmed( + "oc_get_svc.txt", + r#" + NAME LABELS SELECTOR IP PORT(S) + docker-registry docker-registry=default docker-registry=default 172.30.78.158 5000/TCP + kubernetes component=apiserver,provider=kubernetes 172.30.0.2 443/TCP + kubernetes-ro component=apiserver,provider=kubernetes 172.30.0.1 80/TCP + "#, + )]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + open oc_get_svc.txt + | from-ssv + | nth 0 + | get IP + | echo $it + "# + )); + + assert_eq!(actual, "172.30.78.158"); + }) +} + +#[test] +fn converts_from_ssv_text_to_structured_table_with_separator_specified() { + Playground::setup("filter_from_ssv_test_1", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContentToBeTrimmed( + "oc_get_svc.txt", + r#" + NAME LABELS SELECTOR IP PORT(S) + docker-registry docker-registry=default docker-registry=default 172.30.78.158 5000/TCP + kubernetes component=apiserver,provider=kubernetes 172.30.0.2 443/TCP + kubernetes-ro component=apiserver,provider=kubernetes 172.30.0.1 80/TCP + "#, + )]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + open oc_get_svc.txt + | from-ssv --minimum-spaces 3 + | nth 0 + | get IP + | echo $it + "# + )); + + assert_eq!(actual, "172.30.78.158"); + }) +} + +#[test] +fn converts_from_ssv_text_treating_first_line_as_data_with_flag() { + Playground::setup("filter_from_ssv_test_2", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContentToBeTrimmed( + "oc_get_svc.txt", + r#" + docker-registry docker-registry=default docker-registry=default 172.30.78.158 5000/TCP + kubernetes component=apiserver,provider=kubernetes 172.30.0.2 443/TCP + kubernetes-ro component=apiserver,provider=kubernetes 172.30.0.1 80/TCP + "#, + )]); + + let aligned_columns = nu!( + cwd: dirs.test(), h::pipeline( + r#" + open oc_get_svc.txt + | from-ssv --headerless --aligned-columns + | first + | get Column1 + | echo $it + "# + )); + + let separator_based = nu!( + cwd: dirs.test(), h::pipeline( + r#" + open oc_get_svc.txt + | from-ssv --headerless + | first + | get Column1 + | echo $it + "# + )); + + assert_eq!(aligned_columns, separator_based); + assert_eq!(separator_based, "docker-registry"); + }) +} + #[test] fn can_convert_table_to_bson_and_back_into_table() { let actual = nu!( @@ -495,6 
+617,31 @@ fn can_sum() { assert_eq!(actual, "203") } +#[test] +fn can_average_numbers() { + let actual = nu!( + cwd: "tests/fixtures/formats", h::pipeline( + r#" + open sgml_description.json + | get glossary.GlossDiv.GlossList.GlossEntry.Sections + | average + | echo $it + "# + )); + + assert_eq!(actual, "101.5000000000000") +} + +#[test] +fn can_average_bytes() { + let actual = nu!( + cwd: "tests/fixtures/formats", + "ls | sort-by name | skip 1 | first 2 | get size | average | echo $it" + ); + + assert_eq!(actual, "1600.000000000000"); +} + #[test] fn can_filter_by_unit_size_comparison() { let actual = nu!( diff --git a/tests/fixtures/formats/fileA.txt b/tests/fixtures/formats/fileA.txt new file mode 100644 index 0000000000..0ce9fb3fa2 --- /dev/null +++ b/tests/fixtures/formats/fileA.txt @@ -0,0 +1,3 @@ +VAR1=Chill +VAR2=StupidLongName +VAR3=AlsoChill diff --git a/tests/helpers/mod.rs b/tests/helpers/mod.rs index 04fd889925..86c8a10e7f 100644 --- a/tests/helpers/mod.rs +++ b/tests/helpers/mod.rs @@ -93,6 +93,7 @@ macro_rules! nu { .write_all(commands.as_bytes()) .expect("couldn't write to stdin"); + let output = process .wait_with_output() .expect("couldn't read from stdout"); @@ -154,6 +155,60 @@ macro_rules! nu_error { }}; } +#[macro_export] +macro_rules! nu_combined { + (cwd: $cwd:expr, $path:expr, $($part:expr),*) => {{ + use $crate::helpers::DisplayPath; + + let path = format!($path, $( + $part.display_path() + ),*); + + nu_combined!($cwd, &path) + }}; + + (cwd: $cwd:expr, $path:expr) => {{ + nu_combined!($cwd, $path) + }}; + + ($cwd:expr, $path:expr) => {{ + pub use std::error::Error; + pub use std::io::prelude::*; + pub use std::process::{Command, Stdio}; + + let commands = &*format!( + " + cd {} + {} + exit", + $crate::helpers::in_directory($cwd), + $crate::helpers::DisplayPath::display_path(&$path) + ); + + let mut process = Command::new(helpers::executable_path()) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .spawn() + .expect("couldn't run test"); + + let stdin = process.stdin.as_mut().expect("couldn't open stdin"); + stdin + .write_all(commands.as_bytes()) + .expect("couldn't write to stdin"); + + let output = process + .wait_with_output() + .expect("couldn't read from stdout/stderr"); + + let err = String::from_utf8_lossy(&output.stderr).into_owned(); + let out = String::from_utf8_lossy(&output.stdout).into_owned(); + let out = out.replace("\r\n", ""); + let out = out.replace("\n", ""); + (out, err) + }}; +} + pub enum Stub<'a> { FileWithContent(&'a str, &'a str), FileWithContentToBeTrimmed(&'a str, &'a str), diff --git a/tests/tests.rs b/tests/tests.rs index 25337edb09..8799f5077d 100644 --- a/tests/tests.rs +++ b/tests/tests.rs @@ -42,12 +42,12 @@ fn external_has_correct_quotes() { } #[test] -fn add_plugin() { +fn insert_plugin() { let actual = nu!( cwd: "tests/fixtures/formats", h::pipeline( r#" open cargo_sample.toml - | add dev-dependencies.newdep "1" + | insert dev-dependencies.newdep "1" | get dev-dependencies.newdep | echo $it "# @@ -56,6 +56,69 @@ fn add_plugin() { assert_eq!(actual, "1"); } +#[test] +fn parse_plugin() { + let actual = nu!( + cwd: "tests/fixtures/formats", h::pipeline( + r#" + open fileA.txt + | parse "{Name}={Value}" + | nth 1 + | get Value + | echo $it + "# + )); + + assert_eq!(actual, "StupidLongName"); +} + +#[test] +fn format_plugin() { + let actual = nu!( + cwd: "tests/fixtures/formats", h::pipeline( + r#" + open cargo_sample.toml + | get package + | format "{name} has license {license}" + | echo $it + "# + 
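// A sketch of what the new nu_combined! test macro above does: spawn the
// binary with stdin, stdout and stderr all piped, feed it a script over
// stdin, and hand back both output streams. "sh" is only a stand-in for the
// nu executable path used by the real helper.
use std::io::Write;
use std::process::{Command, Stdio};

fn run_combined(script: &str) -> (String, String) {
    let mut child = Command::new("sh")
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .spawn()
        .expect("couldn't run test");

    child
        .stdin
        .as_mut()
        .expect("couldn't open stdin")
        .write_all(script.as_bytes())
        .expect("couldn't write to stdin");

    let output = child.wait_with_output().expect("couldn't read from stdout/stderr");

    (
        String::from_utf8_lossy(&output.stdout).into_owned(),
        String::from_utf8_lossy(&output.stderr).into_owned(),
    )
}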
)); + + assert_eq!(actual, "nu has license ISC"); +} + +#[test] +fn prepend_plugin() { + let actual = nu!( + cwd: "tests/fixtures/formats", h::pipeline( + r#" + open fileA.txt + | lines + | prepend "testme" + | nth 0 + | echo $it + "# + )); + + assert_eq!(actual, "testme"); +} + +#[test] +fn append_plugin() { + let actual = nu!( + cwd: "tests/fixtures/formats", h::pipeline( + r#" + open fileA.txt + | lines + | append "testme" + | nth 3 + | echo $it + "# + )); + + assert_eq!(actual, "testme"); +} + #[test] fn edit_plugin() { let actual = nu!(