Merge branch 'main' into dd-alarm-timer

Sylvestre Ledru 2023-06-11 18:27:41 +02:00 committed by GitHub
commit e7557c2bae
591 changed files with 15288 additions and 7472 deletions


@ -3,18 +3,11 @@ linker = "x86_64-unknown-redox-gcc"
[target.'cfg(feature = "cargo-clippy")']
rustflags = [
"-Wclippy::use_self",
"-Wclippy::needless_pass_by_value",
"-Wclippy::semicolon_if_nothing_returned",
"-Wclippy::single_char_pattern",
"-Wclippy::explicit_iter_loop",
"-Wclippy::use_self",
"-Wclippy::needless_pass_by_value",
"-Wclippy::semicolon_if_nothing_returned",
"-Wclippy::single_char_pattern",
"-Wclippy::explicit_iter_loop",
"-Wclippy::if_not_else",
]
[build]
# See https://github.com/time-rs/time/issues/293#issuecomment-1005002386. The
# unsoundness here is not in the `time` library, but in the Rust stdlib, and as
# such it needs to be fixed there.
rustflags = ["--cfg", "unsound_local_offset"]
[target.'cfg(target_os = "linux")']
rustflags = ["--cfg", "unsound_local_offset"]


@ -1 +1,2 @@
msrv = "1.64.0"
cognitive-complexity-threshold = 10

.config/nextest.toml (new file)

@ -0,0 +1,6 @@
[profile.ci]
retries = 2
status-level = "all"
final-status-level = "skip"
failure-output = "immediate-final"
fail-fast = false


@ -1,12 +0,0 @@
version: 2
updates:
- package-ecosystem: "cargo"
directory: "/"
schedule:
interval: weekly
open-pull-requests-limit: 10
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: weekly
open-pull-requests-limit: 5


@ -1,9 +1,9 @@
name: CICD
# spell-checker:ignore (abbrev/names) CICD CodeCOV MacOS MinGW MSVC musl
# spell-checker:ignore (env/flags) Awarnings Ccodegen Coverflow Cpanic Dwarnings RUSTDOCFLAGS RUSTFLAGS Zpanic
# spell-checker:ignore (jargon) SHAs deps dequote softprops subshell toolchain
# spell-checker:ignore (people) Peltoche rivy
# spell-checker:ignore (abbrev/names) CICD CodeCOV MacOS MinGW MSVC musl taiki
# spell-checker:ignore (env/flags) Awarnings Ccodegen Coverflow Cpanic Dwarnings RUSTDOCFLAGS RUSTFLAGS Zpanic CARGOFLAGS
# spell-checker:ignore (jargon) SHAs deps dequote softprops subshell toolchain fuzzers
# spell-checker:ignore (people) Peltoche rivy dtolnay
# spell-checker:ignore (shell/tools) choco clippy dmake dpkg esac fakeroot fdesc fdescfs gmake grcov halium lcov libssl mkdir popd printf pushd rsync rustc rustfmt rustup shopt utmpdump xargs
# spell-checker:ignore (misc) aarch alnum armhf bindir busytest coreutils defconfig DESTDIR gecos gnueabihf issuecomment maint multisize nullglob onexitbegin onexitend pell runtest Swatinem tempfile testsuite toybox uutils
@ -37,8 +37,6 @@ jobs:
## ToDO: [2021-11-10; rivy] 'Style/deps' needs more informative output and better integration of results into the GHA dashboard
name: Style/deps
runs-on: ${{ matrix.job.os }}
# env:
# STYLE_FAIL_ON_FAULT: false # overrides workflow default
strategy:
fail-fast: false
matrix:
@ -50,6 +48,11 @@ jobs:
- { os: windows-latest , features: feat_os_windows }
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@nightly
## note: requires 'nightly' toolchain b/c `cargo-udeps` uses the `rustc` '-Z save-analysis' option
## * ... ref: <https://github.com/est31/cargo-udeps/issues/73>
- uses: taiki-e/install-action@cargo-udeps
- uses: Swatinem/rust-cache@v2
- name: Initialize workflow variables
id: vars
shell: bash
@ -67,17 +70,6 @@ jobs:
CARGO_FEATURES_OPTION='' ;
if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features "${{ matrix.job.features }}"' ; fi
outputs CARGO_FEATURES_OPTION
## note: requires 'nightly' toolchain b/c `cargo-udeps` uses the `rustc` '-Z save-analysis' option
## * ... ref: <https://github.com/est31/cargo-udeps/issues/73>
- name: Install `rust` toolchain
run: |
rustup toolchain install nightly --no-self-update --profile minimal
rustup default nightly
- uses: Swatinem/rust-cache@v2
- name: Install `cargo-udeps`
run: cargo install cargo-udeps
env:
RUSTUP_TOOLCHAIN: stable
- name: Detect unused dependencies
shell: bash
run: |
@ -93,8 +85,6 @@ jobs:
style_format:
name: Style/format
runs-on: ${{ matrix.job.os }}
# env:
# STYLE_FAIL_ON_FAULT: false # overrides workflow default
strategy:
fail-fast: false
matrix:
@ -102,6 +92,11 @@ jobs:
- { os: ubuntu-latest , features: feat_os_unix }
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@master
with:
toolchain: stable
components: rustfmt
- uses: Swatinem/rust-cache@v2
- name: Initialize workflow variables
id: vars
shell: bash
@ -119,12 +114,6 @@ jobs:
CARGO_FEATURES_OPTION='' ;
if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features "${{ matrix.job.features }}"' ; fi
outputs CARGO_FEATURES_OPTION
- name: Install `rust` toolchain
run: |
## Install `rust` toolchain
rustup toolchain install stable --no-self-update -c rustfmt --profile minimal
rustup default stable
- uses: Swatinem/rust-cache@v2
- name: "`cargo fmt` testing"
shell: bash
run: |
@ -136,11 +125,48 @@ jobs:
S=$(cargo fmt -- --check) && printf "%s\n" "$S" || { printf "%s\n" "$S" ; printf "%s\n" "$S" | sed -E -n -e "s/^Diff[[:space:]]+in[[:space:]]+${PWD//\//\\/}\/(.*)[[:space:]]+at[[:space:]]+[^0-9]+([0-9]+).*$/::${fault_type} file=\1,line=\2::${fault_prefix}: \`cargo fmt\`: style violation (file:'\1', line:\2; use \`cargo fmt -- \"\1\"\`)/p" ; fault=true ; }
if [ -n "${{ steps.vars.outputs.FAIL_ON_FAULT }}" ] && [ -n "$fault" ]; then exit 1 ; fi
fuzz:
name: Run the fuzzers
runs-on: ubuntu-latest
env:
RUN_FOR: 60
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@nightly
- name: Install `cargo-fuzz`
run: cargo install cargo-fuzz
- uses: Swatinem/rust-cache@v2
- name: Run fuzz_date for XX seconds
shell: bash
run: |
## Run it
cd fuzz
cargo +nightly fuzz run fuzz_date -- -max_total_time=${{ env.RUN_FOR }} -detect_leaks=0
- name: Run fuzz_parse_glob for XX seconds
shell: bash
run: |
## Run it
cd fuzz
cargo +nightly fuzz run fuzz_parse_glob -- -max_total_time=${{ env.RUN_FOR }} -detect_leaks=0
- name: Run fuzz_parse_size for XX seconds
shell: bash
run: |
## Run it
cd fuzz
cargo +nightly fuzz run fuzz_parse_size -- -max_total_time=${{ env.RUN_FOR }} -detect_leaks=0
- name: Run fuzz_parse_time for XX seconds
shell: bash
run: |
## Run it
cd fuzz
cargo +nightly fuzz run fuzz_parse_time -- -max_total_time=${{ env.RUN_FOR }} -detect_leaks=0
style_lint:
name: Style/lint
runs-on: ${{ matrix.job.os }}
# env:
# STYLE_FAIL_ON_FAULT: false # overrides workflow default
env:
SCCACHE_GHA_ENABLED: "true"
RUSTC_WRAPPER: "sccache"
strategy:
fail-fast: false
matrix:
@ -150,6 +176,13 @@ jobs:
- { os: windows-latest , features: feat_os_windows }
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@master
with:
toolchain: stable
components: clippy
- uses: Swatinem/rust-cache@v2
- name: Run sccache-cache
uses: mozilla-actions/sccache-action@v0.0.3
- name: Initialize workflow variables
id: vars
shell: bash
@ -165,7 +198,7 @@ jobs:
# target-specific options
# * CARGO_FEATURES_OPTION
CARGO_FEATURES_OPTION='--all-features' ;
if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features "${{ matrix.job.features }}"' ; fi
if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features ${{ matrix.job.features }}' ; fi
outputs CARGO_FEATURES_OPTION
# * determine sub-crate utility list
UTILITY_LIST="$(./util/show-utils.sh ${CARGO_FEATURES_OPTION})"
@ -179,12 +212,6 @@ jobs:
case '${{ matrix.job.os }}' in
macos-latest) brew install coreutils ;; # needed for show-utils.sh
esac
- name: Install `rust` toolchain
run: |
## Install `rust` toolchain
rustup toolchain install stable --no-self-update -c clippy --profile minimal
rustup default stable
- uses: Swatinem/rust-cache@v2
- name: "`cargo clippy` lint testing"
shell: bash
run: |
@ -193,21 +220,18 @@ jobs:
fault_type="${{ steps.vars.outputs.FAULT_TYPE }}"
fault_prefix=$(echo "$fault_type" | tr '[:lower:]' '[:upper:]')
# * convert any warnings to GHA UI annotations; ref: <https://help.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-a-warning-message>
S=$(cargo clippy --all-targets ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} ${{ steps.vars.outputs.CARGO_UTILITY_LIST_OPTIONS }} -- -W clippy::manual_string_new -D warnings 2>&1) && printf "%s\n" "$S" || { printf "%s\n" "$S" ; printf "%s" "$S" | sed -E -n -e '/^error:/{' -e "N; s/^error:[[:space:]]+(.*)\\n[[:space:]]+-->[[:space:]]+(.*):([0-9]+):([0-9]+).*$/::${fault_type} file=\2,line=\3,col=\4::${fault_prefix}: \`cargo clippy\`: \1 (file:'\2', line:\3)/p;" -e '}' ; fault=true ; }
S=$(cargo clippy --all-targets ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_UTILITY_LIST_OPTIONS }} -- -W clippy::manual_string_new -D warnings 2>&1) && printf "%s\n" "$S" || { printf "%s\n" "$S" ; printf "%s" "$S" | sed -E -n -e '/^error:/{' -e "N; s/^error:[[:space:]]+(.*)\\n[[:space:]]+-->[[:space:]]+(.*):([0-9]+):([0-9]+).*$/::${fault_type} file=\2,line=\3,col=\4::${fault_prefix}: \`cargo clippy\`: \1 (file:'\2', line:\3)/p;" -e '}' ; fault=true ; }
if [ -n "${{ steps.vars.outputs.FAIL_ON_FAULT }}" ] && [ -n "$fault" ]; then exit 1 ; fi
style_spellcheck:
name: Style/spelling
runs-on: ${{ matrix.job.os }}
# env:
# STYLE_FAIL_ON_FAULT: false # overrides workflow default
strategy:
matrix:
job:
- { os: ubuntu-latest , features: feat_os_unix }
steps:
- uses: actions/checkout@v3
- uses: Swatinem/rust-cache@v2
- name: Initialize workflow variables
id: vars
shell: bash
@ -247,6 +271,9 @@ jobs:
doc_warnings:
name: Documentation/warnings
runs-on: ${{ matrix.job.os }}
env:
SCCACHE_GHA_ENABLED: "true"
RUSTC_WRAPPER: "sccache"
strategy:
fail-fast: false
matrix:
@ -259,6 +286,13 @@ jobs:
# - { os: windows-latest , features: feat_os_windows }
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@master
with:
toolchain: stable
components: clippy
- uses: Swatinem/rust-cache@v2
- name: Run sccache-cache
uses: mozilla-actions/sccache-action@v0.0.3
- name: Initialize workflow variables
id: vars
shell: bash
@ -274,34 +308,45 @@ jobs:
# target-specific options
# * CARGO_FEATURES_OPTION
CARGO_FEATURES_OPTION='--all-features' ;
if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features "${{ matrix.job.features }}"' ; fi
if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features ${{ matrix.job.features }}' ; fi
outputs CARGO_FEATURES_OPTION
# * determine sub-crate utility list
UTILITY_LIST="$(./util/show-utils.sh ${CARGO_FEATURES_OPTION})"
echo UTILITY_LIST=${UTILITY_LIST}
CARGO_UTILITY_LIST_OPTIONS="$(for u in ${UTILITY_LIST}; do echo -n "-puu_${u} "; done;)"
outputs CARGO_UTILITY_LIST_OPTIONS
- name: Install `rust` toolchain
run: |
## Install `rust` toolchain
rustup toolchain install stable --no-self-update -c clippy --profile minimal
rustup default stable
- uses: Swatinem/rust-cache@v2
- name: "`cargo doc` with warnings"
shell: bash
run: |
RUSTDOCFLAGS="-Dwarnings" cargo doc ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} --no-deps --workspace --document-private-items
- uses: DavidAnson/markdownlint-cli2-action@v10
with:
command: fix
globs: |
*.md
docs/src/*.md
src/uu/*/*.md
min_version:
name: MinRustV # Minimum supported rust version (aka, MinSRV or MSRV)
runs-on: ${{ matrix.job.os }}
env:
SCCACHE_GHA_ENABLED: "true"
RUSTC_WRAPPER: "sccache"
strategy:
matrix:
job:
- { os: ubuntu-latest , features: feat_os_unix }
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@master
with:
toolchain: ${{ env.RUST_MIN_SRV }}
components: rustfmt
- uses: taiki-e/install-action@nextest
- uses: Swatinem/rust-cache@v2
- name: Run sccache-cache
uses: mozilla-actions/sccache-action@v0.0.3
- name: Initialize workflow variables
id: vars
shell: bash
@ -313,12 +358,6 @@ jobs:
unset CARGO_FEATURES_OPTION
if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features "${{ matrix.job.features }}"' ; fi
outputs CARGO_FEATURES_OPTION
- name: Install `rust` toolchain (v${{ env.RUST_MIN_SRV }})
run: |
## Install `rust` toolchain (v${{ env.RUST_MIN_SRV }})
rustup toolchain install --no-self-update ${{ env.RUST_MIN_SRV }} --profile minimal
rustup default ${{ env.RUST_MIN_SRV }}
- uses: Swatinem/rust-cache@v2
- name: Confirm MinSRV compatible 'Cargo.lock'
shell: bash
run: |
@ -353,9 +392,9 @@ jobs:
RUSTUP_TOOLCHAIN=stable cargo fetch --locked --quiet
RUSTUP_TOOLCHAIN=stable cargo tree --all --locked --no-dev-dependencies --no-indent ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} | grep -vE "$PWD" | sort --unique
- name: Test
run: cargo test -v ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} -p uucore -p coreutils
run: cargo nextest run --hide-progress-bar --profile ci ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} -p uucore -p coreutils
env:
RUSTFLAGS: "-Awarnings --cfg unsound_local_offset"
RUSTFLAGS: "-Awarnings"
RUST_BACKTRACE: "1"
deps:
@ -368,11 +407,7 @@ jobs:
- { os: ubuntu-latest , features: feat_os_unix }
steps:
- uses: actions/checkout@v3
- name: Install `rust` toolchain
run: |
## Install `rust` toolchain
rustup toolchain install stable --no-self-update --profile minimal
rustup default stable
- uses: dtolnay/rust-toolchain@stable
- uses: Swatinem/rust-cache@v2
- name: "`cargo update` testing"
shell: bash
@ -385,6 +420,9 @@ jobs:
name: Build/Makefile
needs: [ min_version, deps ]
runs-on: ${{ matrix.job.os }}
env:
SCCACHE_GHA_ENABLED: "true"
RUSTC_WRAPPER: "sccache"
strategy:
fail-fast: false
matrix:
@ -392,29 +430,39 @@ jobs:
- { os: ubuntu-latest , features: feat_os_unix }
steps:
- uses: actions/checkout@v3
- name: Install `rust` toolchain
run: |
## Install `rust` toolchain
rustup toolchain install stable --no-self-update --profile minimal
rustup default stable
- uses: dtolnay/rust-toolchain@stable
- uses: taiki-e/install-action@nextest
- uses: Swatinem/rust-cache@v2
- name: Run sccache-cache
uses: mozilla-actions/sccache-action@v0.0.3
- name: "`make build`"
shell: bash
run: |
make build
- name: "`make test`"
- name: "`make nextest`"
shell: bash
run: |
make test
run: make nextest CARGOFLAGS="--profile ci --hide-progress-bar"
env:
RUST_BACKTRACE: "1"
- name: "`make install`"
shell: bash
run: |
DESTDIR=/tmp/ make PROFILE=release install
# Check that the manpage is present
test -f /tmp/usr/local/share/man/man1/whoami.1
# Check that the completion is present
test -f /tmp/usr/local/share/zsh/site-functions/_install
env:
RUST_BACKTRACE: "1"
build_rust_stable:
name: Build/stable
needs: [ min_version, deps ]
runs-on: ${{ matrix.job.os }}
timeout-minutes: 90
env:
SCCACHE_GHA_ENABLED: "true"
RUSTC_WRAPPER: "sccache"
strategy:
fail-fast: false
matrix:
@ -424,14 +472,13 @@ jobs:
- { os: windows-latest , features: feat_os_windows }
steps:
- uses: actions/checkout@v3
- name: Install `rust` toolchain
run: |
## Install `rust` toolchain
rustup toolchain install stable --no-self-update --profile minimal
rustup default stable
- uses: dtolnay/rust-toolchain@stable
- uses: taiki-e/install-action@nextest
- uses: Swatinem/rust-cache@v2
- name: Run sccache-cache
uses: mozilla-actions/sccache-action@v0.0.3
- name: Test
run: cargo test ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }}
run: cargo nextest run --hide-progress-bar --profile ci ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }}
env:
RUST_BACKTRACE: "1"
@ -440,6 +487,9 @@ jobs:
needs: [ min_version, deps ]
runs-on: ${{ matrix.job.os }}
timeout-minutes: 90
env:
SCCACHE_GHA_ENABLED: "true"
RUSTC_WRAPPER: "sccache"
strategy:
fail-fast: false
matrix:
@ -449,14 +499,13 @@ jobs:
- { os: windows-latest , features: feat_os_windows }
steps:
- uses: actions/checkout@v3
- name: Install `rust` toolchain
run: |
## Install `rust` toolchain
rustup toolchain install nightly --no-self-update --profile minimal
rustup default nightly
- uses: dtolnay/rust-toolchain@nightly
- uses: taiki-e/install-action@nextest
- uses: Swatinem/rust-cache@v2
- name: Run sccache-cache
uses: mozilla-actions/sccache-action@v0.0.3
- name: Test
run: cargo test ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }}
run: cargo nextest run --hide-progress-bar --profile ci ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }}
env:
RUST_BACKTRACE: "1"
@ -464,6 +513,9 @@ jobs:
name: Binary sizes
needs: [ min_version, deps ]
runs-on: ${{ matrix.job.os }}
env:
SCCACHE_GHA_ENABLED: "true"
RUSTC_WRAPPER: "sccache"
strategy:
fail-fast: false
matrix:
@ -471,18 +523,16 @@ jobs:
- { os: ubuntu-latest , features: feat_os_unix }
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@stable
- uses: Swatinem/rust-cache@v2
- name: Run sccache-cache
uses: mozilla-actions/sccache-action@v0.0.3
- name: Install dependencies
shell: bash
run: |
## Install dependencies
sudo apt-get update
sudo apt-get install jq
- name: Install `rust` toolchain
run: |
## Install `rust` toolchain
rustup toolchain install stable --no-self-update --profile minimal
rustup default stable
- uses: Swatinem/rust-cache@v2
- name: "`make install`"
shell: bash
run: |
@ -495,15 +545,70 @@ jobs:
shell: bash
run: |
## Compute uutil release sizes
SIZE=$(du -s target/size-release/usr/local/bin/|awk '{print $1}')
SIZE_MULTI=$(du -s target/size-multi-release/usr/local/bin/|awk '{print $1}')
DATE=$(date --rfc-email)
find target/size-release/usr/local/bin -type f -printf '%f\0' | sort -z |
while IFS= read -r -d '' name; do
size=$(du -s target/size-release/usr/local/bin/$name | awk '{print $1}')
echo "\"$name\""
echo "$size"
done | \
jq -n \
--arg date "$DATE" \
--arg sha "$GITHUB_SHA" \
'reduce inputs as $name ({}; . + { ($name): input }) | { ($date): {sha: $sha, sizes: map_values(.)} }' > individual-size-result.json
SIZE=$(cat individual-size-result.json | jq '[.[] | .sizes | .[]] | reduce .[] as $num (0; . + $num)')
SIZE_MULTI=$(du -s target/size-multi-release/usr/local/bin/coreutils | awk '{print $1}')
jq -n \
--arg date "$(date --rfc-email)" \
--arg date "$DATE" \
--arg sha "$GITHUB_SHA" \
--arg size "$SIZE" \
--arg multisize "$SIZE_MULTI" \
'{($date): { sha: $sha, size: $size, multisize: $multisize, }}' > size-result.json
- uses: actions/upload-artifact@v3
- name: Download the previous individual size result
uses: dawidd6/action-download-artifact@v2
with:
workflow: CICD.yml
name: individual-size-result
repo: uutils/coreutils
path: dl
- name: Download the previous size result
uses: dawidd6/action-download-artifact@v2
with:
workflow: CICD.yml
name: size-result
repo: uutils/coreutils
path: dl
- name: Check uutil release sizes
shell: bash
run: |
check() {
# Warn if the size increases by more than 5%
threshold='1.05'
ratio=$(jq -n "$2 / $3")
echo "$1: size=$2, previous_size=$3, ratio=$ratio, threshold=$threshold"
if [[ "$(jq -n "$ratio > $threshold")" == 'true' ]]; then
echo "::warning file=$4::Size of $1 increases by more than 5%"
fi
}
## Check individual size result
while read -r name previous_size; do
size=$(cat individual-size-result.json | jq -r ".[] | .sizes | .\"$name\"")
check "\`$name\` binary" "$size" "$previous_size" 'individual-size-result.json'
done < <(cat dl/individual-size-result.json | jq -r '.[] | .sizes | to_entries[] | "\(.key) \(.value)"')
## Check size result
size=$(cat size-result.json | jq -r '.[] | .size')
previous_size=$(cat dl/size-result.json | jq -r '.[] | .size')
check 'multiple binaries' "$size" "$previous_size" 'size-result.json'
multisize=$(cat size-result.json | jq -r '.[] | .multisize')
previous_multisize=$(cat dl/size-result.json | jq -r '.[] | .multisize')
check 'multicall binary' "$multisize" "$previous_multisize" 'size-result.json'
- name: Upload the individual size result
uses: actions/upload-artifact@v3
with:
name: individual-size-result
path: individual-size-result.json
- name: Upload the size result
uses: actions/upload-artifact@v3
with:
name: size-result
path: size-result.json
@ -518,6 +623,8 @@ jobs:
timeout-minutes: 90
env:
DOCKER_OPTS: '--volume /etc/passwd:/etc/passwd --volume /etc/group:/etc/group'
SCCACHE_GHA_ENABLED: "true"
RUSTC_WRAPPER: "sccache"
strategy:
fail-fast: false
matrix:
@ -525,25 +632,26 @@ jobs:
# { os , target , cargo-options , features , use-cross , toolchain }
- { os: ubuntu-latest , target: arm-unknown-linux-gnueabihf, features: feat_os_unix_gnueabihf, use-cross: use-cross, }
- { os: ubuntu-latest , target: aarch64-unknown-linux-gnu , features: feat_os_unix_gnueabihf , use-cross: use-cross }
- { os: ubuntu-latest , target: x86_64-unknown-linux-gnu , features: feat_os_unix , use-cross: use-cross }
# - { os: ubuntu-latest , target: x86_64-unknown-linux-gnu , features: feat_selinux , use-cross: use-cross }
# - { os: ubuntu-18.04 , target: i586-unknown-linux-gnu , features: feat_os_unix , use-cross: use-cross } ## note: older windows platform; not required, dev-FYI only
# - { os: ubuntu-18.04 , target: i586-unknown-linux-gnu , features: feat_os_unix , use-cross: use-cross } ## note: older windows platform; not required, dev-FYI only
- { os: ubuntu-latest , target: i686-unknown-linux-gnu , features: feat_os_unix , use-cross: use-cross }
- { os: ubuntu-latest , target: i686-unknown-linux-musl , features: feat_os_unix_musl , use-cross: use-cross }
- { os: ubuntu-latest , target: x86_64-unknown-linux-gnu , features: feat_os_unix , use-cross: use-cross }
- { os: ubuntu-latest , target: x86_64-unknown-linux-musl , features: feat_os_unix_musl , use-cross: use-cross }
# Commented until https://github.com/uutils/coreutils/issues/3210 is fixed
#- { os: ubuntu-18.04 , target: i686-unknown-linux-gnu , features: feat_os_unix , use-cross: use-cross }
#- { os: ubuntu-18.04 , target: i686-unknown-linux-musl , features: feat_os_unix_musl , use-cross: use-cross }
#- { os: ubuntu-18.04 , target: x86_64-unknown-linux-gnu , features: feat_os_unix , use-cross: use-cross }
#- { os: ubuntu-18.04 , target: x86_64-unknown-linux-musl , features: feat_os_unix_musl , use-cross: use-cross }
- { os: macos-latest , target: x86_64-apple-darwin , features: feat_os_macos }
- { os: windows-latest , target: i686-pc-windows-msvc , features: feat_os_windows }
- { os: windows-latest , target: x86_64-pc-windows-gnu , features: feat_os_windows } ## note: requires rust >= 1.43.0 to link correctly
- { os: windows-latest , target: x86_64-pc-windows-msvc , features: feat_os_windows }
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@master
with:
toolchain: ${{ env.RUST_MIN_SRV }}
targets: ${{ matrix.job.target }}
- uses: Swatinem/rust-cache@v2
with:
key: "${{ matrix.job.os }}_${{ matrix.job.target }}"
- name: Run sccache-cache
uses: mozilla-actions/sccache-action@v0.0.3
- name: Initialize workflow variables
id: vars
shell: bash
@ -619,8 +727,7 @@ jobs:
outputs CARGO_CMD
# ** pass needed environment into `cross` container (iff `cross` not already configured via "Cross.toml")
if [ "${CARGO_CMD}" = 'cross' ] && [ ! -e "Cross.toml" ] ; then
cargo install --version 0.2.1 cross
printf "[build.env]\npassthrough = [\"CI\", \"RUST_BACKTRACE\"]\n" > Cross.toml
printf "[build.env]\npassthrough = [\"CI\", \"RUST_BACKTRACE\", \"CARGO_TERM_COLOR\"]\n" > Cross.toml
fi
# * test only library and/or binaries for arm-type targets
unset CARGO_TEST_OPTIONS ; case '${{ matrix.job.target }}' in aarch64-* | arm-*) CARGO_TEST_OPTIONS="--bins" ;; esac;
@ -633,6 +740,10 @@ jobs:
*-pc-windows-msvc) STRIP="" ;;
esac;
outputs STRIP
- uses: taiki-e/install-action@v2
if: steps.vars.outputs.CARGO_CMD == 'cross'
with:
tool: cross@0.2.1
- name: Create all needed build/work directories
shell: bash
run: |
@ -666,14 +777,6 @@ jobs:
echo "foo" > /home/runner/.plan
;;
esac
- name: rust toolchain ~ install
run: |
## rust toolchain ~ install
rustup toolchain install --no-self-update ${{ env.RUST_MIN_SRV }} -t ${{ matrix.job.target }} --profile minimal
rustup default ${{ env.RUST_MIN_SRV }}
- uses: Swatinem/rust-cache@v2
with:
key: ${{ matrix.job.os }}-${{ matrix.job.target }}
- name: Initialize toolchain-dependent workflow variables
id: dep_vars
shell: bash
@ -783,6 +886,9 @@ jobs:
name: Tests/BusyBox test suite
needs: [ min_version, deps ]
runs-on: ${{ matrix.job.os }}
env:
SCCACHE_GHA_ENABLED: "true"
RUSTC_WRAPPER: "sccache"
strategy:
fail-fast: false
matrix:
@ -797,6 +903,8 @@ jobs:
echo "TEST_SUMMARY_FILE=busybox-result.json" >> $GITHUB_OUTPUT
- uses: actions/checkout@v3
- uses: Swatinem/rust-cache@v2
- name: Run sccache-cache
uses: mozilla-actions/sccache-action@v0.0.3
- name: Install/setup prerequisites
shell: bash
run: |
@ -856,6 +964,9 @@ jobs:
name: Tests/Toybox test suite
needs: [ min_version, deps ]
runs-on: ${{ matrix.job.os }}
env:
SCCACHE_GHA_ENABLED: "true"
RUSTC_WRAPPER: "sccache"
strategy:
fail-fast: false
matrix:
@ -871,12 +982,13 @@ jobs:
TEST_SUMMARY_FILE="toybox-result.json"
outputs TEST_SUMMARY_FILE
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@master
with:
toolchain: ${{ env.RUST_MIN_SRV }}
components: rustfmt
- uses: Swatinem/rust-cache@v2
- name: rust toolchain ~ install
run: |
## rust toolchain ~ install
rustup toolchain install --no-self-update ${{ env.RUST_MIN_SRV }} --profile minimal
rustup default ${{ env.RUST_MIN_SRV }}
- name: Run sccache-cache
uses: mozilla-actions/sccache-action@v0.0.3
- name: Build coreutils as multiple binaries
shell: bash
run: |
@ -936,150 +1048,40 @@ jobs:
name: toybox-result.json
path: ${{ steps.vars.outputs.TEST_SUMMARY_FILE }}
test_android:
name: Test Android builds
needs: [ min_version, deps ]
runs-on: macos-latest
timeout-minutes: 90
strategy:
fail-fast: false
matrix:
api-level: [28]
target: [default]
arch: [x86] # , arm64-v8a
env:
TERMUX: v0.118.0
toml_format:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: AVD cache
uses: actions/cache@v3
id: avd-cache
with:
path: |
~/.android/avd/*
~/.android/avd/*/snapshots/*
~/.android/adb*
key: avd-${{ matrix.api-level }}-${{ matrix.arch }}+termux-${{ env.TERMUX }}
- name: Create and cache emulator image
if: steps.avd-cache.outputs.cache-hit != 'true'
uses: reactivecircus/android-emulator-runner@v2
with:
api-level: ${{ matrix.api-level }}
target: ${{ matrix.target }}
arch: ${{ matrix.arch }}
ram-size: 2048M
disk-size: 5120M
force-avd-creation: true
emulator-options: -no-snapshot-load -noaudio -no-boot-anim -camera-back none
script: |
wget https://github.com/termux/termux-app/releases/download/${{ env.TERMUX }}/termux-app_${{ env.TERMUX }}+github-debug_${{ matrix.arch }}.apk
util/android-commands.sh snapshot termux-app_${{ env.TERMUX }}+github-debug_${{ matrix.arch }}.apk
adb -s emulator-5554 emu avd snapshot save ${{ matrix.api-level }}-${{ matrix.arch }}+termux-${{ env.TERMUX }}
echo "Emulator image created."
pkill -9 qemu-system-x86_64
- name: Build and Test on Android
uses: reactivecircus/android-emulator-runner@v2
with:
api-level: ${{ matrix.api-level }}
target: ${{ matrix.target }}
arch: ${{ matrix.arch }}
ram-size: 2048M
disk-size: 5120M
force-avd-creation: false
emulator-options: -no-snapshot-save -gpu swiftshader_indirect -noaudio -no-boot-anim -camera-back none -snapshot ${{ matrix.api-level }}-${{ matrix.arch }}+termux-${{ env.TERMUX }}
script: |
util/android-commands.sh sync
util/android-commands.sh build
util/android-commands.sh tests
- name: Clone repository
uses: actions/checkout@v3
test_freebsd:
name: Tests/FreeBSD test suite
needs: [ min_version, deps ]
runs-on: ${{ matrix.job.os }}
timeout-minutes: 90
strategy:
fail-fast: false
matrix:
job:
- { os: macos-12 , features: unix } ## GHA MacOS-11.0 VM won't have VirtualBox; refs: <https://github.com/actions/virtual-environments/issues/4060> , <https://github.com/actions/virtual-environments/pull/4010>
env:
mem: 4096
steps:
- uses: actions/checkout@v3
- uses: Swatinem/rust-cache@v2
- name: Prepare, build and test
## spell-checker:ignore (ToDO) sshfs usesh vmactions
uses: vmactions/freebsd-vm@v0.3.0
with:
usesh: true
# sync: sshfs
prepare: pkg install -y curl gmake sudo
run: |
## Prepare, build, and test
# implementation modelled after ref: <https://github.com/rust-lang/rustup/pull/2783>
# * NOTE: All steps need to be run in this block, otherwise, we are operating back on the mac host
set -e
#
# We need a file-descriptor file system to test test_ls::test_ls_io_errors
mount -t fdescfs fdesc /dev/fd
#
TEST_USER=tester
REPO_NAME=${GITHUB_WORKSPACE##*/}
WORKSPACE_PARENT="/Users/runner/work/${REPO_NAME}"
WORKSPACE="${WORKSPACE_PARENT}/${REPO_NAME}"
#
pw adduser -n ${TEST_USER} -d /root/ -g wheel -c "Coreutils user to build" -w random
# chown -R ${TEST_USER}:wheel /root/ "${WORKSPACE_PARENT}"/
chown -R ${TEST_USER}:wheel /root/ "/Users/runner/work/${REPO_NAME}"/
whoami
#
# Further work needs to be done in a sudo as we are changing users
sudo -i -u ${TEST_USER} sh << EOF
set -e
whoami
curl https://sh.rustup.rs -sSf --output rustup.sh
sh rustup.sh -y --profile=minimal
. $HOME/.cargo/env
## Info
# environment
echo "## environment"
echo "CI='${CI}'"
echo "REPO_NAME='${REPO_NAME}'"
echo "TEST_USER='${TEST_USER}'"
echo "WORKSPACE_PARENT='${WORKSPACE_PARENT}'"
echo "WORKSPACE='${WORKSPACE}'"
env | sort
# tooling info
echo "## tooling info"
cargo -V
rustc -V
#
# To ensure that files are cleaned up, we don't want to exit on error
set +e
cd "${WORKSPACE}"
unset FAULT
cargo build || FAULT=1
export RUST_BACKTRACE=1
if (test -z "\$FAULT"); then cargo test --features '${{ matrix.job.features }}' || FAULT=1 ; fi
if (test -z "\$FAULT"); then cargo test --all-features -p uucore || FAULT=1 ; fi
# Clean to avoid to rsync back the files
cargo clean
if (test -n "\$FAULT"); then exit 1 ; fi
EOF
- name: Check
run: npx --yes @taplo/cli fmt --check
coverage:
name: Code Coverage
runs-on: ${{ matrix.job.os }}
timeout-minutes: 90
env:
SCCACHE_GHA_ENABLED: "true"
RUSTC_WRAPPER: "sccache"
strategy:
fail-fast: false
matrix:
job:
- { os: ubuntu-latest , features: unix }
- { os: macos-latest , features: macos }
- { os: windows-latest , features: windows }
- { os: ubuntu-latest , features: unix, toolchain: nightly }
- { os: macos-latest , features: macos, toolchain: nightly }
- { os: windows-latest , features: windows, toolchain: nightly-x86_64-pc-windows-gnu }
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@master
with:
toolchain: ${{ matrix.job.toolchain }}
components: rustfmt
- uses: taiki-e/install-action@nextest
- uses: taiki-e/install-action@grcov
- uses: Swatinem/rust-cache@v2
- name: Run sccache-cache
uses: mozilla-actions/sccache-action@v0.0.3
# - name: Reattach HEAD ## may be needed for accurate code coverage info
# run: git checkout ${{ github.head_ref }}
- name: Initialize workflow variables
@ -1128,12 +1130,6 @@ jobs:
echo "foo" > /home/runner/.plan
;;
esac
- name: rust toolchain ~ install
run: |
## rust toolchain ~ install
rustup toolchain install ${{ steps.vars.outputs.TOOLCHAIN }} --no-self-update --profile minimal
rustup default ${{ steps.vars.outputs.TOOLCHAIN }}
- uses: Swatinem/rust-cache@v2
- name: Initialize toolchain-dependent workflow variables
id: dep_vars
shell: bash
@ -1145,35 +1141,29 @@ jobs:
CARGO_UTILITY_LIST_OPTIONS="$(for u in ${UTILITY_LIST}; do echo -n "-puu_${u} "; done;)"
outputs CARGO_UTILITY_LIST_OPTIONS
- name: Test uucore
run: cargo test --no-fail-fast -p uucore
run: cargo nextest run --profile ci --hide-progress-bar -p uucore
env:
CARGO_INCREMENTAL: "0"
RUSTC_WRAPPER: ""
RUSTFLAGS: "-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort"
RUSTDOCFLAGS: "-Cpanic=abort"
RUST_BACKTRACE: "1"
# RUSTUP_TOOLCHAIN: ${{ steps.vars.outputs.TOOLCHAIN }}
- name: Test
run: cargo test ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} --no-fail-fast
run: cargo nextest run --profile ci --hide-progress-bar ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }}
env:
CARGO_INCREMENTAL: "0"
RUSTC_WRAPPER: ""
RUSTFLAGS: "-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort"
RUSTDOCFLAGS: "-Cpanic=abort"
RUST_BACKTRACE: "1"
# RUSTUP_TOOLCHAIN: ${{ steps.vars.outputs.TOOLCHAIN }}
- name: Test individual utilities
run: cargo test --no-fail-fast ${{ steps.dep_vars.outputs.CARGO_UTILITY_LIST_OPTIONS }}
run: cargo nextest run --profile ci --hide-progress-bar ${{ steps.dep_vars.outputs.CARGO_UTILITY_LIST_OPTIONS }}
env:
CARGO_INCREMENTAL: "0"
RUSTC_WRAPPER: ""
RUSTFLAGS: "-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort"
RUSTDOCFLAGS: "-Cpanic=abort"
RUST_BACKTRACE: "1"
# RUSTUP_TOOLCHAIN: ${{ steps.vars.outputs.TOOLCHAIN }}
- name: "`grcov` ~ install"
id: build_grcov
run: cargo install grcov
- name: Generate coverage data (via `grcov`)
id: coverage
shell: bash
@ -1199,3 +1189,4 @@ jobs:
flags: ${{ steps.vars.outputs.CODECOV_FLAGS }}
name: codecov-umbrella
fail_ci_if_error: false


@ -1,6 +1,6 @@
name: FixPR
# spell-checker:ignore Swatinem
# spell-checker:ignore Swatinem dtolnay
# Trigger automated fixes for PRs being merged (with associated commits)
@ -36,11 +36,7 @@ jobs:
# surface MSRV from CICD workflow
RUST_MIN_SRV=$(grep -P "^\s+RUST_MIN_SRV:" .github/workflows/CICD.yml | grep -Po "(?<=\x22)\d+[.]\d+(?:[.]\d+)?(?=\x22)" )
outputs RUST_MIN_SRV
- name: Install `rust` toolchain (v${{ steps.vars.outputs.RUST_MIN_SRV }})
run: |
## Install `rust` toolchain (v${{ steps.vars.outputs.RUST_MIN_SRV }})
rustup toolchain install ${{ steps.vars.outputs.RUST_MIN_SRV }} --profile minimal
rustup default ${{ steps.vars.outputs.RUST_MIN_SRV }}
- uses: dtolnay/rust-toolchain@${{ steps.vars.outputs.RUST_MIN_SRV }}
- uses: Swatinem/rust-cache@v2
- name: Ensure updated 'Cargo.lock'
shell: bash
@ -101,12 +97,10 @@ jobs:
CARGO_FEATURES_OPTION='' ;
if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features "${{ matrix.job.features }}"' ; fi
outputs CARGO_FEATURES_OPTION
- name: Install `rust` toolchain
run: |
## Install `rust` toolchain
rm -f "${HOME}/.cargo/bin/"{rustfmt,cargo-fmt}
rustup toolchain install stable -c rustfmt --profile minimal
rustup default stable
- uses: dtolnay/rust-toolchain@master
with:
toolchain: stable
components: rustfmt
- uses: Swatinem/rust-cache@v2
- name: "`cargo fmt`"
shell: bash


@ -1,10 +1,10 @@
name: GnuTests
# spell-checker:ignore (abbrev/names) CodeCov gnulib GnuTests
# spell-checker:ignore (abbrev/names) CodeCov gnulib GnuTests Swatinem
# spell-checker:ignore (jargon) submodules
# spell-checker:ignore (libs/utils) autopoint chksum gperf lcov libexpect pyinotify shopt texinfo valgrind
# spell-checker:ignore (libs/utils) autopoint chksum gperf lcov libexpect pyinotify shopt texinfo valgrind libattr libcap taiki-e
# spell-checker:ignore (options) Ccodegen Coverflow Cpanic Zpanic
# spell-checker:ignore (people) Dawid Dziurla * dawidd
# spell-checker:ignore (people) Dawid Dziurla * dawidd dtolnay
# spell-checker:ignore (vars) FILESET SUBDIRS XPASS
# * note: to run a single test => `REPO/util/run-gnu-test.sh PATH/TO/TEST/SCRIPT`
@ -42,7 +42,7 @@ jobs:
outputs path_GNU path_GNU_tests path_reference path_UUTILS
#
repo_default_branch="${{ github.event.repository.default_branch }}"
repo_GNU_ref="v9.1"
repo_GNU_ref="v9.3"
repo_reference_branch="${{ github.event.repository.default_branch }}"
outputs repo_default_branch repo_GNU_ref repo_reference_branch
#
@ -58,6 +58,13 @@ jobs:
uses: actions/checkout@v3
with:
path: '${{ steps.vars.outputs.path_UUTILS }}'
- uses: dtolnay/rust-toolchain@master
with:
toolchain: stable
components: rustfmt
- uses: Swatinem/rust-cache@v2
with:
workspaces: "./${{ steps.vars.outputs.path_UUTILS }} -> target"
- name: Checkout code (GNU coreutils)
uses: actions/checkout@v3
with:
@ -75,18 +82,12 @@ jobs:
# workflow_conclusion: success ## (default); * but, if commit with failed GnuTests is merged into the default branch, future commits will all show regression errors in GnuTests CI until o/w fixed
workflow_conclusion: completed ## continually recalibrates to last commit of default branch with a successful GnuTests (ie, "self-heals" from GnuTest regressions, but needs more supervision for/of regressions)
path: "${{ steps.vars.outputs.path_reference }}"
- name: Install `rust` toolchain
run: |
## Install `rust` toolchain
rm -f "${HOME}/.cargo/bin/"{rustfmt,cargo-fmt}
rustup toolchain install stable -c rustfmt --profile minimal
rustup default stable
- name: Install dependencies
shell: bash
run: |
## Install dependencies
sudo apt-get update
sudo apt-get install autoconf autopoint bison texinfo gperf gcc g++ gdb python3-pyinotify jq valgrind libexpect-perl
sudo apt-get install -y autoconf autopoint bison texinfo gperf gcc g++ gdb python3-pyinotify jq valgrind libexpect-perl libacl1-dev libattr1-dev libcap-dev
- name: Add various locales
shell: bash
run: |
@ -201,9 +202,10 @@ jobs:
REF_LOG_FILE='${{ steps.vars.outputs.path_reference }}/test-logs/test-suite.log'
REF_SUMMARY_FILE='${{ steps.vars.outputs.path_reference }}/test-summary/gnu-result.json'
REPO_DEFAULT_BRANCH='${{ steps.vars.outputs.repo_default_branch }}'
path_UUTILS='${{ steps.vars.outputs.path_UUTILS }}'
# https://github.com/uutils/coreutils/issues/4294
# https://github.com/uutils/coreutils/issues/4295
IGNORE_INTERMITTENT='tests/tail-2/inotify-dir-recreate tests/misc/timeout tests/rm/rm1'
IGNORE_INTERMITTENT='${path_UUTILS}/.github/workflows/ignore-intermittent.txt'
mkdir -p ${{ steps.vars.outputs.path_reference }}
@ -226,9 +228,17 @@ jobs:
for LINE in ${REF_FAILING}
do
if ! grep -Fxq ${LINE}<<<"${NEW_FAILING}"; then
MSG="Congrats! The gnu test ${LINE} is no longer failing!"
echo "::warning ::$MSG"
echo $MSG >> ${COMMENT_LOG}
if ! grep ${LINE} ${IGNORE_INTERMITTENT}
then
MSG="Congrats! The gnu test ${LINE} is no longer failing!"
echo "::warning ::$MSG"
echo $MSG >> ${COMMENT_LOG}
else
MSG="Skipping an intermittent issue ${LINE}"
echo "::warning ::$MSG"
echo $MSG >> ${COMMENT_LOG}
echo ""
fi
fi
done
for LINE in ${NEW_FAILING}
@ -305,19 +315,21 @@ jobs:
with:
repository: 'coreutils/coreutils'
path: 'gnu'
ref: 'v9.1'
ref: 'v9.3'
submodules: recursive
- name: Install `rust` toolchain
run: |
## Install `rust` toolchain
rm -f "${HOME}/.cargo/bin/"{rustfmt,cargo-fmt}
rustup toolchain install nightly -c rustfmt --profile minimal
rustup default nightly
- uses: dtolnay/rust-toolchain@master
with:
toolchain: nightly
components: rustfmt
- uses: taiki-e/install-action@grcov
- uses: Swatinem/rust-cache@v2
with:
workspaces: "./uutils -> target"
- name: Install dependencies
run: |
## Install dependencies
sudo apt update
sudo apt install autoconf autopoint bison texinfo gperf gcc g++ gdb python3-pyinotify jq valgrind libexpect-perl -y
sudo apt-get update
sudo apt-get install -y autoconf autopoint bison texinfo gperf gcc g++ gdb python3-pyinotify jq valgrind libexpect-perl libacl1-dev libattr1-dev libcap-dev
- name: Add various locales
run: |
## Add various locales
@ -333,7 +345,6 @@ jobs:
locale -a
- name: Build binaries
env:
CARGO_INCREMENTAL: "0"
RUSTFLAGS: "-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort"
RUSTDOCFLAGS: "-Cpanic=abort"
run: |
@ -342,8 +353,6 @@ jobs:
UU_MAKE_PROFILE=debug bash util/build-gnu.sh
- name: Run GNU tests
run: bash uutils/util/run-gnu-test.sh
- name: "`grcov` ~ install"
run: cargo install grcov
- name: Generate coverage data (via `grcov`)
id: coverage
run: |

.github/workflows/android.yml (new file)

@ -0,0 +1,100 @@
name: Android
# spell-checker:ignore TERMUX reactivecircus Swatinem noaudio pkill swiftshader dtolnay juliangruber
on: [push, pull_request]
permissions:
contents: read # to fetch code (actions/checkout)
# End the current execution if there is a new changeset in the PR.
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}
jobs:
test_android:
name: Test builds
runs-on: macos-latest
timeout-minutes: 90
strategy:
fail-fast: false
matrix:
api-level: [28]
target: [default]
arch: [x86] # , arm64-v8a
env:
TERMUX: v0.118.0
steps:
- uses: actions/checkout@v3
- name: Restore AVD cache
uses: actions/cache/restore@v3
id: avd-cache
with:
path: |
~/.android/avd/*
~/.android/avd/*/snapshots/*
~/.android/adb*
~/__rustc_hash__
key: avd-${{ matrix.api-level }}-${{ matrix.arch }}+termux-${{ env.TERMUX }}+nextest+rustc-hash
- name: Create and cache emulator image
if: steps.avd-cache.outputs.cache-hit != 'true'
uses: reactivecircus/android-emulator-runner@v2
with:
api-level: ${{ matrix.api-level }}
target: ${{ matrix.target }}
arch: ${{ matrix.arch }}
ram-size: 2048M
disk-size: 7GB
force-avd-creation: true
emulator-options: -no-snapshot-load -noaudio -no-boot-anim -camera-back none
script: |
util/android-commands.sh init "${{ matrix.arch }}" "${{ matrix.api-level }}" "${{ env.TERMUX }}"
- name: Save AVD cache
if: steps.avd-cache.outputs.cache-hit != 'true'
uses: actions/cache/save@v3
with:
path: |
~/.android/avd/*
~/.android/avd/*/snapshots/*
~/.android/adb*
~/__rustc_hash__
key: avd-${{ matrix.api-level }}-${{ matrix.arch }}+termux-${{ env.TERMUX }}+nextest+rustc-hash
- uses: juliangruber/read-file-action@v1
id: read_rustc_hash
with:
# ~ expansion didn't work
path: /Users/runner/__rustc_hash__
trim: true
- name: Restore rust cache
id: rust-cache
uses: actions/cache/restore@v3
with:
path: ~/__rust_cache__
# The version vX at the end of the key is just a development version to avoid conflicts in
# the github cache during the development of this workflow
key: ${{ matrix.arch }}_${{ matrix.target}}_${{ steps.read_rustc_hash.outputs.content }}_${{ hashFiles('**/Cargo.toml', '**/Cargo.lock') }}_v3
- name: Build and Test
uses: reactivecircus/android-emulator-runner@v2
with:
api-level: ${{ matrix.api-level }}
target: ${{ matrix.target }}
arch: ${{ matrix.arch }}
ram-size: 2048M
disk-size: 7GB
force-avd-creation: false
emulator-options: -no-snapshot-save -gpu swiftshader_indirect -noaudio -no-boot-anim -camera-back none -snapshot ${{ matrix.api-level }}-${{ matrix.arch }}+termux-${{ env.TERMUX }}
# This is not a usual script. Every line is executed in a separate shell with `sh -c`. If
# one of the lines returns an error, the whole script fails (like running a script with
# set -e) and, as a consequence, the remaining lines (shells) are not executed.
script: |
util/android-commands.sh sync_host
util/android-commands.sh build
util/android-commands.sh tests
if [[ "${{ steps.rust-cache.outputs.cache-hit }}" != 'true' ]]; then util/android-commands.sh sync_image; fi; exit 0
- name: Save rust cache
if: steps.rust-cache.outputs.cache-hit != 'true'
uses: actions/cache/save@v3
with:
path: ~/__rust_cache__
key: ${{ matrix.arch }}_${{ matrix.target}}_${{ steps.read_rustc_hash.outputs.content }}_${{ hashFiles('**/Cargo.toml', '**/Cargo.lock') }}_v3

.github/workflows/freebsd.yml (new file)

@ -0,0 +1,190 @@
name: FreeBSD
# spell-checker:ignore sshfs usesh vmactions taiki Swatinem esac fdescfs fdesc
env:
# * style job configuration
STYLE_FAIL_ON_FAULT: true ## (bool) fail the build if a style job contains a fault (error or warning); may be overridden on a per-job basis
on: [push, pull_request]
permissions:
contents: read # to fetch code (actions/checkout)
# End the current execution if there is a new changeset in the PR.
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}
jobs:
style:
name: Style and Lint
runs-on: ${{ matrix.job.os }}
timeout-minutes: 90
strategy:
fail-fast: false
matrix:
job:
- { os: macos-12 , features: unix } ## GHA MacOS-11.0 VM won't have VirtualBox; refs: <https://github.com/actions/virtual-environments/issues/4060> , <https://github.com/actions/virtual-environments/pull/4010>
env:
SCCACHE_GHA_ENABLED: "true"
RUSTC_WRAPPER: "sccache"
steps:
- uses: actions/checkout@v3
- uses: Swatinem/rust-cache@v2
- name: Run sccache-cache
uses: mozilla-actions/sccache-action@v0.0.3
- name: Prepare, build and test
uses: vmactions/freebsd-vm@v0.3.0
with:
usesh: true
# We need jq to run show-utils.sh and bash to use inline shell string replacement
prepare: pkg install -y curl sudo jq bash
run: |
## Prepare, build, and test
# implementation modelled after ref: <https://github.com/rust-lang/rustup/pull/2783>
# * NOTE: All steps need to be run in this block, otherwise, we are operating back on the mac host
set -e
#
TEST_USER=tester
REPO_NAME=${GITHUB_WORKSPACE##*/}
WORKSPACE_PARENT="/Users/runner/work/${REPO_NAME}"
WORKSPACE="${WORKSPACE_PARENT}/${REPO_NAME}"
#
pw adduser -n ${TEST_USER} -d /root/ -g wheel -c "Coreutils user to build" -w random
chown -R ${TEST_USER}:wheel /root/ "/Users/runner/work/${REPO_NAME}"/
whoami
#
# Further work needs to be done in a sudo as we are changing users
sudo -i -u ${TEST_USER} bash << EOF
set -e
whoami
curl https://sh.rustup.rs -sSf --output rustup.sh
sh rustup.sh -y -c rustfmt,clippy --profile=minimal -t stable
. ${HOME}/.cargo/env
## VARs setup
cd "${WORKSPACE}"
unset FAIL_ON_FAULT ; case '${{ env.STYLE_FAIL_ON_FAULT }}' in
''|0|f|false|n|no|off) FAULT_TYPE=warning ;;
*) FAIL_ON_FAULT=true ; FAULT_TYPE=error ;;
esac;
FAULT_PREFIX=\$(echo "\${FAULT_TYPE}" | tr '[:lower:]' '[:upper:]')
# * determine sub-crate utility list
UTILITY_LIST="\$(./util/show-utils.sh --features ${{ matrix.job.features }})"
CARGO_UTILITY_LIST_OPTIONS="\$(for u in \${UTILITY_LIST}; do echo -n "-puu_\${u} "; done;)"
## Info
# environment
echo "## environment"
echo "CI='${CI}'"
echo "REPO_NAME='${REPO_NAME}'"
echo "TEST_USER='${TEST_USER}'"
echo "WORKSPACE_PARENT='${WORKSPACE_PARENT}'"
echo "WORKSPACE='${WORKSPACE}'"
echo "FAULT_PREFIX='\${FAULT_PREFIX}'"
echo "UTILITY_LIST='\${UTILITY_LIST}'"
env | sort
# tooling info
echo "## tooling info"
cargo -V
rustc -V
#
# To ensure that files are cleaned up, we don't want to exit on error
set +e
unset FAULT
## cargo fmt testing
echo "## cargo fmt testing"
# * convert any errors/warnings to GHA UI annotations; ref: <https://help.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-a-warning-message>
S=\$(cargo fmt -- --check) && printf "%s\n" "\$S" || { printf "%s\n" "\$S" ; printf "%s\n" "\$S" | sed -E -n -e "s/^Diff[[:space:]]+in[[:space:]]+\${PWD//\//\\\\/}\/(.*)[[:space:]]+at[[:space:]]+[^0-9]+([0-9]+).*\$/::\${FAULT_TYPE} file=\1,line=\2::\${FAULT_PREFIX}: \\\`cargo fmt\\\`: style violation (file:'\1', line:\2; use \\\`cargo fmt -- \"\1\"\\\`)/p" ; FAULT=true ; }
## cargo clippy lint testing
if [ -z "\${FAULT}" ]; then
echo "## cargo clippy lint testing"
# * convert any warnings to GHA UI annotations; ref: <https://help.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-a-warning-message>
S=\$(cargo clippy --all-targets \${CARGO_UTILITY_LIST_OPTIONS} -- -W clippy::manual_string_new -D warnings 2>&1) && printf "%s\n" "\$S" || { printf "%s\n" "\$S" ; printf "%s" "\$S" | sed -E -n -e '/^error:/{' -e "N; s/^error:[[:space:]]+(.*)\\n[[:space:]]+-->[[:space:]]+(.*):([0-9]+):([0-9]+).*\$/::\${FAULT_TYPE} file=\2,line=\3,col=\4::\${FAULT_PREFIX}: \\\`cargo clippy\\\`: \1 (file:'\2', line:\3)/p;" -e '}' ; FAULT=true ; }
fi
# Clean to avoid to rsync back the files
cargo clean
if [ -n "\${FAIL_ON_FAULT}" ] && [ -n "\${FAULT}" ]; then exit 1 ; fi
EOF
test:
name: Tests
runs-on: ${{ matrix.job.os }}
timeout-minutes: 90
strategy:
fail-fast: false
matrix:
job:
- { os: macos-12 , features: unix } ## GHA MacOS-11.0 VM won't have VirtualBox; refs: <https://github.com/actions/virtual-environments/issues/4060> , <https://github.com/actions/virtual-environments/pull/4010>
env:
mem: 4096
SCCACHE_GHA_ENABLED: "true"
RUSTC_WRAPPER: "sccache"
steps:
- uses: actions/checkout@v3
- uses: Swatinem/rust-cache@v2
- name: Run sccache-cache
uses: mozilla-actions/sccache-action@v0.0.3
- name: Prepare, build and test
uses: vmactions/freebsd-vm@v0.3.0
with:
usesh: true
# sync: sshfs
prepare: pkg install -y curl gmake sudo
run: |
## Prepare, build, and test
# implementation modelled after ref: <https://github.com/rust-lang/rustup/pull/2783>
# * NOTE: All steps need to be run in this block, otherwise, we are operating back on the mac host
set -e
#
# We need a file-descriptor file system to test test_ls::test_ls_io_errors
mount -t fdescfs fdesc /dev/fd
#
TEST_USER=tester
REPO_NAME=${GITHUB_WORKSPACE##*/}
WORKSPACE_PARENT="/Users/runner/work/${REPO_NAME}"
WORKSPACE="${WORKSPACE_PARENT}/${REPO_NAME}"
#
pw adduser -n ${TEST_USER} -d /root/ -g wheel -c "Coreutils user to build" -w random
# chown -R ${TEST_USER}:wheel /root/ "${WORKSPACE_PARENT}"/
chown -R ${TEST_USER}:wheel /root/ "/Users/runner/work/${REPO_NAME}"/
whoami
#
# Further work needs to be done in a sudo as we are changing users
sudo -i -u ${TEST_USER} sh << EOF
set -e
whoami
curl https://sh.rustup.rs -sSf --output rustup.sh
sh rustup.sh -y --profile=minimal
. $HOME/.cargo/env
# Install nextest
mkdir -p ~/.cargo/bin
curl -LsSf https://get.nexte.st/latest/freebsd | tar zxf - -C ~/.cargo/bin
## Info
# environment
echo "## environment"
echo "CI='${CI}'"
echo "REPO_NAME='${REPO_NAME}'"
echo "TEST_USER='${TEST_USER}'"
echo "WORKSPACE_PARENT='${WORKSPACE_PARENT}'"
echo "WORKSPACE='${WORKSPACE}'"
env | sort
# tooling info
echo "## tooling info"
cargo -V
cargo nextest --version
rustc -V
#
# To ensure that files are cleaned up, we don't want to exit on error
set +e
cd "${WORKSPACE}"
unset FAULT
cargo build || FAULT=1
export PATH=~/.cargo/bin:${PATH}
export RUST_BACKTRACE=1
export CARGO_TERM_COLOR=always
if (test -z "\$FAULT"); then cargo nextest run --hide-progress-bar --profile ci --features '${{ matrix.job.features }}' || FAULT=1 ; fi
if (test -z "\$FAULT"); then cargo nextest run --hide-progress-bar --profile ci --all-features -p uucore || FAULT=1 ; fi
# Clean to avoid to rsync back the files
cargo clean
if (test -n "\$FAULT"); then exit 1 ; fi
EOF

.github/workflows/ignore-intermittent.txt (new file)

@ -0,0 +1,3 @@
tests/tail-2/inotify-dir-recreate
tests/misc/timeout
tests/rm/rm1

.markdownlint.yaml (new file)

@ -0,0 +1,6 @@
# Disable 'Line length'. Doesn't provide much value.
MD013: false
# Disable 'Fenced code blocks should have a language specified'.
# Doesn't provide much value in src/, so it is not enforced.
MD040: false


@ -50,6 +50,7 @@ Gmail
GNU
Illumos
Irix
libfuzzer
MS-DOS
MSDOS
MacOS


@ -20,6 +20,7 @@ exacl
filetime
formatteriteminfo
fsext
fundu
getopts
getrandom
globset


@ -116,7 +116,7 @@ the community.
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
<https://www.contributor-covenant.org/version/2/0/code_of_conduct.html>.
Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).
@ -124,5 +124,5 @@ enforcement ladder](https://github.com/mozilla/diversity).
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.
<https://www.contributor-covenant.org/faq>. Translations are available at
<https://www.contributor-covenant.org/translations>.


@ -1,20 +1,12 @@
<!-- spell-checker:ignore reimplementing toybox RUNTEST CARGOFLAGS nextest -->
# Contributing to coreutils
Contributions are very welcome, and should target Rust's main branch until the
standard libraries are stabilized. You may *claim* an item on the to-do list by
following these steps:
1. Open an issue named "Implement [the utility of your choice]", e.g. "Implement
ls".
1. State that you are working on this utility.
1. Develop the utility.
1. Add integration tests.
1. Add the reference to your utility into Cargo.toml and Makefile.
1. Remove utility from the to-do list in the README.
1. Submit a pull request and close the issue.
The steps above imply that, before starting to work on a utility, you should
search the issues to make sure no one else is working on it.
Contributions are very welcome via Pull Requests. If you don't know where to
start, take a look at the
[`good-first-issues`](https://github.com/uutils/coreutils/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22).
If you have any questions, feel free to ask them in the issues or on
[Discord](https://discord.gg/wQVJbvJ).
## Best practices
@ -38,37 +30,259 @@ search the issues to make sure no one else is working on it.
## Platforms
We take pride in supporting many operating systems and architectures.
We take pride in supporting many operating systems and architectures. Any code
you contribute must at least compile without warnings for all platforms in the
CI. However, you can use `#[cfg(...)]` attributes to create platform-dependent features.
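As a quick local sanity check, a compile-only check against another target can catch
platform-specific errors before pushing. This is just a sketch: it assumes the extra
target is installed via `rustup`, and the feature name is borrowed from the CI matrix;
the CI remains the authority.
```shell
# Compile-check (no linking) the tree for a non-host platform.
# The target triple and feature flag here are illustrative assumptions.
rustup target add x86_64-pc-windows-gnu
cargo check --target x86_64-pc-windows-gnu --features feat_os_windows
```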
**Tip:**
For Windows, Microsoft provides some images (VMWare, Hyper-V, VirtualBox and Parallels)
for development:
https://developer.microsoft.com/windows/downloads/virtual-machines/
**Tip:** For Windows, Microsoft provides some images (VMWare, Hyper-V,
VirtualBox and Parallels) for development:
<https://developer.microsoft.com/windows/downloads/virtual-machines/>
## Tools
We have an extensive CI that will check your code before it can be merged. This
section explains how to run those checks locally to avoid waiting for the CI.
### pre-commit hooks
A configuration for `pre-commit` is provided in the repository. It automatically
checks every git commit you make, ensuring that the code compiles and passes
`clippy` and `rustfmt` without warnings.
To use the provided hook:
1. [Install `pre-commit`](https://pre-commit.com/#install)
1. Run `pre-commit install` while in the repository directory
Your git commits will then automatically be checked. If a check fails, an error
message will explain why, and your commit will be canceled. You can then make
the suggested changes, and run `git commit ...` again.
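For reference, a minimal sketch of setting this up (assuming `pre-commit` is installed
with `pip`; any method from the link above works just as well):
```shell
pip install pre-commit       # or any other method from https://pre-commit.com/#install
pre-commit install           # register the git hook in .git/hooks
pre-commit run --all-files   # optional: run the checks once over the whole tree
```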
### clippy
```shell
cargo clippy --all-targets --all-features
```
The `msrv` key in the clippy configuration file `clippy.toml` is used to disable
lints pertaining to newer features by specifying the minimum supported Rust
version (MSRV).
### rustfmt
```shell
cargo fmt --all
```
### cargo-deny
This project uses [cargo-deny](https://github.com/EmbarkStudios/cargo-deny/) to
detect duplicate dependencies, check licenses, etc. To run it locally, first
install it and then run:
```shell
cargo deny --all-features check all
```
### Markdown linter
We use [markdownlint](https://github.com/DavidAnson/markdownlint) to lint the
Markdown files in the repository.
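One way to run it locally is via `npx` (an assumption on our side; it requires Node.js,
and the globs mirror the ones used by the CI action):
```shell
npx --yes markdownlint-cli2 "*.md" "docs/src/*.md" "src/uu/*/*.md"
```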
### Spell checker
We use `cspell` as the spell checker for all files in the project. If you are using
VS Code, you can install the
[code spell checker](https://marketplace.visualstudio.com/items?itemName=streetsidesoftware.code-spell-checker)
extension to enable spell checking within your editor. Otherwise, you can
install [cspell](https://cspell.org/) separately.
If you want to make the spell checker ignore a word, you can add
```rust
// spell-checker:ignore word_to_ignore
```
at the top of the file.
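To run the spell check from the command line, one possibility (assuming Node.js is
available; cspell picks up the repository configuration if it finds one) is:
```shell
npx --yes cspell "**"   # check the whole tree against the project dictionaries
```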
## Testing
Testing can be done using either Cargo or `make`.
### Testing with Cargo
Just like with building, we follow the standard procedure for testing using
Cargo:
```shell
cargo test
```
By default, `cargo test` only runs the common programs. To also run the
platform-specific tests, run:
```shell
cargo test --features unix
```
If you would prefer to test a select few utilities:
```shell
cargo test --features "chmod mv tail" --no-default-features
```
If you also want to test the core utilities:
```shell
cargo test -p uucore -p coreutils
```
Running the complete test suite might take a while. We use [nextest](https://nexte.st/index.html) in
the CI, and you might want to try it out locally. It can significantly speed up the whole
test run if the CPU has multiple cores.
```shell
cargo nextest run --features unix --no-fail-fast
```
To debug:
```shell
gdb --args target/debug/coreutils ls
(gdb) b ls.rs:79
(gdb) run
```
### Testing with GNU Make
To simply test all available utilities:
```shell
make test
```
To test all but a few of the available utilities:
```shell
make SKIP_UTILS='UTILITY_1 UTILITY_2' test
```
To test only a few of the available utilities:
```shell
make UTILS='UTILITY_1 UTILITY_2' test
```
To include tests for unimplemented behavior:
```shell
make UTILS='UTILITY_1 UTILITY_2' SPEC=y test
```
To run tests with `nextest`, just use the `nextest` target. Note that you'll need to
[install](https://nexte.st/book/installation.html) `nextest` first. The `nextest` target accepts the
same arguments as the default `test` target, so it's possible to pass arguments to `nextest run`
via `CARGOFLAGS`:
```shell
make CARGOFLAGS='--no-fail-fast' UTILS='UTILITY_1 UTILITY_2' nextest
```
### Run Busybox Tests
This testing functionality is only available on *nix operating systems and
requires `make`.
To run busybox tests for all utilities for which busybox has tests:
```shell
make busytest
```
To run busybox tests for a few of the available utilities:
```shell
make UTILS='UTILITY_1 UTILITY_2' busytest
```
To pass an argument like "-v" to the busybox test runtime:
```shell
make UTILS='UTILITY_1 UTILITY_2' RUNTEST_ARGS='-v' busytest
```
### Comparing with GNU
To run uutils against the GNU test suite locally, run the following commands:
```shell
# Build GNU coreutils and the uutils binaries used by its test suite
bash util/build-gnu.sh
# Or, to build uutils without release optimizations:
UU_MAKE_PROFILE=debug bash util/build-gnu.sh
bash util/run-gnu-test.sh
# To run a single test:
bash util/run-gnu-test.sh tests/touch/not-owner.sh # for example
# To run several tests:
bash util/run-gnu-test.sh tests/touch/not-owner.sh tests/rm/no-give-up.sh # for example
# If the test is a Perl (.pl) test, run it in debug mode with:
DEBUG=1 bash util/run-gnu-test.sh tests/misc/sm3sum.pl
```
Note that the GNU test suite runs against the individual utility binaries (not the multicall binary).
### Improving the GNU compatibility
The Python script `./util/remaining-gnu-error.py` shows the list of failing
tests in the CI.
To improve the GNU compatibility, the following process is recommended:
1. Identify a test (the smaller, the better) on a program that you understand or
   that is easy to understand. You can use the `./util/remaining-gnu-error.py` script
   to help with this decision.
1. Build both the GNU and Rust coreutils using: `bash util/build-gnu.sh`
1. Run the test with `bash util/run-gnu-test.sh <your test>`
1. Start to modify `<your test>` to understand what is wrong (see the sketch after this list). Examples:
1. Add `set -v` to enable bash's verbose mode
1. Add `echo $?` where needed
1. When the variable `fail` is used in the test, add `echo $fail` to see when the
   test started to fail
1. Dump the content of the output (e.g. `cat err`)
1. ...
1. Or, if the test is simple, extract the relevant information to create a new
   test case running both the GNU and Rust implementations
1. Start to modify the Rust implementation to match the expected behavior
1. Add a test to make sure that we don't regress (our test suite is super quick)
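As a rough, purely illustrative sketch of that instrumentation, a failing shell test might temporarily gain lines like these:
```shell
set -v                 # print each command before it is executed
echo "exit: $?"        # inspect the status of the previous command
echo "fail=$fail"      # watch when the test's fail flag flips
cat err                # dump the captured stderr of the command under test
```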
## Commit messages
To help the project maintainers review pull requests from contributors across
numerous utilities, the team has settled on conventions for commit messages.
From <https://git-scm.com/book/ch5-2.html>:
```
Capitalized, short (50 chars or less) summary
More detailed explanatory text, if necessary. Wrap it to about 72
characters or so. In some contexts, the first line is treated as the
subject of an email and the rest of the text as the body. The blank
line separating the summary from the body is critical (unless you omit
the body entirely); tools like rebase can get confused if you run the
two together.
Write your commit message in the imperative: "Fix bug" and not "Fixed bug"
or "Fixes bug." This convention matches up with commit messages generated
by commands like git merge and git revert.
Further paragraphs come after blank lines.
- Bullet points are okay, too
- Typically a hyphen or asterisk is used for the bullet, preceded by a
single space, with blank lines in between, but conventions vary here
- Use a hanging indent
```
Furthermore, here are a few examples for a summary line:
```
uutils: add new utility
gitignore: add temporary files
```
## Code coverage
<!-- spell-checker:ignore (flags) Ccodegen Coverflow Cpanic Zinstrument Zpanic -->
A code coverage report can be generated using [grcov](https://github.com/mozilla/grcov).
### Using Nightly Rust
To generate a [gcov-based](https://github.com/mozilla/grcov#example-how-to-generate-gcda-files-for-a-rust-project) coverage report:
```shell
export CARGO_INCREMENTAL=0
export RUSTFLAGS="-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort"
export RUSTDOCFLAGS="-Cpanic=abort"
cargo build <options...> # e.g., --features feat_os_unix
cargo test <options...> # e.g., --features feat_os_unix test_pathchk
grcov . -s . --binary-path ./target/debug/ -t html --branch --ignore-not-existing --ignore build.rs --excl-br-line "^\s*((debug_)?assert(_eq|_ne)?\#\[derive\()" -o ./target/debug/coverage/
# open target/debug/coverage/index.html in browser
```
If changes are not reflected in the report, run `cargo clean` and then re-run the commands above.
### Using Stable Rust
If you are using a stable version of Rust that doesn't enable code coverage instrumentation by default,
add the `-Cinstrument-coverage` flag (the stabilized form of the nightly-only `-Zinstrument-coverage`)
to the `RUSTFLAGS` env variable specified above.
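A minimal sketch of the stable-toolchain setup, assuming a recent `grcov` that understands the resulting `.profraw` files:
```shell
export CARGO_INCREMENTAL=0
export RUSTFLAGS="-Cinstrument-coverage"
cargo test --features feat_os_unix
grcov . -s . --binary-path ./target/debug/ -t html --ignore-not-existing -o ./target/debug/coverage/
```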
## Other implementations
There are several other implementations of the coreutils, with different levels of completeness:
* [GNU's](https://git.savannah.gnu.org/gitweb/?p=coreutils.git)
* [OpenBSD](https://github.com/openbsd/src/tree/master/bin)
* [Busybox](https://github.com/mirror/busybox/tree/master/coreutils)
* [Toybox (Android)](https://github.com/landley/toybox/tree/master/toys/posix)
* [V lang](https://github.com/vlang/coreutils)
* [SerenityOS](https://github.com/SerenityOS/serenity/tree/master/Userland/Utilities)
* [Initial Unix](https://github.com/dspinellis/unix-history-repo)
However, when reimplementing the tools/options in Rust, do not read their source code
when it is published under a reciprocal license (e.g. GNU GPL, GNU LGPL).
## Licensing

958
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -1,11 +1,11 @@
# coreutils (uutils)
# * see the repository LICENSE, README, and CONTRIBUTING files for more information
# spell-checker:ignore (libs) libselinux gethostid procfs bigdecimal kqueue fundu
# spell-checker:ignore (libs) libselinux gethostid procfs bigdecimal kqueue fundu mangen datetime uuhelp
[package]
name = "coreutils"
version = "0.0.17"
version = "0.0.19"
authors = ["uutils developers"]
license = "MIT"
description = "coreutils ~ GNU coreutils (updated); implemented as universal (cross-platform) utils, written in Rust"
@ -22,16 +22,16 @@ edition = "2021"
build = "build.rs"
[features]
default = [ "feat_common_core" ]
default = ["feat_common_core"]
## OS feature shortcodes
macos = [ "feat_os_macos" ]
unix = [ "feat_os_unix" ]
windows = [ "feat_os_windows" ]
macos = ["feat_os_macos"]
unix = ["feat_os_unix"]
windows = ["feat_os_windows"]
## project-specific feature shortcodes
nightly = []
test_unimplemented = []
# * only build `uudoc` when `--feature uudoc` is activated
uudoc = ["zip"]
uudoc = ["zip", "dep:uuhelp_parser"]
## features
# "feat_acl" == enable support for ACLs (access control lists; by using`--features feat_acl`)
# NOTE:
@ -42,426 +42,429 @@ feat_acl = ["cp/feat_acl"]
# NOTE:
# * The selinux(-sys) crate requires `libselinux` headers and shared library to be accessible in the C toolchain at compile time.
# * Running a uutils compiled with `feat_selinux` requires an SELinux enabled Kernel at run time.
feat_selinux = ["cp/selinux", "id/selinux", "ls/selinux", "selinux", "feat_require_selinux"]
feat_selinux = [
"cp/selinux",
"id/selinux",
"ls/selinux",
"selinux",
"feat_require_selinux",
]
##
## feature sets
## (common/core and Tier1) feature sets
# "feat_common_core" == baseline core set of utilities which can be built/run on most targets
feat_common_core = [
"base32",
"base64",
"basename",
"basenc",
"cat",
"cksum",
"comm",
"cp",
"csplit",
"cut",
"date",
"df",
"dir",
"dircolors",
"dirname",
"dd",
"du",
"echo",
"env",
"expand",
"expr",
"factor",
"false",
"fmt",
"fold",
"hashsum",
"head",
"join",
"link",
"ln",
"ls",
"mkdir",
"mktemp",
"more",
"mv",
"nl",
"numfmt",
"od",
"paste",
"pr",
"printenv",
"printf",
"ptx",
"pwd",
"readlink",
"realpath",
"relpath",
"rm",
"rmdir",
"seq",
"shred",
"shuf",
"sleep",
"sort",
"split",
"sum",
"tac",
"tail",
"tee",
"test",
"tr",
"true",
"truncate",
"tsort",
"touch",
"unexpand",
"uniq",
"unlink",
"vdir",
"wc",
"yes",
"base32",
"base64",
"basename",
"basenc",
"cat",
"cksum",
"comm",
"cp",
"csplit",
"cut",
"date",
"df",
"dir",
"dircolors",
"dirname",
"dd",
"du",
"echo",
"env",
"expand",
"expr",
"factor",
"false",
"fmt",
"fold",
"hashsum",
"head",
"join",
"link",
"ln",
"ls",
"mkdir",
"mktemp",
"more",
"mv",
"nl",
"numfmt",
"od",
"paste",
"pr",
"printenv",
"printf",
"ptx",
"pwd",
"readlink",
"realpath",
"relpath",
"rm",
"rmdir",
"seq",
"shred",
"shuf",
"sleep",
"sort",
"split",
"sum",
"tac",
"tail",
"tee",
"test",
"tr",
"true",
"truncate",
"tsort",
"touch",
"unexpand",
"uniq",
"unlink",
"vdir",
"wc",
"yes",
]
# "feat_Tier1" == expanded set of utilities which can be built/run on the usual rust "Tier 1" target platforms (ref: <https://forge.rust-lang.org/release/platform-support.html>)
feat_Tier1 = [
"feat_common_core",
#
"arch",
"hostname",
"nproc",
"sync",
"touch",
"uname",
"whoami",
"feat_common_core",
#
"arch",
"hostname",
"nproc",
"sync",
"touch",
"uname",
"whoami",
]
## (primary platforms) feature sets
# "feat_os_macos" == set of utilities which can be built/run on the MacOS platform
feat_os_macos = [
"feat_os_unix", ## == a modern/usual *nix platform
#
"feat_require_unix_hostid",
"feat_os_unix", ## == a modern/usual *nix platform
#
"feat_require_unix_hostid",
]
# "feat_os_unix" == set of utilities which can be built/run on modern/usual *nix platforms
feat_os_unix = [
"feat_Tier1",
#
"feat_require_crate_cpp",
"feat_require_unix",
"feat_require_unix_utmpx",
"feat_Tier1",
#
"feat_require_crate_cpp",
"feat_require_unix",
"feat_require_unix_utmpx",
]
# "feat_os_windows" == set of utilities which can be built/run on modern/usual windows platforms
feat_os_windows = [
"feat_Tier1", ## == "feat_os_windows_legacy" + "hostname"
"feat_Tier1", ## == "feat_os_windows_legacy" + "hostname"
]
## (secondary platforms) feature sets
# "feat_os_unix_gnueabihf" == set of utilities which can be built/run on the "arm-unknown-linux-gnueabihf" target (ARMv6 Linux [hardfloat])
feat_os_unix_gnueabihf = [
"feat_Tier1",
#
"feat_require_unix",
"feat_require_unix_hostid",
"feat_require_unix_utmpx",
"feat_Tier1",
#
"feat_require_unix",
"feat_require_unix_hostid",
"feat_require_unix_utmpx",
]
# "feat_os_unix_musl" == set of utilities which can be built/run on targets binding to the "musl" library (ref: <https://musl.libc.org/about.html>)
feat_os_unix_musl = [
"feat_Tier1",
#
"feat_require_unix",
"feat_require_unix_hostid",
"feat_Tier1",
#
"feat_require_unix",
"feat_require_unix_hostid",
]
feat_os_unix_android = [
"feat_Tier1",
#
"feat_require_unix",
"feat_Tier1",
#
"feat_require_unix",
]
## feature sets with requirements (restricting cross-platform availability)
#
# ** NOTE: these `feat_require_...` sets should be minimized as much as possible to encourage cross-platform availability of utilities
#
# "feat_require_crate_cpp" == set of utilities requiring the `cpp` crate (which fail to compile on several platforms; as of 2020-04-23)
feat_require_crate_cpp = [
"stdbuf",
]
feat_require_crate_cpp = ["stdbuf"]
# "feat_require_unix" == set of utilities requiring support which is only available on unix platforms (as of 2020-04-23)
feat_require_unix = [
"chgrp",
"chmod",
"chown",
"chroot",
"groups",
"id",
"install",
"kill",
"logname",
"mkfifo",
"mknod",
"nice",
"nohup",
"pathchk",
"stat",
"stty",
"timeout",
"tty",
"chgrp",
"chmod",
"chown",
"chroot",
"groups",
"id",
"install",
"kill",
"logname",
"mkfifo",
"mknod",
"nice",
"nohup",
"pathchk",
"stat",
"stty",
"timeout",
"tty",
]
# "feat_require_unix_utmpx" == set of utilities requiring unix utmp/utmpx support
# * ref: <https://wiki.musl-libc.org/faq.html#Q:-Why-is-the-utmp/wtmp-functionality-only-implemented-as-stubs?>
feat_require_unix_utmpx = [
"pinky",
"uptime",
"users",
"who",
]
feat_require_unix_utmpx = ["pinky", "uptime", "users", "who"]
# "feat_require_unix_hostid" == set of utilities requiring gethostid in libc (only some unixes provide)
feat_require_unix_hostid = [
"hostid",
]
feat_require_unix_hostid = ["hostid"]
# "feat_require_selinux" == set of utilities depending on SELinux.
feat_require_selinux = [
"chcon",
"runcon",
]
feat_require_selinux = ["chcon", "runcon"]
## (alternate/newer/smaller platforms) feature sets
# "feat_os_unix_fuchsia" == set of utilities which can be built/run on the "Fuchsia" OS (refs: <https://fuchsia.dev>; <https://en.wikipedia.org/wiki/Google_Fuchsia>)
feat_os_unix_fuchsia = [
"feat_common_core",
#
"feat_require_crate_cpp",
#
"chgrp",
"chmod",
"chown",
"du",
"groups",
"hostid",
"install",
"logname",
"mkfifo",
"mknod",
"nice",
"pathchk",
"tty",
"uname",
"unlink",
"feat_common_core",
#
"feat_require_crate_cpp",
#
"chgrp",
"chmod",
"chown",
"du",
"groups",
"hostid",
"install",
"logname",
"mkfifo",
"mknod",
"nice",
"pathchk",
"tty",
"uname",
"unlink",
]
# "feat_os_unix_redox" == set of utilities which can be built/run on "Redox OS" (refs: <https://www.redox-os.org>; <https://en.wikipedia.org/wiki/Redox_(operating_system)>)
feat_os_unix_redox = [
"feat_common_core",
#
"chmod",
"uname",
"feat_common_core",
#
"chmod",
"uname",
]
# "feat_os_windows_legacy" == slightly restricted set of utilities which can be built/run on early windows platforms (eg, "WinXP")
feat_os_windows_legacy = [
"feat_common_core",
#
"arch",
"nproc",
"sync",
"touch",
"whoami",
"feat_common_core",
#
"arch",
"nproc",
"sync",
"touch",
"whoami",
]
##
# * bypass/override ~ translate 'test' feature name to avoid dependency collision with rust core 'test' crate (o/w surfaces as compiler errors during testing)
test = [ "uu_test" ]
test = ["uu_test"]
[workspace.dependencies]
bigdecimal = "0.3"
binary-heap-plus = "0.5.0"
bstr = "1.0"
bstr = "1.5"
bytecount = "0.6.3"
byteorder = "1.3.2"
chrono = { version="^0.4.23", default-features=false, features=["std", "alloc", "clock"]}
clap = { version = "4.0", features = ["wrap_help", "cargo"] }
clap_complete = "4.0"
byteorder = "1.4.3"
chrono = { version = "^0.4.26", default-features = false, features = [
"std",
"alloc",
"clock",
] }
clap = { version = "4.3", features = ["wrap_help", "cargo"] }
clap_complete = "4.3"
clap_mangen = "0.2"
compare = "0.1.0"
coz = { version = "0.1.3" }
crossterm = ">=0.19"
ctrlc = { version = "3.0", features = ["termination"] }
exacl = "0.9.0"
crossterm = ">=0.26.1"
ctrlc = { version = "3.4", features = ["termination"] }
exacl = "0.10.0"
file_diff = "1.0.0"
filetime = "0.2"
fnv = "1.0.7"
fs_extra = "1.1.0"
fs_extra = "1.3.0"
fts-sys = "0.2"
fundu = "0.3.0"
gcd = "2.2"
glob = "0.3.0"
half = "2.1"
fundu = "1.0.0"
gcd = "2.3"
glob = "0.3.1"
half = "2.2"
indicatif = "0.17"
is-terminal = "0.4.3"
itertools = "0.10.0"
libc = "0.2.139"
lscolors = { version = "0.13.0", default-features=false, features = ["nu-ansi-term"] }
is-terminal = "0.4.7"
itertools = "0.10.5"
libc = "0.2.146"
lscolors = { version = "0.14.0", default-features = false, features = [
"nu-ansi-term",
] }
memchr = "2"
nix = { version="0.26", default-features=false }
nom = "7.1.1"
notify = { version = "=5.0.0", features=["macos_kqueue"]}
num_cpus = "1.14"
num-bigint = "0.4.0"
nix = { version = "0.26", default-features = false }
nom = "7.1.3"
notify = { version = "=6.0.0", features = ["macos_kqueue"] }
num-bigint = "0.4.3"
num-traits = "0.2.15"
number_prefix = "0.4"
once_cell = "1.13.1"
once_cell = "1.18.0"
onig = { version = "~6.4", default-features = false }
ouroboros = "0.15.5"
ouroboros = "0.15.6"
parse_datetime = "0.4.0"
phf = "0.11.1"
phf_codegen = "0.11.1"
platform-info = "1.0.2"
platform-info = "2.0.1"
quick-error = "2.0.1"
rand = { version = "0.8", features = ["small_rng"] }
rand_core = "0.6"
rayon = "1.5"
redox_syscall = "0.2"
regex = "1.7.1"
rayon = "1.7"
redox_syscall = "0.3"
regex = "1.8.4"
rstest = "0.17.0"
rust-ini = "0.18.0"
same-file = "1.0.6"
selinux = "0.4"
signal-hook = "0.3.14"
signal-hook = "0.3.15"
smallvec = { version = "1.10", features = ["union"] }
strum = "0.24.1"
strum_macros = "0.24.2"
tempfile = "3.3.0"
tempfile = "3.6.0"
term_grid = "0.1.5"
terminal_size = "0.2.2"
textwrap = { version="0.16.0", features=["terminal_size"] }
terminal_size = "0.2.6"
textwrap = { version = "0.16.0", features = ["terminal_size"] }
thiserror = "1.0"
time = { version="0.3" }
unicode-segmentation = "1.9.0"
unicode-width = "0.1.8"
time = { version = "0.3" }
unicode-segmentation = "1.10.1"
unicode-width = "0.1.10"
utf-8 = "0.7.6"
walkdir = "2.2"
walkdir = "2.3"
winapi-util = "0.1.5"
windows-sys = { version="0.42.0", default-features=false }
xattr = "0.2.3"
zip = { version = "0.6.3", default_features=false, features=["deflate"] }
windows-sys = { version = "0.48.0", default-features = false }
xattr = "1.0.0"
zip = { version = "0.6.6", default_features = false, features = ["deflate"] }
hex = "0.4.3"
md-5 = "0.10.5"
sha1 = "0.10.1"
sha2 = "0.10.2"
sha3 = "0.10.6"
sha1 = "0.10.5"
sha2 = "0.10.6"
sha3 = "0.10.8"
blake2b_simd = "1.0.1"
blake3 = "1.3.2"
sm3 = "0.4.1"
digest = "0.10.6"
blake3 = "1.4.0"
sm3 = "0.4.2"
digest = "0.10.7"
uucore = { version=">=0.0.17", package="uucore", path="src/uucore" }
uucore_procs = { version=">=0.0.17", package="uucore_procs", path="src/uucore_procs" }
uu_ls = { version=">=0.0.17", path="src/uu/ls" }
uu_base32 = { version=">=0.0.17", path="src/uu/base32"}
uucore = { version = ">=0.0.19", package = "uucore", path = "src/uucore" }
uucore_procs = { version = ">=0.0.19", package = "uucore_procs", path = "src/uucore_procs" }
uu_ls = { version = ">=0.0.18", path = "src/uu/ls" }
uu_base32 = { version = ">=0.0.18", path = "src/uu/base32" }
[dependencies]
clap = { workspace=true }
once_cell = { workspace=true }
uucore = { workspace=true }
clap_complete = { workspace=true }
phf = { workspace=true }
selinux = { workspace=true, optional = true }
textwrap = { workspace=true }
zip = { workspace=true, optional = true }
clap = { workspace = true }
once_cell = { workspace = true }
uucore = { workspace = true }
clap_complete = { workspace = true }
clap_mangen = { workspace = true }
phf = { workspace = true }
selinux = { workspace = true, optional = true }
textwrap = { workspace = true }
zip = { workspace = true, optional = true }
uuhelp_parser = { optional = true, version = ">=0.0.19", path = "src/uuhelp_parser" }
# * uutils
uu_test = { optional=true, version="0.0.17", package="uu_test", path="src/uu/test" }
uu_test = { optional = true, version = "0.0.19", package = "uu_test", path = "src/uu/test" }
#
arch = { optional=true, version="0.0.17", package="uu_arch", path="src/uu/arch" }
base32 = { optional=true, version="0.0.17", package="uu_base32", path="src/uu/base32" }
base64 = { optional=true, version="0.0.17", package="uu_base64", path="src/uu/base64" }
basename = { optional=true, version="0.0.17", package="uu_basename", path="src/uu/basename" }
basenc = { optional=true, version="0.0.17", package="uu_basenc", path="src/uu/basenc" }
cat = { optional=true, version="0.0.17", package="uu_cat", path="src/uu/cat" }
chcon = { optional=true, version="0.0.17", package="uu_chcon", path="src/uu/chcon" }
chgrp = { optional=true, version="0.0.17", package="uu_chgrp", path="src/uu/chgrp" }
chmod = { optional=true, version="0.0.17", package="uu_chmod", path="src/uu/chmod" }
chown = { optional=true, version="0.0.17", package="uu_chown", path="src/uu/chown" }
chroot = { optional=true, version="0.0.17", package="uu_chroot", path="src/uu/chroot" }
cksum = { optional=true, version="0.0.17", package="uu_cksum", path="src/uu/cksum" }
comm = { optional=true, version="0.0.17", package="uu_comm", path="src/uu/comm" }
cp = { optional=true, version="0.0.17", package="uu_cp", path="src/uu/cp" }
csplit = { optional=true, version="0.0.17", package="uu_csplit", path="src/uu/csplit" }
cut = { optional=true, version="0.0.17", package="uu_cut", path="src/uu/cut" }
date = { optional=true, version="0.0.17", package="uu_date", path="src/uu/date" }
dd = { optional=true, version="0.0.17", package="uu_dd", path="src/uu/dd" }
df = { optional=true, version="0.0.17", package="uu_df", path="src/uu/df" }
dir = { optional=true, version="0.0.17", package="uu_dir", path="src/uu/dir" }
dircolors= { optional=true, version="0.0.17", package="uu_dircolors", path="src/uu/dircolors" }
dirname = { optional=true, version="0.0.17", package="uu_dirname", path="src/uu/dirname" }
du = { optional=true, version="0.0.17", package="uu_du", path="src/uu/du" }
echo = { optional=true, version="0.0.17", package="uu_echo", path="src/uu/echo" }
env = { optional=true, version="0.0.17", package="uu_env", path="src/uu/env" }
expand = { optional=true, version="0.0.17", package="uu_expand", path="src/uu/expand" }
expr = { optional=true, version="0.0.17", package="uu_expr", path="src/uu/expr" }
factor = { optional=true, version="0.0.17", package="uu_factor", path="src/uu/factor" }
false = { optional=true, version="0.0.17", package="uu_false", path="src/uu/false" }
fmt = { optional=true, version="0.0.17", package="uu_fmt", path="src/uu/fmt" }
fold = { optional=true, version="0.0.17", package="uu_fold", path="src/uu/fold" }
groups = { optional=true, version="0.0.17", package="uu_groups", path="src/uu/groups" }
hashsum = { optional=true, version="0.0.17", package="uu_hashsum", path="src/uu/hashsum" }
head = { optional=true, version="0.0.17", package="uu_head", path="src/uu/head" }
hostid = { optional=true, version="0.0.17", package="uu_hostid", path="src/uu/hostid" }
hostname = { optional=true, version="0.0.17", package="uu_hostname", path="src/uu/hostname" }
id = { optional=true, version="0.0.17", package="uu_id", path="src/uu/id" }
install = { optional=true, version="0.0.17", package="uu_install", path="src/uu/install" }
join = { optional=true, version="0.0.17", package="uu_join", path="src/uu/join" }
kill = { optional=true, version="0.0.17", package="uu_kill", path="src/uu/kill" }
link = { optional=true, version="0.0.17", package="uu_link", path="src/uu/link" }
ln = { optional=true, version="0.0.17", package="uu_ln", path="src/uu/ln" }
ls = { optional=true, version="0.0.17", package="uu_ls", path="src/uu/ls" }
logname = { optional=true, version="0.0.17", package="uu_logname", path="src/uu/logname" }
mkdir = { optional=true, version="0.0.17", package="uu_mkdir", path="src/uu/mkdir" }
mkfifo = { optional=true, version="0.0.17", package="uu_mkfifo", path="src/uu/mkfifo" }
mknod = { optional=true, version="0.0.17", package="uu_mknod", path="src/uu/mknod" }
mktemp = { optional=true, version="0.0.17", package="uu_mktemp", path="src/uu/mktemp" }
more = { optional=true, version="0.0.17", package="uu_more", path="src/uu/more" }
mv = { optional=true, version="0.0.17", package="uu_mv", path="src/uu/mv" }
nice = { optional=true, version="0.0.17", package="uu_nice", path="src/uu/nice" }
nl = { optional=true, version="0.0.17", package="uu_nl", path="src/uu/nl" }
nohup = { optional=true, version="0.0.17", package="uu_nohup", path="src/uu/nohup" }
nproc = { optional=true, version="0.0.17", package="uu_nproc", path="src/uu/nproc" }
numfmt = { optional=true, version="0.0.17", package="uu_numfmt", path="src/uu/numfmt" }
od = { optional=true, version="0.0.17", package="uu_od", path="src/uu/od" }
paste = { optional=true, version="0.0.17", package="uu_paste", path="src/uu/paste" }
pathchk = { optional=true, version="0.0.17", package="uu_pathchk", path="src/uu/pathchk" }
pinky = { optional=true, version="0.0.17", package="uu_pinky", path="src/uu/pinky" }
pr = { optional=true, version="0.0.17", package="uu_pr", path="src/uu/pr" }
printenv = { optional=true, version="0.0.17", package="uu_printenv", path="src/uu/printenv" }
printf = { optional=true, version="0.0.17", package="uu_printf", path="src/uu/printf" }
ptx = { optional=true, version="0.0.17", package="uu_ptx", path="src/uu/ptx" }
pwd = { optional=true, version="0.0.17", package="uu_pwd", path="src/uu/pwd" }
readlink = { optional=true, version="0.0.17", package="uu_readlink", path="src/uu/readlink" }
realpath = { optional=true, version="0.0.17", package="uu_realpath", path="src/uu/realpath" }
relpath = { optional=true, version="0.0.17", package="uu_relpath", path="src/uu/relpath" }
rm = { optional=true, version="0.0.17", package="uu_rm", path="src/uu/rm" }
rmdir = { optional=true, version="0.0.17", package="uu_rmdir", path="src/uu/rmdir" }
runcon = { optional=true, version="0.0.17", package="uu_runcon", path="src/uu/runcon" }
seq = { optional=true, version="0.0.17", package="uu_seq", path="src/uu/seq" }
shred = { optional=true, version="0.0.17", package="uu_shred", path="src/uu/shred" }
shuf = { optional=true, version="0.0.17", package="uu_shuf", path="src/uu/shuf" }
sleep = { optional=true, version="0.0.17", package="uu_sleep", path="src/uu/sleep" }
sort = { optional=true, version="0.0.17", package="uu_sort", path="src/uu/sort" }
split = { optional=true, version="0.0.17", package="uu_split", path="src/uu/split" }
stat = { optional=true, version="0.0.17", package="uu_stat", path="src/uu/stat" }
stdbuf = { optional=true, version="0.0.17", package="uu_stdbuf", path="src/uu/stdbuf" }
stty = { optional=true, version="0.0.17", package="uu_stty", path="src/uu/stty" }
sum = { optional=true, version="0.0.17", package="uu_sum", path="src/uu/sum" }
sync = { optional=true, version="0.0.17", package="uu_sync", path="src/uu/sync" }
tac = { optional=true, version="0.0.17", package="uu_tac", path="src/uu/tac" }
tail = { optional=true, version="0.0.17", package="uu_tail", path="src/uu/tail" }
tee = { optional=true, version="0.0.17", package="uu_tee", path="src/uu/tee" }
timeout = { optional=true, version="0.0.17", package="uu_timeout", path="src/uu/timeout" }
touch = { optional=true, version="0.0.17", package="uu_touch", path="src/uu/touch" }
tr = { optional=true, version="0.0.17", package="uu_tr", path="src/uu/tr" }
true = { optional=true, version="0.0.17", package="uu_true", path="src/uu/true" }
truncate = { optional=true, version="0.0.17", package="uu_truncate", path="src/uu/truncate" }
tsort = { optional=true, version="0.0.17", package="uu_tsort", path="src/uu/tsort" }
tty = { optional=true, version="0.0.17", package="uu_tty", path="src/uu/tty" }
uname = { optional=true, version="0.0.17", package="uu_uname", path="src/uu/uname" }
unexpand = { optional=true, version="0.0.17", package="uu_unexpand", path="src/uu/unexpand" }
uniq = { optional=true, version="0.0.17", package="uu_uniq", path="src/uu/uniq" }
unlink = { optional=true, version="0.0.17", package="uu_unlink", path="src/uu/unlink" }
uptime = { optional=true, version="0.0.17", package="uu_uptime", path="src/uu/uptime" }
users = { optional=true, version="0.0.17", package="uu_users", path="src/uu/users" }
vdir = { optional=true, version="0.0.17", package="uu_vdir", path="src/uu/vdir" }
wc = { optional=true, version="0.0.17", package="uu_wc", path="src/uu/wc" }
who = { optional=true, version="0.0.17", package="uu_who", path="src/uu/who" }
whoami = { optional=true, version="0.0.17", package="uu_whoami", path="src/uu/whoami" }
yes = { optional=true, version="0.0.17", package="uu_yes", path="src/uu/yes" }
arch = { optional = true, version = "0.0.19", package = "uu_arch", path = "src/uu/arch" }
base32 = { optional = true, version = "0.0.19", package = "uu_base32", path = "src/uu/base32" }
base64 = { optional = true, version = "0.0.19", package = "uu_base64", path = "src/uu/base64" }
basename = { optional = true, version = "0.0.19", package = "uu_basename", path = "src/uu/basename" }
basenc = { optional = true, version = "0.0.19", package = "uu_basenc", path = "src/uu/basenc" }
cat = { optional = true, version = "0.0.19", package = "uu_cat", path = "src/uu/cat" }
chcon = { optional = true, version = "0.0.19", package = "uu_chcon", path = "src/uu/chcon" }
chgrp = { optional = true, version = "0.0.19", package = "uu_chgrp", path = "src/uu/chgrp" }
chmod = { optional = true, version = "0.0.19", package = "uu_chmod", path = "src/uu/chmod" }
chown = { optional = true, version = "0.0.19", package = "uu_chown", path = "src/uu/chown" }
chroot = { optional = true, version = "0.0.19", package = "uu_chroot", path = "src/uu/chroot" }
cksum = { optional = true, version = "0.0.19", package = "uu_cksum", path = "src/uu/cksum" }
comm = { optional = true, version = "0.0.19", package = "uu_comm", path = "src/uu/comm" }
cp = { optional = true, version = "0.0.19", package = "uu_cp", path = "src/uu/cp" }
csplit = { optional = true, version = "0.0.19", package = "uu_csplit", path = "src/uu/csplit" }
cut = { optional = true, version = "0.0.19", package = "uu_cut", path = "src/uu/cut" }
date = { optional = true, version = "0.0.19", package = "uu_date", path = "src/uu/date" }
dd = { optional = true, version = "0.0.19", package = "uu_dd", path = "src/uu/dd" }
df = { optional = true, version = "0.0.19", package = "uu_df", path = "src/uu/df" }
dir = { optional = true, version = "0.0.19", package = "uu_dir", path = "src/uu/dir" }
dircolors = { optional = true, version = "0.0.19", package = "uu_dircolors", path = "src/uu/dircolors" }
dirname = { optional = true, version = "0.0.19", package = "uu_dirname", path = "src/uu/dirname" }
du = { optional = true, version = "0.0.19", package = "uu_du", path = "src/uu/du" }
echo = { optional = true, version = "0.0.19", package = "uu_echo", path = "src/uu/echo" }
env = { optional = true, version = "0.0.19", package = "uu_env", path = "src/uu/env" }
expand = { optional = true, version = "0.0.19", package = "uu_expand", path = "src/uu/expand" }
expr = { optional = true, version = "0.0.19", package = "uu_expr", path = "src/uu/expr" }
factor = { optional = true, version = "0.0.19", package = "uu_factor", path = "src/uu/factor" }
false = { optional = true, version = "0.0.19", package = "uu_false", path = "src/uu/false" }
fmt = { optional = true, version = "0.0.19", package = "uu_fmt", path = "src/uu/fmt" }
fold = { optional = true, version = "0.0.19", package = "uu_fold", path = "src/uu/fold" }
groups = { optional = true, version = "0.0.19", package = "uu_groups", path = "src/uu/groups" }
hashsum = { optional = true, version = "0.0.19", package = "uu_hashsum", path = "src/uu/hashsum" }
head = { optional = true, version = "0.0.19", package = "uu_head", path = "src/uu/head" }
hostid = { optional = true, version = "0.0.19", package = "uu_hostid", path = "src/uu/hostid" }
hostname = { optional = true, version = "0.0.19", package = "uu_hostname", path = "src/uu/hostname" }
id = { optional = true, version = "0.0.19", package = "uu_id", path = "src/uu/id" }
install = { optional = true, version = "0.0.19", package = "uu_install", path = "src/uu/install" }
join = { optional = true, version = "0.0.19", package = "uu_join", path = "src/uu/join" }
kill = { optional = true, version = "0.0.19", package = "uu_kill", path = "src/uu/kill" }
link = { optional = true, version = "0.0.19", package = "uu_link", path = "src/uu/link" }
ln = { optional = true, version = "0.0.19", package = "uu_ln", path = "src/uu/ln" }
ls = { optional = true, version = "0.0.19", package = "uu_ls", path = "src/uu/ls" }
logname = { optional = true, version = "0.0.19", package = "uu_logname", path = "src/uu/logname" }
mkdir = { optional = true, version = "0.0.19", package = "uu_mkdir", path = "src/uu/mkdir" }
mkfifo = { optional = true, version = "0.0.19", package = "uu_mkfifo", path = "src/uu/mkfifo" }
mknod = { optional = true, version = "0.0.19", package = "uu_mknod", path = "src/uu/mknod" }
mktemp = { optional = true, version = "0.0.19", package = "uu_mktemp", path = "src/uu/mktemp" }
more = { optional = true, version = "0.0.19", package = "uu_more", path = "src/uu/more" }
mv = { optional = true, version = "0.0.19", package = "uu_mv", path = "src/uu/mv" }
nice = { optional = true, version = "0.0.19", package = "uu_nice", path = "src/uu/nice" }
nl = { optional = true, version = "0.0.19", package = "uu_nl", path = "src/uu/nl" }
nohup = { optional = true, version = "0.0.19", package = "uu_nohup", path = "src/uu/nohup" }
nproc = { optional = true, version = "0.0.19", package = "uu_nproc", path = "src/uu/nproc" }
numfmt = { optional = true, version = "0.0.19", package = "uu_numfmt", path = "src/uu/numfmt" }
od = { optional = true, version = "0.0.19", package = "uu_od", path = "src/uu/od" }
paste = { optional = true, version = "0.0.19", package = "uu_paste", path = "src/uu/paste" }
pathchk = { optional = true, version = "0.0.19", package = "uu_pathchk", path = "src/uu/pathchk" }
pinky = { optional = true, version = "0.0.19", package = "uu_pinky", path = "src/uu/pinky" }
pr = { optional = true, version = "0.0.19", package = "uu_pr", path = "src/uu/pr" }
printenv = { optional = true, version = "0.0.19", package = "uu_printenv", path = "src/uu/printenv" }
printf = { optional = true, version = "0.0.19", package = "uu_printf", path = "src/uu/printf" }
ptx = { optional = true, version = "0.0.19", package = "uu_ptx", path = "src/uu/ptx" }
pwd = { optional = true, version = "0.0.19", package = "uu_pwd", path = "src/uu/pwd" }
readlink = { optional = true, version = "0.0.19", package = "uu_readlink", path = "src/uu/readlink" }
realpath = { optional = true, version = "0.0.19", package = "uu_realpath", path = "src/uu/realpath" }
relpath = { optional = true, version = "0.0.19", package = "uu_relpath", path = "src/uu/relpath" }
rm = { optional = true, version = "0.0.19", package = "uu_rm", path = "src/uu/rm" }
rmdir = { optional = true, version = "0.0.19", package = "uu_rmdir", path = "src/uu/rmdir" }
runcon = { optional = true, version = "0.0.19", package = "uu_runcon", path = "src/uu/runcon" }
seq = { optional = true, version = "0.0.19", package = "uu_seq", path = "src/uu/seq" }
shred = { optional = true, version = "0.0.19", package = "uu_shred", path = "src/uu/shred" }
shuf = { optional = true, version = "0.0.19", package = "uu_shuf", path = "src/uu/shuf" }
sleep = { optional = true, version = "0.0.19", package = "uu_sleep", path = "src/uu/sleep" }
sort = { optional = true, version = "0.0.19", package = "uu_sort", path = "src/uu/sort" }
split = { optional = true, version = "0.0.19", package = "uu_split", path = "src/uu/split" }
stat = { optional = true, version = "0.0.19", package = "uu_stat", path = "src/uu/stat" }
stdbuf = { optional = true, version = "0.0.19", package = "uu_stdbuf", path = "src/uu/stdbuf" }
stty = { optional = true, version = "0.0.19", package = "uu_stty", path = "src/uu/stty" }
sum = { optional = true, version = "0.0.19", package = "uu_sum", path = "src/uu/sum" }
sync = { optional = true, version = "0.0.19", package = "uu_sync", path = "src/uu/sync" }
tac = { optional = true, version = "0.0.19", package = "uu_tac", path = "src/uu/tac" }
tail = { optional = true, version = "0.0.19", package = "uu_tail", path = "src/uu/tail" }
tee = { optional = true, version = "0.0.19", package = "uu_tee", path = "src/uu/tee" }
timeout = { optional = true, version = "0.0.19", package = "uu_timeout", path = "src/uu/timeout" }
touch = { optional = true, version = "0.0.19", package = "uu_touch", path = "src/uu/touch" }
tr = { optional = true, version = "0.0.19", package = "uu_tr", path = "src/uu/tr" }
true = { optional = true, version = "0.0.19", package = "uu_true", path = "src/uu/true" }
truncate = { optional = true, version = "0.0.19", package = "uu_truncate", path = "src/uu/truncate" }
tsort = { optional = true, version = "0.0.19", package = "uu_tsort", path = "src/uu/tsort" }
tty = { optional = true, version = "0.0.19", package = "uu_tty", path = "src/uu/tty" }
uname = { optional = true, version = "0.0.19", package = "uu_uname", path = "src/uu/uname" }
unexpand = { optional = true, version = "0.0.19", package = "uu_unexpand", path = "src/uu/unexpand" }
uniq = { optional = true, version = "0.0.19", package = "uu_uniq", path = "src/uu/uniq" }
unlink = { optional = true, version = "0.0.19", package = "uu_unlink", path = "src/uu/unlink" }
uptime = { optional = true, version = "0.0.19", package = "uu_uptime", path = "src/uu/uptime" }
users = { optional = true, version = "0.0.19", package = "uu_users", path = "src/uu/users" }
vdir = { optional = true, version = "0.0.19", package = "uu_vdir", path = "src/uu/vdir" }
wc = { optional = true, version = "0.0.19", package = "uu_wc", path = "src/uu/wc" }
who = { optional = true, version = "0.0.19", package = "uu_who", path = "src/uu/who" }
whoami = { optional = true, version = "0.0.19", package = "uu_whoami", path = "src/uu/whoami" }
yes = { optional = true, version = "0.0.19", package = "uu_yes", path = "src/uu/yes" }
# this breaks clippy linting with: "tests/by-util/test_factor_benches.rs: No such file or directory (os error 2)"
# factor_benches = { optional = true, version = "0.0.0", package = "uu_factor_benches", path = "tests/benches/factor" }
@ -472,35 +475,34 @@ yes = { optional=true, version="0.0.17", package="uu_yes", path="src/uu/yes
#pin_cc = { version="1.0.61, < 1.0.62", package="cc" } ## cc v1.0.62 has compiler errors for MinRustV v1.32.0, requires 1.34 (for `std::str::split_ascii_whitespace()`)
[dev-dependencies]
chrono = { workspace=true }
chrono = { workspace = true }
conv = "0.3"
filetime = { workspace=true }
glob = { workspace=true }
libc = { workspace=true }
filetime = { workspace = true }
glob = { workspace = true }
libc = { workspace = true }
pretty_assertions = "1"
rand = { workspace=true }
regex = { workspace=true }
sha1 = { version="0.10", features=["std"] }
tempfile = { workspace=true }
time = { workspace=true, features=["local-offset"] }
unindent = "0.1"
uucore = { workspace=true, features=["entries", "process", "signals"] }
walkdir = { workspace=true }
is-terminal = { workspace=true }
hex-literal = "0.3.1"
rstest = "0.16.0"
rand = { workspace = true }
regex = { workspace = true }
sha1 = { version = "0.10", features = ["std"] }
tempfile = { workspace = true }
time = { workspace = true, features = ["local-offset"] }
unindent = "0.2"
uucore = { workspace = true, features = ["entries", "process", "signals"] }
walkdir = { workspace = true }
is-terminal = { workspace = true }
hex-literal = "0.4.1"
rstest = "0.17.0"
[target.'cfg(any(target_os = "linux", target_os = "android"))'.dev-dependencies]
procfs = { version = "0.14", default-features = false }
procfs = { version = "0.15", default-features = false }
rlimit = "0.9.1"
[target.'cfg(unix)'.dev-dependencies]
nix = { workspace=true, features=["process", "signal", "user"] }
rust-users = { version="0.11", package="users" }
nix = { workspace = true, features = ["process", "signal", "user"] }
rand_pcg = "0.3"
[build-dependencies]
phf_codegen = { workspace=true }
phf_codegen = { workspace = true }
[[bin]]
name = "coreutils"
@ -510,3 +512,22 @@ path = "src/bin/coreutils.rs"
name = "uudoc"
path = "src/bin/uudoc.rs"
required-features = ["uudoc"]
# The default release profile. It contains all optimizations, without
# sacrificing debug info. With this profile (like in the standard
# release profile), the debug info and the stack traces will still be available.
[profile.release]
lto = true
# A release-like profile that is tuned to be fast, even when being fast
# compromises on binary size. This includes aborting on panic.
[profile.release-fast]
inherits = "release"
panic = "abort"
# A release-like profile that is as small as possible.
[profile.release-small]
inherits = "release"
opt-level = "z"
panic = "abort"
strip = true

View file

@ -1,70 +0,0 @@
Documentation
-------------
The source of the documentation is available on:
https://uutils.github.io/dev/coreutils/
The documentation is updated everyday on this repository:
https://github.com/uutils/uutils.github.io/
Running GNU tests
-----------------
<!-- spell-checker:ignore gnulib -->
- Check out https://github.com/coreutils/coreutils next to your fork as gnu
- Check out https://github.com/coreutils/gnulib next to your fork as gnulib
- Rename the checkout of your fork to uutils
At the end you should have uutils, gnu and gnulib checked out next to each other.
- Run `cd uutils && ./util/build-gnu.sh && cd ..` to get everything ready (this may take a while)
- Finally, you can run tests with `bash uutils/util/run-gnu-test.sh <tests>`. Instead of `<tests>` insert the tests you want to run, e.g. `tests/misc/wc-proc.sh`.
Code Coverage Report Generation
---------------------------------
<!-- spell-checker:ignore (flags) Ccodegen Coverflow Cpanic Zinstrument Zpanic -->
Code coverage report can be generated using [grcov](https://github.com/mozilla/grcov).
### Using Nightly Rust
To generate [gcov-based](https://github.com/mozilla/grcov#example-how-to-generate-gcda-files-for-a-rust-project) coverage report
```bash
$ export CARGO_INCREMENTAL=0
$ export RUSTFLAGS="-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort"
$ export RUSTDOCFLAGS="-Cpanic=abort"
$ cargo build <options...> # e.g., --features feat_os_unix
$ cargo test <options...> # e.g., --features feat_os_unix test_pathchk
$ grcov . -s . --binary-path ./target/debug/ -t html --branch --ignore-not-existing --ignore build.rs --excl-br-line "^\s*((debug_)?assert(_eq|_ne)?\#\[derive\()" -o ./target/debug/coverage/
$ # open target/debug/coverage/index.html in browser
```
if changes are not reflected in the report then run `cargo clean` and run the above commands.
### Using Stable Rust
If you are using stable version of Rust that doesn't enable code coverage instrumentation by default
then add `-Z-Zinstrument-coverage` flag to `RUSTFLAGS` env variable specified above.
pre-commit hooks
----------------
A configuration for `pre-commit` is provided in the repository. It allows automatically checking every git commit you make to ensure it compiles, and passes `clippy` and `rustfmt` without warnings.
To use the provided hook:
1. [Install `pre-commit`](https://pre-commit.com/#install)
2. Run `pre-commit install` while in the repository directory
Your git commits will then automatically be checked. If a check fails, an error message will explain why, and your commit will be canceled. You can then make the suggested changes, and run `git commit ...` again.
### Using Clippy
The `msrv` key in the clippy configuration file `clippy.toml` is used to disable lints pertaining to newer features by specifying the minimum supported Rust version (MSRV). However, this key is only supported on `nightly`. To invoke clippy without errors, use `cargo +nightly clippy`. In order to also check tests and non-default crate features, use `cargo +nightly clippy --all-targets --all-features`.

View file

@ -1,4 +1,4 @@
# spell-checker:ignore (misc) testsuite runtest findstring (targets) busytest toybox distclean pkgs ; (vars/env) BINDIR BUILDDIR CARGOFLAGS DESTDIR DOCSDIR INSTALLDIR INSTALLEES MULTICALL DATAROOTDIR TESTDIR
# spell-checker:ignore (misc) testsuite runtest findstring (targets) busytest toybox distclean pkgs nextest ; (vars/env) BINDIR BUILDDIR CARGOFLAGS DESTDIR DOCSDIR INSTALLDIR INSTALLEES MULTICALL DATAROOTDIR TESTDIR
# Config options
PROFILE ?= debug
@ -289,6 +289,9 @@ $(foreach test,$(filter-out $(SKIP_UTILS),$(PROGS)),$(eval $(call TEST_BUSYBOX,$
test:
${CARGO} test ${CARGOFLAGS} --features "$(TESTS) $(TEST_SPEC_FEATURE)" --no-default-features $(TEST_NO_FAIL_FAST)
nextest:
${CARGO} nextest run ${CARGOFLAGS} --features "$(TESTS) $(TEST_SPEC_FEATURE)" --no-default-features $(TEST_NO_FAIL_FAST)
test_toybox:
-(cd $(TOYBOX_SRC)/ && make tests)
@ -349,10 +352,12 @@ endif
mkdir -p $(DESTDIR)$(DATAROOTDIR)/zsh/site-functions
mkdir -p $(DESTDIR)$(DATAROOTDIR)/bash-completion/completions
mkdir -p $(DESTDIR)$(DATAROOTDIR)/fish/vendor_completions.d
mkdir -p $(DESTDIR)$(DATAROOTDIR)/man/man1
$(foreach prog, $(INSTALLEES), \
$(BUILDDIR)/coreutils completion $(prog) zsh > $(DESTDIR)$(DATAROOTDIR)/zsh/site-functions/_$(PROG_PREFIX)$(prog); \
$(BUILDDIR)/coreutils completion $(prog) bash > $(DESTDIR)$(DATAROOTDIR)/bash-completion/completions/$(PROG_PREFIX)$(prog); \
$(BUILDDIR)/coreutils completion $(prog) fish > $(DESTDIR)$(DATAROOTDIR)/fish/vendor_completions.d/$(PROG_PREFIX)$(prog).fish; \
$(BUILDDIR)/coreutils manpage $(prog) > $(DESTDIR)$(DATAROOTDIR)/man/man1/$(PROG_PREFIX)$(prog).1; \
)
uninstall:

View file

@ -20,15 +20,12 @@ run_task = "_init"
[tasks._init]
private = true
dependencies = [
"_init-vars",
]
dependencies = ["_init-vars"]
[tasks._init-vars]
private = true
script_runner = "@duckscript"
script = [
'''
script = ['''
# reset build/test flags
set_env CARGO_MAKE_CARGO_BUILD_TEST_FLAGS ""
# determine features
@ -90,54 +87,36 @@ for arg in "${args_utils_list}"
end
args_utils = trim "${args_utils}"
set_env CARGO_MAKE_TASK_BUILD_UTILS_ARGS "${args_utils}"
'''
]
''']
### tasks
[tasks.default]
description = "## *DEFAULT* Build (debug-mode) and test project"
category = "[project]"
dependencies = [
"action-build-debug",
"test-terse",
]
dependencies = ["action-build-debug", "test-terse"]
##
[tasks.build]
description = "## Build (release-mode) project"
category = "[project]"
dependencies = [
"core::pre-build",
"action-build-release",
"core::post-build",
]
dependencies = ["core::pre-build", "action-build-release", "core::post-build"]
[tasks.build-debug]
description = "## Build (debug-mode) project"
category = "[project]"
dependencies = [
"action-build-debug",
]
dependencies = ["action-build-debug"]
[tasks.build-examples]
description = "## Build (release-mode) project example(s); usage: `cargo make (build-examples | examples) [EXAMPLE]...`"
category = "[project]"
dependencies = [
"core::pre-build",
"action-build-examples",
"core::post-build",
]
dependencies = ["core::pre-build", "action-build-examples", "core::post-build"]
[tasks.build-features]
description = "## Build (with features; release-mode) project; usage: `cargo make (build-features | features) FEATURE...`"
category = "[project]"
dependencies = [
"core::pre-build",
"action-build-features",
"core::post-build",
]
dependencies = ["core::pre-build", "action-build-features", "core::post-build"]
[tasks.build-release]
alias = "build"
@ -148,9 +127,7 @@ alias = "build-debug"
[tasks.example]
description = "hidden singular-form alias for 'examples'"
category = "[project]"
dependencies = [
"examples",
]
dependencies = ["examples"]
[tasks.examples]
alias = "build-examples"
@ -161,17 +138,12 @@ alias = "build-features"
[tasks.format]
description = "## Format code files (with `cargo fmt`; includes tests)"
category = "[project]"
dependencies = [
"action-format",
"action-format-tests",
]
dependencies = ["action-format", "action-format-tests"]
[tasks.help]
description = "## Display help"
category = "[project]"
dependencies = [
"action-display-help",
]
dependencies = ["action-display-help"]
[tasks.install]
description = "## Install project binary (to $HOME/.cargo/bin)"
@ -182,10 +154,7 @@ args = ["install", "--path", "."]
[tasks.lint]
description = "## Display lint report"
category = "[project]"
dependencies = [
"action-clippy",
"action-fmt_report",
]
dependencies = ["action-clippy", "action-fmt_report"]
[tasks.release]
alias = "build"
@ -193,48 +162,32 @@ alias = "build"
[tasks.test]
description = "## Run project tests"
category = "[project]"
dependencies = [
"core::pre-test",
"core::test",
"core::post-test",
]
dependencies = ["core::pre-test", "core::test", "core::post-test"]
[tasks.test-terse]
description = "## Run project tests (with terse/summary output)"
category = "[project]"
dependencies = [
"core::pre-test",
"action-test_quiet",
"core::post-test",
]
dependencies = ["core::pre-test", "action-test_quiet", "core::post-test"]
[tasks.test-util]
description = "## Test (individual) utilities; usage: `cargo make (test-util | test-uutil) [UTIL_NAME...]`"
category = "[project]"
dependencies = [
"action-test-utils",
]
dependencies = ["action-test-utils"]
[tasks.test-utils]
description = "hidden plural-form alias for 'test-util'"
category = "[project]"
dependencies = [
"test-util",
]
dependencies = ["test-util"]
[tasks.test-uutil]
description = "hidden alias for 'test-util'"
category = "[project]"
dependencies = [
"test-util",
]
dependencies = ["test-util"]
[tasks.test-uutils]
description = "hidden alias for 'test-util'"
category = "[project]"
dependencies = [
"test-util",
]
dependencies = ["test-util"]
[tasks.uninstall]
description = "## Remove project binary (from $HOME/.cargo/bin)"
@ -246,63 +199,66 @@ args = ["uninstall"]
description = "## Build (individual; release-mode) utilities; usage: `cargo make (util | uutil) [UTIL_NAME...]`"
category = "[project]"
dependencies = [
"core::pre-build",
"action-determine-utils",
"action-build-utils",
"core::post-build",
"core::pre-build",
"action-determine-utils",
"action-build-utils",
"core::post-build",
]
[tasks.utils]
description = "hidden plural-form alias for 'util'"
category = "[project]"
dependencies = [
"util",
]
dependencies = ["util"]
[tasks.uutil]
description = "hidden alias for 'util'"
category = "[project]"
dependencies = [
"util",
]
dependencies = ["util"]
[tasks.uutils]
description = "hidden plural-form alias for 'util'"
category = "[project]"
dependencies = [
"util",
]
dependencies = ["util"]
### actions
[tasks.action-build-release]
description = "`cargo build --release`"
command = "cargo"
args = ["build", "--release", "@@split(CARGO_MAKE_CARGO_BUILD_TEST_FLAGS, )" ]
args = ["build", "--release", "@@split(CARGO_MAKE_CARGO_BUILD_TEST_FLAGS, )"]
[tasks.action-build-debug]
description = "`cargo build`"
command = "cargo"
args = ["build", "@@split(CARGO_MAKE_CARGO_BUILD_TEST_FLAGS, )" ]
args = ["build", "@@split(CARGO_MAKE_CARGO_BUILD_TEST_FLAGS, )"]
[tasks.action-build-examples]
description = "`cargo build (--examples|(--example EXAMPLE)...)`"
command = "cargo"
args = ["build", "--release", "@@split(CARGO_MAKE_CARGO_BUILD_TEST_FLAGS, )", "${CARGO_MAKE_TASK_BUILD_EXAMPLES_ARGS}" ]
args = [
"build",
"--release",
"@@split(CARGO_MAKE_CARGO_BUILD_TEST_FLAGS, )",
"${CARGO_MAKE_TASK_BUILD_EXAMPLES_ARGS}",
]
[tasks.action-build-features]
description = "`cargo build --release --features FEATURES`"
command = "cargo"
args = ["build", "--release", "--no-default-features", "--features", "${CARGO_MAKE_TASK_BUILD_FEATURES_ARGS}" ]
args = [
"build",
"--release",
"--no-default-features",
"--features",
"${CARGO_MAKE_TASK_BUILD_FEATURES_ARGS}",
]
[tasks.action-build-utils]
description = "Build individual utilities"
dependencies = [
"action-determine-utils",
]
dependencies = ["action-determine-utils"]
command = "cargo"
# args = ["build", "@@remove-empty(CARGO_MAKE_TASK_BUILD_UTILS_ARGS)" ]
args = ["build", "--release", "@@split(CARGO_MAKE_TASK_BUILD_UTILS_ARGS, )" ]
args = ["build", "--release", "@@split(CARGO_MAKE_TASK_BUILD_UTILS_ARGS, )"]
[tasks.action-clippy]
description = "`cargo clippy` lint report"
@ -311,8 +267,7 @@ args = ["clippy", "@@split(CARGO_MAKE_CARGO_BUILD_TEST_FLAGS, )"]
[tasks.action-determine-utils]
script_runner = "@duckscript"
script = [
'''
script = ['''
package_options = get_env CARGO_MAKE_TASK_BUILD_UTILS_ARGS
if is_empty "${package_options}"
show_utils = get_env CARGO_MAKE_VAR_SHOW_UTILS
@ -335,13 +290,11 @@ if is_empty "${package_options}"
package_options = trim "${package_options}"
end_if
set_env CARGO_MAKE_TASK_BUILD_UTILS_ARGS "${package_options}"
'''
]
''']
[tasks.action-determine-tests]
script_runner = "@duckscript"
script = [
'''
script = ['''
test_files = glob_array tests/**/*.rs
for file in ${test_files}
file = replace "${file}" "\\" "/"
@ -354,8 +307,7 @@ for file in ${test_files}
end_if
end
set_env CARGO_MAKE_VAR_TESTS "${tests}"
'''
]
''']
[tasks.action-format]
description = "`cargo fmt`"
@ -364,9 +316,7 @@ args = ["fmt"]
[tasks.action-format-tests]
description = "`cargo fmt` tests"
dependencies = [
"action-determine-tests",
]
dependencies = ["action-determine-tests"]
command = "cargo"
args = ["fmt", "--", "@@split(CARGO_MAKE_VAR_TESTS, )"]
@ -381,16 +331,18 @@ args = ["fmt", "--", "--check"]
[tasks.action-spellcheck-codespell]
description = "`codespell` spellcheck repository"
command = "codespell" # (from `pip install codespell`)
args = [".", "--skip=*/.git,./target,./tests/fixtures", "--ignore-words-list=mut,od"]
args = [
".",
"--skip=*/.git,./target,./tests/fixtures",
"--ignore-words-list=mut,od",
]
[tasks.action-test-utils]
description = "Build individual utilities"
dependencies = [
"action-determine-utils",
]
dependencies = ["action-determine-utils"]
command = "cargo"
# args = ["build", "@@remove-empty(CARGO_MAKE_TASK_BUILD_UTILS_ARGS)" ]
args = ["test", "@@split(CARGO_MAKE_TASK_BUILD_UTILS_ARGS, )" ]
args = ["test", "@@split(CARGO_MAKE_TASK_BUILD_UTILS_ARGS, )"]
[tasks.action-test_quiet]
description = "Test (in `--quiet` mode)"
@ -399,8 +351,7 @@ args = ["test", "--quiet", "@@split(CARGO_MAKE_CARGO_BUILD_TEST_FLAGS, )"]
[tasks.action-display-help]
script_runner = "@duckscript"
script = [
'''
script = ['''
echo ""
echo "usage: `cargo make TARGET [ARGS...]`"
echo ""
@ -432,5 +383,4 @@ script = [
end_if
end
echo ""
'''
]
''']

460
README.md
View file

@ -1,114 +1,124 @@
<!-- markdownlint-disable MD033 MD041 MD002 -->
<!-- markdownlint-disable commands-show-output no-duplicate-heading -->
<!-- spell-checker:ignore markdownlint ; (options) DESTDIR UTILNAME manpages reimplementation -->
<div align="center">
![uutils logo](docs/src/logo.svg)
# uutils coreutils
[![Crates.io](https://img.shields.io/crates/v/coreutils.svg)](https://crates.io/crates/coreutils)
[![Discord](https://img.shields.io/badge/discord-join-7289DA.svg?logo=discord&longCache=true&style=flat)](https://discord.gg/wQVJbvJ)
[![License](http://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/uutils/coreutils/blob/main/LICENSE)
[![LOC](https://tokei.rs/b1/github/uutils/coreutils?category=code)](https://github.com/Aaronepower/tokei)
[![dependency status](https://deps.rs/repo/github/uutils/coreutils/status.svg)](https://deps.rs/repo/github/uutils/coreutils)
[![CodeCov](https://codecov.io/gh/uutils/coreutils/branch/master/graph/badge.svg)](https://codecov.io/gh/uutils/coreutils)
![MSRV](https://img.shields.io/badge/MSRV-1.64.0-brightgreen)
-----------------------------------------------
</div>
<!-- markdownlint-disable commands-show-output no-duplicate-heading -->
<!-- spell-checker:ignore markdownlint ; (options) DESTDIR RUNTEST UTILNAME -->
---
uutils is an attempt at writing universal (as in cross-platform) CLI
utilities in [Rust](http://www.rust-lang.org).
While all programs have been implemented, some options might be missing
or different behavior might be experienced.
uutils coreutils is a cross-platform reimplementation of the GNU coreutils in
[Rust](http://www.rust-lang.org). While all programs have been implemented, some
options might be missing or different behavior might be experienced.
To install it:
```
$ cargo install coreutils
$ ~/.cargo/bin/coreutils
```shell
cargo install coreutils
~/.cargo/bin/coreutils
```
## Why?
<!-- markdownlint-disable-next-line MD026 -->
uutils aims to work on as many platforms as possible, to be able to use the
same utils on Linux, Mac, Windows and other platforms. This ensures, for
example, that scripts can be easily transferred between platforms. Rust was
chosen not only because it is fast and safe, but is also excellent for
writing cross-platform code.
## Goals
uutils aims to be a drop-in replacement for the GNU utils. Differences with GNU
are treated as bugs.
uutils aims to work on as many platforms as possible, to be able to use the same
utils on Linux, Mac, Windows and other platforms. This ensures, for example,
that scripts can be easily transferred between platforms.
## Documentation
uutils has both user and developer documentation available:
- [User Manual](https://uutils.github.io/user/)
- [Developer Documentation](https://uutils.github.io/dev/coreutils/)
Both can also be generated locally, the instructions for that can be found in the
[coreutils docs](https://github.com/uutils/uutils.github.io) repository.
Both can also be generated locally, the instructions for that can be found in
the [coreutils docs](https://github.com/uutils/uutils.github.io) repository.
<!-- ANCHOR: build (this mark is needed for mdbook) -->
## Requirements
* Rust (`cargo`, `rustc`)
* GNU Make (optional)
- Rust (`cargo`, `rustc`)
- GNU Make (optional)
### Rust Version
uutils follows Rust's release channels and is tested against stable, beta and nightly.
The current Minimum Supported Rust Version (MSRV) is `1.64.0`.
uutils follows Rust's release channels and is tested against stable, beta and
nightly. The current Minimum Supported Rust Version (MSRV) is `1.64.0`.
## Building
There are currently two methods to build the uutils binaries: either Cargo
or GNU Make.
There are currently two methods to build the uutils binaries: either Cargo or
GNU Make.
> Building the full package, including all documentation, requires both Cargo
> and GNU Make on a Unix platform.
For either method, we first need to fetch the repository:
```bash
$ git clone https://github.com/uutils/coreutils
$ cd coreutils
```shell
git clone https://github.com/uutils/coreutils
cd coreutils
```
### Cargo
Building uutils using Cargo is easy because the process is the same as for
every other Rust program:
Building uutils using Cargo is easy because the process is the same as for every
other Rust program:
```bash
$ cargo build --release
```shell
cargo build --release
```
This command builds the most portable common core set of uutils into a multicall
(BusyBox-type) binary, named 'coreutils', on most Rust-supported platforms.
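The resulting multicall binary can then run any of the included utils directly.
As a quick check (assuming the default cargo target directory):
```shell
./target/release/coreutils ls -l
```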
Additional platform-specific uutils are often available. Building these
expanded sets of uutils for a platform (on that platform) is as simple as
specifying it as a feature:
Additional platform-specific uutils are often available. Building these expanded
sets of uutils for a platform (on that platform) is as simple as specifying it
as a feature:
```bash
$ cargo build --release --features macos
```shell
cargo build --release --features macos
# or ...
$ cargo build --release --features windows
cargo build --release --features windows
# or ...
$ cargo build --release --features unix
cargo build --release --features unix
```
If you don't want to build every utility available on your platform into the
final binary, you can also specify which ones you want to build manually.
For example:
final binary, you can also specify which ones you want to build manually. For
example:
```bash
$ cargo build --features "base32 cat echo rm" --no-default-features
```shell
cargo build --features "base32 cat echo rm" --no-default-features
```
If you don't want to build the multicall binary and would prefer to build
the utilities as individual binaries, that is also possible. Each utility
is contained in its own package within the main repository, named
"uu_UTILNAME". To build individual utilities, use cargo to build just the
specific packages (using the `--package` [aka `-p`] option). For example:
If you don't want to build the multicall binary and would prefer to build the
utilities as individual binaries, that is also possible. Each utility is
contained in its own package within the main repository, named "uu_UTILNAME". To
build individual utilities, use cargo to build just the specific packages (using
the `--package` [aka `-p`] option). For example:
```bash
$ cargo build -p uu_base32 -p uu_cat -p uu_echo -p uu_rm
```shell
cargo build -p uu_base32 -p uu_cat -p uu_echo -p uu_rm
```
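The individual binaries end up in cargo's target directory and can be run
directly; a quick sketch, assuming a default (debug) build:
```shell
./target/debug/cat Cargo.toml
./target/debug/base32 --help
```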
### GNU Make
@ -117,80 +127,88 @@ Building using `make` is a simple process as well.
To simply build all available utilities:
```bash
$ make
```shell
make
```
In release mode:
```shell
make PROFILE=release
```
To build all but a few of the available utilities:
```bash
$ make SKIP_UTILS='UTILITY_1 UTILITY_2'
```shell
make SKIP_UTILS='UTILITY_1 UTILITY_2'
```
To build only a few of the available utilities:
```bash
$ make UTILS='UTILITY_1 UTILITY_2'
```shell
make UTILS='UTILITY_1 UTILITY_2'
```
## Installation
### Cargo
### Install with Cargo
Likewise, installing can simply be done using:
```bash
$ cargo install --path .
```shell
cargo install --path . --locked
```
This command will install uutils into Cargo's *bin* folder (*e.g.* `$HOME/.cargo/bin`).
This command will install uutils into Cargo's _bin_ folder (_e.g._
`$HOME/.cargo/bin`).
This does not install files necessary for shell completion. For shell completion to work,
use `GNU Make` or see `Manually install shell completions`.
This does not install files necessary for shell completion or manpages. For
manpages or shell completion to work, use `GNU Make` or see
`Manually install shell completions`/`Manually install manpages`.
### GNU Make
### Install with GNU Make
To install all available utilities:
```bash
$ make install
```shell
make install
```
To install using `sudo`, the `-E` switch must be used:
```bash
$ sudo -E make install
```shell
sudo -E make install
```
To install all but a few of the available utilities:
```bash
$ make SKIP_UTILS='UTILITY_1 UTILITY_2' install
```shell
make SKIP_UTILS='UTILITY_1 UTILITY_2' install
```
To install only a few of the available utilities:
```bash
$ make UTILS='UTILITY_1 UTILITY_2' install
```shell
make UTILS='UTILITY_1 UTILITY_2' install
```
To install every program with a prefix (e.g. uu-echo uu-cat):
```bash
$ make PROG_PREFIX=PREFIX_GOES_HERE install
```shell
make PROG_PREFIX=PREFIX_GOES_HERE install
```
To install the multicall binary:
```bash
$ make MULTICALL=y install
```shell
make MULTICALL=y install
```
To set the install parent directory (the default is /usr/local):
```bash
```shell
# DESTDIR is also supported
$ make PREFIX=/my/path install
make PREFIX=/my/path install
```
Installing with `make` installs shell completions for all installed utilities
@ -199,302 +217,94 @@ be generated; See `Manually install shell completions`.
### Manually install shell completions
The `coreutils` binary can generate completions for the `bash`, `elvish`, `fish`, `powershell`
and `zsh` shells. It prints the result to stdout.
The `coreutils` binary can generate completions for the `bash`, `elvish`,
`fish`, `powershell` and `zsh` shells. It prints the result to stdout.
The syntax is:
```bash
```shell
cargo run completion <utility> <shell>
```
So, to install completions for `ls` on `bash` to `/usr/local/share/bash-completion/completions/ls`,
run:
So, to install completions for `ls` on `bash` to
`/usr/local/share/bash-completion/completions/ls`, run:
```shell
cargo run completion ls bash > /usr/local/share/bash-completion/completions/ls
```
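If `bash-completion` and `pkg-config` are installed, the completion directory
can usually be queried instead of hard-coding it (an optional convenience, not
something uutils requires):
```shell
pkg-config --variable=completionsdir bash-completion
```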
### Manually install manpages
To generate manpages, the syntax is:
```bash
cargo run completion ls bash > /usr/local/share/bash-completion/completions/ls
cargo run manpage <utility>
```
So, to install the manpage for `ls` to `/usr/local/share/man/man1/ls.1` run:
```bash
cargo run manpage ls > /usr/local/share/man/man1/ls.1
```
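To preview a generated page without installing it, most `man` implementations
accept a path directly (behavior may vary between implementations):
```shell
cargo run manpage ls > ls.1
man ./ls.1
```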
## Un-installation
Un-installation differs depending on how you have installed uutils. If you used
Cargo to install, use Cargo to uninstall. If you used GNU Make to install, use
Un-installation differs depending on how you have installed uutils. If you used
Cargo to install, use Cargo to uninstall. If you used GNU Make to install, use
Make to uninstall.
### Cargo
### Uninstall with Cargo
To uninstall uutils:
```bash
$ cargo uninstall uutils
```shell
cargo uninstall uutils
```
### GNU Make
### Uninstall with GNU Make
To uninstall all utilities:
```bash
$ make uninstall
```shell
make uninstall
```
To uninstall every program with a set prefix:
```bash
$ make PROG_PREFIX=PREFIX_GOES_HERE uninstall
```shell
make PROG_PREFIX=PREFIX_GOES_HERE uninstall
```
To uninstall the multicall binary:
```bash
$ make MULTICALL=y uninstall
```shell
make MULTICALL=y uninstall
```
To uninstall from a custom parent directory:
```bash
```shell
# DESTDIR is also supported
$ make PREFIX=/my/path uninstall
make PREFIX=/my/path uninstall
```
<!-- ANCHOR_END: build (this mark is needed for mdbook) -->
## Testing
Testing can be done using either Cargo or `make`.
### Cargo
Just like with building, we follow the standard procedure for testing using
Cargo:
```bash
$ cargo test
```
By default, `cargo test` only runs the common programs. To also run
platform-specific tests, run:
```bash
$ cargo test --features unix
```
If you would prefer to test a select few utilities:
```bash
$ cargo test --features "chmod mv tail" --no-default-features
```
If you also want to test the core utilities:
```bash
$ cargo test -p uucore -p coreutils
```
To debug:
```bash
$ gdb --args target/debug/coreutils ls
(gdb) b ls.rs:79
(gdb) run
```
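Where `lldb` is the native debugger (e.g. via `rust-lldb`, which ships with
rustup), a roughly equivalent session is (a sketch; exact commands may differ):
```shell
rust-lldb -- target/debug/coreutils ls
(lldb) b ls.rs:79
(lldb) run
```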
### GNU Make
To simply test all available utilities:
```bash
$ make test
```
To test all but a few of the available utilities:
```bash
$ make SKIP_UTILS='UTILITY_1 UTILITY_2' test
```
To test only a few of the available utilities:
```bash
$ make UTILS='UTILITY_1 UTILITY_2' test
```
To include tests for unimplemented behavior:
```bash
$ make UTILS='UTILITY_1 UTILITY_2' SPEC=y test
```
### Run Busybox Tests
This testing functionality is only available on *nix operating systems and
requires `make`.
To run busybox tests for all utilities for which busybox has tests
```bash
$ make busytest
```
To run busybox tests for a few of the available utilities
```bash
$ make UTILS='UTILITY_1 UTILITY_2' busytest
```
To pass an argument like `-v` to the busybox test runtime:
```bash
$ make UTILS='UTILITY_1 UTILITY_2' RUNTEST_ARGS='-v' busytest
```
### Comparing with GNU
## GNU test suite compatibility
Below is the evolution of how many GNU tests uutils passes. A more detailed
breakdown of the GNU test results of the main branch can be found
[in the user manual](https://uutils.github.io/user/test_coverage.html).
See <https://github.com/uutils/coreutils/issues/3336> for the main meta bugs
(many are missing).
![Evolution over time](https://github.com/uutils/coreutils-tracking/blob/main/gnu-results.png?raw=true)
To run locally:
```bash
$ bash util/build-gnu.sh
$ bash util/run-gnu-test.sh
# To run a single test:
$ bash util/run-gnu-test.sh tests/touch/not-owner.sh # for example
# To run several tests:
$ bash util/run-gnu-test.sh tests/touch/not-owner.sh tests/rm/no-give-up.sh # for example
# If this is a perl (.pl) test, to run in debug:
$ DEBUG=1 bash util/run-gnu-test.sh tests/misc/sm3sum.pl
```
Note that it relies on individual utilities (not the multicall binary).
### Improving the GNU compatibility
The Python script `./util/remaining-gnu-error.py` shows the list of failing tests in the CI.
To improve GNU compatibility, the following process is recommended (one full iteration is sketched after the list):
1. Identify a test (the smaller, the better) on a program that you understand or that is easy to understand. You can use the `./util/remaining-gnu-error.py` script to help with this decision.
1. Build both the GNU and Rust coreutils using: `bash util/build-gnu.sh`
1. Run the test with `bash util/run-gnu-test.sh <your test>`
1. Start to modify `<your test>` to understand what is wrong. Examples:
1. Add `set -v` to enable bash's verbose mode
1. Add `echo $?` where needed
1. When the variable `fail` is used in the test, `echo $fail` to see when the test started to fail
1. Dump the content of the output (e.g. `cat err`)
1. ...
1. Or, if the test is simple, extract the relevant information to create a new test case running both the GNU & Rust implementations
1. Start to modify the Rust implementation to match the expected behavior
1. Add a test to make sure that we don't regress (our test suite is super quick)
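Putting these steps together, one iteration of that loop might look like the
following sketch (the test path is only an example):
```shell
# build both the GNU and Rust coreutils
bash util/build-gnu.sh
# run the failing test and inspect its output
bash util/run-gnu-test.sh tests/touch/not-owner.sh
# after changing the Rust implementation, rebuild and re-run
bash util/build-gnu.sh
bash util/run-gnu-test.sh tests/touch/not-owner.sh
# and add a regression test on our side
cargo test --features touch --no-default-features
```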
## Contributing
To contribute to uutils, please see [CONTRIBUTING](CONTRIBUTING.md).
## Utilities
Please note that this table is not fully accurate:
* Some new options can be added / removed in the GNU implementation;
* Some error management might be missing;
* Some behaviors might be different.
See https://github.com/uutils/coreutils/issues/3336 for the main meta bugs
(many are missing).
| Done | WIP |
|-----------|-----------|
| arch | cp |
| base32 | date |
| base64 | dd |
| basename | df |
| basenc | expr |
| cat | install |
| chcon | ls |
| chgrp | more |
| chmod | numfmt |
| chown | od (`--strings` and 128-bit data types missing) |
| chroot | pr |
| cksum | printf |
| comm | sort |
| csplit | split |
| cut | tac |
| dircolors | test |
| dirname | dir |
| du | vdir |
| echo | stty |
| env | |
| expand | |
| factor | |
| false | |
| fmt | |
| fold | |
| groups | |
| hashsum | |
| head | |
| hostid | |
| hostname | |
| id | |
| join | |
| kill | |
| link | |
| ln | |
| logname | |
| ~~md5sum~~ (replaced by [hashsum](https://github.com/uutils/coreutils/blob/main/src/uu/hashsum/src/hashsum.rs)) | |
| ~~sha1sum~~ (replaced by [hashsum](https://github.com/uutils/coreutils/blob/main/src/uu/hashsum/src/hashsum.rs)) | |
| ~~sha224sum~~ (replaced by [hashsum](https://github.com/uutils/coreutils/blob/main/src/uu/hashsum/src/hashsum.rs)) | |
| ~~sha256sum~~ (replaced by [hashsum](https://github.com/uutils/coreutils/blob/main/src/uu/hashsum/src/hashsum.rs)) | |
| ~~sha384sum~~ (replaced by [hashsum](https://github.com/uutils/coreutils/blob/main/src/uu/hashsum/src/hashsum.rs)) | |
| ~~sha512sum~~ (replaced by [hashsum](https://github.com/uutils/coreutils/blob/main/src/uu/hashsum/src/hashsum.rs)) | |
| mkdir | |
| mkfifo | |
| mknod | |
| mktemp | |
| mv | |
| nice | |
| nl | |
| nohup | |
| nproc | |
| paste | |
| pathchk | |
| pinky | |
| printenv | |
| ptx | |
| pwd | |
| readlink | |
| realpath | |
| relpath | |
| rm | |
| rmdir | |
| runcon | |
| seq | |
| shred | |
| shuf | |
| sleep | |
| stat | |
| stdbuf | |
| sum | |
| sync | |
| tail | |
| tee | |
| timeout | |
| touch | |
| tr | |
| true | |
| truncate | |
| tsort | |
| tty | |
| uname | |
| unexpand | |
| uniq | |
| unlink | |
| uptime | |
| users | |
| wc | |
| who | |
| whoami | |
| yes | |
## License
uutils is licensed under the MIT License. See the `LICENSE` file for details.

View file

@ -20,6 +20,8 @@ pub fn main() {
for (key, val) in env::vars() {
if val == "1" && key.starts_with(ENV_FEATURE_PREFIX) {
let krate = key[ENV_FEATURE_PREFIX.len()..].to_lowercase();
// Allow this as we have a bunch of info in the comments
#[allow(clippy::match_same_arms)]
match krate.as_ref() {
"default" | "macos" | "unix" | "windows" | "selinux" | "zip" => continue, // common/standard feature names
"nightly" | "test_unimplemented" => continue, // crate-local custom features

View file

@ -11,7 +11,7 @@ unmaintained = "warn"
yanked = "warn"
notice = "warn"
ignore = [
#"RUSTSEC-0000-0000",
#"RUSTSEC-0000-0000",
]
# This section is considered when running `cargo deny check licenses`
@ -20,15 +20,15 @@ ignore = [
[licenses]
unlicensed = "deny"
allow = [
"MIT",
"Apache-2.0",
"ISC",
"BSD-2-Clause",
"BSD-2-Clause-FreeBSD",
"BSD-3-Clause",
"CC0-1.0",
"MPL-2.0", # XXX considered copyleft?
"Unicode-DFS-2016",
"MIT",
"Apache-2.0",
"ISC",
"BSD-2-Clause",
"BSD-2-Clause-FreeBSD",
"BSD-3-Clause",
"CC0-1.0",
"MPL-2.0", # XXX considered copyleft?
"Unicode-DFS-2016",
]
copyleft = "deny"
allow-osi-fsf-free = "neither"
@ -59,16 +59,34 @@ highlight = "all"
# introduces it.
# spell-checker: disable
skip = [
# is-terminal
{ name = "hermit-abi", version = "0.3.1" },
# is-terminal
{ name = "rustix", version = "0.36.8" },
# is-terminal (via rustix)
{ name = "io-lifetimes", version = "1.0.5" },
# is-terminal
{ name = "linux-raw-sys", version = "0.1.4" },
# is-terminal
{ name = "windows-sys", version = "0.45.0" },
# is-terminal
{ name = "hermit-abi", version = "0.3.1" },
# procfs
{ name = "rustix", version = "0.36.14" },
# rustix
{ name = "linux-raw-sys", version = "0.1.4" },
# various crates
{ name = "windows-sys", version = "0.45.0" },
# windows-sys
{ name = "windows-targets", version = "0.42.2" },
# windows-targets
{ name = "windows_aarch64_gnullvm", version = "0.42.2" },
# windows-targets
{ name = "windows_aarch64_msvc", version = "0.42.2" },
# windows-targets
{ name = "windows_i686_gnu", version = "0.42.2" },
# windows-targets
{ name = "windows_i686_msvc", version = "0.42.2" },
# windows-targets
{ name = "windows_x86_64_gnu", version = "0.42.2" },
# windows-targets
{ name = "windows_x86_64_gnullvm", version = "0.42.2" },
# windows-targets
{ name = "windows_x86_64_msvc", version = "0.42.2" },
# tempfile
{ name = "redox_syscall", version = "0.3.5" },
# cpp_macros
{ name = "aho-corasick", version = "0.7.19" },
]
# spell-checker: enable

View file

@ -10,4 +10,4 @@ git-repository-url = "https://github.com/rust-lang/cargo/tree/master/src/doc/src
[preprocessor.toc]
command = "mdbook-toc"
renderer = ["html"]
renderer = ["html"]

View file

@ -1,3 +1,3 @@
# Build from source
{{#include ../../README.md:build }}
{{#include ../../README.md:build }}

View file

@ -1 +1,3 @@
{{ #include ../../CONTRIBUTING.md }}
<!-- markdownlint-disable MD041 -->
{{ #include ../../CONTRIBUTING.md }}

View file

@ -5,6 +5,21 @@ features that are not supported by GNU coreutils. We take care not to introduce
features that are incompatible with the GNU coreutils. Below is a list of uutils
extensions.
## General
GNU coreutils provides two ways to define short options taking an argument:
```
$ ls -w 80
$ ls -w80
```
We support a third way:
```
$ ls -w=80
```
## `env`
`env` has an additional `-f`/`--file` flag that can parse `.env` files and set
@ -43,3 +58,10 @@ therefore welcomed.
`cut` can separate fields by whitespace (Space and Tab) with the `-w` flag. This
feature is adopted from [FreeBSD](https://www.freebsd.org/cgi/man.cgi?cut).
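A small sketch of this extension (field numbering follows the usual `cut`
conventions):
```shell
# should print "beta": spaces and tabs both separate the fields
printf 'alpha   beta\tgamma\n' | cut -w -f 2
```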
## `fmt`
`fmt` has additional flags for prefixes: `-P/--skip-prefix`, `-x/--exact-prefix`, and
`-X/--exact-skip-prefix`. With `-m/--preserve-headers`, an attempt is made to detect and preserve
mail headers in the input. `-q/--quick` breaks lines more quickly, and
`-T/--tab-width` defines the number of spaces representing a tab when
determining the line length.
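A rough example combining two of these flags (`notes.txt` is only a placeholder
input file):
```shell
# reformat quickly, counting a tab as 4 columns for the line-length limit
fmt -q -T 4 notes.txt
```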

View file

@ -1,5 +1,9 @@
<!-- markdownlint-disable MD041 -->
{{#include logo.svg}}
<!-- markdownlint-disable MD033 -->
<style>
/* Make the logo a bit bigger and center */
#logo {

View file

@ -11,9 +11,10 @@ You can also [build uutils from source](/build.md).
<!-- toc -->
## Cargo
[![crates.io package](https://repology.org/badge/version-for-repo/crates_io/uutils-coreutils.svg)](https://repology.org/project/uutils-coreutils/versions)
```bash
```shell
# Linux
cargo install coreutils --features unix
# MacOs
@ -23,11 +24,12 @@ cargo install coreutils --features windows
```
## Linux
### Alpine
[![Alpine Linux Edge package](https://repology.org/badge/version-for-repo/alpine_edge/uutils-coreutils.svg)](https://pkgs.alpinelinux.org/packages?name=uutils-coreutils)
```bash
```shell
apk update uutils-coreutils
```
@ -37,7 +39,7 @@ apk update uutils-coreutils
[![Arch package](https://repology.org/badge/version-for-repo/arch/uutils-coreutils.svg)](https://archlinux.org/packages/community/x86_64/uutils-coreutils/)
```bash
```shell
pacman -S uutils-coreutils
```
@ -45,7 +47,7 @@ pacman -S uutils-coreutils
[![Debian package](https://repology.org/badge/version-for-repo/debian_unstable/uutils-coreutils.svg)](https://packages.debian.org/sid/source/rust-coreutils)
```bash
```shell
apt install rust-coreutils
# To use it:
export PATH=/usr/lib/cargo/bin/coreutils:$PATH
@ -57,32 +59,35 @@ export PATH=/usr/lib/cargo/bin/coreutils:$PATH
[![Gentoo package](https://repology.org/badge/version-for-repo/gentoo/uutils-coreutils.svg)](https://packages.gentoo.org/packages/sys-apps/uutils-coreutils)
```bash
```shell
emerge -pv sys-apps/uutils-coreutils
```
### Manjaro
![Manjaro Stable package](https://repology.org/badge/version-for-repo/manjaro_stable/uutils-coreutils.svg)
[![Manjaro Testing package](https://repology.org/badge/version-for-repo/manjaro_testing/uutils-coreutils.svg)](https://repology.org/project/uutils-coreutils/versions)
[![Manjaro Unstable package](https://repology.org/badge/version-for-repo/manjaro_unstable/uutils-coreutils.svg)](https://repology.org/project/uutils-coreutils/versions)
```bash
```shell
pacman -S uutils-coreutils
# or
pamac install uutils-coreutils
```
### NixOS
[![nixpkgs unstable package](https://repology.org/badge/version-for-repo/nix_unstable/uutils-coreutils.svg)](https://repology.org/project/uutils-coreutils/versions)
```bash
```shell
nix-env -iA nixos.uutils-coreutils
```
### OpenMandriva Lx
[![openmandriva cooker package](https://repology.org/badge/version-for-repo/openmandriva_cooker/uutils-coreutils.svg)](https://repology.org/project/uutils-coreutils/versions)
```bash
```shell
dnf install uutils-coreutils
```
@ -90,7 +95,7 @@ dnf install uutils-coreutils
[![Ubuntu package](https://repology.org/badge/version-for-repo/ubuntu_23_04/uutils-coreutils.svg)](https://packages.ubuntu.com/source/lunar/rust-coreutils)
```bash
```shell
apt install rust-coreutils
# To use it:
export PATH=/usr/lib/cargo/bin/coreutils:$PATH
@ -101,13 +106,15 @@ export PATH=/usr/lib/cargo/bin/coreutils:$PATH
## MacOS
### Homebrew
[![Homebrew package](https://repology.org/badge/version-for-repo/homebrew/uutils-coreutils.svg)](https://formulae.brew.sh/formula/uutils-coreutils)
```bash
```shell
brew install uutils-coreutils
```
### MacPorts
[![MacPorts package](https://repology.org/badge/version-for-repo/macports/uutils-coreutils.svg)](https://ports.macports.org/port/coreutils-uutils/)
```
@ -115,18 +122,20 @@ port install coreutils-uutils
```
## FreeBSD
[![FreeBSD port](https://repology.org/badge/version-for-repo/freebsd/uutils-coreutils.svg)](https://repology.org/project/uutils-coreutils/versions)
```sh
pkg install uutils
pkg install rust-coreutils
```
## Windows
### Scoop
[![Scoop package](https://repology.org/badge/version-for-repo/scoop/uutils-coreutils.svg)](https://scoop.sh/#/apps?q=uutils-coreutils&s=0&d=1&o=true)
```bash
```shell
scoop install uutils-coreutils
```
@ -136,4 +145,6 @@ scoop install uutils-coreutils
[![AUR package](https://repology.org/badge/version-for-repo/aur/coreutils-hybrid.svg)](https://aur.archlinux.org/packages/coreutils-hybrid)
A GNU coreutils / uutils coreutils hybrid package. Uses stable uutils programs mixed with GNU counterparts if uutils counterpart is unfinished or buggy.
A GNU coreutils / uutils coreutils hybrid package. Uses stable uutils
programs mixed with their GNU counterparts where the uutils counterpart is
unfinished or buggy.

View file

@ -1,4 +1,5 @@
# Multi-call binary
# Multi-call binary
uutils includes a multi-call binary from which the utils can be invoked. This
reduces the binary size of the binary and can be useful for portability.
@ -12,6 +13,7 @@ coreutils [util] [util options]
The `--help` flag will print a list of available utils.
## Example
```
```shell
coreutils ls -l
```
```

86
docs/src/packaging.md Normal file
View file

@ -0,0 +1,86 @@
# Packaging coreutils
<!-- spell-checker:ignore debuginfo manpages backtraces -->
> **Note**: This page is intended as a guide for package maintainers who want to
> package the uutils coreutils. Normal users probably do not need to read this. If you
> just want to install the coreutils, look at the
> [installation](installation.md) instructions.
The maintainers of this project do not have the capacity to maintain packages
for every distribution and package manager out there. Therefore, we encourage
other people to package the uutils coreutils for their preferred distributions.
You do not need to ask permission for this and you can do this however you want
as long as you comply with the license. However, we do like to hear and
advertise where the uutils coreutils are available, so please do let us know!
## License
The uutils coreutils are licensed under the MIT license. See the
[LICENSE](https://github.com/uutils/coreutils/blob/main/LICENSE) for the full
license text. Make sure to add attribution and the license text to the package
to comply with the license.
## Package
We recommend naming the package `uutils-coreutils`. Just `uutils` is incorrect,
because that is the name of the organization, which also includes other
projects.
## Selecting the utils to include
Not all utils are available on all platforms. To get the full set of utils for a
particular platform, you must enable the feature flag with the platform name.
For example, use `--features unix` on a Unix-like system and `--features windows`
on Windows.
For a more fine-grained selection, you can enable just the features with the
name of the utils you want to include and disable the default feature set.
Additionally, support for SELinux must be explicitly enabled with the
`feat_selinux` feature.
We recommend including all the utilities that a platform supports.
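For instance, a package build might use the full platform set or a hand-picked
subset (a sketch; adjust the feature list to the target platform):
```shell
# full set for a Unix-like platform, with SELinux support enabled
cargo build --release --features unix,feat_selinux
# hand-picked subset of utils only
cargo build --release --no-default-features --features "base32 cat echo rm"
```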
## Compilation parameters
There are several compile-time flags that allow you to tune the coreutils to
your particular needs. Some distributions, for example, might choose to
minimize the binary size as much as possible.
This can be achieved by customizing the configuration passed to cargo. You can
view the full documentation in the
[cargo documentation](https://doc.rust-lang.org/cargo/reference/profiles.html).
We provide three release profiles out of the box, though you may want to tweak
them:
- `release`: This is the standard Rust release profile, but with link-time
optimization enabled. It is a balance between compile time, performance and a
reasonable amount of debug info. The main drawback of this profile is that the
binary is quite large (roughly 2x the GNU coreutils).
- `release-fast`: Every setting is tuned for the best performance, at the cost
of compile time. This binary is still quite large.
- `release-small`: Generates the smallest binary possible. This strips _all_
debug info from the binary and leads to worse backtraces. Performance is close
to that of the `release-fast` profile, just with all debug info stripped.
For the precise definition of these profiles, you can look at the root
[`Cargo.toml`](https://github.com/uutils/coreutils/blob/main/Cargo.toml).
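Cargo can build with one of these profiles directly, for example (assuming the
profile names above, as defined in the root `Cargo.toml`):
```shell
cargo build --profile release-small --features unix
```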
The profiles above are just examples. We encourage package maintainers to decide
for themselves what the best parameters for their distribution are. For example,
a distribution focused on embedded systems would probably choose
`release-small`, but another distribution focused on security might enable
bounds checks.
It is also possible to split the debuginfo into a separate package. See the
[`split-debuginfo`](https://doc.rust-lang.org/cargo/reference/profiles.html#split-debuginfo)
option in `cargo`.
## Additional artifacts
This project supports automatically generating manpages and shell completion
files which you may want to include in the package. See the page on
[building from source](build.md) for how to generate these.
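Concretely, the commands from the build documentation can be scripted during
packaging, roughly along these lines (destination paths depend on the
distribution's conventions):
```shell
cargo run --release manpage ls > "$DESTDIR/usr/share/man/man1/ls.1"
cargo run --release completion ls bash > "$DESTDIR/usr/share/bash-completion/completions/ls"
```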

View file

@ -1,5 +1,7 @@
# GNU Test Coverage
<!-- markdownlint-disable MD033 -->
uutils is actively tested against the GNU coreutils test suite. The results
below are automatically updated every day.

3
fuzz/.gitignore vendored Normal file
View file

@ -0,0 +1,3 @@
target
corpus
artifacts

45
fuzz/Cargo.toml Normal file
View file

@ -0,0 +1,45 @@
[package]
name = "uucore-fuzz"
version = "0.0.0"
publish = false
edition = "2021"
[package.metadata]
cargo-fuzz = true
[dependencies]
libfuzzer-sys = "0.4"
[dependencies.uucore]
path = "../src/uucore/"
[dependencies.uu_date]
path = "../src/uu/date/"
# Prevent this from interfering with workspaces
[workspace]
members = ["."]
[[bin]]
name = "fuzz_date"
path = "fuzz_targets/fuzz_date.rs"
test = false
doc = false
[[bin]]
name = "fuzz_parse_glob"
path = "fuzz_targets/fuzz_parse_glob.rs"
test = false
doc = false
[[bin]]
name = "fuzz_parse_size"
path = "fuzz_targets/fuzz_parse_size.rs"
test = false
doc = false
[[bin]]
name = "fuzz_parse_time"
path = "fuzz_targets/fuzz_parse_time.rs"
test = false
doc = false

View file

@ -0,0 +1,14 @@
#![no_main]
use libfuzzer_sys::fuzz_target;
use std::ffi::OsString;
use uu_date::uumain;
fuzz_target!(|data: &[u8]| {
let delim: u8 = 0; // Null byte
let args = data
.split(|b| *b == delim)
.filter_map(|e| std::str::from_utf8(e).ok())
.map(|e| OsString::from(e));
uumain(args);
});

View file

@ -0,0 +1,10 @@
#![no_main]
use libfuzzer_sys::fuzz_target;
use uucore::parse_glob;
fuzz_target!(|data: &[u8]| {
if let Ok(s) = std::str::from_utf8(data) {
_ = parse_glob::from_str(s)
}
});

View file

@ -0,0 +1,10 @@
#![no_main]
use libfuzzer_sys::fuzz_target;
use uucore::parse_size::parse_size;
fuzz_target!(|data: &[u8]| {
if let Ok(s) = std::str::from_utf8(data) {
_ = parse_size(s);
}
});

View file

@ -0,0 +1,10 @@
#![no_main]
use libfuzzer_sys::fuzz_target;
use uucore::parse_time;
fuzz_target!(|data: &[u8]| {
if let Ok(s) = std::str::from_utf8(data) {
_ = parse_time::from_str(s);
}
});

View file

@ -5,6 +5,8 @@
// For the full copyright and license information, please view the LICENSE
// file that was distributed with this source code.
// spell-checker:ignore manpages mangen
use clap::{Arg, Command};
use clap_complete::Shell;
use std::cmp;
@ -41,10 +43,11 @@ fn binary_path(args: &mut impl Iterator<Item = OsString>) -> PathBuf {
}
}
fn name(binary_path: &Path) -> &str {
binary_path.file_stem().unwrap().to_str().unwrap()
fn name(binary_path: &Path) -> Option<&str> {
binary_path.file_stem()?.to_str()
}
#[allow(clippy::cognitive_complexity)]
fn main() {
uucore::panic::mute_sigpipe_panic();
@ -52,7 +55,10 @@ fn main() {
let mut args = uucore::args_os();
let binary = binary_path(&mut args);
let binary_as_util = name(&binary);
let binary_as_util = name(&binary).unwrap_or_else(|| {
usage(&utils, "<unknown binary name>");
process::exit(0);
});
// binary name equals util name?
if let Some(&(uumain, _)) = utils.get(binary_as_util) {
@ -90,6 +96,10 @@ fn main() {
gen_completions(args, &utils);
}
if util == "manpage" {
gen_manpage(args, &utils);
}
match utils.get(util) {
Some(&(uumain, _)) => {
process::exit(uumain((vec![util_os].into_iter()).chain(args)));
@ -167,6 +177,39 @@ fn gen_completions<T: uucore::Args>(
process::exit(0);
}
/// Generate the manpage for the utility in the first parameter
fn gen_manpage<T: uucore::Args>(
args: impl Iterator<Item = OsString>,
util_map: &UtilityMap<T>,
) -> ! {
let all_utilities: Vec<_> = std::iter::once("coreutils")
.chain(util_map.keys().copied())
.collect();
let matches = Command::new("manpage")
.about("Prints manpage to stdout")
.arg(
Arg::new("utility")
.value_parser(clap::builder::PossibleValuesParser::new(all_utilities))
.required(true),
)
.get_matches_from(std::iter::once(OsString::from("manpage")).chain(args));
let utility = matches.get_one::<String>("utility").unwrap();
let command = if utility == "coreutils" {
gen_coreutils_app(util_map)
} else {
util_map.get(utility).unwrap().1()
};
let man = clap_mangen::Man::new(command);
man.render(&mut io::stdout())
.expect("Man page generation failed");
io::stdout().flush().unwrap();
process::exit(0);
}
fn gen_coreutils_app<T: uucore::Args>(util_map: &UtilityMap<T>) -> Command {
let mut command = Command::new("coreutils");
for (_, (_, sub_app)) in util_map {

View file

@ -2,7 +2,7 @@
//
// For the full copyright and license information, please view the LICENSE
// file that was distributed with this source code.
// spell-checker:ignore tldr
// spell-checker:ignore tldr uuhelp
use clap::Command;
use std::collections::HashMap;
@ -133,7 +133,7 @@ impl<'a, 'b> MDWriter<'a, 'b> {
write!(self.w, "# {}\n\n", self.name)?;
self.additional()?;
self.usage()?;
self.description()?;
self.about()?;
self.options()?;
self.after_help()?;
self.examples()
@ -177,54 +177,34 @@ impl<'a, 'b> MDWriter<'a, 'b> {
}
fn usage(&mut self) -> io::Result<()> {
writeln!(self.w, "\n```")?;
let mut usage: String = self
.command
.render_usage()
.to_string()
.lines()
.map(|l| l.strip_prefix("Usage:").unwrap_or(l))
.map(|l| l.trim())
.filter(|l| !l.is_empty())
.collect::<Vec<_>>()
.join("\n");
usage = usage
.to_string()
.replace(uucore::execution_phrase(), self.name);
writeln!(self.w, "{}", usage)?;
writeln!(self.w, "```")
if let Some(markdown) = &self.markdown {
let usage = uuhelp_parser::parse_usage(markdown);
let usage = usage.replace("{}", self.name);
writeln!(self.w, "\n```")?;
writeln!(self.w, "{}", usage)?;
writeln!(self.w, "```")
} else {
Ok(())
}
}
fn description(&mut self) -> io::Result<()> {
if let Some(after_help) = self.markdown_section("about") {
return writeln!(self.w, "\n\n{}", after_help);
}
if let Some(about) = self
.command
.get_long_about()
.or_else(|| self.command.get_about())
{
writeln!(self.w, "{}", about)
fn about(&mut self) -> io::Result<()> {
if let Some(markdown) = &self.markdown {
writeln!(self.w, "{}", uuhelp_parser::parse_about(markdown))
} else {
Ok(())
}
}
fn after_help(&mut self) -> io::Result<()> {
if let Some(after_help) = self.markdown_section("after help") {
return writeln!(self.w, "\n\n{}", after_help);
if let Some(markdown) = &self.markdown {
if let Some(after_help) = uuhelp_parser::parse_section("after help", markdown) {
return writeln!(self.w, "\n\n{after_help}");
}
}
if let Some(after_help) = self
.command
.get_after_long_help()
.or_else(|| self.command.get_after_help())
{
writeln!(self.w, "\n\n{}", after_help)
} else {
Ok(())
}
Ok(())
}
fn examples(&mut self) -> io::Result<()> {
@ -236,6 +216,10 @@ impl<'a, 'b> MDWriter<'a, 'b> {
} else if let Some(f) = get_zip_content(zip, &format!("pages/linux/{}.md", self.name)) {
f
} else {
println!(
"Warning: Could not find tldr examples for page '{}'",
self.name
);
return Ok(());
};
@ -274,10 +258,10 @@ impl<'a, 'b> MDWriter<'a, 'b> {
write!(self.w, "<dt>")?;
let mut first = true;
for l in arg.get_long_and_visible_aliases().unwrap_or_default() {
if !first {
write!(self.w, ", ")?;
} else {
if first {
first = false;
} else {
write!(self.w, ", ")?;
}
write!(self.w, "<code>")?;
write!(self.w, "--{}", l)?;
@ -295,10 +279,10 @@ impl<'a, 'b> MDWriter<'a, 'b> {
write!(self.w, "</code>")?;
}
for s in arg.get_short_and_visible_aliases().unwrap_or_default() {
if !first {
write!(self.w, ", ")?;
} else {
if first {
first = false;
} else {
write!(self.w, ", ")?;
}
write!(self.w, "<code>")?;
write!(self.w, "-{}", s)?;
@ -327,32 +311,6 @@ impl<'a, 'b> MDWriter<'a, 'b> {
}
writeln!(self.w, "</dl>\n")
}
fn markdown_section(&self, section: &str) -> Option<String> {
let md = self.markdown.as_ref()?;
let section = section.to_lowercase();
fn is_section_header(line: &str, section: &str) -> bool {
line.strip_prefix("##")
.map_or(false, |l| l.trim().to_lowercase() == section)
}
let result = md
.lines()
.skip_while(|&l| !is_section_header(l, &section))
.skip(1)
.take_while(|l| !l.starts_with("##"))
.collect::<Vec<_>>()
.join("\n")
.trim()
.to_string();
if !result.is_empty() {
Some(result)
} else {
None
}
}
}
fn get_zip_content(archive: &mut ZipArchive<impl Read + Seek>, name: &str) -> Option<String> {

View file

@ -1,6 +1,6 @@
[package]
name = "uu_arch"
version = "0.0.17"
version = "0.0.19"
authors = ["uutils developers"]
license = "MIT"
description = "arch ~ (uutils) display machine architecture"
@ -15,9 +15,9 @@ edition = "2021"
path = "src/arch.rs"
[dependencies]
platform-info = { workspace=true }
clap = { workspace=true }
uucore = { workspace=true }
platform-info = { workspace = true }
clap = { workspace = true }
uucore = { workspace = true }
[[bin]]
name = "arch"

View file

@ -4,11 +4,8 @@
arch
```
Display machine architecture
## After Help
Determine architecture name for current machine.

View file

@ -9,7 +9,7 @@
use platform_info::*;
use clap::{crate_version, Command};
use uucore::error::{FromIo, UResult};
use uucore::error::{UResult, USimpleError};
use uucore::{help_about, help_section};
static ABOUT: &str = help_about!("arch.md");
@ -19,8 +19,9 @@ static SUMMARY: &str = help_section!("after help", "arch.md");
pub fn uumain(args: impl uucore::Args) -> UResult<()> {
uu_app().try_get_matches_from(args)?;
let uts = PlatformInfo::new().map_err_context(|| "cannot get system name".to_string())?;
println!("{}", uts.machine().trim());
let uts = PlatformInfo::new().map_err(|_e| USimpleError::new(1, "cannot get system name"))?;
println!("{}", uts.machine().to_string_lossy().trim());
Ok(())
}

View file

@ -1,6 +1,6 @@
[package]
name = "uu_base32"
version = "0.0.17"
version = "0.0.19"
authors = ["uutils developers"]
license = "MIT"
description = "base32 ~ (uutils) decode/encode input (base32-encoding)"
@ -15,8 +15,8 @@ edition = "2021"
path = "src/base32.rs"
[dependencies]
clap = { workspace=true }
uucore = { workspace=true, features = ["encoding"] }
clap = { workspace = true }
uucore = { workspace = true, features = ["encoding"] }
[[bin]]
name = "base32"

View file

@ -7,8 +7,8 @@ base32 [OPTION]... [FILE]
encode/decode data and print to standard output
With no FILE, or when FILE is -, read standard input.
The data are encoded as described for the base32 alphabet in RFC
4648. When decoding, the input may contain newlines in addition
The data are encoded as described for the base32 alphabet in RFC 4648.
When decoding, the input may contain newlines in addition
to the bytes of the formal base32 alphabet. Use --ignore-garbage
to attempt to recover from any other non-alphabet bytes in the
encoded stream.

View file

@ -160,18 +160,7 @@ pub fn handle_input<R: Read>(
data = data.line_wrap(wrap);
}
if !decode {
match data.encode() {
Ok(s) => {
wrap_print(&data, &s);
Ok(())
}
Err(_) => Err(USimpleError::new(
1,
"error: invalid input (length must be multiple of 4 characters)",
)),
}
} else {
if decode {
match data.decode() {
Ok(s) => {
// Silent the warning as we want to the error message
@ -184,5 +173,16 @@ pub fn handle_input<R: Read>(
}
Err(_) => Err(USimpleError::new(1, "error: invalid input")),
}
} else {
match data.encode() {
Ok(s) => {
wrap_print(&data, &s);
Ok(())
}
Err(_) => Err(USimpleError::new(
1,
"error: invalid input (length must be multiple of 4 characters)",
)),
}
}
}

View file

@ -1,6 +1,6 @@
[package]
name = "uu_base64"
version = "0.0.17"
version = "0.0.19"
authors = ["uutils developers"]
license = "MIT"
description = "base64 ~ (uutils) decode/encode input (base64-encoding)"
@ -15,8 +15,8 @@ edition = "2021"
path = "src/base64.rs"
[dependencies]
uucore = { workspace=true, features = ["encoding"] }
uu_base32 = { workspace=true }
uucore = { workspace = true, features = ["encoding"] }
uu_base32 = { workspace = true }
[[bin]]
name = "base64"

View file

@ -7,8 +7,8 @@ base64 [OPTION]... [FILE]
encode/decode data and print to standard output
With no FILE, or when FILE is -, read standard input.
The data are encoded as described for the base64 alphabet in RFC
3548. When decoding, the input may contain newlines in addition
The data are encoded as described for the base64 alphabet in RFC 3548.
When decoding, the input may contain newlines in addition
to the bytes of the formal base64 alphabet. Use --ignore-garbage
to attempt to recover from any other non-alphabet bytes in the
encoded stream.

View file

@ -1,6 +1,6 @@
[package]
name = "uu_basename"
version = "0.0.17"
version = "0.0.19"
authors = ["uutils developers"]
license = "MIT"
description = "basename ~ (uutils) display PATHNAME with leading directory components removed"
@ -15,8 +15,8 @@ edition = "2021"
path = "src/basename.rs"
[dependencies]
clap = { workspace=true }
uucore = { workspace=true }
clap = { workspace = true }
uucore = { workspace = true }
[[bin]]
name = "basename"

View file

@ -0,0 +1,9 @@
# basename
```
basename NAME [SUFFIX]
basename OPTION... NAME...
```
Print NAME with any leading directory components removed
If specified, also remove a trailing SUFFIX

View file

@ -11,13 +11,11 @@ use clap::{crate_version, Arg, ArgAction, Command};
use std::path::{is_separator, PathBuf};
use uucore::display::Quotable;
use uucore::error::{UResult, UUsageError};
use uucore::format_usage;
use uucore::{format_usage, help_about, help_usage};
static ABOUT: &str = r#"Print NAME with any leading directory components removed
If specified, also remove a trailing SUFFIX"#;
static ABOUT: &str = help_about!("basename.md");
const USAGE: &str = "{} NAME [SUFFIX]
{} OPTION... NAME...";
const USAGE: &str = help_usage!("basename.md");
pub mod options {
pub static MULTIPLE: &str = "multiple";

View file

@ -1,6 +1,6 @@
[package]
name = "uu_basenc"
version = "0.0.17"
version = "0.0.19"
authors = ["uutils developers"]
license = "MIT"
description = "basenc ~ (uutils) decode/encode input"
@ -15,9 +15,9 @@ edition = "2021"
path = "src/basenc.rs"
[dependencies]
clap = { workspace=true }
uucore = { workspace=true, features = ["encoding"] }
uu_base32 = { workspace=true }
clap = { workspace = true }
uucore = { workspace = true, features = ["encoding"] }
uu_base32 = { workspace = true }
[[bin]]
name = "basenc"

View file

@ -24,15 +24,33 @@ use uucore::{help_about, help_usage};
const ABOUT: &str = help_about!("basenc.md");
const USAGE: &str = help_usage!("basenc.md");
const ENCODINGS: &[(&str, Format)] = &[
("base64", Format::Base64),
("base64url", Format::Base64Url),
("base32", Format::Base32),
("base32hex", Format::Base32Hex),
("base16", Format::Base16),
("base2lsbf", Format::Base2Lsbf),
("base2msbf", Format::Base2Msbf),
("z85", Format::Z85),
const ENCODINGS: &[(&str, Format, &str)] = &[
("base64", Format::Base64, "same as 'base64' program"),
("base64url", Format::Base64Url, "file- and url-safe base64"),
("base32", Format::Base32, "same as 'base32' program"),
(
"base32hex",
Format::Base32Hex,
"extended hex alphabet base32",
),
("base16", Format::Base16, "hex encoding"),
(
"base2lsbf",
Format::Base2Lsbf,
"bit string with least significant bit (lsb) first",
),
(
"base2msbf",
Format::Base2Msbf,
"bit string with most significant bit (msb) first",
),
(
"z85",
Format::Z85,
"ascii85-like encoding;\n\
when encoding, input length must be a multiple of 4;\n\
when decoding, input length must be a multiple of 5",
),
];
pub fn uu_app() -> Command {
@ -41,6 +59,7 @@ pub fn uu_app() -> Command {
command = command.arg(
Arg::new(encoding.0)
.long(encoding.0)
.help(encoding.2)
.action(ArgAction::SetTrue),
);
}

View file

@ -1,6 +1,6 @@
[package]
name = "uu_cat"
version = "0.0.17"
version = "0.0.19"
authors = ["uutils developers"]
license = "MIT"
description = "cat ~ (uutils) concatenate and display input"
@ -15,13 +15,13 @@ edition = "2021"
path = "src/cat.rs"
[dependencies]
clap = { workspace=true }
clap = { workspace = true }
thiserror = { workspace = true }
is-terminal = { workspace = true }
uucore = { workspace=true, features=["fs", "pipes"] }
uucore = { workspace = true, features = ["fs", "pipes"] }
[target.'cfg(unix)'.dependencies]
nix = { workspace=true }
nix = { workspace = true }
[[bin]]
name = "cat"

View file

@ -456,6 +456,7 @@ fn write_fast<R: FdReadable>(handle: &mut InputHandle<R>) -> CatResult<()> {
/// Outputs file contents to stdout in a line-by-line fashion,
/// propagating any errors that might occur.
#[allow(clippy::cognitive_complexity)]
fn write_lines<R: FdReadable>(
handle: &mut InputHandle<R>,
options: &OutputOptions,

View file

@ -1,6 +1,6 @@
[package]
name = "uu_chcon"
version = "0.0.17"
version = "0.0.19"
authors = ["uutils developers"]
license = "MIT"
description = "chcon ~ (uutils) change file security context"
@ -14,12 +14,12 @@ edition = "2021"
path = "src/chcon.rs"
[dependencies]
clap = { workspace=true }
uucore = { workspace=true, features=["entries", "fs", "perms"] }
selinux = { workspace=true }
clap = { workspace = true }
uucore = { workspace = true, features = ["entries", "fs", "perms"] }
selinux = { workspace = true }
thiserror = { workspace = true }
libc = { workspace=true }
fts-sys = { workspace=true }
libc = { workspace = true }
fts-sys = { workspace = true }
[[bin]]
name = "chcon"

11
src/uu/chcon/chcon.md Normal file
View file

@ -0,0 +1,11 @@
<!-- spell-checker:ignore (vars) RFILE -->
# chcon
```
chcon [OPTION]... CONTEXT FILE...
chcon [OPTION]... [-u USER] [-r ROLE] [-l RANGE] [-t TYPE] FILE...
chcon [OPTION]... --reference=RFILE FILE...
```
Change the SELinux security context of each FILE to CONTEXT.
With --reference, change the security context of each FILE to that of RFILE.

View file

@ -1,13 +1,11 @@
// spell-checker:ignore (vars) RFILE
#![allow(clippy::upper_case_acronyms)]
use clap::builder::ValueParser;
use uucore::error::{UResult, USimpleError, UUsageError};
use uucore::format_usage;
use uucore::{display::Quotable, show_error, show_warning};
use uucore::{display::Quotable, format_usage, help_about, help_usage, show_error, show_warning};
use clap::{Arg, ArgAction, Command};
use clap::{crate_version, Arg, ArgAction, Command};
use selinux::{OpaqueSecurityContext, SecurityContext};
use std::borrow::Cow;
@ -21,13 +19,8 @@ mod fts;
use errors::*;
static VERSION: &str = env!("CARGO_PKG_VERSION");
static ABOUT: &str = "Change the SELinux security context of each FILE to CONTEXT. \n\
With --reference, change the security context of each FILE to that of RFILE.";
const USAGE: &str = "\
{} [OPTION]... CONTEXT FILE... \n \
{} [OPTION]... [-u USER] [-r ROLE] [-l RANGE] [-t TYPE] FILE... \n \
{} [OPTION]... --reference=RFILE FILE...";
const ABOUT: &str = help_about!("chcon.md");
const USAGE: &str = help_usage!("chcon.md");
pub mod options {
pub static HELP: &str = "help";
@ -152,7 +145,7 @@ pub fn uumain(args: impl uucore::Args) -> UResult<()> {
pub fn uu_app() -> Command {
Command::new(uucore::util_name())
.version(VERSION)
.version(crate_version!())
.about(ABOUT)
.override_usage(format_usage(USAGE))
.infer_long_args(true)

View file

@ -1,6 +1,6 @@
[package]
name = "uu_chgrp"
version = "0.0.17"
version = "0.0.19"
authors = ["uutils developers"]
license = "MIT"
description = "chgrp ~ (uutils) change the group ownership of FILE"
@ -15,8 +15,8 @@ edition = "2021"
path = "src/chgrp.rs"
[dependencies]
clap = { workspace=true }
uucore = { workspace=true, features=["entries", "fs", "perms"] }
clap = { workspace = true }
uucore = { workspace = true, features = ["entries", "fs", "perms"] }
[[bin]]
name = "chgrp"

10
src/uu/chgrp/chgrp.md Normal file
View file

@ -0,0 +1,10 @@
<!-- spell-checker:ignore (vars) RFILE -->
# chgrp
```
chgrp [OPTION]... GROUP FILE...
chgrp [OPTION]... --reference=RFILE FILE...
```
Change the group of each FILE to GROUP.

View file

@ -10,31 +10,33 @@
use uucore::display::Quotable;
pub use uucore::entries;
use uucore::error::{FromIo, UResult, USimpleError};
use uucore::format_usage;
use uucore::perms::{chown_base, options, IfFrom};
use uucore::perms::{chown_base, options, GidUidOwnerFilter, IfFrom};
use uucore::{format_usage, help_about, help_usage};
use clap::{Arg, ArgAction, ArgMatches, Command};
use clap::{crate_version, Arg, ArgAction, ArgMatches, Command};
use std::fs;
use std::os::unix::fs::MetadataExt;
static ABOUT: &str = "Change the group of each FILE to GROUP.";
static VERSION: &str = env!("CARGO_PKG_VERSION");
const ABOUT: &str = help_about!("chgrp.md");
const USAGE: &str = help_usage!("chgrp.md");
const USAGE: &str = "\
{} [OPTION]... GROUP FILE...\n \
{} [OPTION]... --reference=RFILE FILE...";
fn parse_gid_and_uid(matches: &ArgMatches) -> UResult<(Option<u32>, Option<u32>, IfFrom)> {
fn parse_gid_and_uid(matches: &ArgMatches) -> UResult<GidUidOwnerFilter> {
let mut raw_group: String = String::new();
let dest_gid = if let Some(file) = matches.get_one::<String>(options::REFERENCE) {
fs::metadata(file)
.map(|meta| Some(meta.gid()))
.map(|meta| {
let gid = meta.gid();
raw_group = entries::gid2grp(gid).unwrap_or_else(|_| gid.to_string());
Some(gid)
})
.map_err_context(|| format!("failed to get attributes of {}", file.quote()))?
} else {
let group = matches
.get_one::<String>(options::ARG_GROUP)
.map(|s| s.as_str())
.unwrap_or_default();
raw_group = group.to_string();
if group.is_empty() {
None
} else {
@ -49,7 +51,12 @@ fn parse_gid_and_uid(matches: &ArgMatches) -> UResult<(Option<u32>, Option<u32>,
}
}
};
Ok((dest_gid, None, IfFrom::All))
Ok(GidUidOwnerFilter {
dest_gid,
dest_uid: None,
raw_owner: raw_group,
filter: IfFrom::All,
})
}
#[uucore::main]
@ -59,7 +66,7 @@ pub fn uumain(args: impl uucore::Args) -> UResult<()> {
pub fn uu_app() -> Command {
Command::new(uucore::util_name())
.version(VERSION)
.version(crate_version!())
.about(ABOUT)
.override_usage(format_usage(USAGE))
.infer_long_args(true)

View file

@ -1,6 +1,6 @@
[package]
name = "uu_chmod"
version = "0.0.17"
version = "0.0.19"
authors = ["uutils developers"]
license = "MIT"
description = "chmod ~ (uutils) change mode of FILE"
@ -15,9 +15,9 @@ edition = "2021"
path = "src/chmod.rs"
[dependencies]
clap = { workspace=true }
libc = { workspace=true }
uucore = { workspace=true, features=["fs", "mode"] }
clap = { workspace = true }
libc = { workspace = true }
uucore = { workspace = true, features = ["fs", "mode"] }
[[bin]]
name = "chmod"

16
src/uu/chmod/chmod.md Normal file
View file

@ -0,0 +1,16 @@
<!-- spell-checker:ignore RFILE ugoa -->
# chmod
```
chmod [OPTION]... MODE[,MODE]... FILE...
chmod [OPTION]... OCTAL-MODE FILE...
chmod [OPTION]... --reference=RFILE FILE...
```
Change the mode of each FILE to MODE.
With --reference, change the mode of each FILE to that of RFILE.
## After Help
Each MODE is of the form '[ugoa]*([-+=]([rwxXst]*|[ugo]))+|[-+=]?[0-7]+'.

View file

@ -8,6 +8,7 @@
// spell-checker:ignore (ToDO) Chmoder cmode fmode fperm fref ugoa RFILE RFILE's
use clap::{crate_version, Arg, ArgAction, Command};
use std::ffi::OsString;
use std::fs;
use std::os::unix::fs::{MetadataExt, PermissionsExt};
use std::path::Path;
@ -17,16 +18,11 @@ use uucore::fs::display_permissions_unix;
use uucore::libc::mode_t;
#[cfg(not(windows))]
use uucore::mode;
use uucore::{format_usage, show, show_error};
use uucore::{format_usage, help_about, help_section, help_usage, show, show_error};
const ABOUT: &str = "Change the mode of each FILE to MODE.\n\
With --reference, change the mode of each FILE to that of RFILE.";
const USAGE: &str = "\
{} [OPTION]... MODE[,MODE]... FILE...
{} [OPTION]... OCTAL-MODE FILE...
{} [OPTION]... --reference=RFILE FILE...";
const LONG_USAGE: &str =
"Each MODE is of the form '[ugoa]*([-+=]([rwxXst]*|[ugo]))+|[-+=]?[0-7]+'.";
const ABOUT: &str = help_about!("chmod.md");
const USAGE: &str = help_usage!("chmod.md");
const LONG_USAGE: &str = help_section!("after help", "chmod.md");
mod options {
pub const CHANGES: &str = "changes";
@ -40,14 +36,64 @@ mod options {
pub const FILE: &str = "FILE";
}
/// Extract negative modes (starting with '-') from the rest of the arguments.
///
/// This is mainly required for GNU compatibility, where "non-positional negative" modes are used
/// as the actual positional MODE. Some examples of these cases are:
/// * "chmod -w -r file", which is the same as "chmod -w,-r file"
/// * "chmod -w file -r", which is the same as "chmod -w,-r file"
///
/// These can currently not be handled by clap.
/// Therefore it might be possible that a pseudo MODE is inserted to pass clap parsing.
/// The pseudo MODE is later replaced by the extracted (and joined) negative modes.
fn extract_negative_modes(mut args: impl uucore::Args) -> (Option<String>, Vec<OsString>) {
// we look up the args until "--" is found
// "-mode" will be extracted into parsed_cmode_vec
let (parsed_cmode_vec, pre_double_hyphen_args): (Vec<OsString>, Vec<OsString>) =
args.by_ref().take_while(|a| a != "--").partition(|arg| {
let arg = if let Some(arg) = arg.to_str() {
arg.to_string()
} else {
return false;
};
arg.len() >= 2
&& arg.starts_with('-')
&& matches!(
arg.chars().nth(1).unwrap(),
'r' | 'w' | 'x' | 'X' | 's' | 't' | 'u' | 'g' | 'o' | '0'..='7'
)
});
let mut clean_args = Vec::new();
if !parsed_cmode_vec.is_empty() {
// we need a pseudo cmode for clap, which won't be used later.
// this is required because clap needs the default "chmod MODE FILE" scheme.
clean_args.push("w".into());
}
clean_args.extend(pre_double_hyphen_args);
if let Some(arg) = args.next() {
// as there is still something left in the iterator, we previously consumed the "--"
// -> add it to the args again
clean_args.push("--".into());
clean_args.push(arg);
}
clean_args.extend(args);
let parsed_cmode = Some(
parsed_cmode_vec
.iter()
.map(|s| s.to_str().unwrap())
.collect::<Vec<&str>>()
.join(","),
)
.filter(|s| !s.is_empty());
(parsed_cmode, clean_args)
}
#[uucore::main]
pub fn uumain(args: impl uucore::Args) -> UResult<()> {
let mut args = args.collect_lossy();
// Before we can parse 'args' with clap (and previously getopts),
// a possible MODE prefix '-' needs to be removed (e.g. "chmod -x FILE").
let mode_had_minus_prefix = mode::strip_minus_from_mode(&mut args);
let (parsed_cmode, args) = extract_negative_modes(args.skip(1)); // skip binary name
let matches = uu_app().after_help(LONG_USAGE).try_get_matches_from(args)?;
let changes = matches.get_flag(options::CHANGES);
@ -67,13 +113,14 @@ pub fn uumain(args: impl uucore::Args) -> UResult<()> {
},
None => None,
};
let modes = matches.get_one::<String>(options::MODE).unwrap(); // should always be Some because required
let cmode = if mode_had_minus_prefix {
// clap parsing is finished, now put prefix back
format!("-{modes}")
let modes = matches.get_one::<String>(options::MODE);
let cmode = if let Some(parsed_cmode) = parsed_cmode {
parsed_cmode
} else {
modes.to_string()
modes.unwrap().to_string() // modes is required
};
// FIXME: enable non-utf8 paths
let mut files: Vec<String> = matches
.get_many::<String>(options::FILE)
.map(|v| v.map(ToString::to_string).collect())
@ -112,6 +159,7 @@ pub fn uu_app() -> Command {
.override_usage(format_usage(USAGE))
.args_override_self(true)
.infer_long_args(true)
.no_binary_name(true)
.arg(
Arg::new(options::CHANGES)
.long(options::CHANGES)
@ -226,10 +274,10 @@ impl Chmoder {
)
));
}
if !self.recursive {
r = self.chmod_file(file).and(r);
} else {
if self.recursive {
r = self.walk_dir(file);
} else {
r = self.chmod_file(file).and(r);
}
}
r
@ -312,10 +360,10 @@ impl Chmoder {
naively_expected_new_mode = naive_mode;
}
Err(f) => {
if !self.quiet {
return Err(USimpleError::new(1, f));
} else {
if self.quiet {
return Err(ExitCode::new(1));
} else {
return Err(USimpleError::new(1, f));
}
}
}
@ -381,3 +429,34 @@ impl Chmoder {
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_extract_negative_modes() {
// "chmod -w -r file" becomes "chmod -w,-r file". clap does not accept "-w,-r" as MODE.
// Therefore, "w" is added as pseudo mode to pass clap.
let (c, a) = extract_negative_modes(vec!["-w", "-r", "file"].iter().map(OsString::from));
assert_eq!(c, Some("-w,-r".to_string()));
assert_eq!(a, vec!["w", "file"]);
// "chmod -w file -r" becomes "chmod -w,-r file". clap does not accept "-w,-r" as MODE.
// Therefore, "w" is added as pseudo mode to pass clap.
let (c, a) = extract_negative_modes(vec!["-w", "file", "-r"].iter().map(OsString::from));
assert_eq!(c, Some("-w,-r".to_string()));
assert_eq!(a, vec!["w", "file"]);
// "chmod -w -- -r file" becomes "chmod -w -r file", where "-r" is interpreted as file.
// Again, "w" is needed as pseudo mode.
let (c, a) = extract_negative_modes(vec!["-w", "--", "-r", "f"].iter().map(OsString::from));
assert_eq!(c, Some("-w".to_string()));
assert_eq!(a, vec!["w", "--", "-r", "f"]);
// "chmod -- -r file" becomes "chmod -r file".
let (c, a) = extract_negative_modes(vec!["--", "-r", "file"].iter().map(OsString::from));
assert_eq!(c, None);
assert_eq!(a, vec!["--", "-r", "file"]);
}
}

View file

@ -1,6 +1,6 @@
[package]
name = "uu_chown"
version = "0.0.17"
version = "0.0.19"
authors = ["uutils developers"]
license = "MIT"
description = "chown ~ (uutils) change the ownership of FILE"
@ -15,8 +15,8 @@ edition = "2021"
path = "src/chown.rs"
[dependencies]
clap = { workspace=true }
uucore = { workspace=true, features=["entries", "fs", "perms"] }
clap = { workspace = true }
uucore = { workspace = true, features = ["entries", "fs", "perms"] }
[[bin]]
name = "chown"

9
src/uu/chown/chown.md Normal file
View file

@ -0,0 +1,9 @@
<!-- spell-checker:ignore RFILE -->
# chown
```
chown [OPTION]... [OWNER][:[GROUP]] FILE...
chown [OPTION]... --reference=RFILE FILE...
```
Change file owner and group

View file

@ -9,8 +9,8 @@
use uucore::display::Quotable;
pub use uucore::entries::{self, Group, Locate, Passwd};
use uucore::format_usage;
use uucore::perms::{chown_base, options, IfFrom};
use uucore::perms::{chown_base, options, GidUidOwnerFilter, IfFrom};
use uucore::{format_usage, help_about, help_usage};
use uucore::error::{FromIo, UResult, USimpleError};
@ -19,13 +19,11 @@ use clap::{crate_version, Arg, ArgAction, ArgMatches, Command};
use std::fs;
use std::os::unix::fs::MetadataExt;
static ABOUT: &str = "Change file owner and group";
static ABOUT: &str = help_about!("chown.md");
const USAGE: &str = "\
{} [OPTION]... [OWNER][:[GROUP]] FILE...
{} [OPTION]... --reference=RFILE FILE...";
const USAGE: &str = help_usage!("chown.md");
fn parse_gid_uid_and_filter(matches: &ArgMatches) -> UResult<(Option<u32>, Option<u32>, IfFrom)> {
fn parse_gid_uid_and_filter(matches: &ArgMatches) -> UResult<GidUidOwnerFilter> {
let filter = if let Some(spec) = matches.get_one::<String>(options::FROM) {
match parse_spec(spec, ':')? {
(Some(uid), None) => IfFrom::User(uid),
@ -39,17 +37,34 @@ fn parse_gid_uid_and_filter(matches: &ArgMatches) -> UResult<(Option<u32>, Optio
let dest_uid: Option<u32>;
let dest_gid: Option<u32>;
let raw_owner: String;
if let Some(file) = matches.get_one::<String>(options::REFERENCE) {
let meta = fs::metadata(file)
.map_err_context(|| format!("failed to get attributes of {}", file.quote()))?;
dest_gid = Some(meta.gid());
dest_uid = Some(meta.uid());
let gid = meta.gid();
let uid = meta.uid();
dest_gid = Some(gid);
dest_uid = Some(uid);
raw_owner = format!(
"{}:{}",
entries::uid2usr(uid).unwrap_or_else(|_| uid.to_string()),
entries::gid2grp(gid).unwrap_or_else(|_| gid.to_string())
);
} else {
let (u, g) = parse_spec(matches.get_one::<String>(options::ARG_OWNER).unwrap(), ':')?;
raw_owner = matches
.get_one::<String>(options::ARG_OWNER)
.unwrap()
.into();
let (u, g) = parse_spec(&raw_owner, ':')?;
dest_uid = u;
dest_gid = g;
}
Ok((dest_gid, dest_uid, filter))
Ok(GidUidOwnerFilter {
dest_gid,
dest_uid,
raw_owner,
filter,
})
}
#[uucore::main]
@ -200,7 +215,9 @@ fn parse_spec(spec: &str, sep: char) -> UResult<(Option<u32>, Option<u32>)> {
let user = args.next().unwrap_or("");
let group = args.next().unwrap_or("");
let uid = if !user.is_empty() {
let uid = if user.is_empty() {
None
} else {
Some(match Passwd::locate(user) {
Ok(u) => u.uid, // We have been able to get the uid
Err(_) =>
@ -227,10 +244,10 @@ fn parse_spec(spec: &str, sep: char) -> UResult<(Option<u32>, Option<u32>)> {
}
}
})
} else {
None
};
let gid = if !group.is_empty() {
let gid = if group.is_empty() {
None
} else {
Some(match Group::locate(group) {
Ok(g) => g.gid,
Err(_) => match group.parse() {
@ -243,8 +260,6 @@ fn parse_spec(spec: &str, sep: char) -> UResult<(Option<u32>, Option<u32>)> {
}
},
})
} else {
None
};
if user.chars().next().map(char::is_numeric).unwrap_or(false)

View file

@ -1,6 +1,6 @@
[package]
name = "uu_chroot"
version = "0.0.17"
version = "0.0.19"
authors = ["uutils developers"]
license = "MIT"
description = "chroot ~ (uutils) run COMMAND under a new root directory"
@ -15,8 +15,8 @@ edition = "2021"
path = "src/chroot.rs"
[dependencies]
clap = { workspace=true }
uucore = { workspace=true, features=["entries", "fs"] }
clap = { workspace = true }
uucore = { workspace = true, features = ["entries", "fs"] }
[[bin]]
name = "chroot"

8
src/uu/chroot/chroot.md Normal file
View file

@ -0,0 +1,8 @@
<!-- spell-checker:ignore NEWROOT -->
# chroot
```
chroot [OPTION]... NEWROOT [COMMAND [ARG]...]
```
Run COMMAND with root directory set to NEWROOT.

View file

@ -19,10 +19,10 @@ use std::process;
use uucore::error::{set_exit_code, UClapError, UResult, UUsageError};
use uucore::fs::{canonicalize, MissingHandling, ResolveMode};
use uucore::libc::{self, chroot, setgid, setgroups, setuid};
use uucore::{entries, format_usage};
use uucore::{entries, format_usage, help_about, help_usage};
static ABOUT: &str = "Run COMMAND with root directory set to NEWROOT.";
static USAGE: &str = "{} [OPTION]... NEWROOT [COMMAND [ARG]...]";
static ABOUT: &str = help_about!("chroot.md");
static USAGE: &str = help_usage!("chroot.md");
mod options {
pub const NEWROOT: &str = "newroot";

View file

@ -1,6 +1,6 @@
[package]
name = "uu_cksum"
version = "0.0.17"
version = "0.0.19"
authors = ["uutils developers"]
license = "MIT"
description = "cksum ~ (uutils) display CRC and size of input"
@ -15,9 +15,9 @@ edition = "2021"
path = "src/cksum.rs"
[dependencies]
clap = { workspace=true }
uucore = { workspace=true, features=["sum"] }
hex = { workspace=true }
clap = { workspace = true }
uucore = { workspace = true, features = ["sum"] }
hex = { workspace = true }
[[bin]]
name = "cksum"

23
src/uu/cksum/cksum.md Normal file
View file

@ -0,0 +1,23 @@
# cksum
```
cksum [OPTIONS] [FILE]...
```
Print CRC and size for each file
## After Help
DIGEST determines the digest algorithm and default output format:
- `sysv`: (equivalent to sum -s)
- `bsd`: (equivalent to sum -r)
- `crc`: (equivalent to cksum)
- `md5`: (equivalent to md5sum)
- `sha1`: (equivalent to sha1sum)
- `sha224`: (equivalent to sha224sum)
- `sha256`: (equivalent to sha256sum)
- `sha384`: (equivalent to sha384sum)
- `sha512`: (equivalent to sha512sum)
- `blake2b`: (equivalent to b2sum)
- `sm3`: (only available through cksum)
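The `--untagged` flag added in the cksum changes further down switches between the two output styles used for the non-legacy digests. A minimal sketch of that formatting decision, with placeholder digests; the real code additionally special-cases `sysv`, `bsd` and `crc`:
```rust
/// Format one checksum line, mirroring the tagged/untagged split
/// used for the non-legacy algorithms (sha*, md5, blake2b, sm3).
fn format_line(algo: &str, hex_digest: &str, filename: &str, untagged: bool) -> String {
    if untagged {
        // reversed/untagged style: "<digest> <file>"
        format!("{hex_digest} {filename}")
    } else if algo == "blake2b" {
        // BLAKE2b keeps its mixed-case tag
        format!("BLAKE2b ({filename}) = {hex_digest}")
    } else {
        // tagged style: "SHA256 (<file>) = <digest>"
        format!("{} ({filename}) = {hex_digest}", algo.to_ascii_uppercase())
    }
}

fn main() {
    println!("{}", format_line("sha256", "deadbeef", "a.txt", false));
    println!("{}", format_line("sha256", "deadbeef", "a.txt", true));
    println!("{}", format_line("blake2b", "deadbeef", "a.txt", false));
}
```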

View file

@ -6,7 +6,7 @@
// file that was distributed with this source code.
// spell-checker:ignore (ToDO) fname, algo
use clap::{crate_version, Arg, Command};
use clap::{crate_version, Arg, ArgAction, Command};
use hex::encode;
use std::ffi::OsStr;
use std::fs::File;
@ -15,15 +15,16 @@ use std::iter;
use std::path::Path;
use uucore::{
error::{FromIo, UResult},
format_usage,
format_usage, help_about, help_section, help_usage,
sum::{
div_ceil, Blake2b, Digest, DigestWriter, Md5, Sha1, Sha224, Sha256, Sha384, Sha512, Sm3,
BSD, CRC, SYSV,
},
};
const USAGE: &str = "{} [OPTIONS] [FILE]...";
const ABOUT: &str = "Print CRC and size for each file";
const USAGE: &str = help_usage!("cksum.md");
const ABOUT: &str = help_about!("cksum.md");
const AFTER_HELP: &str = help_section!("after help", "cksum.md");
const ALGORITHM_OPTIONS_SYSV: &str = "sysv";
const ALGORITHM_OPTIONS_BSD: &str = "bsd";
@ -102,6 +103,7 @@ struct Options {
algo_name: &'static str,
digest: Box<dyn Digest + 'static>,
output_bits: usize,
untagged: bool,
}
/// Calculate checksum
@ -158,8 +160,22 @@ where
div_ceil(sz, options.output_bits),
filename.display()
),
(_, true) => println!("{sum} {sz}"),
(_, false) => println!("{sum} {sz} {}", filename.display()),
(ALGORITHM_OPTIONS_CRC, true) => println!("{sum} {sz}"),
(ALGORITHM_OPTIONS_CRC, false) => println!("{sum} {sz} {}", filename.display()),
(ALGORITHM_OPTIONS_BLAKE2B, _) if !options.untagged => {
println!("BLAKE2b ({}) = {sum}", filename.display());
}
_ => {
if options.untagged {
println!("{sum} {}", filename.display());
} else {
println!(
"{} ({}) = {sum}",
options.algo_name.to_ascii_uppercase(),
filename.display()
);
}
}
}
}
@ -201,25 +217,11 @@ fn digest_read<T: Read>(
}
mod options {
pub static FILE: &str = "file";
pub static ALGORITHM: &str = "algorithm";
pub const ALGORITHM: &str = "algorithm";
pub const FILE: &str = "file";
pub const UNTAGGED: &str = "untagged";
}
const ALGORITHM_HELP_DESC: &str =
"DIGEST determines the digest algorithm and default output format:\n\
\n\
-a=sysv: (equivalent to sum -s)\n\
-a=bsd: (equivalent to sum -r)\n\
-a=crc: (equivalent to cksum)\n\
-a=md5: (equivalent to md5sum)\n\
-a=sha1: (equivalent to sha1sum)\n\
-a=sha224: (equivalent to sha224sum)\n\
-a=sha256: (equivalent to sha256sum)\n\
-a=sha384: (equivalent to sha384sum)\n\
-a=sha512: (equivalent to sha512sum)\n\
-a=blake2b: (equivalent to b2sum)\n\
-a=sm3: (only available through cksum)\n";
#[uucore::main]
pub fn uumain(args: impl uucore::Args) -> UResult<()> {
let args = args.collect_ignore();
@ -236,6 +238,7 @@ pub fn uumain(args: impl uucore::Args) -> UResult<()> {
algo_name: name,
digest: algo,
output_bits: bits,
untagged: matches.get_flag(options::UNTAGGED),
};
match matches.get_many::<String>(options::FILE) {
@ -278,5 +281,11 @@ pub fn uu_app() -> Command {
ALGORITHM_OPTIONS_SM3,
]),
)
.after_help(ALGORITHM_HELP_DESC)
.arg(
Arg::new(options::UNTAGGED)
.long(options::UNTAGGED)
.help("create a reversed style checksum, without digest type")
.action(ArgAction::SetTrue),
)
.after_help(AFTER_HELP)
}

View file

@ -1,6 +1,6 @@
[package]
name = "uu_comm"
version = "0.0.17"
version = "0.0.19"
authors = ["uutils developers"]
license = "MIT"
description = "comm ~ (uutils) compare sorted inputs"
@ -15,8 +15,8 @@ edition = "2021"
path = "src/comm.rs"
[dependencies]
clap = { workspace=true }
uucore = { workspace=true }
clap = { workspace = true }
uucore = { workspace = true }
[[bin]]
name = "comm"

View file

@ -8,11 +8,11 @@
// spell-checker:ignore (ToDO) delim mkdelim
use std::cmp::Ordering;
use std::fmt::Display;
use std::fs::File;
use std::io::{self, stdin, BufRead, BufReader, Stdin};
use std::path::Path;
use uucore::error::FromIo;
use uucore::error::UResult;
use uucore::error::{FromIo, UResult};
use uucore::{format_usage, help_about, help_usage};
use clap::{crate_version, Arg, ArgAction, ArgMatches, Command};
@ -29,6 +29,7 @@ mod options {
pub const FILE_1: &str = "FILE1";
pub const FILE_2: &str = "FILE2";
pub const TOTAL: &str = "total";
pub const ZERO_TERMINATED: &str = "zero-terminated";
}
fn column_width(col: &str, opts: &ArgMatches) -> usize {
@ -39,23 +40,66 @@ fn column_width(col: &str, opts: &ArgMatches) -> usize {
}
}
fn ensure_nl(line: &mut String) {
if !line.ends_with('\n') {
line.push('\n');
#[repr(u8)]
#[derive(Clone, Copy)]
enum LineEnding {
Newline = b'\n',
Nul = 0,
}
impl From<LineEnding> for u8 {
fn from(line_ending: LineEnding) -> Self {
line_ending as Self
}
}
enum LineReader {
impl From<bool> for LineEnding {
fn from(is_zero_terminated: bool) -> Self {
if is_zero_terminated {
Self::Nul
} else {
Self::Newline
}
}
}
impl Display for LineEnding {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Newline => writeln!(f),
Self::Nul => write!(f, "\0"),
}
}
}
enum Input {
Stdin(Stdin),
FileIn(BufReader<File>),
}
struct LineReader {
line_ending: LineEnding,
input: Input,
}
impl LineReader {
fn read_line(&mut self, buf: &mut String) -> io::Result<usize> {
match *self {
Self::Stdin(ref mut r) => r.read_line(buf),
Self::FileIn(ref mut r) => r.read_line(buf),
fn new(input: Input, line_ending: LineEnding) -> Self {
Self { input, line_ending }
}
fn read_line(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {
let line_ending = self.line_ending.into();
let result = match &mut self.input {
Input::Stdin(r) => r.lock().read_until(line_ending, buf),
Input::FileIn(r) => r.read_until(line_ending, buf),
};
if !buf.ends_with(&[line_ending]) {
buf.push(line_ending);
}
result
}
}
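The `LineReader` introduced above is the core of the new `-z` support: it reads up to a configurable terminator byte and normalizes the last record so it always carries one. A stripped-down sketch of that read loop over an in-memory buffer (no file handling, just the `read_until` pattern the hunk uses):
```rust
use std::io::{BufRead, Cursor};

/// Read delimiter-terminated records: NUL-terminated when `zero_terminated`,
/// newline-terminated otherwise, mirroring LineReader::read_line above.
fn read_records(data: &[u8], zero_terminated: bool) -> Vec<Vec<u8>> {
    let terminator = if zero_terminated { 0u8 } else { b'\n' };
    let mut reader = Cursor::new(data);
    let mut records = Vec::new();
    loop {
        let mut buf = Vec::new();
        let n = reader
            .read_until(terminator, &mut buf)
            .expect("in-memory read cannot fail");
        if n == 0 {
            break; // EOF
        }
        // Normalize: make sure every record carries its terminator.
        if !buf.ends_with(&[terminator]) {
            buf.push(terminator);
        }
        records.push(buf);
    }
    records
}

fn main() {
    let newline = read_records(b"a\nb", false);
    assert_eq!(newline, vec![b"a\n".to_vec(), b"b\n".to_vec()]);
    let nul = read_records(b"a\0b\0", true);
    assert_eq!(nul, vec![b"a\0".to_vec(), b"b\0".to_vec()]);
}
```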
@ -71,9 +115,9 @@ fn comm(a: &mut LineReader, b: &mut LineReader, opts: &ArgMatches) {
let delim_col_2 = delim.repeat(width_col_1);
let delim_col_3 = delim.repeat(width_col_1 + width_col_2);
let ra = &mut String::new();
let ra = &mut Vec::new();
let mut na = a.read_line(ra);
let rb = &mut String::new();
let rb = &mut Vec::new();
let mut nb = b.read_line(rb);
let mut total_col_1 = 0;
@ -96,8 +140,7 @@ fn comm(a: &mut LineReader, b: &mut LineReader, opts: &ArgMatches) {
match ord {
Ordering::Less => {
if !opts.get_flag(options::COLUMN_1) {
ensure_nl(ra);
print!("{ra}");
print!("{}", String::from_utf8_lossy(ra));
}
ra.clear();
na = a.read_line(ra);
@ -105,8 +148,7 @@ fn comm(a: &mut LineReader, b: &mut LineReader, opts: &ArgMatches) {
}
Ordering::Greater => {
if !opts.get_flag(options::COLUMN_2) {
ensure_nl(rb);
print!("{delim_col_2}{rb}");
print!("{delim_col_2}{}", String::from_utf8_lossy(rb));
}
rb.clear();
nb = b.read_line(rb);
@ -114,8 +156,7 @@ fn comm(a: &mut LineReader, b: &mut LineReader, opts: &ArgMatches) {
}
Ordering::Equal => {
if !opts.get_flag(options::COLUMN_3) {
ensure_nl(ra);
print!("{delim_col_3}{ra}");
print!("{delim_col_3}{}", String::from_utf8_lossy(ra));
}
ra.clear();
rb.clear();
@ -127,17 +168,20 @@ fn comm(a: &mut LineReader, b: &mut LineReader, opts: &ArgMatches) {
}
if opts.get_flag(options::TOTAL) {
println!("{total_col_1}{delim}{total_col_2}{delim}{total_col_3}{delim}total");
let line_ending = LineEnding::from(opts.get_flag(options::ZERO_TERMINATED));
print!("{total_col_1}{delim}{total_col_2}{delim}{total_col_3}{delim}total{line_ending}");
}
}
fn open_file(name: &str) -> io::Result<LineReader> {
match name {
"-" => Ok(LineReader::Stdin(stdin())),
_ => {
let f = File::open(Path::new(name))?;
Ok(LineReader::FileIn(BufReader::new(f)))
}
fn open_file(name: &str, line_ending: LineEnding) -> io::Result<LineReader> {
if name == "-" {
Ok(LineReader::new(Input::Stdin(stdin()), line_ending))
} else {
let f = File::open(Path::new(name))?;
Ok(LineReader::new(
Input::FileIn(BufReader::new(f)),
line_ending,
))
}
}
@ -146,10 +190,11 @@ pub fn uumain(args: impl uucore::Args) -> UResult<()> {
let args = args.collect_lossy();
let matches = uu_app().try_get_matches_from(args)?;
let line_ending = LineEnding::from(matches.get_flag(options::ZERO_TERMINATED));
let filename1 = matches.get_one::<String>(options::FILE_1).unwrap();
let filename2 = matches.get_one::<String>(options::FILE_2).unwrap();
let mut f1 = open_file(filename1).map_err_context(|| filename1.to_string())?;
let mut f2 = open_file(filename2).map_err_context(|| filename2.to_string())?;
let mut f1 = open_file(filename1, line_ending).map_err_context(|| filename1.to_string())?;
let mut f2 = open_file(filename2, line_ending).map_err_context(|| filename2.to_string())?;
comm(&mut f1, &mut f2, &matches);
Ok(())
@ -187,6 +232,14 @@ pub fn uu_app() -> Command {
.default_value(options::DELIMITER_DEFAULT)
.hide_default_value(true),
)
.arg(
Arg::new(options::ZERO_TERMINATED)
.long(options::ZERO_TERMINATED)
.short('z')
.overrides_with(options::ZERO_TERMINATED)
.help("line delimiter is NUL, not newline")
.action(ArgAction::SetTrue),
)
.arg(
Arg::new(options::FILE_1)
.required(true)

View file

@ -1,10 +1,10 @@
[package]
name = "uu_cp"
version = "0.0.17"
version = "0.0.19"
authors = [
"Jordy Dickinson <jordy.dickinson@gmail.com>",
"Joshua S. Miller <jsmiller@uchicago.edu>",
"uutils developers",
"Jordy Dickinson <jordy.dickinson@gmail.com>",
"Joshua S. Miller <jsmiller@uchicago.edu>",
"uutils developers",
]
license = "MIT"
description = "cp ~ (uutils) copy SOURCE to DESTINATION"
@ -19,18 +19,18 @@ edition = "2021"
path = "src/cp.rs"
[dependencies]
clap = { workspace=true }
filetime = { workspace=true }
libc = { workspace=true }
quick-error = { workspace=true }
selinux = { workspace=true, optional=true }
uucore = { workspace=true, features=["entries", "fs", "perms", "mode"] }
walkdir = { workspace=true }
indicatif = { workspace=true }
clap = { workspace = true }
filetime = { workspace = true }
libc = { workspace = true }
quick-error = { workspace = true }
selinux = { workspace = true, optional = true }
uucore = { workspace = true, features = ["entries", "fs", "perms", "mode"] }
walkdir = { workspace = true }
indicatif = { workspace = true }
[target.'cfg(unix)'.dependencies]
xattr = { workspace=true }
exacl = { workspace=true, optional=true }
xattr = { workspace = true }
exacl = { workspace = true, optional = true }
[[bin]]
name = "cp"

View file

@ -1,18 +1,18 @@
<!-- markdownlint-disable first-line-heading -->
<!-- spell-checker:ignore (markdown) markdownlint -->
## Feature list
# Feature list
<!-- spell-checker:ignore (options) linkgs reflink -->
### To Do
## To Do
- [ ] cli-symbolic-links
- [ ] context
- [ ] copy-contents
- [ ] sparse
### Completed
## Completed
- [x] archive
- [x] attributes-only

View file

@ -7,3 +7,19 @@ cp [OPTION]... -t DIRECTORY SOURCE...
```
Copy SOURCE to DEST, or multiple SOURCE(s) to DIRECTORY.
## After Help
Do not copy a non-directory that has an existing destination with the same or newer modification timestamp;
instead, silently skip the file without failing. If timestamps are being preserved, the comparison is to the
source timestamp truncated to the resolutions of the destination file system and of the system calls used to
update timestamps; this avoids duplicate work if several `cp -pu` commands are executed with the same source
and destination. This option is ignored if the `-n` or `--no-clobber` option is also specified. Also, if
`--preserve=links` is also specified (like with `cp -au` for example), that will take precedence; consequently,
depending on the order that files are processed from the source, newer files in the destination may be replaced,
to mirror hard links in the source. `--update` also accepts an optional value, which gives more control over
which existing files in the destination are replaced, and its value can be one of the following (a sketch of
this decision logic follows the list):
* `all` This is the default operation when an `--update` option is not specified, and results in all existing files in the destination being replaced.
* `none` This is similar to the `--no-clobber` option, in that no files in the destination are replaced, but also skipping a file does not induce a failure.
* `older` This is the default operation when `--update` is specified, and results in files being replaced if they're older than the corresponding source file.
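As referenced above, a minimal sketch of the decision each `--update` value implies, assuming modification times are already available; the real cp (see the `CopyMode::Update` handling later in this diff) additionally interacts with `--no-clobber`, `--preserve=links` and interactive mode:
```rust
use std::time::SystemTime;

/// The three --update values described above.
#[derive(Clone, Copy, PartialEq, Eq)]
enum UpdateMode {
    ReplaceAll,     // --update=all (default when --update is absent)
    ReplaceNone,    // --update=none
    ReplaceIfOlder, // --update=older (default for a bare --update / -u)
}

/// Decide whether an existing destination should be replaced.
fn should_replace(mode: UpdateMode, src_mtime: SystemTime, dest_mtime: SystemTime) -> bool {
    match mode {
        UpdateMode::ReplaceAll => true,
        UpdateMode::ReplaceNone => false,
        // Only replace when the destination is strictly older than the source.
        UpdateMode::ReplaceIfOlder => dest_mtime < src_mtime,
    }
}

fn main() {
    let older = SystemTime::UNIX_EPOCH;
    let newer = SystemTime::now();
    assert!(should_replace(UpdateMode::ReplaceAll, older, newer));
    assert!(!should_replace(UpdateMode::ReplaceNone, newer, older));
    assert!(should_replace(UpdateMode::ReplaceIfOlder, newer, older));
    assert!(!should_replace(UpdateMode::ReplaceIfOlder, older, newer));
}
```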

View file

@ -404,8 +404,18 @@ pub(crate) fn copy_directory(
Err(e) => show_error!("{}", e),
}
}
// Copy the attributes from the root directory to the target directory.
copy_attributes(root, target, &options.attributes)?;
if options.parents {
let dest = target.join(root.file_name().unwrap());
copy_attributes(root, dest.as_path(), &options.attributes)?;
for (x, y) in aligned_ancestors(root, dest.as_path()) {
copy_attributes(x, y, &options.attributes)?;
}
} else {
copy_attributes(root, target, &options.attributes)?;
}
Ok(())
}

View file

@ -26,7 +26,7 @@ use std::os::unix::fs::{FileTypeExt, PermissionsExt};
use std::path::{Path, PathBuf, StripPrefixError};
use std::string::ToString;
use clap::{crate_version, Arg, ArgAction, ArgMatches, Command};
use clap::{builder::ValueParser, crate_version, Arg, ArgAction, ArgMatches, Command};
use filetime::FileTime;
use indicatif::{ProgressBar, ProgressStyle};
#[cfg(unix)]
@ -38,9 +38,14 @@ use uucore::backup_control::{self, BackupMode};
use uucore::display::Quotable;
use uucore::error::{set_exit_code, UClapError, UError, UResult, UUsageError};
use uucore::fs::{
canonicalize, paths_refer_to_same_file, FileInformation, MissingHandling, ResolveMode,
canonicalize, is_symlink_loop, paths_refer_to_same_file, FileInformation, MissingHandling,
ResolveMode,
};
use uucore::update_control::{self, UpdateMode};
use uucore::{
crash, format_usage, help_about, help_section, help_usage, prompt_yes, show_error,
show_warning, util_name,
};
use uucore::{crash, format_usage, help_about, help_usage, prompt_yes, show_error, show_warning};
use crate::copydir::copy_directory;
@ -78,6 +83,7 @@ quick_error! {
StripPrefixError(err: StripPrefixError) { from() }
/// Result of a skipped file
/// Currently happens when "no" is selected in interactive mode
Skipped { }
/// Result of a skipped file
@ -91,7 +97,7 @@ quick_error! {
/// Invalid arguments to backup
Backup(description: String) { display("{}\nTry '{} --help' for more information.", description, uucore::execution_phrase()) }
NotADirectory(path: String) { display("'{}' is not a directory", path) }
NotADirectory(path: PathBuf) { display("'{}' is not a directory", path.display()) }
}
}
@ -222,14 +228,84 @@ pub struct Options {
attributes: Attributes,
recursive: bool,
backup_suffix: String,
target_dir: Option<String>,
update: bool,
target_dir: Option<PathBuf>,
update: UpdateMode,
debug: bool,
verbose: bool,
progress_bar: bool,
}
/// Enum representing various debug states of the offload and reflink actions.
#[derive(Debug)]
#[allow(dead_code)] // All of them are used on Linux
enum OffloadReflinkDebug {
Unknown,
No,
Yes,
Avoided,
Unsupported,
}
/// Enum representing various debug states of the sparse detection.
#[derive(Debug)]
#[allow(dead_code)] // silent for now until we use them
enum SparseDebug {
Unknown,
No,
Zeros,
SeekHole,
SeekHoleZeros,
Unsupported,
}
/// Struct that contains the debug state for each action in a file copy operation.
#[derive(Debug)]
struct CopyDebug {
offload: OffloadReflinkDebug,
reflink: OffloadReflinkDebug,
sparse_detection: SparseDebug,
}
impl OffloadReflinkDebug {
fn to_string(&self) -> &'static str {
match self {
Self::No => "no",
Self::Yes => "yes",
Self::Avoided => "avoided",
Self::Unsupported => "unsupported",
Self::Unknown => "unknown",
}
}
}
impl SparseDebug {
fn to_string(&self) -> &'static str {
match self {
Self::No => "no",
Self::Zeros => "zeros",
Self::SeekHole => "SEEK_HOLE",
Self::SeekHoleZeros => "SEEK_HOLE + zeros",
Self::Unsupported => "unsupported",
Self::Unknown => "unknown",
}
}
}
/// This function prints the debug information of a file copy operation if
/// no hard link or symbolic link is required, and data copy is required.
/// It prints the debug information of the offload, reflink, and sparse detection actions.
fn show_debug(copy_debug: &CopyDebug) {
println!(
"copy offload: {}, reflink: {}, sparse detection: {}",
copy_debug.offload.to_string(),
copy_debug.reflink.to_string(),
copy_debug.sparse_detection.to_string(),
);
}
const ABOUT: &str = help_about!("cp.md");
const USAGE: &str = help_usage!("cp.md");
const AFTER_HELP: &str = help_section!("after help", "cp.md");
static EXIT_ERR: i32 = 1;
@ -263,7 +339,7 @@ mod options {
pub const STRIP_TRAILING_SLASHES: &str = "strip-trailing-slashes";
pub const SYMBOLIC_LINK: &str = "symbolic-link";
pub const TARGET_DIRECTORY: &str = "target-directory";
pub const UPDATE: &str = "update";
pub const DEBUG: &str = "debug";
pub const VERBOSE: &str = "verbose";
}
@ -273,14 +349,22 @@ static PRESERVABLE_ATTRIBUTES: &[&str] = &[
"ownership",
"timestamps",
"context",
"link",
"links",
"xattr",
"all",
];
#[cfg(not(unix))]
static PRESERVABLE_ATTRIBUTES: &[&str] =
&["mode", "timestamps", "context", "links", "xattr", "all"];
static PRESERVABLE_ATTRIBUTES: &[&str] = &[
"mode",
"timestamps",
"context",
"link",
"links",
"xattr",
"all",
];
pub fn uu_app() -> Command {
const MODE_ARGS: &[&str] = &[
@ -294,6 +378,10 @@ pub fn uu_app() -> Command {
.version(crate_version!())
.about(ABOUT)
.override_usage(format_usage(USAGE))
.after_help(format!(
"{AFTER_HELP}\n\n{}",
backup_control::BACKUP_CONTROL_LONG_HELP
))
.infer_long_args(true)
.arg(
Arg::new(options::TARGET_DIRECTORY)
@ -302,6 +390,7 @@ pub fn uu_app() -> Command {
.long(options::TARGET_DIRECTORY)
.value_name(options::TARGET_DIRECTORY)
.value_hint(clap::ValueHint::DirPath)
.value_parser(ValueParser::path_buf())
.help("copy all SOURCE arguments into target-directory"),
)
.arg(
@ -351,6 +440,12 @@ pub fn uu_app() -> Command {
.help("remove any trailing slashes from each SOURCE argument")
.action(ArgAction::SetTrue),
)
.arg(
Arg::new(options::DEBUG)
.long(options::DEBUG)
.help("explain how a file is copied. Implies -v")
.action(ArgAction::SetTrue),
)
.arg(
Arg::new(options::VERBOSE)
.short('v')
@ -391,16 +486,8 @@ pub fn uu_app() -> Command {
.arg(backup_control::arguments::backup())
.arg(backup_control::arguments::backup_no_args())
.arg(backup_control::arguments::suffix())
.arg(
Arg::new(options::UPDATE)
.short('u')
.long(options::UPDATE)
.help(
"copy only when the SOURCE file is newer than the destination file \
or when the destination file is missing",
)
.action(ArgAction::SetTrue),
)
.arg(update_control::arguments::update())
.arg(update_control::arguments::update_no_args())
.arg(
Arg::new(options::REFLINK)
.long(options::REFLINK)
@ -428,6 +515,7 @@ pub fn uu_app() -> Command {
PRESERVABLE_ATTRIBUTES,
))
.num_args(0..)
.require_equals(true)
.value_name("ATTR_LIST")
.overrides_with_all([
options::ARCHIVE,
@ -555,19 +643,18 @@ pub fn uu_app() -> Command {
.arg(
Arg::new(options::PATHS)
.action(ArgAction::Append)
.value_hint(clap::ValueHint::AnyPath),
.value_hint(clap::ValueHint::AnyPath)
.value_parser(ValueParser::path_buf()),
)
}
#[uucore::main]
pub fn uumain(args: impl uucore::Args) -> UResult<()> {
let matches = uu_app()
.after_help(backup_control::BACKUP_CONTROL_LONG_HELP)
.try_get_matches_from(args);
let matches = uu_app().try_get_matches_from(args);
// The error is parsed here because we do not want version or help being printed to stderr.
if let Err(e) = matches {
let mut app = uu_app().after_help(backup_control::BACKUP_CONTROL_LONG_HELP);
let mut app = uu_app();
match e.kind() {
clap::error::ErrorKind::DisplayHelp => {
@ -576,7 +663,7 @@ pub fn uumain(args: impl uucore::Args) -> UResult<()> {
clap::error::ErrorKind::DisplayVersion => print!("{}", app.render_version()),
_ => return Err(Box::new(e.with_exit_code(1))),
};
} else if let Ok(matches) = matches {
} else if let Ok(mut matches) = matches {
let options = Options::from_matches(&matches)?;
if options.overwrite == OverwriteMode::NoClobber && options.backup != BackupMode::NoBackup {
@ -586,12 +673,12 @@ pub fn uumain(args: impl uucore::Args) -> UResult<()> {
));
}
let paths: Vec<String> = matches
.get_many::<String>(options::PATHS)
.map(|v| v.map(ToString::to_string).collect())
let paths: Vec<PathBuf> = matches
.remove_many::<PathBuf>(options::PATHS)
.map(|v| v.collect())
.unwrap_or_default();
let (sources, target) = parse_path_args(&paths, &options)?;
let (sources, target) = parse_path_args(paths, &options)?;
if let Err(error) = copy(&sources, &target, &options) {
match error {
@ -638,7 +725,11 @@ impl CopyMode {
Self::Link
} else if matches.get_flag(options::SYMBOLIC_LINK) {
Self::SymLink
} else if matches.get_flag(options::UPDATE) {
} else if matches
.get_one::<String>(update_control::arguments::OPT_UPDATE)
.is_some()
|| matches.get_flag(update_control::arguments::OPT_UPDATE_NO_ARG)
{
Self::Update
} else if matches.get_flag(options::ATTRIBUTES_ONLY) {
Self::AttrOnly
@ -708,7 +799,7 @@ impl Attributes {
"ownership" => self.ownership = preserve_yes_required,
"timestamps" => self.timestamps = preserve_yes_required,
"context" => self.context = preserve_yes_required,
"links" => self.links = preserve_yes_required,
"link" | "links" => self.links = preserve_yes_required,
"xattr" => self.xattr = preserve_yes_required,
_ => {
return Err(Error::InvalidArgument(format!(
@ -722,6 +813,7 @@ impl Attributes {
}
impl Options {
#[allow(clippy::cognitive_complexity)]
fn from_matches(matches: &ArgMatches) -> CopyResult<Self> {
let not_implemented_opts = vec![
#[cfg(not(any(windows, unix)))]
@ -746,6 +838,7 @@ impl Options {
Err(e) => return Err(Error::Backup(format!("{e}"))),
Ok(mode) => mode,
};
let update_mode = update_control::determine_update_mode(matches);
let backup_suffix = backup_control::determine_backup_suffix(matches);
@ -754,11 +847,11 @@ impl Options {
// Parse target directory options
let no_target_dir = matches.get_flag(options::NO_TARGET_DIRECTORY);
let target_dir = matches
.get_one::<String>(options::TARGET_DIRECTORY)
.map(ToString::to_string);
.get_one::<PathBuf>(options::TARGET_DIRECTORY)
.cloned();
if let Some(dir) = &target_dir {
if !Path::new(dir).is_dir() {
if !dir.is_dir() {
return Err(Error::NotADirectory(dir.clone()));
}
};
@ -823,8 +916,9 @@ impl Options {
|| matches.get_flag(options::DEREFERENCE),
one_file_system: matches.get_flag(options::ONE_FILE_SYSTEM),
parents: matches.get_flag(options::PARENTS),
update: matches.get_flag(options::UPDATE),
verbose: matches.get_flag(options::VERBOSE),
update: update_mode,
debug: matches.get_flag(options::DEBUG),
verbose: matches.get_flag(options::VERBOSE) || matches.get_flag(options::DEBUG),
strip_trailing_slashes: matches.get_flag(options::STRIP_TRAILING_SLASHES),
reflink_mode: {
if let Some(reflink) = matches.get_one::<String>(options::REFLINK) {
@ -915,9 +1009,7 @@ impl TargetType {
}
/// Returns tuple of (Source paths, Target)
fn parse_path_args(path_args: &[String], options: &Options) -> CopyResult<(Vec<Source>, Target)> {
let mut paths = path_args.iter().map(PathBuf::from).collect::<Vec<_>>();
fn parse_path_args(mut paths: Vec<Source>, options: &Options) -> CopyResult<(Vec<Source>, Target)> {
if paths.is_empty() {
// No files specified
return Err("missing file operand".into());
@ -933,7 +1025,7 @@ fn parse_path_args(path_args: &[String], options: &Options) -> CopyResult<(Vec<S
Some(ref target) => {
// All path args are sources, and the target dir was
// specified separately
PathBuf::from(target)
target.clone()
}
None => {
// If there was no explicit target-dir, then use the last
@ -1012,19 +1104,21 @@ fn preserve_hardlinks(
}
/// When handling errors, we don't always want to show them to the user. This function handles that.
/// If the error is printed, returns true, false otherwise.
fn show_error_if_needed(error: &Error) -> bool {
fn show_error_if_needed(error: &Error) {
match error {
// When using --no-clobber, we don't want to show
// an error message
Error::NotAllFilesCopied => (),
Error::Skipped => (),
Error::NotAllFilesCopied => {
// Need to return an error code
}
Error::Skipped => {
// touch a b && echo "n"|cp -i a b && echo $?
// should return an error from GNU 9.2
}
_ => {
show_error!("{}", error);
return true;
}
}
false
}
/// Copy all `sources` to `target`. Returns an
@ -1081,9 +1175,8 @@ fn copy(sources: &[Source], target: &TargetSlice, options: &Options) -> CopyResu
options,
&mut symlinked_files,
) {
if show_error_if_needed(&error) {
non_fatal_errors = true;
}
show_error_if_needed(&error);
non_fatal_errors = true;
}
}
seen_sources.insert(source);
@ -1142,25 +1235,41 @@ fn copy_source(
} else {
// Copy as file
let dest = construct_dest_path(source_path, target, target_type, options)?;
copy_file(
let res = copy_file(
progress_bar,
source_path,
dest.as_path(),
options,
symlinked_files,
true,
)
);
if options.parents {
for (x, y) in aligned_ancestors(source, dest.as_path()) {
copy_attributes(x, y, &options.attributes)?;
}
}
res
}
}
impl OverwriteMode {
fn verify(&self, path: &Path) -> CopyResult<()> {
fn verify(&self, path: &Path, verbose: bool) -> CopyResult<()> {
match *self {
Self::NoClobber => Err(Error::NotAllFilesCopied),
Self::NoClobber => {
if verbose {
println!("skipped {}", path.quote());
} else {
eprintln!("{}: not replacing {}", util_name(), path.quote());
}
Err(Error::NotAllFilesCopied)
}
Self::Interactive(_) => {
if prompt_yes!("overwrite {}?", path.quote()) {
Ok(())
} else {
if verbose {
println!("skipped {}", path.quote());
}
Err(Error::Skipped)
}
}
@ -1368,7 +1477,7 @@ fn handle_existing_dest(
return Err(format!("{} and {} are the same file", source.quote(), dest.quote()).into());
}
options.overwrite.verify(dest)?;
options.overwrite.verify(dest, options.verbose)?;
let backup_path = backup_control::get_backup_path(options.backup, dest, &options.backup_suffix);
if let Some(backup_path) = backup_path {
@ -1383,11 +1492,10 @@ fn handle_existing_dest(
backup_dest(dest, &backup_path)?;
}
}
match options.overwrite {
// FIXME: print that the file was removed if --verbose is enabled
OverwriteMode::Clobber(ClobberMode::Force) => {
if fs::metadata(dest)?.permissions().readonly() {
if is_symlink_loop(dest) || fs::metadata(dest)?.permissions().readonly() {
fs::remove_file(dest)?;
}
}
@ -1460,6 +1568,7 @@ fn aligned_ancestors<'a>(source: &'a Path, dest: &'a Path) -> Vec<(&'a Path, &'a
///
/// The original permissions of `source` will be copied to `dest`
/// after a successful copy.
#[allow(clippy::cognitive_complexity)]
fn copy_file(
progress_bar: &Option<ProgressBar>,
source: &Path,
@ -1468,7 +1577,9 @@ fn copy_file(
symlinked_files: &mut HashSet<FileInformation>,
source_in_command_line: bool,
) -> CopyResult<()> {
if options.update && options.overwrite == OverwriteMode::Interactive(ClobberMode::Standard) {
if (options.update == UpdateMode::ReplaceIfOlder || options.update == UpdateMode::ReplaceNone)
&& options.overwrite == OverwriteMode::Interactive(ClobberMode::Standard)
{
// `cp -i --update old new` when `new` exists doesn't copy anything
// and exit with 0
return Ok(());
@ -1493,6 +1604,8 @@ fn copy_file(
options.overwrite,
OverwriteMode::Clobber(ClobberMode::RemoveDestination)
)
&& !is_symlink_loop(dest)
&& std::env::var_os("POSIXLY_CORRECT").is_none()
{
return Err(Error::Error(format!(
"not writing through dangling symlink '{}'",
@ -1624,22 +1737,38 @@ fn copy_file(
}
CopyMode::Update => {
if dest.exists() {
let dest_metadata = fs::symlink_metadata(dest)?;
match options.update {
update_control::UpdateMode::ReplaceAll => {
copy_helper(
source,
dest,
options,
context,
source_is_symlink,
source_is_fifo,
symlinked_files,
)?;
}
update_control::UpdateMode::ReplaceNone => return Ok(()),
update_control::UpdateMode::ReplaceIfOlder => {
let dest_metadata = fs::symlink_metadata(dest)?;
let src_time = source_metadata.modified()?;
let dest_time = dest_metadata.modified()?;
if src_time <= dest_time {
return Ok(());
} else {
copy_helper(
source,
dest,
options,
context,
source_is_symlink,
source_is_fifo,
symlinked_files,
)?;
let src_time = source_metadata.modified()?;
let dest_time = dest_metadata.modified()?;
if src_time <= dest_time {
return Ok(());
} else {
copy_helper(
source,
dest,
options,
context,
source_is_symlink,
source_is_fifo,
symlinked_files,
)?;
}
}
}
} else {
copy_helper(
@ -1706,11 +1835,11 @@ fn copy_helper(
File::create(dest).context(dest.display().to_string())?;
} else if source_is_fifo && options.recursive && !options.copy_contents {
#[cfg(unix)]
copy_fifo(dest, options.overwrite)?;
copy_fifo(dest, options.overwrite, options.verbose)?;
} else if source_is_symlink {
copy_link(source, dest, symlinked_files)?;
} else {
copy_on_write(
let copy_debug = copy_on_write(
source,
dest,
options.reflink_mode,
@ -1719,6 +1848,10 @@ fn copy_helper(
#[cfg(any(target_os = "linux", target_os = "android", target_os = "macos"))]
source_is_fifo,
)?;
if !options.attributes_only && options.debug {
show_debug(&copy_debug);
}
}
Ok(())
@ -1727,9 +1860,9 @@ fn copy_helper(
// "Copies" a FIFO by creating a new one. This workaround is because Rust's
// built-in fs::copy does not handle FIFOs (see rust-lang/rust/issues/79390).
#[cfg(unix)]
fn copy_fifo(dest: &Path, overwrite: OverwriteMode) -> CopyResult<()> {
fn copy_fifo(dest: &Path, overwrite: OverwriteMode, verbose: bool) -> CopyResult<()> {
if dest.exists() {
overwrite.verify(dest)?;
overwrite.verify(dest, verbose)?;
fs::remove_file(dest)?;
}

View file

@ -13,7 +13,7 @@ use quick_error::ResultExt;
use uucore::mode::get_umask;
use crate::{CopyResult, ReflinkMode, SparseMode};
use crate::{CopyDebug, CopyResult, OffloadReflinkDebug, ReflinkMode, SparseDebug, SparseMode};
// From /usr/include/linux/fs.h:
// #define FICLONE _IOW(0x94, 9, int)
@ -145,24 +145,51 @@ pub(crate) fn copy_on_write(
sparse_mode: SparseMode,
context: &str,
source_is_fifo: bool,
) -> CopyResult<()> {
) -> CopyResult<CopyDebug> {
let mut copy_debug = CopyDebug {
offload: OffloadReflinkDebug::Unknown,
reflink: OffloadReflinkDebug::Unsupported,
sparse_detection: SparseDebug::No,
};
let result = match (reflink_mode, sparse_mode) {
(ReflinkMode::Never, SparseMode::Always) => sparse_copy(source, dest),
(ReflinkMode::Never, _) => std::fs::copy(source, dest).map(|_| ()),
(ReflinkMode::Auto, SparseMode::Always) => sparse_copy(source, dest),
(ReflinkMode::Never, SparseMode::Always) => {
copy_debug.sparse_detection = SparseDebug::Zeros;
copy_debug.offload = OffloadReflinkDebug::Avoided;
copy_debug.reflink = OffloadReflinkDebug::No;
sparse_copy(source, dest)
}
(ReflinkMode::Never, _) => {
copy_debug.sparse_detection = SparseDebug::No;
copy_debug.reflink = OffloadReflinkDebug::No;
std::fs::copy(source, dest).map(|_| ())
}
(ReflinkMode::Auto, SparseMode::Always) => {
copy_debug.offload = OffloadReflinkDebug::Avoided;
copy_debug.sparse_detection = SparseDebug::Zeros;
copy_debug.reflink = OffloadReflinkDebug::Unsupported;
sparse_copy(source, dest)
}
(ReflinkMode::Auto, _) => {
copy_debug.sparse_detection = SparseDebug::No;
copy_debug.reflink = OffloadReflinkDebug::Unsupported;
if source_is_fifo {
copy_fifo_contents(source, dest).map(|_| ())
} else {
clone(source, dest, CloneFallback::FSCopy)
}
}
(ReflinkMode::Always, SparseMode::Auto) => clone(source, dest, CloneFallback::Error),
(ReflinkMode::Always, SparseMode::Auto) => {
copy_debug.sparse_detection = SparseDebug::No;
copy_debug.reflink = OffloadReflinkDebug::Yes;
clone(source, dest, CloneFallback::Error)
}
(ReflinkMode::Always, _) => {
return Err("`--reflink=always` can be used only with --sparse=auto".into())
}
};
result.context(context)?;
Ok(())
Ok(copy_debug)
}

View file

@ -11,7 +11,7 @@ use std::path::Path;
use quick_error::ResultExt;
use crate::{CopyResult, ReflinkMode, SparseMode};
use crate::{CopyDebug, CopyResult, OffloadReflinkDebug, ReflinkMode, SparseDebug, SparseMode};
/// Copies `source` to `dest` using copy-on-write if possible.
///
@ -24,10 +24,15 @@ pub(crate) fn copy_on_write(
sparse_mode: SparseMode,
context: &str,
source_is_fifo: bool,
) -> CopyResult<()> {
) -> CopyResult<CopyDebug> {
if sparse_mode != SparseMode::Auto {
return Err("--sparse is only supported on linux".to_string().into());
}
let mut copy_debug = CopyDebug {
offload: OffloadReflinkDebug::Unknown,
reflink: OffloadReflinkDebug::Unsupported,
sparse_detection: SparseDebug::Unsupported,
};
// Extract paths in a form suitable to be passed to a syscall.
// The unwrap() is safe because they come from the command-line and so contain non nul
@ -72,6 +77,7 @@ pub(crate) fn copy_on_write(
return Err(format!("failed to clone {source:?} from {dest:?}: {error}").into())
}
_ => {
copy_debug.reflink = OffloadReflinkDebug::Yes;
if source_is_fifo {
let mut src_file = File::open(source)?;
let mut dst_file = File::create(dest)?;
@ -83,5 +89,5 @@ pub(crate) fn copy_on_write(
};
}
Ok(())
Ok(copy_debug)
}

View file

@ -8,7 +8,7 @@ use std::path::Path;
use quick_error::ResultExt;
use crate::{CopyResult, ReflinkMode, SparseMode};
use crate::{CopyDebug, CopyResult, OffloadReflinkDebug, ReflinkMode, SparseDebug, SparseMode};
/// Copies `source` to `dest` for systems without copy-on-write
pub(crate) fn copy_on_write(
@ -17,7 +17,7 @@ pub(crate) fn copy_on_write(
reflink_mode: ReflinkMode,
sparse_mode: SparseMode,
context: &str,
) -> CopyResult<()> {
) -> CopyResult<CopyDebug> {
if reflink_mode != ReflinkMode::Never {
return Err("--reflink is only supported on linux and macOS"
.to_string()
@ -26,8 +26,12 @@ pub(crate) fn copy_on_write(
if sparse_mode != SparseMode::Auto {
return Err("--sparse is only supported on linux".to_string().into());
}
let copy_debug = CopyDebug {
offload: OffloadReflinkDebug::Unsupported,
reflink: OffloadReflinkDebug::Unsupported,
sparse_detection: SparseDebug::Unsupported,
};
fs::copy(source, dest).context(context)?;
Ok(())
Ok(copy_debug)
}

View file

@ -1,6 +1,6 @@
[package]
name = "uu_csplit"
version = "0.0.17"
version = "0.0.19"
authors = ["uutils developers"]
license = "MIT"
description = "csplit ~ (uutils) Output pieces of FILE separated by PATTERN(s) to files 'xx00', 'xx01', ..., and output byte counts of each piece to standard output"
@ -15,10 +15,10 @@ edition = "2021"
path = "src/csplit.rs"
[dependencies]
clap = { workspace=true }
clap = { workspace = true }
thiserror = { workspace = true }
regex = { workspace=true }
uucore = { workspace=true, features=["entries", "fs"] }
regex = { workspace = true }
uucore = { workspace = true, features = ["entries", "fs"] }
[[bin]]
name = "csplit"

11
src/uu/csplit/csplit.md Normal file
View file

@ -0,0 +1,11 @@
# csplit
```
csplit [OPTION]... FILE PATTERN...
```
Split a file into sections determined by context lines
## After Help
Output pieces of FILE separated by PATTERN(s) to files 'xx00', 'xx01', ..., and output byte counts of each piece to standard output.
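A rough illustration of that behaviour, splitting in memory rather than streaming to `xx00`, `xx01`, ... files, and with a plain substring match standing in for csplit's `/REGEXP/` patterns (both simplifications are assumptions of this sketch):
```rust
/// Split `input` into pieces; a line matching `pattern` starts a new piece,
/// loosely mirroring how csplit cuts "up to but not including" a match.
fn split_at_matches(input: &str, pattern: &str) -> Vec<String> {
    let mut pieces: Vec<String> = vec![String::new()];
    for line in input.split_inclusive('\n') {
        if line.contains(pattern) && !pieces.last().unwrap().is_empty() {
            pieces.push(String::new());
        }
        pieces.last_mut().unwrap().push_str(line);
    }
    pieces
}

fn main() {
    let text = "intro\n# one\nbody\n# two\nmore\n";
    for (i, piece) in split_at_matches(text, "#").iter().enumerate() {
        // csplit writes each piece to xx00, xx01, ... and prints its byte count
        println!("xx{i:02}: {} bytes", piece.len());
    }
}
```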

View file

@ -13,7 +13,7 @@ use clap::{crate_version, Arg, ArgAction, ArgMatches, Command};
use regex::Regex;
use uucore::display::Quotable;
use uucore::error::{FromIo, UResult};
use uucore::{crash_if_err, format_usage};
use uucore::{crash_if_err, format_usage, help_about, help_section, help_usage};
mod csplit_error;
mod patterns;
@ -22,9 +22,9 @@ mod split_name;
use crate::csplit_error::CsplitError;
use crate::split_name::SplitName;
static ABOUT: &str = "Split a file into sections determined by context lines";
static LONG_HELP: &str = "Output pieces of FILE separated by PATTERN(s) to files 'xx00', 'xx01', ..., and output byte counts of each piece to standard output.";
const USAGE: &str = "{} [OPTION]... FILE PATTERN...";
const ABOUT: &str = help_about!("csplit.md");
const AFTER_HELP: &str = help_section!("after help", "csplit.md");
const USAGE: &str = help_usage!("csplit.md");
mod options {
pub const SUFFIX_FORMAT: &str = "suffix-format";
@ -356,6 +356,7 @@ impl<'a> SplitWriter<'a> {
/// - if no line matched, an [`CsplitError::MatchNotFound`].
/// - if there are not enough lines to accommodate the offset, an
/// [`CsplitError::LineOutOfRange`].
#[allow(clippy::cognitive_complexity)]
fn do_to_match<I>(
&mut self,
pattern_as_str: &str,
@ -814,5 +815,5 @@ pub fn uu_app() -> Command {
.action(clap::ArgAction::Append)
.required(true),
)
.after_help(LONG_HELP)
.after_help(AFTER_HELP)
}

View file

@ -1,46 +1,45 @@
## Benchmarking cut
# Benchmarking cut
### Performance profile
## Performance profile
In normal use cases a significant amount of the total execution time of `cut`
is spent performing I/O. When invoked with the `-f` option (cut fields) some
CPU time is spent on detecting fields (in `Searcher::next`). Other than that
some small amount of CPU time is spent on breaking the input stream into lines.
### How to
## How to
When fixing bugs or adding features you might want to compare
performance before and after your code changes.
- `hyperfine` can be used to accurately measure and compare the total
- `hyperfine` can be used to accurately measure and compare the total
execution time of one or more commands.
```
$ cargo build --release --package uu_cut
```shell
cargo build --release --package uu_cut
$ hyperfine -w3 "./target/release/cut -f2-4,8 -d' ' input.txt" "cut -f2-4,8 -d' ' input.txt"
hyperfine -w3 "./target/release/cut -f2-4,8 -d' ' input.txt" "cut -f2-4,8 -d' ' input.txt"
```
You can put those two commands in a shell script to be sure that you don't
forget to build after making any changes.
When optimizing or fixing performance regressions seeing the number of times a
function is called, and the amount of time it takes can be useful.
- `cargo flamegraph` generates flame graphs from function level metrics it records using `perf` or `dtrace`
- `cargo flamegraph` generates flame graphs from function level metrics it records using `perf` or `dtrace`
```
$ cargo flamegraph --bin cut --package uu_cut -- -f1,3-4 input.txt > /dev/null
```shell
cargo flamegraph --bin cut --package uu_cut -- -f1,3-4 input.txt > /dev/null
```
### What to benchmark
## What to benchmark
There are four different performance paths in `cut` to benchmark.
- Byte ranges `-c`/`--characters` or `-b`/`--bytes` e.g. `cut -c 2,4,6-`
- Byte ranges with output delimiters e.g. `cut -c 4- --output-delimiter=/`
- Fields e.g. `cut -f -4`
- Fields with output delimiters e.g. `cut -f 7-10 --output-delimiter=:`
- Byte ranges `-c`/`--characters` or `-b`/`--bytes` e.g. `cut -c 2,4,6-`
- Byte ranges with output delimiters e.g. `cut -c 4- --output-delimiter=/`
- Fields e.g. `cut -f -4`
- Fields with output delimiters e.g. `cut -f 7-10 --output-delimiter=:`
Choose a test input file with large number of lines so that program startup time does not significantly affect the benchmark.

View file

@ -1,6 +1,6 @@
[package]
name = "uu_cut"
version = "0.0.17"
version = "0.0.19"
authors = ["uutils developers"]
license = "MIT"
description = "cut ~ (uutils) display byte/field columns of input lines"
@ -15,11 +15,11 @@ edition = "2021"
path = "src/cut.rs"
[dependencies]
clap = { workspace=true }
uucore = { workspace=true }
memchr = { workspace=true }
bstr = { workspace=true }
is-terminal = { workspace=true }
clap = { workspace = true }
uucore = { workspace = true }
memchr = { workspace = true }
bstr = { workspace = true }
is-terminal = { workspace = true }
[[bin]]
name = "cut"

112
src/uu/cut/cut.md Normal file
View file

@ -0,0 +1,112 @@
# cut
<!-- spell-checker:ignore sourcefile sourcefiles -->
```
cut [-d|-w] [-s] [-z] [--output-delimiter] ((-f|-b|-c) {{sequence}}) {{sourcefile}}+
```
Prints specified byte or field columns from each line of stdin or the input files
## After Help
Each call must specify a mode (what to use for columns),
a sequence (which columns to print), and provide a data source
### Specifying a mode
Use `--bytes` (`-b`) or `--characters` (`-c`) to specify byte mode
Use `--fields` (`-f`) to specify field mode, where each line is broken into
fields identified by a delimiter character. For example for a typical CSV
you could use this in combination with setting comma as the delimiter
### Specifying a sequence
A sequence is a group of 1 or more numbers or inclusive ranges separated
by commas.
```
cut -f 2,5-7 some_file.txt
```
will display the 2nd, 5th, 6th, and 7th field for each source line
Ranges can extend to the end of the row by excluding the second number
```
cut -f 3- some_file.txt
```
will display the 3rd field and all fields after for each source line
The first number of a range can be excluded, and this is effectively the
same as using 1 as the first number: it causes the range to begin at the
first column. Ranges can also display a single column
```
cut -f 1,3-5 some_file.txt
```
will display the 1st, 3rd, 4th, and 5th field for each source line
The `--complement` option, when used, inverts the effect of the sequence
```
cut --complement -f 4-6 some_file.txt
```
will display every field but the 4th, 5th, and 6th
### Specifying a data source
If no `sourcefile` arguments are specified, stdin is used as the source of
lines to print
If `sourcefile` arguments are specified, stdin is ignored and all files are
read consecutively. If a `sourcefile` is not successfully read, a warning
will be printed to stderr and the eventual status code will be 1, but cut
will continue to read the remaining `sourcefiles`
To print columns from both STDIN and a file argument, use `-` (dash) as a
`sourcefile` argument to represent stdin.
### Field Mode options
The fields in each line are identified by a delimiter (separator)
#### Set the delimiter
Set the delimiter which separates fields in the file using the
`--delimiter` (`-d`) option. Setting the delimiter is optional.
If not set, a default delimiter of Tab will be used.
If the `-w` option is provided, fields will be separated by any number
of whitespace characters (Space and Tab). The output delimiter will
be a Tab unless explicitly specified. Only one of `-d` or `-w` option can be specified.
This is an extension adopted from FreeBSD.
#### Optionally Filter based on delimiter
If the `--only-delimited` (`-s`) flag is provided, only lines which
contain the delimiter will be printed
#### Replace the delimiter
If the `--output-delimiter` option is provided, the argument used for
it will replace the delimiter character in each line printed. This is
useful for transforming tabular data - e.g. to convert a CSV to a
TSV (tab-separated file)
### Line endings
When the `--zero-terminated` (`-z`) option is used, cut sees \\0 (null) as the
'line ending' character (both for the purposes of reading lines and
separating printed lines) instead of \\n (newline). This is useful for
tabular data where some of the cells may contain newlines
```
echo 'ab\\0cd' | cut -z -c 1
```
will result in 'a\\0c\\0'
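A small sketch of how a sequence such as `1,3-5` or `3-` can be parsed into inclusive ranges; this only illustrates the grammar described above and is not the `uucore::ranges` implementation cut actually uses:
```rust
/// An inclusive 1-based column range; `hi == usize::MAX` means "to end of line".
#[derive(Debug, PartialEq)]
struct FieldRange {
    lo: usize,
    hi: usize,
}

/// Parse a cut-style sequence: comma-separated numbers and ranges like
/// "2", "5-7", "3-" (open end) or "-4" (open start, same as "1-4").
fn parse_sequence(spec: &str) -> Result<Vec<FieldRange>, String> {
    spec.split(',')
        .map(|part| {
            let (lo, hi) = match part.split_once('-') {
                None => {
                    let n = part.parse().map_err(|_| format!("invalid field: {part:?}"))?;
                    (n, n)
                }
                Some((lo, hi)) => (
                    if lo.is_empty() {
                        1
                    } else {
                        lo.parse().map_err(|_| format!("invalid field: {lo:?}"))?
                    },
                    if hi.is_empty() {
                        usize::MAX
                    } else {
                        hi.parse().map_err(|_| format!("invalid field: {hi:?}"))?
                    },
                ),
            };
            if lo == 0 || lo > hi {
                return Err(format!("invalid range: {part:?}"));
            }
            Ok(FieldRange { lo, hi })
        })
        .collect()
}

fn main() {
    let ranges = parse_sequence("1,3-5").unwrap();
    assert_eq!(ranges, vec![FieldRange { lo: 1, hi: 1 }, FieldRange { lo: 3, hi: 5 }]);
    assert!(parse_sequence("0-2").is_err()); // columns are 1-based
}
```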

View file

@ -19,99 +19,14 @@ use uucore::error::{FromIo, UResult, USimpleError};
use self::searcher::Searcher;
use matcher::{ExactMatcher, Matcher, WhitespaceMatcher};
use uucore::ranges::Range;
use uucore::{format_usage, show, show_error, show_if_err};
use uucore::{format_usage, help_about, help_section, help_usage, show, show_error, show_if_err};
mod matcher;
mod searcher;
static USAGE: &str =
"{} [-d|-w] [-s] [-z] [--output-delimiter] ((-f|-b|-c) {{sequence}}) {{sourcefile}}+";
static ABOUT: &str =
"Prints specified byte or field columns from each line of stdin or the input files";
static LONG_HELP: &str = "
Each call must specify a mode (what to use for columns),
a sequence (which columns to print), and provide a data source
Specifying a mode
Use --bytes (-b) or --characters (-c) to specify byte mode
Use --fields (-f) to specify field mode, where each line is broken into
fields identified by a delimiter character. For example for a typical CSV
you could use this in combination with setting comma as the delimiter
Specifying a sequence
A sequence is a group of 1 or more numbers or inclusive ranges separated
by a commas.
cut -f 2,5-7 some_file.txt
will display the 2nd, 5th, 6th, and 7th field for each source line
Ranges can extend to the end of the row by excluding the the second number
cut -f 3- some_file.txt
will display the 3rd field and all fields after for each source line
The first number of a range can be excluded, and this is effectively the
same as using 1 as the first number: it causes the range to begin at the
first column. Ranges can also display a single column
cut -f 1,3-5 some_file.txt
will display the 1st, 3rd, 4th, and 5th field for each source line
The --complement option, when used, inverts the effect of the sequence
cut --complement -f 4-6 some_file.txt
will display the every field but the 4th, 5th, and 6th
Specifying a data source
If no sourcefile arguments are specified, stdin is used as the source of
lines to print
If sourcefile arguments are specified, stdin is ignored and all files are
read in consecutively if a sourcefile is not successfully read, a warning
will print to stderr, and the eventual status code will be 1, but cut
will continue to read through proceeding sourcefiles
To print columns from both STDIN and a file argument, use - (dash) as a
sourcefile argument to represent stdin.
Field Mode options
The fields in each line are identified by a delimiter (separator)
Set the delimiter
Set the delimiter which separates fields in the file using the
--delimiter (-d) option. Setting the delimiter is optional.
If not set, a default delimiter of Tab will be used.
If the -w option is provided, fields will be separated by any number
of whitespace characters (Space and Tab). The output delimiter will
be a Tab unless explicitly specified. Only one of -d or -w option can be specified.
This is an extension adopted from FreeBSD.
Optionally Filter based on delimiter
If the --only-delimited (-s) flag is provided, only lines which
contain the delimiter will be printed
Replace the delimiter
If the --output-delimiter option is provided, the argument used for
it will replace the delimiter character in each line printed. This is
useful for transforming tabular data - e.g. to convert a CSV to a
TSV (tab-separated file)
Line endings
When the --zero-terminated (-z) option is used, cut sees \\0 (null) as the
'line ending' character (both for the purposes of reading lines and
separating printed lines) instead of \\n (newline). This is useful for
tabular data where some of the cells may contain newlines
echo 'ab\\0cd' | cut -z -c 1
will result in 'a\\0c\\0'
";
const USAGE: &str = help_usage!("cut.md");
const ABOUT: &str = help_about!("cut.md");
const AFTER_HELP: &str = help_section!("after help", "cut.md");
struct Options {
out_delim: Option<String>,
@ -594,7 +509,7 @@ pub fn uu_app() -> Command {
.version(crate_version!())
.override_usage(format_usage(USAGE))
.about(ABOUT)
.after_help(LONG_HELP)
.after_help(AFTER_HELP)
.infer_long_args(true)
.arg(
Arg::new(options::BYTES)

View file

@ -1,6 +1,7 @@
# spell-checker:ignore datetime
[package]
name = "uu_date"
version = "0.0.17"
version = "0.0.19"
authors = ["uutils developers"]
license = "MIT"
description = "date ~ (uutils) display or set the current time"
@ -15,15 +16,19 @@ edition = "2021"
path = "src/date.rs"
[dependencies]
chrono = { workspace=true }
clap = { workspace=true }
uucore = { workspace=true }
chrono = { workspace = true }
clap = { workspace = true }
uucore = { workspace = true }
parse_datetime = { workspace = true }
[target.'cfg(unix)'.dependencies]
libc = { workspace=true }
libc = { workspace = true }
[target.'cfg(windows)'.dependencies]
windows-sys = { workspace=true, features = ["Win32_Foundation", "Win32_System_SystemInformation"] }
windows-sys = { workspace = true, features = [
"Win32_Foundation",
"Win32_System_SystemInformation",
] }
[[bin]]
name = "date"

87
src/uu/date/date-usage.md Normal file
View file

@ -0,0 +1,87 @@
# `date` usage
<!-- spell-checker:ignore (format) hhmm -->
FORMAT controls the output. Interpreted sequences are:
| Sequence | Description | Example |
| -------- | -------------------------------------------------------------------- | ---------------------- |
| %% | a literal % | % |
| %a | locale's abbreviated weekday name | Sun |
| %A | locale's full weekday name | Sunday |
| %b | locale's abbreviated month name | Jan |
| %B | locale's full month name | January |
| %c | locale's date and time | Thu Mar 3 23:05:25 2005|
| %C | century; like %Y, except omit last two digits | 20 |
| %d | day of month | 01 |
| %D | date; same as %m/%d/%y | 12/31/99 |
| %e | day of month, space padded; same as %_d | 3 |
| %F | full date; same as %Y-%m-%d | 2005-03-03 |
| %g | last two digits of year of ISO week number (see %G) | 05 |
| %G | year of ISO week number (see %V); normally useful only with %V | 2005 |
| %h | same as %b | Jan |
| %H | hour (00..23) | 23 |
| %I | hour (01..12) | 11 |
| %j | day of year (001..366) | 062 |
| %k | hour, space padded ( 0..23); same as %_H | 3 |
| %l | hour, space padded ( 1..12); same as %_I | 9 |
| %m | month (01..12) | 03 |
| %M | minute (00..59) | 30 |
| %n | a newline | \n |
| %N | nanoseconds (000000000..999999999) | 123456789 |
| %p | locale's equivalent of either AM or PM; blank if not known | PM |
| %P | like %p, but lower case | pm |
| %q | quarter of year (1..4) | 1 |
| %r | locale's 12-hour clock time | 11:11:04 PM |
| %R | 24-hour hour and minute; same as %H:%M | 23:30 |
| %s | seconds since 1970-01-01 00:00:00 UTC | 1615432800 |
| %S | second (00..60) | 30 |
| %t | a tab | \t |
| %T | time; same as %H:%M:%S | 23:30:30 |
| %u | day of week (1..7); 1 is Monday | 4 |
| %U | week number of year, with Sunday as first day of week (00..53) | 10 |
| %V | ISO week number, with Monday as first day of week (01..53) | 12 |
| %w | day of week (0..6); 0 is Sunday | 4 |
| %W | week number of year, with Monday as first day of week (00..53) | 11 |
| %x | locale's date representation | 03/03/2005 |
| %X | locale's time representation | 23:30:30 |
| %y | last two digits of year (00..99) | 05 |
| %Y | year | 2005 |
| %z | +hhmm numeric time zone | -0400 |
| %:z | +hh:mm numeric time zone | -04:00 |
| %::z | +hh:mm:ss numeric time zone | -04:00:00 |
| %:::z | numeric time zone with : to necessary precision | -04, +05:30 |
| %Z | alphabetic time zone abbreviation | EDT |
By default, date pads numeric fields with zeroes.
The following optional flags may follow '%':
* `-` (hyphen) do not pad the field
* `_` (underscore) pad with spaces
* `0` (zero) pad with zeros
* `^` use upper case if possible
* `#` use opposite case if possible
After any flags comes an optional field width, as a decimal number;
then an optional modifier, which is either
E to use the locale's alternate representations if available, or
O to use the locale's alternate numeric symbols if available.
Examples:
Convert seconds since the epoch (1970-01-01 UTC) to a date
```
date --date='@2147483647'
```
Show the time on the west coast of the US (use tzselect(1) to find TZ)
```
TZ='America/Los_Angeles' date
```
Show the local time for 9AM next Friday on the west coast of the US
```
date --date='TZ="America/Los_Angeles" 09:00 next Fri'
```
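In the Rust implementation these sequences are largely delegated to `chrono`'s strftime support; the one visible mismatch, called out later in this diff, is that GNU date spells nanoseconds `%N` while chrono uses `%f`. A hedged sketch of that translation (chrono API as of the 0.4 series):
```rust
use chrono::Local;

/// Format the current local time with a GNU-date style format string,
/// translating the one sequence chrono spells differently (%N -> %f).
fn format_like_date(format: &str) -> String {
    let chrono_format = format.replace("%N", "%f");
    Local::now().format(&chrono_format).to_string()
}

fn main() {
    // Roughly `date '+%F %T.%N %z'`
    println!("{}", format_like_date("%F %T.%N %z"));
}
```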

View file

@ -1,78 +0,0 @@
# `date` usage
<!-- spell-checker:ignore (format) hhmm -->
``` text
FORMAT controls the output. Interpreted sequences are:
%% a literal %
%a locale's abbreviated weekday name (e.g., Sun)
%A locale's full weekday name (e.g., Sunday)
%b locale's abbreviated month name (e.g., Jan)
%B locale's full month name (e.g., January)
%c locale's date and time (e.g., Thu Mar 3 23:05:25 2005)
%C century; like %Y, except omit last two digits (e.g., 20)
%d day of month (e.g., 01)
%D date; same as %m/%d/%y
%e day of month, space padded; same as %_d
%F full date; same as %Y-%m-%d
%g last two digits of year of ISO week number (see %G)
%G year of ISO week number (see %V); normally useful only with %V
%h same as %b
%H hour (00..23)
%I hour (01..12)
%j day of year (001..366)
%k hour, space padded ( 0..23); same as %_H
%l hour, space padded ( 1..12); same as %_I
%m month (01..12)
%M minute (00..59)
%n a newline
%N nanoseconds (000000000..999999999)
%p locale's equivalent of either AM or PM; blank if not known
%P like %p, but lower case
%q quarter of year (1..4)
%r locale's 12-hour clock time (e.g., 11:11:04 PM)
%R 24-hour hour and minute; same as %H:%M
%s seconds since 1970-01-01 00:00:00 UTC
%S second (00..60)
%t a tab
%T time; same as %H:%M:%S
%u day of week (1..7); 1 is Monday
%U week number of year, with Sunday as first day of week (00..53)
%V ISO week number, with Monday as first day of week (01..53)
%w day of week (0..6); 0 is Sunday
%W week number of year, with Monday as first day of week (00..53)
%x locale's date representation (e.g., 12/31/99)
%X locale's time representation (e.g., 23:13:48)
%y last two digits of year (00..99)
%Y year
%z +hhmm numeric time zone (e.g., -0400)
%:z +hh:mm numeric time zone (e.g., -04:00)
%::z +hh:mm:ss numeric time zone (e.g., -04:00:00)
%:::z numeric time zone with : to necessary precision (e.g., -04, +05:30)
%Z alphabetic time zone abbreviation (e.g., EDT)
By default, date pads numeric fields with zeroes.
The following optional flags may follow '%':
- (hyphen) do not pad the field
_ (underscore) pad with spaces
0 (zero) pad with zeros
^ use upper case if possible
# use opposite case if possible
After any flags comes an optional field width, as a decimal number;
then an optional modifier, which is either
E to use the locale's alternate representations if available, or
O to use the locale's alternate numeric symbols if available.
Examples:
Convert seconds since the epoch (1970-01-01 UTC) to a date
$ date --date='@2147483647'
Show the time on the west coast of the US (use tzselect(1) to find TZ)
$ TZ='America/Los_Angeles' date
Show the local time for 9AM next Friday on the west coast of the US
$ date --date='TZ="America/Los_Angeles" 09:00 next Fri'
```

10
src/uu/date/date.md Normal file
View file

@ -0,0 +1,10 @@
<!-- spell-checker:ignore Dhhmm -->
# date
```
date [OPTION]... [+FORMAT]...
date [OPTION]... [MMDDhhmm[[CC]YY][.ss]]
```
Print or set the system date and time

View file

@ -9,7 +9,7 @@
// spell-checker:ignore (chrono) Datelike Timelike ; (format) DATEFILE MMDDhhmm ; (vars) datetime datetimes
use chrono::format::{Item, StrftimeItems};
use chrono::{DateTime, FixedOffset, Local, Offset, Utc};
use chrono::{DateTime, Duration, FixedOffset, Local, Offset, Utc};
#[cfg(windows)]
use chrono::{Datelike, Timelike};
use clap::{crate_version, Arg, ArgAction, Command};
@ -19,10 +19,10 @@ use std::fs::File;
use std::io::{BufRead, BufReader};
use std::path::PathBuf;
use uucore::display::Quotable;
#[cfg(not(any(target_os = "macos", target_os = "redox")))]
#[cfg(not(any(target_os = "redox")))]
use uucore::error::FromIo;
use uucore::error::{UResult, USimpleError};
use uucore::{format_usage, show_error};
use uucore::{format_usage, help_about, help_usage, show};
#[cfg(windows)]
use windows_sys::Win32::{Foundation::SYSTEMTIME, System::SystemInformation::SetSystemTime};
@ -36,10 +36,8 @@ const MINUTE: &str = "minute";
const SECOND: &str = "second";
const NS: &str = "ns";
const ABOUT: &str = "Print or set the system date and time";
const USAGE: &str = "\
{} [OPTION]... [+FORMAT]...
{} [OPTION]... [MMDDhhmm[[CC]YY][.ss]]";
const ABOUT: &str = help_about!("date.md");
const USAGE: &str = help_usage!("date.md");
const OPT_DATE: &str = "date";
const OPT_FORMAT: &str = "format";
@ -98,6 +96,7 @@ enum DateSource {
Now,
Custom(String),
File(PathBuf),
Human(Duration),
}
enum Iso8601Format {
@ -116,8 +115,8 @@ impl<'a> From<&'a str> for Iso8601Format {
SECONDS | SECOND => Self::Seconds,
NS => Self::Ns,
DATE => Self::Date,
// Should be caught by clap
_ => panic!("Invalid format: {s}"),
// Note: This is caught by clap via `possible_values`
_ => unreachable!(),
}
}
}
@ -141,6 +140,7 @@ impl<'a> From<&'a str> for Rfc3339Format {
}
#[uucore::main]
#[allow(clippy::cognitive_complexity)]
pub fn uumain(args: impl uucore::Args) -> UResult<()> {
let matches = uu_app().try_get_matches_from(args)?;
@ -170,7 +170,11 @@ pub fn uumain(args: impl uucore::Args) -> UResult<()> {
};
let date_source = if let Some(date) = matches.get_one::<String>(OPT_DATE) {
DateSource::Custom(date.into())
if let Ok(duration) = parse_datetime::from_str(date.as_str()) {
DateSource::Human(duration)
} else {
DateSource::Custom(date.into())
}
} else if let Some(file) = matches.get_one::<String>(OPT_FILE) {
DateSource::File(file.into())
} else {
@ -205,9 +209,6 @@ pub fn uumain(args: impl uucore::Args) -> UResult<()> {
return set_system_datetime(date);
} else {
// Declare a file here because it needs to outlive the `dates` iterator.
let file: File;
// Get the current time, either in the local time zone or UTC.
let now: DateTime<FixedOffset> = if settings.utc {
let now = Utc::now();
@ -224,10 +225,23 @@ pub fn uumain(args: impl uucore::Args) -> UResult<()> {
let iter = std::iter::once(date);
Box::new(iter)
}
DateSource::Human(relative_time) => {
// Get the current DateTime<FixedOffset> for things like "1 year ago"
let current_time = DateTime::<FixedOffset>::from(Local::now());
let iter = std::iter::once(Ok(current_time + relative_time));
Box::new(iter)
}
DateSource::File(ref path) => {
file = File::open(path).unwrap();
if path.is_dir() {
return Err(USimpleError::new(
2,
format!("expected file, got directory {}", path.quote()),
));
}
let file = File::open(path)
.map_err_context(|| path.as_os_str().to_string_lossy().to_string())?;
let lines = BufReader::new(file).lines();
let iter = lines.filter_map(Result::ok).map(parse_date);
let iter = lines.map_while(Result::ok).map(parse_date);
Box::new(iter)
}
DateSource::Now => {
@ -244,6 +258,13 @@ pub fn uumain(args: impl uucore::Args) -> UResult<()> {
Ok(date) => {
// GNU `date` uses `%N` for nanoseconds, whereas the chrono crate uses `%f`
let format_string = &format_string.replace("%N", "%f");
// Refuse to pass this string to chrono, as it currently causes a panic in that crate
if format_string.contains("%#z") {
return Err(USimpleError::new(
1,
format!("invalid format {}", format_string.replace("%f", "%N")),
));
}
// Hack to work around panic in chrono,
// TODO - remove when a fix for https://github.com/chronotope/chrono/issues/623 is released
let format_items = StrftimeItems::new(format_string);
@ -259,7 +280,10 @@ pub fn uumain(args: impl uucore::Args) -> UResult<()> {
.replace("%f", "%N");
println!("{formatted}");
}
Err((input, _err)) => show_error!("invalid date {}", input.quote()),
Err((input, _err)) => show!(USimpleError::new(
1,
format!("invalid date {}", input.quote())
)),
}
}
}
@ -293,6 +317,9 @@ pub fn uu_app() -> Command {
.short('I')
.long(OPT_ISO_8601)
.value_name("FMT")
.value_parser([DATE, HOUR, HOURS, MINUTE, MINUTES, SECOND, SECONDS, NS])
.num_args(0..=1)
.default_missing_value(OPT_DATE)
.help(ISO_8601_HELP_STRING),
)
.arg(
@ -306,6 +333,7 @@ pub fn uu_app() -> Command {
Arg::new(OPT_RFC_3339)
.long(OPT_RFC_3339)
.value_name("FMT")
.value_parser([DATE, SECOND, SECONDS, NS])
.help(RFC_3339_HELP_STRING),
)
.arg(
@ -405,10 +433,10 @@ fn set_system_datetime(date: DateTime<Utc>) -> UResult<()> {
let result = unsafe { clock_settime(CLOCK_REALTIME, &timespec) };
if result != 0 {
Err(std::io::Error::last_os_error().map_err_context(|| "cannot set date".to_string()))
} else {
if result == 0 {
Ok(())
} else {
Err(std::io::Error::last_os_error().map_err_context(|| "cannot set date".to_string()))
}
}
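The new `DateSource::Human` branch tries `parse_datetime::from_str` first and only falls back to the literal-date parser when that fails. Condensed into a standalone sketch (the helper below is illustrative, assuming the same `parse_datetime` and `chrono` crates the diff uses):

```rust
// Illustrative helper, not the uumain code: resolve a human-readable offset
// such as "1 year ago" against the current local time.
use chrono::{DateTime, FixedOffset, Local};

fn resolve_relative(date: &str) -> Option<DateTime<FixedOffset>> {
    match parse_datetime::from_str(date) {
        // `from_str` yields a signed chrono::Duration relative to "now".
        Ok(offset) => Some(DateTime::<FixedOffset>::from(Local::now()) + offset),
        Err(_) => None, // the real code falls back to DateSource::Custom here
    }
}

fn main() {
    if let Some(t) = resolve_relative("1 year ago") {
        println!("{}", t.format("%Y-%m-%d %H:%M:%S %:z"));
    }
}
```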
View file
@ -45,7 +45,7 @@ be roughly equivalent to the total bytes copied (`blocksize` x `count`).
Some useful invocations for testing would be the following:
```
```shell
hyperfine "./target/release/dd bs=4k count=1000000 < /dev/zero > /dev/null"
hyperfine "./target/release/dd bs=1M count=20000 < /dev/zero > /dev/null"
hyperfine "./target/release/dd bs=1G count=10 < /dev/zero > /dev/null"
@ -57,7 +57,7 @@ Typically you would choose a small blocksize for measuring the performance of
typically does some set amount of work per block which only depends on the size
of the block if conversions are used.
As an example, https://github.com/uutils/coreutils/pull/3600 made a change to
As an example, <https://github.com/uutils/coreutils/pull/3600> made a change to
reuse the same buffer between block copies, avoiding the need to reallocate a
new block of memory for each copy. The impact of that change mostly had an
impact on large block size copies because those are the circumstances where the
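The buffer-reuse change referenced above is easy to reproduce outside of `dd`: allocate one block-sized buffer up front and reuse it for every read/write pair instead of allocating per block. A minimal, illustrative copy loop (not the dd implementation):

```rust
// Illustrative only: copy stdin to stdout in `bs`-sized blocks while reusing
// a single buffer across iterations.
use std::io::{self, Read, Write};

fn main() -> io::Result<()> {
    let bs = 4096;
    let mut buf = vec![0u8; bs]; // allocated once, reused for every block
    let mut stdin = io::stdin().lock();
    let mut stdout = io::stdout().lock();
    loop {
        let n = stdin.read(&mut buf)?;
        if n == 0 {
            break; // EOF
        }
        stdout.write_all(&buf[..n])?;
    }
    stdout.flush()
}
```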
View file
@ -1,6 +1,6 @@
[package]
name = "uu_dd"
version = "0.0.17"
version = "0.0.19"
authors = ["uutils developers"]
license = "MIT"
description = "dd ~ (uutils) copy and convert files"
@ -15,13 +15,16 @@ edition = "2021"
path = "src/dd.rs"
[dependencies]
clap = { workspace=true }
gcd = { workspace=true }
libc = { workspace=true }
uucore = { workspace=true }
clap = { workspace = true }
gcd = { workspace = true }
libc = { workspace = true }
uucore = { workspace = true, features = ["memo"] }
[target.'cfg(any(target_os = "linux"))'.dependencies]
nix = { workspace = true, features = ["fs"] }
[target.'cfg(any(target_os = "linux", target_os = "android"))'.dependencies]
signal-hook = { workspace=true }
signal-hook = { workspace = true }
[[bin]]
name = "dd"
View file
@ -1,6 +1,7 @@
<!-- spell-checker:ignore convs iseek oseek -->
# dd
<!-- spell-checker:ignore convs iseek oseek -->
```
dd [OPERAND]...
dd OPTION
@ -10,117 +11,116 @@ Copy, and optionally convert, a file system resource
## After Help
OPERANDS:
### Operands
bs=BYTES read and write up to BYTES bytes at a time (default: 512);
overwrites ibs and obs.
cbs=BYTES the 'conversion block size' in bytes. Applies to
the conv=block, and conv=unblock operations.
conv=CONVS a comma-separated list of conversion options or
(for legacy reasons) file flags.
count=N stop reading input after N ibs-sized read operations rather
than proceeding until EOF. See iflag=count_bytes if stopping
after N bytes is preferred
ibs=N the size of buffer used for reads (default: 512)
if=FILE the file used for input. When not specified, stdin is used instead
iflag=FLAGS a comma-separated list of input flags which specify how the input
source is treated. FLAGS may be any of the input-flags or
general-flags specified below.
skip=N (or iseek=N) skip N ibs-sized records into input before beginning
copy/convert operations. See iflag=seek_bytes if seeking N bytes
is preferred.
obs=N the size of buffer used for writes (default: 512)
of=FILE the file used for output. When not specified, stdout is used
instead
oflag=FLAGS comma separated list of output flags which specify how the output
source is treated. FLAGS may be any of the output flags or
general flags specified below
seek=N (or oseek=N) seeks N obs-sized records into output before
beginning copy/convert operations. See oflag=seek_bytes if
seeking N bytes is preferred
status=LEVEL controls whether volume and performance stats are written to
stderr.
When unspecified, dd will print stats upon completion. An example is below.
6+0 records in
16+0 records out
8192 bytes (8.2 kB, 8.0 KiB) copied, 0.00057009 s, 14.4 MB/s
The first two lines are the 'volume' stats and the final line is
the 'performance' stats.
The volume stats indicate the number of complete and partial
ibs-sized reads, or obs-sized writes that took place during the
copy. The format of the volume stats is
<complete>+<partial>. If records have been truncated (see
conv=block), the volume stats will contain the number of
truncated records.
Possible LEVEL values are:
progress: Print periodic performance stats as the copy
proceeds.
noxfer: Print final volume stats, but not performance stats.
none: Do not print any stats.
- `bs=BYTES` : read and write up to BYTES bytes at a time (default: 512);
overwrites `ibs` and `obs`.
- `cbs=BYTES` : the 'conversion block size' in bytes. Applies to the
`conv=block`, and `conv=unblock` operations.
- `conv=CONVS` : a comma-separated list of conversion options or (for legacy
reasons) file flags.
- `count=N` : stop reading input after N ibs-sized read operations rather
than proceeding until EOF. See `iflag=count_bytes` if stopping after N bytes
is preferred
- `ibs=N` : the size of buffer used for reads (default: 512)
- `if=FILE` : the file used for input. When not specified, stdin is used instead
- `iflag=FLAGS` : a comma-separated list of input flags which specify how the
input source is treated. FLAGS may be any of the input-flags or general-flags
specified below.
- `skip=N` (or `iseek=N`) : skip N ibs-sized records into input before beginning
copy/convert operations. See iflag=seek_bytes if seeking N bytes is preferred.
- `obs=N` : the size of buffer used for writes (default: 512)
- `of=FILE` : the file used for output. When not specified, stdout is used
instead
- `oflag=FLAGS` : a comma-separated list of output flags which specify how the
output source is treated. FLAGS may be any of the output flags or general
flags specified below
- `seek=N` (or `oseek=N`) : seeks N obs-sized records into output before
beginning copy/convert operations. See oflag=seek_bytes if seeking N bytes is
preferred
- `status=LEVEL` : controls whether volume and performance stats are written to
stderr.
Printing performance stats is also triggered by the INFO signal
(where supported), or the USR1 signal. Setting the
POSIXLY_CORRECT environment variable to any value (including an
empty value) will cause the USR1 signal to be ignored.
When unspecified, dd will print stats upon completion. An example is below.
CONVERSION OPTIONS:
```plain
6+0 records in
16+0 records out
8192 bytes (8.2 kB, 8.0 KiB) copied, 0.00057009 s,
14.4 MB/s
```
ascii convert from EBCDIC to ASCII. This is the inverse of the 'ebcdic'
option. Implies conv=unblock.
ebcdic convert from ASCII to EBCDIC. This is the inverse of the 'ascii'
option. Implies conv=block.
ibm convert from ASCII to EBCDIC, applying the conventions for '[', ']'
and '~' specified in POSIX. Implies conv=block.
The first two lines are the 'volume' stats and the final line is the
'performance' stats.
The volume stats indicate the number of complete and partial ibs-sized reads,
or obs-sized writes that took place during the copy. The format of the volume
stats is `<complete>+<partial>`. If records have been truncated (see
`conv=block`), the volume stats will contain the number of truncated records.
ucase convert from lower-case to upper-case
lcase converts from upper-case to lower-case.
Possible LEVEL values are:
- `progress` : Print periodic performance stats as the copy proceeds.
- `noxfer` : Print final volume stats, but not performance stats.
- `none` : Do not print any stats.
block for each newline less than the size indicated by cbs=BYTES, remove
the newline and pad with spaces up to cbs. Lines longer than cbs are
truncated.
unblock for each block of input of the size indicated by cbs=BYTES, remove
right-trailing spaces and replace with a newline character.
Printing performance stats is also triggered by the INFO signal (where supported),
or the USR1 signal. Setting the POSIXLY_CORRECT environment variable to any value
(including an empty value) will cause the USR1 signal to be ignored.
sparse attempts to seek the output when an obs-sized block consists of only
zeros.
swab swaps each adjacent pair of bytes. If an odd number of bytes is
present, the final byte is omitted.
sync pad each ibs-sided block with zeros. If 'block' or 'unblock' is
specified, pad with spaces instead.
excl the output file must be created. Fail if the output file is already
present.
nocreat the output file will not be created. Fail if the output file in not
already present.
notrunc the output file will not be truncated. If this option is not
present, output will be truncated when opened.
noerror all read errors will be ignored. If this option is not present, dd
will only ignore Error::Interrupted.
fdatasync data will be written before finishing.
fsync data and metadata will be written before finishing.
### Conversion Options
INPUT FLAGS:
- `ascii` : convert from EBCDIC to ASCII. This is the inverse of the `ebcdic`
option. Implies `conv=unblock`.
- `ebcdic` : convert from ASCII to EBCDIC. This is the inverse of the `ascii`
option. Implies `conv=block`.
- `ibm` : convert from ASCII to EBCDIC, applying the conventions for `[`, `]`
and `~` specified in POSIX. Implies `conv=block`.
count_bytes a value to count=N will be interpreted as bytes.
skip_bytes a value to skip=N will be interpreted as bytes.
fullblock wait for ibs bytes from each read. zero-length reads are still
considered EOF.
- `ucase` : convert from lower-case to upper-case.
- `lcase` : convert from upper-case to lower-case.
OUTPUT FLAGS:
- `block` : for each newline less than the size indicated by cbs=BYTES, remove
the newline and pad with spaces up to cbs. Lines longer than cbs are truncated.
- `unblock` : for each block of input of the size indicated by cbs=BYTES, remove
right-trailing spaces and replace with a newline character.
append open file in append mode. Consider setting conv=notrunc as well.
seek_bytes a value to seek=N will be interpreted as bytes.
- `sparse` : attempts to seek the output when an obs-sized block consists of
only zeros.
- `swab` : swaps each adjacent pair of bytes. If an odd number of bytes is
present, the final byte is omitted.
- `sync` : pad each ibs-sized block with zeros. If `block` or `unblock` is
specified, pad with spaces instead.
- `excl` : the output file must be created. Fail if the output file is already
present.
- `nocreat` : the output file will not be created. Fail if the output file is
not already present.
- `notrunc` : the output file will not be truncated. If this option is not
present, output will be truncated when opened.
- `noerror` : all read errors will be ignored. If this option is not present,
dd will only ignore Error::Interrupted.
- `fdatasync` : data will be written before finishing.
- `fsync` : data and metadata will be written before finishing.
GENERAL FLAGS:
### Input flags
direct use direct I/O for data.
directory fail unless the given input (if used as an iflag) or output (if used
as an oflag) is a directory.
dsync use synchronized I/O for data.
sync use synchronized I/O for data and metadata.
nonblock use non-blocking I/O.
noatime do not update access time.
nocache request that OS drop cache.
noctty do not assign a controlling tty.
nofollow do not follow system links.
- `count_bytes` : a value to `count=N` will be interpreted as bytes.
- `skip_bytes` : a value to `skip=N` will be interpreted as bytes.
- `fullblock` : wait for ibs bytes from each read. zero-length reads are still
considered EOF.
### Output flags
- `append` : open file in append mode. Consider setting conv=notrunc as well.
- `seek_bytes` : a value to seek=N will be interpreted as bytes.
### General Flags
- `direct` : use direct I/O for data.
- `directory` : fail unless the given input (if used as an iflag) or
output (if used as an oflag) is a directory.
- `dsync` : use synchronized I/O for data.
- `sync` : use synchronized I/O for data and metadata.
- `nonblock` : use non-blocking I/O.
- `noatime` : do not update access time.
- `nocache` : request that OS drop cache.
- `noctty` : do not assign a controlling tty.
- `nofollow` : do not follow system links.
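For reference, the `<complete>+<partial>` volume lines and the final performance line described above can be assembled from four counters plus the byte total and elapsed time. A hypothetical formatting helper, not the repository's progress code:

```rust
// Hypothetical helper: build the status output sketched in the docs above from
// (complete, partial) read/write counters, a byte count, and elapsed seconds.
fn volume_stats(reads: (u64, u64), writes: (u64, u64), bytes: u64, secs: f64) -> String {
    format!(
        "{}+{} records in\n{}+{} records out\n{} bytes copied, {} s, {:.1} MB/s",
        reads.0,
        reads.1,
        writes.0,
        writes.1,
        bytes,
        secs,
        bytes as f64 / secs / 1e6
    )
}

fn main() {
    // Mirrors the example in the documentation: 8192 bytes in 0.00057009 s ~= 14.4 MB/s.
    println!("{}", volume_stats((6, 0), (16, 0), 8192, 0.00057009));
}
```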
View file
@ -5,7 +5,7 @@
// For the full copyright and license information, please view the LICENSE
// file that was distributed with this source code.
// spell-checker:ignore fname, tname, fpath, specfile, testfile, unspec, ifile, ofile, outfile, fullblock, urand, fileio, atoe, atoibm, behaviour, bmax, bremain, cflags, creat, ctable, ctty, datastructures, doesnt, etoa, fileout, fname, gnudd, iconvflags, iseek, nocache, noctty, noerror, nofollow, nolinks, nonblock, oconvflags, oseek, outfile, parseargs, rlen, rmax, rremain, rsofar, rstat, sigusr, wlen, wstat seekable oconv canonicalized
// spell-checker:ignore fname, tname, fpath, specfile, testfile, unspec, ifile, ofile, outfile, fullblock, urand, fileio, atoe, atoibm, behaviour, bmax, bremain, cflags, creat, ctable, ctty, datastructures, doesnt, etoa, fileout, fname, gnudd, iconvflags, iseek, nocache, noctty, noerror, nofollow, nolinks, nonblock, oconvflags, oseek, outfile, parseargs, rlen, rmax, rremain, rsofar, rstat, sigusr, wlen, wstat seekable oconv canonicalized fadvise Fadvise FADV DONTNEED ESPIPE
mod datastructures;
use datastructures::*;
@ -27,11 +27,14 @@ use std::cmp;
use std::env;
use std::ffi::OsString;
use std::fs::{File, OpenOptions};
use std::io::{self, Read, Seek, SeekFrom, Stdin, Stdout, Write};
#[cfg(unix)]
use std::os::unix::fs::FileTypeExt;
use std::io::{self, Read, Seek, SeekFrom, Stdout, Write};
#[cfg(any(target_os = "linux", target_os = "android"))]
use std::os::unix::fs::OpenOptionsExt;
#[cfg(unix)]
use std::os::unix::{
fs::FileTypeExt,
io::{AsRawFd, FromRawFd},
};
use std::path::Path;
use std::sync::{
atomic::{AtomicBool, Ordering::Relaxed},
@ -42,9 +45,16 @@ use std::time::{Duration, Instant};
use clap::{crate_version, Arg, Command};
use gcd::Gcd;
#[cfg(target_os = "linux")]
use nix::{
errno::Errno,
fcntl::{posix_fadvise, PosixFadviseAdvice},
};
use uucore::display::Quotable;
use uucore::error::{FromIo, UResult};
use uucore::{format_usage, help_about, help_section, help_usage, show_error};
#[cfg(target_os = "linux")]
use uucore::{show, show_if_err};
const ABOUT: &str = help_about!("dd.md");
const AFTER_HELP: &str = help_section!("after help", "dd.md");
@ -135,21 +145,54 @@ impl Num {
}
/// Data sources.
///
/// Use [`Source::stdin_as_file`] if available to enable more
/// fine-grained access to reading from stdin.
enum Source {
/// Input from stdin.
Stdin(Stdin),
#[cfg(not(unix))]
Stdin(io::Stdin),
/// Input from a file.
File(File),
/// Input from stdin, opened from its file descriptor.
#[cfg(unix)]
StdinFile(File),
/// Input from a named pipe, also known as a FIFO.
#[cfg(unix)]
Fifo(File),
}
impl Source {
/// Create a source from stdin using its raw file descriptor.
///
/// This returns an instance of the `Source::StdinFile` variant,
/// using the raw file descriptor of [`std::io::Stdin`] to create
/// the [`std::fs::File`] parameter. You can use this instead of
/// `Source::Stdin` to allow reading from stdin without consuming
/// the entire contents of stdin when this process terminates.
#[cfg(unix)]
fn stdin_as_file() -> Self {
let fd = io::stdin().as_raw_fd();
let f = unsafe { File::from_raw_fd(fd) };
Self::StdinFile(f)
}
/// The length of the data source in number of bytes.
///
/// If it cannot be determined, then this function returns 0.
fn len(&self) -> std::io::Result<i64> {
match self {
Self::File(f) => Ok(f.metadata()?.len().try_into().unwrap_or(i64::MAX)),
_ => Ok(0),
}
}
fn skip(&mut self, n: u64) -> io::Result<u64> {
match self {
#[cfg(not(unix))]
Self::Stdin(stdin) => match io::copy(&mut stdin.take(n), &mut io::sink()) {
Ok(m) if m < n => {
show_error!("'standard input': cannot skip to specified offset");
@ -158,19 +201,48 @@ impl Source {
Ok(m) => Ok(m),
Err(e) => Err(e),
},
#[cfg(unix)]
Self::StdinFile(f) => match io::copy(&mut f.take(n), &mut io::sink()) {
Ok(m) if m < n => {
show_error!("'standard input': cannot skip to specified offset");
Ok(m)
}
Ok(m) => Ok(m),
Err(e) => Err(e),
},
Self::File(f) => f.seek(io::SeekFrom::Start(n)),
#[cfg(unix)]
Self::Fifo(f) => io::copy(&mut f.take(n), &mut io::sink()),
}
}
/// Discard the system file cache for the given portion of the data source.
///
/// `offset` and `len` specify a contiguous portion of the data
/// source. This function informs the kernel that the specified
/// portion of the source is no longer needed. If not possible,
/// then this function returns an error.
#[cfg(target_os = "linux")]
fn discard_cache(&self, offset: libc::off_t, len: libc::off_t) -> nix::Result<()> {
match self {
Self::File(f) => {
let advice = PosixFadviseAdvice::POSIX_FADV_DONTNEED;
posix_fadvise(f.as_raw_fd(), offset, len, advice)
}
_ => Err(Errno::ESPIPE), // "Illegal seek"
}
}
}
impl Read for Source {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
match self {
#[cfg(not(unix))]
Self::Stdin(stdin) => stdin.read(buf),
Self::File(f) => f.read(buf),
#[cfg(unix)]
Self::StdinFile(f) => f.read(buf),
#[cfg(unix)]
Self::Fifo(f) => f.read(buf),
}
}
@ -193,7 +265,10 @@ struct Input<'a> {
impl<'a> Input<'a> {
/// Instantiate this struct with stdin as a source.
fn new_stdin(settings: &'a Settings) -> UResult<Self> {
#[cfg(not(unix))]
let mut src = Source::Stdin(io::stdin());
#[cfg(unix)]
let mut src = Source::stdin_as_file();
if settings.skip > 0 {
src.skip(settings.skip)?;
}
@ -266,10 +341,10 @@ fn make_linux_iflags(iflags: &IFlags) -> Option<libc::c_int> {
flag |= libc::O_SYNC;
}
if flag != 0 {
Some(flag)
} else {
if flag == 0 {
None
} else {
Some(flag)
}
}
@ -297,6 +372,29 @@ impl<'a> Read for Input<'a> {
}
impl<'a> Input<'a> {
/// Discard the system file cache for the given portion of the input.
///
/// `offset` and `len` specify a contiguous portion of the input.
/// This function informs the kernel that the specified portion of
/// the input file is no longer needed. If not possible, then this
/// function prints an error message to stderr and sets the exit
/// status code to 1.
#[allow(unused_variables)]
fn discard_cache(&self, offset: libc::off_t, len: libc::off_t) {
#[cfg(target_os = "linux")]
{
show_if_err!(self
.src
.discard_cache(offset, len)
.map_err_context(|| "failed to discard cache for: 'standard input'".to_string()));
}
#[cfg(not(target_os = "linux"))]
{
// TODO Is there a way to discard filesystem cache on
// these other operating systems?
}
}
/// Fills a given buffer.
/// Reads in increments of 'self.ibs'.
/// The start of each ibs-sized read follows the previous one.
@ -318,13 +416,13 @@ impl<'a> Input<'a> {
_ => break,
}
}
buf.truncate(bytes_total);
Ok(ReadStat {
reads_complete,
reads_partial,
// Records are not truncated when filling.
records_truncated: 0,
bytes_total: bytes_total.try_into().unwrap(),
})
}
@ -335,6 +433,7 @@ impl<'a> Input<'a> {
let mut reads_complete = 0;
let mut reads_partial = 0;
let mut base_idx = 0;
let mut bytes_total = 0;
while base_idx < buf.len() {
let next_blk = cmp::min(base_idx + self.settings.ibs, buf.len());
@ -343,11 +442,13 @@ impl<'a> Input<'a> {
match self.read(&mut buf[base_idx..next_blk])? {
0 => break,
rlen if rlen < target_len => {
bytes_total += rlen;
reads_partial += 1;
let padding = vec![pad; target_len - rlen];
buf.splice(base_idx + rlen..next_blk, padding.into_iter());
}
_ => {
rlen => {
bytes_total += rlen;
reads_complete += 1;
}
}
@ -360,6 +461,7 @@ impl<'a> Input<'a> {
reads_complete,
reads_partial,
records_truncated: 0,
bytes_total: bytes_total.try_into().unwrap(),
})
}
}
@ -448,6 +550,33 @@ impl Dest {
_ => Ok(()),
}
}
/// Discard the system file cache for the given portion of the destination.
///
/// `offset` and `len` specify a contiguous portion of the
/// destination. This function informs the kernel that the
/// specified portion of the destination is no longer needed. If
/// not possible, then this function returns an error.
#[cfg(target_os = "linux")]
fn discard_cache(&self, offset: libc::off_t, len: libc::off_t) -> nix::Result<()> {
match self {
Self::File(f, _) => {
let advice = PosixFadviseAdvice::POSIX_FADV_DONTNEED;
posix_fadvise(f.as_raw_fd(), offset, len, advice)
}
_ => Err(Errno::ESPIPE), // "Illegal seek"
}
}
/// The length of the data destination in number of bytes.
///
/// If it cannot be determined, then this function returns 0.
fn len(&self) -> std::io::Result<i64> {
match self {
Self::File(f, _) => Ok(f.metadata()?.len().try_into().unwrap_or(i64::MAX)),
_ => Ok(0),
}
}
}
/// Decide whether the given buffer is all zeros.
@ -581,6 +710,29 @@ impl<'a> Output<'a> {
Ok(Self { dst, settings })
}
/// Discard the system file cache for the given portion of the output.
///
/// `offset` and `len` specify a contiguous portion of the output.
/// This function informs the kernel that the specified portion of
/// the output file is no longer needed. If not possible, then
/// this function prints an error message to stderr and sets the
/// exit status code to 1.
#[allow(unused_variables)]
fn discard_cache(&self, offset: libc::off_t, len: libc::off_t) {
#[cfg(target_os = "linux")]
{
show_if_err!(self
.dst
.discard_cache(offset, len)
.map_err_context(|| "failed to discard cache for: 'standard output'".to_string()));
}
#[cfg(not(target_os = "linux"))]
{
// TODO Is there a way to discard filesystem cache on
// these other operating systems?
}
}
/// Write the given bytes one block at a time.
///
/// This may write partial blocks (for example, if the underlying
@ -674,6 +826,27 @@ fn dd_copy(mut i: Input, mut o: Output) -> std::io::Result<()> {
// Optimization: if no blocks are to be written, then don't
// bother allocating any buffers.
if let Some(Num::Blocks(0) | Num::Bytes(0)) = i.settings.count {
// Even though we are not reading anything from the input
// file, we still need to honor the `nocache` flag, which
// requests that we inform the system that we no longer
// need the contents of the input file in a system cache.
//
// TODO Better error handling for overflowing `len`.
if i.settings.iflags.nocache {
let offset = 0;
#[allow(clippy::useless_conversion)]
let len = i.src.len()?.try_into().unwrap();
i.discard_cache(offset, len);
}
// Similarly, discard the system cache for the output file.
//
// TODO Better error handling for overflowing `len`.
if i.settings.oflags.nocache {
let offset = 0;
#[allow(clippy::useless_conversion)]
let len = o.dst.len()?.try_into().unwrap();
o.discard_cache(offset, len);
}
return finalize(&mut o, rstat, wstat, start, &prog_tx, output_thread);
};
@ -687,6 +860,13 @@ fn dd_copy(mut i: Input, mut o: Output) -> std::io::Result<()> {
// This avoids the need to query the OS monotonic clock for every block.
let alarm = Alarm::with_interval(Duration::from_secs(1));
// Index in the input file where we are reading bytes and in
// the output file where we are writing bytes.
//
// These are updated on each iteration of the main loop.
let mut read_offset = 0;
let mut write_offset = 0;
// The main read/write loop.
//
// Each iteration reads blocks from the input and writes
@ -706,6 +886,30 @@ fn dd_copy(mut i: Input, mut o: Output) -> std::io::Result<()> {
}
let wstat_update = o.write_blocks(&buf)?;
// Discard the system file cache for the read portion of
// the input file.
//
// TODO Better error handling for overflowing `offset` and `len`.
let read_len = rstat_update.bytes_total;
if i.settings.iflags.nocache {
let offset = read_offset.try_into().unwrap();
let len = read_len.try_into().unwrap();
i.discard_cache(offset, len);
}
read_offset += read_len;
// Discard the system file cache for the written portion
// of the output file.
//
// TODO Better error handling for overflowing `offset` and `len`.
let write_len = wstat_update.bytes_total;
if o.settings.oflags.nocache {
let offset = write_offset.try_into().unwrap();
let len = write_len.try_into().unwrap();
o.discard_cache(offset, len);
}
write_offset += write_len;
// Update the read/write stats and inform the progress thread once per second.
//
// If the receiver is disconnected, `send()` returns an
@ -757,6 +961,7 @@ fn finalize<T>(
}
#[cfg(any(target_os = "linux", target_os = "android"))]
#[allow(clippy::cognitive_complexity)]
fn make_linux_oflags(oflags: &OFlags) -> Option<libc::c_int> {
let mut flag = 0;
@ -789,10 +994,10 @@ fn make_linux_oflags(oflags: &OFlags) -> Option<libc::c_int> {
flag |= libc::O_SYNC;
}
if flag != 0 {
Some(flag)
} else {
if flag == 0 {
None
} else {
Some(flag)
}
}
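Both `discard_cache` implementations above reduce to a single `posix_fadvise(POSIX_FADV_DONTNEED)` call on the underlying file descriptor; on non-Linux targets the wrappers simply do nothing, as the TODO comments note. Stripped of the dd plumbing, the Linux-only pattern looks roughly like this (sketch; assumes the same `nix` and `libc` crates the diff relies on):

```rust
// Sketch of the `nocache` cache-discard pattern, Linux only.
#[cfg(target_os = "linux")]
fn drop_page_cache(
    file: &std::fs::File,
    offset: libc::off_t,
    len: libc::off_t,
) -> nix::Result<()> {
    use nix::fcntl::{posix_fadvise, PosixFadviseAdvice};
    use std::os::unix::io::AsRawFd;

    // Tell the kernel the given byte range is no longer needed in the page cache.
    posix_fadvise(
        file.as_raw_fd(),
        offset,
        len,
        PosixFadviseAdvice::POSIX_FADV_DONTNEED,
    )
}
```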
View file
@ -293,8 +293,9 @@ impl Parser {
}
}
#[allow(clippy::cognitive_complexity)]
fn parse_input_flags(&mut self, val: &str) -> Result<(), ParseError> {
let mut i = &mut self.iflag;
let i = &mut self.iflag;
for f in val.split(',') {
match f {
// Common flags
@ -303,7 +304,7 @@ impl Parser {
"directory" => linux_only!(f, i.directory = true),
"dsync" => linux_only!(f, i.dsync = true),
"sync" => linux_only!(f, i.sync = true),
"nocache" => return Err(ParseError::Unimplemented(f.to_string())),
"nocache" => linux_only!(f, i.nocache = true),
"nonblock" => linux_only!(f, i.nonblock = true),
"noatime" => linux_only!(f, i.noatime = true),
"noctty" => linux_only!(f, i.noctty = true),
@ -324,8 +325,9 @@ impl Parser {
Ok(())
}
#[allow(clippy::cognitive_complexity)]
fn parse_output_flags(&mut self, val: &str) -> Result<(), ParseError> {
let mut o = &mut self.oflag;
let o = &mut self.oflag;
for f in val.split(',') {
match f {
// Common flags
@ -334,7 +336,7 @@ impl Parser {
"directory" => linux_only!(f, o.directory = true),
"dsync" => linux_only!(f, o.dsync = true),
"sync" => linux_only!(f, o.sync = true),
"nocache" => return Err(ParseError::Unimplemented(f.to_string())),
"nocache" => linux_only!(f, o.nocache = true),
"nonblock" => linux_only!(f, o.nonblock = true),
"noatime" => linux_only!(f, o.noatime = true),
"noctty" => linux_only!(f, o.noctty = true),
@ -355,7 +357,7 @@ impl Parser {
}
fn parse_conv_flags(&mut self, val: &str) -> Result<(), ParseError> {
let mut c = &mut self.conv;
let c = &mut self.conv;
for f in val.split(',') {
match f {
// Conversion
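The `nocache` change above simply turns those entries from "unimplemented" errors into normal boolean assignments inside the comma-split loop. The overall parsing shape, reduced to a self-contained sketch with made-up `Flags` and error types (the real code uses `IFlags`/`OFlags`, `ParseError`, and the `linux_only!` gate):

```rust
// Schematic version of the flag-list parsing: split on commas and set booleans.
#[derive(Default, Debug)]
struct Flags {
    direct: bool,
    nocache: bool,
    nonblock: bool,
}

fn parse_flags(val: &str) -> Result<Flags, String> {
    let mut f = Flags::default();
    for name in val.split(',') {
        match name {
            "direct" => f.direct = true,
            "nocache" => f.nocache = true, // now accepted instead of rejected as unimplemented
            "nonblock" => f.nonblock = true,
            other => return Err(format!("invalid flag: {other}")),
        }
    }
    Ok(f)
}

fn main() {
    println!("{:?}", parse_flags("direct,nocache").unwrap());
}
```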
Some files were not shown because too many files have changed in this diff.