mirror of
https://github.com/fish-shell/fish-shell
synced 2024-11-10 23:24:39 +00:00
Remove all of the documentation build helper scripts
This commit is contained in:
parent
0b26b55676
commit
c99fa08f21
10 changed files with 0 additions and 7996 deletions
2357
Doxyfile.help
2357
Doxyfile.help
File diff suppressed because it is too large
Load diff
2357
Doxyfile.user
2357
Doxyfile.user
File diff suppressed because it is too large
Load diff
|
@ -1,20 +0,0 @@
|
|||
#!/bin/sh

# Builds the commands.hdr file.
# Usage: build_commands_hdr.sh ${HELP_SRC} < commands_hdr.in > commands.hdr
#
# Concatenates every per-command help source into command_list.txt, builds a
# matching list of HTML anchors in command_list_toc.txt, then substitutes both
# into the template read on stdin at the @command_list_toc@ / @command_list@
# markers, writing the result to stdout.

rm -f command_list.tmp command_list_toc.tmp
# Sort the help sources in a fixed, locale-independent order so the generated
# documentation is reproducible. NOTE: the command substitution still
# word-splits the sorted list; help source paths must not contain spaces.
for i in $(printf "%s\n" "$@" | LC_ALL=C.UTF-8 sort); do
    echo "<hr>" >>command_list.tmp
    cat "$i" >>command_list.tmp
    echo >>command_list.tmp
    echo >>command_list.tmp
    NAME=$(basename "$i" .txt)
    echo '- <a href="#'$NAME'">'$NAME'</a>' >> command_list_toc.tmp
    # FIX: the trailing period was previously outside the quotes; keep it
    # inside so the emitted sentence reads "...command index</a>."
    echo "Back to <a href='index.html#toc-commands'>command index</a>." >>command_list.tmp
done
mv command_list.tmp command_list.txt
mv command_list_toc.tmp command_list_toc.txt
# Splice the generated fragments into the template (stdin -> stdout).
/usr/bin/env awk '{if ($0 ~ /@command_list_toc@/) { system("cat command_list_toc.txt"); }
else if ($0 ~ /@command_list@/){ system("cat command_list.txt");}
else{ print $0;}}'
|
|
@ -1,163 +0,0 @@
|
|||
#!/bin/sh

# Builds the fish man pages from doc_src with doxygen.
#
# Usage: build_documentation.sh [doxygen_file input_directory output_directory]
#   With no arguments, fish's default locations are used.
#
# Environment:
#   INPUT_FILTER       - optional path to the lexicon filter, used as
#                        doxygen's INPUT_FILTER when set.
#   FISH_BUILD_VERSION - version string; any "-suffix" is stripped to form
#                        doxygen's PROJECT_NUMBER.
#
# Exits 0 (without building) when doxygen is not installed; otherwise exits
# with doxygen's status.
#
# This script is run as part of the build process

if test $# -eq 0
then
    # Use fish's defaults
    DOXYFILE=Doxyfile.help
    INPUTDIR=doc_src
    OUTPUTDIR=share
    echo "Using defaults: $0 ${DOXYFILE} ${INPUTDIR} ${OUTPUTDIR}"
elif test $# -eq 3
then
    DOXYFILE="$1"
    INPUTDIR="$2"
    OUTPUTDIR="$3"
else
    echo "Usage: $0 doxygen_file input_directory output_directory"
    exit 1
fi

# Determine which man pages we don't want to generate.
# on OS X, don't make a man page for open, since we defeat fish's open function on OS X.
# This is also done in the Makefile, but the Xcode build doesn't use that
CONDEMNED_PAGES=
if test `uname` = 'Darwin'; then
    CONDEMNED_PAGES="$CONDEMNED_PAGES open.1"
fi

# Helper function to turn a relative path into an absolute path
resolve_path()
{
    D=`command dirname "$1"`
    B=`command basename "$1"`
    echo "`cd \"$D\" 2>/dev/null && pwd || echo \"$D\"`/$B"
}

# Expand relative paths
DOXYFILE=`resolve_path "$DOXYFILE"`
INPUTDIR=`resolve_path "$INPUTDIR"`
INPUTFILTER=`resolve_path "$INPUT_FILTER"`
OUTPUTDIR=`resolve_path "$OUTPUTDIR"`

# NOTE(review): label padding may have been collapsed by the mirror this
# file was recovered from; upstream aligned these columns.
echo " doxygen file: $DOXYFILE"
echo " input directory: $INPUTDIR"
echo " input filter: $INPUTFILTER"
echo " output directory: $OUTPUTDIR"
echo " skipping: $CONDEMNED_PAGES"

#Until now the makefile likely has been affecting our output, reset for upcoming warnings
tput sgr0

# Make sure INPUTDIR is found
if test ! -d "$INPUTDIR"; then
    echo >&2 "Could not find input directory '${INPUTDIR}'"
    exit 1
fi

# Make sure doxygen is found
DOXYGENPATH=`command -v doxygen`
if test -z "$DOXYGENPATH" ; then
    # Not on PATH: probe the usual install locations (macOS app bundles too).
    for i in /usr/local/bin/doxygen /opt/bin/doxygen /Applications/Doxygen.app/Contents/Resources/doxygen ~/Applications/Doxygen.app/Contents/Resources/doxygen ; do
        if test -f "$i"; then
            DOXYGENPATH="$i"
            break
        fi
    done
fi

if test -z "$DOXYGENPATH"; then
    echo >&2 "doxygen is not installed, so documentation will not be built."
    exit 0
fi

# Check we have the lexicon filter
if test -z "$INPUT_FILTER"; then
    echo >&2 "Lexicon filter is not available. Continuing without."
    INPUTFILTER=''
fi

# Determine where our output should go
if ! mkdir -p "${OUTPUTDIR}" ; then
    echo "Could not create output directory '${OUTPUTDIR}'"
fi

# Make a temporary directory
TMPLOC=`mktemp -d -t fish_doc_build_XXXXXX` || { echo >&2 "Could not build documentation because mktemp failed"; exit 1; }

# Copy stuff to the temp directory
# Each doc_src/*.txt becomes a .doxygen file wrapped in a /** \page ... */
# block; the \section rename avoids anchor collisions with the HTML docs.
for i in "$INPUTDIR"/*.txt; do
    BASENAME=`basename $i .txt`
    INPUTFILE=$TMPLOC/$BASENAME.doxygen
    echo "/** \\page" $BASENAME > $INPUTFILE
    cat $i | sed "s/\\\section $BASENAME $BASENAME/\\\section $BASENAME-man $BASENAME/" >> $INPUTFILE
    echo "*/" >> $INPUTFILE
done

# Make some extra stuff to pass to doxygen
# Input is kept as . because we cd to the input directory beforehand
# This prevents doxygen from generating "documentation" for intermediate directories
PROJECT_NUMBER=$(echo "$FISH_BUILD_VERSION" | env sed "s/-[a-z0-9-]*//")
echo "PROJECT_NUMBER: $FISH_BUILD_VERSION"
DOXYPARAMS=$(cat <<EOF
PROJECT_NUMBER=${PROJECT_NUMBER}
INPUT_FILTER=$INPUTFILTER
INPUT=.
OUTPUT_DIRECTORY=$OUTPUTDIR
QUIET=YES
EOF
);

# echo "$DOXYPARAMS"

# Clear out the output directory first
find "${OUTPUTDIR}" -name "*.1" -delete

# Run doxygen
cd "$TMPLOC"
(cat "${DOXYFILE}" ; echo "$DOXYPARAMS";) | "$DOXYGENPATH" -

# Remember errors
# RESULT holds the exit status of the doxygen pipeline above.
RESULT=$?

# NOTE(review): no error check on this cd; if doxygen produced no man/man1
# directory the postprocess sed below would run in the wrong directory.
cd "${OUTPUTDIR}/man/man1/"
if test "$RESULT" = 0 ; then

    # Postprocess the files
    for i in "$INPUTDIR"/*.txt; do
        # This command turns the following weirdness from Doxygen:
        # abbr \-
        # .SH "abbr - manage fish abbreviations"
        # into
        # \fBabbr\fP - manage fish abbreviations
        # It would be nice to use -i here for edit in place, but that is not portable
        CMD_NAME=`basename "$i" .txt`;
        sed -E < ${CMD_NAME}.1 > ${CMD_NAME}.1.tmp \
            -e "/^.SH NAME/{
            N; N
            s/${CMD_NAME} \\\\- \n.SH \"${CMD_NAME} (- .*)\"/\\\fB${CMD_NAME}\\\fP \1/g
            }"
        mv "${CMD_NAME}.1.tmp" "${CMD_NAME}.1"
    done

    # Erase condemned pages
    # Deliberately unquoted: CONDEMNED_PAGES is a space-separated list.
    rm -f $CONDEMNED_PAGES
fi

# Destroy TMPLOC
if test "$RESULT" -ne 0; then
    echo "Cleaning up '$TMPLOC'"
fi
rm -Rf "$TMPLOC"

if test "$RESULT" -ne 0; then
    tput smso 2> /dev/null || true
    echo "Doxygen failed creating manpages. See the output log for details."
    tput sgr0 2> /dev/null || true
else
    tput bold 2> /dev/null || true
    echo Built manpages
    tput sgr0 2> /dev/null || true
fi
exit $RESULT
|
|
@ -1,5 +0,0 @@
|
|||
#!/bin/sh

# Reads a template on stdin and replaces any line containing the @toc@
# marker with the contents of the table-of-contents file given as $1,
# writing the result to stdout.
# Usage: build_index_hdr.sh toc.txt < index.hdr.in > index.hdr

TOC_TXT=$1
# FIX: the filename was previously interpolated into the awk program text
# inside a double-quoted string; a path containing quotes or awk
# metacharacters would corrupt (or inject into) the program. Pass it via
# -v instead, and quote it for the shell that system() spawns.
env awk -v toc_txt="$TOC_TXT" '{if ($0 ~ /@toc@/){ system("cat \"" toc_txt "\"");} else{ print $0;}}'
|
||||
|
|
@ -1,51 +0,0 @@
|
|||
#!/bin/sh

# Builds the lexicon filter
# Usage: build_lexicon_filter.sh FUNCTIONS_DIR COMPLETIONS_DIR lexicon_filter.in [SED_BINARY] > lexicon_filter

set -e

# To enable the lexicon filter, we first need to be aware of what fish
# considers to be a command, function, or external binary. We use
# command_list_toc.txt for the base commands. Scan the share/functions
# directory for other functions, some of which are mentioned in the docs, and
# use /share/completions to find a good selection of binaries. Additionally,
# colour defaults from __fish_config_interactive to set the docs colours when
# used in a 'cli' style context.
rm -f lexicon.tmp lexicon_catalog.tmp lexicon_catalog.txt lexicon.txt

FUNCTIONS_DIR=${1}
FUNCTIONS_DIR_FILES=${1}/*.fish
COMPLETIONS_DIR_FILES=${2}/*.fish
LEXICON_FILTER_IN=${3}

SED=${4:-$(command -v sed)}

# Scan sources for commands/functions/binaries/colours. If GNU sed was portable, this could be much smarter.
$SED <command_list_toc.txt >>lexicon.tmp -n \
    -e "s|^.*>\([a-z][a-z_]*\)</a>|'\1'|w lexicon_catalog.tmp" \
    -e "s|'\(.*\)'|bltn \1|p"; mv lexicon_catalog.tmp lexicon_catalog.txt
printf "%s\n" ${COMPLETIONS_DIR_FILES} | $SED -n \
    -e "s|[^ ]*/\([a-z][a-z_-]*\).fish|'\1'|p" | grep -F -vx -f lexicon_catalog.txt | $SED >>lexicon.tmp -n \
    -e 'w lexicon_catalog.tmp' \
    -e "s|'\(.*\)'|cmnd \1|p"; cat lexicon_catalog.tmp >> lexicon_catalog.txt;
printf "%s\n" ${FUNCTIONS_DIR_FILES} | $SED -n \
    -e "s|[^ ]*/\([a-z][a-z_-]*\).fish|'\1'|p" | grep -F -vx -f lexicon_catalog.txt | $SED >>lexicon.tmp -n \
    -e 'w lexicon_catalog.tmp' \
    -e "s|'\(.*\)'|func \1|p";
# FIX: the end-of-line anchor here was written as '$$', a leftover from
# Makefile '$$'-escaping. In a standalone sed BRE, '.*$$' requires a literal
# '$' before end-of-line, so no set_default line ever matched and no 'clrv'
# colour entries were emitted. A single '$' anchors correctly.
$SED < "${FUNCTIONS_DIR}"/__fish_config_interactive.fish >>lexicon.tmp -n \
    -e '/set_default/s/.*\(fish_[a-z][a-z_]*\).*$/clrv \1/p'; \
$SED < "${LEXICON_FILTER_IN}" >>lexicon.tmp -n \
    -e '/^#.!#/s/^#.!# \(.... [a-z][a-z_]*\)/\1/p';
mv lexicon.tmp lexicon.txt; rm -f lexicon_catalog.tmp lexicon_catalog.txt;

# Copy the filter to stdout. We're going to append sed commands to it after.
$SED -e 's|@sed@|'"$SED"'|' < "${LEXICON_FILTER_IN}"

# Scan through the lexicon, transforming each line to something useful to Doxygen.
# Probe which word-boundary syntax this sed supports: [[:<:]]/[[:>:]] (BSD)
# or \< / \> (GNU).
if echo x | $SED "/[[:<:]]x/d" 2>/dev/null; then
    WORDBL='[[:<:]]'; WORDBR='[[:>:]]';
else
    WORDBL='\\<'; WORDBR='\\>';
fi;
$SED < lexicon.txt -n -e "s|^\([a-z][a-z][a-z][a-z]\) \([a-z_-]*\)$|s,$WORDBL\2$WORDBR,@\1{\2},g|p" -e '$G;s/.*\n/b tidy/p';
|
|
@ -1,17 +0,0 @@
|
|||
#!/bin/sh

# Builds toc.txt: one HTML anchor line per \page and \section found in the
# given .hdr/.hdr.in sources.
# Usage: build_toc_txt.sh $(HDR_FILES:index.hdr=index.hdr.in) > toc.txt
# Environment: FISH_BUILD_VERSION - shown in the main-page TOC title.

# Ugly hack to set the toc initial title for the main page
echo "- <a href=\"index.html\" id=\"toc-index\">fish shell documentation - ${FISH_BUILD_VERSION}</a>" > toc.txt
# The first sed command captures the page name, followed by the description
# The second sed command captures the command name \1 and the description \2, but only up to a dash
# This is to reduce the size of the TOC in the command listing on the main page
for i in "$@"; do
    # Strip either extension; only one of the two basename calls will match.
    NAME=$(basename "$i" .hdr)
    NAME=$(basename "$NAME" .hdr.in)
    env sed <"$i" >>toc.txt -n \
        -e 's,.*\\page *\([^ ]*\) *\(.*\)$,- <a href="'$NAME'.html" id="toc-'$NAME'">\2</a>,p' \
        -e 's,.*\\section *\([^ ]*\) *\(.*\) - .*$, - <a href="'$NAME'.html#\1">\2</a>,p' \
        -e 's,.*\\section *\([^ ]*\) *\(.*\)$, - <a href="'$NAME'.html#\1">\2</a>,p'
done
|
|
@ -1,16 +0,0 @@
|
|||
#!/bin/sh

# Builds the HTML user documentation with doxygen, then prunes generated
# assets that fish does not ship. Touches 'user_doc' on success so make can
# track freshness.
# Usage: build_user_doc.sh Doxyfile.user lexicon_filter
# Environment: FISH_BUILD_VERSION - version string; its "-suffix" is
#              stripped to form doxygen's PROJECT_NUMBER.
DOXYFILE=$1
LEXICON_FILTER=$2

# Feed doxygen the Doxyfile plus two overrides. Note the pipe placement:
# the sed applies only to the PROJECT_NUMBER echo (its line-continuation
# joins it to the sed), not to the whole group.
# FIX: quote ${FISH_BUILD_VERSION} so an unusual version string cannot be
# word-split or glob-expanded by echo.
(cat "${DOXYFILE}" ;\
echo INPUT_FILTER="${LEXICON_FILTER}"; \
echo PROJECT_NUMBER="${FISH_BUILD_VERSION}" \
| /usr/bin/env sed "s/-[a-z0-9-]*//") \
| doxygen - && touch user_doc

# Remove doxygen furniture we don't use. If doxygen failed, the cd fails
# and the '&&' skips the rm; the subshell keeps the cwd change contained.
(cd ./user_doc/html/ && \
rm -f bc_s.png bdwn.png closed.png doc.png folder*.png ftv2*.png \
nav*.png open.png splitbar.png sync_*.png tab*.* doxygen.* \
dynsections.js jquery.js pages.html)
|
|
@ -1,652 +0,0 @@
|
|||
#! @sed@ -f
#.
# NOTE(review): recovered from a mirror scrape; original indentation inside
# the brace blocks was lost, and HTML entities in the replacement texts near
# the end ("Mark up sesitive character entities") appear to have been decoded
# by the mirror - verify those substitutions against upstream fish-shell.
#.
# A Doxygen filter for building Fish's lexicon, for documentation bling.
#.
# Written specially for Fish, the shell for the 90's, in sed, the state of the
# art text processor from the 70's. Who's sed? sed's dead, baby, sed's dead.*
# by Mark Griffiths <mark@thebespokepixel.com> *but quite portable
#.
# Finds \fish..\endfish blocks in documentation source files and enhances
# markup. Requires that the four character word 'classes' declared here are
# added to Doxyfiles as aliases i.e.:
#.
# Enhance for HTML Help pages (Doxyfile.user)…
# ALIASES = "fish=\htmlonly[block] \n<pre class=\"fish\">"
# ALIASES += "fish{1}=\htmlonly[block] \n<pre class=\"fish \1\">"
# ALIASES += "endfish=</pre>\endhtmlonly \n"
#.
# ALIASES += "blah{1}=<span class=\"comment\">\1</span>"
# ALIASES += "bltn{1}=<span class=\"command\">\1</span>" and so on...
#.
# And simplify for man pages (Doxyfile.help)…
# ALIASES = "fish=<pre>"
# ALIASES += "fish{1}=<pre>"
# ALIASES += "endfish=</pre>"
#.
# ALIASES += "blah{1}=\1"
# ALIASES += "bltn{1}=<em>\1</em>"...
#.
# It's meant to only ever be run once, during make, as Doxygen's 'INPUT
# FILTER', though can be run interactively by passing a file in via stdin. It
# wont respond to arguments.
#.
# It's most easily tested by passing test strings into the compiled script:
#.
# echo "/fish Line to test" | ./fish_lexicon_filter
#.
# The, at times, archiac looking regex is down to ensuring portable sed BREs
#.
# This code is licensed with fish under the GPL 2.0.
#.
# Pattern flow control for scanning doc.h
/\\fish/,/\\endfish/ {
# Open \fish block, firstly it it's on it's own line
/^\\fish$/b
/^\\fish{[^}]*}$/b
# Then if it's inline. Remove and process immediately...
/^\\fish.*$/ {
# Catch @ symbol
s/@/@at/g
# Catch & symbol
s/&\([^a-z]\)/@amp\1/g
# Catch {{ & }} symbols
s/{{/@curlyL/g
s/}}/@curlyR/g
s/^\\fish//
s/\\endfish//
b html
}
# Output blank lines
/^$/b
# Inside \fish block. Process...
/\\endfish/!{
# Catch @ symbol
s/@/@at/g
# Catch & symbol
s/&\([^a-z]\)/@amp\1/g
# Catch {{ & }} symbols
s/{{/@curlyL/g
s/}}/@curlyR/g
# Preprocess HTML and HTML-like formatting
/<[^>]*>/ {
b html
}
# Process the rest
b process
}
# End block
/\\endfish/b
}
#.
# This is not the pattern we're looking for
b
#.
# Process any HTML tags.
# Structured to reduce sed's greediness.
:html
# Spans
s|<span style=['"]\([^'"][^'"]*\)">|@span{\1,|
s|<span class=['"]\([^'"][^'"]*\)">|@spcl{\1,|
s|</span>|}|
#.
# Bold
s|<b>|@bold{|
s|<b [^>]*>|@bold{|
s|</b>|}|
#.
# Strong (synonimous with emphasis)
s|<strong>|@bold{|
s|<strong [^>]*>|@bold{|
s|</strong>|}|
#.
# EMPHasis
s|<em>|@emph{|
s|<em [^>]*>|@emph{|
s|</em>|}|
#.
# Italic (synonimous with emphasis)
s|<i>|@emph{|
s|<i [^>]*>|@emph{|
s|</i>|}|
#.
# UNDeRline
s|<u>|@undr{|
s|<u [^>]*>|@undr{|
s|</u>|}|
# Backslash (when escaping output)
s|<bs>|@bksl{|
s|</bs>|}|
t html
#.
# Some handy non-standard extensions
# autoSuGgeSTion
s|<s>|@sgst{|
s|<s [^>]*>|@sgst{|
s|</s>|}|
#.
# MaTCH
s|<m>|@mtch{|
s|<m [^>]*>|@mtch{|
s|</m>|}|
#.
# SearchMaTCh
s|<sm>|@smtc{|
s|<sm [^>]*>|@smtc{|
s|</sm>|}|
#.
# ERrOR
s|<eror>|@eror{|
s|<eror [^>]*>|@eror{|
s|</eror>|}|
#.
# AsIs - protect from auto-formatting
s|<asis>|@asis{|
s|</asis>|}|
#.
# OUTPut - protect from auto-formatting
s|<outp>|@outp{|
s|</outp>|}|
t html
#.
# Clean other unhandled html
s|<\([A-Za-z][A-Za-z]*\)[^>]*>\([^<]*\)</\1>|\2|
t html
#.
# Start processing entities
:process
# Output:
# Line marked as output pass through
/@outp/ {
b
}
# Comments:
# Capture full line comments
/^\( *\)#\(.*\)$/ {
# Assume any line starting with a # is complete
s//\1@blah{\2}/
t
}
# Match sub-line comments
/#[0-9a-fA-F][0-9a-fA-F][0-9a-fA-F]/ ! {
s/#\(.*$\)/\\\
<@blah{#\1}\
/
}
#.
# Protected entities These shouldn't allow nested structure, so we move them
# to a marked, new line for a future extract/process/insert action.
#.
# AsIs block - resists formatting.
s/@asis{\(.*\)}/\\\
<@asis{\1}\
/g
#.
# Manual <span>
s/@span{\(.*\)}/\\\
<@span{\1}\
/g
#.
# String Literals
s/"\([^"]*\)"/\\\
<@dblq{\1}\
/g
s/'\([^']*\)'/\\\
<@sglq{\1}\
/g
#.
# AutoSuggestions.
s/@sgst{\([^}]*\)}/\\\
<@sgst{\1}\
/
#.
# Command/Function options
# Short options
s/\([[( ]\)-\([A-Za-z][A-Za-z]*\)\([^A-Za-z}]\)/\1\\\
<@opts{-\2}\
\3/g
#.
# Long options
s/\([[( ]\)--\([A-Za-z][A-Za-z0-9=_-]*\)\([^A-Za-z0-9=_-]*\)/\1\\\
<@opts{--\2}\
\3/g
#.
# Prompt
s/~>_/\\\
<@prmt{\
<@path{~}\
}/
s/^>_/@prmt/
#.
# Cursor
#.
s/___$/@curs/
s/___\(.\)/\\\
<@curs{\1}\
/
#.
# Escaped Options
s/ \\\([A-Za-z0-9][A-Za-z0-9]*\) / @bksl{\1} /g
#.
# Trailing Backslash
s/ \\$/ @bksl{ }/
#.
# Paths
/\n<@dblq[^}]*[~/]/b protect
/\n<@sglq[^}]*[~/]/b protect
/\n<@span[^}]*[~/]/b protect
#.
# Normal Directory
s|mkdir |mkdir :|
s|\([~/:][/]*[.A-Za-z_0-9*/-]*\)\\ |\1=|g
s| \([~/][/]*[.A-Za-z_0-9*/=-]*\)| \\\
<@path{\1}\
|g
s| \(:[/]*[.A-Za-z_0-9*/=-]*\)| \\\
<@path{\1}\
|g
t protect
#.
# Dot Relative Directory (no spaces in path)
s| \(./[A-Za-z_0-9/-]*\)| \\\
<@path{\1}\
|g
b protect
#.
# Tidy up. Merge back 'pure' entities from hold space.
:tidy
#.
# Convert loose text to arguments
s/ \([a-zA-Z0-9+%*.-][{},a-zA-Z0-9%*._/?!=-]*\)/ @args{\1}/g
#.
# Or when tight to a newline
s|\n\([a-zA-Z0-9+%*.-][{},a-zA-Z0-9%*._/?!-]*\)|\
@args{\1}|g
#.
# Or when tight to the beginning
s|^\([a-zA-Z][{},a-zA-Z0-9%*._/?!-]*\)|@args{\1}|g
#.
# Pick up loose text after markup.
s/\([})]\)\([a-zA-Z0-9+%*.,][,a-zA-Z0-9%*._/?!-]*\);/\1@args{\2};/g
s/\([})]\)\([a-zA-Z0-9+%*.,][,a-zA-Z0-9%*._/?!-]*\)$/\1@args{\2}/g
s/\([})]\)\([a-zA-Z0-9+%*.,][,a-zA-Z0-9%*._/?!-]*\)@EOL/\1@args{\2}/g
#.
# Uncomment the following 2 lines (ss) to log the pattern buffer.
s/^.*$/Pattern : &/w lexicon.log
s/^Pattern : //
#.
# Uncomment the following 4 lines (xssx) to log the hold buffer.
x
s/^.*$/HoldBufr: &/w lexicon.log
s/^HoldBufr: //
x
#.
# Tack the hold space to the end of the pattern buffer.
G
#.
# Uncomment the folowing two lines (ss) to log the buffer join.
s/^.*$/Joined : &/w lexicon.log
s/^Joined : //
#.
# Iterate over alternate lines, matching '<' to '\'
:join
s,\([^\\ ]*\)\\\n\([^<]*\)<\(@[^}]*[}\\]\),\1\3\2,
t join
# Clean up stray new lines
s/\n//g
# Clean up past @EOL
s/@EOL.*$//g
#.
# Uncomment the folowing two lines (ss) to log the buffer before 'cleaning'.
s/^.*$/PreClean: &/w lexicon.log
s/^PreClean: //
# Clean up special cases
#.
/@blah/{
s/\(blah{[^@]*\)@sglq{\([^}]*\)}/\1'\2'/
s/\(blah{[^@]*\)@dblq{\([^}]*\)}/\1"\2"/
s/\(blah{[^@]*\)@....{\([^}]*\)}/\1\2/
}
/@dblq/{
:cleandblq
s/\(dblq{[^@}<]*\)[<]*@...[^q]{\([^}]*\)}/\1\2/
t cleandblq
}
/@sglq/{
:cleansglq
s/\(sglq{[^@}<]*\)[<]*@...[^q]{\([^}]*\)}/\1\2/
t cleansglq
}
/@vars/{
:cleanvars
s/\(vars{@optr{$}[^@}]*\)@bltn{\([^}]*\)}/\1\2/
s/\(vars{@optr{$}[^@}]*\)@func{\([^}]*\)}/\1\2/
s/\(vars{@optr{$}[^@}]*\)@cmnd{\([^}]*\)}/\1\2/
s/\(vars{@optr{$}[^@}]*\)@args{\([^}]*\)}/\1\2/
t cleanvars
}
/@redr/{
:cleanredr
s/\(redr{[^@}]*\)@bltn{\([^}]*\)}/\1\2/
s/\(redr{[^@}]*\)@func{\([^}]*\)}/\1\2/
s/\(redr{[^@}]*\)@cmnd{\([^}]*\)}/\1\2/
s/\(redr{[^@}]*\)@fsfo{\([^}]*\)}/\1\2/
s/\(redr{[^}]*\)}\( *\)@path{\([^}]*\)/\1\2\3/
t cleanredr
}
/@sgst/{
s/@sgst{<@/@sgst{@/
:cleansgst
s/\(sgst{@curs{.}[^@]*\)@bltn{\([^}]*\)}/\1\2/
s/\(sgst{@curs{.}[^@]*\)@func{\([^}]*\)}/\1\2/
s/\(sgst{@curs{.}[^@]*\)@cmnd{\([^}]*\)}/\1\2/
s/\(sgst{@curs{.}[^@]*\)@opts{\([^}]*\)}/\1\2/
s/\(sgst{@curs{.}[^@]*\)@path{\([^}]*\)}/\1\2/
s/\(sgst{@curs{.}[^@]*\)@args{\([^}]*\)}/\1\2/
s/\(sgst{@curs{.}[^@]*\)@fsfo{\([^}]*\)}/\1\2/
t cleansgst
}
/@fsfo/{
:cleanfsfo
s/\(fsfo{[^@}]*\)@bltn{\([^}]*\)}/\1\2/
s/\(fsfo{[^@}]*\)@func{\([^}]*\)}/\1\2/
s/\(fsfo{[^@}]*\)@cmnd{\([^}]*\)}/\1\2/
t cleanfsfo
}
/@prmt{/{
s/@prmt{<@path/@prmt{@path/
}
#.
# Restore Paths
/@fsfo/ {
s/\(@fsfo{[^=]*\)=/\1 /
}
/@path/ {
:cleanpath
s/\(@path{[^:]*\):/\1/
s/\(@path{[^=]*\)=/\1\\ /
t cleanpath
s/@path{}//
}
#.
# Finally, restructure to follow Fish's command [arguments] semantics.
# Find the initial command, and change any others to arguments, up to a |, ( or ;
# Assumes that a valid line will start with either a builtin, a function or a binary.
#.
# 'if' and 'for' seem to be special cases
#.
# Uncomment the folowing two lines (ss) to log the buffer before semantic conversion.
s/^.*$/PreArgs : &/w lexicon.log
s/^PreArgs : //
#.
# Find initial commands/functions/binaries
#.
# Store prmt, if present
#.
/@prmt/ {
h
s/^\(@prmt *\).*$/\1/
x
s/^@prmt *//
}
#.
# Special case for optional commands
s/@args{\[@bltn/@args{[@xbln/g
# Special case for one-line 'if' statements
/@bltn{if}/ {
s//@xbln{if}/
s/@bltn{set}/@xbln{set}/
s/@bltn{not}/@xbln{not}/
s/@bltn{else}/@xbln{else}/
s/@bltn{contains}/@xbln{contains}/
s/@bltn{test}/@xbln{test}/
s/@bltn{end}/@xbln{end}/
s/@cmnd{grep}/@xcmd{grep}/
}
# one-line 'for' statements
/@bltn{for}/ {
s//@xbln{for}/
s/@args{in}/@xbln{in}/
}
# one-line 'begin' statements
/@bltn{begin}/ {
s//@xbln{begin}/
s/@bltn{end}/@xbln{end}/
}
# one-line 'break' statements
/@bltn{break}/ {
s//@xbln{break}/
s/@bltn{end}/@xbln{end}/
}
# one-line 'continue' statements
/@bltn{continue}/ {
s//@xbln{continue}/
s/@bltn{end}/@xbln{end}/
}
# one-line 'switch' statements
/@bltn{switch}/ {
s//@xbln{switch}/
s/@bltn{case}/@xbln{case}/
s/@bltn{end}/@xbln{end}/
}
# one-line 'function' statements
/@bltn{function}/ {
s//@xbln{function}/
s/@bltn{return}/@xbln{return}/
s/@bltn{end}/@xbln{end}/
}
# one-line 'bind' statements - special input functions
/@bltn{bind}/ {
s//@xbln{bind}/
s/@....{\([a-z]*\)}\(-[a-z-]*\)/@args{\1\2}/
}
# one-line 'builtin' statements
s/@bltn{builtin} @bltn/@xbln{builtin} @xbln/g
s/@bltn{builtin} @cmnd/@xbln{builtin} @xcmd/g
s/@bltn{builtin} @func/@xbln{builtin} @xfnc/g
#.
# one-line 'command' statements
s/@bltn{command} @bltn/@xbln{command} @xbln/g
s/@bltn{command} @cmnd/@xbln{command} @xcmd/g
s/@bltn{command} @func/@xbln{command} @xfnc/g
#.
# one-line 'and/or' statements
s/@bltn{and} @bltn/@xbln{and} @xbln/g
s/@bltn{and} @cmnd/@xbln{and} @xcmd/g
s/@bltn{and} @func/@xbln{and} @xfnc/g
s/@bltn{or} @bltn/@xbln{or} @xbln/g
s/@bltn{or} @cmnd/@xbln{or} @xcmd/g
s/@bltn{or} @func/@xbln{or} @xfnc/g
#.
# Promote the first word of a statement (start of line, or after ; ( | ) to
# an 'x' class; everything left as bltn/func/cmnd afterwards is an argument.
s/^\( *\)@cmnd/\1@xcmd/
s/\( *[;()] *\)@cmnd/\1@xcmd/g
s/\( *@redr{|} *\)@cmnd/\1@xcmd/g
s/^\( *\)@bltn/\1@xbln/
s/\( *[;()] *\)@bltn/\1@xbln/g
s/\( *@redr{|} *\)@bltn/\1@xbln/g
s/^\( *\)@func/\1@xfnc/
s/\( *[;()] *\)@func/\1@xfnc/g
s/\( *@redr{|} *\)@func/\1@xfnc/g
s/ @bksl{\([^}]*\)} / @args{@bksl{\1}} /g
s/ @bksl{@bltn{\([^}]*\)}/ @args{@bksl{\1}/g
s/ @bksl{@func{\([^}]*\)}/ @args{@bksl{\1}/g
s/ @bksl{@cmnd{\([^}]*\)}/ @args{@bksl{\1}/g
s/@bltn/@args/g
s/@func/@args/g
s/@cmnd/@args/g
#.
s/^.*$/PostArgs: &/w lexicon.log
s/^PostArgs: //
#.
# Demote the 'x' classes back to their real names.
s/xbln/bltn/g
s/xfnc/func/g
s/xcmd/cmnd/g
x
/^@prmt/ {
G
s/^@prmt \n/@prmt /
}
/^@prmt/ ! {
x
}
#.
# Mark up sesitive character entities.
# NOTE(review): these replacements appear entity-decoded by the mirror;
# upstream presumably wrote HTML entities on the right-hand sides.
#.
s/</\</g
s/>/\>/g
s/@amp/\&/g
s/@curlyL/\{/g
s/@curlyR/\}/g
s/@at/@atat{ }/g
#.
# Final post processing
s/};\([^]]\)/}@redr{;}\1/g
s/};$/}@redr{;}/
s/@sglq{}/''/
s/ \[\([@(]\)/ @args{[}\1/g
s/ \[\([A-Z]*\) / @args{[\1} /g
s/@args{\([a-zA-Z0-9_.]*\)}\]/@args{\1]}/g
s/@args{\([a-zA-Z0-9_.]*\)}: /@args{\1:} /g
s/@bltn{echo} @fsfo/@bltn{echo} @args/g
s/@bltn{echo}\([a-zA-Z0-9.@{} _-]*\)@fsfo/@bltn{echo}\1@args/g
s/ \] / @args{]} /g
s/ \]$/ @args{]}/g
s/\]}\]$/]]}/
s/\\\([()]\)/@optr{@bksl{\1}}/g
s/\([()]\)/@optr{\1}/g
s/\\\\\([cdgnstwx?]\)/@bksl{\1}/g
s/\\n/@bksl{n}/
s/%\([diouxXfgGeEsbmy]\)/@pcnt{\1}/g
s/ \\$//
#.
# Uncomment the folowing two lines (ss) to log the final output, sent to Doxygen.
s/^.*$/Output : &\
\
/w lexicon.log
s/^Output : //
s/\n\n$//
#.
# Lines are reassembled, so branch to end
b
# === Main End ===
#.
#.
# === Subroutines ===
# Branched to when content requires.
#.
# Move protected content to hold space and mark up other entities.
:protect
# Add an 'End of Line' marker
s/$/@EOL/
s/^.*$/Input : &/w lexicon.log
s/^Input : //
h
# Clear out any content that has already been marked up, to prevent futher
# markup on words that should be left alone.
#.
:patternflush
s/\n<@[^}]*[}\\]//
s/\\ [^\\]*$/\\/
t patternflush
s/\n$//g
#.
# Swap the pattern and hold buffers and remove unmarked lines and extra
# characters. Basically the inverse of the 'patternflush' action, with
# additional trailing characters stripped.
x
/^<@[^}]*$/ ! {
s/[^\<]*//
s/^ *\\\n//g
s/\n *\\//g
s/\n@EOL//g
s/[()] \\//g
s/^[^\<][^@][^\\]*//
s/\n[]|;) ][^\\]*\\//
s/\n[]|;) a-zA-Z0-9-][^\\]*$//
s/\n[]|;)}]\\//
s/\n[]|;)}]\n//
s/\n[]|;)}]$//
s/[()]$//
s/}@curs/}/
s/\n@curs$//
s/\n[^\<@][^\\]*\\//
s/\n[^\<@][^\\]*//
s/^\\//
s/\n$//g
}
s/\\\n/\
/
s/< \n//
s/^[a-z][a-z]* \n//
#.
# Swap the buffers back.
x
#.
# A special case. Tidy up after performing command substitution.
# Redirectors
s/\([^{|] *\)|/\1@redr{|}/g
#s/\&@EOL$/@redr{@amp}@EOL/g
#s/@amp@EOL$/@redr{@amp}@EOL/g
#s/\([<>]\)@amp\([0-9]\)/@redr{\1@amp\2}/g
s/@amp&/@optr{@amp@amp}/g
#s/\([^{&] *\)&[^@a-z]/\1@redr{\&}/g
s/\([^{<>^] *\)\([0-9]* *[<>^][<>^]*[^@][a-zA-Z0-9./_-]*\)/\1@redr{\2}/g
s/\\}/}\\/g
#.
# Now we can add in 'unsafe' entities that would be too greedy.
# Arrays
s/[[][0-9$a-zA-Z_;. -]*]/@args{&}/g
#.
# Declared Variables
s/\($[$]*\)\([A-Za-z_0-9][A-Za-z_0-9]*\)/@vars{@optr{\1}\2}/g
#.
# Files
/@at/ ! {
s/\([A-Za-z0-9_*-][A-Za-z0-9_*-]*\.[a-z0-9*][a-z0-9*]*\)/@fsfo{\1}/g
}
#.
#### This section is built in the Makefile. Just some formatting examples. #####
#.
# Fish builtin (bltn) <- 4 character code that has a Doxygen alias counterpart
# template : s/[[:<:]]function[[:>:]]/@bltn{&}/
#.
# s,[[:<:]]function[[:>:]],@bltn{function},g
# s,[[:<:]]begin[[:>:]],@bltn{begin},g
# ...
#.
# Fish functions (func)
# Populated by 'public' functions' filename.
#.
# s,[[:<:]]fish_pwd[[:>:]],@func{fish_pwd},g
# s,[[:<:]]fish_prompt[[:>:]],@func{fish_prompt},g
# ...
#.
# Shell Command (cmnd)
# Populated from completion filenames
#.
# s,[[:<:]]seq[[:>:]],@cmnd{seq},g
# s,[[:<:]]rm[[:>:]],@cmnd{rm},g
# ...
#.
# Color Variable (clrv)
# Populated from __fish_config_interactive.fish
# Allows fish's 'special' color variables to be identified
#.
# s,[[:<:]]fish_color_normal[[:>:]],@clrv{fish_color_normal},g
# s,[[:<:]]fish_color_command[[:>:]],@clrv{fish_color_command},g
#.
# Once all of the commands/functions/variables/special's have been marked up,
# branch back to tidy up and collapse the pattern/hold buffers back to a
# single line.
#.
# b tidy
#.
#.
# Below is a special section that adds vocabuarly to the lexicon during 'make'.
# As the lexicon is written into the output lexicon_filter, portability is
# automatically handled.
#.
#.!# cmnd whoami
#.!# cmnd mkdir
#.!# cmnd basename
#.!# bltn sleep
#.!# args in
|
Loading…
Reference in a new issue