mirror of
https://github.com/lancachenet/monolithic
synced 2024-11-22 03:53:06 +00:00
Removed duplicate parsing altogether
UKLans is a curated directory and shouldn't contain duplicates; let's save ourselves some effort
This commit is contained in:
parent
5403e5cd85
commit
25f46d2c35
1 changed file with 19 additions and 21 deletions
|
@ -1,33 +1,31 @@
|
||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
|
|
||||||
IFS=' '
|
IFS=' '
|
||||||
cd /data/cachedomains
|
mkdir -p /data/cachedomains
|
||||||
export GIT_SSH_COMMAND="ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
|
cd /data/cachedomains
|
||||||
|
export GIT_SSH_COMMAND="ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
|
||||||
if [[ ! -d .git ]]; then
|
if [[ ! -d .git ]]; then
|
||||||
git clone ${CACHE_DOMAIN_REPO} .
|
git clone ${CACHE_DOMAIN_REPO} .
|
||||||
fi
|
fi
|
||||||
git fetch origin
|
git fetch origin
|
||||||
git reset --hard origin/master
|
git reset --hard origin/master
|
||||||
path=$(mktemp -d)
|
TEMP_PATH=$(mktemp -d)
|
||||||
outputfile=${path}/outfile.conf
|
OUTPUTFILE=${TEMP_PATH}/outfile.conf
|
||||||
echo "map \$http_host \$cacheidentifier {" >> $outputfile
|
echo "map \$http_host \$cacheidentifier {" >> $OUTPUTFILE
|
||||||
echo " hostnames;" >> $outputfile
|
echo " hostnames;" >> $OUTPUTFILE
|
||||||
echo " default \$http_host;" >> $outputfile
|
echo " default \$http_host;" >> $OUTPUTFILE
|
||||||
jq -r '.cache_domains | to_entries[] | .key' cache_domains.json | while read entry; do
|
jq -r '.cache_domains | to_entries[] | .key' cache_domains.json | while read CACHE_ENTRY; do
|
||||||
key=$(jq -r ".cache_domains[$entry].name" cache_domains.json)
|
CACHE_IDENTIFIER=$(jq -r ".cache_domains[$CACHE_ENTRY].name" cache_domains.json)
|
||||||
jq -r ".cache_domains[$entry].domain_files | to_entries[] | .key" cache_domains.json | while read fileid; do
|
jq -r ".cache_domains[$CACHE_ENTRY].domain_files | to_entries[] | .key" cache_domains.json | while read CACHEHOSTS_FILEID; do
|
||||||
jq -r ".cache_domains[$entry].domain_files[$fileid]" cache_domains.json | while read filename; do
|
jq -r ".cache_domains[$CACHE_ENTRY].domain_files[$CACHEHOSTS_FILEID]" cache_domains.json | while read CACHEHOSTS_FILENAME; do
|
||||||
cat ${filename} | while read fileentry; do
|
echo Reading cache ${CACHE_IDENTIFIER} from ${CACHEHOSTS_FILENAME}
|
||||||
echo $fileentry
|
cat ${CACHEHOSTS_FILENAME} | while read CACHE_HOST; do
|
||||||
if grep -q "^$fileentry" $outputfile; then
|
echo " ${CACHE_HOST} ${CACHE_IDENTIFIER};" >> $OUTPUTFILE
|
||||||
continue
|
|
||||||
fi
|
|
||||||
echo " ${fileentry} ${key};" >> $outputfile
|
|
||||||
done
|
done
|
||||||
done
|
done
|
||||||
done
|
done
|
||||||
done
|
done
|
||||||
echo "}" >> $outputfile
|
echo "}" >> $OUTPUTFILE
|
||||||
cat $outputfile
|
cat $OUTPUTFILE
|
||||||
cp $outputfile /etc/nginx/conf.d/20_maps.conf
|
cp $OUTPUTFILE /etc/nginx/conf.d/20_maps.conf
|
||||||
rm -rf $path
|
rm -rf $TEMP_PATH
|
||||||
|
|
Loading…
Reference in a new issue