mirror of
https://github.com/lancachenet/monolithic
synced 2024-11-21 19:43:05 +00:00
Removed duplicate parsing altogether
UKLans is a curated directory and shouldn't contain duplicates, so let's save ourselves some effort.
This commit is contained in:
parent
5403e5cd85
commit
25f46d2c35
1 changed files with 19 additions and 21 deletions
|
@@ -1,33 +1,31 @@
|
|||
#!/bin/bash
#
# Sync the lancache cache_domains repository and regenerate the nginx
# $http_host -> $cacheidentifier map (/etc/nginx/conf.d/20_maps.conf).
#
# Required env:
#   CACHE_DOMAIN_REPO - git URL of the cache_domains repository to clone.
#
# The generated map sends every hostname listed in a cache's domain
# files to that cache's identifier; unknown hosts fall through to the
# raw $http_host default.

set -euo pipefail

readonly CACHE_DIR=/data/cachedomains
readonly MAP_DEST=/etc/nginx/conf.d/20_maps.conf

# Fail early with a clear message rather than cloning from "".
: "${CACHE_DOMAIN_REPO:?CACHE_DOMAIN_REPO must be set}"

mkdir -p "${CACHE_DIR}"
cd "${CACHE_DIR}"

# The repo may be fetched over ssh from inside a container with no known_hosts;
# skip host-key verification deliberately.
export GIT_SSH_COMMAND="ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"

# First run: clone. Subsequent runs: hard-sync to upstream master so local
# edits never block updates.
if [[ ! -d .git ]]; then
  git clone "${CACHE_DOMAIN_REPO}" .
fi
git fetch origin
git reset --hard origin/master

TEMP_PATH=$(mktemp -d)
# Clean the temp dir on every exit path, not just the success path.
trap 'rm -rf -- "${TEMP_PATH}"' EXIT
OUTPUTFILE="${TEMP_PATH}/outfile.conf"

{
  echo "map \$http_host \$cacheidentifier {"
  echo "    hostnames;"
  echo "    default \$http_host;"
} >> "${OUTPUTFILE}"

# UKLans cache_domains is a curated list, so no duplicate filtering is needed:
# every host line is emitted as-is.
jq -r '.cache_domains | to_entries[] | .key' cache_domains.json | while read -r CACHE_ENTRY; do
  CACHE_IDENTIFIER=$(jq -r ".cache_domains[${CACHE_ENTRY}].name" cache_domains.json)
  # Iterate the domain_files array directly instead of looking each index
  # up with a second jq pass.
  jq -r ".cache_domains[${CACHE_ENTRY}].domain_files[]" cache_domains.json | while read -r CACHEHOSTS_FILENAME; do
    echo "Reading cache ${CACHE_IDENTIFIER} from ${CACHEHOSTS_FILENAME}"
    # Redirect into the loop rather than `cat file | while ...`.
    while read -r CACHE_HOST; do
      echo "    ${CACHE_HOST} ${CACHE_IDENTIFIER};" >> "${OUTPUTFILE}"
    done < "${CACHEHOSTS_FILENAME}"
  done
done

echo "}" >> "${OUTPUTFILE}"

# Echo the finished map for the container log, then install it for nginx.
cat "${OUTPUTFILE}"
cp "${OUTPUTFILE}" "${MAP_DEST}"
|
||||
|
|
Loading…
Reference in a new issue