FIX: awk too many open files issue

This commit is contained in:
Ashhar Hasan 2018-11-14 22:57:44 +05:30
parent ad80ced7fa
commit 1fafa8c27c
No known key found for this signature in database
GPG Key ID: 4CD0188E0E5784EF
1 changed file with 13 additions and 8 deletions

View File

@@ -29,20 +29,25 @@ wget -qO - "$CONTRIB_FEED" | \
 print "<url>http://tokyo.kapeli.com/feeds/zzz/user_contributed/build/" nm "/" ar "</url>" >> of
 print "<url>http://sydney.kapeli.com/feeds/zzz/user_contributed/build/" nm "/" ar "</url>" >> of
 print "</entry>" >> of
+close(of)
 ar = ""; vr = ""; nm = ""; next ;
 }'
 # Remove duplicate files and keep only the more recent versions
-rm CouchDB.xml Julia.xml Phalcon.xml
+DUPLICATED_FILES=( $(find . -type f -name "*.xml" -printf "%f\n" | sort | uniq -d) )
+for file in "${DUPLICATED_FILES[@]}"; do
+  rm "$file"
+done
 # This is bound to have some errors
 # Detect erroneous files
 # Get all files that have malformed URLs
-MALFORMED_FILES=$(grep -L "http://.*\.tgz" ./*.xml)
+MALFORMED_FILES=( $(grep -L "http://.*\.tgz" ./*.xml) )
 # Fix MALFORMED_FILES using some regex magic (need to make this better and not look stupid)
-for file in $MALFORMED_FILES; do
+for file in "${MALFORMED_FILES[@]}"; do
 vim "$file" -u ./.vimrc +'call FixFileUrl()' +wq
 done