FIX: awk "too many open files" issue

pull/4/head
Ashhar Hasan 6 years ago
parent ad80ced7fa
commit 1fafa8c27c
No known key found for this signature in database
GPG Key ID: 4CD0188E0E5784EF

@ -29,20 +29,25 @@ wget -qO - "$CONTRIB_FEED" | \
print "<url>http://tokyo.kapeli.com/feeds/zzz/user_contributed/build/" nm "/" ar "</url>" >> of
print "<url>http://sydney.kapeli.com/feeds/zzz/user_contributed/build/" nm "/" ar "</url>" >> of
print "</entry>" >> of
close(of)
ar = ""; vr = ""; nm = ""; next ;
}'
# Remove duplicate files and keep only the more recent versions.
# These three feeds are known stale duplicates; -f tolerates their absence,
# -- guards against filenames being parsed as options.
rm -f -- CouchDB.xml Julia.xml Phalcon.xml
# Collect basenames that occur more than once anywhere under the current
# directory. mapfile reads one entry per line, avoiding the word-splitting
# and globbing pitfalls of ARR=( $(cmd) ) (ShellCheck SC2207).
mapfile -t DUPLICATED_FILES < <(find . -type f -name "*.xml" -printf "%f\n" | sort | uniq -d)
# NOTE(review): entries are bare basenames, so rm targets the copy in the
# current directory — presumably the older duplicate; confirm intent.
for file in "${DUPLICATED_FILES[@]}"; do
  rm -- "$file"
done
# This is bound to have some errors
# Detect erroneous files
# Get all files that have malformed URLs. grep -L lists files that do NOT
# contain a match, i.e. feeds missing a proper "http://...tgz" archive URL.
# mapfile keeps paths with spaces intact instead of word-splitting them.
mapfile -t MALFORMED_FILES < <(grep -L "http://.*\.tgz" ./*.xml)
# Fix MALFORMED_FILES using some regex magic (need to make this better and not look stupid)
# Each file is opened in vim with the repo-local .vimrc, repaired by the
# FixFileUrl() function defined there, then written and closed.
for file in "${MALFORMED_FILES[@]}"; do
  vim "$file" -u ./.vimrc +'call FixFileUrl()' +wq
done

Loading…
Cancel
Save