Add a script to automate the process
parent
83f46529d9
commit
55364a4ce5
@ -0,0 +1,34 @@
|
||||
" Minimal vimrc loaded non-interactively (vim -u ./.vimrc) by the feed
" script below; 'nocompatible' enables full Vim behaviour for the functions.
set nocompatible
|
||||
|
||||
" Mirror(dict) — add a reversed (value -> key) entry for every pair in
" a:dict, mutating the dictionary in place, and return the same dict.
" items() yields a snapshot list, so adding keys during the loop is safe.
" NOTE(review): assumes keys and values are disjoint; a value equal to an
" existing key would overwrite that entry — confirm callers guarantee this.
function! Mirror(dict)
    for [key, value] in items(a:dict)
        let a:dict[value] = key
    endfor
    return a:dict
endfunction
|
||||
|
||||
" S(number) — thin wrapper over submatch(), callable from a
" sub-replace-expression (\=) in :substitute; returns the text of
" capture group a:number of the current match (0 = whole match).
function! S(number)
    return submatch(a:number)
endfunction
|
||||
|
||||
" SwapWords(dict [, delimiter]) — swap every occurrence of each key in
" a:dict with its value (and vice versa) throughout the whole buffer.
" The optional second argument overrides the ':substitute' delimiter
" (default '/'); pass an alternative when the words contain '/'.
" Uses Mirror() to build the bidirectional lookup and S() to fetch the
" matched word inside the \= replacement expression.
function! SwapWords(dict, ...)
    " Choose the delimiter BEFORE escaping, so occurrences of the
    " delimiter inside the search words can be escaped as well.
    " (The original escaped only '|' and did so before the delimiter
    " was known, producing a malformed :s command for words
    " containing the delimiter.)
    if a:0 == 1
        let delimiter = a:1
    else
        let delimiter = '/'
    endif
    let words = keys(a:dict) + values(a:dict)
    " '|' is alternation in a \v pattern; the delimiter terminates the
    " :s command — both must be backslash-escaped inside the pattern.
    let words = map(words, 'escape(v:val, "|" . delimiter)')
    let pattern = '\v(' . join(words, '|') . ')'
    exe '%s' . delimiter . pattern . delimiter
                \ . '\=' . string(Mirror(a:dict)) . '[S(0)]'
                \ . delimiter . 'g'
endfunction
|
||||
|
||||
" FixFileUrl() — repair a malformed feed file in the current buffer by
" capturing two words with a recorded macro and swapping them everywhere
" via SwapWords().
" NOTE(review): the @q macro is position-dependent — 'j f> l "ayt<' yanks
" the text after the first '>' on line 2 into @a, then 'j $ 2F/ l "byt<'
" yanks the path component between the last two '/' on line 3 into @b;
" confirm against a sample malformed file.
function! FixFileUrl()
    " Move to the top of the buffer (':0' clamps to the first line).
    exe 0
    " Record the capture keystrokes into register q, then replay them.
    let @q = 'jf>l"ayt<j$2F/l"byt<'
    normal @q
    " Swap the two captured words throughout the buffer.
    call SwapWords({@a:@b})
endfunction
|
||||
|
@ -1,10 +0,0 @@
|
||||
<entry>
|
||||
<version>1.4</version>
|
||||
<url>http://frankfurt.kapeli.com/feeds/zzz/user_contributed/build/CouchDB/CouchDB.tgz</url>
|
||||
<url>http://london.kapeli.com/feeds/zzz/user_contributed/build/CouchDB/CouchDB.tgz</url>
|
||||
<url>http://newyork.kapeli.com/feeds/zzz/user_contributed/build/CouchDB/CouchDB.tgz</url>
|
||||
<url>http://sanfrancisco.kapeli.com/feeds/zzz/user_contributed/build/CouchDB/CouchDB.tgz</url>
|
||||
<url>http://singapore.kapeli.com/feeds/zzz/user_contributed/build/CouchDB/CouchDB.tgz</url>
|
||||
<url>http://tokyo.kapeli.com/feeds/zzz/user_contributed/build/CouchDB/CouchDB.tgz</url>
|
||||
<url>http://sydney.kapeli.com/feeds/zzz/user_contributed/build/CouchDB/CouchDB.tgz</url>
|
||||
</entry>
|
@ -1,10 +0,0 @@
|
||||
<entry>
|
||||
<version>0.3.6</version>
|
||||
<url>http://frankfurt.kapeli.com/feeds/zzz/user_contributed/build/Julia/Julia.tgz</url>
|
||||
<url>http://london.kapeli.com/feeds/zzz/user_contributed/build/Julia/Julia.tgz</url>
|
||||
<url>http://newyork.kapeli.com/feeds/zzz/user_contributed/build/Julia/Julia.tgz</url>
|
||||
<url>http://sanfrancisco.kapeli.com/feeds/zzz/user_contributed/build/Julia/Julia.tgz</url>
|
||||
<url>http://singapore.kapeli.com/feeds/zzz/user_contributed/build/Julia/Julia.tgz</url>
|
||||
<url>http://tokyo.kapeli.com/feeds/zzz/user_contributed/build/Julia/Julia.tgz</url>
|
||||
<url>http://sydney.kapeli.com/feeds/zzz/user_contributed/build/Julia/Julia.tgz</url>
|
||||
</entry>
|
@ -1,10 +0,0 @@
|
||||
<entry>
|
||||
<version>2.0.0</version>
|
||||
<url>http://frankfurt.kapeli.com/feeds/zzz/user_contributed/build/Phalcon/Phalcon.tgz</url>
|
||||
<url>http://london.kapeli.com/feeds/zzz/user_contributed/build/Phalcon/Phalcon.tgz</url>
|
||||
<url>http://newyork.kapeli.com/feeds/zzz/user_contributed/build/Phalcon/Phalcon.tgz</url>
|
||||
<url>http://sanfrancisco.kapeli.com/feeds/zzz/user_contributed/build/Phalcon/Phalcon.tgz</url>
|
||||
<url>http://singapore.kapeli.com/feeds/zzz/user_contributed/build/Phalcon/Phalcon.tgz</url>
|
||||
<url>http://tokyo.kapeli.com/feeds/zzz/user_contributed/build/Phalcon/Phalcon.tgz</url>
|
||||
<url>http://sydney.kapeli.com/feeds/zzz/user_contributed/build/Phalcon/Phalcon.tgz</url>
|
||||
</entry>
|
@ -0,0 +1,57 @@
|
||||
#!/bin/bash
#
# Refresh docset feed files and download the referenced archives:
#   1. update the inbuilt Kapeli feed repo and the user-contributed index,
#   2. generate one <entry> XML file per contributed docset,
#   3. repair files with malformed URLs via a Vim macro (see ./.vimrc),
#   4. download the archives into "$1" (default /tmp/).

# '**' is used below to pick up .xml files both in the current directory
# and inside the cloned feed repo; without globstar, bash treats '**' as a
# plain '*' and silently matches the wrong set of files.
shopt -s globstar

INBUILT_FEED='https://github.com/Kapeli/feeds'
CONTRIB_FEED='http://sanfrancisco.kapeli.com/feeds/zzz/user_contributed/build/index.json'

# Update the INBUILT_FEED from upstream
if [ -d "$(basename "$INBUILT_FEED")" ]; then
    cd "$(basename "$INBUILT_FEED")" && git checkout master && git pull && cd ..
else
    git clone "$INBUILT_FEED"
fi

# Update the CONTRIB_FEED from upstream: extract name/archive/version
# triples from the JSON index and write one <entry> file per docset.
# NOTE(review): the sed patterns key off the index's exact leading
# whitespace — confirm they still match the current index.json layout.
wget -qO - "$CONTRIB_FEED" | \
    sed -n -e '/^ \{4\}"/p' \
        -e '/^ "archive" :.*tgz"/p' \
        -e '/^ "version" :/p' | \
    awk -F '"' 'NR%3==1 { nm = $2 ; next }
        NR%3==2 { ar = $4 ; next }
        NR%3==0 { vr = $4 ;
            of = nm ".xml"
            print "<entry>" > of
            print "<version>" vr "</version>" >> of
            print "<url>http://frankfurt.kapeli.com/feeds/zzz/user_contributed/build/" nm "/" ar "</url>" >> of
            print "<url>http://london.kapeli.com/feeds/zzz/user_contributed/build/" nm "/" ar "</url>" >> of
            print "<url>http://newyork.kapeli.com/feeds/zzz/user_contributed/build/" nm "/" ar "</url>" >> of
            print "<url>http://sanfrancisco.kapeli.com/feeds/zzz/user_contributed/build/" nm "/" ar "</url>" >> of
            print "<url>http://singapore.kapeli.com/feeds/zzz/user_contributed/build/" nm "/" ar "</url>" >> of
            print "<url>http://tokyo.kapeli.com/feeds/zzz/user_contributed/build/" nm "/" ar "</url>" >> of
            print "<url>http://sydney.kapeli.com/feeds/zzz/user_contributed/build/" nm "/" ar "</url>" >> of
            print "</entry>" >> of
            ar = ""; vr = ""; nm = ""; next ;
        }'

# Remove duplicate files and keep only the more recent versions
# (-f: do not fail when a file is already absent).
rm -f CouchDB.xml Julia.xml Phalcon.xml

# This is bound to have some errors
# Detect erroneous files

# Get all files that have malformed URLs (-L lists files with NO match)
MALFORMED_FILES=$(grep -L "http://.*\.tgz" ./*.xml)

# Fix MALFORMED_FILES using some regex magic (need to make this better and not look stupid)
for file in $MALFORMED_FILES; do
    vim "$file" -u ./.vimrc +'call FixFileUrl()' +wq
done

# Extract URLs from all files and create a wget input file
WGET_URLS='/tmp/docsets_url'
grep "http://london\..*\.tgz" ./**/*.xml -o --no-filename > "$WGET_URLS"

# Download the archives and extract them to proper docsets directory.
# "${1:-/tmp/}" falls back to /tmp/ when no target directory was given;
# the original "${1='/tmp/'}" aborts with "cannot assign in this way"
# because positional parameters cannot be default-assigned.
cd "${1:-/tmp/}" && \
    wget --continue -i "$WGET_URLS"
#&& \
#    tar xzf ./*.tgz -C "$HOME/.local/share/Zeal/Zeal/docsets/"
|
Loading…
Reference in New Issue