#!/bin/bash
# 20060223 wiki2file.sh hse@ukr.net tech JaFd http://docs.linux.org.ua/dlou/index.php/Man
# Distributed under the terms of the GNU General Public License v2 or later
# This script automates downloading man page wiki sources from a wiki
# server over HTTP and saving them as local files.
# Edit the following to suit your configuration:
# Local charset (lynx expects the MIME name "utf-8")
local_charset=utf-8
# url:
url='http://docs.linux.org.ua/dlou/index.php'
# page:
page='%D0%9A%D0%B0%D1%82%D0%B5%D0%B3%D0%BE%D1%80%D1%96%D1%8F:'
page=${page}'%D0%A1%D1%82%D0%BE%D1%80%D1%96%D0%BD%D0%BA%D0%B8_'
page=${page}'%D0%BF%D1%96%D0%B4%D1%80%D1%83%D1%87%D0%BD%D0%B8%D0%BA%D0%B0'
# The encoded string above decodes to "Категорія:Сторінки_підручника"
# ("Category:Handbook pages"); keep the title URL-encoded so the request URL is valid.
# HTTP browser
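# -dump renders the page to stdout as plain text with its links collected
# in a numbered "References" list; the charset options force UTF-8 throughout.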
lynx="lynx -dump -cookie_file=$HOME/.lynx_cookies -assume_charset $local_charset -assume_local_charset $local_charset -display_charset $local_charset"
# Location of wiki-formatted man pages:
wikimandir=~/doc
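# Pages are saved as $wikimandir/Man/man<N>/<name>, mirroring the wiki's
# Man:Man<N>/<name> page titles.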
# Create output dirs if needed (mkdir -p is a no-op for existing dirs):
for i in 1 2 3 4 5 6 7 8
do
mkdir -p "$wikimandir/Man/man$i"
done
# Create list of man pages:
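# lynx -dump lists the page's links as numbered references
# ("  1. http://..."), so $2 of each line is the URL; splitting the URL
# on ':' then makes $3 the part after "Man:", i.e. the page name.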
$lynx "$url/$page" |grep 'http://docs.linux.org.ua/index.php/Man:Man' \
 |awk '{print $2}' |awk -F':' '{print $3}' |sort > "$wikimandir/manlist"
# Download the wiki-formatted man pages to local files:
url='http://docs.linux.org.ua/index.php'
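# MediaWiki serves a page's raw wikitext via "action=raw"; note that the
# raw pages live under a different base URL than the category page above.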
for i in $(cat "$wikimandir/manlist")
do
# Map the wiki title Man<N>/<name> to the local path man<N>/<name>
j=$(echo "$i" |sed 's/Man/man/')
# Comment out this "if" to re-download files that already exist
if [ ! -f "$wikimandir/Man/$j" ]
then
echo -n "Downloading to: $wikimandir/Man/$j ..."
# Fetch the raw wikitext; wget runs in the foreground so the OK! below
# is only printed once the download has finished
wget -q -O "$wikimandir/Man/$j" "$url?title=Man:$i&action=raw"
# $lynx "$url?title=Man:$i&action=raw" > "$wikimandir/Man/$j"
echo ' OK!'
fi
done
exit 0