Для совсем шизанутых: man pages для «утюга» (OpenWrt).
Задался позырить маны на «утюге» (каком-то длинке) с OpenWrt.
Накабузял малость, лингвист из меня никакой.
Пытайте, косяков масса и сделано на отвяжись...
Маны можно зырить на любом «утюге»
(можно и на машинках, где есть инет, но нет манов).
#!/bin/sh
# Minimal "man" replacement for OpenWrt: fetches man pages from a web
# service (man.he.net / man.cx) and optionally caches them locally.
#
# BUG FIX: the original read `on line=0`, which is not an assignment but a
# call to a non-existent command `on`; the variable must be `online`.
# The same broken line was written into the generated default config,
# which is sourced below and therefore must be valid shell.

# Base URL the page name is appended to (the original assigned man_url
# twice; the first value was dead — kept here as a commented alternative).
#man_url='http://man.he.net/?section=all&topic='
man_url='http://man.cx?page='
man_conf='/etc/man.conf'              # user-editable configuration file
man_dir='/usr/share/man/offline.man'  # local cache directory
force=0                               # non-zero = (re)download even if cached
online=0                              # non-zero = never save a local copy
man_pages=""                          # requested page name (set by arg parsing)

# First run: create a commented default config and point the user at it.
[ ! -f "${man_conf}" ] && {
cat << EOF_CONF > "${man_conf}"
man_url='http://man.he.net/?section=all&topic=' #Url for search and download man pages
#man_url='http://man.cx?page='
online=1 #1 - always don't save local file man pages,
#0 - if exist local man pages not download from ${man_url}
force=1 #1 - always search and download man pages from site
#0 - if exist local man pages not download from ${man_url}
man_dir='/usr/share/man/offline.man' #directory for offline man pages
EOF_CONF
echo "First start man..."
echo "*** Created ${man_conf}, editing: vi ${man_conf} ***"
echo "For usage enter: $0 --help"
}
. "${man_conf}"
# Print usage information and exit 0.
# BUG FIX: the example configuration printed the broken `on line=1`
# assignment (a syntax error when /etc/man.conf is sourced) and
# misspelled the config path as `/etc/man,conf`.
help() {
cat << EOF_HELP
Usage example: man wget, man --force|-f wget, man --help|h
Options:
--force|-f force download man pages from ${man_url}
--online|-n don't save man pages on local file
--url|-u base url for site man pages, example: -u 'http://man.he.net/?section=all&topic='
--help|-h|-? this help
Files:
/etc/man.conf
Example for /etc/man.conf:
man_url='http://man.cx?page='
#man_url='http://man.he.net/?section=all&topic='
online=1 #1 - always don't save local file man pages,
#0 - if exist local man pages not download from ${man_url}
force=1 #1 - always search and download man pages from site
#0 - if exist local man pages not download from ${man_url}
man_dir='/usr/share/man/offline.man' #directory for offline man pages
EOF_HELP
exit 0
}
# Parse command-line options; only the first non-option argument is kept
# as the man page name, extra names are silently ignored.
# BUG FIX: `on line=2` invoked a non-existent command `on`; the intended
# assignment is `online=2`. Expansions are quoted so an empty
# ${man_pages} cannot distort the `[ -z ... ]` test.
while [ $# -gt 0 ]; do
  case "$1" in
    -f|--force)
      force=2
      shift
      ;;
    -h|--help)
      help
      ;;
    -n|--online)
      online=2
      shift
      ;;
    -u|--url)
      shift
      man_url="$1"
      shift
      ;;
    *)
      # keep only the first page name seen
      [ -z "${man_pages}" ] && man_pages="$1"
      shift
      ;;
  esac
done
# Main flow: ensure the cache dir exists, then either show the cached
# page or download it from ${man_url}.
# BUG FIXES vs the original:
#  * The braces were unbalanced (one stray `}`), and `grep -q "..."` had
#    no file operand (it would block reading stdin) with its result
#    discarded. The structure below — grep the downloaded body, show the
#    server's "no such page" answer, otherwise view/save — is the
#    evidently intended one (it is what the revised script does too).
#  * ${online}/${force} may be unset (the config used the broken
#    `on line=` form), which made `[ ... -o ${online} -ne 0 ]` a test
#    syntax error; both are defaulted with ${var:-0}.
[ -z "${man_pages}" ] && help
[ ! -d "${man_dir}" ] && mkdir -p "${man_dir}"
# Compress the cache when gzip is available.
gzip=0
ext=""
which gzip > /dev/null 2>&1 && {
  gzip=1
  ext=".gz"
}
man_path=${man_dir}/man.${man_pages}
if [ ! -f "${man_path}${ext}" ] || [ "${force:-0}" -ne 0 ] || [ "${online:-0}" -ne 0 ]; then
  man_url="${man_url}${man_pages}"
  [ ! -d "/tmp/${man_dir}" ] && mkdir -p "/tmp/${man_dir}"
  echo "Please wait. Searches and download man pages from ${man_url}..." 1>&2
  echo
  {
    err_download=0
    if which elinks > /dev/null 2>&1; then
      elinks -dump "${man_url}" 2> /dev/null || err_download=1
    else
      # fall back to plain wget and strip HTML tags
      { wget "${man_url}" -O- 2> /dev/null | sed -e 's/<[^>]*>//g'; } || err_download=1
    fi
    [ ${err_download} -eq 1 ] && {
      echo "Not download man pages ${man_pages} from ${man_url}" 1>&2
      exit 2
    }
  } > "/tmp/${man_path}"
  # The services answer 200 even for unknown pages; detect their
  # "not found" phrases in the downloaded body.
  if grep -q "no[\t ]\+such[\t ]\+man[\t ]\+page:[\t ]\+${man_pages}\|No[\t ]\+matches[\t ]\+for[\t ]\+\"${man_pages}\"\|Invalid[\t ]\+characters[\t ]\+in[\t ]\+name" "/tmp/${man_path}"; then
    cat "/tmp/${man_path}"
  else
    # online mode without --force: view once, do not cache
    if [ "${online:-0}" -ne 0 ] && [ "${force:-0}" -lt 2 ]; then
      less "/tmp/${man_path}"
      rm -f "/tmp/${man_path}"
      exit 0
    fi
    { [ ${gzip} -eq 1 ] && gzip -c -9 || cat; } < "/tmp/${man_path}" > "${man_path}${ext}"
  fi
  rm -f "/tmp/${man_path}"
fi
[ ! -f "${man_path}${ext}" ] && {
  echo
  echo "*** Not found man pages ${man_pages} ***"
  exit 1
}
{ [ ${gzip} -eq 1 ] && gzip -c -d || cat; } < "${man_path}${ext}" | less
Немного правки косяков и добавление новых... (как есть)
#!/bin/sh
# Revised man-fetcher: tries each base URL from the space-separated list
# in ${man_urls} until one of them yields the requested page.
# BUG FIX: `on line=0` invoked a non-existent command `on`; the intended
# variable is `online`. The same fix is applied to the generated default
# config, which is sourced below and must be valid shell.
man_urls='http://man.he.net/?section=all&topic= http://man.cx?page= '
man_conf='/etc/man.conf'              # user-editable configuration file
man_dir='/usr/share/man/offline.man'  # local cache directory
force=0                               # non-zero = (re)download even if cached
online=0                              # non-zero = never save a local copy
man_pages=""                          # requested page name
# First run: write a commented default configuration.
[ ! -f "${man_conf}" ] && {
cat << EOF_CONF > "${man_conf}"
man_urls='http://man.he.net/?section=all&topic= http://man.cx?page= ' #Url's for search and download man pages
#man_url='http://man.cx?page='
online=1 #1 - always don't save local file man pages,
#0 - if exist local man pages not download from ${man_url}
force=1 #1 - always search and download man pages from site
#0 - if exist local man pages not download from ${man_url}
man_dir='/usr/share/man/offline.man' #directory for offline man pages
EOF_CONF
echo "First start man..."
echo "*** Created ${man_conf}, editing: vi ${man_conf} ***"
echo "For usage enter: $0 --help"
}
[ -z "${man_conf}" ] && man_conf='/etc/man.conf'
. "${man_conf}"
# Fall back to built-in defaults for anything the config left out
# (an old config may still contain the broken `on line=` form).
[ -z "${man_urls}" ] && man_urls='http://man.he.net/?section=all&topic= http://man.cx?page= '
[ -z "${man_dir}" ] && man_dir='/usr/share/man/offline.man'
[ -z "${online}" ] && online=0
[ -z "${force}" ] && force=0
# Print usage information and exit 0.
# BUG FIXES: this script uses ${man_urls} (plural), but the help text
# referenced the never-set ${man_url}; the example config printed the
# broken `on line=1` assignment; the config path was misspelled
# `/etc/man,conf`.
help() {
cat << EOF_HELP
Usage example: man wget, man --force|-f wget, man --help|h
Options:
--force|-f force download man pages from ${man_urls}
--online|-n don't save man pages on local file
--url|-u a list of base addresses separated by a space
to search for manual pages,
example: -u 'http://man.he.net/?section=all&topic= http://man.cx?page= '
--help|-h this help
Files:
/etc/man.conf
Example for /etc/man.conf:
man_urls='http://man.cx?page= http://man.he.net/?section=all&topic= '
#a list of base addresses separated by a space
#to search for manual pages.
online=1 #1 - always don't save local file man pages
#0 - if exist local man pages not download from ${man_urls}
force=1 #1 - always search and download man pages from site
#0 - if exist local man pages not download from ${man_urls}
man_dir='/usr/share/man/offline.man' #directory for offline man pages
EOF_HELP
exit 0
}
# Parse command-line options; only the first non-option argument is kept
# as the man page name.
# BUG FIX: `on line=2` invoked a non-existent command `on`; the intended
# assignment is `online=2`. Expansions quoted.
while [ $# -gt 0 ]; do
  case "$1" in
    -f|--force)
      force=2
      shift
      ;;
    -h|--help)
      help
      ;;
    -n|--online)
      online=2
      shift
      ;;
    -u|--url)
      shift
      man_urls="$1"
      shift
      ;;
    *)
      # keep only the first page name seen
      [ -z "${man_pages}" ] && man_pages="$1"
      shift
      ;;
  esac
done
# A page name is required; otherwise show help and exit.
# (Expansions quoted: the original unquoted `[ -z ${man_pages} ]` only
# worked by accident when the variable was empty.)
[ -z "${man_pages}" ] && help
[ ! -d "${man_dir}" ] && mkdir -p "${man_dir}"
# Compress cached pages when gzip is available.
gzip=0
ext=""
which gzip > /dev/null 2>&1 && {
  gzip=1
  ext=".gz"
}
man_path=${man_dir}/man.${man_pages}
# Try to fetch the requested page from one base URL.
#   $1 - base URL; ${man_pages} is appended to it
# Uses globals: man_pages, man_path, man_dir, force, online, gzip, ext.
# Returns: 0 - page cached (or cache already valid)
#          1 - download failed
#          2 - server answered with a "no such page" body
#          3 - page was shown in online mode, nothing cached
# BUG FIX: ${online} could be unset (the broken `on line=` assignment
# upstream), turning the leading `[ ... -o ${online} -ne 0 ]` into a test
# syntax error that silently skipped the download and returned 0. Both
# flags are now defaulted with ${var:-0}, and `-o`/`-a` tests are split
# into separate `[ ]` commands.
get_man_pages() {
  local man_url
  local err_download
  man_url=$1
  if [ ! -f "${man_path}${ext}" ] || [ "${force:-0}" -ne 0 ] || [ "${online:-0}" -ne 0 ]; then
    man_url="${man_url}${man_pages}"
    [ ! -d "/tmp/${man_dir}" ] && mkdir -p "/tmp/${man_dir}"
    echo "Please wait. Searches and download man pages from ${man_url}..." 1>&2
    echo
    {
      err_download=0
      if which elinks > /dev/null 2>&1; then
        elinks -dump "${man_url}" 2> /dev/null || err_download=1
      else
        # fall back to plain wget and strip HTML tags
        { wget "${man_url}" -O- 2> /dev/null | sed -e 's/<[^>]*>//g'; } || err_download=1
      fi
      [ ${err_download} -eq 1 ] && {
        echo "Not download man pages ${man_pages} from ${man_url}" 1>&2
        return 1
      }
    } > "/tmp/${man_path}"
    # The services answer 200 even for unknown pages; look for their
    # "not found" phrases in the downloaded body.
    if grep -q -i "no[\t ]\+such[\t ]\+man[\t ]\+page:[\t ]\+${man_pages}\|\
No[\t ]\+matches[\t ]\+for[\t ]\+\"${man_pages}\"\|Invalid[\t ]\+characters[\t ]\+in[\t ]\+name" \
      "/tmp/${man_path}"; then
      return 2
    else
      # online mode without --force: show once, do not cache
      if [ "${online:-0}" -ne 0 ] && [ "${force:-0}" -lt 2 ]; then
        less "/tmp/${man_path}"
        return 3
      fi
      { [ ${gzip} -eq 1 ] && gzip -c -9 || cat; } < "/tmp/${man_path}" > "${man_path}${ext}"
    fi
  fi
  return 0
}
# Try every configured base URL until one yields the page (status 0) or
# the page was already shown in online mode (status 3).
# BUG FIX: a return of 3 means the page WAS displayed (online mode, not
# cached); the original then `cat`ed the downloaded body a second time
# and fell through to the misleading "Not found" error with exit 1.
# Status 3 is now treated as success.
not_found_man=0
for url in ${man_urls}; do
  get_man_pages "${url}"
  not_found_man=$?
  case "${not_found_man}" in
    0|3)
      break
      ;;
    *)
      echo "not found man page"
      ;;
  esac
done
# Online view already happened inside get_man_pages; just clean up.
[ "${not_found_man}" -eq 3 ] && {
  rm -f "/tmp/${man_path}"
  exit 0
}
# Show the server's "no such page" answer when every URL failed.
[ "${not_found_man}" -ne 0 ] && cat "/tmp/${man_path}"
rm -f "/tmp/${man_path}"
[ ! -f "${man_path}${ext}" ] && {
  echo
  echo "*** Not found man pages ${man_pages} ***"
  exit 1
}
{ [ ${gzip} -eq 1 ] && gzip -c -d || cat; } < "${man_path}${ext}" | less
Показать
Для получения уведомлений об ответах подключите телеграм бот:
Инфостарт бот