Have wget ignore robots.txt for http downloads

git-svn-id: https://svn.code.sf.net/p/xcat/code/xcat-core/branches/2.7@12353 8638fb3e-16cb-4fca-ae20-7b5d299a9bcd
Author: jbjohnso
Date:   2012-04-26 12:41:08 +00:00
Parent: 7f02840237
Commit: f26491b055


@@ -17,7 +17,7 @@ do
 for i in $MASTER_IPS
 do
 #wget -l inf -N -r --waitretry=10 --random-wait --retry-connrefused -t 0 -T 60 ftp://$i/postscripts
-wget -l inf -N -r --waitretry=10 --random-wait --retry-connrefused -nH --cut-dirs=2 --reject "index.html*" --no-parent -t 0 -T 60 http://$i$INSTALLDIR/postscripts/ -P /xcatpost
+wget -l inf -N -r --waitretry=10 --random-wait --retry-connrefused -e robots=off -nH --cut-dirs=2 --reject "index.html*" --no-parent -t 0 -T 60 http://$i$INSTALLDIR/postscripts/ -P /xcatpost
 if [ "$?" = "0" ]
 then
 if [ ! -x /usr/bin/openssl ]; then #Stop if no openssl to help the next bit
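Background on the change: wget's recursive mode (-r) fetches robots.txt from the server and skips anything it disallows, so a restrictive robots.txt on the management node's web server can silently leave /xcatpost incomplete. Passing "-e robots=off" supplies the .wgetrc command "robots = off" on the command line, disabling that check for this single invocation only. Below is a minimal standalone sketch of the same download; the $MASTER and $INSTALLDIR values are hypothetical placeholders, not taken from the script.

    #!/bin/sh
    # Hypothetical management-node IP and install directory, for illustration only.
    MASTER=10.1.2.3
    INSTALLDIR=/install
    # Mirror the postscripts tree over http, ignoring the server's robots.txt.
    wget -l inf -N -r --waitretry=10 --random-wait --retry-connrefused \
         -e robots=off -nH --cut-dirs=2 --reject "index.html*" --no-parent \
         -t 0 -T 60 http://$MASTER$INSTALLDIR/postscripts/ -P /xcatpost

The same effect could be had by adding "robots = off" to /etc/wgetrc on every node; using -e keeps the override local to this one command and leaves system-wide wget behavior untouched.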