###################################################################################
#
# NOTE!!!
# This is FOUR separate scripts in one text file. You will need to separate them
# at the rows of hashes.
#
###################################################################################
#!/bin/bash
# makeurls -- bash shell script to create the URL list on the webserver.
#
# Vincent Berk   vberk@ists.dartmouth.edu
# ISTS/Dartmouth College   Hanover, NH
#
# makeurls  Copyright (C) 2001 Vincent Berk  GNU/GPL
#
# Last updated November 13, 2003.

HOME=/home/jeanne
HTML=/home/httpd/html
TODAY=listing.`date +%m-%d-%y`
LISTING=$HOME/listings/$TODAY
CGIFILE=$HOME/listings/cgis
URLSFILE=$HTML/urls

# First, take out the urls file to prevent it from showing up recursively,
# but only if it exists.  If not, make the listings directory and create anew.
if [ -e "$URLSFILE" ]
then
    rm -f $URLSFILE
    rm -f $LISTING
else
    mkdir -p $HOME/listings
fi

# Now, go build the listings
cd $HTML

# Create the file
touch $LISTING

# Insert the CGI file
if [ -f "$CGIFILE" ]
then
    echo "# CGI scripts & options:" >> $LISTING
    cat $CGIFILE >> $LISTING
else
    echo "# No CGI file found" >> $LISTING
fi

# Directories need an extra /, so we list each one both with and without it
echo "# Directories:" >> $LISTING
find /home/httpd/html -type d | sed -e "s@/home/httpd/html@http://wwwfiles.yourdomain.com@" | awk '{ print $0"\n"$0"/" }' >> $LISTING

# Regular files
echo "# Regular Files:" >> $LISTING
find /home/httpd/html -type f | sed -e "s@/home/httpd/html/@http://wwwfiles.yourdomain.com/@" >> $LISTING

# Symlinks
echo "# Symlinks:" >> $LISTING
find /home/httpd/html -type l | sed -e "s@/home/httpd/html/@http://wwwfiles.yourdomain.com/@" >> $LISTING

# Create the current symlink
rm -f $HOME/listings/current
ln -s $LISTING $HOME/listings/current

# Copy to the webdirectory
cp $LISTING $URLSFILE

# Make it (and its parent directory) belong to the listing owner
chown webmaster $LISTING
chown webmaster $HOME/listings/

# fix linefeeds
perl -pi -e 's/\r\n?/\n/g' $URLSFILE

# end
###################################################################################
# makeurls.cron -- cron job to run the makeurls script every day.
#
# Vincent Berk   vberk@ists.dartmouth.edu
# ISTS/Dartmouth College   Hanover, NH
#
# makeurls.cron  Copyright (C) 2001 Vincent Berk  GNU/GPL
#
# General info:
SHELL=/bin/bash
MAILTO=webmaster@yourdomain.com
HOME=/home/webmaster

# Run every day:
@daily $HOME/scripts/makeurls

# end
###################################################################################
#!/bin/bash
# getlist -- bash shell script to retrieve the URL list from the webserver
#            and restart the reverse proxy.
#
# Vincent Berk   vberk@ists.dartmouth.edu
# ISTS/Dartmouth College   Hanover, NH
#
# getlist  Copyright (C) 2001 Vincent Berk  GNU/GPL
#

HOME=/home/mrp
OLDURLSFILE=$HOME/urls
NEWURLSFILE=$HOME/newurls
REALHOST="192.168.1.2"
REALPORT="80"

# Fetch the urls file; tail strips the first 10 lines (the HTTP response headers)
echo -e "GET /urls HTTP/1.0\n\n" | nc $REALHOST $REALPORT | tail +11 > $NEWURLSFILE
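
# The tail above assumes the HTTP response headers occupy exactly ten lines.
# A less brittle fetch (only a sketch, assuming curl is installed on this host)
# lets the client strip the headers itself:
#
#   curl -s -o $NEWURLSFILE "http://$REALHOST:$REALPORT/urls"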

if [ -e "$NEWURLSFILE" ]; then           # If the file exists...
    if [ -s "$NEWURLSFILE" ]; then       # ...and if it's not 0 length...
        cat $NEWURLSFILE > $OLDURLSFILE  # ...then overwrite old urls list with new.
    fi
fi

# fix linefeeds
perl -pi -e 's/\r\n?/\n/g' $OLDURLSFILE

# kill Jeanne processes
killall -TERM jeanne
sleep 1

# Restart the squid server
/etc/init.d/squid restart

# end
###################################################################################
# getlist.cron -- cron job to run the getlist script every day.
#
# Vincent Berk   vberk@ists.dartmouth.edu
# ISTS/Dartmouth College   Hanover, NH
#
# getlist.cron  Copyright (C) 2001 Vincent Berk  GNU/GPL
#
# General:
SHELL=/bin/bash
MAILTO=webmaster@yourdomain.com
HOME=/home/mrp
#
# Update from webserver every day at 0:30
30 0 * * * $HOME/getlist

# end
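###################################################################################
#
# Installing the cron jobs (only a sketch; the account names are assumptions
# based on the HOME settings above). Both .cron files are in user crontab
# format, so install each one with crontab(1) as the matching user:
#
#   crontab makeurls.cron    # on the webserver, as the webmaster user
#   crontab getlist.cron     # on the reverse proxy, as the mrp user
#
# crontab FILE replaces that user's existing crontab, so merge by hand if the
# account already has other entries.
#
###################################################################################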