maintain-campaignwiki

#!/bin/bash
umask 0007

# You may pass a namespace on the command line
NS=$1
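# Example invocation (SomeWiki is a hypothetical namespace; the status
# loop below treats the argument as a full data directory path):
#   maintain-campaignwiki ~/campaignwiki/SomeWiki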
PATH=/bin:/usr/bin:/home/alex/bin
WIKI='https://campaignwiki.org/wiki'

# Source perlbrew so that the right perl is on the PATH
source /home/alex/perl5/perlbrew/etc/bashrc

# Set the VERBOSE environment variable to produce output
if [[ -n $VERBOSE ]]; then echo Verbose On; fi
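# Example: VERBOSE=1 maintain-campaignwiki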

DOCROOT=~/campaignwiki.org
SCRIPT=~/farm/wiki.pl

# Data dir
WikiDataDir=~/campaignwiki

# Directory where you want the maintenance HTML files to end up.
MAINT=$DOCROOT/maintenance
mkdir -p $MAINT

# Directory with the static output files
STATIC_DIR=$DOCROOT/pics

# Extract the administrator password from the config file.  The line
# should look as follows: $AdminPass = "foo";
PW=`sed -n 's/\$AdminPass = "\(.*\)";.*/\1/p' < $WikiDataDir/config`
if [[ -n $VERBOSE ]]; then echo Password $PW; fi

# Publish config file without password
sed 's/\$AdminPass = "\(.*\)";.*/\$AdminPass = "*secret*"; # not the real password/' \
    < $WikiDataDir/config > $DOCROOT/config
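# The published copy then contains, for example:
#   $AdminPass = "*secret*"; # not the real password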

# Publish this script
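# (wikiput posts its standard input as the new text of the page at the
# given URL; -u, -p and -s set username, password and change summary)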
cat "$0" \
| wikiput -u 'CronJob' -p "$PW" \
  -s 'Update' "$WIKI/maintain-campaignwiki"

# Clean up any stale locks older than three hours
if [[ -n $VERBOSE ]]; then echo Cleaning up stale locks `find "$WikiDataDir" -type d -name 'lock*' -mmin +180`; fi
find "$WikiDataDir" -type d -name 'lock*' -mmin +180 -exec rmdir {} \;

# Find all the namespaces in the "hive".
if [[ -z "$NS" ]]; then
    NS=`find $WikiDataDir/ -maxdepth 1 -type d -name '[A-Z]*' | sort`
fi
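# NS now holds one directory per line, e.g. (hypothetical):
#   /home/alex/campaignwiki/SomeWiki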

# Maintenance action for all namespaces.
cd "$DOCROOT" || exit 1
rm -f $MAINT/*.html
for f in Main $NS; do
    f=`basename $f`
    if [[ -n $VERBOSE ]]; then echo $f; fi
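    # tail drops the first four lines of output (presumably the HTTP
    # header that the CGI script prints)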
    nice perl $SCRIPT action=maintain ns=$f | tail -n +5 > "$MAINT/$f.html"
    chmod 644 "$MAINT/$f.html"
    # make sure we don't run into surge protection
    sleep 1
done

# Clean up possible pageidx file belonging to alex instead of www-data
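# (presumably so that the wiki, running as www-data, can recreate the
# page index with the right owner)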
find "$WikiDataDir" -name pageidx -user alex -exec rm {} \;

# Clean up static copies but leave the uploaded files in place!
if [[ -n $VERBOSE ]]; then echo Cleaning up static copies; fi
rm -f "$STATIC_DIR"/*/*.html
rm -f "$STATIC_DIR"/*/static.css
rm -f "$STATIC_DIR"/*.tar.gz
rm -rf "$STATIC_DIR"/*" Text"
for f in `find $STATIC_DIR/ -maxdepth 1 -type d -name '[A-Z]*' | sort`; do
    f=`basename $f`
    if [[ -n $VERBOSE ]]; then echo $f; fi
    if [[ ! -d "$WikiDataDir/$f" ]]; then rm -rf "$STATIC_DIR/$f"; fi
done

# Status -- find empty wikis by counting the number of page files.
# When counting, ignore symlinks because BannedContent.pg is a symlink
# (see above).  Also post the data collected on the Status page.  Make
# sure that nothing in the rest of this script visits the wikis lest
# their data directories be recreated.

if [[ -n $VERBOSE ]]; then echo Computing status and deleting old wikis; fi
ACTIVE="\n\n== Active Wikis\n"
ACTIVE_DE="\n\n== Aktive Wikis\n"
STATUS="\n\n== Other Wikis\n"
STATUS_DE="\n\n== Andere Wikis\n"
HIATUS="\n\n== Wikis on Hiatus\nThese wikis had no activity for half a year.\n"
HIATUS_DE="\n\n== Wikis mit Unterbruch\nIn diesen Wikis wurde im letzten halben Jahr nichts geändert.\n"
HIBERNATE="\n\n== Hibernating Wikis\nThese wikis had no activity in the last year.\n"
HIBERNATE_DE="\n\n== Wikis im Winterschlaf\nIn diesen Wikis wurde im letzten Jahr nichts geändert.\n"
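# Classification by age of the last change: up to 32 days is active,
# up to 183 days is "other", up to 366 days is hiatus, and anything
# older hibernates.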
for f in $NS; do
    NM=`basename $f`
    PG=0
    if [[ -d $f/page ]]; then
	PG=`find $f/page -name '*.pg' -type f | wc -l`
    fi
    if [[ $PG == 0 ]]; then
        if rm -rf "$f"; then
	    if [[ -n $VERBOSE ]]; then
		echo "$NM was deleted"
	    fi
	    # delete mail timestamp as well
	    rm -rf "~/.rc2mail/CampaignWiki-$NM.ts"
        else
            STATUS+="\n* $NM had 0 pages but deletion failed"
            STATUS_DE+="\n* $NM hat 0 Seiten, konnte aber nicht gelöscht werden"
        fi
    else
	if [[ -n $VERBOSE ]]; then echo $f; fi
	# ignore missing old files
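	# every line in these logs starts with a Unix timestamp; keep
	# the latest one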
	TS=`cat $f/oldrc.log $f/rc.log 2>/dev/null | grep -v '^$' | tail -n 1 | sed 's/^\([0-9]*\).*/\1/'`
	if [[ -z "$TS" ]]; then
	    echo "$NM has no timestamp in the $f log files"
	else
	    # compute string
            MOD=`date -u -d @$TS '+%Y-%m-%d %H:%M'`
            NOW=`date '+%s'`
            DAYS=$(( ($NOW-${TS:-0}) / 60 / 60 / 24 ))
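            # e.g. NOW-TS = 86400 seconds gives DAYS = 1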
	    if [[ -n $VERBOSE ]]; then echo " Age $DAYS"; fi
	    if [ $DAYS -lt 7 ]; then
		MOD="**$MOD**"
	    fi
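	    # ${NM//_/ } displays the wiki name with underscores turned
	    # into spaces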
	    STR="\n* [$NM:HomePage ${NM//_/ }] has [$NM:?action=index $PG page"
	    STR_DE="\n* [$NM:HomePage ${NM//_/ }] hat [$NM:?action=index $PG Seite"
	    if [[ $PG != 1 ]]; then
		STR+="s"
		STR_DE+="n"
	    fi
            STR+="], last modified $MOD"
            STR_DE+="], letzte Änderung am $MOD"
	    if [[ $PG -gt 99 ]]; then
		STR+=" &#x2605;"
		STR_DE+=" &#x2605;"
	    fi
	    # assign to one of the sections
            if [ $DAYS -gt 366 ]; then
		HIBERNATE+=$STR
		HIBERNATE_DE+=$STR_DE
	    elif [ $DAYS -gt 183 ]; then
		HIATUS+=$STR
		HIATUS_DE+=$STR_DE
	    elif [ $DAYS -gt 32 ]; then
		STATUS+=$STR
		STATUS_DE+=$STR_DE
	    else
		ACTIVE+=$STR
		ACTIVE_DE+=$STR_DE
	    fi
	fi
    fi
done

# assemble final page to post
PAGE="Large wikis with more than 100 pages are marked with a star (&#x2605;).\n"
PAGE+=$ACTIVE
PAGE+=$STATUS
PAGE+=$HIATUS
PAGE+=$HIBERNATE

echo -e "$PAGE" \
| wikiput -u 'CronJob' -p "$PW" \
  -s 'Page count for all namespaces' \
  "$WIKI/Status"

PAGE="Grosse Wikis mit mehr als hundert Seiten sind mit einem Stern (&#x2605;) markiert.\n"
PAGE+=$ACTIVE_DE
PAGE+=$STATUS_DE
PAGE+=$HIATUS_DE
PAGE+=$HIBERNATE_DE

echo -e "$PAGE" \
| wikiput -u 'CronJob' -p "$PW" \
  -s 'Seiten für alle Wikis zählen' \
  "$WIKI/Übersicht"