FixingParticipantLinks

Unfortunately, links to bots' jar files rot over time: sometimes the hosting website goes down, sometimes an API changes. Luckily we have the [http://robocode-archive.strangeautomata.com/robots/ archive]. To fix a broken link, just replace it with the corresponding archive link. You can do this by hand for your favorite bot, or you can run the script below.
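For example, a by-hand fix is a one-line edit of the participants list. With a made-up bot name and dead host, an entry like

some.pkg.SomeBot 1.0,http://dead-host.example.com/SomeBot_1.0.jar

becomes (note that spaces in the bot name turn into underscores in the archive file name, which is exactly what the script below does)

some.pkg.SomeBot 1.0,http://robocode-archive.strangeautomata.com/robots/some.pkg.SomeBot_1.0.jar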

#!/bin/bash
# bash rather than plain sh: the script relies on the "function" keyword
# and on GNU sed/grep extensions

# Check the rumble participants page for bots with rotten links.
# Where possible, replace each bad link with one pointing to the archive at
#  http://robocode-archive.strangeautomata.com/robots/

archiveURL="http://robocode-archive.strangeautomata.com/robots"
archiveRegEx="robocode-archive.strangeautomata.com"

logFile="changes.log"

PARTICIPANTSURL="http://robowiki.net/wiki/RoboRumble/Participants?action=raw"
TAGbegin="<pre>"
TAGend="<\/pre>"
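# The raw page (action=raw) is MediaWiki source; the bot list sits between
# <pre> and </pre>, so these tags delimit the header, the list and the footer.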

goodJarRegEx="\(Java archive data (JAR)\)\|\(Zip\)"
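# file(1) typically labels a jar as "Java archive data (JAR)" or, since jars
# are zip containers, as "Zip archive data ...", hence the two alternatives.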

# mw extension stands for MediaWiki
wikiPageFileWeb=Participants.Web.mw
wikiPageFileFixed=Participants.Fixed.mw

function getParticipantsPage () {
	# download the raw participants page; abort the whole script on failure
	curl --silent --max-time 10 "$1" > "$wikiPageFileWeb" \
		|| { echo "ERROR: cannot download participants wiki page at $1" >&2; exit 1; }
}

function getBotsList () {
	# everything strictly between the opening <pre> and the closing </pre>
	sed -e "0,/$TAGbegin/d" -e "/$TAGend/,\$d" "$1"
}

function getHeader () {
	# everything up to and including the opening <pre>
	sed -ne "0,/$TAGbegin/p" "$1"
}

function getFooter () {
	# everything from the closing </pre> to the end of the page
	sed -ne "/$TAGend/,\$p" "$1"
}

function isUrlValidJar () {
	# URLs already pointing at the archive are assumed good
	echo "$1" | grep -q -i "$archiveRegEx" && return 0
	# the first ~100 bytes are enough for file(1) to recognize a jar/zip
	curl --silent --location --max-time 10 --range 0-100 "$1" \
		| file --brief - | grep -q "$goodJarRegEx"
}

function makeArchiveURLforBotName () {
	# replace runs of spaces in the bot name with a single underscore,
	# e.g. (made-up bot) "some.pkg.SomeBot 1.0"
	#   -> http://robocode-archive.strangeautomata.com/robots/some.pkg.SomeBot_1.0.jar
	echo "$archiveURL/$1.jar" | sed -e 's/ \+/_/g'
}

function fixParticipantsLinks () {
	# reads participant records from stdin, one "BotName version,URL" per line
	while IFS= read -r ln
	do
		botName=`echo "$ln" | cut -d , -f 1`
		botURL=`echo "$ln" | cut -d , -f 2`
		if isUrlValidJar "$botURL"
		then
			# url is good, keep the record as is
			echo "$ln"
		else
			# url is rotten: build the archive url for this bot
			newUrl=`makeArchiveURLforBotName "$botName"`

			# check that the archive actually has this bot's jar
			if isUrlValidJar "$newUrl"
			then
				echo "$botName,$newUrl"
				echo "Replacing link for $botName with archived one" >> "$logFile"
				echo "   old rotten link was" >> "$logFile"
				echo "      $botURL" >> "$logFile"
				echo "   new link is" >> "$logFile"
				echo "      $newUrl" >> "$logFile"
			else
				echo "Removing all records for $botName: the link is rotten and there is no archived jar" >> "$logFile"
				echo "   old record was" >> "$logFile"
				echo "      $ln" >> "$logFile"
			fi
		fi
	done
}

getParticipantsPage "$PARTICIPANTSURL"

echo "=============================================" >> "$logFile"
date >> "$logFile"
echo "=============================================" >> "$logFile"

getHeader "$wikiPageFileWeb" > "$wikiPageFileFixed"
getBotsList "$wikiPageFileWeb" | fixParticipantsLinks >> "$wikiPageFileFixed"
getFooter "$wikiPageFileWeb" >> "$wikiPageFileFixed"

echo "Ready-to-upload wiki page with fixed links is in the file $wikiPageFileFixed"
echo "Below are the changes made relative to the original stored in $wikiPageFileWeb"
diff -u "$wikiPageFileWeb" "$wikiPageFileFixed"
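
A minimal usage sketch, assuming the script is saved as fixlinks.sh (any file name works):

chmod +x fixlinks.sh
./fixlinks.sh        # writes Participants.Fixed.mw, appends a report to changes.log
less changes.log     # review which links were replaced or removed

Once the changes look right, paste the contents of Participants.Fixed.mw back into the RoboRumble/Participants wiki page.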