[Nagiosplug-devel] check_webapp plugin (very rough)

Barry Roberts manithree at crosswinds.net
Fri Aug 29 16:53:13 CEST 2003


I did this to replace Sitescope with nagios and it does 99% of what I
need right now.  It uses curl to hit web apps and keep track of
cookies, search output etc.  So it logs into my web apps, does things
to make sure all the backend servers (database, etc.) are all working.

I would like to switch it over to pyCurl, but I don't know when I'll
have time for that.  As is, I'm just looking for feedback on whether
there's any possibility or how much work it would take to get it
contributed as a standard plugin.

Thanks,
Barry Roberts

-------------- next part --------------
#!/usr/bin/python

# Check a web application plugin for nagios
#
# Copyright (C) 2003  Barry Roberts (blr at robertsr.us)
#
# REQUIRES CURL !!
#
#    This program is free software; you can redistribute it and/or modify
#    it under the terms of the GNU General Public License as published by
#    the Free Software Foundation; either version 2 of the License, or
#    (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU General Public License for more details.
#
#    You should have received a copy of the GNU General Public License
#    along with this program; if not, write to the Free Software
#    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
##############################################################################
#
# this plugin does several things that check_http doesn't appear to do:
#
#    - keep track of cookies
#	 - check for existence and non-existence of exp in result html
#    - check multiple urls (on one check)
#
# The first arg to this script must be the url file.  It has a simple syntax
# where the first char of the line is the command.  Valid commands are:
#    # - comment
#	 U - URL
#    / - match expression (if not found in result, check fails)
#	 ! - not match expression (if found in result, check fails)
#    D - dependent URL (not yet implemented.  For images, css, etc.)


import os
import popen2
import sys
import tempfile
import time
import warnings

# Standard Nagios plugin exit codes.
STATE_OK=0
STATE_WARNING=1
STATE_CRITICAL=2
STATE_UNKNOWN=3
STATE_DEPENDENT=4

# Name of the curl binary; if necessary, you can put a full path here.
CURL='curl'


class urlObj:
	"""One check step: a URL plus its match / not-match expressions.

	All three fields start out empty; the url-file parser fills them in
	as it reads U, / and ! command lines.
	"""

	def __init__(self):
		self.url = ""      # the URL to fetch
		self.exp = ""      # expression that MUST appear in the result
		self.notexp = ""   # expression that must NOT appear in the result


def readURL(step, uri, exp, notexp, cookiefile):
	"""Fetch one URL with curl and check the result body.

	step       -- step number (currently unused; kept for callers/logging)
	uri        -- URL to fetch
	exp        -- expression that must appear in the body ('' to skip)
	notexp     -- expression that must NOT appear in the body ('' to skip)
	cookiefile -- curl cookie jar shared across steps (-b and -c)

	Returns (elapsed_seconds, message, state) where state is STATE_OK or
	STATE_CRITICAL.
	"""
	# tempfile.mkstemp creates the file atomically (no os.tmpnam race);
	# close our handle, curl rewrites the file via -o.
	fd, htmlfile = tempfile.mkstemp()
	os.close(fd)
	try:
		# Pass an argv list so no shell is involved: a URL containing
		# quotes or shell metacharacters can no longer break the command.
		argv = [CURL, '-sS', '-o', htmlfile,
			'-c', cookiefile, '-b', cookiefile, uri]
		before = time.time()
		child = popen2.Popen4(argv)
		# Popen4 merges stdout+stderr; with -sS this is curl's error text.
		childmsg = child.fromchild.read()
		r = child.wait()
		after = time.time()
		if r > 0:
			return after - before, childmsg, STATE_CRITICAL
		# grep -qs -i: quiet, no error messages, case-insensitive.
		if len(exp) > 0:
			grepret = os.spawnlp(os.P_WAIT, 'grep', 'grep',
					     '-qs', '-i', exp, htmlfile)
			if grepret > 0:
				return after - before, exp + ' not found', STATE_CRITICAL
		if len(notexp) > 0:
			grepret = os.spawnlp(os.P_WAIT, 'grep', 'grep',
					     '-qs', '-i', notexp, htmlfile)
			if grepret == 0:
				return after - before, notexp + ' found', STATE_CRITICAL
		return after - before, childmsg, STATE_OK
	finally:
		# Always remove the temp body file (it used to be leaked).
		os.remove(htmlfile)

def usage():
	"""Print the command-line synopsis on stdout."""
	# The original text said "check_http", but this plugin is check_webapp.
	print('Usage:')
	print('\tcheck_webapp --help')
	print('\tcheck_webapp urlfile')

def checkArgs():
	"""Validate the command line.

	Exits with STATE_UNKNOWN after printing usage when no urlfile is
	given (or -h/--help is requested), and after printing the revision
	for --version.  Returns normally otherwise.
	"""
	if len(sys.argv) < 2 or sys.argv[1] in ('-h', '--help'):
		usage()
		sys.exit(STATE_UNKNOWN)

	if sys.argv[1] == '--version':
		print('$Revision: 1.4 $')
		sys.exit(STATE_UNKNOWN)


# main:

def main():
	"""Read the url file named in argv[1], fetch each URL in order, and
	exit with a Nagios status: CRITICAL on the first failing step,
	otherwise OK with per-step timings as perfdata."""
	checkArgs()
	# os.tmpnam() emits a RuntimeWarning about tmpfile security; silence it.
	warnings.filterwarnings('ignore')

	f = open(sys.argv[1], 'r')
	lines = f.readlines()
	f.close()

	# One shared cookie jar so logins carry over between steps.
	cookiefile = os.tmpnam()

	allStart = time.time()
	times = []
	urls = []
	currObj = urlObj()

	# Parse the url file: the first character of each line is the command.
	for curr in lines:
		cmd = curr[0]
		if cmd == '#':
			continue
		param = curr[1:].strip()
		if cmd in ('u', 'U'):
			# A new URL starts a new step; following / and !
			# lines attach to this step.
			currObj = urlObj()
			currObj.url = param
			urls.append(currObj)
		elif cmd == '!':
			currObj.notexp = param
		elif cmd == '/':
			currObj.exp = param

	try:
		for step, currUrl in enumerate(urls):
			elapsed, msg, status = readURL(step, currUrl.url,
						       currUrl.exp,
						       currUrl.notexp,
						       cookiefile)
			times.append(elapsed)
			if status != STATE_OK:
				# The first failing step aborts the whole check.
				print('Error in step ' + str(step) +
				      ' (' + currUrl.url + '): ' + msg)
				sys.exit(STATE_CRITICAL)

		allDone = time.time()
		# Nagios perfdata: one "stepN=seconds" entry per URL.
		perf = ','.join(['step' + str(i) + '=' + str(t)
				 for i, t in enumerate(times)])
		print('OK - total elapsed time: ' +
		      str(allDone - allStart) + '. | ' + perf)
		sys.exit(STATE_OK)
	finally:
		# Remove the cookie jar (it used to be leaked); curl may not
		# have created it if no step ran.
		if os.path.exists(cookiefile):
			os.remove(cookiefile)

# Run only when executed as a script, not when imported as a module.
if __name__ == '__main__':
	main()


More information about the Devel mailing list