#
# Run the megatest self-tests
BINPATH = $(shell readlink -m $(PWD)/../bin)
MEGATEST = $(BINPATH)/megatest
DASHBOARD = $(BINPATH)/dashboard
PATH := $(BINPATH):$(PATH)
RUNNAME := $(shell date +w%V.%u.%H.%M)
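# RUNNAME encodes ISO week number, day of week, hour and minute,
# e.g. w07.3.14.35 (a hypothetical Wednesday of week 7 at 14:35).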
IPADDR := "-"
RUNID := 1
SERVER =
DEBUG = 1
LOGGING =
ROWS = 20
OS = $(shell grep ID /etc/*-release|cut -d= -f2|head -1)
FS = $(shell df -T .|tail -1|awk '{print $$2}')
VER = $(shell fsl info|grep checkout|awk '{print $$2}'|cut -c 1-5)
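# OS, FS and VER describe the test environment: the distro ID line from
# /etc/*-release, the filesystem type of the current directory, and the first
# five characters of the current checkout id (assuming fsl is the fossil CLI).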
# NEWTARGET causes some tests to fail. Do not use it until this is fixed.
NEWTARGET = "$(OS)/$(FS)/$(VER)"
TARGET = "ubuntu/nfs/none"
all : build unit test1 test2 test3 test4 test5 test6 test7 test8 test9
unit : basicserver.log runs.log misc.log
rel :
cd release;dashboard -rows 25 &
## basicserver.log : unittests/basicserver.scm
## script -c "./rununittest.sh basicserver $(DEBUG)" basicserver.log
%.log : build unittests/%.scm
script -c "./rununittest.sh $* $(DEBUG)" $*.log
if logpro unit.logpro $*.html < $*.log > /dev/null;then echo ALLPASS;else echo ALLFAIL;mv $*.log $*.log.FAIL;fi
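# Example: "make runs.log" runs the build target, then runs unittests/runs.scm
# via ./rununittest.sh and summarizes the result with logpro; "make unit" runs
# the full unit-test list above.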
server :
cd fullrun;$(MEGATEST) -server - -debug $(DEBUG) -run-id $(RUNID)
stopserver :
cd fullrun;$(MEGATEST) -stop-server 0
repl :
cd fullrun;$(MEGATEST) -:b -repl
test0 : cleanprep
cd simplerun ; $(MEGATEST) -server - -debug $(DEBUG)
test1 : cleanprep
test2 : fullprep
cd fullrun;$(MEGATEST) -preclean -runtests ez_pass,runfirst/a/% -reqtarg ubuntu/nfs/none :runname $(RUNNAME) -debug $(DEBUG) $(LOGGING)
cd fullrun;megatest -preclean -runtests % -target ubuntu/nfs/none :runname $(RUNNAME)_01 -testpatt %/,%/ai -debug $(DEBUG)
cd fullrun;megatest -preclean -runtests %/,%/ai -target ubuntu/nfs/none :runname $(RUNNAME)_02 -debug $(DEBUG)
cd fullrun;megatest -preclean -runtests runfirst/%,%/ai -target ubuntu/nfs/none :runname $(RUNNAME)_02 -debug $(DEBUG)
cd fullrun;megatest -runtests %/,%/winter -target ubuntu/nfs/none :runname $(RUNNAME)_03 -debug $(DEBUG)
sleep 40;cd fullrun;megatest -target ubuntu/nfs/none :runname $(RUNNAME) -set-state-status COMPLETED,FORCED :state COMPLETED :status PASS -testpatt ez_p%s,runfirst/ -debug $(DEBUG) $(LOGGING)
test3 : fullprep test3a test3b
test3a :
@echo Run runfirst and any waitons.
cd fullrun;$(MEGATEST) -preclean -runtests runfirst -reqtarg ubuntu/nfs/none :runname $(RUNNAME)_b
test3b :
@echo Run all_toplevel and all waitons
cd fullrun;$(MEGATEST) -preclean -runtests all_toplevel -reqtarg ubuntu/nfs/none :runname $(RUNNAME)_c
test4 : cleanprep
@echo "WARNING: No longer running fullprep, test converage may be lessened"
cd fullrun;time $(MEGATEST) -debug $(DEBUG) -run-wait -runtests % -reqtarg ubuntu/nfs/none :runname $(RUNNAME)_b -m "This is a comment specific to a run" -v $(LOGGING)
test4a : cleanprep
cd fullrun;time $(MEGATEST) -debug $(DEBUG) -preclean -runtests all_toplevel -reqtarg ubuntu/nfs/none :runname $(RUNNAME)_b -m "This is a comment specific to a run" -v $(LOGGING)
# NOTE: Only one instance can be a server
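# If a stale server is in the way, "make listservers" shows any running
# servers and "make stopserver" asks the server to stop (targets defined below).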
test5 : cleanprep
rm -f fullrun/a*.log fullrun/logs/*
@echo "WARNING: No longer running fullprep, test converage may be lessened"
cd fullrun;sleep 0;$(MEGATEST) -preclean -runtests % -target $(TARGET) :runname $(RUNNAME)_aa -debug $(DEBUG) $(LOGGING) > aa.log 2>&1 &
cd fullrun;sleep 0;$(MEGATEST) -preclean -runtests % -target ubuntu/nfs/sleep1 :runname $(RUNNAME)_ae -debug $(DEBUG) $(LOGGING) > ae.log 2>&1 &
cd fullrun;sleep 0;$(MEGATEST) -preclean -runtests % -target ubuntu/nfs/sleep10 :runname $(RUNNAME)_ab -debug $(DEBUG) $(LOGGING) > ab.log 2>&1 &
cd fullrun;sleep 5;$(MEGATEST) -preclean -runtests % -target ubuntu/nfs/sleep60 :runname $(RUNNAME)_ac -debug $(DEBUG) $(LOGGING) > ac.log 2>&1 &
cd fullrun;sleep 8;$(MEGATEST) -preclean -runtests % -target ubuntu/nfs/sleep240 :runname $(RUNNAME)_ad -debug $(DEBUG) $(LOGGING) > ad.log 2>&1 &
# cd fullrun;sleep 0;$(MEGATEST) -preclean -runtests % -target $(TARGET) :runname $(RUNNAME)_af -debug $(DEBUG) $(LOGGING) > af.log 2> af.log &
# MUST ADD THIS BACK IN ASAP!!!!
# cd fullrun;sleep 10;$(MEGATEST) -run-wait -target $(TARGET) :runname % -testpatt % :state RUNNING,LAUNCHED,NOT_STARTED,REMOTEHOSTSTART;echo ALL DONE
test6: fullprep
cd fullrun;$(MEGATEST) -preclean -runtests runfirst -testpatt %/1 -reqtarg ubuntu/nfs/none :runname $(RUNNAME)_itempatt -v
cd fullrun;$(MEGATEST) -preclean -runtests runfirst -testpatt %blahha% -reqtarg ubuntu/nfs/none :runname $(RUNNAME)_itempatt -debug 10
cd fullrun;$(MEGATEST) -rollup :runname newrun -target ubuntu/nfs/none -debug 10
test7:
@echo Only target a/c runname c should remain. If there is a run a/b/c then there is a cache issue.
cd simplerun;$(DASHBOARD) &
(cd simplerun; \
$(MEGATEST) -server - -daemonize; \
$(MEGATEST) -remove-runs -target %/% :runname % -testpatt %; \
$(MEGATEST) -preclean -runtests % -target a/b :runname c; sleep 5; \
$(MEGATEST) -remove-runs -target a/c :runname c; \
$(MEGATEST) -preclean -runtests % -target a/c :runname c; \
$(MEGATEST) -remove-runs -target a/b :runname c -testpatt % ; \
$(MEGATEST) -preclean -runtests % -target a/d :runname c;$(MEGATEST) -list-runs %|egrep ^Run:) > test7.log 2>&1
logpro test7.logpro test7.html < test7.log
@echo
@echo Run \"firefox test7.html\" to see the results.
# This one failed with v1.55
test8a :
cd fullrun;$(MEGATEST) -preclean -runtests priority_10_waiton_1 -target ubuntu/nfs/none :runname $(RUNNAME)_waiton_single
test8 : test8a
cd fullrun;$(MEGATEST) -preclean -runtests lineitem_fail 1 -target ubuntu/nfs/none :runname $(RUNNAME)_singletest
cd fullrun;$(MEGATEST) -preclean -runtests runfirst/fall 1 -target ubuntu/nfs/none :runname $(RUNNAME)_singleitem
cd fullrun;$(MEGATEST) -preclean -runtests test_mt_vars/2 -target ubuntu/nfs/none :runname $(RUNNAME)_singleitem_waiton
# Some simple checks for bootstrapping and run loop logic
test9 : minsetup test9a test9b test9c test9d test9e
test9a :
@echo Run super-simple mintest e, no waitons.
cd mintest;$(DASHBOARD)&
cd mintest;$(MEGATEST) -preclean -runtests e -target $(VER) -runname `date +%H.%M.%S` -debug $(DEBUG)
test9b :
@echo Run simple mintest d with one waiton c
cd mintest;$(MEGATEST) -preclean -runtests d -target $(VER) -runname `date +%H.%M.%S` -debug $(DEBUG)
test9c :
@echo "Run mintest a with full waiton chain a -> b -> c -> d -> e"
cd mintest;$(MEGATEST) -preclean -runtests a -target $(VER) -runname `date +%H.%M.%S` -debug $(DEBUG)
test9d :
@echo Run an itemized test with no items
cd mintest;$(MEGATEST) -preclean -runtests g -target $(VER) -runname `date +%H.%M.%S` -debug $(DEBUG)
test9e :
@echo "Run mintest a1 with full waiton chain with d1fail: a1 -> b1 -> c1 -> d1fail -> e1"
cd mintest;$(MEGATEST) -preclean -runtests a1 -target $(VER) -runname `date +%H.%M.%S` -debug $(DEBUG)
test10 :
@echo Run a bunch of different targets simultaneously
(cd fullrun;$(MEGATEST) -server - ;sleep 2)&
for targ in mint/btrfs/mintdir sunos/sshfs/loc; do \
(cd fullrun;$(MEGATEST) -preclean -runtests priority_10_waiton_1 -target $$targ :runname $(RUNNAME) &); done
for sys in ubuntu suse redhat debian;do \
for fs in afs nfs zfs; do \
for dpath in none tmp; do \
(cd fullrun;$(MEGATEST) -preclean -runtests priority_10_waiton_1 -target $$sys/$$fs/$$dpath :runname $(RUNNAME) &);\
done;done;done
test11 :
cd fullrun;time (for a in $$(seq 1 40);do (megatest -test-paths -target %/%/% > /dev/null ) & done; wait; )
build : ../*.scm
cd ..;make -j && make install
touch build
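# "build" is a timestamp file: whenever ../*.scm is newer than it, megatest is
# rebuilt and reinstalled (presumably into $(BINPATH) via "make install" in ..).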
cleanstart :
if killall mtest -v ;then sleep 5;killall mtest -v -9;fi;true
killall mtest -v;if [ $$? -eq 0 ];then sleep 5;killall mtest -v -9;fi
minsetup : build
mkdir -p mintest/runs mintest/links
cd mintest;$(MEGATEST) -stop-server 0
cd mintest;$(MEGATEST) -server - -debug $(DEBUG) > server.log 2>&1 &
sleep 3
cd mintest;$(DASHBOARD) -rows 18 &
cleanprep : ../*.scm Makefile */*.config build
mkdir -p fullrun/tmp/mt_runs fullrun/tmp/mt_links /tmp/$(USER)/adisk1
rm -f */logging.db
touch cleanprep
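# Like "build", "cleanprep" is a timestamp file; "make clean" removes it to
# force the preparation steps to run again.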
fullprep : cleanprep
cd fullrun;$(MEGATEST) -remove-runs :runname $(RUNNAME)% -target %/%/% -testpatt %/%
cd fullrun;$(DASHBOARD) -rows 15 &
dashboard : cleanprep
cd fullrun && $(DASHBOARD) -rows $(ROWS) &
newdashboard : cleanprep
cd fullrun && $(BINPATH)/newdashboard &
mdboard : cleanprep
cd fullrun && $(BINPATH)/mdboard &
remove :
cd fullrun;$(MEGATEST) -remove-runs :runname $(RUN) -testpatt % -itempatt % :sysname % :fsname % :datapath %
clean :
rm -f cleanprep
kill :
killall -v mtest main.sh dboard || true
rm -rf /tmp/.$(USER)-portlogger.db *run/db/* */megatest.db */logging.db */monitor.db fullrun/tmp/mt_*/* fullrun/tmp/mt_*/.db* fullrun/logs/*.log fullrun/*.log || true
killall -v mtest dboard || true
hardkill : kill
sleep 2;killall -v -9 mtest main.sh dboard
listservers :
cd fullrun;$(MEGATEST) -list-servers
runforever :
while(ls); do \
runname=`date +%F-%R:%S`; \
(cd fullrun;$(MEGATEST) -runall -target ubuntu/nfs/none :runname $$runname; \
/home/matt/data/megatest/megatest -runall -target ubuntu/nfs/none :runname $$runname; \
/home/matt/data/megatest/megatest -runall -target ubuntu/nfs/none :runname $$runname); \
done