parent 1f17751f98
commit 86e6f7a87b
makefile (14 lines changed)
@@ -118,7 +118,7 @@ inc = -I./tools/libantlr3c-3.4/ -I./tools/libantlr3c-3.4/include
 TARGET = $(exedir)gbuild $(exedir)gserver $(exedir)gserver_backup_scheduler $(exedir)gclient $(exedir)gquery $(exedir)gconsole $(api_java) $(exedir)gadd $(exedir)gsub $(exedir)ghttp $(exedir)gmonitor $(exedir)gshow

 all: $(TARGET)
-	./test/test.sh
+	./scripts/test.sh

 test_index: test_index.cpp
 	$(CC) $(EXEFLAG) -o test_index test_index.cpp $(objfile) $(library) $(openmp)

@@ -517,7 +517,7 @@ dist: clean
 	rm -rf *.info

 tarball:
-	tar -czvf devGstore.tar.gz api bin lib tools .debug .tmp .objs test docs data makefile \
+	tar -czvf devGstore.tar.gz api bin lib tools .debug .tmp .objs scripts garbage docs data makefile \
 	Main Database KVstore Util Query Signature VSTree Parser Server README.md init.conf NOTES.md StringIndex COVERAGE

 APIexample: $(api_cpp) $(api_java)

@@ -529,8 +529,8 @@ APIexample: $(api_cpp) $(api_java)
 gtest: $(objdir)gtest.o $(objfile)
 	$(CC) $(EXEFLAG) -o $(exedir)gtest $(objdir)gtest.o $(objfile) lib/libantlr.a $(library) $(openmp)

-$(objdir)gtest.o: test/gtest.cpp
-	$(CC) $(CFLAGS) test/gtest.cpp $(inc) -o $(objdir)gtest.o $(openmp)
+$(objdir)gtest.o: scripts/gtest.cpp
+	$(CC) $(CFLAGS) scripts/gtest.cpp $(inc) -o $(objdir)gtest.o $(openmp)

 $(exedir)gadd: $(objdir)gadd.o $(objfile)
 	$(CC) $(EXEFLAG) -o $(exedir)gadd $(objdir)gadd.o $(objfile) lib/libantlr.a $(library) $(openmp)

@@ -551,7 +551,7 @@ $(objdir)gsub.o: Main/gsub.cpp
 	$(CC) $(CFLAGS) Main/gsub.cpp $(inc) -o $(objdir)gsub.o $(openmp)

 sumlines:
-	bash test/sumline.sh
+	bash scripts/sumline.sh

 tag:
 	ctags -R

@@ -561,12 +561,12 @@ idx:
 	cscope -bkq #-i cscope.files

 cover:
-	bash test/cover.sh
+	bash scripts/cover.sh

 fulltest:
 	#NOTICE:compile gstore with -O2 only
 	#setup new virtuoso and configure it
-	cp test/full_test.sh ~
+	cp scripts/full_test.sh ~
 	cd ~
 	bash full_test.sh
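For orientation, the targets touched by this change can be exercised directly; a minimal usage sketch (run from the repository root, assuming the project builds cleanly):

# default target: build everything and run the relocated regression suite
make all            # invokes ./scripts/test.sh
# helper targets whose scripts moved from test/ to scripts/
make sumlines       # bash scripts/sumline.sh
make cover          # bash scripts/cover.sh
make fulltest       # copies scripts/full_test.sh to ~ and runs it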
@@ -1,8 +0,0 @@
build lubm ~/project/devGstore/data/LUBM_10.n3
unload
load lubm
query ~/project/devGstore/data/LUBM_q0.sql
show
unload
show
quit
@@ -1,61 +0,0 @@
#! /usr/bin/bash

echo "test start"

#TODO:place this together with fulltest, and keep files in fulltest
#add this to makefile to do every time

lcov -z -d ./
bin/gload watdiv.db /home/data/WatDiv/database/watdiv10M.nt
gcov -a -b -c gload.cpp
lcov --no-external --directory . --capture --output-file load.info
#genhtml --output-directory COVERAGE/LOAD --frames --show-details load.info

echo "build tested"

lcov -z -d ./
bin/gquery watdiv.db /home/data/WatDiv/query/C1.sql
gcov -a -b -c gquery.cpp
lcov --no-external --directory . --capture --output-file C1.info

lcov -z -d ./
bin/gquery watdiv.db /home/data/WatDiv/query/C2.sql
gcov -a -b -c gquery.cpp
lcov --no-external --directory . --capture --output-file C2.info

lcov -z -d ./
bin/gquery watdiv.db /home/data/WatDiv/query/C3.sql
gcov -a -b -c gquery.cpp
lcov --no-external --directory . --capture --output-file C3.info

lcov -z -d ./
bin/gquery watdiv.db /home/data/WatDiv/query/F1.sql
gcov -a -b -c gquery.cpp
lcov --no-external --directory . --capture --output-file F1.info

lcov -z -d ./
bin/gquery watdiv.db /home/data/WatDiv/query/F2.sql
gcov -a -b -c gquery.cpp
lcov --no-external --directory . --capture --output-file F2.info

lcov -z -d ./
bin/gquery watdiv.db /home/data/WatDiv/query/F3.sql
gcov -a -b -c gquery.cpp
lcov --no-external --directory . --capture --output-file F3.info

lcov -z -d ./
bin/gquery watdiv.db /home/data/WatDiv/query/F4.sql
gcov -a -b -c gquery.cpp
lcov --no-external --directory . --capture --output-file F4.info

lcov -z -d ./
bin/gquery watdiv.db /home/data/WatDiv/query/F5.sql
gcov -a -b -c gquery.cpp
lcov --no-external --directory . --capture --output-file F5.info

#also need to test: api, gconsole/gclient/gserver
#Parser/Sparql* should not be tested
lcov -a load.info -a C1.info -a C2.info -a C3.info -a F1.info -a F2.info -a F3.info -a F4.info -a F5.info -o gstore.info
lcov --remove gstore.info 'Server/*' 'Main/*' 'Parser/*'
genhtml --output-directory COVERAGE --frames --show-details gstore.info
@@ -1,19 +0,0 @@
=================================================
Sesame:
load lubm_10.nt
Malformed document: Not a valid (absolute) URI: University0 [line 2]
load bsbm_100.nt
Malformed document: '7683.53' was not recognised, and could not be verified, with datatype http://www4.wiwiss.fu-berlin.de/bizer/bsbm/v01/vocabulary/USD [line 9059]
load dbpedia2014.nt
Malformed document: '304.0' was not recognised, and could not be verified, with datatype http://dbpedia.org/datatype/second [line 93]
load dblp_uniq.nt
Malformed document: Element type "http:" must be followed by either attribute specifications, ">" or "/>". [line 1, column 8]
=================================================
Jena:
load yago2.db.fix
[line: 680, col: 8 ] Illegal character in IRI (codepoint 0x5E, '^'): <0.0#m[^]...>
load yagoFacts.nt
[line: 3, col: 1 ] Expected BNode or IRI: Got: [DIRECTIVE:base]
load dblp_uniq.nt
[line: 1715764, col: 144] Bad character in IRI (space): <http://www.ifi.unizh.ch/dbtg/IDEE/team.html#Dirk[space]...>
|
@ -1,621 +0,0 @@
|
|||
#! /bin/env bash
|
||||
|
||||
#TODO: why no size.log?? always error!!!
|
||||
|
||||
#on some systems, maybe /usr/bin/ instead of /bin/
|
||||
#use the corresponding executables to deal with each dbms
|
||||
#NOTICE: require that virtuoso-openlink/openrdf-sesame/apache-jena is installed and gstore(single mode) is compiled!
|
||||
|
||||
#WARN:test gstore, jena, virtuoso with lubm, bsbm, watdiv, dbpedia
|
||||
#when testing sesame and others, just use bsbm and watdiv because the format in lubm
|
||||
#is not supported by sesame (invalid IRI), and dbpedia may be too large
|
||||
|
||||
#NOTICE+WARN:if only one db or no need to compare the database size,
|
||||
#then comment out the db-removal steps for virtuoso!!!
|
||||
|
||||
#QUERY:do we need to empty the buffer after each dbms to compare the performance?
|
||||
#query performance should focus on warm instead of cold
|
||||
|
||||
line1=--------------------------------------------------
|
||||
line2=##################################################
|
||||
#path=/media/data/
|
||||
path=/home/data/
|
||||
|
||||
db0=${path}WatDiv/
|
||||
db1=${path}LUBM/
|
||||
db2=${path}DBpedia/
|
||||
#db3=${path}BSBM/
|
||||
|
||||
#db=($db0 $db1 $db2 $db3) #db[4]=db4
|
||||
#db=($db0 $db1 $db2) #db[4]=db4
|
||||
db=(WatDiv/ LUBM/ BSBM/ DBpedia/)
|
||||
#db=(WatDiv/ LUBM/)
|
||||
#db=(TEST/)
|
||||
|
||||
#BETTER: add yago2/yago3, dblp...add more queries
|
||||
|
||||
length1=${#db[*]} #or @ instead of *
|
||||
|
||||
#BETTER: let user indicate the executable directory
|
||||
gstore=~/gStore/
|
||||
virtuoso=~/virtuoso/
|
||||
jena=~/jena/
|
||||
gstore2=~/devGstore/
|
||||
|
||||
#NOTICE: maybe oldGstore and newGstore
|
||||
|
||||
#NOTICE:remove debug and use -o2 not -g when testing gStore
|
||||
|
||||
#dbms_path=($gstore $jena $sesame $virtuoso)
|
||||
#dbms_name=(gstore jena sesame virtuoso)
|
||||
#
|
||||
#dbms_path=($gstore $jena $virtuoso)
|
||||
#dbms_name=(gstore jena virtuoso)
|
||||
#
|
||||
#dbms_path=($gstore2 $gstore)
|
||||
#dbms_name=(gstore gstore)
|
||||
#
|
||||
dbms_path=($gstore2)
|
||||
dbms_name=(gstore)
|
||||
|
||||
length2=${#dbms_path[*]} #or @ instead of *
|
||||
|
||||
#the language of the current operating system
|
||||
Chinese=zh_CN.utf8
|
||||
English=en_US.utf8
|
||||
|
||||
#for each db, compare, pass db and query as parameter
|
||||
#firstly load database, then query with unique program
|
||||
#output format: in each dbms, time.log/ result.log/
|
||||
#use each dataset name as subfolder like lubm_10.nt/ in result.log/
|
||||
#and time.log/lubm_10.nt.log, and for each query corresponding
|
||||
#to a dataset: result.log/lubm_10.nt/q1.sql.log
|
||||
#Finally, in the directory where this script is placed in, also
|
||||
#build result.log/ and time.log/
|
||||
#result.log/lubm_10.nt.tsv time.log/lubm_10.nt.tsv size.log.tsv
|
||||
|
||||
#below is old:
|
||||
#time log should be used in excel, and compare result log:
|
||||
#diff or grep -vFf file1 file2
|
||||
#better to compare line by line using awk
|
||||
|
||||
log1=result.log/
|
||||
log2=time.log/
|
||||
log3=load.log/
|
||||
|
||||
#clean logs in home(this script)
|
||||
home=`pwd`
|
||||
if [ -d ${home}/garbage/ ] #! as not
|
||||
then
|
||||
rm -rf ${home}/garbage/
|
||||
fi
|
||||
mkdir ${home}/garbage/
|
||||
if [ -d ${home}/${log1} ]
|
||||
then
|
||||
rm -rf ${home}/${log1}
|
||||
fi
|
||||
mkdir ${home}/${log1}
|
||||
if [ -d ${home}/${log2} ]
|
||||
then
|
||||
rm -rf ${home}/${log2}
|
||||
fi
|
||||
mkdir ${home}/${log2}
|
||||
if [ -d ${home}/${log3} ]
|
||||
then
|
||||
rm -rf ${home}/${log3}
|
||||
fi
|
||||
mkdir ${home}/${log3}
|
||||
|
||||
#clean logs in each dbms
|
||||
function initial()
|
||||
{
|
||||
if [ -d $log1 ]
|
||||
then
|
||||
rm -rf $log1
|
||||
fi
|
||||
mkdir $log1
|
||||
if [ -d $log2 ]
|
||||
then
|
||||
rm -rf $log2
|
||||
fi
|
||||
mkdir $log2
|
||||
if [ -d $log3 ]
|
||||
then
|
||||
rm -rf $log3
|
||||
fi
|
||||
mkdir $log3
|
||||
}
|
||||
|
||||
#size.tsv:the size after loaded time.tsv:time used to load
|
||||
tsv3=${home}/${log3}time.tsv
|
||||
tsv4=${home}/${log3}size.tsv
|
||||
|
||||
dsnum=0
|
||||
|
||||
for i in `seq $length1`
|
||||
do
|
||||
i=`expr $i - 1`
|
||||
|
||||
for tmpdb in `ls ${path}/${db[i]}/database/*.nt`
|
||||
do
|
||||
dsnum=`expr $dsnum + 1`
|
||||
if [ $dsnum -ne 1 ]
|
||||
then
|
||||
sleep 60 #for other processes
|
||||
#sudo echo 3 > /proc/sys/vm/drop_caches
|
||||
fi
|
||||
|
||||
cntdb="${tmpdb##*/}"
|
||||
cntdbINFO=${cntdb}.info
|
||||
echo "$tmpdb" #in case of special characters like &
|
||||
tsv1=${home}/${log1}/${cntdb}.tsv #compare result
|
||||
tsv2=${home}/${log2}/${cntdb}.tsv #compare time
|
||||
echo $tsv1
|
||||
echo $tsv2
|
||||
|
||||
#load this database into each dbms
|
||||
for j in `seq $length2`
|
||||
do
|
||||
j=`expr $j - 1`
|
||||
cd ${dbms_path[j]}
|
||||
name=${dbms_name[j]}
|
||||
echo $name
|
||||
mkdir ${cntdb}
|
||||
mkdir ${cntdbINFO}
|
||||
|
||||
#build logs structure
|
||||
echo "build logs structure!"
|
||||
if [ $dsnum -eq 1 ]
|
||||
then
|
||||
initial
|
||||
fi
|
||||
|
||||
mkdir ${log1}/${cntdb} #pass the cntdb if using function
|
||||
#touch ${log2}/${cntdb}.log
|
||||
|
||||
if [ ${name}x = gstorex ] #append an x in case the variable is empty, otherwise a unary operator error occurs
|
||||
#if [ ${j} -eq 0 ]
|
||||
then
|
||||
echo "this is for gstore!"
|
||||
lcov -z -d ./
|
||||
bin/gbuild $cntdb $tmpdb > load.txt 2>&1
|
||||
gcov -a -b -c gbuild.cpp
|
||||
lcov --no-external --directory . --capture --output-file ${cntdb}/load.info
|
||||
#awk '{if($1=="after" && $2=="build," && $3=="used"){split($4, a,"m");print "time:\t"a[1]}}' load.txt > load_${cntdb}.log
|
||||
awk '{if($1=="after" && $2=="build," && $3=="used"){split($4, a,"m");print "'$cntdb'""\t"a[1]}}' load.txt >> ${log3}/time.log
|
||||
#elif [ ${dbms[j]}x = ${virtuoso}x ]
|
||||
#elif [ ${dbms[j]}x = ${sesame}x ]
|
||||
#elif [ ${dbms[j]}x = ${jena}x ]
|
||||
elif [ ${name}x = jenax ]
|
||||
#elif [ ${j} -eq 1 ]
|
||||
then
|
||||
echo "this is for jena!"
|
||||
bin/tdbloader --loc "$cntdb" "$tmpdb" > load.txt 2>&1
|
||||
#awk '{if(NR==1){s=$1}else{t=$1}}END{split(s,a,":");split(t,b,":");ans=0+(b[1]-a[1])*3600+(b[2]-a[2])*60+(b[3]-a[3]);printf("%s\t%d\n", "time:", ans*1000);}' load.txt > load_${cntdb}.log
|
||||
#NOTICE:if it takes more than one day, the computed time may be <0
|
||||
awk '{if(NR==1){s=$1}else{t=$1}}END{split(s,a,":");split(t,b,":");ans=0+(b[1]-a[1])*3600+(b[2]-a[2])*60+(b[3]-a[3]);printf("%s\t%d\n", "'$cntdb'", ans*1000);}' load.txt >> ${log3}/time.log
|
||||
#cat load.txt >> "load_${cntdb}.log"
|
||||
elif [ ${name}x = sesamex ]
|
||||
#elif [ ${j} -eq 2 ]
|
||||
then
|
||||
#NOTICE+WARN:not suitable to lubm(format: not valid IRI)
|
||||
echo "this is for sesame!"
|
||||
#write instructions into run.sql
|
||||
>run.sql
|
||||
echo -e "create native\n${cntdb}\n${cntdb}\n\n\nopen ${cntdb}" >> run.sql
|
||||
echo "load ${tmpdb}" >> run.sql
|
||||
echo -e "close\nquit" >> run.sql
|
||||
bin/console.sh < run.sql > load.txt
|
||||
awk '{if($1=="Data" && $2 == "has" && $3 == "been"){split($8, a, "(");printf("%s\t%d\n", "'$cntdb'", a[2]);}}' load.txt >> ${log3}/time.log
|
||||
elif [ ${name}x = virtuosox ]
|
||||
#elif [ ${j} -eq 3 ]
|
||||
then
|
||||
echo "this is for virtuoso!"
|
||||
#maybe write instructions into run.sql
|
||||
#>run.sql
|
||||
#echo "ld_dir('${path}/${db[i]}/database/', '${cntdb}', '${cntdb}');" >> run.sql
|
||||
#echo "rdf_loader_run();" >> run.sql
|
||||
#echo "checkpoint;" >> run.sql
|
||||
>load.txt
|
||||
bin/isql 1111 dba dba exec="ld_dir('${path}/${db[i]}/database/', '${cntdb}', '${cntdb}');" | awk '{if($1=="Done."){print $3}}' >> load.txt
|
||||
bin/isql 1111 dba dba exec="rdf_loader_run();" | awk '{if($1=="Done."){print $3}}' >> load.txt
|
||||
bin/isql 1111 dba dba exec="checkpoint;" | awk '{if($1=="Done."){print $3}}' >> load.txt
|
||||
awk 'BEGIN{sum=0}{sum+=$0}END{printf("%s\t%d\n", "'$cntdb'", sum);}' load.txt >> ${log3}/time.log
|
||||
fi
|
||||
mv load.txt ${cntdbINFO}/
|
||||
|
||||
#ls -l sums the actual size, unit is k
|
||||
echo "now to sum the database size!"
|
||||
#NOTICE:the unit is KB
|
||||
#ls -lR "$cntdb" | awk 'BEGIN{sum=0}{if($1=="total"){sum=sum+$2}}END{print "size:\t"sum}' >> load_${cntdb}.log
|
||||
#if [ ${j} -eq 3 ] #virtuoso
|
||||
if [ ${name}x = virtuosox ]
|
||||
then
|
||||
#NOTICE:this db also includes the initial data
|
||||
#realDB="../database/virtuoso.db"
|
||||
realDB="database/virtuoso.db"
|
||||
#the original size of virtuoso db is 39845888B, not so accurate
|
||||
ls -l "$realDB" | awk '{sum=$5/1000-39846;print "'$cntdb'""\t"sum}' >> ${log3}/size.log
|
||||
else
|
||||
if [ ${name}x = gstorex -o ${name}x = jenax ]
|
||||
#if [ ${j} -lt 2 ]
|
||||
then
|
||||
realDB="$cntdb"
|
||||
elif [ ${name}x = sesamex ]
|
||||
#elif [ ${j} -eq 2 ]
|
||||
then
|
||||
#NOTICE:not quoted the string!
|
||||
realDB=~/.aduna/openrdf-sesame-console/repositories/
|
||||
realDB=${realDB}${cntdb}
|
||||
fi
|
||||
lang=`echo $LANG`
|
||||
if [ $lang = $English ]
|
||||
then
|
||||
ls -lR "$realDB" | awk 'BEGIN{sum=0}{if($1=="total"){sum=sum+$2}}END{print "'$cntdb'""\t"sum}' >> ${log3}/size.log
|
||||
elif [ $lang = $Chinese ]
|
||||
then
|
||||
ls -lR "$realDB" | awk 'BEGIN{sum=0}{if($1=="总用量"){sum=sum+$2}}END{print "'$cntdb'""\t"sum}' >> ${log3}/size.log
|
||||
else
|
||||
echo "the language of the operation system is not supported!"
|
||||
fi
|
||||
fi
|
||||
|
||||
timelog=${log2}/${cntdb}.log
|
||||
touch $timelog
|
||||
#NOTICE:we remove all duplicates to compare, due to different dbms preferences
|
||||
#For example, sesame and virtuoso will not include any duplicates
|
||||
for query in `ls ${path}/${db[i]}/query/*.sql`
|
||||
do
|
||||
#NOTICE:we expect there are no duplicates in sesame and virtuoso
|
||||
echo $query
|
||||
#build logs structure
|
||||
anslog=${log1}/${cntdb}/${query##*/}.log
|
||||
#touch $anslog #needed because the result maybe empty
|
||||
>${anslog}
|
||||
if [ ${name}x = gstorex ] #add a x in case of empty (need a space from ])
|
||||
#if [ ${j} -eq 0 ]
|
||||
then
|
||||
echo "this is for gstore!"
|
||||
#NOTICE:we do not add the start time in gquery.cpp, and we expect other dbms to handle it the same way.
|
||||
lcov -z -d ./
|
||||
bin/gquery "$cntdb" $query > ans.txt 2>&1
|
||||
gcov -a -b -c gquery.cpp
|
||||
lcov --no-external --directory . --capture --output-file ${cntdb}/${query##*/}.info
|
||||
awk -F ':' 'BEGIN{query="'$query'"}{if($1=="Total time used"){split($2, a, "m");split(a[1],b," ");}}END{print query"\t"b[1]}' ans.txt >> $timelog
|
||||
#grep "Total time used:" ans.txt | grep -o "[0-9]*ms" >> ${log2}/${cntdb}.log
|
||||
awk -F ':' 'BEGIN{flag=0;old="[empty result]"}{if(flag==1 && $0 ~/^?/){flag=2}else if(flag==2){if($0 ~/^$/){flag=3}else if($0 != old){print $0;old=$0}}else if(flag == 0 && $1 ~/^final result/){flag=1}}' ans.txt > $anslog
|
||||
#awk 'BEGIN{flag=0}{if(flag==1){print $0}if($1 ~/^final$/){flag=1}}' ans.txt > ${log1}/${cntdb}/${query}.log
|
||||
elif [ ${name}x = jenax ]
|
||||
#elif [ ${j} -eq 1 ]
|
||||
then
|
||||
echo "this is for jena!"
|
||||
#NOTICE: for program output on stderr (no cache), handle it like this
|
||||
#./tdbquery --repeat 1,1 --time --results TSV --loc "$cntdb" --query $query > ans.txt 2>&1
|
||||
bin/tdbquery --time --results TSV --loc "$cntdb" --query $query > ans.txt 2>&1
|
||||
#NOTICE: redirect in awk, and jena
|
||||
#use old var to remove duplicates(expect duplicates to be all together)
|
||||
#awk 'BEGIN{old=""}{if(NR>1){if($1 ~/Time:/ && $3 ~/sec/){time=$2*1000;print "'$query'""\t"time >> "'$timelog'"}else if(!($0 ~/^$/) && $0 != old){print $0 >> "'$anslog'";old=$0}}}' ans.txt
|
||||
awk 'BEGIN{old=""}{if(NR>1){if($1 ~/Time:/ && $3 ~/sec/){time=$2*1000;print "'$query'""\t"time >> "'$timelog'"}else if(!($0 ~/^?/) && $0 != old){print $0 >> "'$anslog'";old=$0}}}' ans.txt
|
||||
elif [ ${name}x = sesamex ]
|
||||
#elif [ ${j} -eq 2 ]
|
||||
then
|
||||
echo "this is for sesame!"
|
||||
#write instructions into run.sql
|
||||
>run.sql
|
||||
echo "open ${cntdb}" >> run.sql
|
||||
#pre="sparql "
|
||||
str=`cat ${query}`
|
||||
ins="sparql "${str}
|
||||
#echo "${pre}${str}" >> run.sql
|
||||
echo ${ins} >> run.sql
|
||||
echo -e "close\nquit" >> run.sql
|
||||
bin/console.sh < run.sql > ans.txt
|
||||
#awk 'BEGIN{flag=0;}{
|
||||
#if($0 ~/^+/){flag++}
|
||||
#else if(flag==2){
|
||||
#if($NF=="|"){end=NF-1}else{end=NF}
|
||||
#for(i=2;i<=end;++i){split($i, s, "|");printf("%s", s[1]) >> "'$anslog'";
|
||||
#if(i<end){printf("\t") >> "'$anslog'";}}
|
||||
#printf("\n") >> "'$anslog'";}
|
||||
#else if(flag==3){flag++;split($3, s, "(");
|
||||
#print "'$query'""\t"s[2] >> "'$timelog'"}}' ans.txt
|
||||
awk -F '|' 'BEGIN{flag=0;}{
|
||||
if($0 ~/^+/){flag++;}
|
||||
else if(flag==2){
|
||||
for(i=2;i<NF;++i){
|
||||
num=split($i, s, " ");
|
||||
for(j=1;j<num;++j){printf("%s ", s[j]) >> "'$anslog'";}
|
||||
printf("%s", s[j]) >> "'$anslog'";
|
||||
if(i<NF-1){printf("\t") >> "'$anslog'";}
|
||||
}
|
||||
printf("\n") >> "'$anslog'";
|
||||
}
|
||||
else if(flag==3){flag++;split($0, a, " ");split(a[3], b, "(");print "'$query'""\t"b[2] >> "'$timelog'"}
|
||||
}' ans.txt
|
||||
#cat > wcgdscdc.txt
|
||||
elif [ ${name}x = virtuosox ]
|
||||
#elif [ ${j} -eq 3 ]
|
||||
then
|
||||
echo "this is for virtuoso!"
|
||||
ins=`cat ${query}`
|
||||
ins="sparql "${ins}
|
||||
echo $ins > tmp.txt
|
||||
str=`awk '{for(i=1;i<=NF;++i){if($i=="WHERE" || $i=="where"){printf("from <%s> %s ", "'$cntdb'", $i)}else{printf("%s ", $i)}}}' tmp.txt`
|
||||
rm -f tmp.txt
|
||||
bin/isql 1111 dba dba exec="${str};" > ans.txt
|
||||
#awk 'BEGIN{flag=0}{if($0 ~/^____/){flag=1}else if(flag==1 && $0 ~/^$/){flag=2}else if(flag==2){if($0 ~/^$/){flag=3}else{for(i=1;i<=NF;++i){if($i ~/^http:/){str="<"$i">";}else{str="\""$i"\"";}printf("%s", str) >> "'$anslog'";if(i<NF)printf("\t") >> "'$anslog'";}printf("\n") >> "'$anslog'";}}else if(flag==3){split($0, s, " ");print "'$query'""\t"s[4] >> "'$timelog'";}}' ans.txt
|
||||
awk -F ' [ \t]+' 'BEGIN{flag=0}{if($0 ~/^____/){flag=1}else if(flag==1 && $0 ~/^$/){flag=2}else if(flag==2){if($0 ~/^$/){flag=3}else{for(i=1;i<=NF;++i){printf("%s", $i) >> "'$anslog'";if(i<NF)printf("\t") >> "'$anslog'";}printf("\n") >> "'$anslog'";}}else if(flag==3){split($0, s, " ");print "'$query'""\t"s[4] >> "'$timelog'";}}' ans.txt
|
||||
fi
|
||||
mv ans.txt ${cntdbINFO}/${query##*/}.txt
|
||||
|
||||
#NOTICE:the same record should be placed together before sorting!
|
||||
#sort according to the path order
|
||||
echo "now to sort anslog!"
|
||||
mv $anslog ${anslog}.bak
|
||||
#use own defined select-sort function
|
||||
#this function can also delete duplicates
|
||||
#BETTER: must use external-sorting when too large
|
||||
# awk -F '\t' '{for(i=1;i<=NF;++i)arr[NR]=$0}
|
||||
# END{
|
||||
# nr=sortArr(arr,NR,NF);
|
||||
# for(i=1;i<=nr;++i){print arr[i]}}
|
||||
# function sortArr(arr, nr, nf){
|
||||
# for(p=1;p<nr;++p){
|
||||
# min=p;
|
||||
# for(q=p+1;q<=nr;++q){
|
||||
# ret=less(arr[q],arr[min],nf);
|
||||
# if(ret==1){min=q}
|
||||
# else if(ret==0){swap(arr,q,nr);nr=nr-1;q=q-1}}
|
||||
# if(min!=p){swap(arr,p,min)}}
|
||||
# return nr}
|
||||
# function swap(arr,t1,t2){t=arr[t1];arr[t1]=arr[t2];arr[t2]=t}
|
||||
# function less(t1,t2,nf){
|
||||
# split(t1,s1,"\t");
|
||||
# split(t2,s2,"\t");
|
||||
# for(k=1;k<=nf;++k){
|
||||
# print s1[k]"\t"s2[k]
|
||||
# if(s1[k]<s2[k]){ return 1 }
|
||||
# else if(s1[k]>s2[k]) { return 2 }
|
||||
# else { continue; } }
|
||||
# return 0 }' ${anslog}.bak > ${anslog}
|
||||
#-k1n -k2r ...
|
||||
sort -t $'\t' -u ${anslog}.bak > ${anslog}
|
||||
rm -f ${anslog}.bak
|
||||
done
|
||||
echo "now to sort timelog!"
|
||||
mv $timelog ${timelog}.bak
|
||||
awk -F '\t' '{print $1"\t"$2 | "sort -k1"}' ${timelog}.bak > ${timelog}
|
||||
rm -f ${timelog}.bak
|
||||
|
||||
if [ ${name}x = gstorex ] #add a x in case of empty (need a space from ])
|
||||
then
|
||||
#now to add the coverage info for this dataset
|
||||
str="lcov"
|
||||
for info in `ls *.info`
|
||||
do
|
||||
str="${str}"" -a ${info}"
|
||||
done
|
||||
str="${str}"" -o COVERAGE/${cntdb}.info"
|
||||
`${str}`
|
||||
fi
|
||||
|
||||
#remove the db when finished
|
||||
echo "now to remove the cntdb!"
|
||||
if [ ${name}x = gstorex -o ${name}x = jenax ]
|
||||
#if [ ${j} -lt 2 ]
|
||||
then
|
||||
#rm -rf "$cntdb"
|
||||
echo "not remove db now"
|
||||
elif [ ${name}x = sesamex ]
|
||||
#elif [ ${j} -eq 2 ]
|
||||
then
|
||||
>run.sql
|
||||
echo "drop ${cntdb}" >> run.sql
|
||||
echo "yes" >> run.sql
|
||||
echo "quit" >> run.sql
|
||||
bin/console.sh < run.sql
|
||||
rm -f run.sql
|
||||
elif [ ${name}x = virtuosox ]
|
||||
#elif [ ${j} -eq 3 ]
|
||||
then
|
||||
echo "not remove db now"
|
||||
#bin/isql 1111 dba dba exec="sparql clear graph <${cntdb}>;"
|
||||
#bin/isql 1111 dba dba exec="checkpoint;"
|
||||
#bin/isql 1111 dba dba exec="sparql clear graph <${cntdb}>;"
|
||||
#bin/isql 1111 dba dba exec="checkpoint;"
|
||||
#bin/isql 1111 dba dba exec="sparql clear graph <${cntdb}>;"
|
||||
#bin/isql 1111 dba dba exec="checkpoint;"
|
||||
#bin/isql 1111 dba dba exec="sparql clear graph <${cntdb}>;"
|
||||
#bin/isql 1111 dba dba exec="checkpoint;"
|
||||
#bin/isql 1111 dba dba exec="sparql clear graph <${cntdb}>;"
|
||||
#bin/isql 1111 dba dba exec="checkpoint;"
|
||||
#bin/isql 1111 dba dba exec="sparql clear graph <${cntdb}>;"
|
||||
#bin/isql 1111 dba dba exec="checkpoint;"
|
||||
#bin/isql 1111 dba dba exec="sparql clear graph <${cntdb}>;"
|
||||
#bin/isql 1111 dba dba exec="checkpoint;"
|
||||
#bin/isql 1111 dba dba exec="sparql clear graph <${cntdb}>;"
|
||||
#bin/isql 1111 dba dba exec="checkpoint;"
|
||||
#bin/isql 1111 dba dba exec="sparql clear graph <${cntdb}>;"
|
||||
#bin/isql 1111 dba dba exec="checkpoint;"
|
||||
#bin/isql 1111 dba dba exec="sparql clear graph <${cntdb}>;"
|
||||
#bin/isql 1111 dba dba exec="checkpoint;"
|
||||
#bin/isql 1111 dba dba exec="delete from db.dba.load_list;"
|
||||
fi
|
||||
#BETTER:remove *.txt in each dbms path
|
||||
#rm -f *.txt
|
||||
#compare time and construct the TSV table
|
||||
if [ ${j} -eq 0 ]
|
||||
then
|
||||
echo "this is the first dbms!"
|
||||
awk -F '\t' 'BEGIN{
|
||||
print "Time\t""'${dbms_name[j]}'" }
|
||||
{ num=split($1,str,"/"); print str[num]"\t"$2 }' ${timelog} > ${tsv2}
|
||||
else
|
||||
echo "this is not the first dbms!"
|
||||
mv ${tsv2} ${tsv2}.bak
|
||||
awk -F '\t' '{
|
||||
if(NR==FNR) {
|
||||
num=split($1,str,"/");
|
||||
map[str[num]]=$2 }
|
||||
else {
|
||||
if(FNR==1) { print $0"\t""'${dbms_name[j]}'" }
|
||||
else { print $0"\t"map[$1] }
|
||||
}}' ${timelog} ${tsv2}.bak > ${tsv2}
|
||||
rm -f ${tsv2}.bak
|
||||
fi
|
||||
done
|
||||
|
||||
#compare the result and construct the TSV table
|
||||
echo "now to compare the results!"
|
||||
cd ${home}
|
||||
tvar1=`expr $length2 - 1`
|
||||
tvar2=`expr $length2 - 2`
|
||||
for p in `seq 0 $tvar2`
|
||||
do
|
||||
tvar3=`expr $p + 1`
|
||||
for q in `seq $tvar3 $tvar1`
|
||||
do
|
||||
echo $p,$q
|
||||
>compare.txt
|
||||
for query in `ls ${path}/${db[i]}/query/*.sql`
|
||||
do
|
||||
echo "compare: " $query
|
||||
tmplog=${log1}/${cntdb}/${query##*/}.log
|
||||
if [ ${dbms_name[p]}x = virtuosox -o ${dbms_name[q]}x = virtuosox ]
|
||||
#if [ $p -eq 3 -o $q -eq 3 ]
|
||||
then
|
||||
#WARN+NOTICE:the output in virtuoso is without '<>' or '""', so it is hard to compare!
|
||||
#urls beginning with "http://" are entities, but some others may be too, e.g. "FullProfessor0" in LUBM
|
||||
#So we print all 'Y' when encountering results from virtuoso
|
||||
#Another way is that we can remove all <> or "" and sort again, when comparing virtuoso and others
|
||||
#WARN:there seem to be other issues with the query results of virtuoso!
|
||||
if [ ${dbms_name[p]}x = virtuosox ]
|
||||
then
|
||||
x=$p
|
||||
y=$q
|
||||
else
|
||||
x=$q
|
||||
y=$p
|
||||
fi
|
||||
awk -F '\t' '{
|
||||
for(i=1; i<=NF; ++i){
|
||||
sub("^[<\"]", "", $i); sub("[>\"]$", "", $i); printf("%s", $i);
|
||||
if(i<NF)printf("\t");}printf("\n");}' ${dbms_path[y]}/${tmplog} > change.txt.bak
|
||||
sort -t $'\t' -u change.txt.bak > change.txt
|
||||
diff ${dbms_path[x]}/${tmplog} change.txt > comp2.txt
|
||||
rm -f comp2.txt
|
||||
#cat > tmp.txt
|
||||
#awk -F '\t' 'BEGIN{flag=0}{
|
||||
#if(NR==FNR){map[NR]=$0}
|
||||
#else if(flag==0){
|
||||
# num1=split(map[FNR],str1,"\t");
|
||||
# num2=split($0,str2,"\t");
|
||||
# if(num1 != num2){
|
||||
# flag=1;
|
||||
# }
|
||||
# else{
|
||||
# for(i=1;i<=num;++i){
|
||||
# if(str1[i]!=str2[i]){
|
||||
# flag=1;break}}}}}
|
||||
# END{print "'${query##*/}'""\tY"}' ${dbms_path[p]}/${tmplog} ${dbms_path[q]}/${tmplog} >> compare.txt
|
||||
else
|
||||
diff ${dbms_path[p]}/${tmplog} ${dbms_path[q]}/${tmplog} > comp2.txt
|
||||
rm -f comp2.txt
|
||||
#NOTICE:the col num is almost all ok for query results
|
||||
#WARN:what if row num is different?
|
||||
#awk -F '\t' 'BEGIN{flag=0}{
|
||||
#if(NR==FNR){map[NR]=$0}
|
||||
#else if(flag==0){
|
||||
# num1=split(map[FNR],str1,"\t");
|
||||
# num2=split($0,str2,"\t");
|
||||
# if(num1 != num2){
|
||||
# flag=1;
|
||||
# }
|
||||
# else{
|
||||
# for(i=1;i<=num1;++i){
|
||||
# if(str1[i]!=str2[i]){
|
||||
# flag=1;break}}}}}
|
||||
# END{
|
||||
# if(flag==0){print "'${query##*/}'""\tY"}
|
||||
# else{print "'${query##*/}'""\tN"}}' ${dbms_path[p]}/${tmplog} ${dbms_path[q]}/${tmplog} >> compare.txt
|
||||
fi
|
||||
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo -e ${query##*/}"\tN" >> compare.txt
|
||||
else
|
||||
echo -e ${query##*/}"\tY" >> compare.txt
|
||||
fi
|
||||
done
|
||||
echo "all queries done!"
|
||||
name=${dbms_name[p]}_${dbms_name[q]}
|
||||
if [ $p -eq 0 ] && [ $q -eq 1 ]
|
||||
then
|
||||
awk -F '\t' 'BEGIN{print "Result\t""'$name'"}{print $0}' compare.txt > ${tsv1}
|
||||
else
|
||||
mv ${tsv1} ${tsv1}.bak
|
||||
awk -F '\t' '{
|
||||
if(NR==FNR) { map[$1]=$2 }
|
||||
else {
|
||||
if(FNR==1) { print $0"\t""'$name'" }
|
||||
else { print $0"\t"map[$1] }
|
||||
}}' compare.txt ${tsv1}.bak > ${tsv1}
|
||||
rm -f ${tsv1}.bak
|
||||
fi
|
||||
done
|
||||
done
|
||||
done
|
||||
done
|
||||
|
||||
|
||||
|
||||
#build the load.log/ in home(this script)
|
||||
echo "now to build the load.log!"
|
||||
for j in `seq $length2`
|
||||
do
|
||||
j=`expr $j - 1`
|
||||
cd ${dbms_path[j]}
|
||||
if [ $j -eq 0 ]
|
||||
then
|
||||
echo "this is the first dbms!"
|
||||
awk -F '\t' 'BEGIN{print "dataset\\dbms\t""'${dbms_name[j]}'"}{print $0}' ${log3}/time.log > $tsv3
|
||||
awk -F '\t' 'BEGIN{print "dataset\\dbms\t""'${dbms_name[j]}'"}{print $0}' ${log3}/size.log > $tsv4
|
||||
else
|
||||
echo "this is not the first dbms!"
|
||||
mv ${tsv3} ${tsv3}.bak
|
||||
awk -F '\t' '{
|
||||
if(NR==FNR) { map[$1]=$2 }
|
||||
else {
|
||||
if(FNR==1) { print $0"\t""'${dbms_name[j]}'" }
|
||||
else { print $0"\t"map[$1] }
|
||||
}}' ${log3}/time.log ${tsv3}.bak > ${tsv3}
|
||||
rm -f ${tsv3}.bak
|
||||
mv ${tsv4} ${tsv4}.bak
|
||||
awk -F '\t' '{
|
||||
if(NR==FNR) { map[$1]=$2 }
|
||||
else {
|
||||
if(FNR==1) { print $0"\t""'${dbms_name[j]}'" }
|
||||
else { print $0"\t"map[$1] }
|
||||
}}' ${log3}/size.log ${tsv4}.bak > ${tsv4}
|
||||
rm -f ${tsv4}.bak
|
||||
fi
|
||||
done
|
||||
|
||||
|
||||
#generate coverage information view for gstore
|
||||
cd ${gstore}
|
||||
str="lcov"
|
||||
for info in `ls COVERAGE/*.info`
|
||||
do
|
||||
#info=${info##*/}
|
||||
str="${str}"" -a ${info}"
|
||||
done
|
||||
str="${str}"" -o COVERAGE/gstore.info"
|
||||
`${str}`
|
||||
lcov --remove COVERAGE/gstore.info 'Server/*' 'Main/*' 'Parser/*'
|
||||
genhtml --output-directory COVERAGE --frames --show-details COVERAGE/gstore.info
|
||||
|
||||
echo "this is the end of full test!"
|
||||
echo "please visit the result.log/, time.log/ and load.log/"
|
||||
echo "you can use excel to load the .tsv files"
|
||||
|
test/run.sh (61 lines)
|
@ -1,61 +0,0 @@
|
|||
bin/gload watdiv1000.db /home/data/WatDiv/database/watdiv_1000.nt > load.txt 2>&1
|
||||
echo "load finished"
|
||||
|
||||
bin/gsub watdiv1000.db ../WATDIV1000_small > sub.txt 2>&1
|
||||
echo "subed"
|
||||
|
||||
bin/gquery watdiv1000.db /home/data/WatDiv/query/C1.sql > C1.txt 2>&1
|
||||
echo "C1.sql finished"
|
||||
bin/gquery watdiv1000.db /home/data/WatDiv/query/C2.sql > C2.txt 2>&1
|
||||
echo "C2.sql finished"
|
||||
bin/gquery watdiv1000.db /home/data/WatDiv/query/C3.sql > C3.txt 2>&1
|
||||
echo "C3.sql finished"
|
||||
bin/gquery watdiv1000.db /home/data/WatDiv/query/F1.sql > F1.txt 2>&1
|
||||
echo "F1.sql finished"
|
||||
bin/gquery watdiv1000.db /home/data/WatDiv/query/F2.sql > F2.txt 2>&1
|
||||
echo "F2.sql finished"
|
||||
bin/gquery watdiv1000.db /home/data/WatDiv/query/F3.sql > F3.txt 2>&1
|
||||
echo "F3.sql finished"
|
||||
bin/gquery watdiv1000.db /home/data/WatDiv/query/L1.sql > L1.txt 2>&1
|
||||
echo "L1.sql finished"
|
||||
bin/gquery watdiv1000.db /home/data/WatDiv/query/L2.sql > L2.txt 2>&1
|
||||
echo "L2.sql finished"
|
||||
bin/gquery watdiv1000.db /home/data/WatDiv/query/L3.sql > L3.txt 2>&1
|
||||
echo "L3.sql finished"
|
||||
bin/gquery watdiv1000.db /home/data/WatDiv/query/S1.sql > S1.txt 2>&1
|
||||
echo "S1.sql finished"
|
||||
bin/gquery watdiv1000.db /home/data/WatDiv/query/S2.sql > S2.txt 2>&1
|
||||
echo "S2.sql finished"
|
||||
bin/gquery watdiv1000.db /home/data/WatDiv/query/S3.sql > S3.txt 2>&1
|
||||
echo "S3.sql finished"
|
||||
|
||||
bin/gadd watdiv1000.db ../WATDIV1000_small > add.txt 2>&1
|
||||
echo "added"
|
||||
|
||||
bin/gquery watdiv1000.db /home/data/WatDiv/query/C1.sql > C1.txt.bak 2>&1
|
||||
echo "C1.sql finished"
|
||||
bin/gquery watdiv1000.db /home/data/WatDiv/query/C2.sql > C2.txt.bak 2>&1
|
||||
echo "C2.sql finished"
|
||||
bin/gquery watdiv1000.db /home/data/WatDiv/query/C3.sql > C3.txt.bak 2>&1
|
||||
echo "C3.sql finished"
|
||||
bin/gquery watdiv1000.db /home/data/WatDiv/query/F1.sql > F1.txt.bak 2>&1
|
||||
echo "F1.sql finished"
|
||||
bin/gquery watdiv1000.db /home/data/WatDiv/query/F2.sql > F2.txt.bak 2>&1
|
||||
echo "F2.sql finished"
|
||||
bin/gquery watdiv1000.db /home/data/WatDiv/query/F3.sql > F3.txt.bak 2>&1
|
||||
echo "F3.sql finished"
|
||||
bin/gquery watdiv1000.db /home/data/WatDiv/query/L1.sql > L1.txt.bak 2>&1
|
||||
echo "L1.sql finished"
|
||||
bin/gquery watdiv1000.db /home/data/WatDiv/query/L2.sql > L2.txt.bak 2>&1
|
||||
echo "L2.sql finished"
|
||||
bin/gquery watdiv1000.db /home/data/WatDiv/query/L3.sql > L3.txt.bak 2>&1
|
||||
echo "L3.sql finished"
|
||||
bin/gquery watdiv1000.db /home/data/WatDiv/query/S1.sql > S1.txt.bak 2>&1
|
||||
echo "S1.sql finished"
|
||||
bin/gquery watdiv1000.db /home/data/WatDiv/query/S2.sql > S2.txt.bak 2>&1
|
||||
echo "S2.sql finished"
|
||||
bin/gquery watdiv1000.db /home/data/WatDiv/query/S3.sql > S3.txt.bak 2>&1
|
||||
echo "S3.sql finished"
|
||||
|
||||
echo "big tested"
|
||||
|
test/run1.sh (37 lines)
|
@ -1,37 +0,0 @@
|
|||
bin/gload lubm5000.db /home/data/LUBM/database/lubm_5000.nt > load.txt 2>&1
|
||||
echo "load finished"
|
||||
|
||||
bin/gsub lubm5000.db ../LUBM5000_small > sub.txt 2>&1
|
||||
echo "subed"
|
||||
|
||||
bin/gquery lubm5000.db /home/data/LUBM/query/q0.sql > q0.txt 2>&1
|
||||
echo "q0.sql finished"
|
||||
bin/gquery lubm5000.db /home/data/LUBM/query/q1.sql > q1.txt 2>&1
|
||||
echo "q1.sql finished"
|
||||
bin/gquery lubm5000.db /home/data/LUBM/query/q2.sql > q2.txt 2>&1
|
||||
echo "q2.sql finished"
|
||||
bin/gquery lubm5000.db /home/data/LUBM/query/q3.sql > q3.txt 2>&1
|
||||
echo "q3.sql finished"
|
||||
bin/gquery lubm5000.db /home/data/LUBM/query/q4.sql > q4.txt 2>&1
|
||||
echo "q4.sql finished"
|
||||
bin/gquery lubm5000.db /home/data/LUBM/query/q5.sql > q5.txt 2>&1
|
||||
echo "q5.sql finished"
|
||||
|
||||
bin/gadd lubm5000.db ../LUBM5000_small > add.txt 2>&1
|
||||
echo "added"
|
||||
|
||||
bin/gquery lubm5000.db /home/data/LUBM/query/q0.sql > q0.txt.bak 2>&1
|
||||
echo "q0.sql finished"
|
||||
bin/gquery lubm5000.db /home/data/LUBM/query/q1.sql > q1.txt.bak 2>&1
|
||||
echo "q1.sql finished"
|
||||
bin/gquery lubm5000.db /home/data/LUBM/query/q2.sql > q2.txt.bak 2>&1
|
||||
echo "q2.sql finished"
|
||||
bin/gquery lubm5000.db /home/data/LUBM/query/q3.sql > q3.txt.bak 2>&1
|
||||
echo "q3.sql finished"
|
||||
bin/gquery lubm5000.db /home/data/LUBM/query/q4.sql > q4.txt.bak 2>&1
|
||||
echo "q4.sql finished"
|
||||
bin/gquery lubm5000.db /home/data/LUBM/query/q5.sql > q5.txt.bak 2>&1
|
||||
echo "q5.sql finished"
|
||||
|
||||
echo "big tested"
|
||||
|
test/run2.sh (24 lines)
|
@ -1,24 +0,0 @@
|
|||
bin/gload dbpedia.db /home/data/DBpedia/database/dbpedia2014.nt > load.txt 2>&1
|
||||
echo "load finished"
|
||||
|
||||
bin/gsub dbpedia.db ../dbpedia_small > sub.txt 2>&1
|
||||
echo "subed"
|
||||
|
||||
bin/gquery dbpedia.db /home/data/DBpedia/query/q0.sql > q0.txt 2>&1
|
||||
echo "q0.sql finished"
|
||||
bin/gquery dbpedia.db /home/data/DBpedia/query/q1.sql > q1.txt 2>&1
|
||||
echo "q1.sql finished"
|
||||
bin/gquery dbpedia.db /home/data/DBpedia/query/q2.sql > q2.txt 2>&1
|
||||
echo "q2.sql finished"
|
||||
bin/gquery dbpedia.db /home/data/DBpedia/query/q3.sql > q3.txt 2>&1
|
||||
echo "q3.sql finished"
|
||||
bin/gquery dbpedia.db /home/data/DBpedia/query/q4.sql > q4.txt 2>&1
|
||||
echo "q4.sql finished"
|
||||
bin/gquery dbpedia.db /home/data/DBpedia/query/q5.sql > q5.txt 2>&1
|
||||
echo "q5.sql finished"
|
||||
|
||||
bin/gadd dbpedia.db ../dbpedia_small > add.txt 2>&1
|
||||
echo "added"
|
||||
|
||||
echo "big tested"
|
||||
|
test/run3.sh (46 lines)
|
@ -1,46 +0,0 @@
|
|||
bin/gload watdiv4000.db /home/data/WatDiv/watdiv_4000.nt > load.txt 2>&1
|
||||
echo "load finished"
|
||||
|
||||
bin/gquery watdiv4000.db /home/data/WatDiv/query/C1.sql > C1.txt 2>&1
|
||||
echo "C1.sql finished"
|
||||
bin/gquery watdiv4000.db /home/data/WatDiv/query/C2.sql > C2.txt 2>&1
|
||||
echo "C2.sql finished"
|
||||
bin/gquery watdiv4000.db /home/data/WatDiv/query/C3.sql > C3.txt 2>&1
|
||||
echo "C3.sql finished"
|
||||
bin/gquery watdiv4000.db /home/data/WatDiv/query/F1.sql > F1.txt 2>&1
|
||||
echo "F1.sql finished"
|
||||
bin/gquery watdiv4000.db /home/data/WatDiv/query/F2.sql > F2.txt 2>&1
|
||||
echo "F2.sql finished"
|
||||
bin/gquery watdiv4000.db /home/data/WatDiv/query/F3.sql > F3.txt 2>&1
|
||||
echo "F3.sql finished"
|
||||
bin/gquery watdiv4000.db /home/data/WatDiv/query/F4.sql > F4.txt 2>&1
|
||||
echo "F4.sql finished"
|
||||
bin/gquery watdiv4000.db /home/data/WatDiv/query/F5.sql > F5.txt 2>&1
|
||||
echo "F5.sql finished"
|
||||
bin/gquery watdiv4000.db /home/data/WatDiv/query/L1.sql > L1.txt 2>&1
|
||||
echo "L1.sql finished"
|
||||
bin/gquery watdiv4000.db /home/data/WatDiv/query/L2.sql > L2.txt 2>&1
|
||||
echo "L2.sql finished"
|
||||
bin/gquery watdiv4000.db /home/data/WatDiv/query/L3.sql > L3.txt 2>&1
|
||||
echo "L3.sql finished"
|
||||
bin/gquery watdiv4000.db /home/data/WatDiv/query/L4.sql > L4.txt 2>&1
|
||||
echo "L4.sql finished"
|
||||
bin/gquery watdiv4000.db /home/data/WatDiv/query/L5.sql > L5.txt 2>&1
|
||||
echo "L5.sql finished"
|
||||
bin/gquery watdiv4000.db /home/data/WatDiv/query/S1.sql > S1.txt 2>&1
|
||||
echo "S1.sql finished"
|
||||
bin/gquery watdiv4000.db /home/data/WatDiv/query/S2.sql > S2.txt 2>&1
|
||||
echo "S2.sql finished"
|
||||
bin/gquery watdiv4000.db /home/data/WatDiv/query/S3.sql > S3.txt 2>&1
|
||||
echo "S3.sql finished"
|
||||
bin/gquery watdiv4000.db /home/data/WatDiv/query/S4.sql > S4.txt 2>&1
|
||||
echo "S4.sql finished"
|
||||
bin/gquery watdiv4000.db /home/data/WatDiv/query/S5.sql > S5.txt 2>&1
|
||||
echo "S5.sql finished"
|
||||
bin/gquery watdiv4000.db /home/data/WatDiv/query/S6.sql > S6.txt 2>&1
|
||||
echo "S6.sql finished"
|
||||
bin/gquery watdiv4000.db /home/data/WatDiv/query/S7.sql > S7.txt 2>&1
|
||||
echo "S7.sql finished"
|
||||
|
||||
echo "big tested"
|
||||
|
test/run4.sh (46 lines)
|
@ -1,46 +0,0 @@
|
|||
bin/gload watdiv5000.db ~/watdiv_5000.nt > watdiv5000/load.txt 2>&1
|
||||
echo "load finished"
|
||||
|
||||
bin/gquery watdiv5000.db /home/data/WatDiv/query/C1.sql > watdiv5000/C1.txt 2>&1
|
||||
echo "C1.sql finished"
|
||||
bin/gquery watdiv5000.db /home/data/WatDiv/query/C2.sql > watdiv5000/C2.txt 2>&1
|
||||
echo "C2.sql finished"
|
||||
bin/gquery watdiv5000.db /home/data/WatDiv/query/C3.sql > watdiv5000/C3.txt 2>&1
|
||||
echo "C3.sql finished"
|
||||
bin/gquery watdiv5000.db /home/data/WatDiv/query/F1.sql > watdiv5000/F1.txt 2>&1
|
||||
echo "F1.sql finished"
|
||||
bin/gquery watdiv5000.db /home/data/WatDiv/query/F2.sql > watdiv5000/F2.txt 2>&1
|
||||
echo "F2.sql finished"
|
||||
bin/gquery watdiv5000.db /home/data/WatDiv/query/F3.sql > watdiv5000/F3.txt 2>&1
|
||||
echo "F3.sql finished"
|
||||
bin/gquery watdiv5000.db /home/data/WatDiv/query/F4.sql > watdiv5000/F4.txt 2>&1
|
||||
echo "F4.sql finished"
|
||||
bin/gquery watdiv5000.db /home/data/WatDiv/query/F5.sql > watdiv5000/F5.txt 2>&1
|
||||
echo "F5.sql finished"
|
||||
bin/gquery watdiv5000.db /home/data/WatDiv/query/L1.sql > watdiv5000/L1.txt 2>&1
|
||||
echo "L1.sql finished"
|
||||
bin/gquery watdiv5000.db /home/data/WatDiv/query/L2.sql > watdiv5000/L2.txt 2>&1
|
||||
echo "L2.sql finished"
|
||||
bin/gquery watdiv5000.db /home/data/WatDiv/query/L3.sql > watdiv5000/L3.txt 2>&1
|
||||
echo "L3.sql finished"
|
||||
bin/gquery watdiv5000.db /home/data/WatDiv/query/L4.sql > watdiv5000/L4.txt 2>&1
|
||||
echo "L4.sql finished"
|
||||
bin/gquery watdiv5000.db /home/data/WatDiv/query/L5.sql > watdiv5000/L5.txt 2>&1
|
||||
echo "L5.sql finished"
|
||||
bin/gquery watdiv5000.db /home/data/WatDiv/query/S1.sql > watdiv5000/S1.txt 2>&1
|
||||
echo "S1.sql finished"
|
||||
bin/gquery watdiv5000.db /home/data/WatDiv/query/S2.sql > watdiv5000/S2.txt 2>&1
|
||||
echo "S2.sql finished"
|
||||
bin/gquery watdiv5000.db /home/data/WatDiv/query/S3.sql > watdiv5000/S3.txt 2>&1
|
||||
echo "S3.sql finished"
|
||||
bin/gquery watdiv5000.db /home/data/WatDiv/query/S4.sql > watdiv5000/S4.txt 2>&1
|
||||
echo "S4.sql finished"
|
||||
bin/gquery watdiv5000.db /home/data/WatDiv/query/S5.sql > watdiv5000/S5.txt 2>&1
|
||||
echo "S5.sql finished"
|
||||
bin/gquery watdiv5000.db /home/data/WatDiv/query/S6.sql > watdiv5000/S6.txt 2>&1
|
||||
echo "S6.sql finished"
|
||||
bin/gquery watdiv5000.db /home/data/WatDiv/query/S7.sql > watdiv5000/S7.txt 2>&1
|
||||
echo "S7.sql finished"
|
||||
|
||||
echo "big tested"
|
||||
|
|
@ -1,22 +0,0 @@
|
|||
# make sure the directory DBPEDIA.info exists
|
||||
|
||||
bin/gbuild dbpedia /home/data/DBpedia/database/dbpedia170M.nt > DBPEDIA.info/load.txt 2>&1
|
||||
echo "load finished"
|
||||
|
||||
bin/gquery dbpedia /home/data/DBpedia/query/q0.sql > DBPEDIA.info/q0.txt 2>&1
|
||||
echo "q0.sql finished"
|
||||
bin/gquery dbpedia /home/data/DBpedia/query/q1.sql > DBPEDIA.info/q1.txt 2>&1
|
||||
echo "q1.sql finished"
|
||||
bin/gquery dbpedia /home/data/DBpedia/query/q2.sql > DBPEDIA.info/q2.txt 2>&1
|
||||
echo "q2.sql finished"
|
||||
bin/gquery dbpedia /home/data/DBpedia/query/q3.sql > DBPEDIA.info/q3.txt 2>&1
|
||||
echo "q3.sql finished"
|
||||
bin/gquery dbpedia /home/data/DBpedia/query/q4.sql > DBPEDIA.info/q4.txt 2>&1
|
||||
echo "q4.sql finished"
|
||||
bin/gquery dbpedia /home/data/DBpedia/query/q5.sql > DBPEDIA.info/q5.txt 2>&1
|
||||
echo "q5.sql finished"
|
||||
bin/gquery dbpedia /home/data/DBpedia/query/q6.sql > DBPEDIA.info/q6.txt 2>&1
|
||||
echo "q6.sql finished"
|
||||
|
||||
echo "dbpedia tested"
|
||||
|
|
@ -1,53 +0,0 @@
|
|||
bin/gload lubm.db /home/data/LUBM/database/lubm_5000.nt > LUBM/load.txt 2>&1
|
||||
echo "load finished"
|
||||
|
||||
bin/gquery lubm.db /home/data/LUBM/query/q0.sql > LUBM/q0.txt 2>&1
|
||||
echo "q0.sql finished"
|
||||
bin/gquery lubm.db /home/data/LUBM/query/q1.sql > LUBM/q1.txt 2>&1
|
||||
echo "q1.sql finished"
|
||||
bin/gquery lubm.db /home/data/LUBM/query/q2.sql > LUBM/q2.txt 2>&1
|
||||
echo "q2.sql finished"
|
||||
bin/gquery lubm.db /home/data/LUBM/query/q3.sql > LUBM/q3.txt 2>&1
|
||||
echo "q3.sql finished"
|
||||
bin/gquery lubm.db /home/data/LUBM/query/q4.sql > LUBM/q4.txt 2>&1
|
||||
echo "q4.sql finished"
|
||||
bin/gquery lubm.db /home/data/LUBM/query/q5.sql > LUBM/q5.txt 2>&1
|
||||
echo "q5.sql finished"
|
||||
|
||||
bin/gquery lubm.db /home/data/LUBM/query/q6.sql > LUBM/q6.txt 2>&1
|
||||
echo "q6.sql finished"
|
||||
bin/gquery lubm.db /home/data/LUBM/query/q7.sql > LUBM/q7.txt 2>&1
|
||||
echo "q7.sql finished"
|
||||
bin/gquery lubm.db /home/data/LUBM/query/q8.sql > LUBM/q8.txt 2>&1
|
||||
echo "q8.sql finished"
|
||||
bin/gquery lubm.db /home/data/LUBM/query/q9.sql > LUBM/q9.txt 2>&1
|
||||
echo "q9.sql finished"
|
||||
bin/gquery lubm.db /home/data/LUBM/query/q10.sql > LUBM/q10.txt 2>&1
|
||||
echo "q10.sql finished"
|
||||
|
||||
bin/gquery lubm.db /home/data/LUBM/query/q11.sql > LUBM/q11.txt 2>&1
|
||||
echo "q11.sql finished"
|
||||
bin/gquery lubm.db /home/data/LUBM/query/q12.sql > LUBM/q12.txt 2>&1
|
||||
echo "q12.sql finished"
|
||||
bin/gquery lubm.db /home/data/LUBM/query/q13.sql > LUBM/q13.txt 2>&1
|
||||
echo "q13.sql finished"
|
||||
bin/gquery lubm.db /home/data/LUBM/query/q14.sql > LUBM/q14.txt 2>&1
|
||||
echo "q14.sql finished"
|
||||
bin/gquery lubm.db /home/data/LUBM/query/q15.sql > LUBM/q15.txt 2>&1
|
||||
echo "q15.sql finished"
|
||||
|
||||
bin/gquery lubm.db /home/data/LUBM/query/q16.sql > LUBM/q16.txt 2>&1
|
||||
echo "q16.sql finished"
|
||||
bin/gquery lubm.db /home/data/LUBM/query/q17.sql > LUBM/q17.txt 2>&1
|
||||
echo "q17.sql finished"
|
||||
bin/gquery lubm.db /home/data/LUBM/query/q18.sql > LUBM/q18.txt 2>&1
|
||||
echo "q18.sql finished"
|
||||
bin/gquery lubm.db /home/data/LUBM/query/q19.sql > LUBM/q19.txt 2>&1
|
||||
echo "q19.sql finished"
|
||||
bin/gquery lubm.db /home/data/LUBM/query/q20.sql > LUBM/q20.txt 2>&1
|
||||
echo "q20.sql finished"
|
||||
bin/gquery lubm.db /home/data/LUBM/query/q21.sql > LUBM/q21.txt 2>&1
|
||||
echo "q21.sql finished"
|
||||
|
||||
echo "lubm tested"
|
||||
|
|
@ -1,33 +0,0 @@
|
|||
bin/gload watdiv.db /home/data/WatDiv/database/watdiv_3000.nt > WATDIV/load.txt 2>&1
|
||||
echo "load finished"
|
||||
|
||||
bin/gquery watdiv.db /home/data/WatDiv/query/C1.sql > WATDIV/C1.txt 2>&1
|
||||
echo "C1.sql finished"
|
||||
bin/gquery watdiv.db /home/data/WatDiv/query/C2.sql > WATDIV/C2.txt 2>&1
|
||||
echo "C2.sql finished"
|
||||
bin/gquery watdiv.db /home/data/WatDiv/query/C3.sql > WATDIV/C3.txt 2>&1
|
||||
echo "C3.sql finished"
|
||||
|
||||
bin/gquery watdiv.db /home/data/WatDiv/query/F1.sql > WATDIV/F1.txt 2>&1
|
||||
echo "F1.sql finished"
|
||||
bin/gquery watdiv.db /home/data/WatDiv/query/F2.sql > WATDIV/F2.txt 2>&1
|
||||
echo "F2.sql finished"
|
||||
bin/gquery watdiv.db /home/data/WatDiv/query/F3.sql > WATDIV/F3.txt 2>&1
|
||||
echo "F3.sql finished"
|
||||
|
||||
bin/gquery watdiv.db /home/data/WatDiv/query/L1.sql > WATDIV/L1.txt 2>&1
|
||||
echo "L1.sql finished"
|
||||
bin/gquery watdiv.db /home/data/WatDiv/query/L2.sql > WATDIV/L2.txt 2>&1
|
||||
echo "L2.sql finished"
|
||||
bin/gquery watdiv.db /home/data/WatDiv/query/L3.sql > WATDIV/L3.txt 2>&1
|
||||
echo "L3.sql finished"
|
||||
|
||||
bin/gquery watdiv.db /home/data/WatDiv/query/S1.sql > WATDIV/S1.txt 2>&1
|
||||
echo "S1.sql finished"
|
||||
bin/gquery watdiv.db /home/data/WatDiv/query/S2.sql > WATDIV/S2.txt 2>&1
|
||||
echo "S2.sql finished"
|
||||
bin/gquery watdiv.db /home/data/WatDiv/query/S3.sql > WATDIV/S3.txt 2>&1
|
||||
echo "S3.sql finished"
|
||||
|
||||
echo "watdiv tested"
|
||||
|
|
@@ -1,3 +0,0 @@
-- the versionCompare.db in docs/ is used to compare the performance of different versions
-- no version record should be removed!
-- For sqlite3 usage, see http://blog.csdn.net/byxdaz/article/details/5846023
|
@@ -1,3 +0,0 @@
rm Parser/Sparql*
find . -type f -print | grep -E "(makefile|Makefile|\.(c(pp)?|h|sh|py|ini|sql|conf))$" | xargs wc -l
test/test.sh (145 lines)
|
@ -1,145 +0,0 @@
|
|||
#!/bin/bash
|
||||
|
||||
#set -v
|
||||
|
||||
db=("bbug" "lubm" "num" "small")
|
||||
op=("bin/gbuild" "bin/gquery" "bin/gadd" "bin/gsub")
|
||||
path="./data/"
|
||||
bbug_sql=("0" "0d" "1" "2" "3" "4" "5" "6")
|
||||
lubm_sql=("_p0" "_p1" "_p2" "_p3" "_p4" "_q0" "_q1" "_q2" "_q3" "_q4" "_q5")
|
||||
num_sql=("0" "1" "2" "3")
|
||||
small_sql=("_dist" "_p0" "_p1" "_p2" "_p3" "_q0" "_q1" "_q2" "_q3" "_s0" "_s1")
|
||||
bbug_ans=(-1 -1 297 -1 2 24 0 -1)
|
||||
lubm_ans=(15 227393 0 27 5916 15 0 828 27 27 5916)
|
||||
num_ans=(8 0 4 1)
|
||||
small_ans=(2 2 1 27 1 1 1 4 1 5 5)
|
||||
res="queries exist errors"
|
||||
pass=1
|
||||
|
||||
#gbuild
|
||||
echo "gbuild......"
|
||||
for i in 0 1 2 3
|
||||
do
|
||||
${op[0]} ${db[$i]} ${path}${db[$i]}"/"${db[$i]}".nt" > "1.txt" 2>&1
|
||||
"rm" "1.txt"
|
||||
done
|
||||
|
||||
#gquery
|
||||
gquery(){
|
||||
correctness=1
|
||||
for i in 0 1 2 3 4 5 6 7
|
||||
do
|
||||
${op[1]} ${db[0]} ${path}${db[0]}"/"${db[0]}${bbug_sql[$i]}".sql" > "1.txt"
|
||||
if [ ${bbug_ans[$i]} -ne -1 ]
|
||||
then
|
||||
ans=$(grep "There has answer" 1.txt)
|
||||
if [ ${ans:18:${#ans}-18} -ne ${bbug_ans[$i]} ]
|
||||
then
|
||||
correctness=0
|
||||
pass=0
|
||||
fi
|
||||
fi
|
||||
"rm" "1.txt"
|
||||
done
|
||||
if [ $correctness -eq 0 ]
|
||||
then
|
||||
echo ${db[0]} ${res}
|
||||
fi
|
||||
|
||||
correctness=1
|
||||
for i in 0 1 2 3 4 5 6 7 8 9 10
|
||||
do
|
||||
${op[1]} ${db[1]} ${path}${db[1]}"/"${db[1]}${lubm_sql[$i]}".sql" > "1.txt"
|
||||
ans=$(grep "There has answer" 1.txt)
|
||||
if [ ${ans:18:${#ans}-18} -ne ${lubm_ans[$i]} ]
|
||||
then
|
||||
correctness=0
|
||||
pass=0
|
||||
fi
|
||||
"rm" "1.txt"
|
||||
done
|
||||
if [ $correctness -eq 0 ]
|
||||
then
|
||||
echo ${db[1]} ${res}
|
||||
fi
|
||||
|
||||
correctness=1
|
||||
for i in 0 1 2 3
|
||||
do
|
||||
${op[1]} ${db[2]} ${path}${db[2]}"/"${db[2]}${num_sql[$i]}".sql" > "1.txt"
|
||||
ans=$(grep "There has answer" 1.txt)
|
||||
if [ ${ans:18:${#ans}-18} -ne ${num_ans[$i]} ]
|
||||
then
|
||||
correctness=0
|
||||
pass=0
|
||||
fi
|
||||
"rm" "1.txt"
|
||||
done
|
||||
if [ $correctness -eq 0 ]
|
||||
then
|
||||
echo ${db[2]} ${res}
|
||||
fi
|
||||
|
||||
correctness=1
|
||||
for i in 0 1 2 3 4 5 6 7 8 9 10
|
||||
do
|
||||
${op[1]} ${db[3]} ${path}${db[3]}"/"${db[3]}${small_sql[$i]}".sql" > "1.txt"
|
||||
ans=$(grep "There has answer" 1.txt)
|
||||
if [ ${ans:18:${#ans}-18} -ne ${small_ans[$i]} ]
|
||||
then
|
||||
correctness=0
|
||||
pass=0
|
||||
fi
|
||||
"rm" "1.txt"
|
||||
|
||||
done
|
||||
if [ $correctness -eq 0 ]
|
||||
then
|
||||
echo ${db[3]} ${res}
|
||||
fi
|
||||
}
|
||||
echo "gquery......"
|
||||
gquery
|
||||
|
||||
#gadd and gsub
|
||||
echo "gsub and gadd......"
|
||||
for i in 0 1 2 3
|
||||
do
|
||||
for j in 3 2
|
||||
do
|
||||
${op[$j]} ${db[$i]} ${path}${db[$i]}"/"${db[$i]}".nt" > "1.txt"
|
||||
"rm" "1.txt"
|
||||
done
|
||||
done
|
||||
gquery
|
||||
|
||||
for i in 2 3
|
||||
do
|
||||
${op[$i]} ${db[3]} ${path}${db[3]}"/small_add.nt" > "1.txt"
|
||||
"rm" "1.txt"
|
||||
done
|
||||
correctness=1
|
||||
for i in 0 1 2 3 4 5 6 7 8 9 10
|
||||
do
|
||||
${op[1]} ${db[3]} ${path}${db[3]}"/"${db[3]}${small_sql[$i]}".sql" > "1.txt"
|
||||
ans=$(grep "There has answer" 1.txt)
|
||||
if [ ${ans:18:${#ans}-18} -ne ${small_ans[$i]} ]
|
||||
then
|
||||
correctness=0
|
||||
pass=0
|
||||
fi
|
||||
"rm" "1.txt"
|
||||
|
||||
done
|
||||
if [ $correctness -eq 0 ]
|
||||
then
|
||||
echo ${db[3]} ${res}
|
||||
fi
|
||||
if [ $pass -eq 1 ]
|
||||
then
|
||||
echo "Test passed!"
|
||||
else
|
||||
echo "Test failed!"
|
||||
fi
|
||||
|
||||
|