CMSIS-DSP: Reworked mechanism for naming of tests.
diff --git a/CMSIS/DSP/Testing/TestScripts/NewParser.py b/CMSIS/DSP/Testing/TestScripts/NewParser.py
index 7b08d5b..4c5b371 100644
--- a/CMSIS/DSP/Testing/TestScripts/NewParser.py
+++ b/CMSIS/DSP/Testing/TestScripts/NewParser.py
@@ -182,7 +182,8 @@
paramValue = Literal("->") + ident("PARAMID")
- message = string("message")
+ messFormat = Word(alphanums + " _/")
+ message = messFormat("message")
testField = ((Keyword("oldID") + "=" + integer("INT")) | (Keyword("truc") + "=" + integer("INT"))).setParseAction(parseField)
testData = (Literal("{") + OneOrMore(testField)("fields") + Literal("}")).setParseAction(parseTestFields)
diff --git a/CMSIS/DSP/Testing/TestScripts/Parser.py b/CMSIS/DSP/Testing/TestScripts/Parser.py
index 7760ddb..508d15f 100644
--- a/CMSIS/DSP/Testing/TestScripts/Parser.py
+++ b/CMSIS/DSP/Testing/TestScripts/Parser.py
@@ -134,6 +134,12 @@
else:
return("")
+ def getSuiteMessage(self):
+ suite = self.parent
+ group = suite.parent
+ p = group.data["message"]
+ return(p)
+
def addGroup(self,g):
""" Add a group to this node
diff --git a/CMSIS/DSP/Testing/TestScripts/Regression/Commands.py b/CMSIS/DSP/Testing/TestScripts/Regression/Commands.py
index 19d1866..4e6bc58 100755
--- a/CMSIS/DSP/Testing/TestScripts/Regression/Commands.py
+++ b/CMSIS/DSP/Testing/TestScripts/Regression/Commands.py
@@ -365,8 +365,7 @@
# Compute the regression data
def computeSummaryStat(self):
msg(" Compute regressions for %s\n" % self.testName())
- with open(os.path.join(self.buildConfig().archiveResultPath(),"processedResult_%s.txt" % self.testName()),"w") as presult:
- completed=subprocess.run([sys.executable,"summaryBench.py","-r",self.getResultPath()],stdout=presult,timeout=3600)
+ completed=subprocess.run([sys.executable,"summaryBench.py","-r",self.getResultPath(),self.testName()],timeout=3600)
# When a test fail, the regression is continuing but we
# track that a test has failed
if completed.returncode==0:
diff --git a/CMSIS/DSP/Testing/addToDB.py b/CMSIS/DSP/Testing/addToDB.py
index 5b43cff..32e2f29 100755
--- a/CMSIS/DSP/Testing/addToDB.py
+++ b/CMSIS/DSP/Testing/addToDB.py
@@ -17,25 +17,28 @@
import datetime, time
import re
-# For table creation
-MKSTRFIELD=['NAME']
+# For sql table creation
+MKSTRFIELD=[]
MKBOOLFIELD=['HARDFP', 'FASTMATH', 'NEON', 'HELIUM','UNROLL', 'ROUNDING','OPTIMIZED']
MKINTFIELD=['ID', 'CYCLES']
-MKDATEFIELD=['DATE']
-MKKEYFIELD=['CATEGORY', 'PLATFORM', 'CORE', 'COMPILER','TYPE',"RUN"]
+MKDATEFIELD=[]
+MKKEYFIELD=['DATE','NAME','CATEGORY', 'PLATFORM', 'CORE', 'COMPILER','TYPE',"RUN"]
MKKEYFIELDID={'CATEGORY':'categoryid',
+ 'NAME':'testnameid',
+ 'DATE':'testdateid',
'PLATFORM':'platformid',
'CORE':'coreid',
'COMPILER':'compilerid',
'TYPE':'typeid',
'RUN':'runid'}
-# For table value extraction
-VALSTRFIELD=['NAME','VERSION']
+# For csv table value extraction
+VALSTRFIELD=['TESTNAME','VERSION']
VALBOOLFIELD=['HARDFP', 'FASTMATH', 'NEON', 'HELIUM','UNROLL', 'ROUNDING','OPTIMIZED']
VALINTFIELD=['ID', 'CYCLES']
-VALDATEFIELD=['DATE']
-VALKEYFIELD=['CATEGORY', 'PLATFORM', 'CORE', 'COMPILER','TYPE']
+VALDATEFIELD=[]
+# Some of those fields may be created by the parsing of other fields
+VALKEYFIELD=['DATE','NAME','CATEGORY', 'PLATFORM', 'CORE', 'COMPILER','TYPE']
def joinit(iterable, delimiter):
it = iter(iterable)
@@ -101,6 +104,8 @@
# Create foreign keys
sql += "%sFOREIGN KEY(typeid) REFERENCES TYPE(typeid)," % start
sql += "FOREIGN KEY(categoryid) REFERENCES CATEGORY(categoryid),"
+ sql += "FOREIGN KEY(testnameid) REFERENCES TESTNAME(testnameid),"
+ sql += "FOREIGN KEY(testdateid) REFERENCES TESTDATE(testdateid),"
sql += "FOREIGN KEY(platformid) REFERENCES PLATFORM(platformid),"
sql += "FOREIGN KEY(coreid) REFERENCES CORE(coreid),"
sql += "FOREIGN KEY(compilerid) REFERENCES COMPILER(compilerid)"
@@ -134,9 +139,10 @@
return(result[0])
else:
fullDate = datetime.datetime.now()
- conn.execute("INSERT INTO COMPILER(compilerkindid,version,date) VALUES(?,?,?)" ,(kind,version,fullDate))
+ dateid = findInTable(conn,"TESTDATE","date",str(fullDate),"testdateid")
+ conn.execute("INSERT INTO COMPILER(compilerkindid,version,testdateid) VALUES(?,?,?)" ,(kind,version,dateid))
conn.commit()
- r = conn.execute("select compilerid from COMPILER where compilerkindid=? AND version=? AND date=?" , (kind,version,fullDate))
+ r = conn.execute("select compilerid from COMPILER where compilerkindid=? AND version=? AND testdateid=?" , (kind,version,dateid))
result=r.fetchone()
if result != None:
#print(result)
@@ -175,34 +181,36 @@
keys[field]=row[field]
if field == "NAME":
name = row[field]
- if re.match(r'^.*_f64',name):
+ if field == "TESTNAME":
+ testname = row[field]
+ if re.match(r'^.*_f64',testname):
keys["TYPE"] = "f64"
- if re.match(r'^.*_f32',name):
+ if re.match(r'^.*_f32',testname):
keys["TYPE"] = "f32"
- if re.match(r'^.*_f16',name):
+ if re.match(r'^.*_f16',testname):
keys["TYPE"] = "f16"
- if re.match(r'^.*_q31',name):
+ if re.match(r'^.*_q31',testname):
keys["TYPE"] = "q31"
- if re.match(r'^.*_q15',name):
+ if re.match(r'^.*_q15',testname):
keys["TYPE"] = "q15"
- if re.match(r'^.*_q7',name):
+ if re.match(r'^.*_q7',testname):
keys["TYPE"] = "q7"
- if re.match(r'^.*_s8',name):
+ if re.match(r'^.*_s8',testname):
keys["TYPE"] = "s8"
- if re.match(r'^.*_u8',name):
+ if re.match(r'^.*_u8',testname):
keys["TYPE"] = "u8"
- if re.match(r'^.*_s16',name):
+ if re.match(r'^.*_s16',testname):
keys["TYPE"] = "s16"
- if re.match(r'^.*_u16',name):
+ if re.match(r'^.*_u16',testname):
keys["TYPE"] = "u16"
- if re.match(r'^.*_s32',name):
+ if re.match(r'^.*_s32',testname):
keys["TYPE"] = "s32"
- if re.match(r'^.*_u32',name):
+ if re.match(r'^.*_u32',testname):
keys["TYPE"] = "u32"
- if re.match(r'^.*_s64',name):
+ if re.match(r'^.*_s64',testname):
keys["TYPE"] = "s64"
- if re.match(r'^.*_u64',name):
+ if re.match(r'^.*_u64',testname):
keys["TYPE"] = "u64"
if field in VALINTFIELD:
@@ -224,6 +232,12 @@
testField=re.sub(r'^(.*)[:]([^:]+)(F16|F32|F64|Q31|Q15|Q7)$',r'\1',row[field])
val = findInTable(conn,"CATEGORY","category",testField,"categoryid")
keys[field]=val
+ if field == "NAME":
+ val = findInTable(conn,"TESTNAME","name",row[field],"testnameid")
+ keys[field]=val
+ if field == "DATE":
+ val = findInTable(conn,"TESTDATE","date",str(row[field]),"testdateid")
+ keys[field]=val
if field == "CORE":
val = findInTable(conn,"CORE","coredef",row[field],"coreid")
keys[field]=val
@@ -261,7 +275,8 @@
return({'compilerid':compilerid,'platformid':platformid,'coreid':coreid})
def addConfig(conn,config,fullDate):
- conn.execute("INSERT INTO CONFIG(compilerid,platformid,coreid,date) VALUES(?,?,?,?)" ,(config['compilerid'],config['platformid'],config['coreid'],fullDate))
+ dateid = findInTable(conn,"TESTDATE","date",str(fullDate),"testdateid")
+ conn.execute("INSERT INTO CONFIG(compilerid,platformid,coreid,testdateid) VALUES(?,?,?,?)" ,(config['compilerid'],config['platformid'],config['coreid'],dateid))
conn.commit()
def getGroup(a):
@@ -297,13 +312,13 @@
parser = argparse.ArgumentParser(description='Generate summary benchmarks')
-parser.add_argument('-f', nargs='?',type = str, default="Output.pickle", help="File path")
+parser.add_argument('-f', nargs='?',type = str, default="Output.pickle", help="Pickle path")
parser.add_argument('-b', nargs='?',type = str, default="FullBenchmark", help="Full Benchmark dir path")
#parser.add_argument('-e', action='store_true', help="Embedded test")
parser.add_argument('-o', nargs='?',type = str, default="bench.db", help="Benchmark database")
parser.add_argument('-r', nargs='?',type = int, default=0, help="Run ID")
-parser.add_argument('others', nargs=argparse.REMAINDER)
+parser.add_argument('others', nargs=argparse.REMAINDER, help="Suite class")
args = parser.parse_args()
diff --git a/CMSIS/DSP/Testing/addToRegDB.py b/CMSIS/DSP/Testing/addToRegDB.py
index cf6297d..1c19469 100755
--- a/CMSIS/DSP/Testing/addToRegDB.py
+++ b/CMSIS/DSP/Testing/addToRegDB.py
@@ -17,27 +17,30 @@
import datetime, time
import re
-# For table creation
-MKSTRFIELD=['NAME','Regression']
+# For sql table creation
+MKSTRFIELD=['Regression']
MKBOOLFIELD=['HARDFP', 'FASTMATH', 'NEON', 'HELIUM','UNROLL', 'ROUNDING','OPTIMIZED']
MKINTFIELD=['ID','MAX']
MKREALFIELD=['MAXREGCOEF']
-MKDATEFIELD=['DATE']
-MKKEYFIELD=['CATEGORY', 'PLATFORM', 'CORE', 'COMPILER','TYPE','RUN']
+MKDATEFIELD=[]
+MKKEYFIELD=['DATE','NAME','CATEGORY', 'PLATFORM', 'CORE', 'COMPILER','TYPE','RUN']
MKKEYFIELDID={'CATEGORY':'categoryid',
+ 'NAME':'testnameid',
+ 'DATE':'testdateid',
'PLATFORM':'platformid',
'CORE':'coreid',
'COMPILER':'compilerid',
'TYPE':'typeid',
'RUN':'runid'}
-# For table value extraction
-VALSTRFIELD=['NAME','VERSION','Regression']
+# For csv table value extraction
+VALSTRFIELD=['TESTNAME','VERSION','Regression']
VALBOOLFIELD=['HARDFP', 'FASTMATH', 'NEON', 'HELIUM','UNROLL', 'ROUNDING','OPTIMIZED']
VALINTFIELD=['ID', 'MAX']
VALREALFIELD=['MAXREGCOEF']
-VALDATEFIELD=['DATE']
-VALKEYFIELD=['CATEGORY', 'PLATFORM', 'CORE', 'COMPILER','TYPE']
+VALDATEFIELD=[]
+# Some of those fields may be created by the parsing of other fields
+VALKEYFIELD=['DATE','NAME','CATEGORY', 'PLATFORM', 'CORE', 'COMPILER','TYPE']
def joinit(iterable, delimiter):
it = iter(iterable)
@@ -107,6 +110,8 @@
# Create foreign keys
sql += "%sFOREIGN KEY(typeid) REFERENCES TYPE(typeid)," % start
sql += "FOREIGN KEY(categoryid) REFERENCES CATEGORY(categoryid),"
+ sql += "FOREIGN KEY(testnameid) REFERENCES TESTNAME(testnameid),"
+ sql += "FOREIGN KEY(testdateid) REFERENCES TESTDATE(testdateid),"
sql += "FOREIGN KEY(platformid) REFERENCES PLATFORM(platformid),"
sql += "FOREIGN KEY(coreid) REFERENCES CORE(coreid),"
sql += "FOREIGN KEY(compilerid) REFERENCES COMPILER(compilerid)"
@@ -140,9 +145,10 @@
return(result[0])
else:
fullDate = datetime.datetime.now()
- conn.execute("INSERT INTO COMPILER(compilerkindid,version,date) VALUES(?,?,?)" ,(kind,version,fullDate))
+ dateid = findInTable(conn,"TESTDATE","date",str(fullDate),"testdateid")
+ conn.execute("INSERT INTO COMPILER(compilerkindid,version,testdateid) VALUES(?,?,?)" ,(kind,version,dateid))
conn.commit()
- r = conn.execute("select compilerid from COMPILER where compilerkindid=? AND version=? AND date=?" , (kind,version,fullDate))
+ r = conn.execute("select compilerid from COMPILER where compilerkindid=? AND version=? AND testdateid=?" , (kind,version,dateid))
result=r.fetchone()
if result != None:
#print(result)
@@ -181,34 +187,36 @@
keys[field]=row[field]
if field == "NAME":
name = row[field]
- if re.match(r'^.*_f64',name):
+ if field == "TESTNAME":
+ testname = row[field]
+ if re.match(r'^.*_f64',testname):
keys["TYPE"] = "f64"
- if re.match(r'^.*_f32',name):
+ if re.match(r'^.*_f32',testname):
keys["TYPE"] = "f32"
- if re.match(r'^.*_f16',name):
+ if re.match(r'^.*_f16',testname):
keys["TYPE"] = "f16"
- if re.match(r'^.*_q31',name):
+ if re.match(r'^.*_q31',testname):
keys["TYPE"] = "q31"
- if re.match(r'^.*_q15',name):
+ if re.match(r'^.*_q15',testname):
keys["TYPE"] = "q15"
- if re.match(r'^.*_q7',name):
+ if re.match(r'^.*_q7',testname):
keys["TYPE"] = "q7"
- if re.match(r'^.*_s8',name):
+ if re.match(r'^.*_s8',testname):
keys["TYPE"] = "s8"
- if re.match(r'^.*_u8',name):
+ if re.match(r'^.*_u8',testname):
keys["TYPE"] = "u8"
- if re.match(r'^.*_s16',name):
+ if re.match(r'^.*_s16',testname):
keys["TYPE"] = "s16"
- if re.match(r'^.*_u16',name):
+ if re.match(r'^.*_u16',testname):
keys["TYPE"] = "u16"
- if re.match(r'^.*_s32',name):
+ if re.match(r'^.*_s32',testname):
keys["TYPE"] = "s32"
- if re.match(r'^.*_u32',name):
+ if re.match(r'^.*_u32',testname):
keys["TYPE"] = "u32"
- if re.match(r'^.*_s64',name):
+ if re.match(r'^.*_s64',testname):
keys["TYPE"] = "s64"
- if re.match(r'^.*_u64',name):
+ if re.match(r'^.*_u64',testname):
keys["TYPE"] = "u64"
if field in VALINTFIELD:
@@ -231,6 +239,12 @@
testField=re.sub(r'^(.*)[:]([^:]+)(F16|F32|F64|Q31|Q15|Q7)$',r'\1',row[field])
val = findInTable(conn,"CATEGORY","category",testField,"categoryid")
keys[field]=val
+ if field == "NAME":
+ val = findInTable(conn,"TESTNAME","name",row[field],"testnameid")
+ keys[field]=val
+ if field == "DATE":
+ val = findInTable(conn,"TESTDATE","date",str(row[field]),"testdateid")
+ keys[field]=val
if field == "CORE":
val = findInTable(conn,"CORE","coredef",row[field],"coreid")
keys[field]=val
@@ -271,7 +285,8 @@
return({'compilerid':compilerid,'platformid':platformid,'coreid':coreid})
def addConfig(conn,config,fullDate):
- conn.execute("INSERT INTO CONFIG(compilerid,platformid,coreid,date) VALUES(?,?,?,?)" ,(config['compilerid'],config['platformid'],config['coreid'],fullDate))
+ dateid = findInTable(conn,"TESTDATE","date",str(fullDate),"testdateid")
+ conn.execute("INSERT INTO CONFIG(compilerid,platformid,coreid,testdateid) VALUES(?,?,?,?)" ,(config['compilerid'],config['platformid'],config['coreid'],dateid))
conn.commit()
def getGroup(a):
@@ -307,13 +322,13 @@
parser = argparse.ArgumentParser(description='Generate summary benchmarks')
-parser.add_argument('-f', nargs='?',type = str, default="Output.pickle", help="File path")
+parser.add_argument('-f', nargs='?',type = str, default="Output.pickle", help="Pickle path")
parser.add_argument('-b', nargs='?',type = str, default="FullBenchmark", help="Full Benchmark dir path")
#parser.add_argument('-e', action='store_true', help="Embedded test")
parser.add_argument('-o', nargs='?',type = str, default="reg.db", help="Regression benchmark database")
parser.add_argument('-r', nargs='?',type = int, default=0, help="Run ID")
-parser.add_argument('others', nargs=argparse.REMAINDER)
+parser.add_argument('others', nargs=argparse.REMAINDER, help="Suite class")
args = parser.parse_args()
diff --git a/CMSIS/DSP/Testing/bench.txt b/CMSIS/DSP/Testing/bench.txt
index 2a5c83c..6271e77 100755
--- a/CMSIS/DSP/Testing/bench.txt
+++ b/CMSIS/DSP/Testing/bench.txt
@@ -9,7 +9,7 @@
class = BasicBenchmarks
folder = BasicMaths
- suite BasicMaths Benchmarks F32 {
+ suite Basic Maths Benchmarks F32 {
class = BasicMathsBenchmarksF32
folder = BasicMathsF32
@@ -28,34 +28,18 @@
}
Functions {
- vec_mult_f32:vec_mult_f32 {
- oldID = 0
- }
- vec_add_f32:vec_add_f32 {
- oldID = 5
- }
- vec_sub_f32:vec_sub_f32 {
- oldID = 10
- }
- vec_abs_f32:vec_abs_f32 {
- oldID = 15
- }
- vec_negate_f32:vec_negate_f32 {
- oldID = 20
- }
- vec_offset_f32:vec_offset_f32 {
- oldID = 25
- }
- vec_scale_f32:vec_scale_f32 {
- oldID = 30
- }
- vec_dot_f32:vec_dot_f32 {
- oldID = 38
- }
+ Elementwise multiplication:vec_mult_f32
+ Vector addition:vec_add_f32
+      Vector subtraction:vec_sub_f32
+ Elementwise absolute value:vec_abs_f32
+ Elementwise negation:vec_negate_f32
+ Elementwise offset:vec_offset_f32
+ Vector scaling:vec_scale_f32
+ Dot product:vec_dot_f32
} -> PARAM1_ID
}
- suite BasicMaths Benchmarks Q31 {
+ suite Basic Maths Benchmarks Q31 {
class = BasicMathsBenchmarksQ31
folder = BasicMathsQ31
@@ -74,35 +58,18 @@
}
Functions {
- vec_mult_q31:vec_mult_q31 {
- oldID = 2
- }
- vec_add_q31:vec_add_q31 {
- oldID = 7
- }
- vec_sub_q31:vec_sub_q31
- {
- oldID = 12
- }
- vec_abs_q31:vec_abs_q31 {
- oldID = 17
- }
- vec_negate_q31:vec_negate_q31 {
- oldID = 22
- }
- vec_offset_q31:vec_offset_q31 {
- oldID = 27
- }
- vec_scale_q31:vec_scale_q31 {
- oldID = 32
- }
- vec_dot_q31:vec_dot_q31 {
- oldID = 40
- }
+ Elementwise multiplication:vec_mult_q31
+ Vector addition:vec_add_q31
+      Vector subtraction:vec_sub_q31
+ Elementwise absolute value:vec_abs_q31
+ Elementwise negation:vec_negate_q31
+ Elementwise offset:vec_offset_q31
+ Vector scaling:vec_scale_q31
+ Dot product:vec_dot_q31
} -> PARAM1_ID
}
- suite BasicMaths Benchmarks Q15 {
+ suite Basic Maths Benchmarks Q15 {
class = BasicMathsBenchmarksQ15
folder = BasicMathsQ15
@@ -121,34 +88,18 @@
}
Functions {
- vec_mult_q15:vec_mult_q15 {
- oldID = 3
- }
- vec_add_q15:vec_add_q15 {
- oldID = 8
- }
- vec_sub_q15:vec_sub_q15 {
- oldID = 13
- }
- vec_abs_q15:vec_abs_q15 {
- oldID = 18
- }
- vec_negate_q15:vec_negate_q15 {
- oldID = 23
- }
- vec_offset_q15:vec_offset_q15 {
- oldID = 28
- }
- vec_scale_q15:vec_scale_q15 {
- oldID = 33
- }
- vec_dot_q15:vec_dot_q15 {
- oldID = 41
- }
+ Elementwise multiplication:vec_mult_q15
+ Vector addition:vec_add_q15
+      Vector subtraction:vec_sub_q15
+ Elementwise absolute value:vec_abs_q15
+ Elementwise negation:vec_negate_q15
+ Elementwise offset:vec_offset_q15
+ Vector scaling:vec_scale_q15
+ Dot product:vec_dot_q15
} -> PARAM1_ID
}
- suite BasicMaths Benchmarks Q7 {
+ suite Basic Maths Benchmarks Q7 {
class = BasicMathsBenchmarksQ7
folder = BasicMathsQ7
@@ -167,35 +118,19 @@
}
Functions {
- vec_mult_q7:vec_mult_q7 {
- oldID = 4
- }
- vec_add_q7:vec_add_q7 {
- oldID = 9
- }
- vec_sub_q7:vec_sub_q7 {
- oldID = 14
- }
- vec_abs_q7:vec_abs_q7 {
- oldID = 19
- }
- vec_negate_q7:vec_negate_q7 {
- oldID = 24
- }
- vec_offset_q7:vec_offset_q7 {
- oldID = 29
- }
- vec_scale_q7:vec_scale_q7 {
- oldID = 34
- }
- vec_dot_q7:vec_dot_q7 {
- oldID = 42
- }
+ Elementwise multiplication:vec_mult_q7
+ Vector addition:vec_add_q7
+      Vector subtraction:vec_sub_q7
+ Elementwise absolute value:vec_abs_q7
+ Elementwise negation:vec_negate_q7
+ Elementwise offset:vec_offset_q7
+ Vector scaling:vec_scale_q7
+ Dot product:vec_dot_q7
} -> PARAM1_ID
}
}
- group Complex Maths Benchmarks {
+ group Complex Maths {
class = ComplexBenchmarks
folder = ComplexMaths
@@ -219,24 +154,12 @@
}
Functions {
- vec_conj_f32:vec_conj_f32 {
- oldID = 0
- }
- vec_dot_prod_f32:vec_dot_prod_f32 {
- oldID = 4
- }
- vec_mag_f32:vec_mag_f32 {
- oldID = 8
- }
- vec_mag_squared_f32:vec_mag_squared_f32 {
- oldID = 12
- }
- vec_mult_cmplx_f32:vec_mult_cmplx_f32 {
- oldID = 16
- }
- vec_mult_real_f32:vec_mult_real_f32 {
- oldID = 20
- }
+ Elementwise conjugate:vec_conj_f32
+ Complex dot product:vec_dot_prod_f32
+ Elementwise modulus:vec_mag_f32
+ Elementwise modulus squared:vec_mag_squared_f32
+ Elementwise complex multiplication:vec_mult_cmplx_f32
+ Vector scaling by real number:vec_mult_real_f32
} -> PARAM1_ID
}
@@ -260,24 +183,12 @@
}
Functions {
- vec_conj_q31:vec_conj_q31 {
- oldID = 3
- }
- vec_dot_prod_q31:vec_dot_prod_q31 {
- oldID = 7
- }
- vec_mag_q31:vec_mag_q31 {
- oldID = 11
- }
- vec_mag_squared_q31:vec_mag_squared_q31 {
- oldID = 15
- }
- vec_mult_cmplx_q31:vec_mult_cmplx_q31 {
- oldID = 19
- }
- vec_mult_real_q31:vec_mult_real_q31 {
- oldID = 23
- }
+ Elementwise conjugate:vec_conj_q31
+ Complex dot product:vec_dot_prod_q31
+ Elementwise modulus:vec_mag_q31
+ Elementwise modulus squared:vec_mag_squared_q31
+ Elementwise complex multiplication:vec_mult_cmplx_q31
+ Vector scaling by real number:vec_mult_real_q31
} -> PARAM1_ID
}
@@ -301,24 +212,12 @@
}
Functions {
- vec_conj_q15:vec_conj_q15 {
- oldID = 2
- }
- vec_dot_prod_q15:vec_dot_prod_q15 {
- oldID = 6
- }
- vec_mag_q15:vec_mag_q15 {
- oldID = 10
- }
- vec_mag_squared_q15:vec_mag_squared_q15 {
- oldID = 14
- }
- vec_mult_cmplx_q15:vec_mult_cmplx_q15 {
- oldID = 18
- }
- vec_mult_real_q15:vec_mult_real_q15 {
- oldID = 22
- }
+ Elementwise conjugate:vec_conj_q15
+ Complex dot product:vec_dot_prod_q15
+ Elementwise modulus:vec_mag_q15
+ Elementwise modulus squared:vec_mag_squared_q15
+ Elementwise complex multiplication:vec_mult_cmplx_q15
+ Vector scaling by real number:vec_mult_real_q15
} -> PARAM1_ID
}
}
@@ -356,15 +255,9 @@
}
Functions {
- test_fir_f32:test_fir_f32 {
- oldID = 41
- }
- test_lms_f32:test_lms_f32 {
- oldID = 60
- }
- test_lms_norm_f32:test_lms_norm_f32 {
- oldID = 61
- }
+ FIR Filter:test_fir_f32
+ LMS Filter:test_lms_f32
+ Normalized LMS Filter:test_lms_norm_f32
} -> PARAM1_ID
}
@@ -393,15 +286,9 @@
}
Functions {
- test_fir_q31:test_fir_q31 {
- oldID = 51
- }
- test_lms_q31:test_lms_q31 {
- oldID = 65
- }
- test_lms_norm_q31:test_lms_norm_q31 {
- oldID = 62
- }
+ FIR Filter:test_fir_q31
+ LMS Filter:test_lms_q31
+ Normalized LMS Filter:test_lms_norm_q31
} -> PARAM1_ID
}
@@ -430,24 +317,18 @@
}
Functions {
- test_fir_q15:test_fir_q15 {
- oldID = 50
- }
- test_lms_q15:test_lms_q15 {
- oldID = 64
- }
- test_lms_norm_q15:test_lms_norm_q15 {
- oldID = 63
- }
+ FIR Filter:test_fir_q15
+ LMS Filter:test_lms_q15
+ Normalized LMS Filter:test_lms_norm_q15
} -> PARAM1_ID
}
}
- group MISC {
+ group Convolutions / Correlations {
class = MISC
folder = MISC
- suite MISC F32 {
+ suite Convolutions / Correlations F32 {
class = MISCF32
folder = MISCF32
@@ -469,16 +350,12 @@
}
Functions {
- test_conv_f32:test_conv_f32 {
- oldID = 9
- }
- test_correlate_f32:test_correlate_f32 {
- oldID = 27
- }
+ Convolution:test_conv_f32
+ Correlation:test_correlate_f32
} -> PARAM1_ID
}
- suite MISC Q31 {
+ suite Convolutions / Correlations Q31 {
class = MISCQ31
folder = MISCQ31
@@ -500,16 +377,12 @@
}
Functions {
- test_conv_q31:test_conv_q31 {
- oldID = 9
- }
- test_correlate_q31:test_correlate_q31 {
- oldID = 27
- }
+ Convolution:test_conv_q31
+ Correlation:test_correlate_q31
} -> PARAM1_ID
}
- suite MISC Q15 {
+ suite Convolutions / Correlations Q15 {
class = MISCQ15
folder = MISCQ15
@@ -531,16 +404,12 @@
}
Functions {
- test_conv_q15:test_conv_q15 {
- oldID = 9
- }
- test_correlate_q15:test_correlate_q15 {
- oldID = 27
- }
+ Convolution:test_conv_q15
+ Correlation:test_correlate_q15
} -> PARAM1_ID
}
- suite MISC Q7 {
+ suite Convolutions / Correlations Q7 {
class = MISCQ7
folder = MISCQ7
@@ -562,21 +431,17 @@
}
Functions {
- test_conv_q7:test_conv_q7 {
- oldID = 9
- }
- test_correlate_q7:test_correlate_q7 {
- oldID = 27
- }
+ Convolution:test_conv_q7
+ Correlation:test_correlate_q7
} -> PARAM1_ID
}
}
- group DECIM {
+ group Decimations / Interpolations {
class = DECIM
folder = DECIM
- suite DECIM F32 {
+ suite Decimations / Interpolations F32 {
class = DECIMF32
folder = DECIMF32
@@ -597,17 +462,12 @@
Params PARAM_INTERPOL_ID : Params2.txt
Functions {
- test_fir_decimate_f32:test_fir_decimate_f32 {
- oldID = 36
- } -> PARAM_DECIM_ID
-
- test_fir_interpolate_f32:test_fir_interpolate_f32 {
- oldID = 44
- } -> PARAM_INTERPOL_ID
+ Decimation:test_fir_decimate_f32 -> PARAM_DECIM_ID
+ Interpolation:test_fir_interpolate_f32 -> PARAM_INTERPOL_ID
}
}
- suite DECIM Q31 {
+ suite Decimations / Interpolations Q31 {
class = DECIMQ31
folder = DECIMQ31
@@ -628,17 +488,12 @@
Params PARAM_INTERPOL_ID : Params2.txt
Functions {
- test_fir_decimate_q31:test_fir_decimate_q31 {
- oldID = 40
- } -> PARAM_DECIM_ID
-
- test_fir_interpolate_q31:test_fir_interpolate_q31 {
- oldID = 46
- } -> PARAM_INTERPOL_ID
+ Decimation:test_fir_decimate_q31 -> PARAM_DECIM_ID
+ Interpolation:test_fir_interpolate_q31 -> PARAM_INTERPOL_ID
}
}
- suite DECIM Q15 {
+ suite Decimations / Interpolations Q15 {
class = DECIMQ15
folder = DECIMQ15
@@ -659,22 +514,17 @@
Params PARAM_INTERPOL_ID : Params2.txt
Functions {
- test_fir_decimate_q15:test_fir_decimate_q15 {
- oldID = 39
- } -> PARAM_DECIM_ID
-
- test_fir_interpolate_q15:test_fir_interpolate_q15 {
- oldID = 45
- } -> PARAM_INTERPOL_ID
+ Decimation:test_fir_decimate_q15 -> PARAM_DECIM_ID
+ Interpolation:test_fir_interpolate_q15 -> PARAM_INTERPOL_ID
}
}
}
- group BIQUAD {
+ group BiQuads {
class = BIQUAD
folder = BIQUAD
- suite BIQUAD F32 {
+ suite BiQuads F32 {
class = BIQUADF32
folder = BIQUADF32
@@ -698,21 +548,13 @@
}
Functions {
- test_biquad_cascade_df1_f32:test_biquad_cascade_df1_f32 {
- oldID = 1
- }
-
- test_biquad_cascade_df2T_f32:test_biquad_cascade_df2T_f32 {
- oldID = 6
- }
-
- test_biquad_cascade_stereo_df2T_f32:test_biquad_cascade_stereo_df2T_f32 {
- oldID = 8
- }
+ Cascaded BiQuad Filter DF1:test_biquad_cascade_df1_f32
+ Cascaded BiQuad Filter DF2T:test_biquad_cascade_df2T_f32
+ Cascaded BiQuad Filter Stereo DF2T:test_biquad_cascade_stereo_df2T_f32
} -> PARAM1_ID
}
- suite BIQUAD F64 {
+ suite BiQuads F64 {
class = BIQUADF64
folder = BIQUADF64
@@ -735,9 +577,7 @@
}
Functions {
- test_biquad_cascade_df2T_f64:test_biquad_cascade_df2T_f64 {
- oldID = 7
- }
+        Cascaded BiQuad Filter DF2T:test_biquad_cascade_df2T_f64
} -> PARAM1_ID
}
}
@@ -766,14 +606,12 @@
}
Functions {
- test_pid_f32:test_pid_f32 {
- oldID = 0
- }
- test_clarke_f32:test_clarke_f32
- test_inv_clarke_f32:test_inv_clarke_f32
- test_park_f32:test_park_f32
- test_inv_park_f32:test_inv_park_f32
- test_sin_cos_f32:test_sin_cos_f32
+ PID:test_pid_f32
+ Clarke Transform:test_clarke_f32
+ Inverse Clarke Transform:test_inv_clarke_f32
+ Park Transform:test_park_f32
+ Inverse Park Transform:test_inv_park_f32
+ Sin Cos:test_sin_cos_f32
} -> PARAM1_ID
}
@@ -796,12 +634,12 @@
}
Functions {
- test_pid_q31:test_pid_q31
- test_clarke_q31:test_clarke_q31
- test_inv_clarke_q31:test_inv_clarke_q31
- test_park_q31:test_park_q31
- test_inv_park_q31:test_inv_park_q31
- test_sin_cos_q31:test_sin_cos_q31
+ PID:test_pid_q31
+ Clarke Transform:test_clarke_q31
+ Inverse Clarke Transform:test_inv_clarke_q31
+ Park Transform:test_park_q31
+ Inverse Park Transform:test_inv_park_q31
+ Sin Cos:test_sin_cos_q31
} -> PARAM1_ID
}
@@ -824,16 +662,16 @@
}
Functions {
- test_pid_q15:test_pid_q15
+ PID:test_pid_q15
} -> PARAM1_ID
}
}
- group FastMath {
+ group Fast Maths {
class = FastMath
folder = FastMath
- suite FastMath F32 {
+ suite Fast Maths F32 {
class = FastMathF32
folder = FastMathF32
@@ -852,13 +690,13 @@
}
Functions {
- test_cos_f32:test_cos_f32
- test_sin_f32:test_sin_f32
- test_sqrt_f32:test_sqrt_f32
+ Cosine:test_cos_f32
+ Sine:test_sin_f32
+ Square Root:test_sqrt_f32
} -> PARAM1_ID
}
- suite FastMath Q31 {
+ suite Fast Maths Q31 {
class = FastMathQ31
folder = FastMathQ31
@@ -877,13 +715,13 @@
}
Functions {
- test_cos_q31:test_cos_q31
- test_sin_q31:test_sin_q31
- test_sqrt_q31:test_sqrt_q31
+ Cosine:test_cos_q31
+ Sine:test_sin_q31
+ Square Root:test_sqrt_q31
} -> PARAM1_ID
}
- suite FastMath Q15 {
+ suite Fast Maths Q15 {
class = FastMathQ15
folder = FastMathQ15
@@ -902,14 +740,14 @@
}
Functions {
- test_cos_q15:test_cos_q15
- test_sin_q15:test_sin_q15
- test_sqrt_q15:test_sqrt_q15
+ Cosine:test_cos_q15
+ Sine:test_sin_q15
+ Square Root:test_sqrt_q15
} -> PARAM1_ID
}
}
- suite SupportBar F32 {
+ suite Barycenter {
class = SupportBarF32
folder = SupportBarF32
@@ -930,16 +768,16 @@
}
Functions {
- test_barycenter_f32:test_barycenter_f32
+ Barycenter:test_barycenter_f32
} -> PARAM1_ID
}
- group Support {
+ group Support Functions {
class = Support
folder = Support
- suite Support F32 {
+ suite Support Functions F32 {
class = SupportF32
folder = SupportF32
@@ -975,7 +813,7 @@
} -> PARAM1_ID
}
- suite Support Q31 {
+ suite Support Functions Q31 {
class = SupportQ31
folder = SupportQ31
@@ -996,14 +834,14 @@
}
Functions {
- test_copy_q31:test_copy_q31
- test_fill_q31:test_fill_q31
- test_q15_to_q31:test_q15_to_q31
- test_q7_to_q31:test_q7_to_q31
+ Vector Copy:test_copy_q31
+ Vector Filling:test_fill_q31
+ Elementwise q15 to q31:test_q15_to_q31
+ Elementwise q7 to q31:test_q7_to_q31
} -> PARAM1_ID
}
- suite Support Q15 {
+ suite Support Functions Q15 {
class = SupportQ15
folder = SupportQ15
@@ -1024,14 +862,14 @@
}
Functions {
- test_copy_q15:test_copy_q15
- test_fill_q15:test_fill_q15
- test_q31_to_q15:test_q31_to_q15
- test_q7_to_q15:test_q7_to_q15
+ Vector Copy:test_copy_q15
+ Vector Filling:test_fill_q15
+ Elementwise q31 to q15:test_q31_to_q15
+ Elementwise q7 to q15:test_q7_to_q15
} -> PARAM1_ID
}
- suite Support Q7 {
+ suite Support Functions Q7 {
class = SupportQ7
folder = SupportQ7
@@ -1052,10 +890,10 @@
}
Functions {
- test_copy_q7:test_copy_q7
- test_fill_q7:test_fill_q7
- test_q31_to_q7:test_q31_to_q7
- test_q15_to_q7:test_q15_to_q7
+ Vector Copy:test_copy_q7
+ Vector Filling:test_fill_q7
+ Elementwise q31 to q7:test_q31_to_q7
+ Elementwise q15 to q7:test_q15_to_q7
} -> PARAM1_ID
}
}
@@ -1064,11 +902,11 @@
class = Matrix
folder = Matrix
- group Unary {
+ group Matrix Operators with one matrix size {
class = Unary
folder = Unary
- suite Unary F32 {
+ suite Unary Matrix Operators F32 {
class = UnaryF32
folder = UnaryF32
@@ -1088,15 +926,15 @@
}
Functions {
- test_mat_scale_f32:test_mat_scale_f32
- test_mat_inverse_f32:test_mat_inverse_f32
- test_mat_trans_f32:test_mat_trans_f32
- test_mat_add_f32:test_mat_add_f32
- test_mat_sub_f32:test_mat_sub_f32
+ Matrix Scaling:test_mat_scale_f32
+ Matrix Inversion:test_mat_inverse_f32
+ Matrix Transpose:test_mat_trans_f32
+ Matrix Addition:test_mat_add_f32
+           Matrix Subtraction:test_mat_sub_f32
} -> PARAM1_ID
}
- suite Unary Q31 {
+ suite Unary Matrix Operators Q31 {
class = UnaryQ31
folder = UnaryQ31
@@ -1116,14 +954,14 @@
}
Functions {
- test_mat_scale_q31:test_mat_scale_q31
- test_mat_trans_q31:test_mat_trans_q31
- test_mat_add_q31:test_mat_add_q31
- test_mat_sub_q31:test_mat_sub_q31
+ Matrix Scaling:test_mat_scale_q31
+ Matrix Transpose:test_mat_trans_q31
+ Matrix Addition:test_mat_add_q31
+           Matrix Subtraction:test_mat_sub_q31
} -> PARAM1_ID
}
- suite Unary Q15 {
+ suite Unary Matrix Operators Q15 {
class = UnaryQ15
folder = UnaryQ15
@@ -1143,14 +981,14 @@
}
Functions {
- test_mat_scale_q15:test_mat_scale_q15
- test_mat_trans_q15:test_mat_trans_q15
- test_mat_add_q15:test_mat_add_q15
- test_mat_sub_q15:test_mat_sub_q15
+ Matrix Scaling:test_mat_scale_q15
+ Matrix Transpose:test_mat_trans_q15
+ Matrix Addition:test_mat_add_q15
+           Matrix Subtraction:test_mat_sub_q15
} -> PARAM1_ID
}
- suite Unary F64 {
+ suite Unary Matrix Operators F64 {
class = UnaryF64
folder = UnaryF64
@@ -1170,16 +1008,16 @@
}
Functions {
- test_mat_inverse_f64:test_mat_inverse_f64
+ Matrix Inversion:test_mat_inverse_f64
} -> PARAM1_ID
}
}
- group Binary {
+ group Matrix Operators with two matrix sizes {
class = Binary
folder = Binary
- suite Binary F32 {
+ suite Binary Matrix Operators F32 {
class = BinaryF32
folder = BinaryF32
@@ -1203,12 +1041,12 @@
}
Functions {
- test_mat_mult_f32:test_mat_mult_f32
- test_mat_cmplx_mult_f32:test_mat_cmplx_mult_f32
+ Matrix Multiplication:test_mat_mult_f32
+ Complex Matrix Multiplication:test_mat_cmplx_mult_f32
} -> PARAM1_ID
}
- suite Binary Q31 {
+ suite Binary Matrix Operators Q31 {
class = BinaryQ31
folder = BinaryQ31
@@ -1232,13 +1070,13 @@
}
Functions {
- test_mat_mult_q31:test_mat_mult_q31
- test_mat_cmplx_mult_q31:test_mat_cmplx_mult_q31
- test_mat_mult_fast_q31:test_mat_mult_fast_q31
+ Matrix Multiplication:test_mat_mult_q31
+ Complex Matrix Multiplication:test_mat_cmplx_mult_q31
+ Fast Matrix Multiplication:test_mat_mult_fast_q31
} -> PARAM1_ID
}
- suite Binary Q15 {
+ suite Binary Matrix Operators Q15 {
class = BinaryQ15
folder = BinaryQ15
@@ -1262,19 +1100,19 @@
}
Functions {
- test_mat_mult_q15:test_mat_mult_q15
- test_mat_cmplx_mult_q15:test_mat_cmplx_mult_q15
- test_mat_mult_fast_q15:test_mat_mult_fast_q15
+ Matrix Multiplication:test_mat_mult_q15
+ Complex Matrix Multiplication:test_mat_cmplx_mult_q15
+ Fast Matrix Multiplication:test_mat_mult_fast_q15
} -> PARAM1_ID
}
}
}
- group Transform {
+ group Spectral Transformations {
class = Transform
folder = Transform
- suite Transform F32 {
+ suite Spectral Transformations F32 {
class = TransformF32
folder = TransformF32
@@ -1316,15 +1154,15 @@
}
Functions {
- test_cfft_f32:test_cfft_f32 -> CFFT_PARAM_ID
- test_rfft_f32:test_rfft_f32 -> RFFT_PARAM_ID
- test_dct4_f32:test_dct4_f32 -> DCT_PARAM_ID
- test_cfft_radix4_f32:test_cfft_radix4_f32 -> CFFT4_PARAM_ID
- test_cfft_radix2_f32:test_cfft_radix2_f32 -> CFFT_PARAM_ID
+ Complex FFT:test_cfft_f32 -> CFFT_PARAM_ID
+ Real FFT:test_rfft_f32 -> RFFT_PARAM_ID
+ DCT4:test_dct4_f32 -> DCT_PARAM_ID
+ Radix 4 Complex FFT:test_cfft_radix4_f32 -> CFFT4_PARAM_ID
+ Radix 2 Complex FFT:test_cfft_radix2_f32 -> CFFT_PARAM_ID
}
}
- suite Transform Q31 {
+ suite Spectral Transformations Q31 {
class = TransformQ31
folder = TransformQ31
@@ -1366,15 +1204,15 @@
}
Functions {
- test_cfft_q31:test_cfft_q31 -> CFFT_PARAM_ID
- test_rfft_q31:test_rfft_q31 -> RFFT_PARAM_ID
- test_dct4_q31:test_dct4_q31 -> DCT_PARAM_ID
- test_cfft_radix4_q31:test_cfft_radix4_q31 -> CFFT4_PARAM_ID
- test_cfft_radix2_q31:test_cfft_radix2_q31 -> CFFT_PARAM_ID
+ Complex FFT:test_cfft_q31 -> CFFT_PARAM_ID
+ Real FFT:test_rfft_q31 -> RFFT_PARAM_ID
+ DCT4:test_dct4_q31 -> DCT_PARAM_ID
+ Radix 4 Complex FFT:test_cfft_radix4_q31 -> CFFT4_PARAM_ID
+ Radix 2 Complex FFT:test_cfft_radix2_q31 -> CFFT_PARAM_ID
}
}
- suite Transform Q15 {
+ suite Spectral Transformations Q15 {
class = TransformQ15
folder = TransformQ15
@@ -1416,11 +1254,11 @@
}
Functions {
- test_cfft_q15:test_cfft_q15 -> CFFT_PARAM_ID
- test_rfft_q15:test_rfft_q15 -> RFFT_PARAM_ID
- test_dct4_q15:test_dct4_q15 -> DCT_PARAM_ID
- test_cfft_radix4_q15:test_cfft_radix4_q15 -> CFFT4_PARAM_ID
- test_cfft_radix2_q15:test_cfft_radix2_q15 -> CFFT_PARAM_ID
+ Complex FFT:test_cfft_q15 -> CFFT_PARAM_ID
+ Real FFT:test_rfft_q15 -> RFFT_PARAM_ID
+ DCT4:test_dct4_q15 -> DCT_PARAM_ID
+ Radix 4 Complex FFT:test_cfft_radix4_q15 -> CFFT4_PARAM_ID
+ Radix 2 Complex FFT:test_cfft_radix2_q15 -> CFFT_PARAM_ID
}
}
}
diff --git a/CMSIS/DSP/Testing/createDb.sql b/CMSIS/DSP/Testing/createDb.sql
index 9285be6..293a0c7 100755
--- a/CMSIS/DSP/Testing/createDb.sql
+++ b/CMSIS/DSP/Testing/createDb.sql
@@ -21,13 +21,14 @@
compilerid INTEGER PRIMARY KEY,
compilerkindid INTEGER ,
version text,
- date text,
- FOREIGN KEY(compilerkindid) REFERENCES COMPILERKIND(compilerkindid)
+ testdateid INTEGER,
+ FOREIGN KEY(compilerkindid) REFERENCES COMPILERKIND(compilerkindid),
+ FOREIGN KEY(testdateid) REFERENCES TESTDATE(testdateid)
);
CREATE INDEX compiler_index ON COMPILER(compilerkindid,version);
-CREATE INDEX compiler_date_index ON COMPILER(date);
-CREATE INDEX compiler_all_index ON COMPILER(compilerkindid,version,date);
+CREATE INDEX compiler_date_index ON COMPILER(testdateid);
+CREATE INDEX compiler_all_index ON COMPILER(compilerkindid,version,testdateid);
CREATE TABLE RUN (
runid INTEGER PRIMARY KEY,
@@ -46,15 +47,28 @@
CREATE INDEX category_index ON CATEGORY(category);
+CREATE TABLE TESTNAME (
+ testnameid INTEGER PRIMARY KEY,
+ name text);
+
+CREATE INDEX testname_index ON TESTNAME(name);
+
+CREATE TABLE TESTDATE (
+ testdateid INTEGER PRIMARY KEY,
+ date text);
+
+CREATE INDEX testdate_index ON TESTDATE(date);
+
CREATE TABLE CONFIG (
configid INTEGER PRIMARY KEY,
compilerid INTEGER,
platformid INTEGER,
coreid INTEGER,
- date text,
+ testdateid INTEGER,
FOREIGN KEY(compilerid) REFERENCES COMPILER(compilerid),
FOREIGN KEY(platformid) REFERENCES PLATFORM(platformid),
FOREIGN KEY(coreid) REFERENCES CORE(coreid)
+    ,FOREIGN KEY(testdateid) REFERENCES TESTDATE(testdateid)
);
INSERT INTO TYPE VALUES(1, "q7");
diff --git a/CMSIS/DSP/Testing/extractDb.py b/CMSIS/DSP/Testing/extractDb.py
index f9c91a4..a73975a 100755
--- a/CMSIS/DSP/Testing/extractDb.py
+++ b/CMSIS/DSP/Testing/extractDb.py
@@ -5,18 +5,6 @@
import numpy as np
remapNames={
- "BasicMathsBenchmarks": "Basic Maths",
- "ComplexMathsBenchmarks": "Complex Maths",
- "FIR": "FIR",
- "MISC": "Convolutions / Correlations",
- "DECIM": "Decimations / Interpolations",
- "BIQUAD": "BiQuad",
- "FastMath": "Fast Maths",
- "SupportBar": "Barycenter",
- "Support": "Support Functions",
- "Unary": "Matrix Unary Operations",
- "Binary": "Matrix Binary Operations",
- "Transform": "Vector Transform"
}
def convertSectionName(s):
@@ -160,10 +148,11 @@
position: fixed;
left: 0;
top: 0;
- width: 250px;
+ width: 280px;
height: 100%;
overflow:auto;
margin-top:5px;
+ margin-bottom:10px;
}
html {
@@ -180,7 +169,7 @@
body {
margin: auto;
margin-top:0px;
- margin-left:250px;
+ margin-left:280px;
}
@@ -341,10 +330,11 @@
class HTML:
- def __init__(self,output):
+ def __init__(self,output,regMode):
self._id=0
self._sectionID = 0
self._output = output
+ self._regMode = regMode
@@ -382,7 +372,10 @@
<head>
<meta charset='UTF-8'><meta name='viewport' content='width=device-width initial-scale=1'>
<title>Benchmarks</title>%s</head><body>\n""" % styleSheet)
- self._output.write("<h1>ECPS Benchmark Summary</h1>\n")
+ if self._regMode:
+ self._output.write("<h1>ECPS Benchmark Regressions</h1>\n")
+ else:
+ self._output.write("<h1>ECPS Benchmark Summary</h1>\n")
self._output.write("<p>Run number %d on %s</p>\n" % (document.runid, str(document.date)))
def leaveDocument(self,document):
@@ -420,7 +413,7 @@
parser.add_argument('-t', nargs='?',type = str, default="md", help="md,html")
# For runid or runid range
-parser.add_argument('others', nargs=argparse.REMAINDER)
+parser.add_argument('others', nargs=argparse.REMAINDER,help="Run ID")
args = parser.parse_args()
@@ -433,14 +426,14 @@
# We extract data only from data tables
# Those tables below are used for descriptions
-REMOVETABLES=['RUN','CORE', 'PLATFORM', 'COMPILERKIND', 'COMPILER', 'TYPE', 'CATEGORY', 'CONFIG']
+REMOVETABLES=['TESTNAME','TESTDATE','RUN','CORE', 'PLATFORM', 'COMPILERKIND', 'COMPILER', 'TYPE', 'CATEGORY', 'CONFIG']
# This is assuming the database is generated by the regression script
# So platform is the same for all benchmarks.
# Category and type is coming from the test name in the yaml
# So no need to add this information here
# Name is removed here because it is added at the beginning
-REMOVECOLUMNS=['runid','NAME','type','platform','category','coredef','OPTIMIZED','HARDFP','FASTMATH','NEON','HELIUM','UNROLL','ROUNDING','DATE','compilerkindid','date','categoryid', 'ID', 'platformid', 'coreid', 'compilerid', 'typeid']
+REMOVECOLUMNS=['runid','name','type','platform','category','coredef','OPTIMIZED','HARDFP','FASTMATH','NEON','HELIUM','UNROLL','ROUNDING','DATE','compilerkindid','date','categoryid', 'ID', 'platformid', 'coreid', 'compilerid', 'typeid']
# Get existing benchmark tables
def getBenchTables():
@@ -458,14 +451,19 @@
return(result)
# Get compilers from specific type and table
-versioncompiler="""select distinct compiler,version from %s
- INNER JOIN COMPILER USING(compilerid)
- INNER JOIN COMPILERKIND USING(compilerkindid) WHERE typeid=?"""
+allCompilers="""select distinct compilerid from %s WHERE typeid=?"""
+
+compilerDesc="""select compiler,version from COMPILER
+ INNER JOIN COMPILERKIND USING(compilerkindid) WHERE compilerid=?"""
# Get existing compiler in a table for a specific type
# (In case report is structured by types)
def getExistingCompiler(benchTable,typeid):
- r=c.execute(versioncompiler % benchTable,(typeid,)).fetchall()
+ r=c.execute(allCompilers % benchTable,(typeid,)).fetchall()
+ return([x[0] for x in r])
+
+def getCompilerDesc(compilerid):
+ r=c.execute(compilerDesc,(compilerid,)).fetchone()
return(r)
# Get type name from type id
@@ -488,17 +486,19 @@
INNER JOIN COMPILER USING(compilerid)
INNER JOIN COMPILERKIND USING(compilerkindid)
INNER JOIN TYPE USING(typeid)
- WHERE compiler=? AND VERSION=? AND typeid = ? AND runid = ?
+ INNER JOIN TESTNAME USING(testnameid)
+ WHERE compilerid=? AND typeid = ? AND runid = ?
"""
# Command to get test names for specific compiler
# and type
-benchNames="""select distinct NAME from %s
+benchNames="""select distinct name from %s
INNER JOIN COMPILER USING(compilerid)
INNER JOIN COMPILERKIND USING(compilerkindid)
INNER JOIN TYPE USING(typeid)
- WHERE compiler=? AND VERSION=? AND typeid = ? AND runid = ?
+ INNER JOIN TESTNAME USING(testnameid)
+ WHERE compilerid=? AND typeid = ? AND runid = ?
"""
# Command to get columns for specific table
@@ -508,6 +508,7 @@
INNER JOIN CORE USING(coreid)
INNER JOIN COMPILER USING(compilerid)
INNER JOIN COMPILERKIND USING(compilerkindid)
+ INNER JOIN TESTNAME USING(testnameid)
INNER JOIN TYPE USING(typeid)
"""
@@ -529,45 +530,36 @@
# Get test names
# for specific typeid and compiler (for the data)
def getTestNames(benchTable,comp,typeid):
- vals=(comp[0],comp[1],typeid,runid)
+ vals=(comp,typeid,runid)
result=c.execute(benchNames % benchTable,vals).fetchall()
return([x[0] for x in list(result)])
# Command to get data for specific compiler
# and type
nbElemsInBenchAndTypeAndCompilerCmd="""select count(*) from %s
- INNER JOIN CATEGORY USING(categoryid)
- INNER JOIN PLATFORM USING(platformid)
- INNER JOIN CORE USING(coreid)
- INNER JOIN COMPILER USING(compilerid)
- INNER JOIN COMPILERKIND USING(compilerkindid)
- INNER JOIN TYPE USING(typeid)
- WHERE compiler=? AND VERSION=? AND typeid = ? AND runid = ?
+ WHERE compilerid=? AND typeid = ? AND runid = ?
"""
nbElemsInBenchAndTypeCmd="""select count(*) from %s
- INNER JOIN CATEGORY USING(categoryid)
- INNER JOIN PLATFORM USING(platformid)
- INNER JOIN CORE USING(coreid)
- INNER JOIN COMPILER USING(compilerid)
- INNER JOIN COMPILERKIND USING(compilerkindid)
- INNER JOIN TYPE USING(typeid)
WHERE typeid = ? AND runid = ?
"""
nbElemsInBenchCmd="""select count(*) from %s
- INNER JOIN CATEGORY USING(categoryid)
- INNER JOIN PLATFORM USING(platformid)
- INNER JOIN CORE USING(coreid)
- INNER JOIN COMPILER USING(compilerid)
- INNER JOIN COMPILERKIND USING(compilerkindid)
- INNER JOIN TYPE USING(typeid)
WHERE runid = ?
"""
+categoryName="""select distinct category from %s
+ INNER JOIN CATEGORY USING(categoryid)
+ WHERE runid = ?
+ """
+
+def getCategoryName(benchTable,runid):
+ result=c.execute(categoryName % benchTable,(runid,)).fetchone()
+ return(result[0])
+
# Get nb elems in a table
def getNbElemsInBenchAndTypeAndCompilerCmd(benchTable,comp,typeid):
- vals=(comp[0],comp[1],typeid,runid)
+ vals=(comp,typeid,runid)
result=c.execute(nbElemsInBenchAndTypeAndCompilerCmd % benchTable,vals).fetchone()
return(result[0])
@@ -587,9 +579,9 @@
cursor=c.cursor()
result=cursor.execute(benchCmdColumns % (benchTable))
cols= [member[0] for member in cursor.description]
- keepCols = ['NAME'] + [c for c in diff(cols , REMOVECOLUMNS) if isNotIDColumn(c)]
+ keepCols = ['name'] + [c for c in diff(cols , REMOVECOLUMNS) if isNotIDColumn(c)]
keepColsStr = "".join(joinit(keepCols,","))
- vals=(comp[0],comp[1],typeid,runid)
+ vals=(comp,typeid,runid)
result=cursor.execute(benchCmd % (keepColsStr,benchTable),vals)
vals =np.array([list(x) for x in list(result)])
return(keepCols,vals)
@@ -605,7 +597,7 @@
data=data.sort_values(toSort)
cores = [c[1] for c in list(data.columns)]
- columns = diff(indexCols,['NAME'])
+ columns = diff(indexCols,['name'])
dataTable=Table(columns,cores)
section.addTable(dataTable)
@@ -625,7 +617,7 @@
def formatTableByCore(typeSection,testNames,cols,vals):
if vals.size != 0:
ref=pd.DataFrame(vals,columns=cols)
- toSort=["NAME"]
+ toSort=["name"]
for param in PARAMS:
if param in ref.columns:
@@ -670,7 +662,7 @@
data=data.sort_values(toSort)
cores = [c[1] for c in list(data.columns)]
- columns = diff(indexCols,['NAME'])
+ columns = diff(indexCols,['name'])
testSection = Section(name)
typeSection.addSection(testSection)
@@ -694,7 +686,8 @@
def addReportFor(document,benchName):
nbElems = getNbElemsInBenchCmd(benchName)
if nbElems > 0:
- benchSection = Section(benchName)
+ categoryName = getCategoryName(benchName,document.runid)
+ benchSection = Section(categoryName)
document.addSection(benchSection)
print("Process %s\n" % benchName)
allTypes = getExistingTypes(benchName)
@@ -712,7 +705,8 @@
nbElems = getNbElemsInBenchAndTypeAndCompilerCmd(benchName,compiler,aTypeID)
# Print test results for table, type, compiler
if nbElems > 0:
- compilerSection = Section("%s (%s)" % compiler)
+ compilerName,version=getCompilerDesc(compiler)
+ compilerSection = Section("%s (%s)" % (compilerName,version))
typeSection.addSection(compilerSection)
cols,vals=getColNamesAndData(benchName,compiler,aTypeID)
names=getTestNames(benchName,compiler,aTypeID)
@@ -732,7 +726,7 @@
if args.t=="md":
document.accept(Markdown(output))
if args.t=="html":
- document.accept(HTML(output))
+ document.accept(HTML(output,args.r))
finally:
c.close()
diff --git a/CMSIS/DSP/Testing/processResult.py b/CMSIS/DSP/Testing/processResult.py
index a914cfb..327ad4f 100644
--- a/CMSIS/DSP/Testing/processResult.py
+++ b/CMSIS/DSP/Testing/processResult.py
@@ -95,13 +95,14 @@
def printTest(self,elem, theId, theError,errorDetail,theLine,passed,cycles,params):
message=elem.data["message"]
+ func=elem.data["class"]
if not elem.data["deprecated"]:
kind = "Test"
ident = " " * elem.ident
p=Fore.RED + "FAILED" + Style.RESET_ALL
if passed == 1:
p= Fore.GREEN + "PASSED" + Style.RESET_ALL
- print("%s%s %s(%d)%s : %s (cycles = %d)" % (ident,message,Style.BRIGHT,theId,Style.RESET_ALL,p,cycles))
+ print("%s%s %s(%s - %d)%s : %s (cycles = %d)" % (ident,message,Style.BRIGHT,func,theId,Style.RESET_ALL,p,cycles))
if params:
print("%s %s" % (ident,params))
if passed != 1:
@@ -157,7 +158,7 @@
if passed == 1:
p= "<font color=\"green\">PASSED</font>"
print("<tr>")
- print("<td><pre>%s</pre></td>" % message)
+ print("<td><pre>%s</pre></td>" % (message,))
print("<td>%d</td>" % theId)
print("<td>%s</td>" % p)
if params:
@@ -324,13 +325,15 @@
def writeBenchmark(elem,benchFile,theId,theError,passed,cycles,params,config):
if benchFile:
- name=elem.data["class"]
- category= elem.categoryDesc()
+ testname=elem.data["class"]
+ #category= elem.categoryDesc()
+ name=elem.data["message"]
+ category=elem.getSuiteMessage()
old=""
if "testData" in elem.data:
if "oldID" in elem.data["testData"]:
old=elem.data["testData"]["oldID"]
- benchFile.write("\"%s\",\"%s\",%d,\"%s\",%s,%d,%s\n" % (category,name,theId,old,params,cycles,config))
+ benchFile.write("\"%s\",\"%s\",\"%s\",%d,\"%s\",%s,%d,%s\n" % (category,testname,name,theId,old,params,cycles,config))
def getCyclesFromTrace(trace):
if not trace:
@@ -418,7 +421,7 @@
#print(configList)
config = "".join(list(joinit(configList[0],",")))
configHeaders = "".join(list(joinit(csvheaders,",")))
- benchFile.write("CATEGORY,NAME,ID,OLDID,%s,CYCLES,%s\n" % (header,configHeaders))
+ benchFile.write("CATEGORY,TESTNAME,NAME,ID,OLDID,%s,CYCLES,%s\n" % (header,configHeaders))
formatter.printGroup(elem,theId)
diff --git a/CMSIS/DSP/Testing/summaryBench.py b/CMSIS/DSP/Testing/summaryBench.py
index 341fc31..889170e 100644
--- a/CMSIS/DSP/Testing/summaryBench.py
+++ b/CMSIS/DSP/Testing/summaryBench.py
@@ -84,7 +84,7 @@
#print(results.summary())
return(pd.Series({'Regression':"%s" % f,'MAX' : m,'MAXREGCOEF' : results.params.values[-1]}))
- regList = ['ID','OLDID','CATEGORY','NAME'] + csvheaders + groupList
+ regList = ['ID','OLDID','CATEGORY','TESTNAME','NAME'] + csvheaders + groupList
regression=full.groupby(regList).apply(reg)
regression.reset_index(level=regression.index.names, inplace=True)