# Process the benchmark results
# and add them to an SQLite database (one table per benchmark class or group)

import argparse
import re
import TestScripts.NewParser as parse
import TestScripts.CodeGen
from collections import deque
import os.path
import numpy as np
import pandas as pd
import statsmodels.api as sm
import statsmodels.formula.api as smf
import csv
import TestScripts.Deprecate as d
import sqlite3
import datetime, time

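
# The lists below drive the mapping between CSV columns and SQL columns:
# the MK* lists select the SQL type used when creating a table, the VAL*
# lists select how a value is read from a CSV row, and MKKEYFIELD /
# MKKEYFIELDID identify the columns stored as foreign keys into the
# reference tables (TYPE, CATEGORY, PLATFORM, CORE, COMPILER).
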
# For table creation
MKSTRFIELD=['NAME']
MKBOOLFIELD=['HARDFP', 'FASTMATH', 'NEON', 'UNROLL', 'ROUNDING','OPTIMIZED']
MKINTFIELD=['ID', 'CYCLES']
MKDATEFIELD=['DATE']
MKKEYFIELD=['CATEGORY', 'PLATFORM', 'CORE', 'COMPILER','TYPE']
MKKEYFIELDID={'CATEGORY':'categoryid',
              'PLATFORM':'platformid',
              'CORE':'coreid',
              'COMPILER':'compilerid',
              'TYPE':'typeid'}

# For table value extraction
VALSTRFIELD=['NAME','VERSION']
VALBOOLFIELD=['HARDFP', 'FASTMATH', 'NEON', 'UNROLL', 'ROUNDING','OPTIMIZED']
VALINTFIELD=['ID', 'CYCLES']
VALDATEFIELD=['DATE']
VALKEYFIELD=['CATEGORY', 'PLATFORM', 'CORE', 'COMPILER','TYPE']

# Intersperse delimiter between the elements of iterable
# (like str.join but yielding the objects themselves)
def joinit(iterable, delimiter):
    it = iter(iterable)
    try:
        yield next(it)
    except StopIteration:
        return
    for x in it:
        yield delimiter
        yield x

# Check if a table already exists in the database
def tableExists(c,tableName):
    req=(tableName,)
    r=c.execute("SELECT name FROM sqlite_master WHERE type='table' AND name=?",req)
    return(r.fetchone() is not None)

# Elements of first which are not in second (order preserved)
def diff(first, second):
    second = set(second)
    return [item for item in first if item not in second]

# Columns of the benchmark CSV which will be stored in the SQL table
# (the benchmark parameters and the OLDID column are handled separately)
def getColumns(elem,full):
    colsToKeep=[]
    cols = list(full.columns)
    params = list(elem.params.full)
    common = diff(cols + ["TYPE"] , ['OLDID'] + params)

    for field in common:
        if field in MKSTRFIELD:
            colsToKeep.append(field)
        if field in MKINTFIELD:
            colsToKeep.append(field)
        if field in MKKEYFIELD:
            colsToKeep.append(field)
        if field in MKDATEFIELD:
            colsToKeep.append(field)
        if field in MKBOOLFIELD:
            colsToKeep.append(field)
    return(colsToKeep)

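# Create the table for a benchmark if it is missing: one INTEGER column per
# benchmark parameter, the common columns coming from the CSV (NAME, CYCLES,
# DATE, ...) and foreign keys into the reference tables.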
def createTableIfMissing(conn,elem,tableName,full):
    if not tableExists(conn,tableName):
        sql = "CREATE TABLE %s (" % tableName
        cols = list(full.columns)
        params = list(elem.params.full)
        common = diff(cols + ["TYPE"] , ['OLDID'] + params)

        sql += "%sid INTEGER PRIMARY KEY" % (tableName)
        start = ","

        for field in params:
            sql += " %s\n %s INTEGER" % (start,field)
            start = ","

        for field in common:
            if field in MKSTRFIELD:
                sql += "%s\n %s TEXT" % (start,field)
            if field in MKINTFIELD:
                sql += "%s\n %s INTEGER" % (start,field)
            if field in MKKEYFIELD:
                sql += "%s\n %s INTEGER" % (start,MKKEYFIELDID[field])
            if field in MKDATEFIELD:
                sql += "%s\n %s TEXT" % (start,field)
            if field in MKBOOLFIELD:
                sql += "%s\n %s INTEGER" % (start,field)
            start = ","
        # Create foreign keys
        sql += "%sFOREIGN KEY(typeid) REFERENCES TYPE(typeid)," % start
        sql += "FOREIGN KEY(categoryid) REFERENCES CATEGORY(categoryid),"
        sql += "FOREIGN KEY(platformid) REFERENCES PLATFORM(platformid),"
        sql += "FOREIGN KEY(coreid) REFERENCES CORE(coreid),"
        sql += "FOREIGN KEY(compilerid) REFERENCES COMPILER(compilerid)"
        sql += " )"
        #print(sql)
        conn.execute(sql)

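# For illustration only: for a hypothetical benchmark table named BasicMaths
# with a single parameter NB, the statement built above has the form
# (the exact column list depends on the CSV header):
#
#   CREATE TABLE BasicMaths (
#     BasicMathsid INTEGER PRIMARY KEY,
#     NB INTEGER,
#     ID INTEGER,
#     CYCLES INTEGER,
#     NAME TEXT,
#     DATE TEXT,
#     typeid INTEGER,
#     categoryid INTEGER,
#     platformid INTEGER,
#     coreid INTEGER,
#     compilerid INTEGER,
#     FOREIGN KEY(typeid) REFERENCES TYPE(typeid),
#     FOREIGN KEY(categoryid) REFERENCES CATEGORY(categoryid),
#     FOREIGN KEY(platformid) REFERENCES PLATFORM(platformid),
#     FOREIGN KEY(coreid) REFERENCES CORE(coreid),
#     FOREIGN KEY(compilerid) REFERENCES COMPILER(compilerid)
#   )
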
# Find the key or add it in a table
def findInTable(conn,table,keystr,strv,key):
    #print(sql)
    r = conn.execute("select %s from %s where %s=?" % (key,table,keystr),(strv,))
    result=r.fetchone()
    if result is not None:
        return(result[0])
    else:
        conn.execute("INSERT INTO %s(%s) VALUES(?)" % (table,keystr),(strv,))
        conn.commit()
        r = conn.execute("select %s from %s where %s=?" % (key,table,keystr),(strv,))
        result=r.fetchone()
        if result is not None:
            #print(result)
            return(result[0])
        else:
            return(None)

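# Find the compiler id for a (compiler kind, version) pair,
# creating the COMPILER entry when it is missing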
def findInCompilerTable(conn,kind,version):
    #print(sql)
    r = conn.execute("select compilerid from COMPILER where compilerkindid=? AND version=?" , (kind,version))
    result=r.fetchone()
    if result is not None:
        return(result[0])
    else:
        conn.execute("INSERT INTO COMPILER(compilerkindid,version) VALUES(?,?)" ,(kind,version))
        conn.commit()
        r = conn.execute("select compilerid from COMPILER where compilerkindid=? AND version=?" , (kind,version))
        result=r.fetchone()
        if result is not None:
            #print(result)
            return(result[0])
        else:
            return(None)


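# Add one database row per line of the benchmark CSV.
# Plain fields (NAME, CYCLES, DATE, ...) are copied, the sample type
# (f32, q31, s8, ...) is derived from the suffix of the test name, and
# CATEGORY / PLATFORM / CORE / COMPILER / TYPE are replaced by foreign keys
# into their reference tables (created on the fly when missing).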
def addRows(conn,elem,tableName,full):
    # List of columns we have in DB which is
    # different from the columns in the table
    keep = getColumns(elem,full)
    cols = list(full.columns)
    params = list(elem.params.full)
    common = diff(["TYPE"] + cols , ['OLDID'] + params)
    colNameList = []
    for c in params + keep:
        if c in MKKEYFIELD:
            colNameList.append(MKKEYFIELDID[c])
        else:
            colNameList.append(c)
    colNames = "".join(joinit(colNameList,","))
    #print(colNameList)
    #print(colNames)
    #print(full)
    for index, row in full.iterrows():
        sql = "INSERT INTO %s(%s) VALUES(" % (tableName,colNames)
        keys = {}

        # Get data from columns
        for field in common:
            if field in VALSTRFIELD:
                keys[field]=row[field]
                if field == "NAME":
                    name = row[field]
                    if re.match(r'^.*_f64',name):
                        keys["TYPE"] = "f64"
                    if re.match(r'^.*_f32',name):
                        keys["TYPE"] = "f32"
                    if re.match(r'^.*_f16',name):
                        keys["TYPE"] = "f16"
                    if re.match(r'^.*_q31',name):
                        keys["TYPE"] = "q31"
                    if re.match(r'^.*_q15',name):
                        keys["TYPE"] = "q15"
                    if re.match(r'^.*_q7',name):
                        keys["TYPE"] = "q7"

                    if re.match(r'^.*_s8',name):
                        keys["TYPE"] = "s8"
                    if re.match(r'^.*_u8',name):
                        keys["TYPE"] = "u8"
                    if re.match(r'^.*_s16',name):
                        keys["TYPE"] = "s16"
                    if re.match(r'^.*_u16',name):
                        keys["TYPE"] = "u16"
                    if re.match(r'^.*_s32',name):
                        keys["TYPE"] = "s32"
                    if re.match(r'^.*_u32',name):
                        keys["TYPE"] = "u32"
                    if re.match(r'^.*_s64',name):
                        keys["TYPE"] = "s64"
                    if re.match(r'^.*_u64',name):
                        keys["TYPE"] = "u64"

            if field in VALINTFIELD:
                keys[field]=row[field]
            if field in VALDATEFIELD:
                keys[field]=row[field]
            if field in VALBOOLFIELD:
                keys[field]=row[field]

        # Get foreign keys and create missing data
        for field in common:
            if field in VALKEYFIELD:
                if field == "CATEGORY":
                    val = findInTable(conn,"CATEGORY","category",row[field],"categoryid")
                    keys[field]=val
                if field == "CORE":
                    val = findInTable(conn,"CORE","coredef",row[field],"coreid")
                    keys[field]=val
                if field == "PLATFORM":
                    val = findInTable(conn,"PLATFORM","platform",row[field],"platformid")
                    keys[field]=val
                if field == "TYPE":
                    val = findInTable(conn,"TYPE","type",keys["TYPE"],"typeid")
                    keys[field]=val
                if field == "COMPILER":
                    compilerkind = findInTable(conn,"COMPILERKIND","compiler",row[field],"compilerkindid")
                    compiler = findInCompilerTable(conn,compilerkind,keys["VERSION"])
                    keys[field]=compiler

        # Generate sql command
        start = ""
        for field in params:
            sql += " %s\n %d" % (start,row[field])
            start = ","

        for field in keep:
            if field in MKSTRFIELD or field in MKDATEFIELD:
                sql += " %s\n \"%s\"" % (start,keys[field])
            else:
                sql += " %s\n %d" % (start,keys[field])
            start = ","

        sql += " )"
        #print(sql)
        conn.execute(sql)
        conn.commit()

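# For illustration only: for a hypothetical table named BasicMaths with a
# single parameter NB, the generated statement has the form (values and
# column order depend on the CSV content):
#
#   INSERT INTO BasicMaths(NB,ID,CYCLES,NAME,categoryid,platformid,coreid,
#                          compilerid,typeid,DATE)
#   VALUES( 256, 12, 3456, "arm_add_f32", 5, 1, 1, 3, 2, "2019-07-25 10:00:00")
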
def addOneBenchmark(elem,fullPath,db,group):
    full=pd.read_csv(fullPath,dtype={'OLDID': str} ,keep_default_na = False)
    full['DATE'] = datetime.datetime.now()
    if group:
        tableName = group
    else:
        tableName = elem.data["class"]
    conn = sqlite3.connect(db)
    createTableIfMissing(conn,elem,tableName,full)
    addRows(conn,elem,tableName,full)
    conn.close()

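# Walk the tree of benchmark descriptions and add every non-deprecated
# benchmark having parameters to the database. The measurements of each
# benchmark are read from <benchmark dir>/<benchmark path>/fullBenchmark.csv.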
def addToDB(benchmark,dbpath,elem,group):
    if not elem.data["deprecated"]:
        if elem.params:
            benchPath = os.path.join(benchmark,elem.fullPath(),"fullBenchmark.csv")
            print("Processing %s" % benchPath)
            addOneBenchmark(elem,benchPath,dbpath,group)

    for c in elem.children:
        addToDB(benchmark,dbpath,c,group)


parser = argparse.ArgumentParser(description='Generate summary benchmarks')

parser.add_argument('-f', nargs='?',type = str, default=None, help="Test description file path")
parser.add_argument('-b', nargs='?',type = str, default="FullBenchmark", help="Full Benchmark dir path")
parser.add_argument('-e', action='store_true', help="Embedded test")
parser.add_argument('-o', nargs='?',type = str, default="bench.db", help="Benchmark database")

parser.add_argument('others', nargs=argparse.REMAINDER)

args = parser.parse_args()

if args.f is not None:
    p = parse.Parser()
    # Parse the test description file
    root = p.parse(args.f)
    d.deprecate(root,args.others)
    if args.others:
        group=args.others[0]
    else:
        group=None
    addToDB(args.b,args.o,root,group)

else:
    parser.print_help()
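
# Example invocation (the script and description file names are only
# illustrative; adapt them to the local setup):
#
#   python addToDB.py -f bench.txt -b FullBenchmark -o bench.db
#
# An optional trailing argument is passed to TestScripts.Deprecate.deprecate
# and, when present, its first value is used as the table name (group)
# instead of the benchmark class name.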