# Process the test results
# Test status (like passed, or failed with error code)

import argparse
import re
import TestScripts.NewParser as parse
import TestScripts.CodeGen
from collections import deque
import os.path
import numpy as np
import pandas as pd
import statsmodels.api as sm
import statsmodels.formula.api as smf
import csv
import TestScripts.Deprecate as d

# Dataframes built by convert(), one per benchmark suite
result = []
# Parameter column names (interleaved with "," entries) taken from the last processed suite
commonParams = []

def findItem(root,path):
    """ Find a node in a tree

    Args:
      root (TreeItem) : Root of the tree
      path (list) : A list of node IDs.
        This list describes a path in the tree:
        by starting from the root and following this path,
        we can find the node in the tree.
    Raises:
      Nothing
    Returns:
      TreeItem : A node
    """
    # The list is converted into a queue.
    # The first entry corresponds to the starting node, so it is dropped.
    q = deque(path)
    q.popleft()
    c = root
    while q:
        n = q.popleft()
        # We get the child selected by its ID and continue
        c = c[n-1]
    return(c)
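
# Illustration (the values are only an example): with path [1, 3, 2] the leading 1
# (the starting node) is dropped, then we take root[3-1] and, from that child,
# its element [2-1].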


NORMAL = 1
INTEST = 2
TESTPARAM = 3

def joinit(iterable, delimiter):
    # Yield the elements of iterable with delimiter interleaved between them.
    it = iter(iterable)
    try:
        yield next(it)
    except StopIteration:
        # Empty iterable: yield nothing instead of leaking StopIteration (PEP 479)
        return
    for x in it:
        yield delimiter
        yield x
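
# For example, list(joinit(["A", "B", "C"], ",")) == ["A", ",", "B", ",", "C"].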

def formatProd(a,b):
    if a == "Intercept":
        return(str(b))
    return("%s * %s" % (a,b))
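
# formatProd is not called in this script. It appears intended for printing the
# terms of a fitted regression (a coefficient times a parameter name), with the
# "Intercept" term printed as its value alone; that reading is an assumption.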

def convert(elem,fullPath):
    global commonParams
    global result
    # The regression.csv file is expected next to the full benchmark file.
    regressionPath = os.path.join(os.path.dirname(fullPath),"regression.csv")
    full = pd.read_csv(fullPath,dtype={'OLDID': str},keep_default_na=False)
    reg = pd.read_csv(regressionPath,dtype={'OLDID': str},keep_default_na=False)
    # reg is loaded but not used below.
    # Parameter names of this suite, interleaved with "," entries.
    commonParams = list(joinit(elem.params.full,","))
    header = ["OLDID"] + commonParams + ["CYCLES"]

    # Keep only the old test ID, the parameters and the cycle count,
    # renaming the ID column to the old format name.
    r = full[header].rename(columns={"OLDID":"TESTNB"})
    r["TESTNB"] = pd.to_numeric(r["TESTNB"])
    r["PASSED"] = 1
    result.append(r)

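# For instance (parameter names here are hypothetical), if elem.params.full is
# ["NB", "NUMTAPS"], the selected columns are
# ["OLDID", "NB", ",", "NUMTAPS", "CYCLES"].
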

def extractBenchmarks(benchmark,elem):
    # Walk the test tree and convert every non-deprecated benchmark suite.
    if not elem.data["deprecated"]:
        if elem.params:
            benchPath = os.path.join(benchmark,elem.fullPath(),"fullBenchmark.csv")
            print("Processing %s" % benchPath)
            convert(elem,benchPath)

        for c in elem.children:
            extractBenchmarks(benchmark,c)


parser = argparse.ArgumentParser(description='Generate summary benchmarks')

parser.add_argument('-f', nargs='?', type=str, default=None, help="Test description file path")
parser.add_argument('-b', nargs='?', type=str, default="FullBenchmark", help="Full Benchmark dir path")
parser.add_argument('-e', action='store_true', help="Embedded test")
parser.add_argument('-o', nargs='?', type=str, default="bench.csv", help="Output csv file using old format")

parser.add_argument('others', nargs=argparse.REMAINDER)

args = parser.parse_args()

if args.f is not None:
    p = parse.Parser()
    # Parse the test description file
    root = p.parse(args.f)
    # Mark deprecated nodes according to the remaining command line arguments
    d.deprecate(root,args.others)
    # Collect the benchmark results of all non-deprecated suites
    extractBenchmarks(args.b,root)
    # Concatenate everything, sort, and write the summary in the old CSV format
    finalResult = pd.concat(result)
    cols = ['TESTNB'] + commonParams
    finalResult = finalResult.sort_values(by=cols)
    finalResult.to_csv(args.o,index=False,quoting=csv.QUOTE_NONNUMERIC)

else:
    parser.print_help()
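
# Example invocation (script and file names below are assumptions, adapt to your setup):
#   python summaryBench.py -f desc.txt -b FullBenchmark -o bench.csv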