Skip to content
This repository was archived by the owner on Jan 10, 2023. It is now read-only.
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 8 additions & 12 deletions runtool/exporters/Stat.py
Original file line number Diff line number Diff line change
@@ -1,27 +1,23 @@
import cgi
import csv
from sea_runtool import GraphCombiner


class Stat(GraphCombiner):
    """Exporter that writes per-domain task timing statistics to a CSV file.

    For every (domain, task) pair aggregated by GraphCombiner it emits one
    row with the min, max, average, total and count of the recorded
    durations.
    """

    def __init__(self, args, tree):
        # No file handle is opened here: the output file is created and
        # closed entirely inside finish(), so nothing leaks if finish()
        # is never reached.
        GraphCombiner.__init__(self, args, tree)

    def get_targets(self):
        """Return the single output target: <output>.csv."""
        return [self.args.output + ".csv"]

    def finish(self):
        """Finalize aggregation and write the statistics as CSV."""
        GraphCombiner.finish(self)
        # 'w+b' with csv.writer is the Python 2 convention (this module
        # uses dict.iteritems(), so it targets Python 2).
        with open(self.get_targets()[-1], 'w+b') as f:
            writer = csv.writer(f)
            writer.writerow(["domain", "name", "min", "max", "avg", "total", "count"])
            for domain, data in self.per_domain.iteritems():
                for task_name, task_data in data['tasks'].iteritems():
                    time = task_data['time']  # list of recorded durations
                    writer.writerow([
                        domain, task_name,
                        min(time), max(time),
                        sum(time) / len(time),  # avg (Py2 int division if all ints)
                        sum(time), len(time),
                    ])

@staticmethod
def join_traces(traces, output, args): # FIXME: implement real joiner
Expand Down