Adding gem5 source to svn.
git-svn-id: https://www4.informatik.uni-erlangen.de/i4svn/danceos/trunk/devel/fail@1819 8c4709b5-6ec9-48aa-a5cd-a96041d1645a
simulators/gem5/util/stats/__init__.py (new file, 28 lines)
@@ -0,0 +1,28 @@
# Copyright (c) 2005 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Nathan Binkert

simulators/gem5/util/stats/barchart.py (new file, 341 lines)
@@ -0,0 +1,341 @@
# Copyright (c) 2005-2006 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Nathan Binkert
#          Lisa Hsu

import matplotlib, pylab
from matplotlib.font_manager import FontProperties
from matplotlib.numerix import array, arange, reshape, shape, transpose, zeros
from matplotlib.numerix import Float
from matplotlib.ticker import NullLocator

matplotlib.interactive(False)

from chart import ChartOptions

class BarChart(ChartOptions):
    def __init__(self, default=None, **kwargs):
        super(BarChart, self).__init__(default, **kwargs)
        self.inputdata = None
        self.chartdata = None
        self.inputerr = None
        self.charterr = None

    def gen_colors(self, count):
        cmap = matplotlib.cm.get_cmap(self.colormap)
        if count == 1:
            return cmap([ 0.5 ])

        if count < 5:
            return cmap(arange(5) / float(4))[:count]

        return cmap(arange(count) / float(count - 1))

    # The input data format does not match the data format that the
    # graph function takes because it is intuitive. The conversion
    # from input data format to chart data format depends on the
    # dimensionality of the input data. Check here for the
    # dimensionality and correctness of the input data
    def set_data(self, data):
        if data is None:
            self.inputdata = None
            self.chartdata = None
            return

        data = array(data)
        dim = len(shape(data))
        if dim not in (1, 2, 3):
            raise AttributeError, "Input data must be a 1, 2, or 3d matrix"
        self.inputdata = data

        # If the input data is a 1d matrix, then it describes a
        # standard bar chart.
        if dim == 1:
            self.chartdata = array([[data]])

        # If the input data is a 2d matrix, then it describes a bar
        # chart with groups. The matrix being an array of groups of
        # bars.
        if dim == 2:
            self.chartdata = transpose([data], axes=(2,0,1))

        # If the input data is a 3d matrix, then it describes an array
        # of groups of bars with each bar being an array of stacked
        # values.
        if dim == 3:
            self.chartdata = transpose(data, axes=(1,2,0))

    def get_data(self):
        return self.inputdata

    data = property(get_data, set_data)

    def set_err(self, err):
        if err is None:
            self.inputerr = None
            self.charterr = None
            return

        err = array(err)
        dim = len(shape(err))
        if dim not in (1, 2, 3):
            raise AttributeError, "Input err must be a 1, 2, or 3d matrix"
        self.inputerr = err

        if dim == 1:
            self.charterr = array([[err]])

        if dim == 2:
            self.charterr = transpose([err], axes=(2,0,1))

        if dim == 3:
            self.charterr = transpose(err, axes=(1,2,0))

    def get_err(self):
        return self.inputerr

    err = property(get_err, set_err)

    # Graph the chart data.
    # Input is a 3d matrix that describes a plot that has multiple
    # groups, multiple bars in each group, and multiple values stacked
    # in each bar. The underlying bar() function expects a sequence of
    # bars in the same stack location and same group location, so the
    # organization of the matrix is that the inner most sequence
    # represents one of these bar groups, then those are grouped
    # together to make one full stack of bars in each group, and then
    # the outer most layer describes the groups. Here is an example
    # data set and how it gets plotted as a result.
    #
    # e.g. data = [[[10,11,12], [13,14,15], [16,17,18], [19,20,21]],
    #              [[22,23,24], [25,26,27], [28,29,30], [31,32,33]]]
    #
    # will plot like this:
    #
    #      19 31    20 32    21 33
    #      16 28    17 29    18 30
    #      13 25    14 26    15 27
    #      10 22    11 23    12 24
    #
    # Because this arrangement is rather conterintuitive, the rearrange
    # function takes various matricies and arranges them to fit this
    # profile.
    #
    # This code deals with one of the dimensions in the matrix being
    # one wide.
    #
    def graph(self):
        if self.chartdata is None:
            raise AttributeError, "Data not set for bar chart!"

        dim = len(shape(self.inputdata))
        cshape = shape(self.chartdata)
        if self.charterr is not None and shape(self.charterr) != cshape:
            raise AttributeError, 'Dimensions of error and data do not match'

        if dim == 1:
            colors = self.gen_colors(cshape[2])
            colors = [ [ colors ] * cshape[1] ] * cshape[0]

        if dim == 2:
            colors = self.gen_colors(cshape[0])
            colors = [ [ [ c ] * cshape[2] ] * cshape[1] for c in colors ]

        if dim == 3:
            colors = self.gen_colors(cshape[1])
            colors = [ [ [ c ] * cshape[2] for c in colors ] ] * cshape[0]

        colors = array(colors)

        self.figure = pylab.figure(figsize=self.chart_size)

        outer_axes = None
        inner_axes = None
        if self.xsubticks is not None:
            color = self.figure.get_facecolor()
            self.metaaxes = self.figure.add_axes(self.figure_size,
                                                 axisbg=color, frameon=False)
            for tick in self.metaaxes.xaxis.majorTicks:
                tick.tick1On = False
                tick.tick2On = False
            self.metaaxes.set_yticklabels([])
            self.metaaxes.set_yticks([])
            size = [0] * 4
            size[0] = self.figure_size[0]
            size[1] = self.figure_size[1] + .12
            size[2] = self.figure_size[2]
            size[3] = self.figure_size[3] - .12
            self.axes = self.figure.add_axes(size)
            outer_axes = self.metaaxes
            inner_axes = self.axes
        else:
            self.axes = self.figure.add_axes(self.figure_size)
            outer_axes = self.axes
            inner_axes = self.axes

        bars_in_group = len(self.chartdata)

        width = 1.0 / ( bars_in_group + 1)
        center = width / 2

        bars = []
        for i,stackdata in enumerate(self.chartdata):
            bottom = array([0.0] * len(stackdata[0]), Float)
            stack = []
            for j,bardata in enumerate(stackdata):
                bardata = array(bardata)
                ind = arange(len(bardata)) + i * width + center
                yerr = None
                if self.charterr is not None:
                    yerr = self.charterr[i][j]
                bar = self.axes.bar(ind, bardata, width, bottom=bottom,
                                    color=colors[i][j], yerr=yerr)
                if self.xsubticks is not None:
                    self.metaaxes.bar(ind, [0] * len(bardata), width)
                stack.append(bar)
                bottom += bardata
            bars.append(stack)

        if self.xlabel is not None:
            outer_axes.set_xlabel(self.xlabel)

        if self.ylabel is not None:
            inner_axes.set_ylabel(self.ylabel)

        if self.yticks is not None:
            ymin, ymax = self.axes.get_ylim()
            nticks = float(len(self.yticks))
            ticks = arange(nticks) / (nticks - 1) * (ymax - ymin) + ymin
            inner_axes.set_yticks(ticks)
            inner_axes.set_yticklabels(self.yticks)
        elif self.ylim is not None:
            inner_axes.set_ylim(self.ylim)

        if self.xticks is not None:
            outer_axes.set_xticks(arange(cshape[2]) + .5)
            outer_axes.set_xticklabels(self.xticks)

        if self.xsubticks is not None:
            numticks = (cshape[0] + 1) * cshape[2]
            inner_axes.set_xticks(arange(numticks) * width + 2 * center)
            xsubticks = list(self.xsubticks) + [ '' ]
            inner_axes.set_xticklabels(xsubticks * cshape[2], fontsize=7,
                                       rotation=30)

        if self.legend is not None:
            if dim == 1:
                lbars = bars[0][0]
            if dim == 2:
                lbars = [ bars[i][0][0] for i in xrange(len(bars))]
            if dim == 3:
                number = len(bars[0])
                lbars = [ bars[0][number - j - 1][0] for j in xrange(number)]

            if self.fig_legend:
                self.figure.legend(lbars, self.legend, self.legend_loc,
                                   prop=FontProperties(size=self.legend_size))
            else:
                self.axes.legend(lbars, self.legend, self.legend_loc,
                                 prop=FontProperties(size=self.legend_size))

        if self.title is not None:
            self.axes.set_title(self.title)

    def savefig(self, name):
        self.figure.savefig(name)

    def savecsv(self, name):
        f = file(name, 'w')
        data = array(self.inputdata)
        dim = len(data.shape)

        if dim == 1:
            #if self.xlabel:
            #    f.write(', '.join(list(self.xlabel)) + '\n')
            f.write(', '.join([ '%f' % val for val in data]) + '\n')
        if dim == 2:
            #if self.xlabel:
            #    f.write(', '.join([''] + list(self.xlabel)) + '\n')
            for i,row in enumerate(data):
                ylabel = []
                #if self.ylabel:
                #    ylabel = [ self.ylabel[i] ]
                f.write(', '.join(ylabel + [ '%f' % v for v in row]) + '\n')
        if dim == 3:
            f.write("don't do 3D csv files\n")
            pass

        f.close()

if __name__ == '__main__':
    from random import randrange
    import random, sys

    dim = 3
    number = 5

    args = sys.argv[1:]
    if len(args) > 3:
        sys.exit("invalid number of arguments")
    elif len(args) > 0:
        myshape = [ int(x) for x in args ]
    else:
        myshape = [ 3, 4, 8 ]

    # generate a data matrix of the given shape
    size = reduce(lambda x,y: x*y, myshape)
    #data = [ random.randrange(size - i) + 10 for i in xrange(size) ]
    data = [ float(i)/100.0 for i in xrange(size) ]
    data = reshape(data, myshape)

    # setup some test bar charts
    if True:
        chart1 = BarChart()
        chart1.data = data

        chart1.xlabel = 'Benchmark'
        chart1.ylabel = 'Bandwidth (GBps)'
        chart1.legend = [ 'x%d' % x for x in xrange(myshape[-1]) ]
        chart1.xticks = [ 'xtick%d' % x for x in xrange(myshape[0]) ]
        chart1.title = 'this is the title'
        if len(myshape) > 2:
            chart1.xsubticks = [ '%d' % x for x in xrange(myshape[1]) ]
        chart1.graph()
        chart1.savefig('/tmp/test1.png')
        chart1.savefig('/tmp/test1.ps')
        chart1.savefig('/tmp/test1.eps')
        chart1.savecsv('/tmp/test1.csv')

    if False:
        chart2 = BarChart()
        chart2.data = data
        chart2.colormap = 'gray'
        chart2.graph()
        chart2.savefig('/tmp/test2.png')
        chart2.savefig('/tmp/test2.ps')

    # pylab.show()
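
Note: barchart.BarChart accepts 1d, 2d, or 3d input through its data property, as the set_data comments above describe (the file's own __main__ block shows the full workflow). A minimal sketch of the three shapes, illustrative only and not part of this commit:

    from barchart import BarChart

    chart = BarChart(title='example')
    chart.data = [1, 2, 3]                      # 1d: one bar per value
    chart.data = [[1, 2, 3], [4, 5, 6]]         # 2d: two groups of three bars
    chart.data = [[[1, 2], [3, 4]],
                  [[5, 6], [7, 8]]]             # 3d: groups of stacked bars
    chart.graph()
    chart.savefig('/tmp/example.png')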

simulators/gem5/util/stats/categories.py (new file, 1940 lines)
File diff suppressed because it is too large

simulators/gem5/util/stats/chart.py (new file, 86 lines)
@@ -0,0 +1,86 @@
# Copyright (c) 2005-2006 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Nathan Binkert
#          Lisa Hsu

class ChartOptions(object):
    defaults = { 'chart_size' : (8, 4),
                 'figure_size' : [0.1, 0.1, 0.6, 0.85],
                 'title' : None,
                 'fig_legend' : True,
                 'legend' : None,
                 'legend_loc' : 'upper right',
                 'legend_size' : 6,
                 'colormap' : 'jet',
                 'xlabel' : None,
                 'ylabel' : None,
                 'xticks' : None,
                 'xsubticks' : None,
                 'yticks' : None,
                 'ylim' : None,
                 }

    def __init__(self, options=None, **kwargs):
        self.init(options, **kwargs)

    def clear(self):
        self.options = {}

    def init(self, options=None, **kwargs):
        self.clear()
        self.update(options, **kwargs)

    def update(self, options=None, **kwargs):
        if options is not None:
            if not isinstance(options, ChartOptions):
                raise AttributeError, \
                      'attribute options of type %s should be %s' % \
                      (type(options), ChartOptions)
            self.options.update(options.options)

        for key,value in kwargs.iteritems():
            if key not in ChartOptions.defaults:
                raise AttributeError, \
                      "%s instance has no attribute '%s'" % (type(self), key)
            self.options[key] = value

    def __getattr__(self, attr):
        if attr in self.options:
            return self.options[attr]

        if attr in ChartOptions.defaults:
            return ChartOptions.defaults[attr]

        raise AttributeError, \
              "%s instance has no attribute '%s'" % (type(self), attr)

    def __setattr__(self, attr, value):
        if attr in ChartOptions.defaults:
            self.options[attr] = value
        else:
            super(ChartOptions, self).__setattr__(attr, value)
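
Note: ChartOptions layers per-instance settings over the class-level defaults dict and rejects unknown keys, which is how BarChart above gets its chart_size, colormap, and legend settings. A small illustrative sketch (not part of the commit):

    from chart import ChartOptions

    opts = ChartOptions(colormap='gray', legend_loc='upper left')
    print opts.colormap                          # 'gray' (overridden)
    print opts.chart_size                        # (8, 4) from ChartOptions.defaults
    derived = ChartOptions(opts, title='run A')  # copy existing options, then override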

simulators/gem5/util/stats/db.py (new file, 436 lines)
@@ -0,0 +1,436 @@
# Copyright (c) 2003-2004 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Nathan Binkert

import MySQLdb, re, string

def statcmp(a, b):
    v1 = a.split('.')
    v2 = b.split('.')

    last = min(len(v1), len(v2)) - 1
    for i,j in zip(v1[0:last], v2[0:last]):
        if i != j:
            return cmp(i, j)

    # Special compare for last element.
    if len(v1) == len(v2):
        return cmp(v1[last], v2[last])
    else:
        return cmp(len(v1), len(v2))

class RunData:
    def __init__(self, row):
        self.run = int(row[0])
        self.name = row[1]
        self.user = row[2]
        self.project = row[3]

class SubData:
    def __init__(self, row):
        self.stat = int(row[0])
        self.x = int(row[1])
        self.y = int(row[2])
        self.name = row[3]
        self.descr = row[4]

class Data:
    def __init__(self, row):
        if len(row) != 5:
            raise 'stat db error'
        self.stat = int(row[0])
        self.run = int(row[1])
        self.x = int(row[2])
        self.y = int(row[3])
        self.data = float(row[4])

    def __repr__(self):
        return '''Data(['%d', '%d', '%d', '%d', '%f'])''' % ( self.stat,
            self.run, self.x, self.y, self.data)

class StatData(object):
    def __init__(self, row):
        self.stat = int(row[0])
        self.name = row[1]
        self.desc = row[2]
        self.type = row[3]
        self.prereq = int(row[5])
        self.precision = int(row[6])

        import flags
        self.flags = 0
        if int(row[4]): self.flags |= flags.printable
        if int(row[7]): self.flags |= flags.nozero
        if int(row[8]): self.flags |= flags.nonan
        if int(row[9]): self.flags |= flags.total
        if int(row[10]): self.flags |= flags.pdf
        if int(row[11]): self.flags |= flags.cdf

        if self.type == 'DIST' or self.type == 'VECTORDIST':
            self.min = float(row[12])
            self.max = float(row[13])
            self.bktsize = float(row[14])
            self.size = int(row[15])

        if self.type == 'FORMULA':
            self.formula = self.db.allFormulas[self.stat]

class Node(object):
    def __init__(self, name):
        self.name = name
    def __str__(self):
        return self.name

class Result(object):
    def __init__(self, x, y):
        self.data = {}
        self.x = x
        self.y = y

    def __contains__(self, run):
        return run in self.data

    def __getitem__(self, run):
        if run not in self.data:
            self.data[run] = [ [ 0.0 ] * self.y for i in xrange(self.x) ]
        return self.data[run]

class Database(object):
    def __init__(self):
        self.host = 'zizzer.pool'
        self.user = ''
        self.passwd = ''
        self.db = 'm5stats'
        self.cursor = None

        self.allStats = []
        self.allStatIds = {}
        self.allStatNames = {}

        self.allSubData = {}

        self.allRuns = []
        self.allRunIds = {}
        self.allRunNames = {}

        self.allFormulas = {}

        self.stattop = {}
        self.statdict = {}
        self.statlist = []

        self.mode = 'sum';
        self.runs = None
        self.ticks = None
        self.method = 'sum'
        self._method = type(self).sum

    def get(self, job, stat, system=None):
        run = self.allRunNames.get(str(job), None)
        if run is None:
            return None

        from info import ProxyError, scalar, vector, value, values, total, len
        if system is None and hasattr(job, 'system'):
            system = job.system

        if system is not None:
            stat.system = self[system]
        try:
            if scalar(stat):
                return value(stat, run.run)
            if vector(stat):
                return values(stat, run.run)
        except ProxyError:
            return None

        return None

    def query(self, sql):
        self.cursor.execute(sql)

    def update_dict(self, dict):
        dict.update(self.stattop)

    def append(self, stat):
        statname = re.sub(':', '__', stat.name)
        path = string.split(statname, '.')
        pathtop = path[0]
        fullname = ''

        x = self
        while len(path) > 1:
            name = path.pop(0)
            if not x.__dict__.has_key(name):
                x.__dict__[name] = Node(fullname + name)
            x = x.__dict__[name]
            fullname = '%s%s.' % (fullname, name)

        name = path.pop(0)
        x.__dict__[name] = stat

        self.stattop[pathtop] = self.__dict__[pathtop]
        self.statdict[statname] = stat
        self.statlist.append(statname)

    def connect(self):
        # connect
        self.thedb = MySQLdb.connect(db=self.db,
                                     host=self.host,
                                     user=self.user,
                                     passwd=self.passwd)

        # create a cursor
        self.cursor = self.thedb.cursor()

        self.query('''select rn_id,rn_name,rn_sample,rn_user,rn_project
                   from runs''')
        for result in self.cursor.fetchall():
            run = RunData(result);
            self.allRuns.append(run)
            self.allRunIds[run.run] = run
            self.allRunNames[run.name] = run

        self.query('select sd_stat,sd_x,sd_y,sd_name,sd_descr from subdata')
        for result in self.cursor.fetchall():
            subdata = SubData(result)
            if self.allSubData.has_key(subdata.stat):
                self.allSubData[subdata.stat].append(subdata)
            else:
                self.allSubData[subdata.stat] = [ subdata ]

        self.query('select * from formulas')
        for id,formula in self.cursor.fetchall():
            self.allFormulas[int(id)] = formula.tostring()

        StatData.db = self
        self.query('select * from stats')
        import info
        for result in self.cursor.fetchall():
            stat = info.NewStat(self, StatData(result))
            self.append(stat)
            self.allStats.append(stat)
            self.allStatIds[stat.stat] = stat
            self.allStatNames[stat.name] = stat

    # Name: listruns
    # Desc: Prints all runs matching a given user, if no argument
    #       is given all runs are returned
    def listRuns(self, user=None):
        print '%-40s %-10s %-5s' % ('run name', 'user', 'id')
        print '-' * 62
        for run in self.allRuns:
            if user == None or user == run.user:
                print '%-40s %-10s %-10d' % (run.name, run.user, run.run)

    # Name: listTicks
    # Desc: Prints all samples for a given run
    def listTicks(self, runs=None):
        print "tick"
        print "----------------------------------------"
        sql = 'select distinct dt_tick from data where dt_stat=1180 and ('
        if runs != None:
            first = True
            for run in runs:
                if first:
                    # sql += ' where'
                    first = False
                else:
                    sql += ' or'
                sql += ' dt_run=%s' % run.run
            sql += ')'
        self.query(sql)
        for r in self.cursor.fetchall():
            print r[0]

    # Name: retTicks
    # Desc: Prints all samples for a given run
    def retTicks(self, runs=None):
        sql = 'select distinct dt_tick from data where dt_stat=1180 and ('
        if runs != None:
            first = True
            for run in runs:
                if first:
                    first = False
                else:
                    sql += ' or'
                sql += ' dt_run=%s' % run.run
            sql += ')'
        self.query(sql)
        ret = []
        for r in self.cursor.fetchall():
            ret.append(r[0])
        return ret

    # Name: liststats
    # Desc: Prints all statistics that appear in the database,
    #       the optional argument is a regular expression that can
    #       be used to prune the result set
    def listStats(self, regex=None):
        print '%-60s %-8s %-10s' % ('stat name', 'id', 'type')
        print '-' * 80

        rx = None
        if regex != None:
            rx = re.compile(regex)

        stats = [ stat.name for stat in self.allStats ]
        stats.sort(statcmp)
        for stat in stats:
            stat = self.allStatNames[stat]
            if rx == None or rx.match(stat.name):
                print '%-60s %-8s %-10s' % (stat.name, stat.stat, stat.type)

    # Name: liststats
    # Desc: Prints all statistics that appear in the database,
    #       the optional argument is a regular expression that can
    #       be used to prune the result set
    def listFormulas(self, regex=None):
        print '%-60s %s' % ('formula name', 'formula')
        print '-' * 80

        rx = None
        if regex != None:
            rx = re.compile(regex)

        stats = [ stat.name for stat in self.allStats ]
        stats.sort(statcmp)
        for stat in stats:
            stat = self.allStatNames[stat]
            if stat.type == 'FORMULA' and (rx == None or rx.match(stat.name)):
                print '%-60s %s' % (stat.name, self.allFormulas[stat.stat])

    def getStat(self, stats):
        if type(stats) is not list:
            stats = [ stats ]

        ret = []
        for stat in stats:
            if type(stat) is int:
                ret.append(self.allStatIds[stat])

            if type(stat) is str:
                rx = re.compile(stat)
                for stat in self.allStats:
                    if rx.match(stat.name):
                        ret.append(stat)
        return ret

    #########################################
    # get the data
    #
    def query(self, op, stat, ticks, group=False):
        sql = 'select '
        sql += 'dt_stat as stat, '
        sql += 'dt_run as run, '
        sql += 'dt_x as x, '
        sql += 'dt_y as y, '
        if group:
            sql += 'dt_tick as tick, '
        sql += '%s(dt_data) as data ' % op
        sql += 'from data '
        sql += 'where '

        if isinstance(stat, list):
            val = ' or '.join([ 'dt_stat=%d' % s.stat for s in stat ])
            sql += ' (%s)' % val
        else:
            sql += ' dt_stat=%d' % stat.stat

        if self.runs != None and len(self.runs):
            val = ' or '.join([ 'dt_run=%d' % r for r in self.runs ])
            sql += ' and (%s)' % val

        if ticks != None and len(ticks):
            val = ' or '.join([ 'dt_tick=%d' % s for s in ticks ])
            sql += ' and (%s)' % val

        sql += ' group by dt_stat,dt_run,dt_x,dt_y'
        if group:
            sql += ',dt_tick'
        return sql

    # Name: sum
    # Desc: given a run, a stat and an array of samples, total the samples
    def sum(self, *args, **kwargs):
        return self.query('sum', *args, **kwargs)

    # Name: avg
    # Desc: given a run, a stat and an array of samples, average the samples
    def avg(self, stat, ticks):
        return self.query('avg', *args, **kwargs)

    # Name: stdev
    # Desc: given a run, a stat and an array of samples, get the standard
    #       deviation
    def stdev(self, stat, ticks):
        return self.query('stddev', *args, **kwargs)

    def __setattr__(self, attr, value):
        super(Database, self).__setattr__(attr, value)
        if attr != 'method':
            return

        if value == 'sum':
            self._method = self.sum
        elif value == 'avg':
            self._method = self.avg
        elif value == 'stdev':
            self._method = self.stdev
        else:
            raise AttributeError, "can only set get to: sum | avg | stdev"

    def data(self, stat, ticks=None):
        if ticks is None:
            ticks = self.ticks
        sql = self._method(self, stat, ticks)
        self.query(sql)

        runs = {}
        xmax = 0
        ymax = 0
        for x in self.cursor.fetchall():
            data = Data(x)
            if not runs.has_key(data.run):
                runs[data.run] = {}
            if not runs[data.run].has_key(data.x):
                runs[data.run][data.x] = {}

            xmax = max(xmax, data.x)
            ymax = max(ymax, data.y)
            runs[data.run][data.x][data.y] = data.data

        results = Result(xmax + 1, ymax + 1)
        for run,data in runs.iteritems():
            result = results[run]
            for x,ydata in data.iteritems():
                for y,data in ydata.iteritems():
                    result[x][y] = data
        return results

    def __getitem__(self, key):
        return self.stattop[key]
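
Note: Database wraps the MySQL connection and caches runs, subdata, formulas, and stats on connect(). A hedged usage sketch (illustrative only; the host name below is hypothetical, the hard-coded default in __init__ is 'zizzer.pool' and the database is 'm5stats'):

    from db import Database

    db = Database()
    db.host = 'dbhost.example.org'   # hypothetical host, overrides the default
    db.connect()                     # loads runs, subdata, formulas and stats
    db.listRuns()                    # dump all runs in the m5stats database
    db.listStats('^sim_')            # stats whose names match a regex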

simulators/gem5/util/stats/dbinit.py (new file, 385 lines)
@@ -0,0 +1,385 @@
# Copyright (c) 2003-2004 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Nathan Binkert

import MySQLdb

class MyDB(object):
    def __init__(self, options):
        self.name = options.db
        self.host = options.host
        self.user = options.user
        self.passwd = options.passwd
        self.mydb = None
        self.cursor = None

    def admin(self):
        self.close()
        self.mydb = MySQLdb.connect(db='mysql', host=self.host, user=self.user,
                                    passwd=self.passwd)
        self.cursor = self.mydb.cursor()

    def connect(self):
        self.close()
        self.mydb = MySQLdb.connect(db=self.name, host=self.host,
                                    user=self.user, passwd=self.passwd)
        self.cursor = self.mydb.cursor()

    def close(self):
        if self.mydb is not None:
            self.mydb.close()
        self.cursor = None

    def query(self, sql):
        self.cursor.execute(sql)

    def drop(self):
        self.query('DROP DATABASE IF EXISTS %s' % self.name)

    def create(self):
        self.query('CREATE DATABASE %s' % self.name)

    def populate(self):
        #
        # Each run (or simulation) gets its own entry in the runs table to
        # group stats by where they were generated
        #
        # COLUMNS:
        #   'id' is a unique identifier for each run to be used in other
        #       tables.
        #   'name' is the user designated name for the data generated. It is
        #       configured in the simulator.
        #   'user' identifies the user that generated the data for the given
        #       run.
        #   'project' another name to identify runs for a specific goal
        #   'date' is a timestamp for when the data was generated. It can be
        #       used to easily expire data that was generated in the past.
        #   'expire' is a timestamp for when the data should be removed from
        #       the database so we don't have years worth of junk.
        #
        # INDEXES:
        #   'run' is indexed so you can find out details of a run if the run
        #       was retreived from the data table.
        #   'name' is indexed so that two all run names are forced to be unique
        #
        self.query('''
        CREATE TABLE runs(
            rn_id       SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
            rn_name     VARCHAR(200)      NOT NULL,
            rn_sample   VARCHAR(32)       NOT NULL,
            rn_user     VARCHAR(32)       NOT NULL,
            rn_project  VARCHAR(100)      NOT NULL,
            rn_date     TIMESTAMP         NOT NULL,
            rn_expire   TIMESTAMP         NOT NULL,
            PRIMARY KEY (rn_id),
            UNIQUE (rn_name,rn_sample)
        ) TYPE=InnoDB''')

        #
        # The stat table gives us all of the data for a particular stat.
        #
        # COLUMNS:
        #   'stat' is a unique identifier for each stat to be used in other
        #       tables for references.
        #   'name' is simply the simulator derived name for a given
        #       statistic.
        #   'descr' is the description of the statistic and what it tells
        #       you.
        #   'type' defines what the stat tells you. Types are:
        #       SCALAR: A simple scalar statistic that holds one value
        #       VECTOR: An array of statistic values. Such a something that
        #           is generated per-thread. Vectors exist to give averages,
        #           pdfs, cdfs, means, standard deviations, etc across the
        #           stat values.
        #       DIST: Is a distribution of data. When the statistic value is
        #           sampled, its value is counted in a particular bucket.
        #           Useful for keeping track of utilization of a resource.
        #           (e.g. fraction of time it is 25% used vs. 50% vs. 100%)
        #       VECTORDIST: Can be used when the distribution needs to be
        #           factored out into a per-thread distribution of data for
        #           example. It can still be summed across threads to find
        #           the total distribution.
        #       VECTOR2D: Can be used when you have a stat that is not only
        #           per-thread, but it is per-something else. Like
        #           per-message type.
        #       FORMULA: This statistic is a formula, and its data must be
        #           looked up in the formula table, for indicating how to
        #           present its values.
        #   'subdata' is potentially used by any of the vector types to
        #       give a specific name to all of the data elements within a
        #       stat.
        #   'print' indicates whether this stat should be printed ever.
        #       (Unnamed stats don't usually get printed)
        #   'prereq' only print the stat if the prereq is not zero.
        #   'prec' number of decimal places to print
        #   'nozero' don't print zero values
        #   'nonan' don't print NaN values
        #   'total' for vector type stats, print the total.
        #   'pdf' for vector type stats, print the pdf.
        #   'cdf' for vector type stats, print the cdf.
        #
        #   The Following are for dist type stats:
        #   'min' is the minimum bucket value. Anything less is an underflow.
        #   'max' is the maximum bucket value. Anything more is an overflow.
        #   'bktsize' is the approximate number of entries in each bucket.
        #   'size' is the number of buckets. equal to (min/max)/bktsize.
        #
        # INDEXES:
        #   'stat' is indexed so that you can find out details about a stat
        #       if the stat id was retrieved from the data table.
        #   'name' is indexed so that you can simply look up data about a
        #       named stat.
        #
        self.query('''
        CREATE TABLE stats(
            st_id       SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
            st_name     VARCHAR(255)      NOT NULL,
            st_descr    TEXT              NOT NULL,
            st_type     ENUM("SCALAR", "VECTOR", "DIST", "VECTORDIST",
                             "VECTOR2D", "FORMULA") NOT NULL,
            st_print    BOOL              NOT NULL,
            st_prereq   SMALLINT UNSIGNED NOT NULL,
            st_prec     TINYINT           NOT NULL,
            st_nozero   BOOL              NOT NULL,
            st_nonan    BOOL              NOT NULL,
            st_total    BOOL              NOT NULL,
            st_pdf      BOOL              NOT NULL,
            st_cdf      BOOL              NOT NULL,
            st_min      DOUBLE            NOT NULL,
            st_max      DOUBLE            NOT NULL,
            st_bktsize  DOUBLE            NOT NULL,
            st_size     SMALLINT UNSIGNED NOT NULL,
            PRIMARY KEY (st_id),
            UNIQUE (st_name)
        ) TYPE=InnoDB''')

        #
        # This is the main table of data for stats.
        #
        # COLUMNS:
        #   'stat' refers to the stat field given in the stat table.
        #
        #   'x' referrs to the first dimension of a multi-dimensional stat. For
        #       a vector, x will start at 0 and increase for each vector
        #       element.
        #       For a distribution:
        #       -1: sum (for calculating standard deviation)
        #       -2: sum of squares (for calculating standard deviation)
        #       -3: total number of samples taken (for calculating
        #           standard deviation)
        #       -4: minimum value
        #       -5: maximum value
        #       -6: underflow
        #       -7: overflow
        #   'y' is used by a VECTORDIST and the VECTOR2D to describe the second
        #       dimension.
        #   'run' is the run that the data was generated from. Details up in
        #       the run table
        #   'tick' is a timestamp generated by the simulator.
        #   'data' is the actual stat value.
        #
        # INDEXES:
        #   'stat' is indexed so that a user can find all of the data for a
        #       particular stat. It is not unique, because that specific stat
        #       can be found in many runs and samples, in addition to
        #       having entries for the mulidimensional cases.
        #   'run' is indexed to allow a user to remove all of the data for a
        #       particular execution run. It can also be used to allow the
        #       user to print out all of the data for a given run.
        #
        self.query('''
        CREATE TABLE data(
            dt_stat     SMALLINT UNSIGNED NOT NULL,
            dt_x        SMALLINT          NOT NULL,
            dt_y        SMALLINT          NOT NULL,
            dt_run      SMALLINT UNSIGNED NOT NULL,
            dt_tick     BIGINT UNSIGNED   NOT NULL,
            dt_data     DOUBLE            NOT NULL,
            INDEX (dt_stat),
            INDEX (dt_run),
            UNIQUE (dt_stat,dt_x,dt_y,dt_run,dt_tick)
        ) TYPE=InnoDB;''')

        #
        # Names and descriptions for multi-dimensional stats (vectors, etc.)
        # are stored here instead of having their own entry in the statistics
        # table. This allows all parts of a single stat to easily share a
        # single id.
        #
        # COLUMNS:
        #   'stat' is the unique stat identifier from the stat table.
        #   'x' is the first dimension for multi-dimensional stats
        #       corresponding to the data table above.
        #   'y' is the second dimension for multi-dimensional stats
        #       corresponding to the data table above.
        #   'name' is the specific subname for the unique stat,x,y combination.
        #   'descr' is the specific description for the uniqe stat,x,y
        #       combination.
        #
        # INDEXES:
        #   'stat' is indexed so you can get the subdata for a specific stat.
        #
        self.query('''
        CREATE TABLE subdata(
            sd_stat     SMALLINT UNSIGNED NOT NULL,
            sd_x        SMALLINT          NOT NULL,
            sd_y        SMALLINT          NOT NULL,
            sd_name     VARCHAR(255)      NOT NULL,
            sd_descr    TEXT,
            UNIQUE (sd_stat,sd_x,sd_y)
        ) TYPE=InnoDB''')


        #
        # The formula table is maintained separately from the data table
        # because formula data, unlike other stat data cannot be represented
        # there.
        #
        # COLUMNS:
        #   'stat' refers to the stat field generated in the stat table.
        #   'formula' is the actual string representation of the formula
        #       itself.
        #
        # INDEXES:
        #   'stat' is indexed so that you can just look up a formula.
        #
        self.query('''
        CREATE TABLE formulas(
            fm_stat     SMALLINT UNSIGNED NOT NULL,
            fm_formula  BLOB              NOT NULL,
            PRIMARY KEY(fm_stat)
        ) TYPE=InnoDB''')

        #
        # Each stat used in each formula is kept in this table. This way, if
        # you want to print out a particular formula, you can simply find out
        # which stats you need by looking in this table. Additionally, when
        # you remove a stat from the stats table and data table, you remove
        # any references to the formula in this table. When a formula is no
        # longer referred to, you remove its entry.
        #
        # COLUMNS:
        #   'stat' is the stat id from the stat table above.
        #   'child' is the stat id of a stat that is used for this formula.
        #       There may be many children for any given 'stat' (formula)
        #
        # INDEXES:
        #   'stat' is indexed so you can look up all of the children for a
        #       particular stat.
        #   'child' is indexed so that you can remove an entry when a stat is
        #       removed.
        #
        self.query('''
        CREATE TABLE formula_ref(
            fr_stat     SMALLINT UNSIGNED NOT NULL,
            fr_run      SMALLINT UNSIGNED NOT NULL,
            UNIQUE (fr_stat,fr_run),
            INDEX (fr_stat),
            INDEX (fr_run)
        ) TYPE=InnoDB''')

        # COLUMNS:
        #   'event' is the unique event id from the event_desc table
        #   'run' is simulation run id that this event took place in
        #   'tick' is the tick when the event happened
        #
        # INDEXES:
        #   'event' is indexed so you can look up all occurences of a
        #       specific event
        #   'run' is indexed so you can find all events in a run
        #   'tick' is indexed because we want the unique thing anyway
        #   'event,run,tick' is unique combination
        self.query('''
        CREATE TABLE events(
            ev_event    SMALLINT UNSIGNED NOT NULL,
            ev_run      SMALLINT UNSIGNED NOT NULL,
            ev_tick     BIGINT UNSIGNED   NOT NULL,
            INDEX(ev_event),
            INDEX(ev_run),
            INDEX(ev_tick),
            UNIQUE(ev_event,ev_run,ev_tick)
        ) TYPE=InnoDB''')

        # COLUMNS:
        #   'id' is the unique description id
        #   'name' is the name of the event that occurred
        #
        # INDEXES:
        #   'id' is indexed because it is the primary key and is what you use
        #       to look up the descriptions
        #   'name' is indexed so one can find the event based on name
        #
        self.query('''
        CREATE TABLE event_names(
            en_id       SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
            en_name     VARCHAR(255)      NOT NULL,
            PRIMARY KEY (en_id),
            UNIQUE (en_name)
        ) TYPE=InnoDB''')

    def clean(self):
        self.query('''
        DELETE data
        FROM data
        LEFT JOIN runs ON dt_run=rn_id
        WHERE rn_id IS NULL''')

        self.query('''
        DELETE formula_ref
        FROM formula_ref
        LEFT JOIN runs ON fr_run=rn_id
        WHERE rn_id IS NULL''')

        self.query('''
        DELETE formulas
        FROM formulas
        LEFT JOIN formula_ref ON fm_stat=fr_stat
        WHERE fr_stat IS NULL''')

        self.query('''
        DELETE stats
        FROM stats
        LEFT JOIN data ON st_id=dt_stat
        WHERE dt_stat IS NULL''')

        self.query('''
        DELETE subdata
        FROM subdata
        LEFT JOIN data ON sd_stat=dt_stat
        WHERE dt_stat IS NULL''')

        self.query('''
        DELETE events
        FROM events
        LEFT JOIN runs ON ev_run=rn_id
        WHERE rn_id IS NULL''')

        self.query('''
        DELETE event_names
        FROM event_names
        LEFT JOIN events ON en_id=ev_event
        WHERE ev_event IS NULL''')
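
Note: MyDB expects an options object carrying db/host/user/passwd and builds the schema above through admin()/create()/connect()/populate(). A hedged driver sketch (the Options class and connection values below are hypothetical stand-ins; the real tool passes parsed command-line options):

    from dbinit import MyDB

    class Options(object):            # stand-in for the parsed options object
        db = 'm5stats'
        host = 'localhost'
        user = 'm5'
        passwd = ''

    mydb = MyDB(Options())
    mydb.admin()                      # connect to the 'mysql' admin database
    mydb.drop()                       # DROP DATABASE IF EXISTS m5stats
    mydb.create()                     # CREATE DATABASE m5stats
    mydb.connect()                    # reconnect to the new database
    mydb.populate()                   # create runs, stats, data, ... tables
    mydb.close()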

simulators/gem5/util/stats/display.py (new file, 151 lines)
@@ -0,0 +1,151 @@
# Copyright (c) 2003-2004 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Nathan Binkert

class Value:
    def __init__(self, value, precision, percent = False):
        self.value = float(value)
        self.precision = precision
        self.percent = percent
    def __str__(self):
        if isinstance(self.value, str):
            if self.value.lower() == 'nan':
                value = 'NaN'
            if self.value.lower() == 'inf':
                value = 'Inf'
        else:
            if self.precision >= 0:
                format = "%%.%df" % self.precision
            elif self.value == 0.0:
                format = "%.0f"
            elif self.value % 1.0 == 0.0:
                format = "%.0f"
            else:
                format = "%f"
            value = self.value
            if self.percent:
                value = value * 100.0
            value = format % value

        if self.percent:
            value = value + "%"

        return value

class Print:
    def __init__(self, **vals):
        self.__dict__.update(vals)

    def __str__(self):
        value = Value(self.value, self.precision)
        pdf = ''
        cdf = ''
        if self.__dict__.has_key('pdf'):
            pdf = Value(self.pdf, 2, True)
        if self.__dict__.has_key('cdf'):
            cdf = Value(self.cdf, 2, True)

        output = "%-40s %12s %8s %8s" % (self.name, value, pdf, cdf)

        if descriptions and self.__dict__.has_key('desc') and self.desc:
            output = "%s # %s" % (output, self.desc)

        return output

    def doprint(self):
        if display_all:
            return True
        if self.value == 0.0 and (self.flags & flags_nozero):
            return False
        if isinstance(self.value, str):
            if self.value == 'NaN' and (self.flags & flags_nonan):
                return False
        return True

    def display(self):
        if self.doprint():
            print self

class VectorDisplay:
    def display(self):
        if not self.value:
            return

        p = Print()
        p.flags = self.flags
        p.precision = self.precision

        if not isinstance(self.value, (list, tuple)):
            p.name = self.name
            p.desc = self.desc
            p.value = self.value
            p.display()
            return

        mytotal = reduce(lambda x,y: float(x) + float(y), self.value)
        mycdf = 0.0

        value = self.value

        if display_all:
            subnames = [ '[%d]' % i for i in range(len(value)) ]
        else:
            subnames = [''] * len(value)

        if self.__dict__.has_key('subnames'):
            for i,each in enumerate(self.subnames):
                if len(each) > 0:
                    subnames[i] = '.%s' % each

        subdescs = [self.desc]*len(value)
        if self.__dict__.has_key('subdescs'):
            for i in xrange(min(len(value), len(self.subdescs))):
                subdescs[i] = self.subdescs[i]

        for val,sname,sdesc in map(None, value, subnames, subdescs):
            if mytotal > 0.0:
                mypdf = float(val) / float(mytotal)
                mycdf += mypdf
                if (self.flags & flags_pdf):
                    p.pdf = mypdf
                    p.cdf = mycdf

            if len(sname) == 0:
                continue

            p.name = self.name + sname
            p.desc = sdesc
            p.value = val
            p.display()

        if (self.flags & flags_total):
            if (p.__dict__.has_key('pdf')): del p.__dict__['pdf']
            if (p.__dict__.has_key('cdf')): del p.__dict__['cdf']
            p.name = self.name + '.total'
            p.desc = self.desc
            p.value = mytotal
            p.display()

simulators/gem5/util/stats/flags.py (new file, 36 lines)
@@ -0,0 +1,36 @@
# Copyright (c) 2004 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Nathan Binkert

init      = 0x00000001
printable = 0x00000002
total     = 0x00000010
pdf       = 0x00000020
cdf       = 0x00000040
dist      = 0x00000080
nozero    = 0x00000100
nonan     = 0x00000200
768
simulators/gem5/util/stats/info.py
Normal file
768
simulators/gem5/util/stats/info.py
Normal file
@ -0,0 +1,768 @@
|
||||
# Copyright (c) 2003-2004 The Regents of The University of Michigan
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met: redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer;
|
||||
# redistributions in binary form must reproduce the above copyright
|
||||
# notice, this list of conditions and the following disclaimer in the
|
||||
# documentation and/or other materials provided with the distribution;
|
||||
# neither the name of the copyright holders nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
# Authors: Nathan Binkert
|
||||
|
||||
from __future__ import division
|
||||
import math, operator, re, types

import flags
|
||||
|
||||
class ProxyError(Exception):
|
||||
pass
|
||||
|
||||
def unproxy(proxy):
|
||||
if hasattr(proxy, '__unproxy__'):
|
||||
return proxy.__unproxy__()
|
||||
|
||||
return proxy
|
||||
|
||||
def scalar(stat):
|
||||
stat = unproxy(stat)
|
||||
assert(stat.__scalar__() != stat.__vector__())
|
||||
return stat.__scalar__()
|
||||
|
||||
def vector(stat):
|
||||
stat = unproxy(stat)
|
||||
assert(stat.__scalar__() != stat.__vector__())
|
||||
return stat.__vector__()
|
||||
|
||||
def value(stat, *args):
|
||||
stat = unproxy(stat)
|
||||
return stat.__value__(*args)
|
||||
|
||||
def values(stat, run):
|
||||
stat = unproxy(stat)
|
||||
result = []
|
||||
for i in xrange(len(stat)):
|
||||
val = value(stat, run, i)
|
||||
if val is None:
|
||||
return None
|
||||
result.append(val)
|
||||
return result
|
||||
|
||||
def total(stat, run):
|
||||
return sum(values(stat, run))
|
||||
|
||||
def len(stat):
|
||||
stat = unproxy(stat)
|
||||
return stat.__len__()
|
||||
|
||||
class Value(object):
|
||||
def __scalar__(self):
|
||||
raise AttributeError, "must define __scalar__ for %s" % (type (self))
|
||||
def __vector__(self):
|
||||
raise AttributeError, "must define __vector__ for %s" % (type (self))
|
||||
|
||||
def __add__(self, other):
|
||||
return BinaryProxy(operator.__add__, self, other)
|
||||
def __sub__(self, other):
|
||||
return BinaryProxy(operator.__sub__, self, other)
|
||||
def __mul__(self, other):
|
||||
return BinaryProxy(operator.__mul__, self, other)
|
||||
def __div__(self, other):
|
||||
return BinaryProxy(operator.__div__, self, other)
|
||||
def __truediv__(self, other):
|
||||
return BinaryProxy(operator.__truediv__, self, other)
|
||||
def __floordiv__(self, other):
|
||||
return BinaryProxy(operator.__floordiv__, self, other)
|
||||
|
||||
def __radd__(self, other):
|
||||
return BinaryProxy(operator.__add__, other, self)
|
||||
def __rsub__(self, other):
|
||||
return BinaryProxy(operator.__sub__, other, self)
|
||||
def __rmul__(self, other):
|
||||
return BinaryProxy(operator.__mul__, other, self)
|
||||
def __rdiv__(self, other):
|
||||
return BinaryProxy(operator.__div__, other, self)
|
||||
def __rtruediv__(self, other):
|
||||
return BinaryProxy(operator.__truediv__, other, self)
|
||||
def __rfloordiv__(self, other):
|
||||
return BinaryProxy(operator.__floordiv__, other, self)
|
||||
|
||||
def __neg__(self):
|
||||
return UnaryProxy(operator.__neg__, self)
|
||||
def __pos__(self):
|
||||
return UnaryProxy(operator.__pos__, self)
|
||||
def __abs__(self):
|
||||
return UnaryProxy(operator.__abs__, self)
|
||||
|
||||
class Scalar(Value):
|
||||
def __scalar__(self):
|
||||
return True
|
||||
|
||||
def __vector__(self):
|
||||
return False
|
||||
|
||||
def __value__(self, run):
|
||||
raise AttributeError, '__value__ must be defined'
|
||||
|
||||
class VectorItemProxy(Value):
|
||||
def __init__(self, proxy, index):
|
||||
self.proxy = proxy
|
||||
self.index = index
|
||||
|
||||
def __scalar__(self):
|
||||
return True
|
||||
|
||||
def __vector__(self):
|
||||
return False
|
||||
|
||||
def __value__(self, run):
|
||||
return value(self.proxy, run, self.index)
|
||||
|
||||
class Vector(Value):
|
||||
def __scalar__(self):
|
||||
return False
|
||||
|
||||
def __vector__(self):
|
||||
return True
|
||||
|
||||
def __value__(self, run, index):
|
||||
raise AttributeError, '__value__ must be defined'
|
||||
|
||||
def __getitem__(self, index):
|
||||
return VectorItemProxy(self, index)
|
||||
|
||||
class ScalarConstant(Scalar):
|
||||
def __init__(self, constant):
|
||||
self.constant = constant
|
||||
def __value__(self, run):
|
||||
return self.constant
|
||||
def __str__(self):
|
||||
return str(self.constant)
|
||||
|
||||
class VectorConstant(Vector):
|
||||
def __init__(self, constant):
|
||||
self.constant = constant
|
||||
def __value__(self, run, index):
|
||||
return self.constant[index]
|
||||
def __len__(self):
|
||||
return len(self.constant)
|
||||
def __str__(self):
|
||||
return str(self.constant)
|
||||
|
||||
def WrapValue(value):
|
||||
if isinstance(value, (int, long, float)):
|
||||
return ScalarConstant(value)
|
||||
if isinstance(value, (list, tuple)):
|
||||
return VectorConstant(value)
|
||||
if isinstance(value, Value):
|
||||
return value
|
||||
|
||||
raise AttributeError, 'Only values can be wrapped'
|
||||
|
||||
class Statistic(object):
|
||||
def __getattr__(self, attr):
|
||||
if attr in ('data', 'x', 'y'):
|
||||
result = self.source.data(self, self.ticks)
|
||||
self.data = result.data
|
||||
self.x = result.x
|
||||
self.y = result.y
|
||||
return super(Statistic, self).__getattribute__(attr)
|
||||
|
||||
def __setattr__(self, attr, value):
|
||||
if attr == 'stat':
|
||||
            raise AttributeError, '%s is read only' % attr
|
||||
if attr in ('source', 'ticks'):
|
||||
if getattr(self, attr) != value:
|
||||
if hasattr(self, 'data'):
|
||||
delattr(self, 'data')
|
||||
|
||||
super(Statistic, self).__setattr__(attr, value)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
class ValueProxy(Value):
|
||||
def __getattr__(self, attr):
|
||||
if attr == '__value__':
|
||||
if scalar(self):
|
||||
return self.__scalarvalue__
|
||||
if vector(self):
|
||||
return self.__vectorvalue__
|
||||
if attr == '__len__':
|
||||
if vector(self):
|
||||
return self.__vectorlen__
|
||||
return super(ValueProxy, self).__getattribute__(attr)
|
||||
|
||||
class UnaryProxy(ValueProxy):
|
||||
def __init__(self, op, arg):
|
||||
self.op = op
|
||||
self.arg = WrapValue(arg)
|
||||
|
||||
def __scalar__(self):
|
||||
return scalar(self.arg)
|
||||
|
||||
def __vector__(self):
|
||||
return vector(self.arg)
|
||||
|
||||
def __scalarvalue__(self, run):
|
||||
val = value(self.arg, run)
|
||||
if val is None:
|
||||
return None
|
||||
return self.op(val)
|
||||
|
||||
def __vectorvalue__(self, run, index):
|
||||
val = value(self.arg, run, index)
|
||||
if val is None:
|
||||
return None
|
||||
return self.op(val)
|
||||
|
||||
def __vectorlen__(self):
|
||||
return len(unproxy(self.arg))
|
||||
|
||||
def __str__(self):
|
||||
if self.op == operator.__neg__:
|
||||
return '-%s' % str(self.arg)
|
||||
if self.op == operator.__pos__:
|
||||
return '+%s' % str(self.arg)
|
||||
if self.op == operator.__abs__:
|
||||
return 'abs(%s)' % self.arg
|
||||
|
||||
class BinaryProxy(ValueProxy):
|
||||
def __init__(self, op, arg0, arg1):
|
||||
super(BinaryProxy, self).__init__()
|
||||
self.op = op
|
||||
self.arg0 = WrapValue(arg0)
|
||||
self.arg1 = WrapValue(arg1)
|
||||
|
||||
def __scalar__(self):
|
||||
return scalar(self.arg0) and scalar(self.arg1)
|
||||
|
||||
def __vector__(self):
|
||||
return vector(self.arg0) or vector(self.arg1)
|
||||
|
||||
def __scalarvalue__(self, run):
|
||||
val0 = value(self.arg0, run)
|
||||
val1 = value(self.arg1, run)
|
||||
if val0 is None or val1 is None:
|
||||
return None
|
||||
try:
|
||||
return self.op(val0, val1)
|
||||
except ZeroDivisionError:
|
||||
return None
|
||||
|
||||
def __vectorvalue__(self, run, index):
|
||||
if scalar(self.arg0):
|
||||
val0 = value(self.arg0, run)
|
||||
if vector(self.arg0):
|
||||
val0 = value(self.arg0, run, index)
|
||||
if scalar(self.arg1):
|
||||
val1 = value(self.arg1, run)
|
||||
if vector(self.arg1):
|
||||
val1 = value(self.arg1, run, index)
|
||||
|
||||
if val0 is None or val1 is None:
|
||||
return None
|
||||
|
||||
try:
|
||||
return self.op(val0, val1)
|
||||
except ZeroDivisionError:
|
||||
return None
|
||||
|
||||
def __vectorlen__(self):
|
||||
if vector(self.arg0) and scalar(self.arg1):
|
||||
return len(self.arg0)
|
||||
if scalar(self.arg0) and vector(self.arg1):
|
||||
return len(self.arg1)
|
||||
|
||||
len0 = len(self.arg0)
|
||||
len1 = len(self.arg1)
|
||||
|
||||
if len0 != len1:
|
||||
raise AttributeError, \
|
||||
"vectors of different lengths %d != %d" % (len0, len1)
|
||||
|
||||
return len0
|
||||
|
||||
def __str__(self):
|
||||
ops = { operator.__add__ : '+',
|
||||
operator.__sub__ : '-',
|
||||
operator.__mul__ : '*',
|
||||
operator.__div__ : '/',
|
||||
operator.__truediv__ : '/',
|
||||
operator.__floordiv__ : '//' }
|
||||
|
||||
return '(%s %s %s)' % (str(self.arg0), ops[self.op], str(self.arg1))
|
||||
|
||||
class Proxy(Value):
|
||||
def __init__(self, name, dict):
|
||||
self.name = name
|
||||
self.dict = dict
|
||||
|
||||
def __unproxy__(self):
|
||||
return unproxy(self.dict[self.name])
|
||||
|
||||
def __getitem__(self, index):
|
||||
return ItemProxy(self, index)
|
||||
|
||||
def __getattr__(self, attr):
|
||||
return AttrProxy(self, attr)
|
||||
|
||||
def __str__(self):
|
||||
return str(self.dict[self.name])
|
||||
|
||||
class ItemProxy(Proxy):
|
||||
def __init__(self, proxy, index):
|
||||
self.proxy = proxy
|
||||
self.index = index
|
||||
|
||||
def __unproxy__(self):
|
||||
return unproxy(unproxy(self.proxy)[self.index])
|
||||
|
||||
def __str__(self):
|
||||
return '%s[%s]' % (self.proxy, self.index)
|
||||
|
||||
class AttrProxy(Proxy):
|
||||
def __init__(self, proxy, attr):
|
||||
self.proxy = proxy
|
||||
self.attr = attr
|
||||
|
||||
def __unproxy__(self):
|
||||
proxy = unproxy(self.proxy)
|
||||
try:
|
||||
attr = getattr(proxy, self.attr)
|
||||
except AttributeError, e:
|
||||
raise ProxyError, e
|
||||
return unproxy(attr)
|
||||
|
||||
def __str__(self):
|
||||
return '%s.%s' % (self.proxy, self.attr)
|
||||
|
||||
class ProxyGroup(object):
|
||||
def __init__(self, dict=None, **kwargs):
|
||||
self.__dict__['dict'] = {}
|
||||
|
||||
if dict is not None:
|
||||
self.dict.update(dict)
|
||||
|
||||
if kwargs:
|
||||
self.dict.update(kwargs)
|
||||
|
||||
def __getattr__(self, name):
|
||||
return Proxy(name, self.dict)
|
||||
|
||||
def __setattr__(self, attr, value):
|
||||
self.dict[attr] = value
|
||||
|
||||
class ScalarStat(Statistic,Scalar):
|
||||
def __value__(self, run):
|
||||
if run not in self.data:
|
||||
return None
|
||||
return self.data[run][0][0]
|
||||
|
||||
def display(self, run=None):
|
||||
import display
|
||||
p = display.Print()
|
||||
p.name = self.name
|
||||
p.desc = self.desc
|
||||
p.value = value(self, run)
|
||||
p.flags = self.flags
|
||||
p.precision = self.precision
|
||||
if display.all or (self.flags & flags.printable):
|
||||
p.display()
|
||||
|
||||
class VectorStat(Statistic,Vector):
|
||||
def __value__(self, run, item):
|
||||
if run not in self.data:
|
||||
return None
|
||||
return self.data[run][item][0]
|
||||
|
||||
def __len__(self):
|
||||
return self.x
|
||||
|
||||
def display(self, run=None):
|
||||
import display
|
||||
d = display.VectorDisplay()
|
||||
d.name = self.name
|
||||
d.desc = self.desc
|
||||
d.value = [ value(self, run, i) for i in xrange(len(self)) ]
|
||||
d.flags = self.flags
|
||||
d.precision = self.precision
|
||||
d.display()
|
||||
|
||||
class Formula(Value):
|
||||
def __getattribute__(self, attr):
|
||||
if attr not in ( '__scalar__', '__vector__', '__value__', '__len__' ):
|
||||
return super(Formula, self).__getattribute__(attr)
|
||||
|
||||
formula = re.sub(':', '__', self.formula)
|
||||
value = eval(formula, self.source.stattop)
|
||||
return getattr(value, attr)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
class SimpleDist(Statistic):
|
||||
def __init__(self, sums, squares, samples):
|
||||
self.sums = sums
|
||||
self.squares = squares
|
||||
self.samples = samples
|
||||
|
||||
def display(self, name, desc, flags, precision):
|
||||
import display
|
||||
p = display.Print()
|
||||
p.flags = flags
|
||||
p.precision = precision
|
||||
|
||||
if self.samples > 0:
|
||||
p.name = name + ".mean"
|
||||
p.value = self.sums / self.samples
|
||||
p.display()
|
||||
|
||||
p.name = name + ".stdev"
|
||||
if self.samples > 1:
|
||||
var = (self.samples * self.squares - self.sums ** 2) \
|
||||
/ (self.samples * (self.samples - 1))
|
||||
if var >= 0:
|
||||
p.value = math.sqrt(var)
|
||||
else:
|
||||
p.value = 'NaN'
|
||||
else:
|
||||
p.value = 0.0
|
||||
p.display()
|
||||
|
||||
p.name = name + ".samples"
|
||||
p.value = self.samples
|
||||
p.display()
|
||||
|
||||
def comparable(self, other):
|
||||
return True
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.sums == other.sums and self.squares == other.squares and \
|
||||
self.samples == other.samples
|
||||
|
||||
def __isub__(self, other):
|
||||
self.sums -= other.sums
|
||||
self.squares -= other.squares
|
||||
self.samples -= other.samples
|
||||
return self
|
||||
|
||||
def __iadd__(self, other):
|
||||
self.sums += other.sums
|
||||
self.squares += other.squares
|
||||
self.samples += other.samples
|
||||
return self
|
||||
|
||||
def __itruediv__(self, other):
|
||||
if not other:
|
||||
return self
|
||||
self.sums /= other
|
||||
self.squares /= other
|
||||
self.samples /= other
|
||||
return self
|
||||
|
||||
class FullDist(SimpleDist):
|
||||
def __init__(self, sums, squares, samples, minval, maxval,
|
||||
under, vec, over, min, max, bsize, size):
|
||||
self.sums = sums
|
||||
self.squares = squares
|
||||
self.samples = samples
|
||||
self.minval = minval
|
||||
self.maxval = maxval
|
||||
self.under = under
|
||||
self.vec = vec
|
||||
self.over = over
|
||||
self.min = min
|
||||
self.max = max
|
||||
self.bsize = bsize
|
||||
self.size = size
|
||||
|
||||
def display(self, name, desc, flags, precision):
|
||||
import display
|
||||
p = display.Print()
|
||||
p.flags = flags
|
||||
p.precision = precision
|
||||
|
||||
p.name = name + '.min_val'
|
||||
p.value = self.minval
|
||||
p.display()
|
||||
|
||||
p.name = name + '.max_val'
|
||||
p.value = self.maxval
|
||||
p.display()
|
||||
|
||||
p.name = name + '.underflow'
|
||||
p.value = self.under
|
||||
p.display()
|
||||
|
||||
i = self.min
|
||||
for val in self.vec[:-1]:
|
||||
p.name = name + '[%d:%d]' % (i, i + self.bsize - 1)
|
||||
p.value = val
|
||||
p.display()
|
||||
i += self.bsize
|
||||
|
||||
p.name = name + '[%d:%d]' % (i, self.max)
|
||||
p.value = self.vec[-1]
|
||||
p.display()
|
||||
|
||||
|
||||
p.name = name + '.overflow'
|
||||
p.value = self.over
|
||||
p.display()
|
||||
|
||||
SimpleDist.display(self, name, desc, flags, precision)
|
||||
|
||||
def comparable(self, other):
|
||||
return self.min == other.min and self.max == other.max and \
|
||||
self.bsize == other.bsize and self.size == other.size
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.sums == other.sums and self.squares == other.squares and \
|
||||
self.samples == other.samples
|
||||
|
||||
def __isub__(self, other):
|
||||
self.sums -= other.sums
|
||||
self.squares -= other.squares
|
||||
self.samples -= other.samples
|
||||
|
||||
if other.samples:
|
||||
self.minval = min(self.minval, other.minval)
|
||||
self.maxval = max(self.maxval, other.maxval)
|
||||
            self.under -= other.under
|
||||
self.vec = map(lambda x,y: x - y, self.vec, other.vec)
|
||||
            self.over -= other.over
|
||||
return self
|
||||
|
||||
def __iadd__(self, other):
|
||||
if not self.samples and other.samples:
|
||||
self = other
|
||||
return self
|
||||
|
||||
self.sums += other.sums
|
||||
self.squares += other.squares
|
||||
self.samples += other.samples
|
||||
|
||||
if other.samples:
|
||||
self.minval = min(self.minval, other.minval)
|
||||
self.maxval = max(self.maxval, other.maxval)
|
||||
self.under += other.under
|
||||
self.vec = map(lambda x,y: x + y, self.vec, other.vec)
|
||||
self.over += other.over
|
||||
return self
|
||||
|
||||
def __itruediv__(self, other):
|
||||
if not other:
|
||||
return self
|
||||
self.sums /= other
|
||||
self.squares /= other
|
||||
self.samples /= other
|
||||
|
||||
if self.samples:
|
||||
self.under /= other
|
||||
for i in xrange(len(self.vec)):
|
||||
self.vec[i] /= other
|
||||
self.over /= other
|
||||
return self
|
||||
|
||||
class Dist(Statistic):
|
||||
def display(self):
|
||||
import display
|
||||
if not display.all and not (self.flags & flags.printable):
|
||||
return
|
||||
|
||||
self.dist.display(self.name, self.desc, self.flags, self.precision)
|
||||
|
||||
def comparable(self, other):
|
||||
return self.name == other.name and \
|
||||
               self.dist.comparable(other.dist)
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.dist == other.dist
|
||||
|
||||
def __isub__(self, other):
|
||||
self.dist -= other.dist
|
||||
return self
|
||||
|
||||
def __iadd__(self, other):
|
||||
self.dist += other.dist
|
||||
return self
|
||||
|
||||
def __itruediv__(self, other):
|
||||
if not other:
|
||||
return self
|
||||
self.dist /= other
|
||||
return self
|
||||
|
||||
class VectorDist(Statistic):
|
||||
def display(self):
|
||||
import display
|
||||
if not display.all and not (self.flags & flags.printable):
|
||||
return
|
||||
|
||||
if isinstance(self.dist, SimpleDist):
|
||||
return
|
||||
|
||||
for dist,sn,sd,i in map(None, self.dist, self.subnames, self.subdescs,
|
||||
range(len(self.dist))):
|
||||
if len(sn) > 0:
|
||||
name = '%s.%s' % (self.name, sn)
|
||||
else:
|
||||
name = '%s[%d]' % (self.name, i)
|
||||
|
||||
if len(sd) > 0:
|
||||
desc = sd
|
||||
else:
|
||||
desc = self.desc
|
||||
|
||||
dist.display(name, desc, self.flags, self.precision)
|
||||
|
||||
        # the 'or 1' below forces the total to be printed even when flags.total is unset
        if (self.flags & flags.total) or 1:
            if isinstance(self.dist[0], SimpleDist):
                disttotal = SimpleDist( \
                    reduce(sums, [d.sums for d in self.dist]),
                    reduce(sums, [d.squares for d in self.dist]),
                    reduce(sums, [d.samples for d in self.dist]))
            else:
                disttotal = FullDist( \
                    reduce(sums, [d.sums for d in self.dist]),
                    reduce(sums, [d.squares for d in self.dist]),
                    reduce(sums, [d.samples for d in self.dist]),
                    min([d.minval for d in self.dist]),
                    max([d.maxval for d in self.dist]),
                    reduce(sums, [d.under for d in self.dist]),
                    reduce(sums, [d.vec for d in self.dist]),
                    reduce(sums, [d.over for d in self.dist]),
                    self.dist[0].min,
                    self.dist[0].max,
                    self.dist[0].bsize,
                    self.dist[0].size)
|
||||
|
||||
name = '%s.total' % (self.name)
|
||||
desc = self.desc
|
||||
disttotal.display(name, desc, self.flags, self.precision)
|
||||
|
||||
def comparable(self, other):
|
||||
return self.name == other.name and \
|
||||
alltrue(map(lambda x, y : x.comparable(y),
|
||||
self.dist,
|
||||
other.dist))
|
||||
|
||||
def __eq__(self, other):
|
||||
return alltrue(map(lambda x, y : x == y, self.dist, other.dist))
|
||||
|
||||
def __isub__(self, other):
|
||||
if isinstance(self.dist, (list, tuple)) and \
|
||||
isinstance(other.dist, (list, tuple)):
|
||||
for sd,od in zip(self.dist, other.dist):
|
||||
sd -= od
|
||||
else:
|
||||
self.dist -= other.dist
|
||||
return self
|
||||
|
||||
def __iadd__(self, other):
|
||||
if isinstance(self.dist, (list, tuple)) and \
|
||||
isinstance(other.dist, (list, tuple)):
|
||||
for sd,od in zip(self.dist, other.dist):
|
||||
sd += od
|
||||
else:
|
||||
self.dist += other.dist
|
||||
return self
|
||||
|
||||
def __itruediv__(self, other):
|
||||
if not other:
|
||||
return self
|
||||
if isinstance(self.dist, (list, tuple)):
|
||||
for dist in self.dist:
|
||||
dist /= other
|
||||
else:
|
||||
self.dist /= other
|
||||
return self
|
||||
|
||||
class Vector2d(Statistic):
|
||||
def display(self):
|
||||
import display
|
||||
if not display.all and not (self.flags & flags.printable):
|
||||
return
|
||||
|
||||
d = display.VectorDisplay()
|
||||
d.__dict__.update(self.__dict__)
|
||||
|
||||
if self.__dict__.has_key('ysubnames'):
|
||||
ysubnames = list(self.ysubnames)
|
||||
slack = self.x - len(ysubnames)
|
||||
if slack > 0:
|
||||
ysubnames.extend(['']*slack)
|
||||
else:
|
||||
ysubnames = range(self.x)
|
||||
|
||||
for x,sname in enumerate(ysubnames):
|
||||
o = x * self.y
|
||||
d.value = self.value[o:o+self.y]
|
||||
d.name = '%s[%s]' % (self.name, sname)
|
||||
d.display()
|
||||
|
||||
if self.flags & flags.total:
|
||||
d.value = []
|
||||
for y in range(self.y):
|
||||
xtot = 0.0
|
||||
for x in range(self.x):
|
||||
                    xtot += self.value[y + x * self.y]
|
||||
d.value.append(xtot)
|
||||
|
||||
d.name = self.name + '.total'
|
||||
d.display()
|
||||
|
||||
def comparable(self, other):
|
||||
return self.name == other.name and self.x == other.x and \
|
||||
self.y == other.y
|
||||
|
||||
def __eq__(self, other):
|
||||
return True
|
||||
|
||||
def __isub__(self, other):
|
||||
return self
|
||||
|
||||
def __iadd__(self, other):
|
||||
return self
|
||||
|
||||
def __itruediv__(self, other):
|
||||
if not other:
|
||||
return self
|
||||
return self
|
||||
|
||||
def NewStat(source, data):
|
||||
stat = None
|
||||
if data.type == 'SCALAR':
|
||||
stat = ScalarStat()
|
||||
elif data.type == 'VECTOR':
|
||||
stat = VectorStat()
|
||||
elif data.type == 'DIST':
|
||||
stat = Dist()
|
||||
elif data.type == 'VECTORDIST':
|
||||
stat = VectorDist()
|
||||
elif data.type == 'VECTOR2D':
|
||||
stat = Vector2d()
|
||||
elif data.type == 'FORMULA':
|
||||
stat = Formula()
|
||||
|
||||
stat.__dict__['source'] = source
|
||||
stat.__dict__['ticks'] = None
|
||||
stat.__dict__.update(data.__dict__)
|
||||
|
||||
return stat
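The Value/Proxy classes above let arithmetic over statistics be written lazily and evaluated later against a run. A minimal sketch using only the wrappers defined in this file, with made-up numbers (assuming the file is importable as info, e.g. when run from util/stats):

# Minimal sketch; all values are illustrative.
from info import ScalarConstant, VectorConstant, value, values

bytes_tx = ScalarConstant(1024.0)
bytes_rx = ScalarConstant(2048.0)
per_cpu = VectorConstant([10.0, 20.0, 30.0])

total_bytes = bytes_tx + bytes_rx       # builds a BinaryProxy, nothing evaluated yet
scaled = per_cpu / 2                    # vector op scalar is also lazy

print value(total_bytes, None)          # evaluates now -> 3072.0
print values(scaled, None)              # -> [5.0, 10.0, 15.0]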
213
simulators/gem5/util/stats/output.py
Normal file
@ -0,0 +1,213 @@
|
||||
# Copyright (c) 2005-2006 The Regents of The University of Michigan
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met: redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer;
|
||||
# redistributions in binary form must reproduce the above copyright
|
||||
# notice, this list of conditions and the following disclaimer in the
|
||||
# documentation and/or other materials provided with the distribution;
|
||||
# neither the name of the copyright holders nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
# Authors: Nathan Binkert
|
||||
|
||||
from chart import ChartOptions
|
||||
|
||||
class StatOutput(ChartOptions):
|
||||
def __init__(self, jobfile, info, stat=None):
|
||||
super(StatOutput, self).__init__()
|
||||
self.jobfile = jobfile
|
||||
self.stat = stat
|
||||
self.invert = False
|
||||
self.info = info
|
||||
|
||||
def display(self, name, printmode = 'G'):
|
||||
import info
|
||||
|
||||
if printmode == 'G':
|
||||
valformat = '%g'
|
||||
elif printmode != 'F' and value > 1e6:
|
||||
valformat = '%0.5e'
|
||||
else:
|
||||
valformat = '%f'
|
||||
|
||||
for job in self.jobfile.jobs():
|
||||
value = self.info.get(job, self.stat)
|
||||
if value is None:
|
||||
return
|
||||
|
||||
if not isinstance(value, list):
|
||||
value = [ value ]
|
||||
|
||||
if self.invert:
|
||||
for i,val in enumerate(value):
|
||||
if val != 0.0:
|
||||
value[i] = 1 / val
|
||||
|
||||
valstring = ', '.join([ valformat % val for val in value ])
|
||||
print '%-50s %s' % (job.name + ':', valstring)
|
||||
|
||||
def graph(self, name, graphdir, proxy=None):
|
||||
from os.path import expanduser, isdir, join as joinpath
|
||||
from barchart import BarChart
|
||||
from matplotlib.numerix import Float, array, zeros
|
||||
import os, re, urllib
|
||||
from jobfile import crossproduct
|
||||
|
||||
confgroups = self.jobfile.groups()
|
||||
ngroups = len(confgroups)
|
||||
skiplist = [ False ] * ngroups
|
||||
groupopts = []
|
||||
baropts = []
|
||||
groups = []
|
||||
for i,group in enumerate(confgroups):
|
||||
if group.flags.graph_group:
|
||||
groupopts.append(group.subopts())
|
||||
skiplist[i] = True
|
||||
elif group.flags.graph_bars:
|
||||
baropts.append(group.subopts())
|
||||
skiplist[i] = True
|
||||
else:
|
||||
groups.append(group)
|
||||
|
||||
has_group = bool(groupopts)
|
||||
if has_group:
|
||||
groupopts = [ group for group in crossproduct(groupopts) ]
|
||||
else:
|
||||
groupopts = [ None ]
|
||||
|
||||
if baropts:
|
||||
baropts = [ bar for bar in crossproduct(baropts) ]
|
||||
else:
|
||||
raise AttributeError, 'No group selected for graph bars'
|
||||
|
||||
directory = expanduser(graphdir)
|
||||
if not isdir(directory):
|
||||
os.mkdir(directory)
|
||||
html = file(joinpath(directory, '%s.html' % name), 'w')
|
||||
print >>html, '<html>'
|
||||
print >>html, '<title>Graphs for %s</title>' % name
|
||||
print >>html, '<body>'
|
||||
html.flush()
|
||||
|
||||
for options in self.jobfile.options(groups):
|
||||
chart = BarChart(self)
|
||||
|
||||
data = [ [ None ] * len(baropts) for i in xrange(len(groupopts)) ]
|
||||
enabled = False
|
||||
stacked = 0
|
||||
for g,gopt in enumerate(groupopts):
|
||||
for b,bopt in enumerate(baropts):
|
||||
if gopt is None:
|
||||
gopt = []
|
||||
job = self.jobfile.job(options + gopt + bopt)
|
||||
if not job:
|
||||
continue
|
||||
|
||||
if proxy:
|
||||
import db
|
||||
proxy.dict['system'] = self.info[job.system]
|
||||
val = self.info.get(job, self.stat)
|
||||
if val is None:
|
||||
print 'stat "%s" for job "%s" not found' % \
|
||||
(self.stat, job)
|
||||
|
||||
if isinstance(val, (list, tuple)):
|
||||
if len(val) == 1:
|
||||
val = val[0]
|
||||
else:
|
||||
stacked = len(val)
|
||||
|
||||
data[g][b] = val
|
||||
|
||||
if stacked == 0:
|
||||
for i in xrange(len(groupopts)):
|
||||
for j in xrange(len(baropts)):
|
||||
if data[i][j] is None:
|
||||
data[i][j] = 0.0
|
||||
else:
|
||||
for i in xrange(len(groupopts)):
|
||||
for j in xrange(len(baropts)):
|
||||
val = data[i][j]
|
||||
if val is None:
|
||||
data[i][j] = [ 0.0 ] * stacked
|
||||
elif len(val) != stacked:
|
||||
raise ValueError, "some stats stacked, some not"
|
||||
|
||||
data = array(data)
|
||||
if data.sum() == 0:
|
||||
continue
|
||||
|
||||
dim = len(data.shape)
|
||||
x = data.shape[0]
|
||||
xkeep = [ i for i in xrange(x) if data[i].sum() != 0 ]
|
||||
y = data.shape[1]
|
||||
ykeep = [ i for i in xrange(y) if data[:,i].sum() != 0 ]
|
||||
data = data.take(xkeep, axis=0)
|
||||
data = data.take(ykeep, axis=1)
|
||||
if not has_group:
|
||||
data = data.take([ 0 ], axis=0)
|
||||
chart.data = data
|
||||
|
||||
|
||||
bopts = [ baropts[i] for i in ykeep ]
|
||||
bdescs = [ ' '.join([o.desc for o in opt]) for opt in bopts]
|
||||
|
||||
if has_group:
|
||||
gopts = [ groupopts[i] for i in xkeep ]
|
||||
gdescs = [ ' '.join([o.desc for o in opt]) for opt in gopts]
|
||||
|
||||
if chart.legend is None:
|
||||
if stacked:
|
||||
try:
|
||||
chart.legend = self.info.rcategories
|
||||
except:
|
||||
chart.legend = [ str(i) for i in xrange(stacked) ]
|
||||
else:
|
||||
chart.legend = bdescs
|
||||
|
||||
if chart.xticks is None:
|
||||
if has_group:
|
||||
chart.xticks = gdescs
|
||||
else:
|
||||
chart.xticks = []
|
||||
chart.graph()
|
||||
|
||||
names = [ opt.name for opt in options ]
|
||||
descs = [ opt.desc for opt in options ]
|
||||
|
||||
if names[0] == 'run':
|
||||
names = names[1:]
|
||||
descs = descs[1:]
|
||||
|
||||
basename = '%s-%s' % (name, ':'.join(names))
|
||||
desc = ' '.join(descs)
|
||||
|
||||
pngname = '%s.png' % basename
|
||||
            psname = '%s.ps' % re.sub(':', '-', basename)
            epsname = '%s.eps' % re.sub(':', '-', basename)
|
||||
chart.savefig(joinpath(directory, pngname))
|
||||
chart.savefig(joinpath(directory, epsname))
|
||||
chart.savefig(joinpath(directory, psname))
|
||||
html_name = urllib.quote(pngname)
|
||||
print >>html, '''%s<br><img src="%s"><br>''' % (desc, html_name)
|
||||
html.flush()
|
||||
|
||||
print >>html, '</body>'
|
||||
print >>html, '</html>'
|
||||
html.close()
|
||||
155
simulators/gem5/util/stats/print.py
Normal file
@ -0,0 +1,155 @@
|
||||
# Copyright (c) 2003-2004 The Regents of The University of Michigan
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met: redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer;
|
||||
# redistributions in binary form must reproduce the above copyright
|
||||
# notice, this list of conditions and the following disclaimer in the
|
||||
# documentation and/or other materials provided with the distribution;
|
||||
# neither the name of the copyright holders nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
# Authors: Nathan Binkert
|
||||
|
||||
import flags

all = False
|
||||
descriptions = False
|
||||
|
||||
class Value:
|
||||
def __init__(self, value, precision, percent = False):
|
||||
self.value = value
|
||||
self.precision = precision
|
||||
self.percent = percent
|
||||
def __str__(self):
|
||||
if isinstance(self.value, str):
|
||||
            if self.value.lower() == 'nan':
                value = 'NaN'
            elif self.value.lower() == 'inf':
                value = 'Inf'
            else:
                value = self.value
|
||||
else:
|
||||
if self.precision >= 0:
|
||||
format = "%%.%df" % self.precision
|
||||
elif self.value == 0.0:
|
||||
format = "%.0f"
|
||||
elif self.value % 1.0 == 0.0:
|
||||
format = "%.0f"
|
||||
else:
|
||||
format = "%f"
|
||||
value = self.value
|
||||
if self.percent:
|
||||
value = value * 100.0
|
||||
value = format % value
|
||||
|
||||
if self.percent:
|
||||
value = value + "%"
|
||||
|
||||
return value
|
||||
|
||||
class Print:
|
||||
def __init__(self, **vals):
|
||||
self.__dict__.update(vals)
|
||||
|
||||
def __str__(self):
|
||||
value = Value(self.value, self.precision)
|
||||
pdf = ''
|
||||
cdf = ''
|
||||
if self.__dict__.has_key('pdf'):
|
||||
pdf = Value(self.pdf, 2, True)
|
||||
if self.__dict__.has_key('cdf'):
|
||||
cdf = Value(self.cdf, 2, True)
|
||||
|
||||
output = "%-40s %12s %8s %8s" % (self.name, value, pdf, cdf)
|
||||
|
||||
if descriptions and self.__dict__.has_key('desc') and self.desc:
|
||||
output = "%s # %s" % (output, self.desc)
|
||||
|
||||
return output
|
||||
|
||||
    def doprint(self):
        if all:
            return True
        if self.value == 0.0 and (self.flags & flags.nozero):
            return False
        if isinstance(self.value, str):
            if self.value == 'NaN' and (self.flags & flags.nonan):
                return False
        return True
|
||||
|
||||
def display(self):
|
||||
if self.doprint():
|
||||
print self
|
||||
|
||||
class VectorDisplay:
|
||||
def display(self):
|
||||
p = Print()
|
||||
p.flags = self.flags
|
||||
p.precision = self.precision
|
||||
|
||||
if isinstance(self.value, (list, tuple)):
|
||||
if not len(self.value):
|
||||
return
|
||||
|
||||
mytotal = reduce(lambda x,y: float(x) + float(y), self.value)
|
||||
mycdf = 0.0
|
||||
|
||||
value = self.value
|
||||
|
||||
            if all:
|
||||
subnames = [ '[%d]' % i for i in range(len(value)) ]
|
||||
else:
|
||||
subnames = [''] * len(value)
|
||||
|
||||
if self.__dict__.has_key('subnames'):
|
||||
for i,each in enumerate(self.subnames):
|
||||
if len(each) > 0:
|
||||
subnames[i] = '.%s' % each
|
||||
|
||||
subdescs = [self.desc]*len(value)
|
||||
if self.__dict__.has_key('subdescs'):
|
||||
for i in xrange(min(len(value), len(self.subdescs))):
|
||||
subdescs[i] = self.subdescs[i]
|
||||
|
||||
for val,sname,sdesc in map(None, value, subnames, subdescs):
|
||||
if mytotal > 0.0:
|
||||
mypdf = float(val) / float(mytotal)
|
||||
mycdf += mypdf
|
||||
                    if (self.flags & flags.pdf):
|
||||
p.pdf = mypdf
|
||||
p.cdf = mycdf
|
||||
|
||||
if len(sname) == 0:
|
||||
continue
|
||||
|
||||
p.name = self.name + sname
|
||||
p.desc = sdesc
|
||||
p.value = val
|
||||
p.display()
|
||||
|
||||
            if (self.flags & flags.total):
|
||||
if (p.__dict__.has_key('pdf')): del p.__dict__['pdf']
|
||||
if (p.__dict__.has_key('cdf')): del p.__dict__['cdf']
|
||||
p.name = self.name + '.total'
|
||||
p.desc = self.desc
|
||||
p.value = mytotal
|
||||
p.display()
|
||||
|
||||
else:
|
||||
p.name = self.name
|
||||
p.desc = self.desc
|
||||
p.value = self.value
|
||||
p.display()
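info.py drives the Print and VectorDisplay classes above by filling in attributes and calling display(); it refers to this module under the name display, while the file added here is print.py, so the load below spells that assumption out. A minimal sketch with made-up values:

# Minimal sketch; assumes it is run from util/stats (so print.py and flags.py are found).
import imp
display = imp.load_source('display', 'print.py')   # info.py refers to this module as 'display'

display.descriptions = True        # also show the description column

p = display.Print()
p.name = 'sim_ticks'
p.desc = 'number of ticks simulated'
p.value = 250000.0
p.precision = 0
print p        # -> sim_ticks    250000  # number of ticks simulated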
504
simulators/gem5/util/stats/profile.py
Normal file
@ -0,0 +1,504 @@
|
||||
# Copyright (c) 2005 The Regents of The University of Michigan
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met: redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer;
|
||||
# redistributions in binary form must reproduce the above copyright
|
||||
# notice, this list of conditions and the following disclaimer in the
|
||||
# documentation and/or other materials provided with the distribution;
|
||||
# neither the name of the copyright holders nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
# Authors: Nathan Binkert
|
||||
|
||||
from orderdict import orderdict
|
||||
import output
|
||||
|
||||
class FileData(dict):
|
||||
def __init__(self, filename):
|
||||
self.filename = filename
|
||||
fd = file(filename)
|
||||
current = []
|
||||
for line in fd:
|
||||
line = line.strip()
|
||||
if line.startswith('>>>'):
|
||||
current = []
|
||||
self[line[3:]] = current
|
||||
else:
|
||||
current.append(line)
|
||||
fd.close()
|
||||
|
||||
class RunData(dict):
|
||||
def __init__(self, filename):
|
||||
self.filename = filename
|
||||
|
||||
def __getattribute__(self, attr):
|
||||
if attr == 'total':
|
||||
total = 0.0
|
||||
for value in self.itervalues():
|
||||
total += value
|
||||
return total
|
||||
|
||||
if attr == 'filedata':
|
||||
return FileData(self.filename)
|
||||
|
||||
if attr == 'maxsymlen':
|
||||
return max([ len(sym) for sym in self.iterkeys() ])
|
||||
|
||||
return super(RunData, self).__getattribute__(attr)
|
||||
|
||||
def display(self, output=None, limit=None, maxsymlen=None):
|
||||
if not output:
|
||||
import sys
|
||||
output = sys.stdout
|
||||
elif isinstance(output, str):
|
||||
output = file(output, 'w')
|
||||
|
||||
total = float(self.total)
|
||||
|
||||
# swap (string,count) order so we can sort on count
|
||||
symbols = [ (count,name) for name,count in self.iteritems() ]
|
||||
symbols.sort(reverse=True)
|
||||
if limit is not None:
|
||||
symbols = symbols[:limit]
|
||||
|
||||
if not maxsymlen:
|
||||
maxsymlen = self.maxsymlen
|
||||
|
||||
symbolf = "%-" + str(maxsymlen + 1) + "s %.2f%%"
|
||||
for number,name in symbols:
|
||||
print >>output, symbolf % (name, 100.0 * (float(number) / total))
|
||||
|
||||
class PCData(RunData):
|
||||
def __init__(self, filename=None, categorize=None, showidle=True):
|
||||
        super(PCData, self).__init__(filename)
|
||||
|
||||
filedata = self.filedata['PC data']
|
||||
for line in filedata:
|
||||
(symbol, count) = line.split()
|
||||
if symbol == "0x0":
|
||||
continue
|
||||
count = int(count)
|
||||
|
||||
if categorize is not None:
|
||||
category = categorize(symbol)
|
||||
if category is None:
|
||||
category = 'other'
|
||||
elif category == 'idle' and not showidle:
|
||||
continue
|
||||
|
||||
self[category] = count
|
||||
|
||||
class FuncNode(object):
|
||||
def __new__(cls, filedata=None):
|
||||
if filedata is None:
|
||||
return super(FuncNode, cls).__new__(cls)
|
||||
|
||||
nodes = {}
|
||||
for line in filedata['function data']:
|
||||
data = line.split(' ')
|
||||
node_id = long(data[0], 16)
|
||||
node = FuncNode()
|
||||
node.symbol = data[1]
|
||||
if node.symbol == '':
|
||||
node.symbol = 'unknown'
|
||||
node.count = long(data[2])
|
||||
node.children = [ long(child, 16) for child in data[3:] ]
|
||||
nodes[node_id] = node
|
||||
|
||||
for node in nodes.itervalues():
|
||||
children = []
|
||||
for cid in node.children:
|
||||
child = nodes[cid]
|
||||
children.append(child)
|
||||
child.parent = node
|
||||
node.children = tuple(children)
|
||||
if not nodes:
|
||||
print filedata.filename
|
||||
print nodes
|
||||
return nodes[0]
|
||||
|
||||
def total(self):
|
||||
total = self.count
|
||||
for child in self.children:
|
||||
total += child.total()
|
||||
|
||||
return total
|
||||
|
||||
def aggregate(self, dict, categorize, incategory):
|
||||
category = None
|
||||
if categorize:
|
||||
category = categorize(self.symbol)
|
||||
|
||||
total = self.count
|
||||
for child in self.children:
|
||||
total += child.aggregate(dict, categorize, category or incategory)
|
||||
|
||||
if category:
|
||||
dict[category] = dict.get(category, 0) + total
|
||||
return 0
|
||||
elif not incategory:
|
||||
dict[self.symbol] = dict.get(self.symbol, 0) + total
|
||||
|
||||
return total
|
||||
|
||||
def dump(self):
|
||||
kids = [ child.symbol for child in self.children]
|
||||
print '%s %d <%s>' % (self.symbol, self.count, ', '.join(kids))
|
||||
for child in self.children:
|
||||
child.dump()
|
||||
|
||||
def _dot(self, dot, threshold, categorize, total):
|
||||
from pydot import Dot, Edge, Node
|
||||
self.dot_node = None
|
||||
|
||||
value = self.total() * 100.0 / total
|
||||
if value < threshold:
|
||||
return
|
||||
if categorize:
|
||||
category = categorize(self.symbol)
|
||||
if category and category != 'other':
|
||||
return
|
||||
label = '%s %.2f%%' % (self.symbol, value)
|
||||
self.dot_node = Node(self, label=label)
|
||||
dot.add_node(self.dot_node)
|
||||
|
||||
for child in self.children:
|
||||
child._dot(dot, threshold, categorize, total)
|
||||
if child.dot_node is not None:
|
||||
dot.add_edge(Edge(self, child))
|
||||
|
||||
def _cleandot(self):
|
||||
for child in self.children:
|
||||
child._cleandot()
|
||||
self.dot_node = None
|
||||
del self.__dict__['dot_node']
|
||||
|
||||
def dot(self, dot, threshold=0.1, categorize=None):
|
||||
self._dot(dot, threshold, categorize, self.total())
|
||||
self._cleandot()
|
||||
|
||||
class FuncData(RunData):
|
||||
def __init__(self, filename, categorize=None):
|
||||
super(FuncData, self).__init__(filename)
|
||||
tree = self.tree
|
||||
tree.aggregate(self, categorize, incategory=False)
|
||||
self.total = tree.total()
|
||||
|
||||
def __getattribute__(self, attr):
|
||||
if attr == 'tree':
|
||||
return FuncNode(self.filedata)
|
||||
return super(FuncData, self).__getattribute__(attr)
|
||||
|
||||
def displayx(self, output=None, maxcount=None):
|
||||
if output is None:
|
||||
import sys
|
||||
output = sys.stdout
|
||||
|
||||
items = [ (val,key) for key,val in self.iteritems() ]
|
||||
items.sort(reverse=True)
|
||||
for val,key in items:
|
||||
if maxcount is not None:
|
||||
if maxcount == 0:
|
||||
return
|
||||
maxcount -= 1
|
||||
|
||||
percent = val * 100.0 / self.total
|
||||
print >>output, '%-30s %8s' % (key, '%3.2f%%' % percent)
|
||||
|
||||
class Profile(object):
|
||||
# This list controls the order of values in stacked bar data output
|
||||
default_categories = [ 'interrupt',
|
||||
'driver',
|
||||
'stack',
|
||||
'buffer',
|
||||
'copy',
|
||||
'syscall',
|
||||
'user',
|
||||
'other',
|
||||
'idle']
|
||||
|
||||
def __init__(self, datatype, categorize=None):
|
||||
categories = Profile.default_categories
|
||||
|
||||
self.datatype = datatype
|
||||
self.categorize = categorize
|
||||
self.data = {}
|
||||
self.categories = categories[:]
|
||||
self.rcategories = categories[:]
|
||||
self.rcategories.reverse()
|
||||
self.cpu = 0
|
||||
|
||||
# Read in files
|
||||
def inputdir(self, directory):
|
||||
import os, os.path, re
|
||||
from os.path import expanduser, join as joinpath
|
||||
|
||||
directory = expanduser(directory)
|
||||
        label_ex = re.compile(r'profile\.(.*)\.dat')
|
||||
for root,dirs,files in os.walk(directory):
|
||||
for name in files:
|
||||
match = label_ex.match(name)
|
||||
if not match:
|
||||
continue
|
||||
|
||||
filename = joinpath(root, name)
|
||||
prefix = os.path.commonprefix([root, directory])
|
||||
dirname = root[len(prefix)+1:]
|
||||
data = self.datatype(filename, self.categorize)
|
||||
self.setdata(dirname, match.group(1), data)
|
||||
|
||||
def setdata(self, run, cpu, data):
|
||||
if run not in self.data:
|
||||
self.data[run] = {}
|
||||
|
||||
if cpu in self.data[run]:
|
||||
raise AttributeError, \
|
||||
'data already stored for run %s and cpu %s' % (run, cpu)
|
||||
|
||||
self.data[run][cpu] = data
|
||||
|
||||
def getdata(self, run, cpu):
|
||||
try:
|
||||
return self.data[run][cpu]
|
||||
except KeyError:
|
||||
print run, cpu
|
||||
return None
|
||||
|
||||
def alldata(self):
|
||||
for run,cpus in self.data.iteritems():
|
||||
for cpu,data in cpus.iteritems():
|
||||
yield run,cpu,data
|
||||
|
||||
def get(self, job, stat, system=None):
|
||||
        if system is None and hasattr(job, 'system'):
|
||||
system = job.system
|
||||
|
||||
if system is None:
|
||||
raise AttributeError, 'The job must have a system set'
|
||||
|
||||
cpu = '%s.run%d' % (system, self.cpu)
|
||||
|
||||
data = self.getdata(str(job), cpu)
|
||||
if not data:
|
||||
return None
|
||||
|
||||
values = []
|
||||
for category in self.categories:
|
||||
val = float(data.get(category, 0.0))
|
||||
if val < 0.0:
|
||||
raise ValueError, 'value is %f' % val
|
||||
values.append(val)
|
||||
total = sum(values)
|
||||
return [ v / total * 100.0 for v in values ]
|
||||
|
||||
def dump(self):
|
||||
for run,cpu,data in self.alldata():
|
||||
print 'run %s, cpu %s' % (run, cpu)
|
||||
data.dump()
|
||||
print
|
||||
|
||||
def write_dot(self, threshold, jobfile=None, jobs=None):
|
||||
import pydot
|
||||
|
||||
if jobs is None:
|
||||
jobs = [ job for job in jobfile.jobs() ]
|
||||
|
||||
for job in jobs:
|
||||
cpu = '%s.run%d' % (job.system, self.cpu)
|
||||
symbols = self.getdata(job.name, cpu)
|
||||
if not symbols:
|
||||
continue
|
||||
|
||||
dot = pydot.Dot()
|
||||
symbols.tree.dot(dot, threshold=threshold)
|
||||
dot.write(symbols.filename[:-3] + 'dot')
|
||||
|
||||
def write_txt(self, jobfile=None, jobs=None, limit=None):
|
||||
if jobs is None:
|
||||
jobs = [ job for job in jobfile.jobs() ]
|
||||
|
||||
for job in jobs:
|
||||
cpu = '%s.run%d' % (job.system, self.cpu)
|
||||
symbols = self.getdata(job.name, cpu)
|
||||
if not symbols:
|
||||
continue
|
||||
|
||||
output = file(symbols.filename[:-3] + 'txt', 'w')
|
||||
symbols.display(output, limit)
|
||||
|
||||
def display(self, jobfile=None, jobs=None, limit=None):
|
||||
if jobs is None:
|
||||
jobs = [ job for job in jobfile.jobs() ]
|
||||
|
||||
maxsymlen = 0
|
||||
|
||||
thejobs = []
|
||||
for job in jobs:
|
||||
cpu = '%s.run%d' % (job.system, self.cpu)
|
||||
symbols = self.getdata(job.name, cpu)
|
||||
if symbols:
|
||||
thejobs.append(job)
|
||||
maxsymlen = max(maxsymlen, symbols.maxsymlen)
|
||||
|
||||
for job in thejobs:
|
||||
cpu = '%s.run%d' % (job.system, self.cpu)
|
||||
symbols = self.getdata(job.name, cpu)
|
||||
print job.name
|
||||
symbols.display(limit=limit, maxsymlen=maxsymlen)
|
||||
print
|
||||
|
||||
|
||||
from categories import func_categorize, pc_categorize
|
||||
class PCProfile(Profile):
|
||||
def __init__(self, categorize=pc_categorize):
|
||||
super(PCProfile, self).__init__(PCData, categorize)
|
||||
|
||||
|
||||
class FuncProfile(Profile):
|
||||
def __init__(self, categorize=func_categorize):
|
||||
super(FuncProfile, self).__init__(FuncData, categorize)
|
||||
|
||||
def usage(exitcode = None):
|
||||
print '''\
|
||||
Usage: %s [-bc] [-g <dir>] [-j <jobfile>] [-n <num>]
|
||||
|
||||
-c groups symbols into categories
|
||||
-b dumps data for bar charts
|
||||
-d generate dot output
|
||||
-g <d> draw graphs and send output to <d>
|
||||
-j <jobfile> specify a different jobfile (default is Test.py)
|
||||
-n <n> selects number of top symbols to print (default 5)
|
||||
''' % sys.argv[0]
|
||||
|
||||
if exitcode is not None:
|
||||
sys.exit(exitcode)
|
||||
|
||||
if __name__ == '__main__':
|
||||
import getopt, re, sys
|
||||
from os.path import expanduser
|
||||
from output import StatOutput
|
||||
|
||||
# default option values
|
||||
numsyms = 10
|
||||
graph = None
|
||||
cpus = [ 0 ]
|
||||
categorize = False
|
||||
showidle = True
|
||||
funcdata = True
|
||||
jobfilename = 'Test.py'
|
||||
dodot = False
|
||||
dotfile = None
|
||||
textout = False
|
||||
threshold = 0.01
|
||||
inputfile = None
|
||||
|
||||
try:
|
||||
opts, args = getopt.getopt(sys.argv[1:], 'C:cdD:f:g:ij:n:pT:t')
|
||||
except getopt.GetoptError:
|
||||
usage(2)
|
||||
|
||||
for o,a in opts:
|
||||
if o == '-C':
|
||||
cpus = [ int(x) for x in a.split(',') ]
|
||||
elif o == '-c':
|
||||
categorize = True
|
||||
elif o == '-D':
|
||||
dotfile = a
|
||||
elif o == '-d':
|
||||
dodot = True
|
||||
elif o == '-f':
|
||||
inputfile = expanduser(a)
|
||||
elif o == '-g':
|
||||
graph = a
|
||||
elif o == '-i':
|
||||
showidle = False
|
||||
elif o == '-j':
|
||||
jobfilename = a
|
||||
elif o == '-n':
|
||||
numsyms = int(a)
|
||||
elif o == '-p':
|
||||
funcdata = False
|
||||
elif o == '-T':
|
||||
threshold = float(a)
|
||||
elif o == '-t':
|
||||
textout = True
|
||||
|
||||
if args:
|
||||
print "'%s'" % args, len(args)
|
||||
usage(1)
|
||||
|
||||
if inputfile:
|
||||
catfunc = None
|
||||
if categorize:
|
||||
catfunc = func_categorize
|
||||
data = FuncData(inputfile, categorize=catfunc)
|
||||
|
||||
if dodot:
|
||||
import pydot
|
||||
dot = pydot.Dot()
|
||||
data.tree.dot(dot, threshold=threshold)
|
||||
#dot.orientation = 'landscape'
|
||||
#dot.ranksep='equally'
|
||||
#dot.rank='samerank'
|
||||
dot.write(dotfile, format='png')
|
||||
else:
|
||||
data.display(limit=numsyms)
|
||||
|
||||
else:
|
||||
from jobfile import JobFile
|
||||
jobfile = JobFile(jobfilename)
|
||||
|
||||
if funcdata:
|
||||
profile = FuncProfile()
|
||||
else:
|
||||
profile = PCProfile()
|
||||
|
||||
if not categorize:
|
||||
profile.categorize = None
|
||||
profile.inputdir(jobfile.rootdir)
|
||||
|
||||
if graph:
|
||||
for cpu in cpus:
|
||||
profile.cpu = cpu
|
||||
if funcdata:
|
||||
name = 'funcstacks%d' % cpu
|
||||
else:
|
||||
name = 'stacks%d' % cpu
|
||||
output = StatOutput(jobfile, info=profile)
|
||||
output.xlabel = 'System Configuration'
|
||||
output.ylabel = '% CPU utilization'
|
||||
output.stat = name
|
||||
output.graph(name, graph)
|
||||
|
||||
if dodot:
|
||||
for cpu in cpus:
|
||||
profile.cpu = cpu
|
||||
profile.write_dot(jobfile=jobfile, threshold=threshold)
|
||||
|
||||
if textout:
|
||||
for cpu in cpus:
|
||||
profile.cpu = cpu
|
||||
profile.write_txt(jobfile=jobfile)
|
||||
|
||||
if not graph and not textout and not dodot:
|
||||
for cpu in cpus:
|
||||
if not categorize:
|
||||
profile.categorize = None
|
||||
profile.cpu = cpu
|
||||
profile.display(jobfile=jobfile, limit=numsyms)
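The classes above can also be used directly from another script; a minimal sketch (the dump file name is hypothetical, following the profile.<cpu>.dat pattern that inputdir() scans for):

# Minimal sketch; run from util/stats so the local profile.py (not the stdlib
# profiler of the same name) and categories.py are found. The file name is made up.
from profile import FuncData, func_categorize

data = FuncData('profile.0.dat', categorize=func_categorize)
data.display(limit=5)       # top 5 entries as percentages of all samples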
486
simulators/gem5/util/stats/stats.py
Executable file
@ -0,0 +1,486 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# Copyright (c) 2003-2004 The Regents of The University of Michigan
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met: redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer;
|
||||
# redistributions in binary form must reproduce the above copyright
|
||||
# notice, this list of conditions and the following disclaimer in the
|
||||
# documentation and/or other materials provided with the distribution;
|
||||
# neither the name of the copyright holders nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
# Authors: Nathan Binkert
|
||||
|
||||
import re, sys, math

# value() comes from info.py in this directory; the stability command below uses it
from info import value
|
||||
|
||||
def usage():
|
||||
print '''\
|
||||
Usage: %s [-E] [-F] [ -G <get> ] [-d <db> ] [-g <graphdir> ] [-h <host>] [-p]
|
||||
[-s <system>] [-r <runs> ] [-T <samples>] [-u <username>]
|
||||
<command> [command args]
|
||||
|
||||
commands extra parameters description
|
||||
----------- ------------------ ---------------------------------------
|
||||
formula <formula> Evaluated formula specified
|
||||
formulas [regex] List formulas (only matching regex)
|
||||
runs none List all runs in database
|
||||
samples none List samples present in database
|
||||
stability <pairnum> <stats> Calculated statistical info about stats
|
||||
stat <regex> Show stat data (only matching regex)
|
||||
stats [regex] List all stats (only matching regex)
|
||||
|
||||
database <command> Where command is drop, init, or clean
|
||||
|
||||
''' % sys.argv[0]
|
||||
sys.exit(1)
|
||||
|
||||
def getopts(list, flags):
|
||||
import getopt
|
||||
try:
|
||||
opts, args = getopt.getopt(list, flags)
|
||||
except getopt.GetoptError:
|
||||
usage()
|
||||
|
||||
return opts, args
|
||||
|
||||
class CommandException(Exception):
|
||||
pass
|
||||
|
||||
def commands(options, command, args):
|
||||
if command == 'database':
|
||||
if len(args) == 0: raise CommandException
|
||||
|
||||
import dbinit
|
||||
mydb = dbinit.MyDB(options)
|
||||
|
||||
if args[0] == 'drop':
|
||||
if len(args) > 2: raise CommandException
|
||||
mydb.admin()
|
||||
mydb.drop()
|
||||
if len(args) == 2 and args[1] == 'init':
|
||||
mydb.create()
|
||||
mydb.connect()
|
||||
mydb.populate()
|
||||
mydb.close()
|
||||
return
|
||||
|
||||
if args[0] == 'init':
|
||||
if len(args) > 1: raise CommandException
|
||||
mydb.admin()
|
||||
mydb.create()
|
||||
mydb.connect()
|
||||
mydb.populate()
|
||||
mydb.close()
|
||||
return
|
||||
|
||||
if args[0] == 'clean':
|
||||
if len(args) > 1: raise CommandException
|
||||
mydb.connect()
|
||||
mydb.clean()
|
||||
return
|
||||
|
||||
raise CommandException
|
||||
|
||||
import db
|
||||
source = db.Database()
|
||||
source.host = options.host
|
||||
source.db = options.db
|
||||
source.passwd = options.passwd
|
||||
source.user = options.user
|
||||
source.connect()
|
||||
#source.update_dict(globals())
|
||||
|
||||
if type(options.method) is str:
|
||||
source.method = options.method
|
||||
|
||||
if options.runs is None:
|
||||
runs = source.allRuns
|
||||
else:
|
||||
rx = re.compile(options.runs)
|
||||
runs = []
|
||||
for run in source.allRuns:
|
||||
if rx.match(run.name):
|
||||
runs.append(run)
|
||||
|
||||
if command == 'runs':
|
||||
user = None
|
||||
opts, args = getopts(args, '-u')
|
||||
if len(args):
|
||||
raise CommandException
|
||||
for o,a in opts:
|
||||
if o == '-u':
|
||||
user = a
|
||||
source.listRuns(user)
|
||||
return
|
||||
|
||||
if command == 'stats':
|
||||
if len(args) == 0:
|
||||
source.listStats()
|
||||
elif len(args) == 1:
|
||||
source.listStats(args[0])
|
||||
else:
|
||||
raise CommandException
|
||||
|
||||
return
|
||||
|
||||
if command == 'formulas':
|
||||
if len(args) == 0:
|
||||
source.listFormulas()
|
||||
elif len(args) == 1:
|
||||
source.listFormulas(args[0])
|
||||
else:
|
||||
raise CommandException
|
||||
|
||||
return
|
||||
|
||||
if command == 'samples':
|
||||
if len(args):
|
||||
raise CommandException
|
||||
|
||||
source.listTicks(runs)
|
||||
return
|
||||
|
||||

    if command == 'stability':
        if len(args) < 2:
            raise CommandException

        try:
            merge = int(args[0])
        except ValueError:
            usage()
        stats = source.getStat(args[1])
        source.method = 'sum'

        def disp(*args):
            print "%-35s %12s %12s %4s %5s %5s %5s %10s" % args

        # temporary variable containing a bunch of dashes
        d = '-' * 100

        # loop through all the stats selected
        for stat in stats:
            print "%s:" % stat.name
            disp("run name", "average", "stdev", ">10%", ">1SDV", ">2SDV",
                 "SAMP", "CV")
            disp(d[:35], d[:12], d[:12], d[:4], d[:5], d[:5], d[:5], d[:10])

            # loop through all the selected runs
            for run in runs:
                runTicks = source.retTicks([ run ])
                # throw away the first one, it's 0
                runTicks.pop(0)
                source.ticks = runTicks
                avg = 0
                stdev = 0
                numoutsideavg = 0
                numoutside1std = 0
                numoutside2std = 0
                pairRunTicks = []
                if value(stat, run.run) == 1e300*1e300:
                    continue
                for t in range(0, len(runTicks)-(merge-1), merge):
                    tempPair = []
                    for p in range(0,merge):
                        tempPair.append(runTicks[t+p])
                    pairRunTicks.append(tempPair)
                # loop through all the various ticks for each run
                for tick in pairRunTicks:
                    source.ticks = tick
                    avg += value(stat, run.run)
                avg /= len(pairRunTicks)
                for tick in pairRunTicks:
                    source.ticks = tick
                    val = value(stat, run.run)
                    stdev += pow((val-avg),2)
                stdev = math.sqrt(stdev / len(pairRunTicks))
                for tick in pairRunTicks:
                    source.ticks = tick
                    val = value(stat, run.run)
                    if (val < (avg * .9)) or (val > (avg * 1.1)):
                        numoutsideavg += 1
                    if (val < (avg - stdev)) or (val > (avg + stdev)):
                        numoutside1std += 1
                    if (val < (avg - (2*stdev))) or (val > (avg + (2*stdev))):
                        numoutside2std += 1
                if avg > 1000:
                    disp(run.name, "%.1f" % avg, "%.1f" % stdev,
                         "%d" % numoutsideavg, "%d" % numoutside1std,
                         "%d" % numoutside2std, "%d" % len(pairRunTicks),
                         "%.3f" % (stdev/avg*100))
                elif avg > 100:
                    disp(run.name, "%.1f" % avg, "%.1f" % stdev,
                         "%d" % numoutsideavg, "%d" % numoutside1std,
                         "%d" % numoutside2std, "%d" % len(pairRunTicks),
                         "%.5f" % (stdev/avg*100))
                else:
                    disp(run.name, "%.5f" % avg, "%.5f" % stdev,
                         "%d" % numoutsideavg, "%d" % numoutside1std,
                         "%d" % numoutside2std, "%d" % len(pairRunTicks),
                         "%.7f" % (stdev/avg*100))
        return
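
    # --- Illustrative sketch (not part of the original file) ---------------
    # The 'stability' handler above evaluates a stat over windows of `merge`
    # consecutive sample ticks (method 'sum'), then reports the mean, the
    # standard deviation and the coefficient of variation across windows.
    # The helper below shows the same arithmetic on a plain list of numbers;
    # the name `window_stability` and its arguments are made up for
    # illustration only and nothing calls it.
    def window_stability(values, merge):
        import math
        # non-overlapping windows of `merge` samples, summed per window
        sums = [ sum(values[i:i + merge])
                 for i in range(0, len(values) - (merge - 1), merge) ]
        if not sums:
            return 0.0, 0.0, 0.0
        avg = sum(sums) / float(len(sums))
        stdev = math.sqrt(sum([ (s - avg) ** 2 for s in sums ]) / len(sums))
        if avg:
            cv = stdev / avg * 100
        else:
            cv = 0.0
        return avg, stdev, cv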

    if command == 'all':
        if len(args):
            raise CommandException

        all = [ 'bps', 'misses', 'mpkb', 'ipkb', 'pps', 'bpt' ]
        for command in all:
            commands(options, command, args)

    if options.ticks:
        if not options.graph:
            print 'only displaying sample %s' % options.ticks
        source.ticks = [ int(x) for x in options.ticks.split() ]

    from output import StatOutput
    output = StatOutput(options.jobfile, source)
    output.xlabel = 'System Configuration'
    output.colormap = 'RdYlGn'

    if command == 'stat' or command == 'formula':
        if len(args) != 1:
            raise CommandException

        if command == 'stat':
            stats = source.getStat(args[0])
        if command == 'formula':
            stats = eval(args[0])

        for stat in stats:
            output.stat = stat
            output.ylabel = stat.name
            if options.graph:
                output.graph(stat.name, options.graphdir)
            else:
                output.display(stat.name, options.printmode)

        return

    if len(args):
        raise CommandException

    from info import ProxyGroup
    proxy = ProxyGroup(system = source[options.system])
    system = proxy.system

    etherdev = system.tsunami.etherdev0
    bytes = etherdev.rxBytes + etherdev.txBytes
    kbytes = bytes / 1024
    packets = etherdev.rxPackets + etherdev.txPackets

    def display():
        if options.graph:
            output.graph(command, options.graphdir, proxy)
        else:
            output.display(command, options.printmode)
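
    # Note: `system`, `etherdev`, `bytes` and `packets` above come from the
    # info.ProxyGroup wrapper rather than being plain numbers; the arithmetic
    # on them appears to build derived-stat expressions that are only
    # evaluated per run when display() calls output.graph()/output.display().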

    if command == 'ticks':
        output.stat = system.run0.numCycles

        display()
        return

    if command == 'bytes':
        output.stat = bytes
        display()
        return

    if command == 'packets':
        output.stat = packets
        display()
        return

    if command == 'ppt' or command == 'tpp':
        output.stat = packets / system.run0.numCycles
        output.invert = command == 'tpp'
        display()
        return

    if command == 'pps':
        output.stat = packets / source['sim_seconds']
        output.ylabel = 'Packets/s'
        display()
        return

    if command == 'bpt' or command == 'tpb':
        output.stat = bytes / system.run0.numCycles * 8
        output.ylabel = 'bps / Hz'
        output.invert = command == 'tpb'
        display()
        return

    if command in ('rxbps', 'txbps', 'bps'):
        if command == 'rxbps':
            output.stat = etherdev.rxBandwidth / 1e9
        if command == 'txbps':
            output.stat = etherdev.txBandwidth / 1e9
        if command == 'bps':
            output.stat = (etherdev.rxBandwidth + etherdev.txBandwidth) / 1e9

        output.ylabel = 'Bandwidth (Gbps)'
        output.ylim = [ 0.0, 10.0 ]
        display()
        return

    if command == 'bpp':
        output.stat = bytes / packets
        output.ylabel = 'Bytes / Packet'
        display()
        return

    if command == 'rxbpp':
        output.stat = etherdev.rxBytes / etherdev.rxPackets
        output.ylabel = 'Receive Bytes / Packet'
        display()
        return

    if command == 'txbpp':
        output.stat = etherdev.txBytes / etherdev.txPackets
        output.ylabel = 'Transmit Bytes / Packet'
        display()
        return

    if command == 'rtp':
        output.stat = etherdev.rxPackets / etherdev.txPackets
        output.ylabel = 'rxPackets / txPackets'
        display()
        return

    if command == 'rtb':
        output.stat = etherdev.rxBytes / etherdev.txBytes
        output.ylabel = 'rxBytes / txBytes'
        display()
        return

    misses = system.l2.overall_mshr_misses

    if command == 'misses':
        output.stat = misses
        output.ylabel = 'Overall MSHR Misses'
        display()
        return

    if command == 'mpkb':
        output.stat = misses / (bytes / 1024)
        output.ylabel = 'Misses / KB'
        display()
        return

    if command == 'ipkb':
        interrupts = system.run0.kern.faults[4]
        output.stat = interrupts / kbytes
        output.ylabel = 'Interrupts / KB'
        display()
        return

    if command == 'execute':
        output.stat = system.run0.ISSUE__count
        display()
        return

    if command == 'commit':
        output.stat = system.run0.COM__count
        display()
        return

    if command == 'fetch':
        output.stat = system.run0.FETCH__count
        display()
        return

    raise CommandException

class Options: pass

if __name__ == '__main__':
    import getpass

    options = Options()
    options.host = None
    options.db = None
    options.passwd = ''
    options.user = getpass.getuser()
    options.runs = None
    options.system = 'client'
    options.method = None
    options.graph = False
    options.ticks = False
    options.printmode = 'G'
    jobfilename = None
    options.jobfile = None
    options.all = False

    # Flags parsed below:
    #   -E / -F     select print mode 'E' / 'F'
    #   -a          run the full 'all' command set
    #   -d <db>     database name
    #   -g <dir>    enable graphing, write graphs to <dir>
    #   -h <host>   database server host
    #   -J          forget any jobfile given so far
    #   -j <file>   read host/database defaults from a jobfile
    #   -m <method> stat evaluation method
    #   -p          prompt for the database password
    #   -r <regex>  only consider runs whose name matches <regex>
    #   -s <name>   system to inspect (default 'client')
    #   -T <ticks>  whitespace-separated list of sample ticks to display
    #   -u <user>   database user
    opts, args = getopts(sys.argv[1:], '-EFJad:g:h:j:m:pr:s:u:T:')
    for o,a in opts:
        if o == '-E':
            options.printmode = 'E'
        if o == '-F':
            options.printmode = 'F'
        if o == '-a':
            options.all = True
        if o == '-d':
            options.db = a
        if o == '-g':
            options.graph = True
            options.graphdir = a
        if o == '-h':
            options.host = a
        if o == '-J':
            jobfilename = None
        if o == '-j':
            jobfilename = a
        if o == '-m':
            options.method = a
        if o == '-p':
            options.passwd = getpass.getpass()
        if o == '-r':
            options.runs = a
        if o == '-u':
            options.user = a
        if o == '-s':
            options.system = a
        if o == '-T':
            options.ticks = a

    if jobfilename:
        from jobfile import JobFile
        options.jobfile = JobFile(jobfilename)
        if not options.host:
            options.host = options.jobfile.dbhost
        if not options.db:
            options.db = options.jobfile.statdb

    if not options.host:
        sys.exit('Database server must be provided from a jobfile or -h')

    if not options.db:
        sys.exit('Database name must be provided from a jobfile or -d')

    if len(args) == 0:
        usage()

    command = args[0]
    args = args[1:]

    try:
        commands(options, command, args)
    except CommandException:
        usage()
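
# --- Illustrative usage (not part of the original file) ----------------------
# Hypothetical invocations; the script name, host, database and run names
# below are assumptions made up for the example.
#
#   python stats.py -h dbhost -d m5stats runs               # list available runs
#   python stats.py -h dbhost -d m5stats stats ipkb         # list matching stats
#   python stats.py -h dbhost -d m5stats -r 'nat.*' bps     # bandwidth, runs matching a regex
#   python stats.py -h dbhost -d m5stats stability 5 sim_ticks
#   python stats.py -j test.job -g graphs all               # host/db from a jobfile, graph output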