author    Harsh Prateek Bora <harsh@linux.vnet.ibm.com>    2012-07-18 15:16:00 +0530
committer Stefan Hajnoczi <stefanha@linux.vnet.ibm.com>    2012-07-19 11:34:33 +0100
commit    90a147a275da3a432bdf00238ebf438eff1d2c1b (patch)
tree      c80095b5162b934286fbd44512e5b84d79bc7e53 /scripts
parent    62bab73213ba885426a781eb2741670b9f3cae36 (diff)
Update simpletrace.py for new log format
Support new tracelog format for multiple arguments and strings.

Signed-off-by: Harsh Prateek Bora <harsh@linux.vnet.ibm.com>
Signed-off-by: Stefan Hajnoczi <stefanha@linux.vnet.ibm.com>
Diffstat (limited to 'scripts')
-rwxr-xr-x  scripts/simpletrace.py  116
1 files changed, 75 insertions, 41 deletions
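
For context, the layout the updated script expects from the simple trace backend is: a log header of three 64-bit words (header event id, magic, version), then per record a header of two 64-bit and two 32-bit words followed by the arguments, where a string argument is written as a 32-bit length plus its bytes and every other argument as a 64-bit value. A minimal sketch of packing one such record, matching the '=QQII', '=L' and '=Q' formats read below (the event id, timestamp and argument values are illustrative only):

    import struct

    # Illustrative values; a real log is produced by QEMU's "simple" trace backend.
    event_id     = 0           # index assigned to the event in the events dictionary
    timestamp_ns = 123456789
    rec  = struct.pack('=QQII', event_id, timestamp_ns, 0, 0)  # record header ('=QQII')
    rec += struct.pack('=Q', 42)                               # non-string argument: 64-bit value
    s = b'hello'
    rec += struct.pack('=L', len(s)) + s                       # string argument: 32-bit length + bytes
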
diff --git a/scripts/simpletrace.py b/scripts/simpletrace.py
index f55e5e63f9..9b4419f7c3 100755
--- a/scripts/simpletrace.py
+++ b/scripts/simpletrace.py
@@ -12,53 +12,69 @@
import struct
import re
import inspect
+from tracetool import _read_events, Event
+from tracetool.backend.simple import is_string
header_event_id = 0xffffffffffffffff
header_magic = 0xf2b177cb0aa429b4
-header_version = 0
dropped_event_id = 0xfffffffffffffffe
-trace_fmt = '=QQQQQQQQ'
-trace_len = struct.calcsize(trace_fmt)
-event_re = re.compile(r'(disable\s+)?([a-zA-Z0-9_]+)\(([^)]*)\).*')
+log_header_fmt = '=QQQ'
+rec_header_fmt = '=QQII'
-def parse_events(fobj):
- """Parse a trace-events file into {event_num: (name, arg1, ...)}."""
-
- def get_argnames(args):
- """Extract argument names from a parameter list."""
- return tuple(arg.split()[-1].lstrip('*') for arg in args.split(','))
-
- events = {dropped_event_id: ('dropped', 'count')}
- event_num = 0
- for line in fobj:
- m = event_re.match(line.strip())
- if m is None:
- continue
-
- disable, name, args = m.groups()
- events[event_num] = (name,) + get_argnames(args)
- event_num += 1
- return events
+def read_header(fobj, hfmt):
+ '''Read a trace record header'''
+ hlen = struct.calcsize(hfmt)
+ hdr = fobj.read(hlen)
+ if len(hdr) != hlen:
+ return None
+ return struct.unpack(hfmt, hdr)
-def read_record(fobj):
+def get_record(edict, rechdr, fobj):
"""Deserialize a trace record from a file into a tuple (event_num, timestamp, arg1, ..., arg6)."""
- s = fobj.read(trace_len)
- if len(s) != trace_len:
+ if rechdr is None:
return None
- return struct.unpack(trace_fmt, s)
+ rec = (rechdr[0], rechdr[1])
+ if rechdr[0] != dropped_event_id:
+ event_id = rechdr[0]
+ event = edict[event_id]
+ for type, name in event.args:
+ if is_string(type):
+ l = fobj.read(4)
+ (len,) = struct.unpack('=L', l)
+ s = fobj.read(len)
+ rec = rec + (s,)
+ else:
+ (value,) = struct.unpack('=Q', fobj.read(8))
+ rec = rec + (value,)
+ else:
+ (value,) = struct.unpack('=Q', fobj.read(8))
+ rec = rec + (value,)
+ return rec
+
+
+def read_record(edict, fobj):
+ """Deserialize a trace record from a file into a tuple (event_num, timestamp, arg1, ..., arg6)."""
+ rechdr = read_header(fobj, rec_header_fmt)
+ return get_record(edict, rechdr, fobj) # return tuple of record elements
-def read_trace_file(fobj):
+def read_trace_file(edict, fobj):
"""Deserialize trace records from a file, yielding record tuples (event_num, timestamp, arg1, ..., arg6)."""
- header = read_record(fobj)
+ header = read_header(fobj, log_header_fmt)
if header is None or \
header[0] != header_event_id or \
- header[1] != header_magic or \
- header[2] != header_version:
- raise ValueError('not a trace file or incompatible version')
+ header[1] != header_magic:
+ raise ValueError('Not a valid trace file!')
+ if header[2] != 0 and \
+ header[2] != 2:
+ raise ValueError('Unknown version of tracelog format!')
+
+ log_version = header[2]
+ if log_version == 0:
+ raise ValueError('Older log format, not supported with this Qemu release!')
while True:
- rec = read_record(fobj)
+ rec = read_record(edict, fobj)
if rec is None:
break
@@ -89,16 +105,29 @@ class Analyzer(object):
def process(events, log, analyzer):
"""Invoke an analyzer on each event in a log."""
if isinstance(events, str):
- events = parse_events(open(events, 'r'))
+ events = _read_events(open(events, 'r'))
if isinstance(log, str):
log = open(log, 'rb')
+ enabled_events = []
+ dropped_event = Event.build("Dropped_Event(uint64_t num_events_dropped)")
+ edict = {dropped_event_id: dropped_event}
+
+ for e in events:
+ if 'disable' not in e.properties:
+ enabled_events.append(e)
+ for num, event in enumerate(enabled_events):
+ edict[num] = event
+
def build_fn(analyzer, event):
- fn = getattr(analyzer, event[0], None)
+ if isinstance(event, str):
+ return analyzer.catchall
+
+ fn = getattr(analyzer, event.name, None)
if fn is None:
return analyzer.catchall
- event_argcount = len(event) - 1
+ event_argcount = len(event.args)
fn_argcount = len(inspect.getargspec(fn)[0]) - 1
if fn_argcount == event_argcount + 1:
# Include timestamp as first argument
@@ -109,9 +138,9 @@ def process(events, log, analyzer):
analyzer.begin()
fn_cache = {}
- for rec in read_trace_file(log):
+ for rec in read_trace_file(edict, log):
event_num = rec[0]
- event = events[event_num]
+ event = edict[event_num]
if event_num not in fn_cache:
fn_cache[event_num] = build_fn(analyzer, event)
fn_cache[event_num](event, rec)
@@ -128,7 +157,7 @@ def run(analyzer):
sys.stderr.write('usage: %s <trace-events> <trace-file>\n' % sys.argv[0])
sys.exit(1)
- events = parse_events(open(sys.argv[1], 'r'))
+ events = _read_events(open(sys.argv[1], 'r'))
process(events, sys.argv[2], analyzer)
if __name__ == '__main__':
@@ -137,15 +166,20 @@ if __name__ == '__main__':
self.last_timestamp = None
def catchall(self, event, rec):
+ i = 1
timestamp = rec[1]
if self.last_timestamp is None:
self.last_timestamp = timestamp
delta_ns = timestamp - self.last_timestamp
self.last_timestamp = timestamp
- fields = [event[0], '%0.3f' % (delta_ns / 1000.0)]
- for i in xrange(1, len(event)):
- fields.append('%s=0x%x' % (event[i], rec[i + 1]))
+ fields = [event.name, '%0.3f' % (delta_ns / 1000.0)]
+ for type, name in event.args:
+ if is_string(type):
+ fields.append('%s=%s' % (name, rec[i + 1]))
+ else:
+ fields.append('%s=0x%x' % (name, rec[i + 1]))
+ i += 1
print ' '.join(fields)
run(Formatter())
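
As before, the script can either be run directly on a trace-events file and a trace file (usage: simpletrace.py <trace-events> <trace-file>), or imported to drive a custom Analyzer whose methods are named after trace events. A rough sketch, assuming a trace-events entry such as qemu_malloc(size_t size, void *ptr); the event name and handler here are illustrative only:

    from simpletrace import Analyzer, run

    class MallocAnalyzer(Analyzer):
        # Method named after the trace event; because it takes one more argument
        # than the event defines, the timestamp is passed as the first argument.
        def qemu_malloc(self, timestamp, size, ptr):
            print '%d: malloc %d bytes -> 0x%x' % (timestamp, size, ptr)

    if __name__ == '__main__':
        run(MallocAnalyzer())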