premiere-libtorrent/parse_session_stats.py

#! /usr/bin/env python
# Copyright Arvid Norberg 2008. Use, modification and distribution is
# subject to the Boost Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
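
# Generates an HTML report of graphs from a libtorrent session_stats log.
# For each counter group in 'reports' below, a gnuplot script is written
# and rendered into a full-size PNG and a thumbnail, and an index.html
# links them all together. Run it with the first generation's log file as
# the argument (the exact naming is an assumption based on the parsing
# below, which treats the second dot-separated field of the file name as
# a generation number), e.g.:
#
#   ./parse_session_stats.py session_stats/<name>.0000.log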
import os, sys
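
# scan the log for its header line: a colon-separated list of counter
# names led by 'second'; keys[i] holds the name of log file column i + 2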
stat = open(sys.argv[1])
line = stat.readline()
while 'second:' not in line:
    line = stat.readline()

keys = line.strip().split(':')[1:]
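
# axis assignment for the first few counters, indexed by the counter's
# position in the header; 'x1y2' puts a curve on the secondary (right)
# y axis, and counters past the end of this list default to 'x1y1'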
axes = ['x1y2', 'x1y2', 'x1y1', 'x1y1', 'x1y1', 'x1y1', 'x1y1', 'x1y1', 'x1y2']

output_dir = 'session_stats_report'

def gen_report(name, unit, lines, generation, log_file):
    try:
        os.mkdir(output_dir)
    except: pass

    filename = os.path.join(output_dir, 'session_stats_%s_%04d.png' % (name, generation))
    thumb = os.path.join(output_dir, 'session_stats_%s_%04d_thumb.png' % (name, generation))

    # don't re-render a graph unless the logfile has changed
    try:
        dst1 = os.stat(filename)
        dst2 = os.stat(thumb)
        src = os.stat(log_file)
        if dst1.st_mtime > src.st_mtime and dst2.st_mtime > src.st_mtime:
            sys.stdout.write('.')
            return
    except: pass
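
    # write a gnuplot script that plots one curve per counter in 'lines',
    # rendering the full-size graph into 'filename'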
    out = open('session_stats.gnuplot', 'wb')
    print >>out, "set term png size 1200,700"
    print >>out, 'set output "%s"' % filename
    print >>out, 'set xrange [0:*]'
    print >>out, 'set xlabel "time (s)"'
    print >>out, 'set ylabel "%s"' % unit
    print >>out, 'set yrange [0:*]'
    print >>out, "set tics nomirror"
    print >>out, "set style data lines"
    print >>out, "set key box"
    print >>out, 'plot',
    column = 2
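
    # the plot command is built up piecewise: the trailing commas on these
    # prints keep every curve on the same gnuplot line, and 'first' controls
    # the comma separators between curves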
    first = True
    for k in lines:
        try:
            column = keys.index(k) + 2
        except:
            print '"%s" not found' % k
            continue
        if not first: print >>out, ', ',
        axis = 'x1y1'
        if column - 2 < len(axes): axis = axes[column - 2]
        print >>out, ' "%s" using 1:%d title "%s" axes %s with steps' % (log_file, column, k, axis),
        first = False
        column = column + 1
    print >>out, ''
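
    # re-render the same plot as a thumbnail: shrink the canvas, strip the
    # key, tics, labels and margins, and replot into 'thumb'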
print >>out, "set term png size 150,100"
print >>out, 'set output "%s"' % thumb
print >>out, 'set key off'
print >>out, 'unset tics'
print >>out, 'set format x ""'
print >>out, 'set format y ""'
print >>out, 'set xlabel ""'
print >>out, 'set ylabel ""'
print >>out, 'set y2label ""'
2011-03-20 22:05:49 +01:00
print >>out, 'set rmargin 0'
print >>out, 'set lmargin 0'
print >>out, 'set tmargin 0'
print >>out, 'set bmargin 0'
print >>out, "replot"
    out.close()

    os.system('gnuplot session_stats.gnuplot 2>/dev/null')
    sys.stdout.write('.')
    sys.stdout.flush()
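
# write index.html: a heading per report, followed by a row of linked
# thumbnails, one per generation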
def gen_html(reports, generations):
    file = open(os.path.join(output_dir, 'index.html'), 'w+')

    css = '''img { margin: 0}
#head { display: block }
#graphs { white-space:nowrap; }
h1 { line-height: 1; display: inline }
h2 { line-height: 1; display: inline; font-size: 1em; font-weight: normal }'''

    print >>file, '<html><head><style type="text/css">%s</style></head><body>' % css
    for i in reports:
        print >>file, '<div id="head"><h1>%s </h1><h2>%s</h2></div><div id="graphs">' % (i[0], i[2])
        for g in generations:
            print >>file, '<a href="session_stats_%s_%04d.png"><img src="session_stats_%s_%04d_thumb.png"></a>' % (i[0], g, i[0], g)
        print >>file, '</div>'

    print >>file, '</body></html>'
    file.close()
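
# each report is a tuple of (name, unit, description, counter names); the
# counter names must match fields in the log header to be plotted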
reports = [
    ('torrents', 'num', 'number of torrents in different torrent states', ['downloading torrents', 'seeding torrents', 'checking torrents', 'stopped torrents', 'upload-only torrents', 'error torrents']),
    ('peers', 'num', 'num connected peers', ['peers', 'connecting peers', 'connection attempts', 'banned peers', 'max connections']),
    ('peers_list_size', 'num', 'number of known peers (not necessarily connected)', ['num list peers']),
    ('overall_rates', 'Bytes / second', 'download and upload rates', ['upload rate', 'download rate', 'smooth upload rate', 'smooth download rate']),
    ('disk_write_queue', 'Bytes', 'bytes queued up by peers, to be written to disk', ['disk write queued bytes', 'disk queue limit', 'disk queue low watermark']),
    ('peers_upload', 'num', 'number of peers by state wrt. uploading', ['peers up interested', 'peers up unchoked', 'peers up requests', 'peers disk-up', 'peers bw-up']),
    ('peers_download', 'num', 'number of peers by state wrt. downloading', ['peers down interesting', 'peers down unchoked', 'peers down requests', 'peers disk-down', 'peers bw-down']),
    ('peer_errors', 'num', 'number of peers by error that disconnected them', ['error peers', 'peer disconnects', 'peers eof', 'peers connection reset', 'connect timeouts', 'uninteresting peers disconnect', 'banned for hash failure']),
    ('waste', '% of all downloaded bytes', 'proportion of all downloaded bytes that were wasted', ['% failed payload bytes', '% wasted payload bytes', '% protocol bytes']),
    ('average_disk_time_absolute', 'microseconds', 'running averages of timings of disk operations', ['disk read time', 'disk write time', 'disk queue time', 'disk hash time', 'disk job time', 'disk sort time']),
    ('disk_time', '% of total disk job time', 'proportion of time spent by the disk thread', ['% read time', '% write time', '% hash time', '% sort time']),
    ('disk_cache_hits', 'blocks (16kiB)', '', ['disk block read', 'read cache hits', 'disk block written', 'disk read back']),
    ('disk_cache', 'blocks (16kiB)', 'disk cache size and usage', ['read disk cache size', 'disk cache size', 'disk buffer allocations', 'cache size']),
    ('disk_readback', '% of written blocks', 'portion of written blocks that had to be read back for hash verification', ['% read back']),
    ('disk_queue', 'number of queued disk jobs', 'queued disk jobs', ['disk queue size', 'disk read queue size']),
#   ('absolute_waste', 'num', '', ['failed bytes', 'redundant bytes', 'download rate']),
    ('connect_candidates', 'num', 'number of peers we know of that we can connect to', ['connect candidates']),

    # somewhat uninteresting stats
    ('peer_dl_rates', 'num', 'peers split into download rate buckets', ['peers down 0', 'peers down 0-2', 'peers down 2-5', 'peers down 5-10', 'peers down 50-100', 'peers down 100-']),
    ('peer_dl_rates2', 'num', 'peers split into download rate buckets (only downloading peers)', ['peers down 0-2', 'peers down 2-5', 'peers down 5-10', 'peers down 50-100', 'peers down 100-']),
    ('peer_ul_rates', 'num', 'peers split into upload rate buckets', ['peers up 0', 'peers up 0-2', 'peers up 2-5', 'peers up 5-10', 'peers up 50-100', 'peers up 100-']),
    ('peer_ul_rates2', 'num', 'peers split into upload rate buckets (only uploading peers)', ['peers up 0-2', 'peers up 2-5', 'peers up 5-10', 'peers up 50-100', 'peers up 100-']),
    ('piece_picker_end_game', 'blocks', '', ['end game piece picker blocks', 'piece picker blocks', 'piece picks', 'reject piece picks', 'unchoke piece picks', 'incoming redundant piece picks', 'incoming piece picks', 'end game piece picks', 'snubbed piece picks']),
    ('piece_picker', 'blocks', '', ['piece picks', 'reject piece picks', 'unchoke piece picks', 'incoming redundant piece picks', 'incoming piece picks', 'end game piece picks', 'snubbed piece picks']),
]

print 'generating graphs'
log_file_path, log_file = os.path.split(sys.argv[1])
# the generation number is the second dot-separated field of the file
# name; render the graphs for each consecutive generation of log files
log_file_list = log_file.split('.')
g = int(log_file_list[1])
generations = []
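
# progress display: print an empty bracket and the generation number, then
# carriage-return back into the bracket so the dots gen_report() writes
# fill it in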
while os.path.exists(os.path.join(log_file_path, log_file)):
    print '[%s] %04d\r[' % (' ' * len(reports), g),
    for i in reports: gen_report(i[0], i[1], i[3], g, os.path.join(log_file_path, log_file))
    print ''
    generations.append(g)
    g += 1
    log_file_list[1] = '%04d' % g
    log_file = '.'.join(log_file_list)

print 'generating html'
gen_html(reports, generations)