add a bunch more stats logged to session stats

Arvid Norberg 2011-03-17 05:31:06 +00:00
parent 9fafd2f3e6
commit 0b25b87593
5 changed files with 51 additions and 11 deletions

View File

@@ -899,6 +899,9 @@ namespace libtorrent
     size_type m_last_redundant;
     size_type m_last_uploaded;
     size_type m_last_downloaded;
+    int m_connection_attempts;
+    int m_num_banned_peers;
+    int m_banned_for_hash_failure;
 #endif
     // each second tick the timer takes a little

View File

@@ -30,10 +30,12 @@ def gen_report(name, lines):
     print >>out, 'plot',
     column = 2
     first = True
-    for k in keys:
-        if k not in lines:
-            column = column + 1
-            continue
+    for k in lines:
+        try:
+            column = keys.index(k) + 2
+        except:
+            print '"%s" not found' % k
+            continue;
         if not first: print >>out, ', ',
         axis = 'x1y1'
         if column-2 < len(axes): axis = axes[column-2]
@@ -45,7 +47,7 @@ def gen_report(name, lines):
     os.system('gnuplot session_stats_%s.gnuplot' % name);
 gen_report('torrents', ['downloading torrents', 'seeding torrents', 'checking torrents', 'stopped torrents', 'upload-only torrents', 'error torrents'])
-gen_report('peers', ['peers', 'connecting peers', 'unchoked peers', 'peers disk-up', 'peers disk-down', 'peers bw-up', 'peers bw-down'])
+gen_report('peers', ['peers', 'connecting peers', 'connection attempts', 'banned peers', 'max connections'])
 gen_report('peers_list', ['num list peers', 'peer storage bytes'])
 gen_report('overall_rates', ['upload rate', 'download rate', 'smooth upload rate', 'smooth download rate'])
 gen_report('peer_dl_rates', ['peers down 0', 'peers down 0-2', 'peers down 2-5', 'peers down 5-10', 'peers down 50-100', 'peers down 100-'])
@@ -55,13 +57,14 @@ gen_report('peer_ul_rates2', ['peers up 0-2', 'peers up 2-5', 'peers up 5-10', '
 gen_report('disk', ['disk write queued bytes', 'disk block buffers'])
 gen_report('peers_upload', ['peers up interested', 'peers up unchoked', 'peers up requests', 'peers disk-up', 'peers bw-up'])
 gen_report('peers_download', ['peers down interesting', 'peers down unchoked', 'peers down requests', 'peers disk-down', 'peers bw-down'])
-gen_report('peer_errors', ['error peers', 'peer disconnects', 'peers eof', 'peers connection reset', 'connect timeouts', 'uninteresting peers disconnect'])
-gen_report('piece_picker_end_game', ['end game piece picker blocks', 'strict end game piece picker blocks', 'piece picker blocks', 'piece picks', 'reject piece picks', 'unchoked piece picks', 'incoming redundant piece picks', 'incoming piece picks', 'end game piece picks', 'snubbed piece picks'])
-gen_report('piece_picker', ['piece picks', 'reject piece picks', 'unchoked piece picks', 'incoming redundant piece picks', 'incoming piece picks', 'end game piece picks', 'snubbed piece picks'])
+gen_report('peer_errors', ['error peers', 'peer disconnects', 'peers eof', 'peers connection reset', 'connect timeouts', 'uninteresting peers disconnect', 'banned for hash failure'])
+gen_report('piece_picker_end_game', ['end game piece picker blocks', 'piece picker blocks', 'piece picks', 'reject piece picks', 'unchoke piece picks', 'incoming redundant piece picks', 'incoming piece picks', 'end game piece picks', 'snubbed piece picks'])
+gen_report('piece_picker', ['piece picks', 'reject piece picks', 'unchoke piece picks', 'incoming redundant piece picks', 'incoming piece picks', 'end game piece picks', 'snubbed piece picks'])
 gen_report('bandwidth', ['% failed payload bytes', '% wasted payload bytes', '% protocol bytes'])
 gen_report('disk_time', ['disk read time', 'disk write time', 'disk queue time', 'disk hash time', 'disk cache time', 'disk sort time'])
 gen_report('disk_cache_hits', ['disk block read', 'read cache hits', 'disk block written'])
-gen_report('disk_cache', ['read disk cache size', 'disk cache size', 'disk buffer allocations'])
+gen_report('disk_cache', ['read disk cache size', 'disk cache size', 'disk buffer allocations', 'cache size'])
 gen_report('disk_queue', ['disk queue size', 'disk queued bytes'])
 gen_report('waste', ['failed bytes', 'redundant bytes', 'download rate'])
+gen_report('connect_candidates', ['connect candidates'])
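
The rewritten loop in the first hunk of this script no longer walks every known stat name while counting columns; it iterates only the requested names and derives each gnuplot column directly from the header key list as keys.index(k) + 2 (column 1 holds the elapsed-time field used as the x axis), printing a warning for any name it cannot find. A minimal sketch of that lookup, with a made-up keys list standing in for the parsed header:

    # Sketch of the column lookup gen_report relies on; the 'keys' list here
    # is invented, in the script it comes from the stats log header.
    keys = ['upload rate', 'download rate', 'banned peers', 'connection attempts']

    def columns_for(names):
        cols = []
        for k in names:
            try:
                # gnuplot columns are 1-based and column 1 is the time axis,
                # so the first stat name maps to column 2
                cols.append((k, keys.index(k) + 2))
            except ValueError:
                print('"%s" not found' % k)
        return cols

    print(columns_for(['banned peers', 'no such stat']))
    # [('banned peers', 4)], plus a warning for the unknown name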

View File

@@ -519,6 +519,11 @@ namespace libtorrent
         if (is_connect_candidate(*p, m_finished))
             --m_num_connect_candidates;
+#ifdef TORRENT_STATS
+        aux::session_impl& ses = m_torrent->session();
+        ++ses.m_num_banned_peers;
+#endif
         p->banned = true;
         TORRENT_ASSERT(!is_connect_candidate(*p, m_finished));
     }

View File

@@ -822,6 +822,9 @@ namespace aux {
         m_last_redundant = 0;
         m_last_uploaded = 0;
         m_last_downloaded = 0;
+        m_connection_attempts = 0;
+        m_num_banned_peers = 0;
+        m_banned_for_hash_failure = 0;
         rotate_stats_log();
 #endif
 #ifdef TORRENT_DISK_STATS
@@ -909,7 +912,7 @@ namespace aux {
             ":smooth upload rate:smooth download rate:disk write queued bytes"
             ":peers down 0:peers down 0-2:peers down 2-5:peers down 5-10:peers down 10-50"
             ":peers down 50-100:peers down 100-"
-            ":peers up 0:peers up 0-2:peers up 2-5:peers up 5-10:peers up 10-50: peers up 50-100"
+            ":peers up 0:peers up 0-2:peers up 2-5:peers up 5-10:peers up 10-50:peers up 50-100"
             ":peers up 100-:error peers"
             ":peers down interesting:peers down unchoked:peers down requests"
             ":peers up interested:peers up unchoked:peers up requests"
@@ -948,6 +951,12 @@ namespace aux {
             ":disk hash time"
             ":disk cache time"
             ":disk sort time"
+            ":connection attempts"
+            ":banned peers"
+            ":banned for hash failure"
+            ":cache size"
+            ":max connections"
+            ":connect candidates"
             "\n\n", m_stats_logger);
     }
 #endif
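
The header written here is a single colon-separated line of column names, and the data rows printed further down are tab-separated in the same order, so names and fields can be matched up positionally; this is also where the six new columns (connection attempts, banned peers, banned for hash failure, cache size, max connections, connect candidates) are declared. A small sketch of pairing header and rows, with 'session_stats.log' as a placeholder path (the real file name is chosen by rotate_stats_log()):

    # Sketch: pair the colon-separated header with the tab-separated rows.
    # 'session_stats.log' is a placeholder, not the actual log path.
    def read_stats(path):
        rows = []
        with open(path) as f:
            keys = f.readline().strip().split(':')
            for line in f:
                fields = line.strip().split('\t')
                if len(fields) < len(keys):
                    continue  # blank separator or truncated row
                rows.append(dict(zip(keys, fields)))
        return rows

    for r in read_stats('session_stats.log'):
        print('%s banned peers, %s connection attempts'
            % (r['banned peers'], r['connection attempts']))
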
@@ -2599,6 +2608,7 @@ namespace aux {
         }
 #ifdef TORRENT_STATS
+        int connect_candidates = 0;
         int downloading_torrents = 0;
         int seeding_torrents = 0;
         int checking_torrents = 0;
@@ -2623,6 +2633,9 @@ namespace aux {
         for (torrent_map::iterator i = m_torrents.begin()
             , end(m_torrents.end()); i != end; ++i)
         {
+            int connection_slots = (std::max)(i->second->max_connections() - i->second->num_peers(), 0);
+            int candidates = i->second->get_policy().num_connect_candidates();
+            connect_candidates += (std::min)(candidates, connection_slots);
             num_peers += i->second->get_policy().num_peers();
             if (i->second->is_seed())
                 ++seeding_torrents;
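
The new connect candidates figure is not a plain sum over torrents: each torrent's candidate count is first clamped to the connection slots it actually has left (max_connections minus current peers, floored at zero), and only then added up. A small illustration of that clamping with invented per-torrent numbers:

    # Illustration of the clamping above; the per-torrent numbers are made up.
    torrents = [
        # (max_connections, num_peers, connect_candidates)
        (50, 48, 200),   # nearly full: only 2 free slots count
        (100, 10, 5),    # few candidates: limited by the candidate count
        (30, 40, 80),    # already over its limit: contributes nothing
    ]

    connect_candidates = 0
    for max_conn, peers, candidates in torrents:
        slots = max(max_conn - peers, 0)
        connect_candidates += min(candidates, slots)

    print(connect_candidates)  # 2 + 5 + 0 = 7
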
@@ -2717,7 +2730,8 @@ namespace aux {
             "%d\t%d\t%d\t%d\t%d\t%d\t%d\t%d\t%d\t%d\t"
             "%d\t%d\t%d\t%d\t%d\t%d\t%d\t%d\t%d\t%d\t"
             "%f\t%f\t%f\t%d\t%d\t%d\t%d\t%d\t%d\t%d\t"
-            "%d\t%d\t%d\t%d\t%d\t%d\t%d\t%d\t%d\t%d\n"
+            "%d\t%d\t%d\t%d\t%d\t%d\t%d\t%d\t%d\t%d\t"
+            "%d\t%d\t%d\t%d\t%d\t%d\n"
             , total_milliseconds(now - m_last_log_rotation) / 1000.f
             , int(m_stat.total_upload() - m_last_uploaded)
             , int(m_stat.total_download() - m_last_downloaded)
@@ -2798,6 +2812,12 @@ namespace aux {
             , int(cs.average_hash_time)
             , int(cs.average_cache_time)
             , int(cs.average_sort_time)
+            , m_connection_attempts
+            , m_num_banned_peers
+            , m_banned_for_hash_failure
+            , m_settings.cache_size
+            , m_settings.connections_limit
+            , connect_candidates
             );
         m_last_cache_status = cs;
         m_last_failed = m_total_failed_bytes;
@@ -2822,6 +2842,9 @@ namespace aux {
         m_connect_timeouts = 0;
         m_uninteresting_peers = 0;
         m_timeout_peers = 0;
+        m_connection_attempts = 0;
+        m_num_banned_peers = 0;
+        m_banned_for_hash_failure = 0;
 #endif
         // --------------------------------------------------------------
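
Like the other TORRENT_STATS counters reset in this block, the three new ones are zeroed immediately after a row has been written, so each logged value is a per-interval count rather than a running total. Turning such a column into a per-second rate only needs the time delta between consecutive rows; the first field of every row is the elapsed seconds since the last log rotation. A sketch with invented samples:

    # Sketch: per-interval counter -> per-second rate. 'samples' is invented:
    # (seconds since log rotation, value of the 'connection attempts' column).
    samples = [(1.0, 12), (2.0, 9), (3.5, 21)]

    prev_t = 0.0
    for t, count in samples:
        dt = t - prev_t
        if dt > 0:
            print('%.1f s: %.1f connection attempts/s' % (t, count / dt))
        prev_t = t
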
@@ -3118,6 +3141,9 @@ namespace aux {
                 --max_connections;
                 --free_slots;
                 steps_since_last_connect = 0;
+#ifdef TORRENT_STATS
+                ++m_connection_attempts;
+#endif
             }
         }
         TORRENT_CATCH(std::bad_alloc&)

View File

@@ -2931,6 +2931,9 @@ namespace libtorrent
         // mark the peer as banned
         m_policy.ban_peer(p);
+#ifdef TORRENT_STATS
+        ++m_ses.m_banned_for_hash_failure;
+#endif
         if (p->connection)
         {
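
Since this increment happens right after m_policy.ban_peer(p), which itself bumps m_num_banned_peers, a ban triggered by a failed piece hash shows up in both new columns; 'banned for hash failure' is the subset of 'banned peers' caused by hash failures. For instance, the share of bans due to hash failures could be estimated from the log like this, reusing the hypothetical read_stats() sketch above:

    # Sketch: what fraction of bans were triggered by hash failures.
    # Relies on the hypothetical read_stats() helper shown earlier.
    def hash_failure_share(rows):
        banned = sum(int(r['banned peers']) for r in rows)
        hash_failed = sum(int(r['banned for hash failure']) for r in rows)
        return float(hash_failed) / banned if banned else 0.0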