Clean-up python code (#3075)

lint and enforce python code formatting
This commit is contained in:
Andrew Resch 2018-06-12 02:43:13 -07:00 committed by Arvid Norberg
parent a2ea79de4e
commit 596d98cac4
34 changed files with 4016 additions and 3435 deletions

View File

@ -5,7 +5,7 @@ matrix:
fast_finish: true
include:
- if: repo = arvidn/libtorrent
env: variant=release sonar_scan=1 toolset=gcc
env: variant=release sonar_scan=1 toolset=gcc pylint=1
- env: variant=test_debug lint=1 tests=1 toolset=gcc-sanitizer
- env: variant=test_debug sim=1 crypto=openssl toolset=gcc-sanitizer
- env: variant=test_release coverage=1 tests=1 toolset=gcc-coverage python=1
@ -46,6 +46,7 @@ addons:
- python2.7-dev
- g++-5
- [cmake3, ninja-build]
- python3-pip
before_install:
@ -79,7 +80,6 @@ before_install:
- ulimit -a
install:
- touch ~/user-config.jam
- 'if [[ $toolset == "gcc" ]]; then
g++-5 --version;
@ -130,7 +130,12 @@ install:
- 'echo "using python : 2.7 ;" >> ~/user-config.jam'
- if [ "$docs" == "1" ]; then rst2html.py --version; fi
- 'if [ "$lint" == "1" ]; then curl "https://raw.githubusercontent.com/google/styleguide/71ec7f1e524969c19ce33cfc72e8e023f2b98ee2/cpplint/cpplint.py" >~/cpplint.py; fi'
- 'if [ "$pylint" == "1" ]; then
sudo pip install flake8;
flake8 --version;
sudo pip3 install flake8;
python3 -m flake8 --version;
fi'
- 'if [ $sonar_scan == "1" ]; then
wget https://sonarsource.bintray.com/Distribution/sonar-scanner-cli/sonar-scanner-2.6.1.zip;
wget https://sonarqube.com/static/cpp/build-wrapper-linux-x86.zip;
@ -183,7 +188,10 @@ script:
- 'if [ "$lint" == "1" ]; then
python ~/cpplint.py --extensions=cpp --headers=hpp --filter=-,+runtime/explicit,+whitespace/end_of_line --linelength=90 test/*.{cpp,hpp} src/*.cpp include/libtorrent/*.hpp include/libtorrent/kademlia/*.hpp src/kademlia/*.cpp include/libtorrent/aux_/*.hpp include/libtorrent/extensions/*.hpp simulation/*.{cpp,hpp} tools/*.{cpp,hpp} examples/*.{cpp,hpp};
fi'
- 'if [ "$pylint" == "1" ]; then
flake8 --max-line-length=120;
python3 -m flake8 --max-line-length=120;
fi'
- 'if [ "$sonar_scan" == "1" ]; then
build-wrapper-linux-x86-64 --out-dir bw-output bjam -a -j3 optimization=off crypto=$crypto deprecated-functions=off $toolset variant=$variant -l300 &&
sonar-scanner -D sonar.login=$SONAR_TOKEN;

View File

@ -3,7 +3,7 @@
# Copyright Daniel Wallin 2006. Use, modification and distribution is
# subject to the Boost Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
from __future__ import print_function
import sys
import atexit
@ -153,6 +153,7 @@ def print_download_queue(console, download_queue):
write_line(console, out)
def add_torrent(ses, filename, options):
atp = lt.add_torrent_params()
if filename.startswith('magnet:'):
@ -160,8 +161,8 @@ def add_torrent(ses, filename, options):
else:
atp.ti = lt.torrent_info(filename)
try:
at.resume_data = open(os.path.join(options.save_path, info.name() + '.fastresume'), 'rb').read()
except:
atp.resume_data = open(os.path.join(options.save_path, atp.info.name() + '.fastresume'), 'rb').read()
except BaseException:
pass
atp.save_path = options.save_path
@ -171,6 +172,7 @@ def add_torrent(ses, filename, options):
| lt.torrent_flags.duplicate_is_error
ses.async_add_torrent(atp)
def main():
from optparse import OptionParser
@ -225,12 +227,13 @@ def main():
if options.max_download_rate <= 0:
options.max_download_rate = -1
settings = { 'user_agent': 'python_client/' + lt.__version__,
settings = {
'user_agent': 'python_client/' + lt.__version__,
'listen_interfaces': '%s:%d' % (options.listen_interface, options.port),
'download_rate_limit': int(options.max_download_rate),
'upload_rate_limit': int(options.max_upload_rate),
'alert_mask': lt.alert.category_t.all_categories,
'outgoing_interfaces' : options.outgoing_interface
'outgoing_interfaces': options.outgoing_interface,
}
if options.proxy_host != '':
@ -258,16 +261,16 @@ def main():
out = ''
for h,t in torrents.items():
for h, t in torrents.items():
out += 'name: %-40s\n' % t.name[:40]
if t.state != lt.torrent_status.seeding:
state_str = ['queued', 'checking', 'downloading metadata', \
'downloading', 'finished', 'seeding', \
state_str = ['queued', 'checking', 'downloading metadata',
'downloading', 'finished', 'seeding',
'allocating', 'checking fastresume']
out += state_str[t.state] + ' '
out += '%5.4f%% ' % (t.progress*100)
out += '%5.4f%% ' % (t.progress * 100)
out += progress_bar(t.progress, 49)
out += '\n'
@ -300,7 +303,7 @@ def main():
out += progress_bar(p / float(f.size), 20)
out += ' ' + f.path + '\n'
write_line(console, out)
except:
except BaseException:
pass
write_line(console, 76 * '-' + '\n')
@ -312,7 +315,7 @@ def main():
alerts_log.append(a.message())
# add new torrents to our list of torrent_status
if type(a) == lt.add_torrent_alert:
if isinstance(a, lt.add_torrent_alert):
h = a.handle
h.set_max_connections(60)
h.set_max_uploads(-1)
@ -320,7 +323,7 @@ def main():
# update our torrent_status array for torrents that have
# changed some of their state
if type(a) == lt.state_update_alert:
if isinstance(a, lt.state_update_alert):
for s in a.status:
torrents[s.handle] = s
@ -333,19 +336,23 @@ def main():
c = console.sleep_and_input(0.5)
ses.post_torrent_updates()
if not c: continue
if not c:
continue
if c == 'r':
for h in torrents.keys(): h.force_reannounce()
for h in torrents:
h.force_reannounce()
elif c == 'q':
alive = False
elif c == 'p':
for h in torrents.keys(): h.pause()
for h in torrents:
h.pause()
elif c == 'u':
for h in torrents.keys(): h.resume()
for h in torrents:
h.resume()
ses.pause()
for h,t in torrents.items():
for h, t in torrents.items():
if not h.is_valid() or not t.has_metadata:
continue
h.save_resume_data()
@ -353,7 +360,7 @@ def main():
while len(torrents) > 0:
alerts = ses.pop_alerts()
for a in alerts:
if type(a) == lt.save_resume_data_alert:
if isinstance(a, lt.save_resume_data_alert):
print(a)
data = lt.write_resume_data_buf(a.params)
h = a.handle
@ -361,11 +368,12 @@ def main():
open(os.path.join(options.save_path, torrents[h].name + '.fastresume'), 'wb').write(data)
del torrents[h]
if type(a) == lt.save_resume_data_failed_alert:
if isinstance(a, lt.save_resume_data_failed_alert):
h = a.handle
if h in torrents:
print('failed to save resume data for ', torrents[h].name)
del torrents[h]
time.sleep(0.5)
main()

View File

@ -1,5 +1,5 @@
#!/usr/bin/env python
from __future__ import print_function
import sys
import os
@ -22,8 +22,8 @@ parent_input = os.path.split(input)[0]
# if we have a single file, use it because os.walk does not work on a single files
if os.path.isfile(input):
size = os.path.getsize(input)
fs.add_file(input, size)
size = os.path.getsize(input)
fs.add_file(input, size)
for root, dirs, files in os.walk(input):
# skip directories starting with .
@ -39,7 +39,7 @@ for root, dirs, files in os.walk(input):
if f == 'Thumbs.db':
continue
fname = os.path.join(root[len(parent_input)+1:], f)
fname = os.path.join(root[len(parent_input) + 1:], f)
size = os.path.getsize(os.path.join(parent_input, fname))
print('%10d kiB %s' % (size / 1024, fname))
fs.add_file(fname, size)

View File

@ -1,5 +1,5 @@
#!/usr/bin/env python
from __future__ import print_function
from distutils.core import setup, Extension
from distutils.sysconfig import get_config_vars
@ -55,13 +55,13 @@ def target_specific():
try:
with open('compile_flags') as _file:
extra_cmd = _file.read()
except:
except BaseException:
extra_cmd = None
try:
with open('link_flags') as _file:
ldflags = _file.read()
except:
except BaseException:
ldflags = None
# this is to pull out compiler arguments from the CXX flags set up by the
@ -75,7 +75,7 @@ try:
while len(cmd) > 0 and not cmd[0].startswith('-'):
cmd = cmd[1:]
extra_cmd += ' '.join(cmd)
except:
except BaseException:
pass
ext = None
@ -85,7 +85,7 @@ if '--bjam' in sys.argv:
del sys.argv[sys.argv.index('--bjam')]
if '--help' not in sys.argv \
and '--help-commands' not in sys.argv:
and '--help-commands' not in sys.argv:
toolset = ''
file_ext = '.so'
@ -129,14 +129,22 @@ if '--bjam' in sys.argv:
print('build failed')
sys.exit(1)
try: os.mkdir('build')
except: pass
try: shutil.rmtree('build/lib')
except: pass
try: os.mkdir('build/lib')
except: pass
try: os.mkdir('libtorrent')
except: pass
try:
os.mkdir('build')
except BaseException:
pass
try:
shutil.rmtree('build/lib')
except BaseException:
pass
try:
os.mkdir('build/lib')
except BaseException:
pass
try:
os.mkdir('libtorrent')
except BaseException:
pass
shutil.copyfile('libtorrent' + file_ext,
'build/lib/libtorrent' + file_ext)
@ -145,13 +153,12 @@ if '--bjam' in sys.argv:
else:
# Remove '-Wstrict-prototypes' compiler option, which isn't valid for C++.
cfg_vars = get_config_vars()
for key, value in cfg_vars.items():
for key, value in list(cfg_vars.items()):
if isinstance(value, str):
cfg_vars[key] = value.replace('-Wstrict-prototypes', '')
source_list = os.listdir(os.path.join(os.path.dirname(__file__), "src"))
source_list = [os.path.abspath(os.path.join(os.path.dirname(__file__),
"src", s)) for s in source_list if s.endswith(".cpp")]
src_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "src"))
source_list = [os.path.join(src_dir, s) for s in os.listdir(src_dir) if s.endswith(".cpp")]
if extra_cmd:
flags = flags_parser()

View File

@ -8,7 +8,7 @@ import libtorrent as lt
import time
import sys
ses = lt.session({'listen_interfaces':'0.0.0.0:6881'})
ses = lt.session({'listen_interfaces': '0.0.0.0:6881'})
info = lt.torrent_info(sys.argv[1])
h = ses.add_torrent({'ti': info, 'save_path': '.'})

View File

@ -1,7 +1,6 @@
#!/usr/bin/env python
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
from __future__ import print_function
import libtorrent as lt
@ -13,7 +12,6 @@ import shutil
import binascii
import subprocess as sub
import sys
import inspect
import pickle
import threading
@ -29,6 +27,7 @@ settings = {
'enable_dht': False, 'enable_lsd': False, 'enable_natpmp': False,
'enable_upnp': False, 'listen_interfaces': '0.0.0.0:0', 'file_pool_size': 1}
class test_create_torrent(unittest.TestCase):
def test_from_torrent_info(self):
@ -47,10 +46,10 @@ class test_create_torrent(unittest.TestCase):
class test_session_stats(unittest.TestCase):
def test_unique(self):
l = lt.session_stats_metrics()
self.assertTrue(len(l) > 40)
metrics = lt.session_stats_metrics()
self.assertTrue(len(metrics) > 40)
idx = set()
for m in l:
for m in metrics:
self.assertTrue(m.value_index not in idx)
idx.add(m.value_index)
@ -118,7 +117,6 @@ class test_torrent_handle(unittest.TestCase):
self.assertEqual(len(torrents), 1)
self.assertEqual(torrents[self.h], 'bar')
def test_replace_trackers(self):
self.setup()
trackers = []
@ -141,20 +139,18 @@ class test_torrent_handle(unittest.TestCase):
tracker.fail_limit = 1
trackers = [tracker]
self.h.replace_trackers(trackers)
tracker_list = [tracker for tracker in self.h.trackers()]
# wait a bit until the endpoints list gets populated
while len(tracker_list[0]['endpoints']) == 0:
while len(self.h.trackers()[0]['endpoints']) == 0:
time.sleep(0.1)
tracker_list = [tracker for tracker in self.h.trackers()]
pickled_trackers = pickle.dumps(tracker_list)
pickled_trackers = pickle.dumps(self.h.trackers())
unpickled_trackers = pickle.loads(pickled_trackers)
self.assertEqual(unpickled_trackers[0]['url'], 'udp://tracker1.com')
self.assertEqual(unpickled_trackers[0]['endpoints'][0]['last_error']['value'], 0)
def test_file_status(self):
self.setup()
l = self.h.file_status()
print(l)
status = self.h.file_status()
print(status)
def test_piece_deadlines(self):
self.setup()
@ -165,11 +161,10 @@ class test_torrent_handle(unittest.TestCase):
# time, wait for next full second to prevent second increment
time.sleep(1 - datetime.datetime.now().microsecond / 1000000.0)
sessionStart = datetime.datetime.now().replace(microsecond=0)
self.setup()
st = self.h.status()
for attr in dir(st):
print('%s: %s' % (attr, getattr(st, attr)))
print('%s: %s' % (attr, getattr(st, attr)))
# last upload and download times are at session start time
self.assertEqual(st.last_upload, None)
self.assertEqual(st.last_download, None)
@ -177,7 +172,7 @@ class test_torrent_handle(unittest.TestCase):
def test_serialize_trackers(self):
"""Test to ensure the dict contains only python built-in types"""
self.setup()
self.h.add_tracker({'url':'udp://tracker1.com'})
self.h.add_tracker({'url': 'udp://tracker1.com'})
tr = self.h.trackers()[0]
# wait a bit until the endpoints list gets populated
while len(tr['endpoints']) == 0:
@ -215,7 +210,7 @@ class test_torrent_handle(unittest.TestCase):
ses = lt.session(settings)
h = ses.add_torrent(tp)
for attr in dir(tp):
print('%s: %s' % (attr, getattr(tp, attr)))
print('%s: %s' % (attr, getattr(tp, attr)))
h.connect_peer(('3.3.3.3', 3))
@ -246,33 +241,34 @@ class test_torrent_handle(unittest.TestCase):
def test_torrent_parameter(self):
self.ses = lt.session(settings)
self.ti = lt.torrent_info('url_seed_multi.torrent');
self.ti = lt.torrent_info('url_seed_multi.torrent')
self.h = self.ses.add_torrent({
'ti': self.ti,
'save_path': os.getcwd(),
'trackers': ['http://test.com/announce'],
'dht_nodes': [('1.2.3.4', 6881), ('4.3.2.1', 6881)],
'file_priorities': [1,1],
'file_priorities': [1, 1],
'http_seeds': ['http://test.com/file3'],
'url_seeds': ['http://test.com/announce-url'],
'peers': [('5.6.7.8', 6881)],
'banned_peers': [('8.7.6.5', 6881)],
'renamed_files': { 0: 'test.txt', 2: 'test.txt' }
})
'renamed_files': {0: 'test.txt', 2: 'test.txt'}
})
self.st = self.h.status()
self.assertEqual(self.st.save_path, os.getcwd())
trackers = self.h.trackers();
trackers = self.h.trackers()
self.assertEqual(len(trackers), 1)
self.assertEqual(trackers[0].get('url'), 'http://test.com/announce')
self.assertEqual(trackers[0].get('tier'), 0)
self.assertEqual(self.h.get_file_priorities(), [1,1])
self.assertEqual(self.h.http_seeds(),['http://test.com/file3'])
self.assertEqual(self.h.get_file_priorities(), [1, 1])
self.assertEqual(self.h.http_seeds(), ['http://test.com/file3'])
# url_seeds was already set, test that it did not got overwritten
self.assertEqual(self.h.url_seeds(),
['http://test.com/announce-url/', 'http://test.com/file/'])
self.assertEqual(self.h.get_piece_priorities(),[4])
self.assertEqual(self.ti.merkle_tree(),[])
self.assertEqual(self.st.verified_pieces,[])
['http://test.com/announce-url/', 'http://test.com/file/'])
self.assertEqual(self.h.get_piece_priorities(), [4])
self.assertEqual(self.ti.merkle_tree(), [])
self.assertEqual(self.st.verified_pieces, [])
class test_torrent_info(unittest.TestCase):
@ -328,13 +324,13 @@ class test_torrent_info(unittest.TestCase):
os.path.join('temp', 'foo'))
idx += 1
def test_announce_entry(self):
ae = lt.announce_entry('test')
self.assertEquals(ae.url, 'test')
self.assertEquals(ae.tier, 0)
self.assertEquals(ae.verified, False)
self.assertEquals(ae.source, 0)
self.assertEqual(ae.url, 'test')
self.assertEqual(ae.tier, 0)
self.assertEqual(ae.verified, False)
self.assertEqual(ae.source, 0)
class test_alerts(unittest.TestCase):
@ -350,7 +346,7 @@ class test_alerts(unittest.TestCase):
alerts = ses.pop_alerts()
for a in alerts:
if a.what() == 'add_torrent_alert':
self.assertEquals(a.torrent_name, 'temp')
self.assertEqual(a.torrent_name, 'temp')
print(a.message())
for field_name in dir(a):
if field_name.startswith('__'):
@ -429,10 +425,11 @@ class test_bencoder(unittest.TestCase):
class test_sha1hash(unittest.TestCase):
def test_sha1hash(self):
h = 'a0'*20
h = 'a0' * 20
s = lt.sha1_hash(binascii.unhexlify(h))
self.assertEqual(h, str(s))
class test_magnet_link(unittest.TestCase):
def test_parse_magnet_uri(self):
@ -453,70 +450,72 @@ class test_magnet_link(unittest.TestCase):
h = ses.add_torrent(p)
self.assertEqual(str(h.info_hash()), '178882f042c0c33426a6d81e0333ece346e68a68')
class test_peer_class(unittest.TestCase):
def test_peer_class_ids(self):
s = lt.session(settings)
def test_peer_class_ids(self):
s = lt.session(settings)
print('global_peer_class_id:', lt.session.global_peer_class_id)
print('tcp_peer_class_id:', lt.session.tcp_peer_class_id)
print('local_peer_class_id:', lt.session.local_peer_class_id)
print('global_peer_class_id:', lt.session.global_peer_class_id)
print('tcp_peer_class_id:', lt.session.tcp_peer_class_id)
print('local_peer_class_id:', lt.session.local_peer_class_id)
print('global: ', s.get_peer_class(s.global_peer_class_id))
print('tcp: ', s.get_peer_class(s.local_peer_class_id))
print('local: ', s.get_peer_class(s.local_peer_class_id))
print('global: ', s.get_peer_class(s.global_peer_class_id))
print('tcp: ', s.get_peer_class(s.local_peer_class_id))
print('local: ', s.get_peer_class(s.local_peer_class_id))
def test_peer_class(self):
s = lt.session(settings)
def test_peer_class(self):
s = lt.session(settings)
c = s.create_peer_class('test class')
print('new class: ', s.get_peer_class(c))
c = s.create_peer_class('test class')
print('new class: ', s.get_peer_class(c))
nfo = s.get_peer_class(c)
self.assertEqual(nfo['download_limit'], 0)
self.assertEqual(nfo['upload_limit'], 0)
self.assertEqual(nfo['ignore_unchoke_slots'], False)
self.assertEqual(nfo['connection_limit_factor'], 100)
self.assertEqual(nfo['download_priority'], 1)
self.assertEqual(nfo['upload_priority'], 1)
self.assertEqual(nfo['label'], 'test class')
nfo = s.get_peer_class(c)
self.assertEqual(nfo['download_limit'], 0)
self.assertEqual(nfo['upload_limit'], 0)
self.assertEqual(nfo['ignore_unchoke_slots'], False)
self.assertEqual(nfo['connection_limit_factor'], 100)
self.assertEqual(nfo['download_priority'], 1)
self.assertEqual(nfo['upload_priority'], 1)
self.assertEqual(nfo['label'], 'test class')
nfo['download_limit'] = 1337
nfo['upload_limit'] = 1338
nfo['ignore_unchoke_slots'] = True
nfo['connection_limit_factor'] = 42
nfo['download_priority'] = 2
nfo['upload_priority'] = 3
nfo['download_limit'] = 1337
nfo['upload_limit'] = 1338
nfo['ignore_unchoke_slots'] = True
nfo['connection_limit_factor'] = 42
nfo['download_priority'] = 2
nfo['upload_priority'] = 3
s.set_peer_class(c, nfo)
s.set_peer_class(c, nfo)
nfo2 = s.get_peer_class(c)
self.assertEqual(nfo, nfo2)
nfo2 = s.get_peer_class(c)
self.assertEqual(nfo, nfo2)
def test_peer_class_filter(self):
filt = lt.peer_class_type_filter()
filt.add(lt.peer_class_type_filter.tcp_socket, lt.session.global_peer_class_id);
filt.remove(lt.peer_class_type_filter.utp_socket, lt.session.local_peer_class_id);
def test_peer_class_filter(self):
filt = lt.peer_class_type_filter()
filt.add(lt.peer_class_type_filter.tcp_socket, lt.session.global_peer_class_id)
filt.remove(lt.peer_class_type_filter.utp_socket, lt.session.local_peer_class_id)
filt.disallow(lt.peer_class_type_filter.tcp_socket, lt.session.global_peer_class_id);
filt.allow(lt.peer_class_type_filter.utp_socket, lt.session.local_peer_class_id);
filt.disallow(lt.peer_class_type_filter.tcp_socket, lt.session.global_peer_class_id)
filt.allow(lt.peer_class_type_filter.utp_socket, lt.session.local_peer_class_id)
def test_peer_class_ip_filter(self):
s = lt.session(settings)
s.set_peer_class_type_filter(lt.peer_class_type_filter())
s.set_peer_class_filter(lt.ip_filter())
def test_peer_class_ip_filter(self):
s = lt.session(settings)
s.set_peer_class_type_filter(lt.peer_class_type_filter())
s.set_peer_class_filter(lt.ip_filter())
class test_session(unittest.TestCase):
def test_add_torrent(self):
s = lt.session(settings)
h = s.add_torrent({'ti': lt.torrent_info('base.torrent'),
'save_path': '.',
'dht_nodes': [('1.2.3.4', 6881), ('4.3.2.1', 6881)],
'http_seeds': ['http://test.com/seed'],
'peers': [('5.6.7.8', 6881)],
'banned_peers': [('8.7.6.5', 6881)],
'file_priorities': [1,1,1,2,0]})
s.add_torrent({'ti': lt.torrent_info('base.torrent'),
'save_path': '.',
'dht_nodes': [('1.2.3.4', 6881), ('4.3.2.1', 6881)],
'http_seeds': ['http://test.com/seed'],
'peers': [('5.6.7.8', 6881)],
'banned_peers': [('8.7.6.5', 6881)],
'file_priorities': [1, 1, 1, 2, 0]})
def test_apply_settings(self):
@ -565,10 +564,9 @@ class test_session(unittest.TestCase):
self.assertTrue(isinstance(a.active_requests, list))
self.assertTrue(isinstance(a.routing_table, list))
def test_unknown_settings(self):
try:
s = lt.session({'unexpected-key-name': 42})
lt.session({'unexpected-key-name': 42})
self.assertFalse('should have thrown an exception')
except KeyError as e:
print(e)
@ -606,52 +604,52 @@ class test_example_client(unittest.TestCase):
my_stdin = slave_fd
process = sub.Popen(
[sys.executable,"client.py","url_seed_multi.torrent"],
[sys.executable, "client.py", "url_seed_multi.torrent"],
stdin=my_stdin, stdout=sub.PIPE, stderr=sub.PIPE)
# python2 has no Popen.wait() timeout
time.sleep(5)
returncode = process.poll()
if returncode == None:
if returncode is None:
# this is an expected use-case
process.kill()
err = process.stderr.read().decode("utf-8")
self.assertEqual('', err, 'process throw errors: \n' + err)
# check error code if process did unexpected end
if returncode != None:
if returncode is not None:
# in case of error return: output stdout if nothing was on stderr
if returncode != 0:
print("stdout:\n" + process.stdout.read().decode("utf-8"))
self.assertEqual(returncode, 0, "returncode: " + str(returncode) + "\n"
+ "stderr: empty\n"
+ "some configuration does not output errors like missing module members,"
+ "try to call it manually to get the error message\n")
+ "stderr: empty\n"
+ "some configuration does not output errors like missing module members,"
+ "try to call it manually to get the error message\n")
def test_execute_simple_client(self):
process = sub.Popen(
[sys.executable,"simple_client.py","url_seed_multi.torrent"],
[sys.executable, "simple_client.py", "url_seed_multi.torrent"],
stdout=sub.PIPE, stderr=sub.PIPE)
# python2 has no Popen.wait() timeout
time.sleep(5)
returncode = process.poll()
if returncode == None:
if returncode is None:
# this is an expected use-case
process.kill()
err = process.stderr.read().decode("utf-8")
self.assertEqual('', err, 'process throw errors: \n' + err)
# check error code if process did unexpected end
if returncode != None:
if returncode is not None:
# in case of error return: output stdout if nothing was on stderr
if returncode != 0:
print("stdout:\n" + process.stdout.read().decode("utf-8"))
self.assertEqual(returncode, 0, "returncode: " + str(returncode) + "\n"
+ "stderr: empty\n"
+ "some configuration does not output errors like missing module members,"
+ "try to call it manually to get the error message\n")
+ "stderr: empty\n"
+ "some configuration does not output errors like missing module members,"
+ "try to call it manually to get the error message\n")
def test_execute_make_torrent(self):
process = sub.Popen(
[sys.executable,"make_torrent.py","url_seed_multi.torrent",
"http://test.com/test"], stdout=sub.PIPE, stderr=sub.PIPE)
[sys.executable, "make_torrent.py", "url_seed_multi.torrent",
"http://test.com/test"], stdout=sub.PIPE, stderr=sub.PIPE)
returncode = process.wait()
# python2 has no Popen.wait() timeout
err = process.stderr.read().decode("utf-8")
@ -660,15 +658,16 @@ class test_example_client(unittest.TestCase):
if returncode != 0:
print("stdout:\n" + process.stdout.read().decode("utf-8"))
self.assertEqual(returncode, 0, "returncode: " + str(returncode) + "\n"
+ "stderr: empty\n"
+ "some configuration does not output errors like missing module members,"
+ "try to call it manually to get the error message\n")
+ "stderr: empty\n"
+ "some configuration does not output errors like missing module members,"
+ "try to call it manually to get the error message\n")
def test_default_settings(self):
default = lt.default_settings()
print(default)
class test_operation_t(unittest.TestCase):
def test_enum(self):
@ -678,6 +677,7 @@ class test_operation_t(unittest.TestCase):
self.assertEqual(lt.operation_name(lt.operation_t.partfile_write), "partfile_write")
self.assertEqual(lt.operation_name(lt.operation_t.hostname_lookup), "hostname_lookup")
if __name__ == '__main__':
print(lt.__version__)
shutil.copy(os.path.join('..', '..', 'test', 'test_torrents',

File diff suppressed because it is too large Load Diff

View File

@ -1,98 +1,123 @@
#!/usr/bin/env python
from __future__ import print_function
f = open('../include/libtorrent/settings_pack.hpp')
out = open('settings.rst', 'w+')
def print_field(str, width):
return '%s%s' % (str, ' ' * (width - len(str)))
return '%s%s' % (str, ' ' * (width - len(str)))
def render_section(names, description, type, default_values):
max_name_len = max(len(max(names, key=len)), len('name'))
max_type_len = max(len(type), len('type'))
max_val_len = max(len(max(default_values, key=len)), len('default'))
max_name_len = max(len(max(names, key=len)), len('name'))
max_type_len = max(len(type), len('type'))
max_val_len = max(len(max(default_values, key=len)), len('default'))
# add link targets for the rest of the manual to reference
for n in names:
print >>out, '.. _%s:\n' % n
# add link targets for the rest of the manual to reference
for n in names:
print('.. _%s:\n' % n, file=out)
if len(names) > 0:
print >>out, '.. raw:: html\n'
for n in names:
print >>out, '\t<a name="%s"></a>' % n
print >>out, ''
if len(names) > 0:
print('.. raw:: html\n', file=out)
for n in names:
print('\t<a name="%s"></a>' % n, file=out)
print('', file=out)
separator = '+-' + ('-' * max_name_len) + '-+-' + ('-' * max_type_len) + '-+-' + ('-' * max_val_len) + '-+'
separator = '+-' + ('-' * max_name_len) + '-+-' + ('-' * max_type_len) + '-+-' + ('-' * max_val_len) + '-+'
# build a table for the settings, their type and default value
print(separator, file=out)
print(
'| %s | %s | %s |' %
(print_field(
'name', max_name_len), print_field(
'type', max_type_len), print_field(
'default', max_val_len)), file=out)
print(separator.replace('-', '='), file=out)
for i in range(len(names)):
print(
'| %s | %s | %s |' %
(print_field(
names[i], max_name_len), print_field(
type, max_type_len), print_field(
default_values[i], max_val_len)), file=out)
print(separator, file=out)
print(file=out)
print(description, file=out)
# build a table for the settings, their type and default value
print >>out, separator
print >>out, '| %s | %s | %s |' % (print_field('name', max_name_len), print_field('type', max_type_len), print_field('default', max_val_len))
print >>out, separator.replace('-', '=')
for i in range(len(names)):
print >>out, '| %s | %s | %s |' % (print_field(names[i], max_name_len), print_field(type, max_type_len), print_field(default_values[i], max_val_len))
print >>out, separator
print >>out
print >>out, description
mode = ''
# parse out default values for settings
f2 = open('../src/settings_pack.cpp')
def_map = {}
for l in f2:
l = l.strip()
if not l.startswith('SET(') \
and not l.startswith('SET_NOPREV(') \
and not l.startswith('DEPRECATED_SET('): continue
for line in f2:
line = line.strip()
if not line.startswith('SET(') \
and not line.startswith('SET_NOPREV(') \
and not line.startswith('DEPRECATED_SET('):
continue
l = l.split('(')[1].split(',')
def_map[l[0]] = l[1].strip()
print '%s = %s' % (l[0], l[1].strip())
line = line.split('(')[1].split(',')
def_map[line[0]] = line[1].strip()
print('%s = %s' % (line[0], line[1].strip()))
description = ''
names = []
for l in f:
if 'enum string_types' in l: mode = 'string'
if 'enum bool_types' in l: mode = 'bool'
if 'enum int_types' in l: mode = 'int'
if '#if TORRENT_ABI_VERSION == 1' in l: mode += 'skip'
if '#endif' in l: mode = mode[0:-4]
for line in f:
if 'enum string_types' in line:
mode = 'string'
if 'enum bool_types' in line:
mode = 'bool'
if 'enum int_types' in line:
mode = 'int'
if '#if TORRENT_ABI_VERSION == 1' in line:
mode += 'skip'
if '#endif' in line:
mode = mode[0:-4]
if mode == '': continue
if mode[-4:] == 'skip': continue
if mode == '':
continue
if mode[-4:] == 'skip':
continue
l = l.lstrip()
line = line.lstrip()
if l == '' and len(names) > 0:
if description == '':
for n in names:
print 'WARNING: no description for "%s"' % n
else:
default_values = []
for n in names:
default_values.append(def_map[n])
render_section(names, description, mode, default_values)
description = ''
names = []
if line == '' and len(names) > 0:
if description == '':
for n in names:
print('WARNING: no description for "%s"' % n)
else:
default_values = []
for n in names:
default_values.append(def_map[n])
render_section(names, description, mode, default_values)
description = ''
names = []
if l.startswith('};'):
mode = ''
continue
if line.startswith('};'):
mode = ''
continue
if l.startswith('//'):
if l[2] == ' ': description += l[3:]
else: description += l[2:]
continue
if line.startswith('//'):
if line[2] == ' ':
description += line[3:]
else:
description += line[2:]
continue
l = l.strip()
if l.endswith(','):
l = l[:-1] # strip trailing comma
if '=' in l: l = l.split('=')[0].strip()
if l.endswith('_internal'): continue
line = line.strip()
if line.endswith(','):
line = line[:-1] # strip trailing comma
if '=' in line:
line = line.split('=')[0].strip()
if line.endswith('_internal'):
continue
names.append(l)
names.append(line)
out.close()
f.close()

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python
from __future__ import print_function
counter_types = {}
@ -6,33 +7,40 @@ f = open('../include/libtorrent/performance_counters.hpp')
counter_type = ''
for l in f:
for line in f:
# ignore anything after //
if '//' in l: l = l.split('//')[0]
# ignore anything after //
if '//' in line:
line = line.split('//')[0]
l = l.strip()
line = line.strip()
if l.startswith('#'): continue
if l == '': continue
if line.startswith('#'):
continue
if line == '':
continue
if 'enum stats_counter_t' in l:
counter_type = 'counter'
continue
if 'enum stats_counter_t' in line:
counter_type = 'counter'
continue
if 'enum stats_gauge_t' in l:
counter_type = 'gauge'
continue
if 'enum stats_gauge_t' in line:
counter_type = 'gauge'
continue
if '{' in l or '}' in l or 'struct' in l or 'namespace' in l: continue
if counter_type == '': continue
if not l.endswith(','): continue
if '{' in line or '}' in line or 'struct' in line or 'namespace' in line:
continue
if counter_type == '':
continue
if not line.endswith(','):
continue
# strip off trailing comma
l = l[:-1]
if '=' in l: l = l[:l.index('=')].strip()
# strip off trailing comma
line = line[:-1]
if '=' in line:
line = line[:line.index('=')].strip()
counter_types[l] = counter_type
counter_types[line] = counter_type
f.close()
@ -40,39 +48,42 @@ f = open('../src/session_stats.cpp')
out = open('stats_counters.rst', 'w+')
def print_field(str, width):
return '%s%s' % (str, ' ' * (width - len(str)))
return '%s%s' % (str, ' ' * (width - len(str)))
def render_section(names, description, types):
max_name_len = max(len(max(names, key=len)), len('name'))
max_type_len = max(len(max(types, key=len)), len('type'))
max_name_len = max(len(max(names, key=len)), len('name'))
max_type_len = max(len(max(types, key=len)), len('type'))
if description == '':
for n in names:
print 'WARNING: no description for "%s"' % n
if description == '':
for n in names:
print('WARNING: no description for "%s"' % n)
# add link targets for the rest of the manual to reference
for n in names:
print >>out, '.. _%s:\n' % n
# add link targets for the rest of the manual to reference
for n in names:
print('.. _%s:\n' % n, file=out)
if len(names) > 0:
print >>out, '.. raw:: html\n'
for n in names:
print >>out, '\t<a name="%s"></a>' % n
print >>out, ''
if len(names) > 0:
print('.. raw:: html\n', file=out)
for n in names:
print('\t<a name="%s"></a>' % n, file=out)
print('', file=out)
separator = '+-' + ('-' * max_name_len) + '-+-' + ('-' * max_type_len) + '-+'
separator = '+-' + ('-' * max_name_len) + '-+-' + ('-' * max_type_len) + '-+'
# build a table for the settings, their type and default value
print(separator, file=out)
print('| %s | %s |' % (print_field('name', max_name_len), print_field('type', max_type_len)), file=out)
print(separator.replace('-', '='), file=out)
for i in range(len(names)):
print('| %s | %s |' % (print_field(names[i], max_name_len), print_field(types[i], max_type_len)), file=out)
print(separator, file=out)
print(file=out)
print(description, file=out)
print('', file=out)
# build a table for the settings, their type and default value
print >>out, separator
print >>out, '| %s | %s |' % (print_field('name', max_name_len), print_field('type', max_type_len))
print >>out, separator.replace('-', '=')
for i in range(len(names)):
print >>out, '| %s | %s |' % (print_field(names[i], max_name_len), print_field(types[i], max_type_len))
print >>out, separator
print >>out
print >>out, description
print >>out, ''
mode = ''
@ -80,42 +91,43 @@ description = ''
names = []
types = []
for l in f:
description_line = l.lstrip().startswith('//')
for line in f:
description_line = line.lstrip().startswith('//')
l = l.strip()
line = line.strip()
if mode == 'ignore':
if '#endif' in l: mode = ''
continue
if mode == 'ignore':
if '#endif' in line:
mode = ''
continue
if 'TORRENT_ABI_VERSION == 1' in l:
mode = 'ignore'
continue
if 'TORRENT_ABI_VERSION == 1' in line:
mode = 'ignore'
continue
if description_line == True:
if len(names) > 0:
render_section(names, description, types)
description = ''
names = []
types = []
if description_line:
if len(names) > 0:
render_section(names, description, types)
description = ''
names = []
types = []
description += '\n' + l[3:]
description += '\n' + line[3:]
if '#define' in l: continue
if '#define' in line:
continue
if 'METRIC(' in l:
args = l.split('(')[1].split(')')[0].split(',')
if 'METRIC(' in line:
args = line.split('(')[1].split(')')[0].split(',')
# args: category, name, type
# args: category, name, type
args[1] = args[1].strip()
names.append(args[0].strip() + '.' + args[1].strip())
types.append(counter_types[args[1]])
args[1] = args[1].strip()
names.append(args[0].strip() + '.' + args[1].strip())
types.append(counter_types[args[1]])
if len(names) > 0:
render_section(names, description, types)
render_section(names, description, types)
out.close()
f.close()

View File

@ -2,15 +2,23 @@
import glob
import os
import sys
paths = ['test/*.cpp', 'src/*.cpp', 'src/kademlia/*.cpp', 'include/libtorrent/*.hpp', 'include/libtorrent/kademlia/*.hpp', 'include/libtorrent/aux_/*.hpp', 'include/libtorrent/extensions/*.hpp']
paths = [
'test/*.cpp',
'src/*.cpp',
'src/kademlia/*.cpp',
'include/libtorrent/*.hpp',
'include/libtorrent/kademlia/*.hpp',
'include/libtorrent/aux_/*.hpp',
'include/libtorrent/extensions/*.hpp']
os.system('(cd .. ; ctags %s 2>/dev/null)' % ' '.join(paths))
files = []
for p in paths:
files.extend(glob.glob(os.path.join('..', p)))
files.extend(glob.glob(os.path.join('..', p)))
items = []
@ -20,79 +28,91 @@ context = []
priority_count = [0, 0, 0, 0, 0]
def html_sanitize(s):
ret = ''
for i in s:
if i == '<': ret += '&lt;'
elif i == '>': ret += '&gt;'
elif i == '&': ret += '&amp;'
else: ret += i
return ret
ret = ''
for i in s:
if i == '<':
ret += '&lt;'
elif i == '>':
ret += '&gt;'
elif i == '&':
ret += '&amp;'
else:
ret += i
return ret
for f in files:
h = open(f)
h = open(f)
state = ''
line_no = 0
context_lines = 0
state = ''
line_no = 0
context_lines = 0
for l in h:
line_no += 1
line = l.strip()
if 'TODO:' in line and line.startswith('//'):
line = line.split('TODO:')[1].strip()
state = 'todo'
items.append({})
items[-1]['location'] = '%s:%d' % (f, line_no)
items[-1]['priority'] = 0
if line[0] in '0123456789':
items[-1]['priority'] = int(line[0])
if int(line[0]) > 5:
print 'priority too high: ' + line
sys.exit(1)
for l in h:
line_no += 1
line = l.strip()
if 'TODO:' in line and line.startswith('//'):
line = line.split('TODO:')[1].strip()
state = 'todo'
items.append({})
items[-1]['location'] = '%s:%d' % (f, line_no)
items[-1]['priority'] = 0
if line[0] in '0123456789':
items[-1]['priority'] = int(line[0])
if int(line[0]) > 5:
print('priority too high: ' + line)
sys.exit(1)
line = line[1:].strip()
items[-1]['todo'] = line
prio = items[-1]['priority']
if prio >= 0 and prio <= 4: priority_count[prio] += 1
continue
line = line[1:].strip()
items[-1]['todo'] = line
prio = items[-1]['priority']
if prio >= 0 and prio <= 4:
priority_count[prio] += 1
continue
if state == '':
context.append(html_sanitize(l))
if len(context) > 20: context.pop(0)
continue
if state == '':
context.append(html_sanitize(l))
if len(context) > 20:
context.pop(0)
continue
if state == 'todo':
if line.strip().startswith('//'):
items[-1]['todo'] += '\n'
items[-1]['todo'] += line[2:].strip()
else:
state = 'context'
items[-1]['context'] = ''.join(context) + '<div style="background: #ffff00" width="100%">' + html_sanitize(l) + '</div>';
context_lines = 1
if state == 'todo':
if line.strip().startswith('//'):
items[-1]['todo'] += '\n'
items[-1]['todo'] += line[2:].strip()
else:
state = 'context'
items[-1]['context'] = ''.join(context) + \
'<div style="background: #ffff00" width="100%">' + html_sanitize(l) + '</div>'
context_lines = 1
context.append(html_sanitize(l))
if len(context) > 20: context.pop(0)
continue
context.append(html_sanitize(l))
if len(context) > 20:
context.pop(0)
continue
if state == 'context':
items[-1]['context'] += html_sanitize(l)
context_lines += 1
if state == 'context':
items[-1]['context'] += html_sanitize(l)
context_lines += 1
context.append(html_sanitize(l))
if len(context) > 20: context.pop(0)
if context_lines > 30: state = ''
context.append(html_sanitize(l))
if len(context) > 20:
context.pop(0)
if context_lines > 30:
state = ''
h.close()
h.close()
items.sort(key = lambda x: x['priority'], reverse = True)
items.sort(key=lambda x: x['priority'], reverse=True)
#for i in items:
# print '\n\n', i['todo'], '\n'
# print i['location'], '\n'
# print 'prio: ', i['priority'], '\n'
# if 'context' in i:
# print i['context'], '\n'
# for i in items:
# print('\n\n', i['todo'], '\n')
# print(i['location'], '\n')
# print('prio: ', i['priority'], '\n')
# if 'context' in i:
# print(i['context'], '\n')
out = open('todo.html', 'w+')
out.write('''<html><head>
@ -123,21 +143,24 @@ out.write('''<html><head>
<span style="color: #3c3">%d relevant</span>
<span style="color: #77f">%d feasible</span>
<span style="color: #999">%d notes</span>
<table width="100%%" border="1" style="border-collapse: collapse;">''' % \
(priority_count[4], priority_count[3], priority_count[2], priority_count[1], priority_count[0]))
<table width="100%%" border="1" style="border-collapse: collapse;">''' # noqa
% (priority_count[4], priority_count[3], priority_count[2], priority_count[1], priority_count[0]))
prio_colors = [ '#ccc', '#ccf', '#cfc', '#fcc', '#f44']
prio_colors = ['#ccc', '#ccf', '#cfc', '#fcc', '#f44']
index = 0
for i in items:
if not 'context' in i: i['context'] = ''
out.write('<tr style="background: %s"><td>relevance&nbsp;%d</td><td><a href="javascript:expand(%d)">%s</a></td><td>%s</td></tr>' \
% (prio_colors[i['priority']], i['priority'], index, i['location'], i['todo'].replace('\n', ' ')))
if 'context' not in i:
i['context'] = ''
out.write(('<tr style="background: %s"><td>relevance&nbsp;%d</td>'
'<td><a href="javascript:expand(%d)">%s</a></td><td>%s</td></tr>')
% (prio_colors[i['priority']], i['priority'], index, i['location'], i['todo'].replace('\n', ' ')))
out.write('<tr id="%d" style="display: none;" colspan="3"><td colspan="3"><h2>%s</h2><h4>%s</h4><pre style="background: #f6f6f6; border: solid 1px #ddd;">%s</pre></td></tr>' \
% (index, i['todo'], i['location'], i['context']))
index += 1
out.write(
('<tr id="%d" style="display: none;" colspan="3"><td colspan="3"><h2>%s</h2><h4>%s</h4>'
'<pre style="background: #f6f6f6; border: solid 1px #ddd;">%s</pre></td></tr>') %
(index, i['todo'], i['location'], i['context']))
index += 1
out.write('</table></body></html>')
out.close()

View File

@ -7,19 +7,20 @@ sys.stdout.write(open(sys.argv[1], 'r').read())
sys.stderr.write('joining %s\n' % sys.argv[1])
for name in sys.argv[2:]:
sys.stdout.write('\n')
sys.stderr.write('joining %s\n' % name)
f = open(name, 'r')
for l in f:
# strip out the table of contents from subsequent files
if '.. contents::' in l:
in_directive = True
continue
if ':Author:' in l: continue
if ':Version:' in l: continue
if l[0] in ' \t' and in_directive:
continue
in_directive = False
sys.stdout.write(l)
sys.stdout.write('\n')
sys.stderr.write('joining %s\n' % name)
f = open(name, 'r')
for l in f:
# strip out the table of contents from subsequent files
if '.. contents::' in l:
in_directive = True
continue
if ':Author:' in l:
continue
if ':Version:' in l:
continue
if l[0] in ' \t' and in_directive:
continue
in_directive = False
sys.stdout.write(l)

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python
from __future__ import print_function
import sys
import os
@ -62,378 +63,465 @@ default_cache = cache_sizes[-1]
# disk cache is not a significant part of the test,
# since download rates will be extremely high while downloading
# into RAM
test_duration = 200 # 700
test_duration = 200 # 700
# make sure the environment is properly set up
try:
if os.name == 'posix':
resource.setrlimit(resource.RLIMIT_NOFILE, (4000, 5000))
except:
if resource.getrlimit(resource.RLIMIT_NOFILE)[0] < 4000:
print 'please set ulimit -n to at least 4000'
sys.exit(1)
if os.name == 'posix':
resource.setrlimit(resource.RLIMIT_NOFILE, (4000, 5000))
except BaseException:
if resource.getrlimit(resource.RLIMIT_NOFILE)[0] < 4000:
print('please set ulimit -n to at least 4000')
sys.exit(1)
def build_stage_dirs():
ret = []
for i in builds[2:3]:
ret.append('stage_%s' % i)
return ret
ret = []
for i in builds[2:3]:
ret.append('stage_%s' % i)
return ret
# make sure we have all the binaries available
binaries = ['client_test', 'connection_tester', 'fragmentation_test']
for b in build_stage_dirs():
for i in binaries:
p = os.path.join(b, i)
if not os.path.exists(p):
print 'make sure "%s" is available in ./%s' % (i, b)
sys.exit(1)
for i in binaries:
p = os.path.join(b, i)
if not os.path.exists(p):
print('make sure "%s" is available in ./%s' % (i, b))
sys.exit(1)
for i in filesystem:
if not os.path.exists(i):
print ('the path "%s" does not exist. This is directory/mountpoint is ' +
'used as the download directory and is the filesystem that will be benchmarked ' +
'and need to exist.') % i
sys.exit(1)
if not os.path.exists(i):
print(('the path "%s" does not exist. This is directory/mountpoint is ' +
'used as the download directory and is the filesystem that will be benchmarked ' +
'and need to exist.') % i)
sys.exit(1)
# make sure we have a test torrent
if not os.path.exists('test.torrent'):
print 'generating test torrent'
# generate a 100 GB torrent, to make sure it won't all fit in physical RAM
os.system('./stage_aio/connection_tester gen-torrent 10000 test.torrent')
print('generating test torrent')
# generate a 100 GB torrent, to make sure it won't all fit in physical RAM
os.system('./stage_aio/connection_tester gen-torrent 10000 test.torrent')
if not os.path.exists('test2.torrent'):
print 'generating test torrent 2'
# generate a 6 GB torrent, to make sure it will fit in physical RAM
os.system('./stage_aio/connection_tester gen-torrent 6000 test2.torrent')
print('generating test torrent 2')
# generate a 6 GB torrent, to make sure it will fit in physical RAM
os.system('./stage_aio/connection_tester gen-torrent 6000 test2.torrent')
# use a new port for each test to make sure they keep working
# this port is incremented for each test run
port = 10000 + random.randint(0, 40000)
def clear_caches():
if 'linux' in sys.platform:
os.system('sync')
open('/proc/sys/vm/drop_caches', 'w').write('3')
elif 'darwin' in sys.platform:
os.system('purge')
if 'linux' in sys.platform:
os.system('sync')
open('/proc/sys/vm/drop_caches', 'w').write('3')
elif 'darwin' in sys.platform:
os.system('purge')
def build_commandline(config, port):
num_peers = config['num-peers']
torrent_path = config['torrent']
num_peers = config['num-peers']
torrent_path = config['torrent']
if config['build'] == 'utorrent':
try: os.mkdir('utorrent_session')
except: pass
cfg = open('utorrent_session/settings.dat', 'w+')
if config['build'] == 'utorrent':
try:
os.mkdir('utorrent_session')
except BaseException:
pass
cfg = open('utorrent_session/settings.dat', 'w+')
cfg.write('d')
cfg.write('20:ul_slots_per_torrenti%de' % num_peers)
cfg.write('17:conns_per_torrenti%de' % num_peers)
cfg.write('14:conns_globallyi%de' % num_peers)
cfg.write('9:bind_porti%de' % port)
cfg.write('19:dir_active_download%d:%s' % (len(config['save-path']), config['save-path']))
cfg.write('19:diskio.sparse_filesi1e')
cfg.write('14:cache.overridei1e')
cfg.write('19:cache.override_sizei%de' % int(config['cache-size'] * 16 / 1024))
cfg.write('17:dir_autoload_flagi1e')
cfg.write('12:dir_autoload8:autoload')
cfg.write('11:logger_maski4294967295e')
cfg.write('1:vi0e')
cfg.write('12:webui.enablei1e')
cfg.write('19:webui.enable_listeni1e')
cfg.write('14:webui.hashword20:' + hashlib.sha1('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaadmin').digest())
cfg.write('10:webui.porti8080e')
cfg.write('10:webui.salt32:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa')
cfg.write('14:webui.username5:admin')
cfg.write('e')
cfg.close()
try: os.mkdir('utorrent_session/autoload')
except: pass
try: shutil.copy(torrent_path, 'utorrent_session/autoload/')
except: pass
return './utorrent-server-v3_0/utserver -logfile session_stats/alerts_log.txt -settingspath utorrent_session'
cfg.write('d')
cfg.write('20:ul_slots_per_torrenti%de' % num_peers)
cfg.write('17:conns_per_torrenti%de' % num_peers)
cfg.write('14:conns_globallyi%de' % num_peers)
cfg.write('9:bind_porti%de' % port)
cfg.write('19:dir_active_download%d:%s' % (len(config['save-path']), config['save-path']))
cfg.write('19:diskio.sparse_filesi1e')
cfg.write('14:cache.overridei1e')
cfg.write('19:cache.override_sizei%de' % int(config['cache-size'] * 16 / 1024))
cfg.write('17:dir_autoload_flagi1e')
cfg.write('12:dir_autoload8:autoload')
cfg.write('11:logger_maski4294967295e')
cfg.write('1:vi0e')
cfg.write('12:webui.enablei1e')
cfg.write('19:webui.enable_listeni1e')
cfg.write('14:webui.hashword20:' + hashlib.sha1('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaadmin').digest())
cfg.write('10:webui.porti8080e')
cfg.write('10:webui.salt32:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa')
cfg.write('14:webui.username5:admin')
cfg.write('e')
cfg.close()
try:
os.mkdir('utorrent_session/autoload')
except BaseException:
pass
try:
shutil.copy(torrent_path, 'utorrent_session/autoload/')
except BaseException:
pass
return './utorrent-server-v3_0/utserver -logfile session_stats/alerts_log.txt -settingspath utorrent_session'
if config['build'] == 'rtorrent':
if os.path.exists('rtorrent_session'):
add_command = ''
else:
try: os.mkdir('rtorrent_session')
except: pass
# it seems rtorrent may delete the original torrent when it's being added
try: shutil.copy(torrent_path, 'rtorrent_session/')
except: pass
add_command = '-O load_start_verbose=rtorrent_session/%s ' % torrent_path
if config['build'] == 'rtorrent':
if os.path.exists('rtorrent_session'):
add_command = ''
else:
try:
os.mkdir('rtorrent_session')
except BaseException:
pass
# it seems rtorrent may delete the original torrent when it's being added
try:
shutil.copy(torrent_path, 'rtorrent_session/')
except BaseException:
pass
add_command = '-O load_start_verbose=rtorrent_session/%s ' % torrent_path
return 'rtorrent -d %s -n -p %d-%d -O max_peers=%d -O max_uploads=%d %s -s rtorrent_session -O max_memory_usage=128000000000' \
% (config['save-path'], port, port, num_peers, num_peers, add_command)
return ('rtorrent -d %s -n -p %d-%d -O max_peers=%d -O max_uploads=%d %s -s '
'rtorrent_session -O max_memory_usage=128000000000') % (
config['save-path'], port, port, num_peers, num_peers, add_command)
disable_disk = ''
if config['disable-disk']:
disable_disk = '-0'
return ('./stage_%s/client_test -k -N -H -M -B %d -l %d -S %d -T %d -c %d -C %d -s "%s" -p %d -E %d %s '
'-f session_stats/alerts_log.txt %s') % (
config['build'], test_duration, num_peers, num_peers, num_peers, num_peers, config['cache-size'],
config['save-path'], port, config['hash-threads'], disable_disk, torrent_path)
disable_disk = ''
if config['disable-disk']: disable_disk = '-0'
return './stage_%s/client_test -k -N -H -M -B %d -l %d -S %d -T %d -c %d -C %d -s "%s" -p %d -E %d %s -f session_stats/alerts_log.txt %s' \
% (config['build'], test_duration, num_peers, num_peers, num_peers, num_peers, config['cache-size'], config['save-path'], port, \
config['hash-threads'], disable_disk, torrent_path)
def delete_files(files):
for i in files:
try: os.remove(i)
except:
try: shutil.rmtree(i)
except:
try:
if os.path.exists(i): print 'failed to delete %s' % i
except: pass
for i in files:
try:
os.remove(i)
except BaseException:
try:
shutil.rmtree(i)
except BaseException:
try:
if os.path.exists(i):
print('failed to delete %s' % i)
except BaseException:
pass
# typically the schedulers available are 'noop', 'deadline' and 'cfq'
def build_test_config(fs=default_fs, num_peers=default_peers, cache_size=default_cache, \
test='upload', build='aio', profile='', hash_threads=1, torrent='test.torrent', \
disable_disk = False):
config = {'test': test, 'save-path': os.path.join('./', fs), 'num-peers': num_peers, \
'cache-size': cache_size, 'build': build, 'profile':profile, \
'hash-threads': hash_threads, 'torrent': torrent, 'disable-disk': disable_disk }
return config
def build_test_config(fs=default_fs, num_peers=default_peers, cache_size=default_cache,
test='upload', build='aio', profile='', hash_threads=1, torrent='test.torrent',
disable_disk=False):
config = {'test': test, 'save-path': os.path.join('./', fs), 'num-peers': num_peers,
'cache-size': cache_size, 'build': build, 'profile': profile,
'hash-threads': hash_threads, 'torrent': torrent, 'disable-disk': disable_disk}
return config
def prefix_len(text, prefix):
for i in xrange(1, len(prefix)):
if (not text.startswith(prefix[0:i])): return i-1
return len(prefix)
for i in range(1, len(prefix)):
if (not text.startswith(prefix[0:i])):
return i - 1
return len(prefix)
def device_name(path):
mount = subprocess.Popen('mount', stdout=subprocess.PIPE)
mount = subprocess.Popen('mount', stdout=subprocess.PIPE)
max_match_len = 0
match_device = ''
path = os.path.abspath(path)
max_match_len = 0
match_device = ''
path = os.path.abspath(path)
for mp in mount.stdout.readlines():
c = mp.split(' ')
device = c[0]
mountpoint = c[2]
prefix = prefix_len(path, mountpoint)
if prefix > max_match_len:
max_match_len = prefix
match_device = device
for mp in mount.stdout.readlines():
c = mp.split(' ')
device = c[0]
mountpoint = c[2]
prefix = prefix_len(path, mountpoint)
if prefix > max_match_len:
max_match_len = prefix
match_device = device
device = match_device
device = device.split('/')[-1][0:3]
print('device for path: %s -> %s' % (path, device))
return device
device = match_device
device = device.split('/')[-1][0:3]
print 'device for path: %s -> %s' % (path, device)
return device
def build_target_folder(config):
test = 'seed'
if config['test'] == 'upload': test = 'download'
elif config['test'] == 'dual': test = 'dual'
test = 'seed'
if config['test'] == 'upload':
test = 'download'
elif config['test'] == 'dual':
test = 'dual'
if 'linux' in sys.platform:
io_scheduler = open('/sys/block/%s/queue/scheduler' % device_name(config['save-path'])).read().split('[')[1].split(']')[0]
else:
io_scheduler = sys.platform
if 'linux' in sys.platform:
io_scheduler = open('/sys/block/%s/queue/scheduler' %
device_name(config['save-path'])).read().split('[')[1].split(']')[0]
else:
io_scheduler = sys.platform
no_disk = ''
if config['disable-disk']: no_disk = '_no-disk'
no_disk = ''
if config['disable-disk']:
no_disk = '_no-disk'
return 'results_%s_%s_%d_%d_%s_%s_h%d%s' % (config['build'],
test,
config['num-peers'],
config['cache-size'],
os.path.split(
config['save-path'])[1],
io_scheduler,
config['hash-threads'],
no_disk)
return 'results_%s_%s_%d_%d_%s_%s_h%d%s' % (config['build'], test, config['num-peers'], \
config['cache-size'], os.path.split(config['save-path'])[1], io_scheduler, \
config['hash-threads'], no_disk)
def find_library(name):
paths = ['/usr/lib64/', '/usr/local/lib64/', '/usr/lib/', '/usr/local/lib/']
paths = ['/usr/lib64/', '/usr/local/lib64/', '/usr/lib/', '/usr/local/lib/']
for p in paths:
try:
if os.path.exists(p + name):
return p + name
except BaseException:
pass
return name
for p in paths:
try:
if os.path.exists(p + name): return p + name
except: pass
return name
def find_binary(names):
paths = ['/usr/bin/', '/usr/local/bin/']
for n in names:
for p in paths:
try:
if os.path.exists(p + n): return p + n
except: pass
return names[0]
paths = ['/usr/bin/', '/usr/local/bin/']
for n in names:
for p in paths:
try:
if os.path.exists(p + n):
return p + n
except BaseException:
pass
return names[0]
def run_test(config):
target_folder = build_target_folder(config)
if os.path.exists(target_folder):
print 'results already exists, skipping test (%s)' % target_folder
return
target_folder = build_target_folder(config)
if os.path.exists(target_folder):
print('results already exists, skipping test (%s)' % target_folder)
return
print '\n\n*********************************'
print '* RUNNING TEST *'
print '*********************************\n\n'
print '%s %s' % (config['build'], config['test'])
print('\n\n*********************************')
print('* RUNNING TEST *')
print('*********************************\n\n')
print('%s %s' % (config['build'], config['test']))
# make sure any previous test file is removed
# don't clean up unless we're running a download-test, so that we leave the test file
# complete for a seed test.
delete_files(['utorrent_session/settings.dat', 'utorrent_session/settings.dat.old', 'asserts.log'])
if config['test'] == 'upload' or config['test'] == 'dual':
print 'deleting files'
delete_files([os.path.join(config['save-path'], 'stress_test_file'), '.ses_state', os.path.join(config['save-path'], '.resume'), 'utorrent_session', '.dht_state', 'session_stats', 'rtorrent_session'])
# make sure any previous test file is removed
# don't clean up unless we're running a download-test, so that we leave the test file
# complete for a seed test.
delete_files(['utorrent_session/settings.dat', 'utorrent_session/settings.dat.old', 'asserts.log'])
if config['test'] == 'upload' or config['test'] == 'dual':
print('deleting files')
delete_files([os.path.join(config['save-path'],
'stress_test_file'),
'.ses_state',
os.path.join(config['save-path'],
'.resume'),
'utorrent_session',
'.dht_state',
'session_stats',
'rtorrent_session'])
try: os.mkdir('session_stats')
except: pass
try:
os.mkdir('session_stats')
except BaseException:
pass
# save off the command line for reference
global port
cmdline = build_commandline(config, port)
binary = cmdline.split(' ')[0]
environment = None
if config['profile'] == 'tcmalloc': environment = {'LD_PRELOAD':find_library('libprofiler.so.0'), 'CPUPROFILE': 'session_stats/cpu_profile.prof'}
if config['profile'] == 'memory': environment = {'LD_PRELOAD':find_library('libprofiler.so.0'), 'HEAPPROFILE': 'session_stats/heap_profile.prof'}
if config['profile'] == 'perf': cmdline = 'perf timechart record --call-graph --output=session_stats/perf_profile.prof ' + cmdline
f = open('session_stats/cmdline.txt', 'w+')
f.write(cmdline)
f.close()
# save off the command line for reference
global port
cmdline = build_commandline(config, port)
binary = cmdline.split(' ')[0]
environment = None
if config['profile'] == 'tcmalloc':
environment = {'LD_PRELOAD': find_library('libprofiler.so.0'), 'CPUPROFILE': 'session_stats/cpu_profile.prof'}
if config['profile'] == 'memory':
environment = {'LD_PRELOAD': find_library('libprofiler.so.0'), 'HEAPPROFILE': 'session_stats/heap_profile.prof'}
if config['profile'] == 'perf':
cmdline = 'perf timechart record --call-graph --output=session_stats/perf_profile.prof ' + cmdline
f = open('session_stats/cmdline.txt', 'w+')
f.write(cmdline)
f.close()
f = open('session_stats/config.txt', 'w+')
print >>f, config
f.close()
f = open('session_stats/config.txt', 'w+')
print(config, file=f)
f.close()
print 'clearing disk cache'
clear_caches()
print 'OK'
client_output = open('session_stats/client.output', 'w+')
client_error = open('session_stats/client.error', 'w+')
print 'launching: %s' % cmdline
client = subprocess.Popen(shlex.split(cmdline), stdout=client_output, stdin=subprocess.PIPE, stderr=client_error, env=environment)
print 'OK'
# enable disk stats printing
if config['build'] != 'rtorrent' and config['build'] != 'utorrent':
print >>client.stdin, 'x',
time.sleep(4)
cmdline = './stage_aio/connection_tester %s %d 127.0.0.1 %d %s' % (config['test'], config['num-peers'], port, config['torrent'])
print 'launching: %s' % cmdline
tester_output = open('session_stats/tester.output', 'w+')
tester = subprocess.Popen(shlex.split(cmdline), stdout=tester_output)
print 'OK'
print('clearing disk cache')
clear_caches()
print('OK')
client_output = open('session_stats/client.output', 'w+')
client_error = open('session_stats/client.error', 'w+')
print('launching: %s' % cmdline)
client = subprocess.Popen(
shlex.split(cmdline),
stdout=client_output,
stdin=subprocess.PIPE,
stderr=client_error,
env=environment)
print('OK')
# enable disk stats printing
if config['build'] != 'rtorrent' and config['build'] != 'utorrent':
print('x', end=' ', file=client.stdin)
time.sleep(4)
cmdline = './stage_aio/connection_tester %s %d 127.0.0.1 %d %s' % (
config['test'], config['num-peers'], port, config['torrent'])
print('launching: %s' % cmdline)
tester_output = open('session_stats/tester.output', 'w+')
tester = subprocess.Popen(shlex.split(cmdline), stdout=tester_output)
print('OK')
time.sleep(2)
time.sleep(2)
print '\n'
i = 0
while True:
time.sleep(1)
tester.poll()
if tester.returncode != None:
print 'tester terminated'
break
client.poll()
if client.returncode != None:
print 'client terminated'
break
print '\r%d / %d' % (i, test_duration),
sys.stdout.flush()
i += 1
if config['test'] != 'upload' and config['test'] != 'dual' and i >= test_duration: break
print '\n'
print('\n')
i = 0
while True:
time.sleep(1)
tester.poll()
if tester.returncode is not None:
print('tester terminated')
break
client.poll()
if client.returncode is not None:
print('client terminated')
break
print('\r%d / %d' % (i, test_duration), end=' ')
sys.stdout.flush()
i += 1
if config['test'] != 'upload' and config['test'] != 'dual' and i >= test_duration:
break
print('\n')
if client.returncode == None:
try:
print 'killing client'
client.send_signal(signal.SIGINT)
except:
pass
if client.returncode is None:
try:
print('killing client')
client.send_signal(signal.SIGINT)
except BaseException:
pass
time.sleep(10)
client.wait()
tester.wait()
tester_output.close()
client_output.close()
terminate = False
if tester.returncode != 0:
print 'tester returned %d' % tester.returncode
terminate = True
if client.returncode != 0:
print 'client returned %d' % client.returncode
terminate = True
time.sleep(10)
client.wait()
tester.wait()
tester_output.close()
client_output.close()
terminate = False
if tester.returncode != 0:
print('tester returned %d' % tester.returncode)
terminate = True
if client.returncode != 0:
print('client returned %d' % client.returncode)
terminate = True
try: shutil.copy('asserts.log', 'session_stats/')
except: pass
try:
shutil.copy('asserts.log', 'session_stats/')
except BaseException:
pass
try: shutil.move('libtorrent_logs0', 'session_stats/')
except: pass
try: shutil.move('libtorrent_logs%s' % port, 'session_stats/')
except: pass
try:
shutil.move('libtorrent_logs0', 'session_stats/')
except BaseException:
pass
try:
shutil.move('libtorrent_logs%s' % port, 'session_stats/')
except BaseException:
pass
# run fragmentation test
print 'analyzing fragmentation'
os.system('./stage_aio/fragmentation_test test.torrent %s' % (config['save-path']))
try: shutil.copy('fragmentation.log', 'session_stats/')
except: pass
# run fragmentation test
print('analyzing fragmentation')
os.system('./stage_aio/fragmentation_test test.torrent %s' % (config['save-path']))
try:
shutil.copy('fragmentation.log', 'session_stats/')
except BaseException:
pass
shutil.copy('fragmentation.gnuplot', 'session_stats/')
try: shutil.copy('file_access.log', 'session_stats/')
except: pass
shutil.copy('fragmentation.gnuplot', 'session_stats/')
try:
shutil.copy('file_access.log', 'session_stats/')
except BaseException:
pass
os.system('filefrag %s >session_stats/filefrag.out' % config['save-path'])
os.system('filefrag -v %s >session_stats/filefrag_verbose.out' % config['save-path'])
os.system('filefrag %s >session_stats/filefrag.out' % config['save-path'])
os.system('filefrag -v %s >session_stats/filefrag_verbose.out' % config['save-path'])
os.chdir('session_stats')
os.chdir('session_stats')
# parse session stats
print 'parsing session log'
os.system('python ../../parse_session_stats.py *.0000.log')
os.system('../stage_aio/parse_access_log file_access.log %s' % (os.path.join('..', config['save-path'], 'stress_test_file')))
# parse session stats
print('parsing session log')
os.system('python ../../parse_session_stats.py *.0000.log')
os.system('../stage_aio/parse_access_log file_access.log %s' %
(os.path.join('..', config['save-path'], 'stress_test_file')))
os.chdir('..')
os.chdir('..')
if config['profile'] == 'tcmalloc':
print 'analyzing CPU profile [%s]' % binary
os.system('%s --pdf %s session_stats/cpu_profile.prof >session_stats/cpu_profile.pdf' % (find_binary(['google-pprof', 'pprof']), binary))
if config['profile'] == 'memory':
for i in xrange(1, 300):
profile = 'session_stats/heap_profile.prof.%04d.heap' % i
try: os.stat(profile)
except: break
print 'analyzing heap profile [%s] %d' % (binary, i)
os.system('%s --pdf %s %s >session_stats/heap_profile_%d.pdf' % (find_binary(['google-pprof', 'pprof']), binary, profile, i))
if config['profile'] == 'perf':
print 'analyzing CPU profile [%s]' % binary
os.system('perf timechart --input=session_stats/perf_profile.prof --output=session_stats/profile_timechart.svg')
os.system('perf report --input=session_stats/perf_profile.prof --threads --show-nr-samples --vmlinux vmlinuz-2.6.38-8-generic.bzip >session_stats/profile.txt')
if config['profile'] == 'tcmalloc':
print('analyzing CPU profile [%s]' % binary)
os.system('%s --pdf %s session_stats/cpu_profile.prof >session_stats/cpu_profile.pdf' %
(find_binary(['google-pprof', 'pprof']), binary))
if config['profile'] == 'memory':
for i in range(1, 300):
profile = 'session_stats/heap_profile.prof.%04d.heap' % i
try:
os.stat(profile)
except BaseException:
break
print('analyzing heap profile [%s] %d' % (binary, i))
os.system('%s --pdf %s %s >session_stats/heap_profile_%d.pdf' %
(find_binary(['google-pprof', 'pprof']), binary, profile, i))
if config['profile'] == 'perf':
print('analyzing CPU profile [%s]' % binary)
os.system('perf timechart --input=session_stats/perf_profile.prof --output=session_stats/profile_timechart.svg')
os.system(('perf report --input=session_stats/perf_profile.prof --threads --show-nr-samples '
'--vmlinux vmlinuz-2.6.38-8-generic.bzip >session_stats/profile.txt'))
# move the results into its final place
print 'saving results'
os.rename('session_stats', build_target_folder(config))
# move the results into its final place
print('saving results')
os.rename('session_stats', build_target_folder(config))
port += 1
port += 1
if terminate:
sys.exit(1)
if terminate: sys.exit(1)
for h in range(0, 7):
config = build_test_config(num_peers=30, build='aio', test='upload', torrent='test.torrent', hash_threads=h, disable_disk=True)
run_test(config)
config = build_test_config(
num_peers=30,
build='aio',
test='upload',
torrent='test.torrent',
hash_threads=h,
disable_disk=True)
run_test(config)
sys.exit(0)
for b in ['aio', 'syncio']:
for test in ['dual', 'upload', 'download']:
config = build_test_config(build=b, test=test)
run_test(config)
for test in ['dual', 'upload', 'download']:
config = build_test_config(build=b, test=test)
run_test(config)
sys.exit(0)
for b in builds:
for test in ['upload', 'download']:
config = build_test_config(build=b, test=test)
run_test(config)
for test in ['upload', 'download']:
config = build_test_config(build=b, test=test)
run_test(config)
for p in peers:
for test in ['upload', 'download']:
config = build_test_config(num_peers=p, test=test)
run_test(config)
for test in ['upload', 'download']:
config = build_test_config(num_peers=p, test=test)
run_test(config)
for c in cache_sizes:
for test in ['upload', 'download']:
config = build_test_config(cache_size=c, test=test)
run_test(config)
for test in ['upload', 'download']:
config = build_test_config(cache_size=c, test=test)
run_test(config)
for fs in filesystem:
for test in ['upload', 'download']:
config = build_test_config(fs=fs, test=test)
run_test(config)
for test in ['upload', 'download']:
config = build_test_config(fs=fs, test=test)
run_test(config)

View File

@ -1,28 +1,28 @@
# -*- coding: cp1252 -*-
# <PythonProxy.py>
#
#Copyright (c) <2009> <Fábio Domingues - fnds3000 in gmail.com>
# Copyright (c) <2009> <Fábio Domingues - fnds3000 in gmail.com>
#
#Permission is hereby granted, free of charge, to any person
#obtaining a copy of this software and associated documentation
#files (the "Software"), to deal in the Software without
#restriction, including without limitation the rights to use,
#copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the
#Software is furnished to do so, subject to the following
#conditions:
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
#The above copyright notice and this permission notice shall be
#included in all copies or substantial portions of the Software.
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
#EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
#OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
#NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
#HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
#WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
#FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
#OTHER DEALINGS IN THE SOFTWARE.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
"""\
Copyright (c) <2009> <Fábio Domingues - fnds3000 in gmail.com> <MIT Licence>
@ -81,16 +81,28 @@ Qual a diferença entre um proxy Elite, Anónimo e Transparente?
"""
import socket, thread, select, sys, base64, time, errno
import socket
import select
import sys
import base64
import time
import errno
# Python 3 renamed thread module to _thread
try:
import _thread as thread
except BaseException:
import thread
__version__ = '0.1.0 Draft 1'
BUFLEN = 8192
VERSION = 'Python Proxy/'+__version__
VERSION = 'Python Proxy/' + __version__
HTTPVER = 'HTTP/1.1'
username = None
password = None
class ConnectionHandler:
def __init__(self, connection, address, timeout):
self.client = connection
@ -99,26 +111,26 @@ class ConnectionHandler:
self.method, self.path, self.protocol = self.get_base_header()
global username
global password
if username != None:
if username is not None:
auth = base64.b64encode(username + ':' + password)
if not 'Proxy-Authorization: Basic ' + auth in self.client_buffer:
print 'PROXY - failed authentication: %s' % self.client_buffer
self.client.send(HTTPVER+' 401 Authentication Failed\n'+
'Proxy-agent: %s\n\n'%VERSION)
print('PROXY - failed authentication: %s' % self.client_buffer)
self.client.send(HTTPVER + ' 401 Authentication Failed\n' +
'Proxy-agent: %s\n\n' % VERSION)
self.client.close()
return
try:
if self.method == 'CONNECT':
self.method_CONNECT()
self.method_CONNECT()
elif self.method in ('OPTIONS', 'GET', 'HEAD', 'POST', 'PUT',
'DELETE', 'TRACE'):
self.method_others()
except:
except BaseException:
try:
self.client.send(HTTPVER+' 502 Connection failed\n'+
'Proxy-agent: %s\n\n'%VERSION)
except Exception, e:
print 'PROXY - ', e
self.client.send(HTTPVER + ' 502 Connection failed\n' +
'Proxy-agent: %s\n\n' % VERSION)
except Exception as e:
print('PROXY - ', e)
self.client.close()
return
@ -127,10 +139,10 @@ class ConnectionHandler:
def get_base_header(self):
retries = 0
while 1:
while True:
try:
self.client_buffer += self.client.recv(BUFLEN)
except socket.error, e:
except socket.error as e:
err = e.args[0]
if (err == errno.EAGAIN or err == errno.EWOULDBLOCK) and retries < 20:
time.sleep(0.5)
@ -138,18 +150,18 @@ class ConnectionHandler:
continue
raise e
end = self.client_buffer.find('\r\n\r\n')
if end!=-1:
if end != -1:
break
line_end = self.client_buffer.find('\n')
print 'PROXY - %s' % self.client_buffer[:line_end]#debug
data = (self.client_buffer[:line_end+1]).split()
self.client_buffer = self.client_buffer[line_end+1:]
print('PROXY - %s' % self.client_buffer[:line_end]) # debug
data = (self.client_buffer[:line_end + 1]).split()
self.client_buffer = self.client_buffer[line_end + 1:]
return data
def method_CONNECT(self):
self._connect_target(self.path)
self.client.send(HTTPVER+' 200 Connection established\n'+
'Proxy-agent: %s\n\n'%VERSION)
self.client.send(HTTPVER + ' 200 Connection established\n' +
'Proxy-agent: %s\n\n' % VERSION)
self.client_buffer = ''
self._read_write()
@ -159,15 +171,15 @@ class ConnectionHandler:
host = self.path[:i]
path = self.path[i:]
self._connect_target(host)
self.target.send('%s %s %s\n' % (self.method, path, self.protocol)+
self.target.send('%s %s %s\n' % (self.method, path, self.protocol) +
self.client_buffer)
self.client_buffer = ''
self._read_write()
def _connect_target(self, host):
i = host.find(':')
if i!=-1:
port = int(host[i+1:])
if i != -1:
port = int(host[i + 1:])
host = host[:i]
else:
port = 80
@ -176,10 +188,10 @@ class ConnectionHandler:
self.target.connect(address)
def _read_write(self):
time_out_max = self.timeout/3
time_out_max = self.timeout / 3
socs = [self.client, self.target]
count = 0
while 1:
while True:
count += 1
(recv, _, error) = select.select(socs, [], socs, 3)
if error:
@ -197,37 +209,39 @@ class ConnectionHandler:
if count == time_out_max:
break
def start_server(host='localhost', port=8080, IPv6=False, timeout=100,
handler=ConnectionHandler):
if IPv6==True:
soc_type=socket.AF_INET6
handler=ConnectionHandler):
if IPv6:
soc_type = socket.AF_INET6
else:
soc_type=socket.AF_INET
soc_type = socket.AF_INET
soc = socket.socket(soc_type)
soc.settimeout(120)
print "PROXY - Serving on %s:%d."%(host, port)#debug
print("PROXY - Serving on %s:%d." % (host, port)) # debug
soc.bind((host, port))
soc.listen(0)
while 1:
thread.start_new_thread(handler, soc.accept()+(timeout,))
while True:
thread.start_new_thread(handler, soc.accept() + (timeout,))
if __name__ == '__main__':
listen_port = 8080
i = 1
while i < len(sys.argv):
if sys.argv[i] == '--port':
listen_port = int(sys.argv[i+1])
listen_port = int(sys.argv[i + 1])
i += 1
elif sys.argv[i] == '--username':
username = sys.argv[i+1]
username = sys.argv[i + 1]
i += 1
elif sys.argv[i] == '--password':
password = sys.argv[i+1]
password = sys.argv[i + 1]
i += 1
else:
if sys.argv[i] != '--help': print('PROXY - unknown option "%s"' % sys.argv[i])
if sys.argv[i] != '--help':
print(('PROXY - unknown option "%s"' % sys.argv[i]))
print('usage: http.py [--port <listen-port>]')
sys.exit(1)
i += 1
start_server(port=listen_port)

View File

@ -1,19 +1,27 @@
#!/usr/bin/env python
"""Minimal non-feature complete socks proxy"""
from __future__ import print_function
import random
import socket
from SocketServer import StreamRequestHandler, ThreadingTCPServer
from struct import pack, unpack
import threading
import sys
# Python 3 renamed SocketServer to socketserver
try:
from socketserver import StreamRequestHandler, ThreadingTCPServer
except BaseException:
from SocketServer import StreamRequestHandler, ThreadingTCPServer
def debug(s):
    """Write a debug message for this proxy script to stderr."""
    # stderr, not stdout: the test harness reads stdout for results
    print('socks.py: ', s, file=sys.stderr)
def error(s):
    """Write an error message for this proxy script to stderr."""
    print('socks.py, ERROR: ', s, file=sys.stderr)
class MyTCPServer(ThreadingTCPServer):
allow_reuse_address = True
@ -21,6 +29,7 @@ class MyTCPServer(ThreadingTCPServer):
def handle_timeout(self):
raise Exception('timeout')
CLOSE = object()
VERSION = '\x05'
@ -37,15 +46,19 @@ password = None
username = None
allow_v4 = False
def send(dest, msg):
    """Send *msg* on socket *dest*.

    The CLOSE sentinel half-closes the socket instead of sending data.
    Returns 0 for CLOSE, otherwise the result of sendall() (None).
    """
    if msg == CLOSE:
        # best-effort shutdown: the peer may already have disconnected,
        # in which case shutdown() raises and we just proceed to close()
        try:
            dest.shutdown(socket.SHUT_WR)
        except BaseException:
            pass
        dest.close()
        return 0
    else:
        return dest.sendall(msg)
def recv(source, buffer):
data = source.recv(buffer)
if data == '':
@ -53,6 +66,7 @@ def recv(source, buffer):
else:
return data
def forward(source, dest, name):
while True:
data = recv(source, 4000)
@ -63,11 +77,13 @@ def forward(source, dest, name):
# debug('Forwarding (%d) %r' % (len(data), data))
send(dest, data)
def spawn_forwarder(source, dest, name):
    """Start a daemon thread running forward(source, dest, name)."""
    # daemon=True so a stuck forwarder never blocks interpreter shutdown
    worker = threading.Thread(target=forward, args=(source, dest, name))
    worker.daemon = True
    worker.start()
class SocksHandler(StreamRequestHandler):
"""Highly feature incomplete SOCKS 5 implementation"""
@ -109,7 +125,7 @@ class SocksHandler(StreamRequestHandler):
c = self.read(1)
outbound_sock = socket.socket(socket.AF_INET)
out_address = socket.getaddrinfo(dest_address,dest_port)[0][4]
out_address = socket.getaddrinfo(dest_address, dest_port)[0][4]
debug("Creating forwarder connection to %s:%d" % (out_address[0], out_address[1]))
outbound_sock.connect(out_address)
@ -130,7 +146,7 @@ class SocksHandler(StreamRequestHandler):
global password
global username
if password == None and NOAUTH in method_list:
if password is None and NOAUTH in method_list:
self.send_no_auth_method()
debug('Authenticated (no-auth)')
elif USERPASS in method_list:
@ -173,7 +189,7 @@ class SocksHandler(StreamRequestHandler):
dest_address = '.'.join(map(str, unpack('>4B', raw_dest_address)))
elif address_type == IPV6:
raw_dest_address = self.read(16)
dest_address = ":".join(map(lambda x: hex(x)[2:],unpack('>8H',raw_dest_address)))
dest_address = ":".join([hex(x)[2:] for x in unpack('>8H', raw_dest_address)])
elif address_type == DOMAIN_NAME:
dns_length = ord(self.read(1))
dns_name = self.read(dns_length)
@ -190,21 +206,21 @@ class SocksHandler(StreamRequestHandler):
else:
outbound_sock = socket.socket(socket.AF_INET)
try:
out_address = socket.getaddrinfo(dest_address,dest_port)[0][4]
except Exception, e:
print e
out_address = socket.getaddrinfo(dest_address, dest_port)[0][4]
except Exception as e:
print(e)
return
if cmd == UDP_ASSOCIATE:
debug("no UDP support yet, closing")
return;
debug("no UDP support yet, closing")
return
debug("Creating forwarder connection to %s:%d" % (out_address[0], out_address[1]))
try:
outbound_sock.connect(out_address)
except Exception, e:
print e
except Exception as e:
print(e)
return
if address_type == IPV6:
@ -215,22 +231,25 @@ class SocksHandler(StreamRequestHandler):
spawn_forwarder(outbound_sock, self.request, 'destination')
try:
forward(self.request, outbound_sock, 'client')
except Exception,e:
print e
except Exception as e:
print(e)
def send_reply_v4(self, (bind_addr, bind_port)):
def send_reply_v4(self, xxx_todo_changeme):
(bind_addr, bind_port) = xxx_todo_changeme
self.wfile.write('\0\x5a\0\0\0\0\0\0')
self.wfile.flush()
def send_reply(self, (bind_addr, bind_port)):
def send_reply(self, xxx_todo_changeme1):
(bind_addr, bind_port) = xxx_todo_changeme1
bind_tuple = tuple(map(int, bind_addr.split('.')))
full_address = bind_tuple + (bind_port,)
debug('Setting up forwarding port %r' % (full_address,))
msg = pack('>cccc4BH', VERSION, SUCCESS, '\x00', IPV4, *full_address)
self.wfile.write(msg)
def send_reply6(self, (bind_addr, bind_port, unused1, unused2)):
bind_tuple = tuple(map(lambda x: int(x,16), bind_addr.split(':')))
def send_reply6(self, xxx_todo_changeme2):
(bind_addr, bind_port, unused1, unused2) = xxx_todo_changeme2
bind_tuple = tuple([int(x, 16) for x in bind_addr.split(':')])
full_address = bind_tuple + (bind_port,)
debug('Setting up forwarding port %r' % (full_address,))
msg = pack('>cccc8HH', VERSION, SUCCESS, '\x00', IPV6, *full_address)
@ -252,24 +271,26 @@ class SocksHandler(StreamRequestHandler):
self.wfile.write('\x01\x00')
self.wfile.flush()
if __name__ == '__main__':
listen_port = 8002
i = 1
while i < len(sys.argv):
if sys.argv[i] == '--username':
username = sys.argv[i+1]
username = sys.argv[i + 1]
i += 1
elif sys.argv[i] == '--password':
password = sys.argv[i+1]
password = sys.argv[i + 1]
i += 1
elif sys.argv[i] == '--port':
listen_port = int(sys.argv[i+1])
listen_port = int(sys.argv[i + 1])
i += 1
elif sys.argv[i] == '--allow-v4':
allow_v4 = True
else:
if sys.argv[i] != '--help': debug('unknown option "%s"' % sys.argv[i])
if sys.argv[i] != '--help':
debug('unknown option "%s"' % sys.argv[i])
print('usage: socks.py [--username <user> --password <password>] [--port <listen-port>]')
sys.exit(1)
i += 1
@ -279,4 +300,3 @@ if __name__ == '__main__':
server.timeout = 190
while True:
server.handle_request()

View File

@ -1,194 +1,205 @@
#!/usr/bin/env python
import BaseHTTPServer
import SimpleHTTPServer
import sys
import os
import ssl
import gzip
import base64
# Python 3 has moved {Simple,Base}HTTPServer to http module
try:
# Remove '.' from sys.path or we try to import the http.py module
# which is not what we want.
sys.path = sys.path[1:]
from http.server import HTTPServer, BaseHTTPRequestHandler
except ImportError:
from SimpleHTTPServer import SimpleHTTPRequestHandler as BaseHTTPRequestHandler
from BaseHTTPServer import HTTPServer
chunked_encoding = False
keepalive = True
try:
fin = open('test_file', 'rb')
f = gzip.open('test_file.gz', 'wb')
f.writelines(fin)
f.close()
fin.close()
except:
pass
fin = open('test_file', 'rb')
f = gzip.open('test_file.gz', 'wb')
f.writelines(fin)
f.close()
fin.close()
except BaseException:
pass
class http_server_with_timeout(BaseHTTPServer.HTTPServer):
allow_reuse_address = True
timeout = 190
def handle_timeout(self):
raise Exception('timeout')
class http_server_with_timeout(HTTPServer):
allow_reuse_address = True
timeout = 190
class http_handler(SimpleHTTPServer.SimpleHTTPRequestHandler):
def handle_timeout(self):
raise Exception('timeout')
def do_GET(s):
print 'INCOMING-REQUEST: ', s.requestline
print s.headers
class http_handler(BaseHTTPRequestHandler):
global chunked_encoding
global keepalive
def do_GET(s):
# if the request contains the hostname and port. strip it
if s.path.startswith('http://') or s.path.startswith('https://'):
s.path = s.path[8:]
s.path = s.path[s.path.find('/'):]
print('INCOMING-REQUEST: ', s.requestline)
print(s.headers)
file_path = os.path.normpath(s.path)
print file_path
print s.path
global chunked_encoding
global keepalive
if s.path == '/password_protected':
passed = False
if 'Authorization' in s.headers:
auth = s.headers['Authorization']
passed = auth == 'Basic %s' % base64.b64encode('testuser:testpass')
# if the request contains the hostname and port. strip it
if s.path.startswith('http://') or s.path.startswith('https://'):
s.path = s.path[8:]
s.path = s.path[s.path.find('/'):]
if not passed:
s.send_response(401)
s.send_header("Connection", "close")
s.end_headers()
return
file_path = os.path.normpath(s.path)
print(file_path)
print(s.path)
s.path = '/test_file'
file_path = os.path.normpath('/test_file')
if s.path == '/password_protected':
passed = False
if 'Authorization' in s.headers:
auth = s.headers['Authorization']
passed = auth == 'Basic %s' % base64.b64encode('testuser:testpass')
if s.path == '/redirect':
s.send_response(301)
s.send_header("Location", "/test_file")
s.send_header("Connection", "close")
s.end_headers()
elif s.path == '/infinite_redirect':
s.send_response(301)
s.send_header("Location", "/infinite_redirect")
s.send_header("Connection", "close")
s.end_headers()
elif s.path == '/relative/redirect':
s.send_response(301)
s.send_header("Location", "../test_file")
s.send_header("Connection", "close")
s.end_headers()
elif s.path.startswith('/announce'):
s.send_response(200)
response = 'd8:intervali1800e8:completei1e10:incompletei1e' + \
'12:min intervali' + min_interval + 'e' + \
'5:peers12:AAAABBCCCCDD' + \
'6:peers618:EEEEEEEEEEEEEEEEFF' + \
'e'
s.send_header("Content-Length", "%d" % len(response))
s.send_header("Connection", "close")
s.end_headers()
s.wfile.write(response)
elif os.path.split(s.path)[1].startswith('seed?'):
query = s.path[6:]
args_raw = query.split('&')
args = {}
for a in args_raw:
kvp = a.split('=')
args[kvp[0]] = kvp[1]
piece = int(args['piece'])
ranges = args['ranges'].split('-')
if not passed:
s.send_response(401)
s.send_header("Connection", "close")
s.end_headers()
return
filename = ''
try:
filename = os.path.normpath(s.path[1:s.path.find('seed?') + 4])
print 'filename = %s' % filename
f = open(filename, 'rb')
f.seek(piece * 32 * 1024 + int(ranges[0]))
data = f.read(int(ranges[1]) - int(ranges[0]) + 1)
f.close()
s.path = '/test_file'
file_path = os.path.normpath('/test_file')
s.send_response(200)
print 'sending %d bytes' % len(data)
s.send_header("Content-Length", "%d" % len(data))
s.end_headers()
s.wfile.write(data);
except Exception, e:
print 'FILE ERROR: ', filename, e
s.send_response(404)
s.send_header("Content-Length", "0")
s.end_headers()
else:
filename = ''
try:
filename = os.path.normpath(file_path[1:])
# serve file by invoking default handler
f = open(filename, 'rb')
size = int(os.stat(filename).st_size)
start_range = 0
end_range = size
if 'Range' in s.headers:
s.send_response(206)
st, e = s.headers['range'][6:].split('-', 1)
sl = len(st)
el = len(e)
if sl > 0:
start_range = int(st)
if el > 0:
end_range = int(e) + 1
elif el > 0:
ei = int(e)
if ei < size:
start_range = size - ei
s.send_header('Content-Range', 'bytes ' + str(start_range) \
+ '-' + str(end_range - 1) + '/' + str(size))
else:
s.send_response(200)
s.send_header('Accept-Ranges', 'bytes')
if chunked_encoding:
s.send_header('Transfer-Encoding', 'chunked')
s.send_header('Content-Length', end_range - start_range)
if filename.endswith('.gz'):
s.send_header('Content-Encoding', 'gzip')
if not keepalive:
s.send_header("Connection", "close")
try:
s.request.shutdown();
except Exception, e:
print 'Failed to shutdown read-channel of socket: ', e
if s.path == '/redirect':
s.send_response(301)
s.send_header("Location", "/test_file")
s.send_header("Connection", "close")
s.end_headers()
elif s.path == '/infinite_redirect':
s.send_response(301)
s.send_header("Location", "/infinite_redirect")
s.send_header("Connection", "close")
s.end_headers()
elif s.path == '/relative/redirect':
s.send_response(301)
s.send_header("Location", "../test_file")
s.send_header("Connection", "close")
s.end_headers()
elif s.path.startswith('/announce'):
s.send_response(200)
response = 'd8:intervali1800e8:completei1e10:incompletei1e' + \
'12:min intervali' + min_interval + 'e' + \
'5:peers12:AAAABBCCCCDD' + \
'6:peers618:EEEEEEEEEEEEEEEEFF' + \
'e'
s.send_header("Content-Length", "%d" % len(response))
s.send_header("Connection", "close")
s.end_headers()
s.wfile.write(response)
elif os.path.split(s.path)[1].startswith('seed?'):
query = s.path[6:]
args_raw = query.split('&')
args = {}
for a in args_raw:
kvp = a.split('=')
args[kvp[0]] = kvp[1]
piece = int(args['piece'])
ranges = args['ranges'].split('-')
s.end_headers()
filename = ''
try:
filename = os.path.normpath(s.path[1:s.path.find('seed?') + 4])
print('filename = %s' % filename)
f = open(filename, 'rb')
f.seek(piece * 32 * 1024 + int(ranges[0]))
data = f.read(int(ranges[1]) - int(ranges[0]) + 1)
f.close()
s.send_response(200)
print('sending %d bytes' % len(data))
s.send_header("Content-Length", "%d" % len(data))
s.end_headers()
s.wfile.write(data)
except Exception as e:
print('FILE ERROR: ', filename, e)
s.send_response(404)
s.send_header("Content-Length", "0")
s.end_headers()
else:
filename = ''
try:
filename = os.path.normpath(file_path[1:])
# serve file by invoking default handler
f = open(filename, 'rb')
size = int(os.stat(filename).st_size)
start_range = 0
end_range = size
if 'Range' in s.headers:
s.send_response(206)
st, e = s.headers['range'][6:].split('-', 1)
sl = len(st)
el = len(e)
if sl > 0:
start_range = int(st)
if el > 0:
end_range = int(e) + 1
elif el > 0:
ei = int(e)
if ei < size:
start_range = size - ei
s.send_header('Content-Range', 'bytes ' + str(start_range)
+ '-' + str(end_range - 1) + '/' + str(size))
else:
s.send_response(200)
s.send_header('Accept-Ranges', 'bytes')
if chunked_encoding:
s.send_header('Transfer-Encoding', 'chunked')
s.send_header('Content-Length', end_range - start_range)
if filename.endswith('.gz'):
s.send_header('Content-Encoding', 'gzip')
if not keepalive:
s.send_header("Connection", "close")
try:
s.request.shutdown()
except Exception as e:
print('Failed to shutdown read-channel of socket: ', e)
s.end_headers()
f.seek(start_range)
length = end_range - start_range
while length > 0:
to_send = min(length, 0x900)
if chunked_encoding:
s.wfile.write('%x\r\n' % to_send)
data = f.read(to_send)
print('read %d bytes' % to_send)
s.wfile.write(data)
if chunked_encoding:
s.wfile.write('\r\n')
length -= to_send
print('sent %d bytes (%d bytes left)' % (len(data), length))
if chunked_encoding:
s.wfile.write('0\r\n\r\n')
except Exception as e:
print('FILE ERROR: ', filename, e)
s.send_response(404)
s.send_header("Content-Length", "0")
s.end_headers()
f.seek(start_range)
length = end_range - start_range
while length > 0:
to_send = min(length, 0x900)
if chunked_encoding:
s.wfile.write('%x\r\n' % to_send)
data = f.read(to_send)
print 'read %d bytes' % to_send
s.wfile.write(data)
if chunked_encoding:
s.wfile.write('\r\n')
length -= to_send
print 'sent %d bytes (%d bytes left)' % (len(data), length)
if chunked_encoding:
s.wfile.write('0\r\n\r\n')
except Exception, e:
print 'FILE ERROR: ', filename, e
s.send_response(404)
s.send_header("Content-Length", "0")
s.end_headers()
if __name__ == '__main__':
port = int(sys.argv[1])
chunked_encoding = sys.argv[2] != '0'
use_ssl = sys.argv[3] != '0'
keepalive = sys.argv[4] != '0'
min_interval = sys.argv[5]
port = int(sys.argv[1])
chunked_encoding = sys.argv[2] != '0'
use_ssl = sys.argv[3] != '0'
keepalive = sys.argv[4] != '0'
min_interval = sys.argv[5]
http_handler.protocol_version = 'HTTP/1.1'
httpd = http_server_with_timeout(('127.0.0.1', port), http_handler)
if use_ssl:
httpd.socket = ssl.wrap_socket(httpd.socket, certfile='../ssl/server.pem', server_side=True)
http_handler.protocol_version = 'HTTP/1.1'
httpd = http_server_with_timeout(('127.0.0.1', port), http_handler)
if use_ssl:
httpd.socket = ssl.wrap_socket(httpd.socket, certfile='../ssl/server.pem', server_side=True)
while True:
httpd.handle_request()
while True:
httpd.handle_request()

View File

@ -4,78 +4,79 @@ import os
import shutil
import glob
def clean():
to_delete = [
'session_stats',
'libtorrent_logs*',
'round_trip_ms.log',
'dht.log',
'upnp.log',
'natpmp.log',
'bin',
'build-aux',
'.deps',
'test_tmp_*',
'bjam_build.*.xml'
'*.exe',
'*.pdb',
'*.pyd',
'dist',
'build',
'.libs',
'*.cpp.orig',
'*.cpp.rej',
'*.hpp.orig',
'*.hpp.rej',
'*.hpp.gcov',
'*.cpp.gcov',
'Makefile.in',
'Makefile',
'lib*.a',
'Jamfile.rej',
'Jamfile.orig',
'*.o',
'*.lo',
'autom4te.cache',
'configure',
'config.report',
'config.log',
'.lib',
]
to_delete = [
'session_stats',
'libtorrent_logs*',
'round_trip_ms.log',
'dht.log',
'upnp.log',
'natpmp.log',
'bin',
'build-aux',
'.deps',
'test_tmp_*',
'bjam_build.*.xml'
'*.exe',
'*.pdb',
'*.pyd',
'dist',
'build',
'.libs',
'*.cpp.orig',
'*.cpp.rej',
'*.hpp.orig',
'*.hpp.rej',
'*.hpp.gcov',
'*.cpp.gcov',
'Makefile.in',
'Makefile',
'lib*.a',
'Jamfile.rej',
'Jamfile.orig',
'*.o',
'*.lo',
'autom4te.cache',
'configure',
'config.report',
'config.log',
'.lib',
]
directories = [
'examples',
'test',
'.',
'tools',
'src',
'simulation',
os.path.join('src', 'kademlia'),
os.path.join('include', 'libtorrent'),
os.path.join('include', os.path.join('libtorrent', '_aux')),
os.path.join('include', os.path.join('libtorrent', 'kademlia')),
os.path.join('bindings', 'python'),
os.path.join('bindings', os.path.join('python', 'src')),
os.path.join('bindings', 'c'),
os.path.join('bindings', os.path.join('c', 'src')),
os.path.join('simulation', 'libsimulator')
]
directories = [
'examples',
'test',
'.',
'tools',
'src',
'simulation',
os.path.join('src', 'kademlia'),
os.path.join('include', 'libtorrent'),
os.path.join('include', os.path.join('libtorrent', '_aux')),
os.path.join('include', os.path.join('libtorrent', 'kademlia')),
os.path.join('bindings', 'python'),
os.path.join('bindings', os.path.join('python', 'src')),
os.path.join('bindings', 'c'),
os.path.join('bindings', os.path.join('c', 'src')),
os.path.join('simulation', 'libsimulator')
]
for d in directories:
for f in to_delete:
path = os.path.join(d, f)
entries = glob.glob(path)
for p in entries:
try:
shutil.rmtree(p)
print p
except Exception, e:
try:
os.remove(p)
print p
except Exception, e:
print p, e
for d in directories:
for f in to_delete:
path = os.path.join(d, f)
entries = glob.glob(path)
for p in entries:
try:
shutil.rmtree(p)
print(p)
except Exception as e:
try:
os.remove(p)
print(p)
except Exception as e:
print(p, e)
if __name__ == "__main__":
clean()
if __name__ == "__main__":
clean()

View File

@ -8,30 +8,36 @@ import random
port = int(sys.argv[1])
# from BitTorrent 4.3.0
def encode_bencached(x, r):
    """Append already-bencoded data to *r* verbatim."""
    # x.bencoded holds pre-encoded bytes; no re-encoding needed
    r.append(x.bencoded)
def encode_int(x, r):
    """Append the bencoding of integer *x* ('i<x>e') to list *r*."""
    r += ['i', str(x), 'e']
def encode_string(x, r):
    """Append the bencoding of string *x* ('<len>:<x>') to list *r*."""
    r += [str(len(x)), ':', x]
def encode_list(x, r):
    """Append the bencoding of list *x* to *r*: 'l' + items + 'e'."""
    r.append('l')
    # dispatch each element to its type-specific encoder
    for element in x:
        encode_func[type(element)](element, r)
    r.append('e')
def encode_dict(x, r):
    """Append the bencoding of dict *x* to *r*.

    Keys are emitted in sorted order, as the bencoding format requires.
    """
    r.append('d')
    for k, v in sorted(x.items()):
        r.extend((str(len(k)), ':', k))
        encode_func[type(v)](v, r)
    r.append('e')
encode_func = {}
encode_func[IntType] = encode_int
encode_func[LongType] = encode_int
@ -40,29 +46,32 @@ encode_func[ListType] = encode_list
encode_func[TupleType] = encode_list
encode_func[DictType] = encode_dict
def bencode(x):
    """Return the bencoded representation of *x* as a string."""
    pieces = []
    # the per-type encoders append string fragments into 'pieces'
    encode_func[type(x)](x, pieces)
    return ''.join(pieces)
def send_dht_message(msg):
    """Bencode *msg* and send it once to the DHT node on localhost:port."""
    s.sendto(bencode(msg), 0, ('127.0.0.1', port))
def random_key():
    """Return 20 random byte-valued characters (a random 160-bit key)."""
    return ''.join(chr(random.randint(0, 255)) for _ in range(20))
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
node_id = '1' * 20;
node_id = '1' * 20
query = 'get_peers'
print 'test random info-hashes'
for i in xrange(1, 30000):
send_dht_message({'a': {'id': node_id, 'info_hash': random_key()}, 'q': query, 'y': 'q', 't': '%d' % i})
print 'test random peer-ids'
for i in xrange(1, 30000):
send_dht_message({'a': {'id': random_key(), 'info_hash': random_key()}, 'q': query, 'y': 'q', 't': '%d' % i})
print('test random info-hashes')
for i in range(1, 30000):
send_dht_message({'a': {'id': node_id, 'info_hash': random_key()}, 'q': query, 'y': 'q', 't': '%d' % i})
print('test random peer-ids')
for i in range(1, 30000):
send_dht_message({'a': {'id': random_key(), 'info_hash': random_key()}, 'q': query, 'y': 'q', 't': '%d' % i})

View File

@ -1,9 +1,8 @@
#!/usr/bin/env python
import os
import sys
file_header ='''/*
file_header = '''/*
Copyright (c) 2017, Arvid Norberg
All rights reserved.
@ -52,79 +51,83 @@ namespace lt = libtorrent;
#endif // TORRENT_FWD_HPP
'''
classes = os.popen('git grep "\(TORRENT_EXPORT\|TORRENT_DEPRECATED_EXPORT\|^TORRENT_[A-Z0-9]\+_NAMESPACE\)"').read().split('\n')
classes = os.popen(
r'git grep "\(TORRENT_EXPORT\|TORRENT_DEPRECATED_EXPORT\|^TORRENT_[A-Z0-9]\+_NAMESPACE\)"').read().split('\n')
def print_classes(out, classes, keyword):
current_file = ''
ret = ''
dht_ret = ''
current_file = ''
# [(file, decl), ...]
classes = [(l.split(':')[0].strip(), ':'.join(l.split(':')[1:]).strip()) for l in classes]
# [(file, decl), ...]
classes = [(l.split(':')[0].strip(), ':'.join(l.split(':')[1:]).strip()) for l in classes]
# we only care about header files
# ignore the forward header itself, that's the one we're generating
# also ignore any header in the aux_ directory, those are private
classes = [l for l in classes if l[0].endswith('.hpp') and not l[0].endswith('/fwd.hpp') and '/aux_/' not in l[0]]
# we only care about header files
# ignore the forward header itself, that's the one we're generating
# also ignore any header in the aux_ directory, those are private
classes = [l for l in classes if l[0].endswith('.hpp') and not l[0].endswith('/fwd.hpp') and '/aux_/' not in l[0]]
namespaces = ['TORRENT_VERSION_NAMESPACE_2', 'TORRENT_IPV6_NAMESPACE',
'TORRENT_VERSION_NAMESPACE_2_END', 'TORRENT_IPV6_NAMESPACE_END']
namespaces = ['TORRENT_VERSION_NAMESPACE_2', 'TORRENT_IPV6_NAMESPACE',
'TORRENT_VERSION_NAMESPACE_2_END', 'TORRENT_IPV6_NAMESPACE_END']
# only include classes with the right kind of export
classes = [l for l in classes if l[1] in namespaces or (l[1].split(' ')[0] in ['class', 'struct'] and l[1].split(' ')[1] == keyword)]
# only include classes with the right kind of export
classes = [
l for l in classes if l[1] in namespaces or (
l[1].split(' ')[0] in [
'class',
'struct'] and l[1].split(' ')[1] == keyword)]
# collapse empty namespaces
classes2 = []
skip = 0
for i in xrange(len(classes)):
if skip > 0:
skip -= 1
continue
if classes[i][1] in namespaces \
and len(classes) > i+1 \
and classes[i+1][1] == ('%s_END' % classes[i][1]):
skip = 1
else:
classes2.append(classes[i])
# collapse empty namespaces
classes2 = []
skip = 0
for i in range(len(classes)):
if skip > 0:
skip -= 1
continue
if classes[i][1] in namespaces \
and len(classes) > i + 1 \
and classes[i + 1][1] == ('%s_END' % classes[i][1]):
skip = 1
else:
classes2.append(classes[i])
classes = classes2
classes = classes2
idx = -1
for line in classes:
idx += 1
this_file = line[0]
decl = line[1].split(' ')
idx = -1
for line in classes:
idx += 1
this_file = line[0]
decl = line[1].split(' ')
content = ''
if this_file != current_file:
out.write('\n// ' + this_file + '\n')
current_file = this_file;
if len(decl) > 2 and decl[0] in ['struct', 'class']:
decl = decl[0] + ' ' + decl[2]
if not decl.endswith(';'): decl += ';'
content = decl + '\n'
else:
content = line[1] + '\n'
content = ''
if this_file != current_file:
out.write('\n// ' + this_file + '\n')
current_file = this_file
if len(decl) > 2 and decl[0] in ['struct', 'class']:
decl = decl[0] + ' ' + decl[2]
if not decl.endswith(';'):
decl += ';'
content = decl + '\n'
else:
content = line[1] + '\n'
if 'kademlia' in this_file:
out.write('namespace dht {\n')
out.write(content)
out.write('}\n')
else:
out.write(content)
if 'kademlia' in this_file:
out.write('namespace dht {\n')
out.write(content)
out.write('}\n')
else:
out.write(content)
os.remove('include/libtorrent/fwd.hpp')
with open('include/libtorrent/fwd.hpp', 'w+') as f:
f.write(file_header)
f.write(file_header)
print_classes(f, classes, 'TORRENT_EXPORT');
print_classes(f, classes, 'TORRENT_EXPORT')
f.write('\n#if TORRENT_ABI_VERSION == 1\n')
f.write('\n#if TORRENT_ABI_VERSION == 1\n')
print_classes(f, classes, 'TORRENT_DEPRECATED_EXPORT');
f.write('\n#endif // TORRENT_ABI_VERSION')
f.write(file_footer)
print_classes(f, classes, 'TORRENT_DEPRECATED_EXPORT')
f.write('\n#endif // TORRENT_ABI_VERSION')
f.write(file_footer)

View File

@ -35,53 +35,57 @@ import locale
# Python 2.x/3.x compatibility
if sys.version_info[0] >= 3:
PYTHON_3 = True
def compat_iteritems(x): return x.items() # No iteritems() in Python 3
def compat_itervalues(x): return x.values() # No itervalues() in Python 3
def compat_keys(x): return list(x.keys()) # keys() is a generator in Python 3
basestring = str # No class basestring in Python 3
unichr = chr # No unichr in Python 3
xrange = range # No xrange in Python 3
unichr = chr # No unichr in Python 3
xrange = range # No xrange in Python 3
else:
PYTHON_3 = False
def compat_iteritems(x): return x.iteritems()
def compat_itervalues(x): return x.itervalues()
def compat_keys(x): return x.keys()
try:
# Debugging helper module
import debug
except ImportError:
pass
MULTIPLICATION_SIGN = unichr(0xd7)
def times(x):
    """Format an event count followed by the multiplication sign."""
    return ("%u" % x) + MULTIPLICATION_SIGN
def percentage(p):
return "%.02f%%" % (p*100.0,)
return "%.02f%%" % (p * 100.0,)
def add(a, b):
return a + b
def equal(a, b):
if a == b:
return a
else:
return None
def fail(a, b):
assert False
tol = 2 ** -23
def ratio(numerator, denominator):
try:
ratio = float(numerator)/float(denominator)
ratio = float(numerator) / float(denominator)
except ZeroDivisionError:
# 0/0 is undefined, but 1.0 yields more useful results
return 1.0
@ -110,7 +114,7 @@ class UndefinedEvent(Exception):
class Event(object):
"""Describe a kind of event, and its basic operations."""
def __init__(self, name, null, aggregator, formatter = str):
def __init__(self, name, null, aggregator, formatter=str):
self.name = name
self._null = null
self._aggregator = aggregator
@ -224,7 +228,7 @@ class Function(Object):
self.calls[call.callee_id] = call
def get_call(self, callee_id):
if not callee_id in self.calls:
if callee_id not in self.calls:
call = Call(callee_id)
call[SAMPLES] = 0
call[SAMPLES2] = 0
@ -306,7 +310,9 @@ class Profile(Object):
for callee_id in compat_keys(function.calls):
assert function.calls[callee_id].callee_id == callee_id
if callee_id not in self.functions:
sys.stderr.write('warning: call to undefined function %s from function %s\n' % (str(callee_id), function.name))
sys.stderr.write(
'warning: call to undefined function %s from function %s\n' %
(str(callee_id), function.name))
del function.calls[callee_id]
def find_cycles(self):
@ -368,7 +374,6 @@ class Profile(Object):
pathFunctions[n] = f
self.functions = pathFunctions
def getFunctionId(self, funcName):
for f in self.functions:
if self.functions[f].name == funcName:
@ -496,7 +501,7 @@ class Profile(Object):
assert outevent not in call
assert call.ratio is not None
callee = self.functions[call.callee_id]
subtotal = call.ratio *self._integrate_function(callee, outevent, inevent)
subtotal = call.ratio * self._integrate_function(callee, outevent, inevent)
call[outevent] = subtotal
return subtotal
@ -535,9 +540,10 @@ class Profile(Object):
partials = {}
self._rank_cycle_function(cycle, callee, 0, ranks)
self._call_ratios_cycle(cycle, callee, ranks, call_ratios, set())
partial = self._integrate_cycle_function(cycle, callee, call_ratio, partials, ranks, call_ratios, outevent, inevent)
partial = self._integrate_cycle_function(
cycle, callee, call_ratio, partials, ranks, call_ratios, outevent, inevent)
assert partial == max(partials.values())
assert not total or abs(1.0 - partial/(call_ratio*total)) <= 0.001
assert not total or abs(1.0 - partial / (call_ratio * total)) <= 0.001
return cycle[outevent]
@ -561,20 +567,30 @@ class Profile(Object):
call_ratios[callee] = call_ratios.get(callee, 0.0) + call.ratio
self._call_ratios_cycle(cycle, callee, ranks, call_ratios, visited)
def _integrate_cycle_function(self, cycle, function, partial_ratio, partials, ranks, call_ratios, outevent, inevent):
def _integrate_cycle_function(
self,
cycle,
function,
partial_ratio,
partials,
ranks,
call_ratios,
outevent,
inevent):
if function not in partials:
partial = partial_ratio*function[inevent]
partial = partial_ratio * function[inevent]
for call in compat_itervalues(function.calls):
if call.callee_id != function.id:
callee = self.functions[call.callee_id]
if callee.cycle is not cycle:
assert outevent in call
partial += partial_ratio*call[outevent]
partial += partial_ratio * call[outevent]
else:
if ranks[callee] > ranks[function]:
callee_partial = self._integrate_cycle_function(cycle, callee, partial_ratio, partials, ranks, call_ratios, outevent, inevent)
callee_partial = self._integrate_cycle_function(
cycle, callee, partial_ratio, partials, ranks, call_ratios, outevent, inevent)
call_ratio = ratio(call.ratio, call_ratios[callee])
call_partial = call_ratio*callee_partial
call_partial = call_ratio * callee_partial
try:
call[outevent] += call_partial
except UndefinedEvent:
@ -670,7 +686,7 @@ class Profile(Object):
class Struct:
"""Masquerade a dictionary with a structure-like behavior."""
def __init__(self, attrs = None):
def __init__(self, attrs=None):
if attrs is None:
attrs = {}
self.__dict__['_attrs'] = attrs
@ -761,7 +777,7 @@ XML_ELEMENT_START, XML_ELEMENT_END, XML_CHARACTER_DATA, XML_EOF = range(4)
class XmlToken:
def __init__(self, type, name_or_data, attrs = None, line = None, column = None):
def __init__(self, type, name_or_data, attrs=None, line=None, column=None):
assert type in (XML_ELEMENT_START, XML_ELEMENT_END, XML_CHARACTER_DATA, XML_EOF)
self.type = type
self.name_or_data = name_or_data
@ -784,7 +800,7 @@ class XmlToken:
class XmlTokenizer:
"""Expat based XML tokenizer."""
def __init__(self, fp, skip_ws = True):
def __init__(self, fp, skip_ws=True):
self.fp = fp
self.tokens = []
self.index = 0
@ -795,8 +811,8 @@ class XmlTokenizer:
self.character_data = ''
self.parser = xml.parsers.expat.ParserCreate()
self.parser.StartElementHandler = self.handle_element_start
self.parser.EndElementHandler = self.handle_element_end
self.parser.StartElementHandler = self.handle_element_start
self.parser.EndElementHandler = self.handle_element_end
self.parser.CharacterDataHandler = self.handle_character_data
def handle_element_start(self, name, attributes):
@ -825,7 +841,7 @@ class XmlTokenizer:
self.character_data = ''
def next(self):
size = 16*1024
size = 16 * 1024
while self.index >= len(self.tokens) and not self.final:
self.tokens = []
self.index = 0
@ -834,7 +850,7 @@ class XmlTokenizer:
try:
self.parser.Parse(data, self.final)
except xml.parsers.expat.ExpatError as e:
#if e.code == xml.parsers.expat.errors.XML_ERROR_NO_ELEMENTS:
# if e.code == xml.parsers.expat.errors.XML_ERROR_NO_ELEMENTS:
if e.code == 3:
pass
else:
@ -858,7 +874,8 @@ class XmlTokenMismatch(Exception):
self.found = found
def __str__(self):
return '%u:%u: %s expected, %s found' % (self.found.line, self.found.column, str(self.expected), str(self.found))
return '%u:%u: %s expected, %s found' % (
self.found.line, self.found.column, str(self.expected), str(self.found))
class XmlParser(Parser):
@ -898,7 +915,7 @@ class XmlParser(Parser):
raise XmlTokenMismatch(XmlToken(XML_ELEMENT_END, name), self.token)
self.consume()
def character_data(self, strip = True):
def character_data(self, strip=True):
data = ''
while self.token.type == XML_CHARACTER_DATA:
data += self.token.name_or_data
@ -1095,7 +1112,7 @@ class GprofParser(Parser):
# process call graph entries
entry_lines = []
while line != '\014': # form feed
while line != '\014': # form feed
if line and not line.isspace():
if self._cg_sep_re.match(line):
self.parse_cg_entry(entry_lines)
@ -1227,7 +1244,7 @@ class AXEParser(Parser):
'^-----+ '
)
_cg_footer_re = re.compile('^Index\s+Function\s*$')
_cg_footer_re = re.compile(r'^Index\s+Function\s*$')
_cg_primary_re = re.compile(
r'^\[(?P<index>\d+)\]?' +
@ -1378,7 +1395,8 @@ class AXEParser(Parser):
line = self.readline()
def parse(self):
sys.stderr.write('warning: for axe format, edge weights are unreliable estimates derived from\nfunction total times.\n')
sys.stderr.write(
'warning: for axe format, edge weights are unreliable estimates derived from\nfunction total times.\n')
self.parse_cg()
self.fp.close()
@ -1440,7 +1458,7 @@ class AXEParser(Parser):
for call in compat_itervalues(function.calls):
if call.ratio is not None:
callee = profile.functions[call.callee_id]
call[TOTAL_TIME_RATIO] = call.ratio * callee[TOTAL_TIME_RATIO];
call[TOTAL_TIME_RATIO] = call.ratio * callee[TOTAL_TIME_RATIO]
return profile
@ -1452,7 +1470,7 @@ class CallgrindParser(LineParser):
- http://valgrind.org/docs/manual/cl-format.html
"""
_call_re = re.compile('^calls=\s*(\d+)\s+((\d+|\+\d+|-\d+|\*)\s+)+$')
_call_re = re.compile(r'^calls=\s*(\d+)\s+((\d+|\+\d+|-\d+|\*)\s+)+$')
def __init__(self, infile):
LineParser.__init__(self, infile)
@ -1541,7 +1559,7 @@ class CallgrindParser(LineParser):
if key == 'positions':
self.num_positions = len(items)
self.cost_positions = items
self.last_positions = [0]*self.num_positions
self.last_positions = [0] * self.num_positions
return True
def parse_cost_summary(self):
@ -1560,9 +1578,9 @@ class CallgrindParser(LineParser):
__subpos_re = r'(0x[0-9a-fA-F]+|\d+|\+\d+|-\d+|\*)'
_cost_re = re.compile(r'^' +
__subpos_re + r'( +' + __subpos_re + r')*' +
r'( +\d+)*' +
'$')
__subpos_re + r'( +' + __subpos_re + r')*' +
r'( +\d+)*' +
'$')
def parse_cost_line(self, calls=None):
line = self.lookahead().rstrip()
@ -1584,9 +1602,9 @@ class CallgrindParser(LineParser):
values = line.split()
assert len(values) <= self.num_positions + self.num_events
positions = values[0 : self.num_positions]
events = values[self.num_positions : ]
events += ['0']*(self.num_events - len(events))
positions = values[0: self.num_positions]
events = values[self.num_positions:]
events += ['0'] * (self.num_events - len(events))
for i in range(self.num_positions):
position = positions[i]
@ -1631,14 +1649,13 @@ class CallgrindParser(LineParser):
_, values = line.split('=', 1)
values = values.strip().split()
calls = int(values[0])
call_position = values[1:]
self.consume()
self.parse_cost_line(calls)
return True
_position_re = re.compile('^(?P<position>[cj]?(?:ob|fl|fi|fe|fn))=\s*(?:\((?P<id>\d+)\))?(?:\s*(?P<name>.+))?')
_position_re = re.compile(r'^(?P<position>[cj]?(?:ob|fl|fi|fe|fn))=\s*(?:\((?P<id>\d+)\))?(?:\s*(?P<name>.+))?')
_position_table_map = {
'ob': 'ob',
@ -1730,7 +1747,7 @@ class CallgrindParser(LineParser):
def make_function(self, module, filename, name):
# FIXME: module and filename are not being tracked reliably
#id = '|'.join((module, filename, name))
# id = '|'.join((module, filename, name))
id = name
try:
function = self.profile.functions[id]
@ -1802,7 +1819,7 @@ class PerfParser(LineParser):
for call in compat_itervalues(function.calls):
if call.ratio is not None:
callee = profile.functions[call.callee_id]
call[TOTAL_TIME_RATIO] = call.ratio * callee[TOTAL_TIME_RATIO];
call[TOTAL_TIME_RATIO] = call.ratio * callee[TOTAL_TIME_RATIO]
else:
assert False
@ -1931,8 +1948,6 @@ class OprofileParser(LineParser):
profile = Profile()
reverse_call_samples = {}
# populate the profile
profile[SAMPLES] = 0
for _callers, _function, _callees in compat_itervalues(self.entries):
@ -2016,7 +2031,7 @@ class OprofileParser(LineParser):
if entry.symbol.startswith('"') and entry.symbol.endswith('"'):
entry.symbol = entry.symbol[1:-1]
entry.id = ':'.join((entry.application, entry.image, source, entry.symbol))
entry.self = fields.get('self', None) != None
entry.self = fields.get('self', None) is not None
if entry.self:
entry.id += ':self'
if entry.symbol:
@ -2036,7 +2051,7 @@ class OprofileParser(LineParser):
def match_separator(self):
line = self.lookahead()
return line == '-'*len(line)
return line == '-' * len(line)
def match_primary(self):
line = self.lookahead()
@ -2066,8 +2081,10 @@ class HProfParser(LineParser):
# read lookahead
self.readline()
while not self.lookahead().startswith('------'): self.consume()
while not self.lookahead().startswith('TRACE '): self.consume()
while not self.lookahead().startswith('------'):
self.consume()
while not self.lookahead().startswith('TRACE '):
self.consume()
self.parse_traces()
@ -2084,12 +2101,13 @@ class HProfParser(LineParser):
# build up callgraph
for id, trace in compat_iteritems(self.traces):
if not id in self.samples: continue
if id not in self.samples:
continue
mtime = self.samples[id][0]
last = None
for func, file, line in trace:
if not func in functions:
if func not in functions:
function = Function(func, func)
function[SAMPLES] = 0
profile.add_function(function)
@ -2120,17 +2138,16 @@ class HProfParser(LineParser):
self.parse_trace()
def parse_trace(self):
l = self.consume()
mo = self.trace_id_re.match(l)
consume = self.consume()
mo = self.trace_id_re.match(consume)
tid = mo.group(1)
last = None
trace = []
while self.lookahead().startswith('\t'):
l = self.consume()
match = self.trace_re.search(l)
consume = self.consume()
match = self.trace_re.search(consume)
if not match:
#sys.stderr.write('Invalid line: %s\n' % l)
# sys.stderr.write('Invalid line: %s\n' % consume)
break
else:
function_name, file, line = match.groups()
@ -2280,13 +2297,13 @@ class XPerfParser(Parser):
import csv
reader = csv.reader(
self.stream,
delimiter = ',',
quotechar = None,
escapechar = None,
doublequote = False,
skipinitialspace = True,
lineterminator = '\r\n',
quoting = csv.QUOTE_NONE)
delimiter=',',
quotechar=None,
escapechar=None,
doublequote=False,
skipinitialspace=True,
lineterminator='\r\n',
quoting=csv.QUOTE_NONE)
header = True
for row in reader:
if header:
@ -2548,7 +2565,6 @@ class AQtimeParser(XmlParser):
return table
def parse_data(self):
rows = []
attrs = self.element_start('DATA')
table_id = int(attrs['TABLE_ID'])
table_name, field_types, field_names = self.tables[table_id]
@ -2560,7 +2576,7 @@ class AQtimeParser(XmlParser):
return table
def parse_row(self, field_types):
row = [None]*len(field_types)
row = [None] * len(field_types)
children = []
self.element_start('ROW')
while self.token.type == XML_ELEMENT_START:
@ -2622,16 +2638,16 @@ class AQtimeParser(XmlParser):
function = Function(self.build_id(fields), self.build_name(fields))
function[TIME] = fields['Time']
function[TOTAL_TIME] = fields['Time with Children']
#function[TIME_RATIO] = fields['% Time']/100.0
#function[TOTAL_TIME_RATIO] = fields['% with Children']/100.0
# function[TIME_RATIO] = fields['% Time']/100.0
# function[TOTAL_TIME_RATIO] = fields['% with Children']/100.0
return function
def build_call(self, fields):
call = Call(self.build_id(fields))
call[TIME] = fields['Time']
call[TOTAL_TIME] = fields['Time with Children']
#call[TIME_RATIO] = fields['% Time']/100.0
#call[TOTAL_TIME_RATIO] = fields['% with Children']/100.0
# call[TIME_RATIO] = fields['% Time']/100.0
# call[TOTAL_TIME_RATIO] = fields['% with Children']/100.0
return call
def build_id(self, fields):
@ -2694,7 +2710,7 @@ class PstatsParser:
call = Call(callee.id)
if isinstance(value, tuple):
for i in xrange(0, len(value), 4):
nc, cc, tt, ct = value[i:i+4]
nc, cc, tt, ct = value[i:i + 4]
if CALLS in call:
call[CALLS] += cc
else:
@ -2707,11 +2723,11 @@ class PstatsParser:
else:
call[CALLS] = value
call[TOTAL_TIME] = ratio(value, nc)*ct
call[TOTAL_TIME] = ratio(value, nc) * ct
caller.add_call(call)
#self.stats.print_stats()
#self.stats.print_callees()
# self.stats.print_stats()
# self.stats.print_callees()
# Compute derived events
self.profile.validate()
@ -2724,18 +2740,18 @@ class PstatsParser:
class Theme:
def __init__(self,
bgcolor = (0.0, 0.0, 1.0),
mincolor = (0.0, 0.0, 0.0),
maxcolor = (0.0, 0.0, 1.0),
fontname = "Arial",
fontcolor = "white",
nodestyle = "filled",
minfontsize = 10.0,
maxfontsize = 10.0,
minpenwidth = 0.5,
maxpenwidth = 4.0,
gamma = 2.2,
skew = 1.0):
bgcolor=(0.0, 0.0, 1.0),
mincolor=(0.0, 0.0, 0.0),
maxcolor=(0.0, 0.0, 1.0),
fontname="Arial",
fontcolor="white",
nodestyle="filled",
minfontsize=10.0,
maxfontsize=10.0,
minpenwidth=0.5,
maxpenwidth=4.0,
gamma=2.2,
skew=1.0):
self.bgcolor = bgcolor
self.mincolor = mincolor
self.maxcolor = maxcolor
@ -2783,7 +2799,7 @@ class Theme:
return self.fontsize(weight)
def edge_penwidth(self, weight):
return max(weight*self.maxpenwidth, self.minpenwidth)
return max(weight * self.maxpenwidth, self.minpenwidth)
def edge_arrowsize(self, weight):
return 0.5 * math.sqrt(self.edge_penwidth(weight))
@ -2800,14 +2816,14 @@ class Theme:
if self.skew < 0:
raise ValueError("Skew must be greater than 0")
elif self.skew == 1.0:
h = hmin + weight*(hmax - hmin)
s = smin + weight*(smax - smin)
l = lmin + weight*(lmax - lmin)
h = hmin + weight * (hmax - hmin)
s = smin + weight * (smax - smin)
l = lmin + weight * (lmax - lmin) # noqa
else:
base = self.skew
h = hmin + ((hmax-hmin)*(-1.0 + (base ** weight)) / (base - 1.0))
s = smin + ((smax-smin)*(-1.0 + (base ** weight)) / (base - 1.0))
l = lmin + ((lmax-lmin)*(-1.0 + (base ** weight)) / (base - 1.0))
h = hmin + ((hmax - hmin) * (-1.0 + (base ** weight)) / (base - 1.0))
s = smin + ((smax - smin) * (-1.0 + (base ** weight)) / (base - 1.0))
l = lmin + ((lmax - lmin) * (-1.0 + (base ** weight)) / (base - 1.0)) # noqa
return self.hsl_to_rgb(h, s, l)
@ -2820,16 +2836,16 @@ class Theme:
h = h % 1.0
s = min(max(s, 0.0), 1.0)
l = min(max(l, 0.0), 1.0)
l = min(max(l, 0.0), 1.0) # noqa
if l <= 0.5:
m2 = l*(s + 1.0)
if l <= 0.5: # noqa
m2 = l * (s + 1.0)
else:
m2 = l + s - l*s
m1 = l*2.0 - m2
r = self._hue_to_rgb(m1, m2, h + 1.0/3.0)
m2 = l + s - l * s
m1 = l * 2.0 - m2
r = self._hue_to_rgb(m1, m2, h + 1.0 / 3.0)
g = self._hue_to_rgb(m1, m2, h)
b = self._hue_to_rgb(m1, m2, h - 1.0/3.0)
b = self._hue_to_rgb(m1, m2, h - 1.0 / 3.0)
# Apply gamma correction
r **= self.gamma
@ -2843,50 +2859,50 @@ class Theme:
h += 1.0
elif h > 1.0:
h -= 1.0
if h*6 < 1.0:
return m1 + (m2 - m1)*h*6.0
elif h*2 < 1.0:
if h * 6 < 1.0:
return m1 + (m2 - m1) * h * 6.0
elif h * 2 < 1.0:
return m2
elif h*3 < 2.0:
return m1 + (m2 - m1)*(2.0/3.0 - h)*6.0
elif h * 3 < 2.0:
return m1 + (m2 - m1) * (2.0 / 3.0 - h) * 6.0
else:
return m1
TEMPERATURE_COLORMAP = Theme(
mincolor = (2.0/3.0, 0.80, 0.25), # dark blue
maxcolor = (0.0, 1.0, 0.5), # satured red
gamma = 1.0
mincolor=(2.0 / 3.0, 0.80, 0.25), # dark blue
maxcolor=(0.0, 1.0, 0.5), # satured red
gamma=1.0
)
PINK_COLORMAP = Theme(
mincolor = (0.0, 1.0, 0.90), # pink
maxcolor = (0.0, 1.0, 0.5), # satured red
mincolor=(0.0, 1.0, 0.90), # pink
maxcolor=(0.0, 1.0, 0.5), # satured red
)
GRAY_COLORMAP = Theme(
mincolor = (0.0, 0.0, 0.85), # light gray
maxcolor = (0.0, 0.0, 0.0), # black
mincolor=(0.0, 0.0, 0.85), # light gray
maxcolor=(0.0, 0.0, 0.0), # black
)
BW_COLORMAP = Theme(
minfontsize = 8.0,
maxfontsize = 24.0,
mincolor = (0.0, 0.0, 0.0), # black
maxcolor = (0.0, 0.0, 0.0), # black
minpenwidth = 0.1,
maxpenwidth = 8.0,
minfontsize=8.0,
maxfontsize=24.0,
mincolor=(0.0, 0.0, 0.0), # black
maxcolor=(0.0, 0.0, 0.0), # black
minpenwidth=0.1,
maxpenwidth=8.0,
)
PRINT_COLORMAP = Theme(
minfontsize = 18.0,
maxfontsize = 30.0,
fontcolor = "black",
nodestyle = "solid",
mincolor = (0.0, 0.0, 0.0), # black
maxcolor = (0.0, 0.0, 0.0), # black
minpenwidth = 0.1,
maxpenwidth = 8.0,
minfontsize=18.0,
maxfontsize=30.0,
fontcolor="black",
nodestyle="solid",
mincolor=(0.0, 0.0, 0.0), # black
maxcolor=(0.0, 0.0, 0.0), # black
minpenwidth=0.1,
maxpenwidth=8.0,
)
@ -2908,16 +2924,16 @@ class DotWriter:
"""Split the function name on multiple lines."""
if len(name) > 32:
ratio = 2.0/3.0
height = max(int(len(name)/(1.0 - ratio) + 0.5), 1)
width = max(len(name)/height, 32)
ratio = 2.0 / 3.0
height = max(int(len(name) / (1.0 - ratio) + 0.5), 1)
width = max(len(name) / height, 32)
# TODO: break lines in symbols
name = textwrap.fill(name, width, break_long_words=False)
# Take away spaces
name = name.replace(", ", ",")
name = name.replace("> >", ">>")
name = name.replace("> >", ">>") # catch consecutive
name = name.replace("> >", ">>") # catch consecutive
return name
@ -2964,11 +2980,11 @@ class DotWriter:
label = '\n'.join(labels)
self.node(function.id,
label = label,
color = self.color(theme.node_bgcolor(weight)),
fontcolor = self.color(theme.node_fgcolor(weight)),
fontsize = "%.2f" % theme.node_fontsize(weight),
)
label=label,
color=self.color(theme.node_bgcolor(weight)),
fontcolor=self.color(theme.node_fgcolor(weight)),
fontsize="%.2f" % theme.node_fontsize(weight),
)
for call in compat_itervalues(function.calls):
callee = profile.functions[call.callee_id]
@ -2989,14 +3005,14 @@ class DotWriter:
label = '\n'.join(labels)
self.edge(function.id, call.callee_id,
label = label,
color = self.color(theme.edge_color(weight)),
fontcolor = self.color(theme.edge_color(weight)),
fontsize = "%.2f" % theme.edge_fontsize(weight),
penwidth = "%.2f" % theme.edge_penwidth(weight),
labeldistance = "%.2f" % theme.edge_penwidth(weight),
arrowsize = "%.2f" % theme.edge_arrowsize(weight),
)
label=label,
color=self.color(theme.edge_color(weight)),
fontcolor=self.color(theme.edge_color(weight)),
fontsize="%.2f" % theme.edge_fontsize(weight),
penwidth="%.2f" % theme.edge_penwidth(weight),
labeldistance="%.2f" % theme.edge_penwidth(weight),
arrowsize="%.2f" % theme.edge_arrowsize(weight),
)
self.end_graph()
@ -3061,7 +3077,7 @@ class DotWriter:
return 0
if f >= 1.0:
return 255
return int(255.0*f + 0.5)
return int(255.0 * f + 0.5)
return "#" + "".join(["%02x" % float2int(c) for c in (r, g, b)])
@ -3082,11 +3098,11 @@ class Main:
"""Main program."""
themes = {
"color": TEMPERATURE_COLORMAP,
"pink": PINK_COLORMAP,
"gray": GRAY_COLORMAP,
"bw": BW_COLORMAP,
"print": PRINT_COLORMAP,
"color": TEMPERATURE_COLORMAP,
"pink": PINK_COLORMAP,
"gray": GRAY_COLORMAP,
"bw": BW_COLORMAP,
"print": PRINT_COLORMAP,
}
formats = {
@ -3115,8 +3131,7 @@ class Main:
global totalMethod
formatNames = list(self.formats.keys())
formatNames.sort()
formatNames = sorted(self.formats.keys())
optparser = optparse.OptionParser(
usage="\n\t%prog [options] [file] ...")
@ -3139,19 +3154,27 @@ class Main:
help="profile format: %s [default: %%default]" % self.naturalJoin(formatNames))
optparser.add_option(
'--total',
type="choice", choices=('callratios', 'callstacks'),
dest="totalMethod", default=totalMethod,
help="preferred method of calculating total time: callratios or callstacks (currently affects only perf format) [default: %default]")
type="choice",
choices=(
'callratios',
'callstacks'),
dest="totalMethod",
default=totalMethod,
help=("preferred method of calculating total time: callratios or callstacks"
" (currently affects only perf format) [default: %default]"))
optparser.add_option(
'-c', '--colormap',
type="choice", choices=('color', 'pink', 'gray', 'bw', 'print'),
dest="theme", default="color",
help="color map: color, pink, gray, bw, or print [default: %default]")
optparser.add_option(
'-s', '--strip',
'-s',
'--strip',
action="store_true",
dest="strip", default=False,
help="strip function parameters, template parameters, and const modifiers from demangled C++ function names")
dest="strip",
default=False,
help=("strip function parameters, template parameters, and const modifiers from"
" demangled C++ function names"))
optparser.add_option(
'-w', '--wrap',
action="store_true",
@ -3176,8 +3199,11 @@ class Main:
# add a new option to control skew of the colorization curve
optparser.add_option(
'--skew',
type="float", dest="theme_skew", default=1.0,
help="skew the colorization curve. Values < 1.0 give more variety to lower percentages. Values > 1.0 give less variety to lower percentages")
type="float",
dest="theme_skew",
default=1.0,
help=("skew the colorization curve. Values < 1.0 give more variety to lower"
" percentages. Values > 1.0 give less variety to lower percentages"))
(self.options, self.args) = optparser.parse_args(sys.argv[1:])
if len(self.args) > 1 and self.options.format != 'pstats':
@ -3234,18 +3260,24 @@ class Main:
dot.show_function_events.append(SAMPLES)
profile = self.profile
profile.prune(self.options.node_thres/100.0, self.options.edge_thres/100.0)
profile.prune(self.options.node_thres / 100.0, self.options.edge_thres / 100.0)
if self.options.root:
rootId = profile.getFunctionId(self.options.root)
if not rootId:
sys.stderr.write('root node ' + self.options.root + ' not found (might already be pruned : try -e0 -n0 flags)\n')
sys.stderr.write(
'root node ' +
self.options.root +
' not found (might already be pruned : try -e0 -n0 flags)\n')
sys.exit(1)
profile.prune_root(rootId)
if self.options.leaf:
leafId = profile.getFunctionId(self.options.leaf)
if not leafId:
sys.stderr.write('leaf node ' + self.options.leaf + ' not found (maybe already pruned : try -e0 -n0 flags)\n')
sys.stderr.write(
'leaf node ' +
self.options.leaf +
' not found (maybe already pruned : try -e0 -n0 flags)\n')
sys.exit(1)
profile.prune_leaf(leafId)

View File

@ -1,24 +1,26 @@
#!/usr/bin/env python
import os, sys, time
from __future__ import print_function
import os
import sys
keys = [['upload rate', 'x1y1', 6], ['history entries', 'x1y2', 10], ['queue', 'x1y2', 4]]
out = open('bandwidth.gnuplot', 'wb')
print >>out, "set term png size 1200,700"
print >>out, 'set output "bandwidth_manager.png"'
print >>out, 'set xrange [0:*]'
print >>out, 'set xlabel "time (ms)"'
print >>out, 'set ylabel "Rate (B/s)"'
print >>out, 'set ytics 10000'
print >>out, 'set y2label "number"'
print >>out, 'set y2range [0:*]'
#print >>out, "set style data lines"
print >>out, "set key box"
print >>out, 'plot',
print("set term png size 1200,700", file=out)
print('set output "bandwidth_manager.png"', file=out)
print('set xrange [0:*]', file=out)
print('set xlabel "time (ms)"', file=out)
print('set ylabel "Rate (B/s)"', file=out)
print('set ytics 10000', file=out)
print('set y2label "number"', file=out)
print('set y2range [0:*]', file=out)
# print("set style data lines", file=out)
print("set key box", file=out)
print('plot', end=' ', file=out)
for k, a, c in keys:
print >>out, ' "%s" using 1:%d title "%s" axes %s with steps,' % (sys.argv[1], c, k, a),
print >>out, 'x=0'
print(' "%s" using 1:%d title "%s" axes %s with steps,' % (sys.argv[1], c, k, a), end=' ', file=out)
print('x=0', file=out)
out.close()
os.system('gnuplot bandwidth.gnuplot');
os.system('gnuplot bandwidth.gnuplot')

View File

@ -3,14 +3,18 @@
# subject to the Boost Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
import os, sys, time
from __future__ import print_function
import os
import sys
lines = open(sys.argv[1], 'rb').readlines()
#keys = ['send_buffer_utilization']
# keys = ['send_buffer_utilization']
keys = ['send_buffer_size', 'used_send_buffer', 'protocol_buffer']
#keys = ['send_buffer_alloc', 'send_buffer', 'allocate_buffer_alloc', 'allocate_buffer', 'protocol_buffer']
#keys = ['send_buffer_alloc', 'send_buffer', 'allocate_buffer_alloc', 'allocate_buffer', 'protocol_buffer', 'append_send_buffer']
# keys = ['send_buffer_alloc', 'send_buffer', 'allocate_buffer_alloc', 'allocate_buffer', 'protocol_buffer']
# keys = ['send_buffer_alloc', 'send_buffer', 'allocate_buffer_alloc', 'allocate_buffer', 'protocol_buffer',
# 'append_send_buffer']
average = ['send_buffer_utilization', 'send_buffer_size', 'used_send_buffer']
average_interval = 120000
@ -18,7 +22,7 @@ render = 'lines'
time_limit = -1
if len(sys.argv) > 2:
time_limit = long(sys.argv[2])
time_limit = int(sys.argv[2])
# logfile format:
@ -27,64 +31,68 @@ if len(sys.argv) > 2:
# 16434 allocate_buffer: 17
for k in keys:
last_sample = 0
average_accumulator = 0
average_samples = 0
peak = 0
last_sample = 0
average_accumulator = 0
average_samples = 0
peak = 0
out = open(k + '.dat', 'wb')
eval_average = False
if k in average:
eval_average = True
peak_out = open(k + '_peak.dat', 'wb')
out = open(k + '.dat', 'wb')
eval_average = False
if k in average:
eval_average = True
peak_out = open(k + '_peak.dat', 'wb')
for l in lines:
l = l.split(' ')
if len(l) != 3:
print l
continue
try:
if l[1] == k + ':':
if time_limit != -1 and long(l[0]) > time_limit: break
time = l[0]
value = l[2]
if eval_average:
while long(time) > last_sample + average_interval:
last_sample = last_sample + average_interval
if average_samples < 1: average_samples = 1
print >>out, '%d %f' % (last_sample, average_accumulator / average_samples)
print >>peak_out, '%d %f' % (last_sample, peak)
average_accumulator = 0
average_samples = 0
peak = 0
average_accumulator = average_accumulator + float(value)
average_samples = average_samples + 1
if float(value) > peak: peak = float(value)
else:
print >>out, time + ' ' + value,
except:
print l
for line in lines:
line = line.split(' ')
if len(line) != 3:
print(line)
continue
try:
if line[1] == k + ':':
if time_limit != -1 and int(line[0]) > time_limit:
break
time = line[0]
value = line[2]
if eval_average:
while int(time) > last_sample + average_interval:
last_sample = last_sample + average_interval
if average_samples < 1:
average_samples = 1
print('%d %f' % (last_sample, average_accumulator / average_samples), file=out)
print('%d %f' % (last_sample, peak), file=peak_out)
average_accumulator = 0
average_samples = 0
peak = 0
average_accumulator = average_accumulator + float(value)
average_samples = average_samples + 1
if float(value) > peak:
peak = float(value)
else:
print(time + ' ' + value, end=' ', file=out)
except BaseException:
print(line)
out.close()
peak_out.close()
out.close()
peak_out.close()
out = open('send_buffer.gnuplot', 'wb')
print >>out, "set term png size 1200,700"
print >>out, 'set output "send_buffer.png"'
print >>out, 'set xrange [0:*]'
print >>out, 'set xlabel "time (ms)"'
print >>out, 'set ylabel "bytes (B)"'
print >>out, "set style data lines"
print >>out, "set key box"
print >>out, 'plot',
print("set term png size 1200,700", file=out)
print('set output "send_buffer.png"', file=out)
print('set xrange [0:*]', file=out)
print('set xlabel "time (ms)"', file=out)
print('set ylabel "bytes (B)"', file=out)
print("set style data lines", file=out)
print("set key box", file=out)
print('plot', end=' ', file=out)
for k in keys:
if k in average:
print >>out, ' "%s.dat" using 1:2 title "%s %d seconds average" with %s,' % (k, k, average_interval / 1000., render),
print >>out, ' "%s_peak.dat" using 1:2 title "%s %d seconds peak" with %s,' % (k, k, average_interval / 1000., render),
else:
print >>out, ' "%s.dat" using 1:2 title "%s" with %s,' % (k, k, render),
print >>out, 'x=0'
if k in average:
print(' "%s.dat" using 1:2 title "%s %d seconds average" with %s,' %
(k, k, average_interval / 1000., render), end=' ', file=out)
print(' "%s_peak.dat" using 1:2 title "%s %d seconds peak" with %s,' %
(k, k, average_interval / 1000., render), end=' ', file=out)
else:
print(' "%s.dat" using 1:2 title "%s" with %s,' % (k, k, render), end=' ', file=out)
print('x=0', file=out)
out.close()
os.system('gnuplot send_buffer.gnuplot')

View File

@ -1,8 +1,8 @@
#!/usr/bin/env python
from __future__ import print_function
import sys
import os
import time
import calendar
import pprint
pp = pprint.PrettyPrinter(indent=4)
@ -13,11 +13,11 @@ f = open(sys.argv[1])
announce_histogram = {}
#TODO: make this histogram into a CDF
# TODO: make this histogram into a CDF
node_uptime_histogram = {}
counter = 0;
counter = 0
# maps search_id to a list of events. Each event is a dict containing:
# t: timestamp
@ -32,10 +32,12 @@ outstanding_searches = {}
# list of completed searches
searches = []
def convert_timestamp(t):
parts = t.split('.')
hms = parts[0].split(':')
return (int(hms[0]) * 3600 + int(hms[1]) * 60 + int(hms[2])) * 1000 + int(parts[1])
parts = t.split('.')
hms = parts[0].split(':')
return (int(hms[0]) * 3600 + int(hms[1]) * 60 + int(hms[2])) * 1000 + int(parts[1])
last_incoming = ''
@ -46,111 +48,117 @@ client_version_histogram = {}
client_histogram = {}
for line in f:
counter += 1
# if counter % 1000 == 0:
# print '\r%d' % counter,
try:
l = line.split(' ')
if 'starting DHT tracker with node id:' in line:
our_node_id = l[l.index('id:') + 1].strip()
counter += 1
# if counter % 1000 == 0:
# print '\r%d' % counter,
try:
ls = line.split(' ')
if 'starting DHT tracker with node id:' in line:
our_node_id = ls[ls.index('id:') + 1].strip()
try:
if len(l) > 4 and l[2] == '<==' and l[1] == '[dht_tracker]':
ip = l[3].split(':')[0]
if ip not in unique_ips:
unique_ips.add(ip)
json_blob = line.split(l[3])[1]
version = json_blob.split("'v': '")[1].split("'")[0]
if len(version) == 4:
v = '%s-%d' % (version[0:2], (ord(version[2]) << 8) + ord(version[3]))
elif len(version) == 8:
v = '%c%c-%d' % (chr(int(version[0:2], 16)), chr(int(version[2:4], 16)), int(version[4:8], 16))
else:
v = 'unknown'
try:
if len(ls) > 4 and ls[2] == '<==' and ls[1] == '[dht_tracker]':
ip = ls[3].split(':')[0]
if ip not in unique_ips:
unique_ips.add(ip)
json_blob = line.split(ls[3])[1]
version = json_blob.split("'v': '")[1].split("'")[0]
if len(version) == 4:
v = '%s-%d' % (version[0:2], (ord(version[2]) << 8) + ord(version[3]))
elif len(version) == 8:
v = '%c%c-%d' % (chr(int(version[0:2], 16)), chr(int(version[2:4], 16)), int(version[4:8], 16))
else:
v = 'unknown'
if not v in client_version_histogram:
client_version_histogram[v] = 1
else:
client_version_histogram[v] += 1
if v not in client_version_histogram:
client_version_histogram[v] = 1
else:
client_version_histogram[v] += 1
if not v[0:2] in client_histogram:
client_histogram[v[0:2]] = 1
else:
client_histogram[v[0:2]] += 1
except: pass
if not v[0:2] in client_histogram:
client_histogram[v[0:2]] = 1
else:
client_histogram[v[0:2]] += 1
except BaseException:
pass
if 'announce-distance:' in line:
idx = l.index('announce-distance:')
if 'announce-distance:' in line:
idx = ls.index('announce-distance:')
d = int(l[idx+1].strip())
if not d in announce_histogram: announce_histogram[d] = 0
announce_histogram[d] += 1
if 'NODE FAILED' in line:
idx = l.index('fails:')
if int(l[idx+1].strip()) != 1: continue;
idx = l.index('up-time:')
d = int(l[idx+1].strip())
# quantize
d = d - (d % up_time_quanta)
if not d in node_uptime_histogram: node_uptime_histogram[d] = 0
node_uptime_histogram[d] += 1
d = int(ls[idx + 1].strip())
if d not in announce_histogram:
announce_histogram[d] = 0
announce_histogram[d] += 1
if 'NODE FAILED' in line:
idx = ls.index('fails:')
if int(ls[idx + 1].strip()) != 1:
continue
idx = ls.index('up-time:')
d = int(ls[idx + 1].strip())
# quantize
d = d - (d % up_time_quanta)
if d not in node_uptime_histogram:
node_uptime_histogram[d] = 0
node_uptime_histogram[d] += 1
search_id = l[2]
ts = l[0]
event = l[3]
search_id = ls[2]
ts = ls[0]
event = ls[3]
if event == 'RESPONSE':
outstanding = int(l[l.index('invoke-count:')+1])
nid = l[l.index('id:')+1]
addr = l[l.index('addr:')+1]
last_response = addr
outstanding_searches[search_id].append({ 't': ts, 'd': distance,
'o': outstanding + 1, 'a':addr, 'e': event,'i':nid, 's':source})
elif event == 'NEW':
nid = l[l.index('target:')+1]
outstanding_searches[search_id] = [{ 't': ts, 'd': 0, 'o': 0, \
'e': event, 'abstime': ts, 'i': nid}]
last_response = ''
elif event == 'INVOKE' or event == 'ADD' or event == '1ST_TIMEOUT' or \
event == 'TIMEOUT' or event == 'PEERS':
if not search_id in outstanding_searches:
print 'orphaned event: %s' % line
else:
outstanding = int(l[l.index('invoke-count:')+1])
distance = int(l[l.index('distance:')+1])
nid = l[l.index('id:')+1]
addr = l[l.index('addr:')+1]
source = ''
if event == 'ADD':
if last_response == '': continue
source = last_response
if event == 'RESPONSE':
outstanding = int(ls[ls.index('invoke-count:') + 1])
distance = int(ls[ls.index('distance:') + 1])
nid = ls[ls.index('id:') + 1]
addr = ls[ls.index('addr:') + 1]
last_response = addr
outstanding_searches[search_id].append({'t': ts, 'd': distance,
'o': outstanding + 1, 'a': addr, 'e': event, 'i': nid})
elif event == 'NEW':
nid = ls[ls.index('target:') + 1]
outstanding_searches[search_id] = [{'t': ts, 'd': 0, 'o': 0,
'e': event, 'abstime': ts, 'i': nid}]
last_response = ''
elif event == 'INVOKE' or event == 'ADD' or event == '1ST_TIMEOUT' or \
event == 'TIMEOUT' or event == 'PEERS':
if search_id not in outstanding_searches:
print('orphaned event: %s' % line)
else:
outstanding = int(ls[ls.index('invoke-count:') + 1])
distance = int(ls[ls.index('distance:') + 1])
nid = ls[ls.index('id:') + 1]
addr = ls[ls.index('addr:') + 1]
source = ''
if event == 'ADD':
if last_response == '':
continue
source = last_response
outstanding_searches[search_id].append({ 't': ts, 'd': distance,
'o': outstanding + 1, 'a':addr, 'e': event,'i':nid, 's':source})
elif event == 'ABORTED':
outstanding_searches[search_id].append({ 't': ts, 'e': event})
elif event == 'COMPLETED':
distance = int(l[l.index('distance:')+1])
lookup_type = l[l.index('type:')+1].strip()
outstanding_searches[search_id].append({ 't': ts, 'd': distance,
'o': 0, 'e': event,'i':''})
outstanding_searches[search_id].append(
{'t': ts, 'd': distance, 'o': outstanding + 1, 'a': addr, 'e': event, 'i': nid, 's': source})
elif event == 'ABORTED':
outstanding_searches[search_id].append({'t': ts, 'e': event})
elif event == 'COMPLETED':
distance = int(ls[ls.index('distance:') + 1])
lookup_type = ls[ls.index('type:') + 1].strip()
outstanding_searches[search_id].append({'t': ts, 'd': distance,
'o': 0, 'e': event, 'i': ''})
outstanding_searches[search_id][0]['type'] = lookup_type
outstanding_searches[search_id][0]['type'] = lookup_type
s = outstanding_searches[search_id]
s = outstanding_searches[search_id]
try:
start_time = convert_timestamp(s[0]['t'])
for i in range(len(s)):
s[i]['t'] = convert_timestamp(s[i]['t']) - start_time
except:
pass
searches.append(s)
del outstanding_searches[search_id]
try:
start_time = convert_timestamp(s[0]['t'])
for i in range(len(s)):
s[i]['t'] = convert_timestamp(s[i]['t']) - start_time
except BaseException:
pass
searches.append(s)
del outstanding_searches[search_id]
except Exception, e:
print e
print line.split(' ')
except Exception as e:
print(e)
print(line.split(' '))
lookup_times_min = []
lookup_times_max = []
@ -159,26 +167,29 @@ lookup_times_max = []
# to target boundaries
lookup_distance = []
for i in range(0, 15):
lookup_distance.append([])
lookup_distance.append([])
for s in searches:
for i in s:
if not 'last_dist' in i:
i['last_dist'] = -1
cur_dist = 160 - i['d']
last_dist = i['last_dist']
if cur_dist > last_dist:
for j in range(last_dist + 1, cur_dist + 1):
if j >= len(lookup_distance): break
lookup_distance[j].append(i['t'])
i['last_dist'] = cur_dist
if i['e'] != 'PEERS': continue
lookup_times_min.append(i['t'])
break
for i in reversed(s):
if i['e'] != 'PEERS': continue
lookup_times_max.append(i['t'])
break
for i in s:
if 'last_dist' not in i:
i['last_dist'] = -1
cur_dist = 160 - i['d']
last_dist = i['last_dist']
if cur_dist > last_dist:
for j in range(last_dist + 1, cur_dist + 1):
if j >= len(lookup_distance):
break
lookup_distance[j].append(i['t'])
i['last_dist'] = cur_dist
if i['e'] != 'PEERS':
continue
lookup_times_min.append(i['t'])
break
for i in reversed(s):
if i['e'] != 'PEERS':
continue
lookup_times_max.append(i['t'])
break
lookup_times_min.sort()
@ -186,74 +197,74 @@ lookup_times_max.sort()
out = open('dht_lookup_times_cdf.txt', 'w+')
counter = 0
for i in range(len(lookup_times_min)):
counter += 1
print >>out, '%d\t%d\t%f' % (lookup_times_min[i], lookup_times_max[i], counter / float(len(lookup_times_min)))
counter += 1
print('%d\t%d\t%f' % (lookup_times_min[i], lookup_times_max[i], counter / float(len(lookup_times_min))), file=out)
out.close()
for i in lookup_distance:
i.sort()
i.sort()
dist = 0
for i in lookup_distance:
out = open('dht_lookup_distance_%d.txt' % dist, 'w+')
dist += 1
counter = 0
for j in i:
counter += 1
print >>out, '%d\t%f' % (j, counter / float(len(i)))
out.close()
out = open('dht_lookup_distance_%d.txt' % dist, 'w+')
dist += 1
counter = 0
for j in i:
counter += 1
print('%d\t%f' % (j, counter / float(len(i))), file=out)
out.close()
out = open('dht_lookups.txt', 'w+')
for s in searches:
for i in s:
if i['e'] == 'INVOKE':
print >>out, ' ->', i['t'], 160 - i['d'], i['i'], i['a']
elif i['e'] == '1ST_TIMEOUT':
print >>out, ' x ', i['t'], 160 - i['d'], i['i'], i['a']
elif i['e'] == 'TIMEOUT':
print >>out, ' X ', i['t'], 160 - i['d'], i['i'], i['a']
elif i['e'] == 'ADD':
print >>out, ' + ', i['t'], 160 - i['d'], i['i'], i['a'], i['s']
elif i['e'] == 'RESPONSE':
print >>out, ' <-', i['t'], 160 - i['d'], i['i'], i['a']
elif i['e'] == 'PEERS':
print >>out, ' <-', i['t'], 160 - i['d'], i['i'], i['a']
elif i['e'] == 'ABORTED':
print >>out, 'abort'
elif i['e'] == 'COMPLETED':
print >>out, '***', i['t'], 160 - i['d'], '\n'
elif i['e'] == 'NEW':
print >>out, '===', i['abstime'], i['type'], '==='
print >>out, '<> ', 0, our_node_id, i['i']
for i in s:
if i['e'] == 'INVOKE':
print(' ->', i['t'], 160 - i['d'], i['i'], i['a'], file=out)
elif i['e'] == '1ST_TIMEOUT':
print(' x ', i['t'], 160 - i['d'], i['i'], i['a'], file=out)
elif i['e'] == 'TIMEOUT':
print(' X ', i['t'], 160 - i['d'], i['i'], i['a'], file=out)
elif i['e'] == 'ADD':
print(' + ', i['t'], 160 - i['d'], i['i'], i['a'], i['s'], file=out)
elif i['e'] == 'RESPONSE':
print(' <-', i['t'], 160 - i['d'], i['i'], i['a'], file=out)
elif i['e'] == 'PEERS':
print(' <-', i['t'], 160 - i['d'], i['i'], i['a'], file=out)
elif i['e'] == 'ABORTED':
print('abort', file=out)
elif i['e'] == 'COMPLETED':
print('***', i['t'], 160 - i['d'], '\n', file=out)
elif i['e'] == 'NEW':
print('===', i['abstime'], i['type'], '===', file=out)
print('<> ', 0, our_node_id, i['i'], file=out)
out.close()
out = open('dht_announce_distribution.dat', 'w+')
print 'announce distribution items: %d' % len(announce_histogram)
for k,v in announce_histogram.items():
print >>out, '%d %d' % (k, v)
print '%d %d' % (k, v)
print('announce distribution items: %d' % len(announce_histogram))
for k, v in list(announce_histogram.items()):
print('%d %d' % (k, v), file=out)
print('%d %d' % (k, v))
out.close()
out = open('dht_node_uptime_cdf.txt', 'w+')
s = 0
total_uptime_nodes = 0
for k,v in node_uptime_histogram.items():
total_uptime_nodes += v
for k, v in list(node_uptime_histogram.items()):
total_uptime_nodes += v
for k,v in sorted(node_uptime_histogram.items()):
s += v
print >>out, '%f %f' % (k / float(60), s / float(total_uptime_nodes))
print '%f %f' % (k / float(60), s / float(total_uptime_nodes))
for k, v in sorted(node_uptime_histogram.items()):
s += v
print('%f %f' % (k / float(60), s / float(total_uptime_nodes)), file=out)
print('%f %f' % (k / float(60), s / float(total_uptime_nodes)))
out.close()
print 'clients by version'
client_version_histogram = sorted(client_version_histogram.items(), key=lambda x: x[1], reverse=True)
print('clients by version')
client_version_histogram = sorted(list(client_version_histogram.items()), key=lambda x: x[1], reverse=True)
pp.pprint(client_version_histogram)
print 'clients'
client_histogram = sorted(client_histogram.items(), key=lambda x: x[1], reverse=True)
print('clients')
client_histogram = sorted(list(client_histogram.items()), key=lambda x: x[1], reverse=True)
pp.pprint(client_histogram)
out = open('dht.gnuplot', 'w+')
@ -265,7 +276,7 @@ set ylabel "portion of lookups"
set xlabel "time from start of lookup (ms)"
set grid
plot "dht_lookup_times_cdf.txt" using 1:3 with lines title "time to first result", \
"dht_lookup_times_cdf.txt" using 2:3 with lines title "time to last result"
"dht_lookup_times_cdf.txt" using 2:3 with lines title "time to last result"
set terminal postscript
set output "dht_lookup_times_cdf.ps"
@ -318,12 +329,11 @@ plot ''')
dist = 0
for i in lookup_distance:
if dist > 0: out.write(', ')
out.write('"dht_lookup_distance_%d.txt" using 1:2 title "%d" with lines' % (dist, dist))
dist += 1
if dist > 0:
out.write(', ')
out.write('"dht_lookup_distance_%d.txt" using 1:2 title "%d" with lines' % (dist, dist))
dist += 1
out.close()
os.system('gnuplot dht.gnuplot');
os.system('gnuplot dht.gnuplot')

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python
from __future__ import print_function
import sys
import os
@ -11,21 +12,22 @@ distribution = {}
num_messages = 0
for i in range(0, max_rtt, quantize):
distribution[i] = 0
distribution[i] = 0
for line in f:
time = int(line.split('\t')[1])
if (time < 0 or time > max_rtt - quantize): continue
num_messages += 1
time /= quantize
time *= quantize
distribution[time] += 1
time = int(line.split('\t')[1])
if (time < 0 or time > max_rtt - quantize):
continue
num_messages += 1
time /= quantize
time *= quantize
distribution[time] += 1
f = open('round_trip_distribution.log', 'w+')
for k, v in distribution.items():
print >>f, '%f %d' % ((k + (quantize / 2)) / 1000.0, v)
f.close();
for k, v in list(distribution.items()):
print('%f %d' % ((k + (quantize / 2)) / 1000.0, v), file=f)
f.close()
f = open('round_trip_distribution.gnuplot', 'w+')
@ -48,5 +50,4 @@ replot
''' % (float(quantize) / 1000.0))
f.close()
os.system('gnuplot round_trip_distribution.gnuplot');
os.system('gnuplot round_trip_distribution.gnuplot')

View File

@ -4,50 +4,59 @@ import os
gnuplot_scripts = []
def gen_stats_gnuplot(name, y, lines):
global gnuplot_scripts
global gnuplot_scripts
stat = open(sys.argv[1])
line = stat.readline()
while not 'minute:' in line:
line = stat.readline()
stat = open(sys.argv[1])
line = stat.readline()
while 'minute:' not in line:
line = stat.readline()
names = line.strip().split(':')
counter = 1
for i in names:
print '%d: %s' % (counter, i)
counter += 1
names = line.strip().split(':')
counter = 1
for i in names:
print('%d: %s' % (counter, i))
counter += 1
out = open('%s.gnuplot' % name, 'w+')
out.write('''
out = open('%s.gnuplot' % name, 'w+')
out.write('''
set term png size 1200,700 small
set output "%s.png"
set title "%s"
set ylabel "%s"
set xlabel "time (minutes)"
plot ''' % (name, name.strip('_'), y))
first = True
for i in lines:
if not first:
out.write(', \\\n')
first = False
out.write('"%s" using 1:%d title "%s" with lines' % (sys.argv[1], names.index(i)+1, i))
out.write('\n')
first = True
for i in lines:
if not first:
out.write(', \\\n')
first = False
out.write('"%s" using 1:%d title "%s" with lines' % (sys.argv[1], names.index(i) + 1, i))
out.write('\n')
out.write('''set terminal postscript
out.write('''set terminal postscript
set output "%s.ps"
replot
''' % (name))
out.close()
gnuplot_scripts += [name]
out.close()
gnuplot_scripts += [name]
gen_stats_gnuplot('dht_routing_table_size', 'nodes', ['active nodes','passive nodes', 'confirmed nodes'])
gen_stats_gnuplot('dht_routing_table_size', 'nodes', ['active nodes', 'passive nodes', 'confirmed nodes'])
gen_stats_gnuplot('dht_tracker_table_size', '', ['num torrents', 'num peers'])
gen_stats_gnuplot('dht_announces', 'messages per minute', ['announces per min', 'failed announces per min'])
gen_stats_gnuplot('dht_clients', 'messages per minute', ['total msgs per min', 'az msgs per min', 'ut msgs per min', 'lt msgs per min', 'mp msgs per min', 'gr msgs per min'])
gen_stats_gnuplot('dht_rate', 'bytes per second', ['bytes in per sec', 'bytes out per sec'])
gen_stats_gnuplot('dht_errors', 'messages per minute', ['error replies sent', 'error queries recvd'])
gen_stats_gnuplot('dht_clients',
'messages per minute',
['total msgs per min',
'az msgs per min',
'ut msgs per min',
'lt msgs per min',
'mp msgs per min',
'gr msgs per min'])
gen_stats_gnuplot('dht_rate', 'bytes per second', ['bytes in per sec', 'bytes out per sec'])
gen_stats_gnuplot('dht_errors', 'messages per minute', ['error replies sent', 'error queries recvd'])
for i in gnuplot_scripts:
os.system('gnuplot %s.gnuplot' % i);
os.system('gnuplot %s.gnuplot' % i)

View File

@ -1,6 +1,8 @@
#!/usr/bin/env python
from __future__ import print_function
import os, sys, time
import os
import sys
lines = open(sys.argv[1], 'rb').readlines()
@ -10,11 +12,11 @@ lines = open(sys.argv[1], 'rb').readlines()
# 16434 read cache: 17
key_order = ['receive buffer', 'send buffer', 'released send buffer', 'posted send buffer',
'received send buffer', 'dispatched send buffer', 'queued send buffer',
'write cache', 'read cache', 'hash temp']
'received send buffer', 'dispatched send buffer', 'queued send buffer',
'write cache', 'read cache', 'hash temp']
colors = ['30f030', '001070', '101080', '2040a0',
'4070d0', '80a0f0', 'f03030',
'80f080', 'f08080', '4040ff']
'4070d0', '80a0f0', 'f03030',
'80f080', 'f08080', '4040ff']
keys = []
fields = {}
@ -26,67 +28,70 @@ field_num_samples = {}
field_timestamp = {}
for c in key_order:
keys.append(c)
fields[c] = 0
maximum[c] = 0
field_sum[c] = 0
field_num_samples[c] = 0
field_timestamp[c] = 0
keys.append(c)
fields[c] = 0
maximum[c] = 0
field_sum[c] = 0
field_num_samples[c] = 0
field_timestamp[c] = 0
last_t = 0
for l in lines:
try:
t = int(l[0:l.find(' ')])
c = l[l.find(' ')+1:l.find(':')]
n = int(l[l.find(':')+1:-1])
except:
print l
continue
try:
t = int(l[0:l.find(' ')])
c = l[l.find(' ') + 1:l.find(':')]
n = int(l[l.find(':') + 1:-1])
except BaseException:
print(l)
continue
if last_t != t:
print >>out, '%d\t' % last_t,
for i in keys:
print >>out, '%d\t' % maximum[i],
print >>out, '\n',
if last_t != t:
print('%d\t' % last_t, end=' ', file=out)
for i in keys:
print('%d\t' % maximum[i], end=' ', file=out)
print('\n', end=' ', file=out)
if not c in keys: continue
if c not in keys:
continue
field_sum[c] += fields[c] * float(t - field_timestamp[c])
field_timestamp[c] = t
field_sum[c] += fields[c] * float(t - field_timestamp[c])
field_timestamp[c] = t
fields[c] = n
fields[c] = n
if n > maximum[c]: maximum[c] = n
if n > maximum[c]:
maximum[c] = n
if last_t != t:
last_t = t
maximum = fields
if last_t != t:
last_t = t
maximum = fields
for i in keys:
print '%s: avg: %f' % (i, field_sum[i] / last_t)
print
print('%s: avg: %f' % (i, field_sum[i] / last_t))
print()
out.close()
out = open('disk_buffer.gnuplot', 'wb')
print >>out, "set term png size 1200,700"
print >>out, 'set output "disk_buffer.png"'
print >>out, 'set xrange [0:*]'
print >>out, 'set xlabel "time (ms)"'
print >>out, 'set ylabel "buffers"'
print >>out, "set style data lines"
print >>out, "set key box"
print >>out, 'plot',
print("set term png size 1200,700", file=out)
print('set output "disk_buffer.png"', file=out)
print('set xrange [0:*]', file=out)
print('set xlabel "time (ms)"', file=out)
print('set ylabel "buffers"', file=out)
print("set style data lines", file=out)
print("set key box", file=out)
print('plot', end=' ', file=out)
count = 1 + len(keys)
keys.reverse()
comma = ''
for k in keys:
expr = "$%d" % count
for i in xrange(2, count): expr += "+$%d" % i
count -= 1
print >>out, ' %s"disk_buffer_log.dat" using 1:(%s) title "%s" with filledcurves x1 lt rgb "#%s"' % (comma, expr, k, colors[count-1]),
comma = ','
expr = "$%d" % count
for i in range(2, count):
expr += "+$%d" % i
count -= 1
print(' %s"disk_buffer_log.dat" using 1:(%s) title "%s" with filledcurves x1 lt rgb "#%s"' %
(comma, expr, k, colors[count - 1]), end=' ', file=out)
comma = ','
out.close()
os.system('gnuplot disk_buffer.gnuplot')

View File

@ -1,119 +1,129 @@
#!/usr/bin/env python
# this is meant to parse the dht_lookups.log generated by parse_dht_log.py
from __future__ import print_function
import os
nodes = {}
def get_origin(n):
if n in nodes:
return list(nodes[n]['conns'])
else:
return ['0.0.0.0']
if n in nodes:
return list(nodes[n]['conns'])
else:
return ['0.0.0.0']
def calculate_pos(nid, dist):
nid = int(nid[0:7], 16)
nid = int(nid[0:7], 16)
x = 0
y = 0
for i in range(0, 28, 2):
x |= (nid & (1 << i)) >> (i / 2)
y |= (nid & (2 << i)) >> (i / 2 + 1)
x = 0
y = 0
for i in range(0, 28, 2):
x |= (nid & (1 << i)) >> (i / 2)
y |= (nid & (2 << i)) >> (i / 2 + 1)
# print '%d -> %d %d' % (dist, x, y)
# print '%d -> %d %d' % (dist, x, y)
return (x / 3, y / 3)
return (x / 3, y / 3)
def plot_nodes(nodes, frame):
try: os.mkdir('dht_frames')
except: pass
try:
os.mkdir('dht_frames')
except BaseException:
pass
out = open('dht_frames/plot-%02d.dot' % frame, 'w+')
edges = set()
print >>out, 'graph swarm {'
# print >>out, '"tl" [shape=point pos="0,0!"];'
# print >>out, '"tr" [shape=point pos="1638,0!"];'
# print >>out, '"ll" [shape=point pos="1638,1638!"];'
# print >>out, '"tr" [shape=point pos="0,1638!"];'
for dst, n in nodes.items():
shape = 'point'
if 's' in n: shape = n['s']
out = open('dht_frames/plot-%02d.dot' % frame, 'w+')
edges = set()
print('graph swarm {', file=out)
# print >>out, '"tl" [shape=point pos="0,0!"];'
# print >>out, '"tr" [shape=point pos="1638,0!"];'
# print >>out, '"ll" [shape=point pos="1638,1638!"];'
# print >>out, '"tr" [shape=point pos="0,1638!"];'
for dst, n in list(nodes.items()):
shape = 'point'
if 's' in n:
shape = n['s']
print >>out, '"%s" [shape=%s fillcolor="%s" label="" pos="%d,%d!"];' % (dst, shape, n['c'], n['p'][0], n['p'][1])
for e in n['conns']:
if (e, dst) in edges: continue
print('"%s" [shape=%s fillcolor="%s" label="" pos="%d,%d!"];' %
(dst, shape, n['c'], n['p'][0], n['p'][1]), file=out)
for e in n['conns']:
if (e, dst) in edges:
continue
# only add an edge once to the .dot file
edges.add((e, dst))
edges.add((dst, e))
# only add an edge once to the .dot file
edges.add((e, dst))
edges.add((dst, e))
style = 'solid'
col = 'gray'
if nodes[dst]['c'] != 'white' and nodes[e]['c'] != 'white':
style = 'solid'
col = 'black'
print >>out, '"%s" -- "%s" [style="%s" color="%s"];' % (e, dst, style, col)
style = 'solid'
col = 'gray'
if nodes[dst]['c'] != 'white' and nodes[e]['c'] != 'white':
style = 'solid'
col = 'black'
print('"%s" -- "%s" [style="%s" color="%s"];' % (e, dst, style, col), file=out)
print >>out, '}'
out.close()
os.system('neato -n dht_frames/plot-%02d.dot -Tpng -o dht_frames/frame-%02d.png' % (frame, frame))
print('}', file=out)
out.close()
os.system('neato -n dht_frames/plot-%02d.dot -Tpng -o dht_frames/frame-%02d.png' % (frame, frame))
frame = 0
next_render_time = 100
f = open('dht_lookups.txt')
for l in f:
if l.startswith('***'): break
for line in f:
if line.startswith('***'):
break
kind = l[0:3].strip()
l = l[3:].strip().split(' ')
kind = line[0:3].strip()
line = line[3:].strip().split(' ')
if kind == '===': continue
if kind == '===':
continue
t = int(l[0])
if t > next_render_time:
plot_nodes(nodes, frame)
frame += 1
next_render_time += 100
# sys.exit(0)
t = int(line[0])
if t > next_render_time:
plot_nodes(nodes, frame)
frame += 1
next_render_time += 100
# sys.exit(0)
if kind == '<>':
p = calculate_pos(l[1], 0)
dst = '0.0.0.0'
if not dst in nodes:
nodes[dst] = { 'conns': set(), 'p': p, 'c': 'blue', 's': 'circle'}
if kind == '<>':
p = calculate_pos(line[1], 0)
dst = '0.0.0.0'
if dst not in nodes:
nodes[dst] = {'conns': set(), 'p': p, 'c': 'blue', 's': 'circle'}
p = calculate_pos(l[2], 25)
dst = '255.255.255.255'
if not dst in nodes:
nodes[dst] = { 'conns': set(), 'p': p, 'c': 'yellow', 's': 'circle'}
elif kind == '->':
dst = l[3]
p = calculate_pos(line[2], 25)
dst = '255.255.255.255'
if dst not in nodes:
nodes[dst] = {'conns': set(), 'p': p, 'c': 'yellow', 's': 'circle'}
elif kind == '->':
dst = line[3]
if not dst in nodes:
src = get_origin(dst)
p = calculate_pos(l[2], int(l[1]))
nodes[dst] = { 'conns': set(src), 'p': p, 'c': 'grey'}
nodes[dst]['c'] = 'grey'
if dst not in nodes:
src = get_origin(dst)
p = calculate_pos(line[2], int(line[1]))
nodes[dst] = {'conns': set(src), 'p': p, 'c': 'grey'}
nodes[dst]['c'] = 'grey'
elif kind == '+':
dst = l[3]
src = l[4]
p = calculate_pos(l[2], int(l[1]))
if not dst in nodes:
nodes[dst] = { 'conns': set(), 'p': p, 'c': 'white'}
nodes[dst]['conns'].add(src)
elif kind == '+':
dst = line[3]
src = line[4]
p = calculate_pos(line[2], int(line[1]))
if dst not in nodes:
nodes[dst] = {'conns': set(), 'p': p, 'c': 'white'}
nodes[dst]['conns'].add(src)
elif kind == '<-':
dst = l[3]
nodes[dst]['c'] = 'green'
elif kind == 'x':
dst = l[3]
nodes[dst]['c'] = 'orange'
elif kind == 'X':
dst = l[3]
nodes[dst]['c'] = 'red'
elif kind == '<-':
dst = line[3]
nodes[dst]['c'] = 'green'
elif kind == 'x':
dst = line[3]
nodes[dst]['c'] = 'orange'
elif kind == 'X':
dst = line[3]
nodes[dst]['c'] = 'red'
f.close()

View File

@ -1,5 +1,8 @@
#!/usr/bin/env python
import os, sys, time
from __future__ import print_function
import os
import sys
# usage: memory.log memory_index.log
@ -13,58 +16,60 @@ index = open(sys.argv[2], 'rb').readlines()
allocation_points_to_print = 30
def print_allocation_point(ap):
print 'space_time: %d kBms' % (ap['spacetime'] / 1024)
print 'allocations: %d' % ap['allocations']
print 'peak: %d kB' % (ap['peak'] / 1024)
print 'stack: '
counter = 0
for e in ap['stack']:
print '#%d %s' % (counter, e)
counter += 1
print('space_time: %d kBms' % (ap['spacetime'] / 1024))
print('allocations: %d' % ap['allocations'])
print('peak: %d kB' % (ap['peak'] / 1024))
print('stack: ')
counter = 0
for e in ap['stack']:
print('#%d %s' % (counter, e))
counter += 1
allocation_points = []
for l in index:
l = l.split('#')
l.pop(0)
ap = { 'allocations': 0, 'peak': 0, 'spacetime': 0, 'allocation_point': len(allocation_points), 'stack': l}
allocation_points.append(ap);
for line in index:
line = line.split('#')
line.pop(0)
ap = {'allocations': 0, 'peak': 0, 'spacetime': 0, 'allocation_point': len(allocation_points), 'stack': line}
allocation_points.append(ap)
for l in lines:
l = l.lstrip('#').rstrip('\n').split(' ')
if len(l) != 8:
print l
continue
try:
ap = int(l[0])
allocation_points[ap]['allocations'] += 1
allocation_points[ap]['peak'] = int(l[7])
allocation_points[ap]['spacetime'] = int(l[6])
except Exception, e:
print type(e), e, l
for line in lines:
line = line.lstrip('#').rstrip('\n').split(' ')
if len(line) != 8:
print(line)
continue
try:
ap = int(line[0])
allocation_points[ap]['allocations'] += 1
allocation_points[ap]['peak'] = int(line[7])
allocation_points[ap]['spacetime'] = int(line[6])
except Exception as e:
print(type(e), e, line)
print '=== space time ==='
print('=== space time ===')
hot_ap = []
allocation_points.sort(key = lambda x:x['spacetime'], reverse=True);
allocation_points.sort(key=lambda x: x['spacetime'], reverse=True)
counter = 0
for ap in allocation_points[0:allocation_points_to_print]:
print '== %d ==' % counter
counter += 1
print_allocation_point(ap)
hot_ap.append(ap['allocation_point']);
print('== %d ==' % counter)
counter += 1
print_allocation_point(ap)
hot_ap.append(ap['allocation_point'])
print '=== allocations ==='
print('=== allocations ===')
allocation_points.sort(key = lambda x:x['allocations'], reverse=True);
allocation_points.sort(key=lambda x: x['allocations'], reverse=True)
for ap in allocation_points[0:allocation_points_to_print]:
print_allocation_point(ap)
print_allocation_point(ap)
print '=== peak ==='
print('=== peak ===')
allocation_points.sort(key = lambda x:x['peak'], reverse=True);
allocation_points.sort(key=lambda x: x['peak'], reverse=True)
for ap in allocation_points[0:allocation_points_to_print]:
print_allocation_point(ap)
print_allocation_point(ap)
# generate graph
lines = open(sys.argv[1], 'rb').readlines()
@ -74,53 +79,54 @@ cur_line = [0] * allocation_points_to_print
prev_line = [0] * allocation_points_to_print
last_time = 0
for l in lines:
l = l.lstrip('#').rstrip('\n').split(' ')
if len(l) != 8:
print l
continue
try:
time = int(l[1])
if time != last_time:
print >>out, last_time, '\t',
for i in range(allocation_points_to_print):
if cur_line[i] == -1:
print >>out, prev_line[i], '\t',
else:
print >>out, cur_line[i], '\t',
prev_line[i] = cur_line[i]
print >>out
cur_line = [-1] * allocation_points_to_print
last_time = time
for line in lines:
line = line.lstrip('#').rstrip('\n').split(' ')
if len(line) != 8:
print(line)
continue
try:
time = int(line[1])
if time != last_time:
print(last_time, '\t', end=' ', file=out)
for i in range(allocation_points_to_print):
if cur_line[i] == -1:
print(prev_line[i], '\t', end=' ', file=out)
else:
print(cur_line[i], '\t', end=' ', file=out)
prev_line[i] = cur_line[i]
print(file=out)
cur_line = [-1] * allocation_points_to_print
last_time = time
size = int(l[5])
ap = int(l[0])
if ap in hot_ap:
index = hot_ap.index(ap)
cur_line[index] = max(cur_line[index], size)
size = int(line[5])
ap = int(line[0])
if ap in hot_ap:
index = hot_ap.index(ap)
cur_line[index] = max(cur_line[index], size)
except Exception, e:
print type(e), e, l
except Exception as e:
print(type(e), e, line)
out.close()
out = open('memory.gnuplot', 'wb')
print >>out, "set term png size 1200,700"
print >>out, 'set output "memory.png"'
print >>out, 'set xrange [0:*]'
print >>out, 'set xlabel "time (ms)"'
print >>out, 'set ylabel "bytes (B)"'
print >>out, "set style data lines"
print >>out, "set key box"
print >>out, 'plot',
print("set term png size 1200,700", file=out)
print('set output "memory.png"', file=out)
print('set xrange [0:*]', file=out)
print('set xlabel "time (ms)"', file=out)
print('set ylabel "bytes (B)"', file=out)
print("set style data lines", file=out)
print("set key box", file=out)
print('plot', end=' ', file=out)
for k in range(allocation_points_to_print):
print >>out, ' "memory.dat" using 1:(',
for i in range(k, allocation_points_to_print):
if i == k: print >>out, '$%d' % (i + 2),
else: print >>out, '+$%d' % (i + 2),
print >>out, ') title "%d" with filledcurves x1, \\' % k
print >>out, 'x=0'
print(' "memory.dat" using 1:(', end=' ', file=out)
for i in range(k, allocation_points_to_print):
if i == k:
print('$%d' % (i + 2), end=' ', file=out)
else:
print('+$%d' % (i + 2), end=' ', file=out)
print(') title "%d" with filledcurves x1, \\' % k, file=out)
print('x=0', file=out)
out.close()
os.system('gnuplot memory.gnuplot');
os.system('gnuplot memory.gnuplot')

View File

@ -1,5 +1,7 @@
#!/usr/bin/env python
from __future__ import print_function
import glob
import os
import sys
@ -9,64 +11,64 @@ import sys
log_files = []
for p in glob.iglob(os.path.join(sys.argv[1], '*.log')):
name = os.path.split(p)[1]
if name == 'main_session.log': continue
print name
f = open(p, 'r')
out_file = p + '.dat'
log_files.append(out_file)
out = open(out_file, 'w+')
name = os.path.split(p)[1]
if name == 'main_session.log':
continue
print(name)
f = open(p, 'r')
out_file = p + '.dat'
log_files.append(out_file)
out = open(out_file, 'w+')
uploaded_blocks = 0;
downloaded_blocks = 0;
uploaded_blocks = 0
downloaded_blocks = 0
for l in f:
t = l.split(': ')[0].split('.')[0]
log_line = False
if ' ==> PIECE' in l:
uploaded_blocks+= 1
log_line = True
for l in f:
t = l.split(': ')[0].split('.')[0]
log_line = False
if ' ==> PIECE' in l:
uploaded_blocks += 1
log_line = True
if ' <== PIECE' in l:
downloaded_blocks+= 1
log_line = True
if ' <== PIECE' in l:
downloaded_blocks += 1
log_line = True
if log_line:
print >>out, '%s\t%d\t%d' % (t, uploaded_blocks, downloaded_blocks)
if log_line:
print('%s\t%d\t%d' % (t, uploaded_blocks, downloaded_blocks), file=out)
out.close()
f.close()
out.close()
f.close()
out = open('peers.gnuplot', 'wb')
print >>out, "set term png size 1200,700"
print >>out, 'set xrange [0:*]'
print >>out, 'set xlabel "time"'
print >>out, 'set ylabel "blocks"'
print >>out, 'set key box'
print >>out, 'set xdata time'
print >>out, 'set timefmt "%H:%M:%S"'
print >>out, 'set title "uploaded blocks"'
print >>out, 'set output "peers_upload.png"'
print >>out, 'plot',
print("set term png size 1200,700", file=out)
print('set xrange [0:*]', file=out)
print('set xlabel "time"', file=out)
print('set ylabel "blocks"', file=out)
print('set key box', file=out)
print('set xdata time', file=out)
print('set timefmt "%H:%M:%S"', file=out)
print('set title "uploaded blocks"', file=out)
print('set output "peers_upload.png"', file=out)
print('plot', end=' ', file=out)
first = True
for n in log_files:
if not first:
print >>out, ',',
first = False
print >>out, ' "%s" using 1:2 title "%s" with steps' % (n, os.path.split(n)[1].split('.log')[0]),
print >>out, ''
if not first:
print(',', end=' ', file=out)
first = False
print(' "%s" using 1:2 title "%s" with steps' % (n, os.path.split(n)[1].split('.log')[0]), end=' ', file=out)
print('', file=out)
print >>out, 'set title "downloaded blocks"'
print >>out, 'set output "peers_download.png"'
print >>out, 'plot',
print('set title "downloaded blocks"', file=out)
print('set output "peers_download.png"', file=out)
print('plot', end=' ', file=out)
first = True
for n in log_files:
if not first:
print >>out, ',',
first = False
print >>out, ' "%s" using 1:3 title "%s" with steps' % (n, os.path.split(n)[1].split('.log')[0]),
print >>out, ''
if not first:
print(',', end=' ', file=out)
first = False
print(' "%s" using 1:3 title "%s" with steps' % (n, os.path.split(n)[1].split('.log')[0]), end=' ', file=out)
print('', file=out)
out.close()
os.system('gnuplot peers.gnuplot');
os.system('gnuplot peers.gnuplot')

View File

@ -7,103 +7,155 @@ import sys
# relevant depths and to filter out low sample counts
f = open(sys.argv[1])
def parse_line(line):
    """Parse one line of a 'sample'-style stack dump.

    Returns a tuple ``(indentation, samples, function_name)``. The
    indentation is the number of leading spaces (stack depth marker);
    the first whitespace-separated field is the sample count and the
    remainder is the function name. Lines with no leading spaces carry
    no sample data and yield ``(0, 0, '')``.
    """
    indentation = 0
    while indentation < len(line) and line[indentation] == ' ':
        indentation += 1
    if indentation == 0:
        return (0, 0, '')

    fields = line.strip().split(' ')
    return (indentation, int(fields[0]), ' '.join(fields[1:]))
# Filter the stack dump: collapse uninteresting subtrees, drop low-sample
# frames, accumulate per-function (leaf) sample counts and finally print
# the functions ranked by sample count.

fold = -1
try:
    sample_limit = int(sys.argv[2])
except (IndexError, ValueError):
    # no (or non-numeric) limit given on the command line
    sample_limit = 5

fun_samples = {}

# a subtree is folded (skipped) when its root's name *contains* one of these
FOLD_IF_CONTAINS = [
    'invariant_checker_impl', 'free_multiple_buffers',
    'libtorrent::condition::wait', 'allocate_buffer', '::find_POD',
    'SHA1_Update', 'boost::detail::function::basic_vtable', 'operator new',
    'std::_Rb_tree', 'pthread_cond_wait', '::check_invariant()', '_sigtramp',
    'time_now_hires', 'libtorrent::sleep', 'boost::asio::basic_stream_socket',
    'boost::detail::atomic_count::operator', 'std::string::append',
    '__gnu_debug::_Safe_iterator<std::',
]

# a subtree is folded when its root's name is *exactly* one of these
FOLD_IF_EQUALS = frozenset([
    'malloc', 'free', 'mp_exptmod', 'puts', 'recvmsg', 'sendmsg',
    'semaphore_signal_trap', 'pthread_mutex_lock', 'pthread_mutex_unlock',
    '>::~vector()', 'szone_free_definite_size', 'snprintf', 'usleep',
    'getipnodebyname', 'fflush', 'vfprintf', 'fprintf', 'BN_mod_exp',
    'BN_CTX_free', 'cerror', '0xffffffff',
])

for l in f:
    if 'Sort by top of stack' in l:
        break

    indentation, samples, fun = parse_line(l)
    if samples < sample_limit:
        continue
    # while folding, skip everything deeper than the folded frame
    if fold != -1 and indentation > fold:
        continue
    fold = -1

    if '__gnu_cxx::__normal_iterator<' in fun:
        # iterator boilerplate: fold starting from the *parent* frame
        fold = indentation - 1
        continue

    # boost::bind plumbing frames are dropped (but not folded)
    if 'boost::_bi::bind_t' in fun:
        continue
    if 'boost::_bi::list' in fun:
        continue
    if 'boost::_mfi::mf' in fun:
        continue
    if 'boost::_bi::storage' in fun:
        continue

    # should only add leaves
    if fun in fun_samples:
        fun_samples[fun] += samples
    else:
        fun_samples[fun] = samples

    # integer division: ' ' * float raises TypeError on python3
    output = '%s%-4d %s' % (' ' * (indentation // 2), samples, fun)
    if len(output) > 200:
        output = output[0:200]
    print(output)

    if fun in FOLD_IF_EQUALS or any(p in fun for p in FOLD_IF_CONTAINS):
        fold = indentation

# print the accumulated samples, most-sampled function first
ranked = sorted(((s, name) for name, s in fun_samples.items()), reverse=True)
for samples, name in ranked:
    print('%-4d %s' % (samples, name))

File diff suppressed because it is too large Load Diff

View File

@ -1,54 +1,59 @@
#!/usr/bin/env python
from __future__ import print_function
import os, sys, time
import os
import sys
from functools import reduce
# usage: parse_log.py log-file [socket-index to focus on]
socket_filter = None
if len(sys.argv) >= 3:
socket_filter = sys.argv[2].strip()
socket_filter = sys.argv[2].strip()
if socket_filter == None:
print "scanning for socket with the most packets"
file = open(sys.argv[1], 'rb')
if socket_filter is None:
print("scanning for socket with the most packets")
file = open(sys.argv[1], 'rb')
sockets = {}
sockets = {}
for l in file:
if not 'our_delay' in l: continue
for l in file:
if 'our_delay' not in l:
continue
try:
a = l.strip().split(" ")
socket_index = a[1][:-1]
except:
continue
try:
a = l.strip().split(" ")
socket_index = a[1][:-1]
except BaseException:
continue
# msvc's runtime library doesn't prefix pointers
# with '0x'
# if socket_index[:2] != '0x':
# continue
# msvc's runtime library doesn't prefix pointers
# with '0x'
# if socket_index[:2] != '0x':
# continue
if socket_index in sockets:
sockets[socket_index] += 1
else:
sockets[socket_index] = 1
if socket_index in sockets:
sockets[socket_index] += 1
else:
sockets[socket_index] = 1
items = sockets.items()
items.sort(lambda x, y: y[1] - x[1])
items = list(sockets.items())
items.sort(lambda x, y: y[1] - x[1])
count = 0
for i in items:
print '%s: %d' % (i[0], i[1])
count += 1
if count > 5: break
count = 0
for i in items:
print('%s: %d' % (i[0], i[1]))
count += 1
if count > 5:
break
file.close()
socket_filter = items[0][0]
print '\nfocusing on socket %s' % socket_filter
file.close()
socket_filter = items[0][0]
print('\nfocusing on socket %s' % socket_filter)
file = open(sys.argv[1], 'rb')
out_file = 'utp.out%s' % socket_filter;
out_file = 'utp.out%s' % socket_filter
out = open(out_file, 'wb')
delay_samples = 'points lc rgb "blue"'
@ -61,27 +66,27 @@ rtt = 'lines lc rgb "light-blue"'
send_buffer = 'lines lc rgb "light-red"'

# metric name -> [axis label, which y axis to use, gnuplot plot style].
# The style values (delay_samples, cwnd, rtt, ...) are gnuplot "with"
# specifications defined above.
# NOTE(review): window_size is reused further down as a per-socket dict;
# here it still refers to the style string defined above.
metrics = {
    'our_delay': ['our delay (ms)', 'x1y2', delay_samples],
    'upload_rate': ['send rate (B/s)', 'x1y1', 'lines'],
    'max_window': ['cwnd (B)', 'x1y1', cwnd],
    'target_delay': ['target delay (ms)', 'x1y2', target_delay],
    'cur_window': ['bytes in-flight (B)', 'x1y1', window_size],
    'cur_window_packets': ['number of packets in-flight', 'x1y2', 'steps'],
    'packet_size': ['current packet size (B)', 'x1y2', 'steps'],
    'rtt': ['rtt (ms)', 'x1y2', rtt],
    'off_target': ['off-target (ms)', 'x1y2', off_target],
    'delay_sum': ['delay sum (ms)', 'x1y2', 'steps'],
    'their_delay': ['their delay (ms)', 'x1y2', delay_samples],
    'get_microseconds': ['clock (us)', 'x1y1', 'steps'],
    'wnduser': ['advertised window size (B)', 'x1y1', 'steps'],
    'ssthres': ['slow-start threshold (B)', 'x1y1', 'steps'],
    'delay_base': ['delay base (us)', 'x1y1', delay_base],
    'their_delay_base': ['their delay base (us)', 'x1y1', delay_base],
    'their_actual_delay': ['their actual delay (us)', 'x1y1', delay_samples],
    'actual_delay': ['actual_delay (us)', 'x1y1', delay_samples],
    'send_buffer': ['send buffer size (B)', 'x1y1', send_buffer],
    'recv_buffer': ['receive buffer size (B)', 'x1y1', 'lines']
}

# quantization step for the our_delay histogram buckets
histogram_quantization = 1
@ -99,49 +104,51 @@ delay_histogram = {}
packet_size_histogram = {}
window_size = {'0': 0, '1': 0}
# NOTE(review): this region is a rendered diff -- the python2 and python3
# variants of several statements appear side by side below; only the
# python3 (print-function) variants are current.
# example of a log line being parsed:
# [35301484] 0x00ec1190: actual_delay:1021583 our_delay:102 their_delay:-1021345 off_target:297 max_window:2687 upload_rate:18942 delay_base:1021481154 delay_sum:-1021242 target_delay:400 acked_bytes:1441 cur_window:2882 scaled_gain:2.432
# [35301484] 0x00ec1190: actual_delay:1021583 our_delay:102 their_delay:-1021345 off_target:297 max_window:2687
# upload_rate:18942 delay_base:1021481154 delay_sum:-1021242 target_delay:400 acked_bytes:1441 cur_window:2882
# scaled_gain:2.432
counter = 0
print "reading log file"
print("reading log file")
# pick up the plot title from the UTP_Connect line; every other line is
# matched on its own keywords further down
for l in file:
if "UTP_Connect" in l:
title = l[:-2]
if socket_filter != None:
for line in file:
if "UTP_Connect" in line:
title = line[:-2]
if socket_filter is not None:
title += ' socket: %s' % socket_filter
else:
title += ' sum of all sockets'
continue
# split "[timestamp] socket_index: name:value ..." into fields
try:
a = l.strip().split(" ")
a = line.strip().split(" ")
t = a[0][1:-1]
socket_index = a[1][:-1]
except:
except BaseException:
continue
# if socket_index[:2] != '0x':
# continue
if socket_filter != None and socket_index != socket_filter:
if socket_filter is not None and socket_index != socket_filter:
continue
# progress indicator, refreshed every 300 matched lines
counter += 1
if (counter % 300 == 0):
print "\r%d " % counter,
print("\r%d " % counter, end=' ')
if "lost." in l:
if "lost." in line:
packet_loss = packet_loss + 1
continue
if "Packet timeout" in l:
if "Packet timeout" in line:
packet_timeout = packet_timeout + 1
continue
if "sending packet" in l:
v = l.split('size:')[1].split(' ')[0]
if "sending packet" in line:
v = line.split('size:')[1].split(' ')[0]
packet_size_histogram[v] = 1 + packet_size_histogram.get(v, 0)
if "our_delay:" not in l:
if "our_delay:" not in line:
continue
# used for Logf timestamps
@ -161,159 +168,161 @@ for l in file:
begin = t
t = t - begin
# print time. Convert from milliseconds to seconds
print >>out, '%f\t' % (float(t)/1000.),
print('%f\t' % (float(t) / 1000.), end=' ', file=out)
#if t > 200000:
# if t > 200000:
# break
fill_columns = not columns
# each remaining field is "name:value"; emit one output column per
# metric listed in `metrics`
for i in a[2:]:
try:
n, v = i.split(':')
except:
except BaseException:
continue
v = float(v)
if n == "our_delay":
bucket = int(v / histogram_quantization)
delay_histogram[bucket] = 1 + delay_histogram.get(bucket, 0)
if not n in metrics: continue
if n not in metrics:
continue
if fill_columns:
columns.append(n)
if n == "max_window":
# max_window is reported summed over all sockets seen so far
window_size[socket_index] = v
print >>out, '%f\t' % int(reduce(lambda a,b: a+b, window_size.values())),
print('%f\t' % int(reduce(lambda a, b: a + b, list(window_size.values()))), end=' ', file=out)
else:
print >>out, '%f\t' % v,
print >>out, float(packet_loss * 8000), float(packet_timeout * 8000)
print('%f\t' % v, end=' ', file=out)
print(float(packet_loss * 8000), float(packet_timeout * 8000), file=out)
packet_loss = 0
packet_timeout = 0
out.close()
# NOTE(review): both histogram files below are opened 'wb' but written
# with print(..., file=out); on python3 that raises TypeError -- they
# should be opened in text mode ('w'). Confirm before relying on this.
out = open('%s.histogram' % out_file, 'wb')
for d,f in delay_histogram.iteritems():
print >>out, float(d*histogram_quantization) + histogram_quantization / 2, f
for d, f in delay_histogram.items():
print(float(d * histogram_quantization) + histogram_quantization / 2, f, file=out)
out.close()
out = open('%s_packet_size.histogram' % out_file, 'wb')
for d,f in packet_size_histogram.iteritems():
print >>out, d, f
for d, f in packet_size_histogram.items():
print(d, f, file=out)
out.close()
# one entry per generated graph: which metrics are plotted together,
# the graph's title and the labels for the left (y1)/right (y2) axes
plot = [
    {
        'data': ['max_window', 'send_buffer', 'cur_window', 'rtt'],
        'title': 'send-packet-size',
        'y1': 'Bytes',
        'y2': 'Time (ms)'
    },
    {
        'data': ['upload_rate', 'max_window', 'cur_window', 'wnduser', 'cur_window_packets', 'packet_size', 'rtt'],
        'title': 'slow-start',
        'y1': 'Bytes',
        'y2': 'Time (ms)'
    },
    {
        'data': ['max_window', 'cur_window', 'our_delay', 'target_delay', 'ssthres'],
        'title': 'cwnd',
        'y1': 'Bytes',
        'y2': 'Time (ms)'
    },
    {
        'data': ['our_delay', 'max_window', 'target_delay', 'cur_window', 'wnduser', 'cur_window_packets'],
        'title': 'uploading',
        'y1': 'Bytes',
        'y2': 'Time (ms)'
    },
    {
        'data': ['our_delay', 'max_window', 'target_delay', 'cur_window', 'send_buffer'],
        'title': 'uploading_packets',
        'y1': 'Bytes',
        'y2': 'Time (ms)'
    },
    {
        'data': ['their_delay', 'target_delay', 'rtt'],
        'title': 'their_delay',
        'y1': '',
        'y2': 'Time (ms)'
    },
    {
        'data': ['their_actual_delay', 'their_delay_base'],
        'title': 'their_delay_base',
        'y1': 'Time (us)',
        'y2': ''
    },
    {
        'data': ['our_delay', 'target_delay', 'rtt'],
        'title': 'our-delay',
        'y1': '',
        'y2': 'Time (ms)'
    },
    {
        'data': ['actual_delay', 'delay_base'],
        'title': 'our_delay_base',
        'y1': 'Time (us)',
        'y2': ''
    }
]
# Emit the gnuplot script and run it; `files` collects the names of all
# generated images so they can be opened at the end.
out = open('utp.gnuplot', 'w+')
files = ''

# ---- delay histogram ----
# print('set xtics 0, 20', file=out)
print("set term png size 1280,800", file=out)
print('set output "%s.delays.png"' % out_file, file=out)
print('set xrange [0:200]', file=out)
print('set xlabel "delay (ms)"', file=out)
print('set boxwidth 1', file=out)
print('set ylabel "number of packets"', file=out)
print('plot "%s.histogram" using 1:2 with boxes fs solid 0.3' % out_file, file=out)
files += out_file + '.delays.png '

# ---- packet-size histogram (log-scale y) ----
print('set output "%s.packet_sizes.png"' % out_file, file=out)
print('set xrange [0:*]', file=out)
print('set xlabel "packet size (B)"', file=out)
print('set boxwidth 1', file=out)
print('set ylabel "number of packets sent"', file=out)
print('set logscale y', file=out)
print('plot "%s_packet_size.histogram" using 1:2 with boxes fs solid 0.3' % out_file, file=out)
print('set nologscale y', file=out)
files += out_file + '.packet_sizes.png '

# ---- time series, one image per entry in `plot` ----
print("set style data steps", file=out)
# print("set yrange [0:*]", file=out)
print("set y2range [*:*]", file=out)
# set hidden3d
# set title "Peer bandwidth distribution"
# set xlabel "Ratio"

# NOTE(review): `title` is only assigned when the log contained a
# "UTP_Connect" line; otherwise this raises NameError (pre-existing
# behavior, kept as-is)
for p in plot:
    print('set title "%s %s"' % (p['title'], title), file=out)
    print('set xlabel "time (s)"', file=out)
    print('set ylabel "%s"' % p['y1'], file=out)
    print("set tics nomirror", file=out)
    print('set y2tics', file=out)
    print('set y2label "%s"' % p['y2'], file=out)
    print('set xrange [0:*]', file=out)
    print("set key box", file=out)
    print("set term png size 1280,800", file=out)
    print('set output "%s-%s.png"' % (out_file, p['title']), file=out)
    files += '%s-%s.png ' % (out_file, p['title'])

    comma = ''
    print("plot", end=' ', file=out)
    for c in p['data']:
        if c not in metrics:
            continue
        # +2 because gnuplot column 1 is the timestamp and the data
        # columns follow in the order recorded in `columns`
        i = columns.index(c)
        print('%s"%s" using ($1/1000):%d title "%s-%s" axes %s with %s' %
              (comma, out_file, i + 2, metrics[c][0], metrics[c][1], metrics[c][1], metrics[c][2]),
              end=' ', file=out)
        comma = ', '
    print('', file=out)

out.close()
os.system("gnuplot utp.gnuplot")
os.system("open %s" % files)

View File

@ -6,89 +6,114 @@ import shutil
import subprocess
import sys
cache_size = 300  # in MiB

# optional bjam toolset passed as the first command-line argument
toolset = ''
if len(sys.argv) > 1:
    toolset = sys.argv[1]

ret = os.system('cd ../examples && bjam profile statistics=on %s stage_client_test' % toolset)
if ret != 0:
    print('ERROR: build failed: %d' % ret)
    sys.exit(1)

ret = os.system('cd ../examples && bjam release %s stage_connection_tester' % toolset)
if ret != 0:
    print('ERROR: build failed: %d' % ret)
    sys.exit(1)

# best-effort cleanup of state left by a previous run; a missing file or
# directory is expected, so report and keep going. OSError (not
# BaseException) so ^C and real bugs still propagate.
try:
    os.remove('.ses_state')
except OSError as e:
    print(e)
try:
    shutil.rmtree('.resume')
except OSError as e:
    print(e)
try:
    shutil.rmtree('cpu_benchmark')
except OSError as e:
    print(e)

# generate the benchmark torrent only once; it is reused across runs
if not os.path.exists('cpu_benchmark.torrent'):
    ret = os.system('../examples/connection_tester gen-torrent -s 10000 -n 15 -t cpu_benchmark.torrent')
    if ret != 0:
        print('ERROR: connection_tester failed: %d' % ret)
        sys.exit(1)

try:
    shutil.rmtree('t')
except OSError:
    pass
def run_test(name, test_cmd, client_arg, num_peers):
    """Run one benchmark pass.

    Starts client_test seeding/downloading cpu_benchmark.torrent, runs
    connection_tester against it with `num_peers` peers, then collects
    gprof output and session stats into the 'logs_<name>' directory.
    """
    output_dir = 'logs_%s' % name

    # fresh output directory; both operations are best-effort
    try:
        shutil.rmtree(output_dir)
    except OSError:
        pass
    try:
        os.mkdir(output_dir)
    except OSError:
        pass

    # semi-random listen port to avoid collisions between back-to-back runs
    port = (int(time.time()) % 50000) + 2000

    try:
        shutil.rmtree('session_stats')
    except OSError:
        pass
    try:
        shutil.rmtree('session_stats_report')
    except OSError:
        pass

    start = time.time()
    # %-formatting throughout: a previous revision called str.format() on a
    # %-style string, which left every placeholder unsubstituted (and also
    # dropped the space before '-h', fusing it onto the previous argument)
    client_cmd = ('../examples/client_test -p %d cpu_benchmark.torrent -k -z -H -X -q 120 %s'
                  ' -h -c %d -T %d -C %d -f %s/events.log'
                  % (port, client_arg, num_peers * 2, num_peers * 2, cache_size * 16, output_dir))
    test_cmd = '../examples/connection_tester %s -c %d -d 127.0.0.1 -p %d -t cpu_benchmark.torrent' % (
        test_cmd, num_peers, port)

    client_out = open('%s/client.out' % output_dir, 'w+')
    test_out = open('%s/test.out' % output_dir, 'w+')
    print(client_cmd)
    c = subprocess.Popen(client_cmd.split(' '), stdout=client_out, stderr=client_out, stdin=subprocess.PIPE)
    time.sleep(2)
    print(test_cmd)
    t = subprocess.Popen(test_cmd.split(' '), stdout=test_out, stderr=test_out)
    t.wait()
    end = time.time()

    try:
        # ask client_test to quit; stdin is a binary pipe on python3, so
        # send bytes, not str
        c.communicate(b'q')
    except (OSError, ValueError):
        pass
    c.wait()

    client_out.close()
    test_out.close()

    print('runtime %d seconds' % (end - start))
    print('analyzing profile...')
    os.system('gprof ../examples/client_test >%s/gprof.out' % output_dir)
    print('generating profile graph...')
    os.system('python gprof2dot.py --strip <%s/gprof.out | dot -Tpng -o %s/cpu_profile.png' % (output_dir, output_dir))

    os.system('python parse_session_stats.py session_stats/*.log')
    try:
        shutil.move('session_stats_report', '%s/session_stats_report' % output_dir)
    except (shutil.Error, OSError):
        pass
    try:
        shutil.move('session_stats', '%s/session_stats' % output_dir)
    except (shutil.Error, OSError):
        pass


run_test('download', 'upload', '', 50)
run_test('upload', 'download', '-G', 20)

View File

@ -7,48 +7,54 @@ import re
version = (int(sys.argv[1]), int(sys.argv[2]), int(sys.argv[3]), int(sys.argv[4]))
def v(version):
    """Encode each numeric version component as a single character.

    0-9 map to '0'-'9' and 10+ map to 'A', 'B', ... -- the encoding used
    in the "-LTxxxx-" peer-id fingerprint. Returns a tuple of one-char
    strings. Fixes an off-by-one: with the previous `i < 9` test the
    value 9 fell into the letter branch and encoded as '@'.
    """
    ret = ()
    for i in version:
        if i < 10:
            ret = ret + (chr(ord('0') + i),)
        else:
            ret = ret + (chr(ord('A') + i - 10),)
    return ret
revision = os.popen('git log -1 --format=format:%h').read().strip()
def substitute_file(name):
    """Rewrite version numbers in-place in the given file.

    Which patterns are substituted depends on the file's extension, so
    this one function can patch C++ headers (.hpp), autoconf (.ac),
    cmake (.txt), docs (.rst), setup.py, Jamfile and settings_pack.cpp.
    Uses the module-level `version`, `revision` and `v()`.
    """
    subst = ''
    with open(name) as f:
        for line in f:
            if '#define LIBTORRENT_VERSION_MAJOR' in line and name.endswith('.hpp'):
                line = '#define LIBTORRENT_VERSION_MAJOR %d\n' % version[0]
            elif '#define LIBTORRENT_VERSION_MINOR' in line and name.endswith('.hpp'):
                line = '#define LIBTORRENT_VERSION_MINOR %d\n' % version[1]
            elif '#define LIBTORRENT_VERSION_TINY' in line and name.endswith('.hpp'):
                line = '#define LIBTORRENT_VERSION_TINY %d\n' % version[2]
            elif '#define LIBTORRENT_VERSION ' in line and name.endswith('.hpp'):
                line = '#define LIBTORRENT_VERSION "%d.%d.%d.%d"\n' % (version[0], version[1], version[2], version[3])
            elif '#define LIBTORRENT_REVISION ' in line and name.endswith('.hpp'):
                line = '#define LIBTORRENT_REVISION "%s"\n' % revision
            elif 'AC_INIT([libtorrent-rasterbar]' in line and name.endswith('.ac'):
                line = 'AC_INIT([libtorrent-rasterbar],[%d.%d.%d],[arvid@libtorrent.org],\n' % (
                    version[0], version[1], version[2])
            elif 'set (VERSION ' in line and name.endswith('.txt'):
                line = 'set (VERSION "%d.%d.%d")\n' % (version[0], version[1], version[2])
            elif ':Version: ' in line and (name.endswith('.rst') or name.endswith('.py')):
                line = ':Version: %d.%d.%d\n' % (version[0], version[1], version[2])
            elif 'VERSION = ' in line and name.endswith('Jamfile'):
                line = 'VERSION = %d.%d.%d ;\n' % (version[0], version[1], version[2])
            elif 'version=' in line and name.endswith('setup.py'):
                line = "\tversion = '%d.%d.%d',\n" % (version[0], version[1], version[2])
            elif "version = '" in line and name.endswith('setup.py'):
                line = "\tversion = '%d.%d.%d',\n" % (version[0], version[1], version[2])
            elif '"-LT' in line and name.endswith('settings_pack.cpp'):
                line = re.sub('"-LT[0-9A-Za-z]{4}-"', '"-LT%c%c%c%c-"' % v(version), line)
            subst += line
    # write back; 'with' guarantees the handle is closed (the original
    # left the written handle to the garbage collector)
    with open(name, 'w+') as f:
        f.write(subst)
substitute_file('include/libtorrent/version.hpp')
@ -58,7 +64,5 @@ substitute_file('bindings/python/setup.py')
substitute_file('docs/gen_reference_doc.py')
substitute_file('src/settings_pack.cpp')
for i in glob.glob('docs/*.rst'):
substitute_file(i)
substitute_file(i)
substitute_file('Jamfile')

View File

@ -1,36 +1,35 @@
#!/usr/bin/env python
import os
import sys
import glob
import datetime

# copyright ranges in the source headers are extended up to this year
this_year = datetime.date.today().year
print('current year: %d' % this_year)
def update_file(name):
    """Bump the copyright year in-place for lines crediting Arvid Norberg.

    A single year ("Copyright (c) 2016 ...") becomes a range ending at
    the current year; an existing range has its end year replaced. Uses
    the module-level `this_year`.
    """
    subst = ''
    with open(name) as f:
        for line in f:
            if 'Copyright (c) ' in line and 'Arvid Norberg' in line:
                year_idx = line.index('Copyright (c) ')
                first_year = int(line[year_idx + 14: year_idx + 18])
                if first_year != this_year:
                    if line[year_idx + 18] == '-':
                        # already a range: overwrite the end year
                        line = line[:year_idx + 19] + str(this_year) + line[year_idx + 23:]
                    else:
                        # single year: append "-<current year>"
                        line = line[:year_idx + 18] + '-' + str(this_year) + line[year_idx + 18:]
            subst += line
    # 'with' closes both handles; the original relied on the GC
    with open(name, 'w+') as f:
        f.write(subst)
# apply the year bump to every file carrying the libtorrent copyright header
for i in glob.glob('src/*.cpp') + \
        glob.glob('include/libtorrent/*.hpp') + \
        glob.glob('include/libtorrent/extensions/*.hpp') + \
        glob.glob('include/libtorrent/kademlia/*.hpp') + \
        glob.glob('src/kademlia/*.cpp') + \
        ['COPYING', 'LICENSE', 'AUTHORS']:
    update_file(i)