Clean-up python code (#3075)

lint and enforce python code formatting
This commit is contained in:
Andrew Resch 2018-06-12 02:43:13 -07:00 committed by Arvid Norberg
parent a2ea79de4e
commit 596d98cac4
34 changed files with 4016 additions and 3435 deletions

View File

@ -5,7 +5,7 @@ matrix:
fast_finish: true fast_finish: true
include: include:
- if: repo = arvidn/libtorrent - if: repo = arvidn/libtorrent
env: variant=release sonar_scan=1 toolset=gcc env: variant=release sonar_scan=1 toolset=gcc pylint=1
- env: variant=test_debug lint=1 tests=1 toolset=gcc-sanitizer - env: variant=test_debug lint=1 tests=1 toolset=gcc-sanitizer
- env: variant=test_debug sim=1 crypto=openssl toolset=gcc-sanitizer - env: variant=test_debug sim=1 crypto=openssl toolset=gcc-sanitizer
- env: variant=test_release coverage=1 tests=1 toolset=gcc-coverage python=1 - env: variant=test_release coverage=1 tests=1 toolset=gcc-coverage python=1
@ -46,6 +46,7 @@ addons:
- python2.7-dev - python2.7-dev
- g++-5 - g++-5
- [cmake3, ninja-build] - [cmake3, ninja-build]
- python3-pip
before_install: before_install:
@ -79,7 +80,6 @@ before_install:
- ulimit -a - ulimit -a
install: install:
- touch ~/user-config.jam - touch ~/user-config.jam
- 'if [[ $toolset == "gcc" ]]; then - 'if [[ $toolset == "gcc" ]]; then
g++-5 --version; g++-5 --version;
@ -130,7 +130,12 @@ install:
- 'echo "using python : 2.7 ;" >> ~/user-config.jam' - 'echo "using python : 2.7 ;" >> ~/user-config.jam'
- if [ "$docs" == "1" ]; then rst2html.py --version; fi - if [ "$docs" == "1" ]; then rst2html.py --version; fi
- 'if [ "$lint" == "1" ]; then curl "https://raw.githubusercontent.com/google/styleguide/71ec7f1e524969c19ce33cfc72e8e023f2b98ee2/cpplint/cpplint.py" >~/cpplint.py; fi' - 'if [ "$lint" == "1" ]; then curl "https://raw.githubusercontent.com/google/styleguide/71ec7f1e524969c19ce33cfc72e8e023f2b98ee2/cpplint/cpplint.py" >~/cpplint.py; fi'
- 'if [ "$pylint" == "1" ]; then
sudo pip install flake8;
flake8 --version;
sudo pip3 install flake8;
python3 -m flake8 --version;
fi'
- 'if [ $sonar_scan == "1" ]; then - 'if [ $sonar_scan == "1" ]; then
wget https://sonarsource.bintray.com/Distribution/sonar-scanner-cli/sonar-scanner-2.6.1.zip; wget https://sonarsource.bintray.com/Distribution/sonar-scanner-cli/sonar-scanner-2.6.1.zip;
wget https://sonarqube.com/static/cpp/build-wrapper-linux-x86.zip; wget https://sonarqube.com/static/cpp/build-wrapper-linux-x86.zip;
@ -183,7 +188,10 @@ script:
- 'if [ "$lint" == "1" ]; then - 'if [ "$lint" == "1" ]; then
python ~/cpplint.py --extensions=cpp --headers=hpp --filter=-,+runtime/explicit,+whitespace/end_of_line --linelength=90 test/*.{cpp,hpp} src/*.cpp include/libtorrent/*.hpp include/libtorrent/kademlia/*.hpp src/kademlia/*.cpp include/libtorrent/aux_/*.hpp include/libtorrent/extensions/*.hpp simulation/*.{cpp,hpp} tools/*.{cpp,hpp} examples/*.{cpp,hpp}; python ~/cpplint.py --extensions=cpp --headers=hpp --filter=-,+runtime/explicit,+whitespace/end_of_line --linelength=90 test/*.{cpp,hpp} src/*.cpp include/libtorrent/*.hpp include/libtorrent/kademlia/*.hpp src/kademlia/*.cpp include/libtorrent/aux_/*.hpp include/libtorrent/extensions/*.hpp simulation/*.{cpp,hpp} tools/*.{cpp,hpp} examples/*.{cpp,hpp};
fi' fi'
- 'if [ "$pylint" == "1" ]; then
flake8 --max-line-length=120;
python3 -m flake8 --max-line-length=120;
fi'
- 'if [ "$sonar_scan" == "1" ]; then - 'if [ "$sonar_scan" == "1" ]; then
build-wrapper-linux-x86-64 --out-dir bw-output bjam -a -j3 optimization=off crypto=$crypto deprecated-functions=off $toolset variant=$variant -l300 && build-wrapper-linux-x86-64 --out-dir bw-output bjam -a -j3 optimization=off crypto=$crypto deprecated-functions=off $toolset variant=$variant -l300 &&
sonar-scanner -D sonar.login=$SONAR_TOKEN; sonar-scanner -D sonar.login=$SONAR_TOKEN;

View File

@ -3,7 +3,7 @@
# Copyright Daniel Wallin 2006. Use, modification and distribution is # Copyright Daniel Wallin 2006. Use, modification and distribution is
# subject to the Boost Software License, Version 1.0. (See accompanying # subject to the Boost Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) # file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
from __future__ import print_function
import sys import sys
import atexit import atexit
@ -153,6 +153,7 @@ def print_download_queue(console, download_queue):
write_line(console, out) write_line(console, out)
def add_torrent(ses, filename, options): def add_torrent(ses, filename, options):
atp = lt.add_torrent_params() atp = lt.add_torrent_params()
if filename.startswith('magnet:'): if filename.startswith('magnet:'):
@ -160,8 +161,8 @@ def add_torrent(ses, filename, options):
else: else:
atp.ti = lt.torrent_info(filename) atp.ti = lt.torrent_info(filename)
try: try:
at.resume_data = open(os.path.join(options.save_path, info.name() + '.fastresume'), 'rb').read() atp.resume_data = open(os.path.join(options.save_path, atp.info.name() + '.fastresume'), 'rb').read()
except: except BaseException:
pass pass
atp.save_path = options.save_path atp.save_path = options.save_path
@ -171,6 +172,7 @@ def add_torrent(ses, filename, options):
| lt.torrent_flags.duplicate_is_error | lt.torrent_flags.duplicate_is_error
ses.async_add_torrent(atp) ses.async_add_torrent(atp)
def main(): def main():
from optparse import OptionParser from optparse import OptionParser
@ -225,12 +227,13 @@ def main():
if options.max_download_rate <= 0: if options.max_download_rate <= 0:
options.max_download_rate = -1 options.max_download_rate = -1
settings = { 'user_agent': 'python_client/' + lt.__version__, settings = {
'user_agent': 'python_client/' + lt.__version__,
'listen_interfaces': '%s:%d' % (options.listen_interface, options.port), 'listen_interfaces': '%s:%d' % (options.listen_interface, options.port),
'download_rate_limit': int(options.max_download_rate), 'download_rate_limit': int(options.max_download_rate),
'upload_rate_limit': int(options.max_upload_rate), 'upload_rate_limit': int(options.max_upload_rate),
'alert_mask': lt.alert.category_t.all_categories, 'alert_mask': lt.alert.category_t.all_categories,
'outgoing_interfaces' : options.outgoing_interface 'outgoing_interfaces': options.outgoing_interface,
} }
if options.proxy_host != '': if options.proxy_host != '':
@ -258,16 +261,16 @@ def main():
out = '' out = ''
for h,t in torrents.items(): for h, t in torrents.items():
out += 'name: %-40s\n' % t.name[:40] out += 'name: %-40s\n' % t.name[:40]
if t.state != lt.torrent_status.seeding: if t.state != lt.torrent_status.seeding:
state_str = ['queued', 'checking', 'downloading metadata', \ state_str = ['queued', 'checking', 'downloading metadata',
'downloading', 'finished', 'seeding', \ 'downloading', 'finished', 'seeding',
'allocating', 'checking fastresume'] 'allocating', 'checking fastresume']
out += state_str[t.state] + ' ' out += state_str[t.state] + ' '
out += '%5.4f%% ' % (t.progress*100) out += '%5.4f%% ' % (t.progress * 100)
out += progress_bar(t.progress, 49) out += progress_bar(t.progress, 49)
out += '\n' out += '\n'
@ -300,7 +303,7 @@ def main():
out += progress_bar(p / float(f.size), 20) out += progress_bar(p / float(f.size), 20)
out += ' ' + f.path + '\n' out += ' ' + f.path + '\n'
write_line(console, out) write_line(console, out)
except: except BaseException:
pass pass
write_line(console, 76 * '-' + '\n') write_line(console, 76 * '-' + '\n')
@ -312,7 +315,7 @@ def main():
alerts_log.append(a.message()) alerts_log.append(a.message())
# add new torrents to our list of torrent_status # add new torrents to our list of torrent_status
if type(a) == lt.add_torrent_alert: if isinstance(a, lt.add_torrent_alert):
h = a.handle h = a.handle
h.set_max_connections(60) h.set_max_connections(60)
h.set_max_uploads(-1) h.set_max_uploads(-1)
@ -320,7 +323,7 @@ def main():
# update our torrent_status array for torrents that have # update our torrent_status array for torrents that have
# changed some of their state # changed some of their state
if type(a) == lt.state_update_alert: if isinstance(a, lt.state_update_alert):
for s in a.status: for s in a.status:
torrents[s.handle] = s torrents[s.handle] = s
@ -333,19 +336,23 @@ def main():
c = console.sleep_and_input(0.5) c = console.sleep_and_input(0.5)
ses.post_torrent_updates() ses.post_torrent_updates()
if not c: continue if not c:
continue
if c == 'r': if c == 'r':
for h in torrents.keys(): h.force_reannounce() for h in torrents:
h.force_reannounce()
elif c == 'q': elif c == 'q':
alive = False alive = False
elif c == 'p': elif c == 'p':
for h in torrents.keys(): h.pause() for h in torrents:
h.pause()
elif c == 'u': elif c == 'u':
for h in torrents.keys(): h.resume() for h in torrents:
h.resume()
ses.pause() ses.pause()
for h,t in torrents.items(): for h, t in torrents.items():
if not h.is_valid() or not t.has_metadata: if not h.is_valid() or not t.has_metadata:
continue continue
h.save_resume_data() h.save_resume_data()
@ -353,7 +360,7 @@ def main():
while len(torrents) > 0: while len(torrents) > 0:
alerts = ses.pop_alerts() alerts = ses.pop_alerts()
for a in alerts: for a in alerts:
if type(a) == lt.save_resume_data_alert: if isinstance(a, lt.save_resume_data_alert):
print(a) print(a)
data = lt.write_resume_data_buf(a.params) data = lt.write_resume_data_buf(a.params)
h = a.handle h = a.handle
@ -361,11 +368,12 @@ def main():
open(os.path.join(options.save_path, torrents[h].name + '.fastresume'), 'wb').write(data) open(os.path.join(options.save_path, torrents[h].name + '.fastresume'), 'wb').write(data)
del torrents[h] del torrents[h]
if type(a) == lt.save_resume_data_failed_alert: if isinstance(a, lt.save_resume_data_failed_alert):
h = a.handle h = a.handle
if h in torrents: if h in torrents:
print('failed to save resume data for ', torrents[h].name) print('failed to save resume data for ', torrents[h].name)
del torrents[h] del torrents[h]
time.sleep(0.5) time.sleep(0.5)
main() main()

View File

@ -1,5 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env python
from __future__ import print_function
import sys import sys
import os import os
@ -22,8 +22,8 @@ parent_input = os.path.split(input)[0]
# if we have a single file, use it because os.walk does not work on a single files # if we have a single file, use it because os.walk does not work on a single files
if os.path.isfile(input): if os.path.isfile(input):
size = os.path.getsize(input) size = os.path.getsize(input)
fs.add_file(input, size) fs.add_file(input, size)
for root, dirs, files in os.walk(input): for root, dirs, files in os.walk(input):
# skip directories starting with . # skip directories starting with .
@ -39,7 +39,7 @@ for root, dirs, files in os.walk(input):
if f == 'Thumbs.db': if f == 'Thumbs.db':
continue continue
fname = os.path.join(root[len(parent_input)+1:], f) fname = os.path.join(root[len(parent_input) + 1:], f)
size = os.path.getsize(os.path.join(parent_input, fname)) size = os.path.getsize(os.path.join(parent_input, fname))
print('%10d kiB %s' % (size / 1024, fname)) print('%10d kiB %s' % (size / 1024, fname))
fs.add_file(fname, size) fs.add_file(fname, size)

View File

@ -1,5 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env python
from __future__ import print_function
from distutils.core import setup, Extension from distutils.core import setup, Extension
from distutils.sysconfig import get_config_vars from distutils.sysconfig import get_config_vars
@ -55,13 +55,13 @@ def target_specific():
try: try:
with open('compile_flags') as _file: with open('compile_flags') as _file:
extra_cmd = _file.read() extra_cmd = _file.read()
except: except BaseException:
extra_cmd = None extra_cmd = None
try: try:
with open('link_flags') as _file: with open('link_flags') as _file:
ldflags = _file.read() ldflags = _file.read()
except: except BaseException:
ldflags = None ldflags = None
# this is to pull out compiler arguments from the CXX flags set up by the # this is to pull out compiler arguments from the CXX flags set up by the
@ -75,7 +75,7 @@ try:
while len(cmd) > 0 and not cmd[0].startswith('-'): while len(cmd) > 0 and not cmd[0].startswith('-'):
cmd = cmd[1:] cmd = cmd[1:]
extra_cmd += ' '.join(cmd) extra_cmd += ' '.join(cmd)
except: except BaseException:
pass pass
ext = None ext = None
@ -85,7 +85,7 @@ if '--bjam' in sys.argv:
del sys.argv[sys.argv.index('--bjam')] del sys.argv[sys.argv.index('--bjam')]
if '--help' not in sys.argv \ if '--help' not in sys.argv \
and '--help-commands' not in sys.argv: and '--help-commands' not in sys.argv:
toolset = '' toolset = ''
file_ext = '.so' file_ext = '.so'
@ -129,14 +129,22 @@ if '--bjam' in sys.argv:
print('build failed') print('build failed')
sys.exit(1) sys.exit(1)
try: os.mkdir('build') try:
except: pass os.mkdir('build')
try: shutil.rmtree('build/lib') except BaseException:
except: pass pass
try: os.mkdir('build/lib') try:
except: pass shutil.rmtree('build/lib')
try: os.mkdir('libtorrent') except BaseException:
except: pass pass
try:
os.mkdir('build/lib')
except BaseException:
pass
try:
os.mkdir('libtorrent')
except BaseException:
pass
shutil.copyfile('libtorrent' + file_ext, shutil.copyfile('libtorrent' + file_ext,
'build/lib/libtorrent' + file_ext) 'build/lib/libtorrent' + file_ext)
@ -145,13 +153,12 @@ if '--bjam' in sys.argv:
else: else:
# Remove '-Wstrict-prototypes' compiler option, which isn't valid for C++. # Remove '-Wstrict-prototypes' compiler option, which isn't valid for C++.
cfg_vars = get_config_vars() cfg_vars = get_config_vars()
for key, value in cfg_vars.items(): for key, value in list(cfg_vars.items()):
if isinstance(value, str): if isinstance(value, str):
cfg_vars[key] = value.replace('-Wstrict-prototypes', '') cfg_vars[key] = value.replace('-Wstrict-prototypes', '')
source_list = os.listdir(os.path.join(os.path.dirname(__file__), "src")) src_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "src"))
source_list = [os.path.abspath(os.path.join(os.path.dirname(__file__), source_list = [os.path.join(src_dir, s) for s in os.listdir(src_dir) if s.endswith(".cpp")]
"src", s)) for s in source_list if s.endswith(".cpp")]
if extra_cmd: if extra_cmd:
flags = flags_parser() flags = flags_parser()

View File

@ -8,7 +8,7 @@ import libtorrent as lt
import time import time
import sys import sys
ses = lt.session({'listen_interfaces':'0.0.0.0:6881'}) ses = lt.session({'listen_interfaces': '0.0.0.0:6881'})
info = lt.torrent_info(sys.argv[1]) info = lt.torrent_info(sys.argv[1])
h = ses.add_torrent({'ti': info, 'save_path': '.'}) h = ses.add_torrent({'ti': info, 'save_path': '.'})

View File

@ -1,7 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4 # vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
from __future__ import print_function
import libtorrent as lt import libtorrent as lt
@ -13,7 +12,6 @@ import shutil
import binascii import binascii
import subprocess as sub import subprocess as sub
import sys import sys
import inspect
import pickle import pickle
import threading import threading
@ -29,6 +27,7 @@ settings = {
'enable_dht': False, 'enable_lsd': False, 'enable_natpmp': False, 'enable_dht': False, 'enable_lsd': False, 'enable_natpmp': False,
'enable_upnp': False, 'listen_interfaces': '0.0.0.0:0', 'file_pool_size': 1} 'enable_upnp': False, 'listen_interfaces': '0.0.0.0:0', 'file_pool_size': 1}
class test_create_torrent(unittest.TestCase): class test_create_torrent(unittest.TestCase):
def test_from_torrent_info(self): def test_from_torrent_info(self):
@ -47,10 +46,10 @@ class test_create_torrent(unittest.TestCase):
class test_session_stats(unittest.TestCase): class test_session_stats(unittest.TestCase):
def test_unique(self): def test_unique(self):
l = lt.session_stats_metrics() metrics = lt.session_stats_metrics()
self.assertTrue(len(l) > 40) self.assertTrue(len(metrics) > 40)
idx = set() idx = set()
for m in l: for m in metrics:
self.assertTrue(m.value_index not in idx) self.assertTrue(m.value_index not in idx)
idx.add(m.value_index) idx.add(m.value_index)
@ -118,7 +117,6 @@ class test_torrent_handle(unittest.TestCase):
self.assertEqual(len(torrents), 1) self.assertEqual(len(torrents), 1)
self.assertEqual(torrents[self.h], 'bar') self.assertEqual(torrents[self.h], 'bar')
def test_replace_trackers(self): def test_replace_trackers(self):
self.setup() self.setup()
trackers = [] trackers = []
@ -141,20 +139,18 @@ class test_torrent_handle(unittest.TestCase):
tracker.fail_limit = 1 tracker.fail_limit = 1
trackers = [tracker] trackers = [tracker]
self.h.replace_trackers(trackers) self.h.replace_trackers(trackers)
tracker_list = [tracker for tracker in self.h.trackers()]
# wait a bit until the endpoints list gets populated # wait a bit until the endpoints list gets populated
while len(tracker_list[0]['endpoints']) == 0: while len(self.h.trackers()[0]['endpoints']) == 0:
time.sleep(0.1) time.sleep(0.1)
tracker_list = [tracker for tracker in self.h.trackers()] pickled_trackers = pickle.dumps(self.h.trackers())
pickled_trackers = pickle.dumps(tracker_list)
unpickled_trackers = pickle.loads(pickled_trackers) unpickled_trackers = pickle.loads(pickled_trackers)
self.assertEqual(unpickled_trackers[0]['url'], 'udp://tracker1.com') self.assertEqual(unpickled_trackers[0]['url'], 'udp://tracker1.com')
self.assertEqual(unpickled_trackers[0]['endpoints'][0]['last_error']['value'], 0) self.assertEqual(unpickled_trackers[0]['endpoints'][0]['last_error']['value'], 0)
def test_file_status(self): def test_file_status(self):
self.setup() self.setup()
l = self.h.file_status() status = self.h.file_status()
print(l) print(status)
def test_piece_deadlines(self): def test_piece_deadlines(self):
self.setup() self.setup()
@ -165,11 +161,10 @@ class test_torrent_handle(unittest.TestCase):
# time, wait for next full second to prevent second increment # time, wait for next full second to prevent second increment
time.sleep(1 - datetime.datetime.now().microsecond / 1000000.0) time.sleep(1 - datetime.datetime.now().microsecond / 1000000.0)
sessionStart = datetime.datetime.now().replace(microsecond=0)
self.setup() self.setup()
st = self.h.status() st = self.h.status()
for attr in dir(st): for attr in dir(st):
print('%s: %s' % (attr, getattr(st, attr))) print('%s: %s' % (attr, getattr(st, attr)))
# last upload and download times are at session start time # last upload and download times are at session start time
self.assertEqual(st.last_upload, None) self.assertEqual(st.last_upload, None)
self.assertEqual(st.last_download, None) self.assertEqual(st.last_download, None)
@ -177,7 +172,7 @@ class test_torrent_handle(unittest.TestCase):
def test_serialize_trackers(self): def test_serialize_trackers(self):
"""Test to ensure the dict contains only python built-in types""" """Test to ensure the dict contains only python built-in types"""
self.setup() self.setup()
self.h.add_tracker({'url':'udp://tracker1.com'}) self.h.add_tracker({'url': 'udp://tracker1.com'})
tr = self.h.trackers()[0] tr = self.h.trackers()[0]
# wait a bit until the endpoints list gets populated # wait a bit until the endpoints list gets populated
while len(tr['endpoints']) == 0: while len(tr['endpoints']) == 0:
@ -215,7 +210,7 @@ class test_torrent_handle(unittest.TestCase):
ses = lt.session(settings) ses = lt.session(settings)
h = ses.add_torrent(tp) h = ses.add_torrent(tp)
for attr in dir(tp): for attr in dir(tp):
print('%s: %s' % (attr, getattr(tp, attr))) print('%s: %s' % (attr, getattr(tp, attr)))
h.connect_peer(('3.3.3.3', 3)) h.connect_peer(('3.3.3.3', 3))
@ -246,33 +241,34 @@ class test_torrent_handle(unittest.TestCase):
def test_torrent_parameter(self): def test_torrent_parameter(self):
self.ses = lt.session(settings) self.ses = lt.session(settings)
self.ti = lt.torrent_info('url_seed_multi.torrent'); self.ti = lt.torrent_info('url_seed_multi.torrent')
self.h = self.ses.add_torrent({ self.h = self.ses.add_torrent({
'ti': self.ti, 'ti': self.ti,
'save_path': os.getcwd(), 'save_path': os.getcwd(),
'trackers': ['http://test.com/announce'], 'trackers': ['http://test.com/announce'],
'dht_nodes': [('1.2.3.4', 6881), ('4.3.2.1', 6881)], 'dht_nodes': [('1.2.3.4', 6881), ('4.3.2.1', 6881)],
'file_priorities': [1,1], 'file_priorities': [1, 1],
'http_seeds': ['http://test.com/file3'], 'http_seeds': ['http://test.com/file3'],
'url_seeds': ['http://test.com/announce-url'], 'url_seeds': ['http://test.com/announce-url'],
'peers': [('5.6.7.8', 6881)], 'peers': [('5.6.7.8', 6881)],
'banned_peers': [('8.7.6.5', 6881)], 'banned_peers': [('8.7.6.5', 6881)],
'renamed_files': { 0: 'test.txt', 2: 'test.txt' } 'renamed_files': {0: 'test.txt', 2: 'test.txt'}
}) })
self.st = self.h.status() self.st = self.h.status()
self.assertEqual(self.st.save_path, os.getcwd()) self.assertEqual(self.st.save_path, os.getcwd())
trackers = self.h.trackers(); trackers = self.h.trackers()
self.assertEqual(len(trackers), 1) self.assertEqual(len(trackers), 1)
self.assertEqual(trackers[0].get('url'), 'http://test.com/announce') self.assertEqual(trackers[0].get('url'), 'http://test.com/announce')
self.assertEqual(trackers[0].get('tier'), 0) self.assertEqual(trackers[0].get('tier'), 0)
self.assertEqual(self.h.get_file_priorities(), [1,1]) self.assertEqual(self.h.get_file_priorities(), [1, 1])
self.assertEqual(self.h.http_seeds(),['http://test.com/file3']) self.assertEqual(self.h.http_seeds(), ['http://test.com/file3'])
# url_seeds was already set, test that it did not got overwritten # url_seeds was already set, test that it did not got overwritten
self.assertEqual(self.h.url_seeds(), self.assertEqual(self.h.url_seeds(),
['http://test.com/announce-url/', 'http://test.com/file/']) ['http://test.com/announce-url/', 'http://test.com/file/'])
self.assertEqual(self.h.get_piece_priorities(),[4]) self.assertEqual(self.h.get_piece_priorities(), [4])
self.assertEqual(self.ti.merkle_tree(),[]) self.assertEqual(self.ti.merkle_tree(), [])
self.assertEqual(self.st.verified_pieces,[]) self.assertEqual(self.st.verified_pieces, [])
class test_torrent_info(unittest.TestCase): class test_torrent_info(unittest.TestCase):
@ -328,13 +324,13 @@ class test_torrent_info(unittest.TestCase):
os.path.join('temp', 'foo')) os.path.join('temp', 'foo'))
idx += 1 idx += 1
def test_announce_entry(self): def test_announce_entry(self):
ae = lt.announce_entry('test') ae = lt.announce_entry('test')
self.assertEquals(ae.url, 'test') self.assertEqual(ae.url, 'test')
self.assertEquals(ae.tier, 0) self.assertEqual(ae.tier, 0)
self.assertEquals(ae.verified, False) self.assertEqual(ae.verified, False)
self.assertEquals(ae.source, 0) self.assertEqual(ae.source, 0)
class test_alerts(unittest.TestCase): class test_alerts(unittest.TestCase):
@ -350,7 +346,7 @@ class test_alerts(unittest.TestCase):
alerts = ses.pop_alerts() alerts = ses.pop_alerts()
for a in alerts: for a in alerts:
if a.what() == 'add_torrent_alert': if a.what() == 'add_torrent_alert':
self.assertEquals(a.torrent_name, 'temp') self.assertEqual(a.torrent_name, 'temp')
print(a.message()) print(a.message())
for field_name in dir(a): for field_name in dir(a):
if field_name.startswith('__'): if field_name.startswith('__'):
@ -429,10 +425,11 @@ class test_bencoder(unittest.TestCase):
class test_sha1hash(unittest.TestCase): class test_sha1hash(unittest.TestCase):
def test_sha1hash(self): def test_sha1hash(self):
h = 'a0'*20 h = 'a0' * 20
s = lt.sha1_hash(binascii.unhexlify(h)) s = lt.sha1_hash(binascii.unhexlify(h))
self.assertEqual(h, str(s)) self.assertEqual(h, str(s))
class test_magnet_link(unittest.TestCase): class test_magnet_link(unittest.TestCase):
def test_parse_magnet_uri(self): def test_parse_magnet_uri(self):
@ -453,70 +450,72 @@ class test_magnet_link(unittest.TestCase):
h = ses.add_torrent(p) h = ses.add_torrent(p)
self.assertEqual(str(h.info_hash()), '178882f042c0c33426a6d81e0333ece346e68a68') self.assertEqual(str(h.info_hash()), '178882f042c0c33426a6d81e0333ece346e68a68')
class test_peer_class(unittest.TestCase): class test_peer_class(unittest.TestCase):
def test_peer_class_ids(self): def test_peer_class_ids(self):
s = lt.session(settings) s = lt.session(settings)
print('global_peer_class_id:', lt.session.global_peer_class_id) print('global_peer_class_id:', lt.session.global_peer_class_id)
print('tcp_peer_class_id:', lt.session.tcp_peer_class_id) print('tcp_peer_class_id:', lt.session.tcp_peer_class_id)
print('local_peer_class_id:', lt.session.local_peer_class_id) print('local_peer_class_id:', lt.session.local_peer_class_id)
print('global: ', s.get_peer_class(s.global_peer_class_id)) print('global: ', s.get_peer_class(s.global_peer_class_id))
print('tcp: ', s.get_peer_class(s.local_peer_class_id)) print('tcp: ', s.get_peer_class(s.local_peer_class_id))
print('local: ', s.get_peer_class(s.local_peer_class_id)) print('local: ', s.get_peer_class(s.local_peer_class_id))
def test_peer_class(self): def test_peer_class(self):
s = lt.session(settings) s = lt.session(settings)
c = s.create_peer_class('test class') c = s.create_peer_class('test class')
print('new class: ', s.get_peer_class(c)) print('new class: ', s.get_peer_class(c))
nfo = s.get_peer_class(c) nfo = s.get_peer_class(c)
self.assertEqual(nfo['download_limit'], 0) self.assertEqual(nfo['download_limit'], 0)
self.assertEqual(nfo['upload_limit'], 0) self.assertEqual(nfo['upload_limit'], 0)
self.assertEqual(nfo['ignore_unchoke_slots'], False) self.assertEqual(nfo['ignore_unchoke_slots'], False)
self.assertEqual(nfo['connection_limit_factor'], 100) self.assertEqual(nfo['connection_limit_factor'], 100)
self.assertEqual(nfo['download_priority'], 1) self.assertEqual(nfo['download_priority'], 1)
self.assertEqual(nfo['upload_priority'], 1) self.assertEqual(nfo['upload_priority'], 1)
self.assertEqual(nfo['label'], 'test class') self.assertEqual(nfo['label'], 'test class')
nfo['download_limit'] = 1337 nfo['download_limit'] = 1337
nfo['upload_limit'] = 1338 nfo['upload_limit'] = 1338
nfo['ignore_unchoke_slots'] = True nfo['ignore_unchoke_slots'] = True
nfo['connection_limit_factor'] = 42 nfo['connection_limit_factor'] = 42
nfo['download_priority'] = 2 nfo['download_priority'] = 2
nfo['upload_priority'] = 3 nfo['upload_priority'] = 3
s.set_peer_class(c, nfo) s.set_peer_class(c, nfo)
nfo2 = s.get_peer_class(c) nfo2 = s.get_peer_class(c)
self.assertEqual(nfo, nfo2) self.assertEqual(nfo, nfo2)
def test_peer_class_filter(self): def test_peer_class_filter(self):
filt = lt.peer_class_type_filter() filt = lt.peer_class_type_filter()
filt.add(lt.peer_class_type_filter.tcp_socket, lt.session.global_peer_class_id); filt.add(lt.peer_class_type_filter.tcp_socket, lt.session.global_peer_class_id)
filt.remove(lt.peer_class_type_filter.utp_socket, lt.session.local_peer_class_id); filt.remove(lt.peer_class_type_filter.utp_socket, lt.session.local_peer_class_id)
filt.disallow(lt.peer_class_type_filter.tcp_socket, lt.session.global_peer_class_id); filt.disallow(lt.peer_class_type_filter.tcp_socket, lt.session.global_peer_class_id)
filt.allow(lt.peer_class_type_filter.utp_socket, lt.session.local_peer_class_id); filt.allow(lt.peer_class_type_filter.utp_socket, lt.session.local_peer_class_id)
def test_peer_class_ip_filter(self):
s = lt.session(settings)
s.set_peer_class_type_filter(lt.peer_class_type_filter())
s.set_peer_class_filter(lt.ip_filter())
def test_peer_class_ip_filter(self):
s = lt.session(settings)
s.set_peer_class_type_filter(lt.peer_class_type_filter())
s.set_peer_class_filter(lt.ip_filter())
class test_session(unittest.TestCase): class test_session(unittest.TestCase):
def test_add_torrent(self): def test_add_torrent(self):
s = lt.session(settings) s = lt.session(settings)
h = s.add_torrent({'ti': lt.torrent_info('base.torrent'), s.add_torrent({'ti': lt.torrent_info('base.torrent'),
'save_path': '.', 'save_path': '.',
'dht_nodes': [('1.2.3.4', 6881), ('4.3.2.1', 6881)], 'dht_nodes': [('1.2.3.4', 6881), ('4.3.2.1', 6881)],
'http_seeds': ['http://test.com/seed'], 'http_seeds': ['http://test.com/seed'],
'peers': [('5.6.7.8', 6881)], 'peers': [('5.6.7.8', 6881)],
'banned_peers': [('8.7.6.5', 6881)], 'banned_peers': [('8.7.6.5', 6881)],
'file_priorities': [1,1,1,2,0]}) 'file_priorities': [1, 1, 1, 2, 0]})
def test_apply_settings(self): def test_apply_settings(self):
@ -565,10 +564,9 @@ class test_session(unittest.TestCase):
self.assertTrue(isinstance(a.active_requests, list)) self.assertTrue(isinstance(a.active_requests, list))
self.assertTrue(isinstance(a.routing_table, list)) self.assertTrue(isinstance(a.routing_table, list))
def test_unknown_settings(self): def test_unknown_settings(self):
try: try:
s = lt.session({'unexpected-key-name': 42}) lt.session({'unexpected-key-name': 42})
self.assertFalse('should have thrown an exception') self.assertFalse('should have thrown an exception')
except KeyError as e: except KeyError as e:
print(e) print(e)
@ -606,52 +604,52 @@ class test_example_client(unittest.TestCase):
my_stdin = slave_fd my_stdin = slave_fd
process = sub.Popen( process = sub.Popen(
[sys.executable,"client.py","url_seed_multi.torrent"], [sys.executable, "client.py", "url_seed_multi.torrent"],
stdin=my_stdin, stdout=sub.PIPE, stderr=sub.PIPE) stdin=my_stdin, stdout=sub.PIPE, stderr=sub.PIPE)
# python2 has no Popen.wait() timeout # python2 has no Popen.wait() timeout
time.sleep(5) time.sleep(5)
returncode = process.poll() returncode = process.poll()
if returncode == None: if returncode is None:
# this is an expected use-case # this is an expected use-case
process.kill() process.kill()
err = process.stderr.read().decode("utf-8") err = process.stderr.read().decode("utf-8")
self.assertEqual('', err, 'process throw errors: \n' + err) self.assertEqual('', err, 'process throw errors: \n' + err)
# check error code if process did unexpected end # check error code if process did unexpected end
if returncode != None: if returncode is not None:
# in case of error return: output stdout if nothing was on stderr # in case of error return: output stdout if nothing was on stderr
if returncode != 0: if returncode != 0:
print("stdout:\n" + process.stdout.read().decode("utf-8")) print("stdout:\n" + process.stdout.read().decode("utf-8"))
self.assertEqual(returncode, 0, "returncode: " + str(returncode) + "\n" self.assertEqual(returncode, 0, "returncode: " + str(returncode) + "\n"
+ "stderr: empty\n" + "stderr: empty\n"
+ "some configuration does not output errors like missing module members," + "some configuration does not output errors like missing module members,"
+ "try to call it manually to get the error message\n") + "try to call it manually to get the error message\n")
def test_execute_simple_client(self): def test_execute_simple_client(self):
process = sub.Popen( process = sub.Popen(
[sys.executable,"simple_client.py","url_seed_multi.torrent"], [sys.executable, "simple_client.py", "url_seed_multi.torrent"],
stdout=sub.PIPE, stderr=sub.PIPE) stdout=sub.PIPE, stderr=sub.PIPE)
# python2 has no Popen.wait() timeout # python2 has no Popen.wait() timeout
time.sleep(5) time.sleep(5)
returncode = process.poll() returncode = process.poll()
if returncode == None: if returncode is None:
# this is an expected use-case # this is an expected use-case
process.kill() process.kill()
err = process.stderr.read().decode("utf-8") err = process.stderr.read().decode("utf-8")
self.assertEqual('', err, 'process throw errors: \n' + err) self.assertEqual('', err, 'process throw errors: \n' + err)
# check error code if process did unexpected end # check error code if process did unexpected end
if returncode != None: if returncode is not None:
# in case of error return: output stdout if nothing was on stderr # in case of error return: output stdout if nothing was on stderr
if returncode != 0: if returncode != 0:
print("stdout:\n" + process.stdout.read().decode("utf-8")) print("stdout:\n" + process.stdout.read().decode("utf-8"))
self.assertEqual(returncode, 0, "returncode: " + str(returncode) + "\n" self.assertEqual(returncode, 0, "returncode: " + str(returncode) + "\n"
+ "stderr: empty\n" + "stderr: empty\n"
+ "some configuration does not output errors like missing module members," + "some configuration does not output errors like missing module members,"
+ "try to call it manually to get the error message\n") + "try to call it manually to get the error message\n")
def test_execute_make_torrent(self): def test_execute_make_torrent(self):
process = sub.Popen( process = sub.Popen(
[sys.executable,"make_torrent.py","url_seed_multi.torrent", [sys.executable, "make_torrent.py", "url_seed_multi.torrent",
"http://test.com/test"], stdout=sub.PIPE, stderr=sub.PIPE) "http://test.com/test"], stdout=sub.PIPE, stderr=sub.PIPE)
returncode = process.wait() returncode = process.wait()
# python2 has no Popen.wait() timeout # python2 has no Popen.wait() timeout
err = process.stderr.read().decode("utf-8") err = process.stderr.read().decode("utf-8")
@ -660,15 +658,16 @@ class test_example_client(unittest.TestCase):
if returncode != 0: if returncode != 0:
print("stdout:\n" + process.stdout.read().decode("utf-8")) print("stdout:\n" + process.stdout.read().decode("utf-8"))
self.assertEqual(returncode, 0, "returncode: " + str(returncode) + "\n" self.assertEqual(returncode, 0, "returncode: " + str(returncode) + "\n"
+ "stderr: empty\n" + "stderr: empty\n"
+ "some configuration does not output errors like missing module members," + "some configuration does not output errors like missing module members,"
+ "try to call it manually to get the error message\n") + "try to call it manually to get the error message\n")
def test_default_settings(self): def test_default_settings(self):
default = lt.default_settings() default = lt.default_settings()
print(default) print(default)
class test_operation_t(unittest.TestCase): class test_operation_t(unittest.TestCase):
def test_enum(self): def test_enum(self):
@ -678,6 +677,7 @@ class test_operation_t(unittest.TestCase):
self.assertEqual(lt.operation_name(lt.operation_t.partfile_write), "partfile_write") self.assertEqual(lt.operation_name(lt.operation_t.partfile_write), "partfile_write")
self.assertEqual(lt.operation_name(lt.operation_t.hostname_lookup), "hostname_lookup") self.assertEqual(lt.operation_name(lt.operation_t.hostname_lookup), "hostname_lookup")
if __name__ == '__main__': if __name__ == '__main__':
print(lt.__version__) print(lt.__version__)
shutil.copy(os.path.join('..', '..', 'test', 'test_torrents', shutil.copy(os.path.join('..', '..', 'test', 'test_torrents',

File diff suppressed because it is too large Load Diff

View File

@ -1,98 +1,123 @@
#!/usr/bin/env python #!/usr/bin/env python
from __future__ import print_function
f = open('../include/libtorrent/settings_pack.hpp') f = open('../include/libtorrent/settings_pack.hpp')
out = open('settings.rst', 'w+') out = open('settings.rst', 'w+')
def print_field(str, width): def print_field(str, width):
return '%s%s' % (str, ' ' * (width - len(str))) return '%s%s' % (str, ' ' * (width - len(str)))
def render_section(names, description, type, default_values): def render_section(names, description, type, default_values):
max_name_len = max(len(max(names, key=len)), len('name')) max_name_len = max(len(max(names, key=len)), len('name'))
max_type_len = max(len(type), len('type')) max_type_len = max(len(type), len('type'))
max_val_len = max(len(max(default_values, key=len)), len('default')) max_val_len = max(len(max(default_values, key=len)), len('default'))
# add link targets for the rest of the manual to reference # add link targets for the rest of the manual to reference
for n in names: for n in names:
print >>out, '.. _%s:\n' % n print('.. _%s:\n' % n, file=out)
if len(names) > 0: if len(names) > 0:
print >>out, '.. raw:: html\n' print('.. raw:: html\n', file=out)
for n in names: for n in names:
print >>out, '\t<a name="%s"></a>' % n print('\t<a name="%s"></a>' % n, file=out)
print >>out, '' print('', file=out)
separator = '+-' + ('-' * max_name_len) + '-+-' + ('-' * max_type_len) + '-+-' + ('-' * max_val_len) + '-+' separator = '+-' + ('-' * max_name_len) + '-+-' + ('-' * max_type_len) + '-+-' + ('-' * max_val_len) + '-+'
# build a table for the settings, their type and default value
print(separator, file=out)
print(
'| %s | %s | %s |' %
(print_field(
'name', max_name_len), print_field(
'type', max_type_len), print_field(
'default', max_val_len)), file=out)
print(separator.replace('-', '='), file=out)
for i in range(len(names)):
print(
'| %s | %s | %s |' %
(print_field(
names[i], max_name_len), print_field(
type, max_type_len), print_field(
default_values[i], max_val_len)), file=out)
print(separator, file=out)
print(file=out)
print(description, file=out)
# build a table for the settings, their type and default value
print >>out, separator
print >>out, '| %s | %s | %s |' % (print_field('name', max_name_len), print_field('type', max_type_len), print_field('default', max_val_len))
print >>out, separator.replace('-', '=')
for i in range(len(names)):
print >>out, '| %s | %s | %s |' % (print_field(names[i], max_name_len), print_field(type, max_type_len), print_field(default_values[i], max_val_len))
print >>out, separator
print >>out
print >>out, description
mode = '' mode = ''
# parse out default values for settings # parse out default values for settings
f2 = open('../src/settings_pack.cpp') f2 = open('../src/settings_pack.cpp')
def_map = {} def_map = {}
for l in f2: for line in f2:
l = l.strip() line = line.strip()
if not l.startswith('SET(') \ if not line.startswith('SET(') \
and not l.startswith('SET_NOPREV(') \ and not line.startswith('SET_NOPREV(') \
and not l.startswith('DEPRECATED_SET('): continue and not line.startswith('DEPRECATED_SET('):
continue
l = l.split('(')[1].split(',') line = line.split('(')[1].split(',')
def_map[l[0]] = l[1].strip() def_map[line[0]] = line[1].strip()
print '%s = %s' % (l[0], l[1].strip()) print('%s = %s' % (line[0], line[1].strip()))
description = '' description = ''
names = [] names = []
for l in f: for line in f:
if 'enum string_types' in l: mode = 'string' if 'enum string_types' in line:
if 'enum bool_types' in l: mode = 'bool' mode = 'string'
if 'enum int_types' in l: mode = 'int' if 'enum bool_types' in line:
if '#if TORRENT_ABI_VERSION == 1' in l: mode += 'skip' mode = 'bool'
if '#endif' in l: mode = mode[0:-4] if 'enum int_types' in line:
mode = 'int'
if '#if TORRENT_ABI_VERSION == 1' in line:
mode += 'skip'
if '#endif' in line:
mode = mode[0:-4]
if mode == '': continue if mode == '':
if mode[-4:] == 'skip': continue continue
if mode[-4:] == 'skip':
continue
l = l.lstrip() line = line.lstrip()
if l == '' and len(names) > 0: if line == '' and len(names) > 0:
if description == '': if description == '':
for n in names: for n in names:
print 'WARNING: no description for "%s"' % n print('WARNING: no description for "%s"' % n)
else: else:
default_values = [] default_values = []
for n in names: for n in names:
default_values.append(def_map[n]) default_values.append(def_map[n])
render_section(names, description, mode, default_values) render_section(names, description, mode, default_values)
description = '' description = ''
names = [] names = []
if l.startswith('};'): if line.startswith('};'):
mode = '' mode = ''
continue continue
if l.startswith('//'): if line.startswith('//'):
if l[2] == ' ': description += l[3:] if line[2] == ' ':
else: description += l[2:] description += line[3:]
continue else:
description += line[2:]
continue
l = l.strip() line = line.strip()
if l.endswith(','): if line.endswith(','):
l = l[:-1] # strip trailing comma line = line[:-1] # strip trailing comma
if '=' in l: l = l.split('=')[0].strip() if '=' in line:
if l.endswith('_internal'): continue line = line.split('=')[0].strip()
if line.endswith('_internal'):
continue
names.append(l) names.append(line)
out.close() out.close()
f.close() f.close()

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env python
from __future__ import print_function
counter_types = {} counter_types = {}
@ -6,33 +7,40 @@ f = open('../include/libtorrent/performance_counters.hpp')
counter_type = '' counter_type = ''
for l in f: for line in f:
# ignore anything after // # ignore anything after //
if '//' in l: l = l.split('//')[0] if '//' in line:
line = line.split('//')[0]
l = l.strip() line = line.strip()
if l.startswith('#'): continue if line.startswith('#'):
if l == '': continue continue
if line == '':
continue
if 'enum stats_counter_t' in l: if 'enum stats_counter_t' in line:
counter_type = 'counter' counter_type = 'counter'
continue continue
if 'enum stats_gauge_t' in l: if 'enum stats_gauge_t' in line:
counter_type = 'gauge' counter_type = 'gauge'
continue continue
if '{' in l or '}' in l or 'struct' in l or 'namespace' in l: continue if '{' in line or '}' in line or 'struct' in line or 'namespace' in line:
if counter_type == '': continue continue
if not l.endswith(','): continue if counter_type == '':
continue
if not line.endswith(','):
continue
# strip off trailing comma # strip off trailing comma
l = l[:-1] line = line[:-1]
if '=' in l: l = l[:l.index('=')].strip() if '=' in line:
line = line[:line.index('=')].strip()
counter_types[l] = counter_type counter_types[line] = counter_type
f.close() f.close()
@ -40,39 +48,42 @@ f = open('../src/session_stats.cpp')
out = open('stats_counters.rst', 'w+') out = open('stats_counters.rst', 'w+')
def print_field(str, width): def print_field(str, width):
return '%s%s' % (str, ' ' * (width - len(str))) return '%s%s' % (str, ' ' * (width - len(str)))
def render_section(names, description, types): def render_section(names, description, types):
max_name_len = max(len(max(names, key=len)), len('name')) max_name_len = max(len(max(names, key=len)), len('name'))
max_type_len = max(len(max(types, key=len)), len('type')) max_type_len = max(len(max(types, key=len)), len('type'))
if description == '': if description == '':
for n in names: for n in names:
print 'WARNING: no description for "%s"' % n print('WARNING: no description for "%s"' % n)
# add link targets for the rest of the manual to reference # add link targets for the rest of the manual to reference
for n in names: for n in names:
print >>out, '.. _%s:\n' % n print('.. _%s:\n' % n, file=out)
if len(names) > 0: if len(names) > 0:
print >>out, '.. raw:: html\n' print('.. raw:: html\n', file=out)
for n in names: for n in names:
print >>out, '\t<a name="%s"></a>' % n print('\t<a name="%s"></a>' % n, file=out)
print >>out, '' print('', file=out)
separator = '+-' + ('-' * max_name_len) + '-+-' + ('-' * max_type_len) + '-+' separator = '+-' + ('-' * max_name_len) + '-+-' + ('-' * max_type_len) + '-+'
# build a table for the settings, their type and default value
print(separator, file=out)
print('| %s | %s |' % (print_field('name', max_name_len), print_field('type', max_type_len)), file=out)
print(separator.replace('-', '='), file=out)
for i in range(len(names)):
print('| %s | %s |' % (print_field(names[i], max_name_len), print_field(types[i], max_type_len)), file=out)
print(separator, file=out)
print(file=out)
print(description, file=out)
print('', file=out)
# build a table for the settings, their type and default value
print >>out, separator
print >>out, '| %s | %s |' % (print_field('name', max_name_len), print_field('type', max_type_len))
print >>out, separator.replace('-', '=')
for i in range(len(names)):
print >>out, '| %s | %s |' % (print_field(names[i], max_name_len), print_field(types[i], max_type_len))
print >>out, separator
print >>out
print >>out, description
print >>out, ''
mode = '' mode = ''
@ -80,42 +91,43 @@ description = ''
names = [] names = []
types = [] types = []
for l in f: for line in f:
description_line = l.lstrip().startswith('//') description_line = line.lstrip().startswith('//')
l = l.strip() line = line.strip()
if mode == 'ignore': if mode == 'ignore':
if '#endif' in l: mode = '' if '#endif' in line:
continue mode = ''
continue
if 'TORRENT_ABI_VERSION == 1' in l: if 'TORRENT_ABI_VERSION == 1' in line:
mode = 'ignore' mode = 'ignore'
continue continue
if description_line == True: if description_line:
if len(names) > 0: if len(names) > 0:
render_section(names, description, types) render_section(names, description, types)
description = '' description = ''
names = [] names = []
types = [] types = []
description += '\n' + l[3:] description += '\n' + line[3:]
if '#define' in l: continue if '#define' in line:
continue
if 'METRIC(' in l: if 'METRIC(' in line:
args = l.split('(')[1].split(')')[0].split(',') args = line.split('(')[1].split(')')[0].split(',')
# args: category, name, type # args: category, name, type
args[1] = args[1].strip() args[1] = args[1].strip()
names.append(args[0].strip() + '.' + args[1].strip()) names.append(args[0].strip() + '.' + args[1].strip())
types.append(counter_types[args[1]]) types.append(counter_types[args[1]])
if len(names) > 0: if len(names) > 0:
render_section(names, description, types) render_section(names, description, types)
out.close() out.close()
f.close() f.close()

View File

@ -2,15 +2,23 @@
import glob import glob
import os import os
import sys
paths = ['test/*.cpp', 'src/*.cpp', 'src/kademlia/*.cpp', 'include/libtorrent/*.hpp', 'include/libtorrent/kademlia/*.hpp', 'include/libtorrent/aux_/*.hpp', 'include/libtorrent/extensions/*.hpp'] paths = [
'test/*.cpp',
'src/*.cpp',
'src/kademlia/*.cpp',
'include/libtorrent/*.hpp',
'include/libtorrent/kademlia/*.hpp',
'include/libtorrent/aux_/*.hpp',
'include/libtorrent/extensions/*.hpp']
os.system('(cd .. ; ctags %s 2>/dev/null)' % ' '.join(paths)) os.system('(cd .. ; ctags %s 2>/dev/null)' % ' '.join(paths))
files = [] files = []
for p in paths: for p in paths:
files.extend(glob.glob(os.path.join('..', p))) files.extend(glob.glob(os.path.join('..', p)))
items = [] items = []
@ -20,79 +28,91 @@ context = []
priority_count = [0, 0, 0, 0, 0] priority_count = [0, 0, 0, 0, 0]
def html_sanitize(s): def html_sanitize(s):
ret = '' ret = ''
for i in s: for i in s:
if i == '<': ret += '&lt;' if i == '<':
elif i == '>': ret += '&gt;' ret += '&lt;'
elif i == '&': ret += '&amp;' elif i == '>':
else: ret += i ret += '&gt;'
return ret elif i == '&':
ret += '&amp;'
else:
ret += i
return ret
for f in files: for f in files:
h = open(f) h = open(f)
state = '' state = ''
line_no = 0 line_no = 0
context_lines = 0 context_lines = 0
for l in h: for l in h:
line_no += 1 line_no += 1
line = l.strip() line = l.strip()
if 'TODO:' in line and line.startswith('//'): if 'TODO:' in line and line.startswith('//'):
line = line.split('TODO:')[1].strip() line = line.split('TODO:')[1].strip()
state = 'todo' state = 'todo'
items.append({}) items.append({})
items[-1]['location'] = '%s:%d' % (f, line_no) items[-1]['location'] = '%s:%d' % (f, line_no)
items[-1]['priority'] = 0 items[-1]['priority'] = 0
if line[0] in '0123456789': if line[0] in '0123456789':
items[-1]['priority'] = int(line[0]) items[-1]['priority'] = int(line[0])
if int(line[0]) > 5: if int(line[0]) > 5:
print 'priority too high: ' + line print('priority too high: ' + line)
sys.exit(1) sys.exit(1)
line = line[1:].strip() line = line[1:].strip()
items[-1]['todo'] = line items[-1]['todo'] = line
prio = items[-1]['priority'] prio = items[-1]['priority']
if prio >= 0 and prio <= 4: priority_count[prio] += 1 if prio >= 0 and prio <= 4:
continue priority_count[prio] += 1
continue
if state == '': if state == '':
context.append(html_sanitize(l)) context.append(html_sanitize(l))
if len(context) > 20: context.pop(0) if len(context) > 20:
continue context.pop(0)
continue
if state == 'todo': if state == 'todo':
if line.strip().startswith('//'): if line.strip().startswith('//'):
items[-1]['todo'] += '\n' items[-1]['todo'] += '\n'
items[-1]['todo'] += line[2:].strip() items[-1]['todo'] += line[2:].strip()
else: else:
state = 'context' state = 'context'
items[-1]['context'] = ''.join(context) + '<div style="background: #ffff00" width="100%">' + html_sanitize(l) + '</div>'; items[-1]['context'] = ''.join(context) + \
context_lines = 1 '<div style="background: #ffff00" width="100%">' + html_sanitize(l) + '</div>'
context_lines = 1
context.append(html_sanitize(l)) context.append(html_sanitize(l))
if len(context) > 20: context.pop(0) if len(context) > 20:
continue context.pop(0)
continue
if state == 'context': if state == 'context':
items[-1]['context'] += html_sanitize(l) items[-1]['context'] += html_sanitize(l)
context_lines += 1 context_lines += 1
context.append(html_sanitize(l)) context.append(html_sanitize(l))
if len(context) > 20: context.pop(0) if len(context) > 20:
if context_lines > 30: state = '' context.pop(0)
if context_lines > 30:
state = ''
h.close() h.close()
items.sort(key = lambda x: x['priority'], reverse = True) items.sort(key=lambda x: x['priority'], reverse=True)
#for i in items: # for i in items:
# print '\n\n', i['todo'], '\n' # print('\n\n', i['todo'], '\n')
# print i['location'], '\n' # print(i['location'], '\n')
# print 'prio: ', i['priority'], '\n' # print('prio: ', i['priority'], '\n')
# if 'context' in i: # if 'context' in i:
# print i['context'], '\n' # print(i['context'], '\n')
out = open('todo.html', 'w+') out = open('todo.html', 'w+')
out.write('''<html><head> out.write('''<html><head>
@ -123,21 +143,24 @@ out.write('''<html><head>
<span style="color: #3c3">%d relevant</span> <span style="color: #3c3">%d relevant</span>
<span style="color: #77f">%d feasible</span> <span style="color: #77f">%d feasible</span>
<span style="color: #999">%d notes</span> <span style="color: #999">%d notes</span>
<table width="100%%" border="1" style="border-collapse: collapse;">''' % \ <table width="100%%" border="1" style="border-collapse: collapse;">''' # noqa
(priority_count[4], priority_count[3], priority_count[2], priority_count[1], priority_count[0])) % (priority_count[4], priority_count[3], priority_count[2], priority_count[1], priority_count[0]))
prio_colors = [ '#ccc', '#ccf', '#cfc', '#fcc', '#f44'] prio_colors = ['#ccc', '#ccf', '#cfc', '#fcc', '#f44']
index = 0 index = 0
for i in items: for i in items:
if not 'context' in i: i['context'] = '' if 'context' not in i:
out.write('<tr style="background: %s"><td>relevance&nbsp;%d</td><td><a href="javascript:expand(%d)">%s</a></td><td>%s</td></tr>' \ i['context'] = ''
% (prio_colors[i['priority']], i['priority'], index, i['location'], i['todo'].replace('\n', ' '))) out.write(('<tr style="background: %s"><td>relevance&nbsp;%d</td>'
'<td><a href="javascript:expand(%d)">%s</a></td><td>%s</td></tr>')
% (prio_colors[i['priority']], i['priority'], index, i['location'], i['todo'].replace('\n', ' ')))
out.write('<tr id="%d" style="display: none;" colspan="3"><td colspan="3"><h2>%s</h2><h4>%s</h4><pre style="background: #f6f6f6; border: solid 1px #ddd;">%s</pre></td></tr>' \ out.write(
% (index, i['todo'], i['location'], i['context'])) ('<tr id="%d" style="display: none;" colspan="3"><td colspan="3"><h2>%s</h2><h4>%s</h4>'
index += 1 '<pre style="background: #f6f6f6; border: solid 1px #ddd;">%s</pre></td></tr>') %
(index, i['todo'], i['location'], i['context']))
index += 1
out.write('</table></body></html>') out.write('</table></body></html>')
out.close() out.close()

View File

@ -7,19 +7,20 @@ sys.stdout.write(open(sys.argv[1], 'r').read())
sys.stderr.write('joining %s\n' % sys.argv[1]) sys.stderr.write('joining %s\n' % sys.argv[1])
for name in sys.argv[2:]: for name in sys.argv[2:]:
sys.stdout.write('\n') sys.stdout.write('\n')
sys.stderr.write('joining %s\n' % name) sys.stderr.write('joining %s\n' % name)
f = open(name, 'r') f = open(name, 'r')
for l in f: for l in f:
# strip out the table of contents from subsequent files # strip out the table of contents from subsequent files
if '.. contents::' in l: if '.. contents::' in l:
in_directive = True in_directive = True
continue continue
if ':Author:' in l: continue if ':Author:' in l:
if ':Version:' in l: continue continue
if ':Version:' in l:
if l[0] in ' \t' and in_directive: continue
continue
in_directive = False
sys.stdout.write(l)
if l[0] in ' \t' and in_directive:
continue
in_directive = False
sys.stdout.write(l)

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env python
from __future__ import print_function
import sys import sys
import os import os
@ -62,378 +63,465 @@ default_cache = cache_sizes[-1]
# disk cache is not a significant part of the test, # disk cache is not a significant part of the test,
# since download rates will be extremely high while downloading # since download rates will be extremely high while downloading
# into RAM # into RAM
test_duration = 200 # 700 test_duration = 200 # 700
# make sure the environment is properly set up # make sure the environment is properly set up
try: try:
if os.name == 'posix': if os.name == 'posix':
resource.setrlimit(resource.RLIMIT_NOFILE, (4000, 5000)) resource.setrlimit(resource.RLIMIT_NOFILE, (4000, 5000))
except: except BaseException:
if resource.getrlimit(resource.RLIMIT_NOFILE)[0] < 4000: if resource.getrlimit(resource.RLIMIT_NOFILE)[0] < 4000:
print 'please set ulimit -n to at least 4000' print('please set ulimit -n to at least 4000')
sys.exit(1) sys.exit(1)
def build_stage_dirs(): def build_stage_dirs():
ret = [] ret = []
for i in builds[2:3]: for i in builds[2:3]:
ret.append('stage_%s' % i) ret.append('stage_%s' % i)
return ret return ret
# make sure we have all the binaries available # make sure we have all the binaries available
binaries = ['client_test', 'connection_tester', 'fragmentation_test'] binaries = ['client_test', 'connection_tester', 'fragmentation_test']
for b in build_stage_dirs(): for b in build_stage_dirs():
for i in binaries: for i in binaries:
p = os.path.join(b, i) p = os.path.join(b, i)
if not os.path.exists(p): if not os.path.exists(p):
print 'make sure "%s" is available in ./%s' % (i, b) print('make sure "%s" is available in ./%s' % (i, b))
sys.exit(1) sys.exit(1)
for i in filesystem: for i in filesystem:
if not os.path.exists(i): if not os.path.exists(i):
print ('the path "%s" does not exist. This is directory/mountpoint is ' + print(('the path "%s" does not exist. This is directory/mountpoint is ' +
'used as the download directory and is the filesystem that will be benchmarked ' + 'used as the download directory and is the filesystem that will be benchmarked ' +
'and need to exist.') % i 'and need to exist.') % i)
sys.exit(1) sys.exit(1)
# make sure we have a test torrent # make sure we have a test torrent
if not os.path.exists('test.torrent'): if not os.path.exists('test.torrent'):
print 'generating test torrent' print('generating test torrent')
# generate a 100 GB torrent, to make sure it won't all fit in physical RAM # generate a 100 GB torrent, to make sure it won't all fit in physical RAM
os.system('./stage_aio/connection_tester gen-torrent 10000 test.torrent') os.system('./stage_aio/connection_tester gen-torrent 10000 test.torrent')
if not os.path.exists('test2.torrent'): if not os.path.exists('test2.torrent'):
print 'generating test torrent 2' print('generating test torrent 2')
# generate a 6 GB torrent, to make sure it will fit in physical RAM # generate a 6 GB torrent, to make sure it will fit in physical RAM
os.system('./stage_aio/connection_tester gen-torrent 6000 test2.torrent') os.system('./stage_aio/connection_tester gen-torrent 6000 test2.torrent')
# use a new port for each test to make sure they keep working # use a new port for each test to make sure they keep working
# this port is incremented for each test run # this port is incremented for each test run
port = 10000 + random.randint(0, 40000) port = 10000 + random.randint(0, 40000)
def clear_caches(): def clear_caches():
if 'linux' in sys.platform: if 'linux' in sys.platform:
os.system('sync') os.system('sync')
open('/proc/sys/vm/drop_caches', 'w').write('3') open('/proc/sys/vm/drop_caches', 'w').write('3')
elif 'darwin' in sys.platform: elif 'darwin' in sys.platform:
os.system('purge') os.system('purge')
def build_commandline(config, port): def build_commandline(config, port):
num_peers = config['num-peers'] num_peers = config['num-peers']
torrent_path = config['torrent'] torrent_path = config['torrent']
if config['build'] == 'utorrent': if config['build'] == 'utorrent':
try: os.mkdir('utorrent_session') try:
except: pass os.mkdir('utorrent_session')
cfg = open('utorrent_session/settings.dat', 'w+') except BaseException:
pass
cfg = open('utorrent_session/settings.dat', 'w+')
cfg.write('d') cfg.write('d')
cfg.write('20:ul_slots_per_torrenti%de' % num_peers) cfg.write('20:ul_slots_per_torrenti%de' % num_peers)
cfg.write('17:conns_per_torrenti%de' % num_peers) cfg.write('17:conns_per_torrenti%de' % num_peers)
cfg.write('14:conns_globallyi%de' % num_peers) cfg.write('14:conns_globallyi%de' % num_peers)
cfg.write('9:bind_porti%de' % port) cfg.write('9:bind_porti%de' % port)
cfg.write('19:dir_active_download%d:%s' % (len(config['save-path']), config['save-path'])) cfg.write('19:dir_active_download%d:%s' % (len(config['save-path']), config['save-path']))
cfg.write('19:diskio.sparse_filesi1e') cfg.write('19:diskio.sparse_filesi1e')
cfg.write('14:cache.overridei1e') cfg.write('14:cache.overridei1e')
cfg.write('19:cache.override_sizei%de' % int(config['cache-size'] * 16 / 1024)) cfg.write('19:cache.override_sizei%de' % int(config['cache-size'] * 16 / 1024))
cfg.write('17:dir_autoload_flagi1e') cfg.write('17:dir_autoload_flagi1e')
cfg.write('12:dir_autoload8:autoload') cfg.write('12:dir_autoload8:autoload')
cfg.write('11:logger_maski4294967295e') cfg.write('11:logger_maski4294967295e')
cfg.write('1:vi0e') cfg.write('1:vi0e')
cfg.write('12:webui.enablei1e') cfg.write('12:webui.enablei1e')
cfg.write('19:webui.enable_listeni1e') cfg.write('19:webui.enable_listeni1e')
cfg.write('14:webui.hashword20:' + hashlib.sha1('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaadmin').digest()) cfg.write('14:webui.hashword20:' + hashlib.sha1('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaadmin').digest())
cfg.write('10:webui.porti8080e') cfg.write('10:webui.porti8080e')
cfg.write('10:webui.salt32:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa') cfg.write('10:webui.salt32:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa')
cfg.write('14:webui.username5:admin') cfg.write('14:webui.username5:admin')
cfg.write('e') cfg.write('e')
cfg.close() cfg.close()
try: os.mkdir('utorrent_session/autoload') try:
except: pass os.mkdir('utorrent_session/autoload')
try: shutil.copy(torrent_path, 'utorrent_session/autoload/') except BaseException:
except: pass pass
return './utorrent-server-v3_0/utserver -logfile session_stats/alerts_log.txt -settingspath utorrent_session' try:
shutil.copy(torrent_path, 'utorrent_session/autoload/')
except BaseException:
pass
return './utorrent-server-v3_0/utserver -logfile session_stats/alerts_log.txt -settingspath utorrent_session'
if config['build'] == 'rtorrent': if config['build'] == 'rtorrent':
if os.path.exists('rtorrent_session'): if os.path.exists('rtorrent_session'):
add_command = '' add_command = ''
else: else:
try: os.mkdir('rtorrent_session') try:
except: pass os.mkdir('rtorrent_session')
# it seems rtorrent may delete the original torrent when it's being added except BaseException:
try: shutil.copy(torrent_path, 'rtorrent_session/') pass
except: pass # it seems rtorrent may delete the original torrent when it's being added
add_command = '-O load_start_verbose=rtorrent_session/%s ' % torrent_path try:
shutil.copy(torrent_path, 'rtorrent_session/')
except BaseException:
pass
add_command = '-O load_start_verbose=rtorrent_session/%s ' % torrent_path
return 'rtorrent -d %s -n -p %d-%d -O max_peers=%d -O max_uploads=%d %s -s rtorrent_session -O max_memory_usage=128000000000' \ return ('rtorrent -d %s -n -p %d-%d -O max_peers=%d -O max_uploads=%d %s -s '
% (config['save-path'], port, port, num_peers, num_peers, add_command) 'rtorrent_session -O max_memory_usage=128000000000') % (
config['save-path'], port, port, num_peers, num_peers, add_command)
disable_disk = ''
if config['disable-disk']:
disable_disk = '-0'
return ('./stage_%s/client_test -k -N -H -M -B %d -l %d -S %d -T %d -c %d -C %d -s "%s" -p %d -E %d %s '
'-f session_stats/alerts_log.txt %s') % (
config['build'], test_duration, num_peers, num_peers, num_peers, num_peers, config['cache-size'],
config['save-path'], port, config['hash-threads'], disable_disk, torrent_path)
disable_disk = ''
if config['disable-disk']: disable_disk = '-0'
return './stage_%s/client_test -k -N -H -M -B %d -l %d -S %d -T %d -c %d -C %d -s "%s" -p %d -E %d %s -f session_stats/alerts_log.txt %s' \
% (config['build'], test_duration, num_peers, num_peers, num_peers, num_peers, config['cache-size'], config['save-path'], port, \
config['hash-threads'], disable_disk, torrent_path)
def delete_files(files): def delete_files(files):
for i in files: for i in files:
try: os.remove(i) try:
except: os.remove(i)
try: shutil.rmtree(i) except BaseException:
except: try:
try: shutil.rmtree(i)
if os.path.exists(i): print 'failed to delete %s' % i except BaseException:
except: pass try:
if os.path.exists(i):
print('failed to delete %s' % i)
except BaseException:
pass
# typically the schedulers available are 'noop', 'deadline' and 'cfq' # typically the schedulers available are 'noop', 'deadline' and 'cfq'
def build_test_config(fs=default_fs, num_peers=default_peers, cache_size=default_cache, \
test='upload', build='aio', profile='', hash_threads=1, torrent='test.torrent', \
disable_disk = False): def build_test_config(fs=default_fs, num_peers=default_peers, cache_size=default_cache,
config = {'test': test, 'save-path': os.path.join('./', fs), 'num-peers': num_peers, \ test='upload', build='aio', profile='', hash_threads=1, torrent='test.torrent',
'cache-size': cache_size, 'build': build, 'profile':profile, \ disable_disk=False):
'hash-threads': hash_threads, 'torrent': torrent, 'disable-disk': disable_disk } config = {'test': test, 'save-path': os.path.join('./', fs), 'num-peers': num_peers,
return config 'cache-size': cache_size, 'build': build, 'profile': profile,
'hash-threads': hash_threads, 'torrent': torrent, 'disable-disk': disable_disk}
return config
def prefix_len(text, prefix): def prefix_len(text, prefix):
for i in xrange(1, len(prefix)): for i in range(1, len(prefix)):
if (not text.startswith(prefix[0:i])): return i-1 if (not text.startswith(prefix[0:i])):
return len(prefix) return i - 1
return len(prefix)
def device_name(path): def device_name(path):
mount = subprocess.Popen('mount', stdout=subprocess.PIPE) mount = subprocess.Popen('mount', stdout=subprocess.PIPE)
max_match_len = 0 max_match_len = 0
match_device = '' match_device = ''
path = os.path.abspath(path) path = os.path.abspath(path)
for mp in mount.stdout.readlines(): for mp in mount.stdout.readlines():
c = mp.split(' ') c = mp.split(' ')
device = c[0] device = c[0]
mountpoint = c[2] mountpoint = c[2]
prefix = prefix_len(path, mountpoint) prefix = prefix_len(path, mountpoint)
if prefix > max_match_len: if prefix > max_match_len:
max_match_len = prefix max_match_len = prefix
match_device = device match_device = device
device = match_device
device = device.split('/')[-1][0:3]
print('device for path: %s -> %s' % (path, device))
return device
device = match_device
device = device.split('/')[-1][0:3]
print 'device for path: %s -> %s' % (path, device)
return device
def build_target_folder(config): def build_target_folder(config):
test = 'seed' test = 'seed'
if config['test'] == 'upload': test = 'download' if config['test'] == 'upload':
elif config['test'] == 'dual': test = 'dual' test = 'download'
elif config['test'] == 'dual':
test = 'dual'
if 'linux' in sys.platform: if 'linux' in sys.platform:
io_scheduler = open('/sys/block/%s/queue/scheduler' % device_name(config['save-path'])).read().split('[')[1].split(']')[0] io_scheduler = open('/sys/block/%s/queue/scheduler' %
else: device_name(config['save-path'])).read().split('[')[1].split(']')[0]
io_scheduler = sys.platform else:
io_scheduler = sys.platform
no_disk = '' no_disk = ''
if config['disable-disk']: no_disk = '_no-disk' if config['disable-disk']:
no_disk = '_no-disk'
return 'results_%s_%s_%d_%d_%s_%s_h%d%s' % (config['build'],
test,
config['num-peers'],
config['cache-size'],
os.path.split(
config['save-path'])[1],
io_scheduler,
config['hash-threads'],
no_disk)
return 'results_%s_%s_%d_%d_%s_%s_h%d%s' % (config['build'], test, config['num-peers'], \
config['cache-size'], os.path.split(config['save-path'])[1], io_scheduler, \
config['hash-threads'], no_disk)
def find_library(name):
    """Locate *name* under the common system library directories.

    Returns the first existing absolute path, or *name* unchanged when
    it is not found anywhere.
    """
    search_dirs = ('/usr/lib64/', '/usr/local/lib64/', '/usr/lib/', '/usr/local/lib/')
    for directory in search_dirs:
        candidate = directory + name
        # best-effort probe; swallow anything the filesystem check throws
        try:
            if os.path.exists(candidate):
                return candidate
        except BaseException:
            pass
    return name
def find_binary(names):
    """Return the first existing path for any of *names* under the
    common binary directories; fall back to ``names[0]`` when none is
    found.
    """
    bin_dirs = ('/usr/bin/', '/usr/local/bin/')
    for name in names:
        for directory in bin_dirs:
            candidate = directory + name
            # ignore any error from probing the filesystem
            try:
                if os.path.exists(candidate):
                    return candidate
            except BaseException:
                pass
    return names[0]
def run_test(config):
    """Run one client/tester benchmark pass described by *config*.

    Skips the run when the target results directory already exists.
    Launches the torrent client and the connection tester as
    subprocesses, waits for completion (or for ``test_duration`` seconds
    on seed tests), then gathers logs, fragmentation data and optional
    profiler output into ``session_stats`` and renames that directory to
    the per-configuration target folder.  Exits the process with status
    1 when either subprocess failed.
    """
    target_folder = build_target_folder(config)
    if os.path.exists(target_folder):
        print('results already exists, skipping test (%s)' % target_folder)
        return

    print('\n\n*********************************')
    print('* RUNNING TEST *')
    print('*********************************\n\n')
    print('%s %s' % (config['build'], config['test']))

    # make sure any previous test file is removed
    # don't clean up unless we're running a download-test, so that we leave
    # the test file complete for a seed test.
    delete_files(['utorrent_session/settings.dat', 'utorrent_session/settings.dat.old', 'asserts.log'])
    if config['test'] == 'upload' or config['test'] == 'dual':
        print('deleting files')
        delete_files([os.path.join(config['save-path'], 'stress_test_file'),
                      '.ses_state',
                      os.path.join(config['save-path'], '.resume'),
                      'utorrent_session',
                      '.dht_state',
                      'session_stats',
                      'rtorrent_session'])

    try:
        os.mkdir('session_stats')
    except BaseException:
        pass

    # save off the command line for reference
    global port
    cmdline = build_commandline(config, port)
    binary = cmdline.split(' ')[0]
    environment = None
    if config['profile'] == 'tcmalloc':
        environment = {'LD_PRELOAD': find_library('libprofiler.so.0'),
                       'CPUPROFILE': 'session_stats/cpu_profile.prof'}
    if config['profile'] == 'memory':
        environment = {'LD_PRELOAD': find_library('libprofiler.so.0'),
                       'HEAPPROFILE': 'session_stats/heap_profile.prof'}
    if config['profile'] == 'perf':
        cmdline = 'perf timechart record --call-graph --output=session_stats/perf_profile.prof ' + cmdline
    # context managers so the handles are closed even on a write error
    # (the original opened these without closing on failure)
    with open('session_stats/cmdline.txt', 'w+') as f:
        f.write(cmdline)
    with open('session_stats/config.txt', 'w+') as f:
        print(config, file=f)

    print('clearing disk cache')
    clear_caches()
    print('OK')
    # these must stay open for the lifetime of the subprocess
    client_output = open('session_stats/client.output', 'w+')
    client_error = open('session_stats/client.error', 'w+')
    print('launching: %s' % cmdline)
    client = subprocess.Popen(
        shlex.split(cmdline),
        stdout=client_output,
        stdin=subprocess.PIPE,
        stderr=client_error,
        env=environment)
    print('OK')
    # enable disk stats printing in the client's interactive UI
    if config['build'] != 'rtorrent' and config['build'] != 'utorrent':
        print('x', end=' ', file=client.stdin)
    time.sleep(4)
    cmdline = './stage_aio/connection_tester %s %d 127.0.0.1 %d %s' % (
        config['test'], config['num-peers'], port, config['torrent'])
    print('launching: %s' % cmdline)
    tester_output = open('session_stats/tester.output', 'w+')
    tester = subprocess.Popen(shlex.split(cmdline), stdout=tester_output)
    print('OK')

    time.sleep(2)

    print('\n')
    i = 0
    while True:
        time.sleep(1)
        tester.poll()
        if tester.returncode is not None:
            print('tester terminated')
            break
        client.poll()
        if client.returncode is not None:
            print('client terminated')
            break
        print('\r%d / %d' % (i, test_duration), end=' ')
        sys.stdout.flush()
        i += 1
        # seed tests run for a fixed duration; upload/dual run to completion
        if config['test'] != 'upload' and config['test'] != 'dual' and i >= test_duration:
            break
    print('\n')

    if client.returncode is None:
        try:
            print('killing client')
            client.send_signal(signal.SIGINT)
        except BaseException:
            pass

    time.sleep(10)
    client.wait()
    tester.wait()
    tester_output.close()
    client_output.close()
    # the original never closed client_error — close it here
    client_error.close()
    terminate = False
    if tester.returncode != 0:
        print('tester returned %d' % tester.returncode)
        terminate = True
    if client.returncode != 0:
        print('client returned %d' % client.returncode)
        terminate = True
    # collect auxiliary logs; best effort, any of them may not exist
    try:
        shutil.copy('asserts.log', 'session_stats/')
    except BaseException:
        pass
    try:
        shutil.move('libtorrent_logs0', 'session_stats/')
    except BaseException:
        pass
    try:
        shutil.move('libtorrent_logs%s' % port, 'session_stats/')
    except BaseException:
        pass

    # run fragmentation test
    print('analyzing fragmentation')
    os.system('./stage_aio/fragmentation_test test.torrent %s' % (config['save-path']))
    try:
        shutil.copy('fragmentation.log', 'session_stats/')
    except BaseException:
        pass
    shutil.copy('fragmentation.gnuplot', 'session_stats/')
    try:
        shutil.copy('file_access.log', 'session_stats/')
    except BaseException:
        pass

    os.system('filefrag %s >session_stats/filefrag.out' % config['save-path'])
    os.system('filefrag -v %s >session_stats/filefrag_verbose.out' % config['save-path'])

    os.chdir('session_stats')
    # parse session stats
    print('parsing session log')
    os.system('python ../../parse_session_stats.py *.0000.log')
    os.system('../stage_aio/parse_access_log file_access.log %s' %
              (os.path.join('..', config['save-path'], 'stress_test_file')))
    os.chdir('..')

    if config['profile'] == 'tcmalloc':
        print('analyzing CPU profile [%s]' % binary)
        os.system('%s --pdf %s session_stats/cpu_profile.prof >session_stats/cpu_profile.pdf' %
                  (find_binary(['google-pprof', 'pprof']), binary))
    if config['profile'] == 'memory':
        # heap dumps are numbered sequentially; stop at the first gap
        for i in range(1, 300):
            profile = 'session_stats/heap_profile.prof.%04d.heap' % i
            try:
                os.stat(profile)
            except BaseException:
                break
            print('analyzing heap profile [%s] %d' % (binary, i))
            os.system('%s --pdf %s %s >session_stats/heap_profile_%d.pdf' %
                      (find_binary(['google-pprof', 'pprof']), binary, profile, i))
    if config['profile'] == 'perf':
        print('analyzing CPU profile [%s]' % binary)
        os.system('perf timechart --input=session_stats/perf_profile.prof '
                  '--output=session_stats/profile_timechart.svg')
        os.system('perf report --input=session_stats/perf_profile.prof --threads --show-nr-samples '
                  '--vmlinux vmlinuz-2.6.38-8-generic.bzip >session_stats/profile.txt')

    # move the results into their final place
    print('saving results')
    os.rename('session_stats', build_target_folder(config))

    port += 1

    if terminate:
        sys.exit(1)
# ---------------------------------------------------------------------------
# Benchmark driver: sweep the hash-thread count with disk I/O disabled.
# ---------------------------------------------------------------------------
for h in range(0, 7):
    config = build_test_config(
        num_peers=30,
        build='aio',
        test='upload',
        torrent='test.torrent',
        hash_threads=h,
        disable_disk=True)
    run_test(config)

sys.exit(0)

# NOTE(review): everything below is unreachable because of the sys.exit(0)
# above — presumably older parameter sweeps kept around for manual
# re-enabling.  Confirm before deleting.
for b in ['aio', 'syncio']:
    for test in ['dual', 'upload', 'download']:
        config = build_test_config(build=b, test=test)
        run_test(config)

sys.exit(0)

for b in builds:
    for test in ['upload', 'download']:
        config = build_test_config(build=b, test=test)
        run_test(config)

for p in peers:
    for test in ['upload', 'download']:
        config = build_test_config(num_peers=p, test=test)
        run_test(config)

for c in cache_sizes:
    for test in ['upload', 'download']:
        config = build_test_config(cache_size=c, test=test)
        run_test(config)

for fs in filesystem:
    for test in ['upload', 'download']:
        config = build_test_config(fs=fs, test=test)
        run_test(config)

View File

@ -1,28 +1,28 @@
# -*- coding: cp1252 -*- # -*- coding: cp1252 -*-
# <PythonProxy.py> # <PythonProxy.py>
# #
#Copyright (c) <2009> <Fábio Domingues - fnds3000 in gmail.com> # Copyright (c) <2009> <Fábio Domingues - fnds3000 in gmail.com>
# #
#Permission is hereby granted, free of charge, to any person # Permission is hereby granted, free of charge, to any person
#obtaining a copy of this software and associated documentation # obtaining a copy of this software and associated documentation
#files (the "Software"), to deal in the Software without # files (the "Software"), to deal in the Software without
#restriction, including without limitation the rights to use, # restriction, including without limitation the rights to use,
#copy, modify, merge, publish, distribute, sublicense, and/or sell # copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the # copies of the Software, and to permit persons to whom the
#Software is furnished to do so, subject to the following # Software is furnished to do so, subject to the following
#conditions: # conditions:
# #
#The above copyright notice and this permission notice shall be # The above copyright notice and this permission notice shall be
#included in all copies or substantial portions of the Software. # included in all copies or substantial portions of the Software.
# #
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
#EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
#OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
#NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
#HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
#WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
#FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
#OTHER DEALINGS IN THE SOFTWARE. # OTHER DEALINGS IN THE SOFTWARE.
"""\ """\
Copyright (c) <2009> <Fábio Domingues - fnds3000 in gmail.com> <MIT Licence> Copyright (c) <2009> <Fábio Domingues - fnds3000 in gmail.com> <MIT Licence>
@ -81,16 +81,28 @@ Qual a diferença entre um proxy Elite, Anónimo e Transparente?
""" """
import socket, thread, select, sys, base64, time, errno import socket
import select
import sys
import base64
import time
import errno
# Python 3 renamed thread module to _thread
try:
import _thread as thread
except BaseException:
import thread
# Proxy identification and protocol constants.
__version__ = '0.1.0 Draft 1'

BUFLEN = 8192                               # socket read buffer size, bytes
VERSION = 'Python Proxy/%s' % __version__   # Proxy-agent identification string
HTTPVER = 'HTTP/1.1'

# Optional basic-auth credentials; populated from the command line.
username = None
password = None
class ConnectionHandler: class ConnectionHandler:
def __init__(self, connection, address, timeout): def __init__(self, connection, address, timeout):
self.client = connection self.client = connection
@ -99,26 +111,26 @@ class ConnectionHandler:
self.method, self.path, self.protocol = self.get_base_header() self.method, self.path, self.protocol = self.get_base_header()
global username global username
global password global password
if username != None: if username is not None:
auth = base64.b64encode(username + ':' + password) auth = base64.b64encode(username + ':' + password)
if not 'Proxy-Authorization: Basic ' + auth in self.client_buffer: if not 'Proxy-Authorization: Basic ' + auth in self.client_buffer:
print 'PROXY - failed authentication: %s' % self.client_buffer print('PROXY - failed authentication: %s' % self.client_buffer)
self.client.send(HTTPVER+' 401 Authentication Failed\n'+ self.client.send(HTTPVER + ' 401 Authentication Failed\n' +
'Proxy-agent: %s\n\n'%VERSION) 'Proxy-agent: %s\n\n' % VERSION)
self.client.close() self.client.close()
return return
try: try:
if self.method == 'CONNECT': if self.method == 'CONNECT':
self.method_CONNECT() self.method_CONNECT()
elif self.method in ('OPTIONS', 'GET', 'HEAD', 'POST', 'PUT', elif self.method in ('OPTIONS', 'GET', 'HEAD', 'POST', 'PUT',
'DELETE', 'TRACE'): 'DELETE', 'TRACE'):
self.method_others() self.method_others()
except: except BaseException:
try: try:
self.client.send(HTTPVER+' 502 Connection failed\n'+ self.client.send(HTTPVER + ' 502 Connection failed\n' +
'Proxy-agent: %s\n\n'%VERSION) 'Proxy-agent: %s\n\n' % VERSION)
except Exception, e: except Exception as e:
print 'PROXY - ', e print('PROXY - ', e)
self.client.close() self.client.close()
return return
@ -127,10 +139,10 @@ class ConnectionHandler:
def get_base_header(self): def get_base_header(self):
retries = 0 retries = 0
while 1: while True:
try: try:
self.client_buffer += self.client.recv(BUFLEN) self.client_buffer += self.client.recv(BUFLEN)
except socket.error, e: except socket.error as e:
err = e.args[0] err = e.args[0]
if (err == errno.EAGAIN or err == errno.EWOULDBLOCK) and retries < 20: if (err == errno.EAGAIN or err == errno.EWOULDBLOCK) and retries < 20:
time.sleep(0.5) time.sleep(0.5)
@ -138,18 +150,18 @@ class ConnectionHandler:
continue continue
raise e raise e
end = self.client_buffer.find('\r\n\r\n') end = self.client_buffer.find('\r\n\r\n')
if end!=-1: if end != -1:
break break
line_end = self.client_buffer.find('\n') line_end = self.client_buffer.find('\n')
print 'PROXY - %s' % self.client_buffer[:line_end]#debug print('PROXY - %s' % self.client_buffer[:line_end]) # debug
data = (self.client_buffer[:line_end+1]).split() data = (self.client_buffer[:line_end + 1]).split()
self.client_buffer = self.client_buffer[line_end+1:] self.client_buffer = self.client_buffer[line_end + 1:]
return data return data
def method_CONNECT(self): def method_CONNECT(self):
self._connect_target(self.path) self._connect_target(self.path)
self.client.send(HTTPVER+' 200 Connection established\n'+ self.client.send(HTTPVER + ' 200 Connection established\n' +
'Proxy-agent: %s\n\n'%VERSION) 'Proxy-agent: %s\n\n' % VERSION)
self.client_buffer = '' self.client_buffer = ''
self._read_write() self._read_write()
@ -159,15 +171,15 @@ class ConnectionHandler:
host = self.path[:i] host = self.path[:i]
path = self.path[i:] path = self.path[i:]
self._connect_target(host) self._connect_target(host)
self.target.send('%s %s %s\n' % (self.method, path, self.protocol)+ self.target.send('%s %s %s\n' % (self.method, path, self.protocol) +
self.client_buffer) self.client_buffer)
self.client_buffer = '' self.client_buffer = ''
self._read_write() self._read_write()
def _connect_target(self, host): def _connect_target(self, host):
i = host.find(':') i = host.find(':')
if i!=-1: if i != -1:
port = int(host[i+1:]) port = int(host[i + 1:])
host = host[:i] host = host[:i]
else: else:
port = 80 port = 80
@ -176,10 +188,10 @@ class ConnectionHandler:
self.target.connect(address) self.target.connect(address)
def _read_write(self): def _read_write(self):
time_out_max = self.timeout/3 time_out_max = self.timeout / 3
socs = [self.client, self.target] socs = [self.client, self.target]
count = 0 count = 0
while 1: while True:
count += 1 count += 1
(recv, _, error) = select.select(socs, [], socs, 3) (recv, _, error) = select.select(socs, [], socs, 3)
if error: if error:
@ -197,37 +209,39 @@ class ConnectionHandler:
if count == time_out_max: if count == time_out_max:
break break
def start_server(host='localhost', port=8080, IPv6=False, timeout=100,
                 handler=ConnectionHandler):
    """Bind a listening socket and hand each accepted connection to
    *handler* on a fresh thread.  Never returns.
    """
    # pick the address family the caller asked for
    soc_type = socket.AF_INET6 if IPv6 else socket.AF_INET
    soc = socket.socket(soc_type)
    soc.settimeout(120)
    print("PROXY - Serving on %s:%d." % (host, port))  # debug
    soc.bind((host, port))
    soc.listen(0)
    while True:
        # handler is invoked as handler(conn, addr, timeout) in its own thread
        thread.start_new_thread(handler, soc.accept() + (timeout,))
if __name__ == '__main__':
    # minimal hand-rolled argument parsing: --port, --username, --password
    listen_port = 8080
    i = 1
    while i < len(sys.argv):
        arg = sys.argv[i]
        if arg == '--port':
            listen_port = int(sys.argv[i + 1])
            i += 1
        elif arg == '--username':
            username = sys.argv[i + 1]
            i += 1
        elif arg == '--password':
            password = sys.argv[i + 1]
            i += 1
        else:
            # anything unrecognised (other than --help) is reported,
            # then usage is printed and we bail out
            if arg != '--help':
                print('PROXY - unknown option "%s"' % arg)
            print('usage: http.py [--port <listen-port>]')
            sys.exit(1)
        i += 1
    start_server(port=listen_port)

View File

@ -1,19 +1,27 @@
#!/usr/bin/env python #!/usr/bin/env python
"""Minimal non-feature complete socks proxy""" """Minimal non-feature complete socks proxy"""
from __future__ import print_function
import random
import socket import socket
from SocketServer import StreamRequestHandler, ThreadingTCPServer
from struct import pack, unpack from struct import pack, unpack
import threading import threading
import sys import sys
# Python 3 renamed SocketServer to socketserver
try:
from socketserver import StreamRequestHandler, ThreadingTCPServer
except BaseException:
from SocketServer import StreamRequestHandler, ThreadingTCPServer
def debug(s):
    """Emit a debug trace line, tagged with this script's name, to stderr."""
    print('socks.py: ', s, file=sys.stderr)
def error(s):
    """Emit an error line, tagged with this script's name, to stderr."""
    print('socks.py, ERROR: ', s, file=sys.stderr)
class MyTCPServer(ThreadingTCPServer): class MyTCPServer(ThreadingTCPServer):
allow_reuse_address = True allow_reuse_address = True
@ -21,6 +29,7 @@ class MyTCPServer(ThreadingTCPServer):
def handle_timeout(self): def handle_timeout(self):
raise Exception('timeout') raise Exception('timeout')
CLOSE = object() CLOSE = object()
VERSION = '\x05' VERSION = '\x05'
@ -37,15 +46,19 @@ password = None
username = None username = None
allow_v4 = False allow_v4 = False
def send(dest, msg):
    """Forward *msg* to the socket *dest*.

    The CLOSE sentinel triggers a half-close followed by close() and
    returns 0; any other message is passed straight to sendall().
    """
    if msg == CLOSE:
        # best-effort shutdown; the peer may already be gone
        try:
            dest.shutdown(socket.SHUT_WR)
        except BaseException:
            pass
        dest.close()
        return 0
    return dest.sendall(msg)
def recv(source, buffer): def recv(source, buffer):
data = source.recv(buffer) data = source.recv(buffer)
if data == '': if data == '':
@ -53,6 +66,7 @@ def recv(source, buffer):
else: else:
return data return data
def forward(source, dest, name): def forward(source, dest, name):
while True: while True:
data = recv(source, 4000) data = recv(source, 4000)
@ -63,11 +77,13 @@ def forward(source, dest, name):
# debug('Forwarding (%d) %r' % (len(data), data)) # debug('Forwarding (%d) %r' % (len(data), data))
send(dest, data) send(dest, data)
def spawn_forwarder(source, dest, name):
    """Start forwarding *source* -> *dest* on a background daemon thread."""
    worker = threading.Thread(target=forward, args=(source, dest, name))
    worker.daemon = True  # don't keep the process alive for this thread
    worker.start()
class SocksHandler(StreamRequestHandler): class SocksHandler(StreamRequestHandler):
"""Highly feature incomplete SOCKS 5 implementation""" """Highly feature incomplete SOCKS 5 implementation"""
@ -109,7 +125,7 @@ class SocksHandler(StreamRequestHandler):
c = self.read(1) c = self.read(1)
outbound_sock = socket.socket(socket.AF_INET) outbound_sock = socket.socket(socket.AF_INET)
out_address = socket.getaddrinfo(dest_address,dest_port)[0][4] out_address = socket.getaddrinfo(dest_address, dest_port)[0][4]
debug("Creating forwarder connection to %s:%d" % (out_address[0], out_address[1])) debug("Creating forwarder connection to %s:%d" % (out_address[0], out_address[1]))
outbound_sock.connect(out_address) outbound_sock.connect(out_address)
@ -130,7 +146,7 @@ class SocksHandler(StreamRequestHandler):
global password global password
global username global username
if password == None and NOAUTH in method_list: if password is None and NOAUTH in method_list:
self.send_no_auth_method() self.send_no_auth_method()
debug('Authenticated (no-auth)') debug('Authenticated (no-auth)')
elif USERPASS in method_list: elif USERPASS in method_list:
@ -173,7 +189,7 @@ class SocksHandler(StreamRequestHandler):
dest_address = '.'.join(map(str, unpack('>4B', raw_dest_address))) dest_address = '.'.join(map(str, unpack('>4B', raw_dest_address)))
elif address_type == IPV6: elif address_type == IPV6:
raw_dest_address = self.read(16) raw_dest_address = self.read(16)
dest_address = ":".join(map(lambda x: hex(x)[2:],unpack('>8H',raw_dest_address))) dest_address = ":".join([hex(x)[2:] for x in unpack('>8H', raw_dest_address)])
elif address_type == DOMAIN_NAME: elif address_type == DOMAIN_NAME:
dns_length = ord(self.read(1)) dns_length = ord(self.read(1))
dns_name = self.read(dns_length) dns_name = self.read(dns_length)
@ -190,21 +206,21 @@ class SocksHandler(StreamRequestHandler):
else: else:
outbound_sock = socket.socket(socket.AF_INET) outbound_sock = socket.socket(socket.AF_INET)
try: try:
out_address = socket.getaddrinfo(dest_address,dest_port)[0][4] out_address = socket.getaddrinfo(dest_address, dest_port)[0][4]
except Exception, e: except Exception as e:
print e print(e)
return return
if cmd == UDP_ASSOCIATE: if cmd == UDP_ASSOCIATE:
debug("no UDP support yet, closing") debug("no UDP support yet, closing")
return; return
debug("Creating forwarder connection to %s:%d" % (out_address[0], out_address[1])) debug("Creating forwarder connection to %s:%d" % (out_address[0], out_address[1]))
try: try:
outbound_sock.connect(out_address) outbound_sock.connect(out_address)
except Exception, e: except Exception as e:
print e print(e)
return return
if address_type == IPV6: if address_type == IPV6:
@ -215,22 +231,25 @@ class SocksHandler(StreamRequestHandler):
spawn_forwarder(outbound_sock, self.request, 'destination') spawn_forwarder(outbound_sock, self.request, 'destination')
try: try:
forward(self.request, outbound_sock, 'client') forward(self.request, outbound_sock, 'client')
except Exception,e: except Exception as e:
print e print(e)
def send_reply_v4(self, xxx_todo_changeme):
    """Send a SOCKS4 'request granted' reply (0x5a); the bind
    address/port fields in the reply are zeroed."""
    # unpacked for signature parity with send_reply; the values are unused
    (bind_addr, bind_port) = xxx_todo_changeme
    self.wfile.write('\0\x5a\0\0\0\0\0\0')
    self.wfile.flush()
def send_reply(self, xxx_todo_changeme1):
    """Send a SOCKS5 success reply carrying the IPv4 bind address and port."""
    (bind_addr, bind_port) = xxx_todo_changeme1
    # dotted-quad string -> tuple of four ints
    octets = tuple(int(part) for part in bind_addr.split('.'))
    full_address = octets + (bind_port,)
    debug('Setting up forwarding port %r' % (full_address,))
    msg = pack('>cccc4BH', VERSION, SUCCESS, '\x00', IPV4, *full_address)
    self.wfile.write(msg)
def send_reply6(self, (bind_addr, bind_port, unused1, unused2)): def send_reply6(self, xxx_todo_changeme2):
bind_tuple = tuple(map(lambda x: int(x,16), bind_addr.split(':'))) (bind_addr, bind_port, unused1, unused2) = xxx_todo_changeme2
bind_tuple = tuple([int(x, 16) for x in bind_addr.split(':')])
full_address = bind_tuple + (bind_port,) full_address = bind_tuple + (bind_port,)
debug('Setting up forwarding port %r' % (full_address,)) debug('Setting up forwarding port %r' % (full_address,))
msg = pack('>cccc8HH', VERSION, SUCCESS, '\x00', IPV6, *full_address) msg = pack('>cccc8HH', VERSION, SUCCESS, '\x00', IPV6, *full_address)
@ -252,24 +271,26 @@ class SocksHandler(StreamRequestHandler):
self.wfile.write('\x01\x00') self.wfile.write('\x01\x00')
self.wfile.flush() self.wfile.flush()
if __name__ == '__main__': if __name__ == '__main__':
listen_port = 8002 listen_port = 8002
i = 1 i = 1
while i < len(sys.argv): while i < len(sys.argv):
if sys.argv[i] == '--username': if sys.argv[i] == '--username':
username = sys.argv[i+1] username = sys.argv[i + 1]
i += 1 i += 1
elif sys.argv[i] == '--password': elif sys.argv[i] == '--password':
password = sys.argv[i+1] password = sys.argv[i + 1]
i += 1 i += 1
elif sys.argv[i] == '--port': elif sys.argv[i] == '--port':
listen_port = int(sys.argv[i+1]) listen_port = int(sys.argv[i + 1])
i += 1 i += 1
elif sys.argv[i] == '--allow-v4': elif sys.argv[i] == '--allow-v4':
allow_v4 = True allow_v4 = True
else: else:
if sys.argv[i] != '--help': debug('unknown option "%s"' % sys.argv[i]) if sys.argv[i] != '--help':
debug('unknown option "%s"' % sys.argv[i])
print('usage: socks.py [--username <user> --password <password>] [--port <listen-port>]') print('usage: socks.py [--username <user> --password <password>] [--port <listen-port>]')
sys.exit(1) sys.exit(1)
i += 1 i += 1
@ -279,4 +300,3 @@ if __name__ == '__main__':
server.timeout = 190 server.timeout = 190
while True: while True:
server.handle_request() server.handle_request()

View File

@ -1,194 +1,205 @@
#!/usr/bin/env python #!/usr/bin/env python
import BaseHTTPServer
import SimpleHTTPServer
import sys import sys
import os import os
import ssl import ssl
import gzip import gzip
import base64 import base64
# Python 3 has moved {Simple,Base}HTTPServer to http module
try:
# Remove '.' from sys.path or we try to import the http.py module
# which is not what we want.
sys.path = sys.path[1:]
from http.server import HTTPServer, BaseHTTPRequestHandler
except ImportError:
from SimpleHTTPServer import SimpleHTTPRequestHandler as BaseHTTPRequestHandler
from BaseHTTPServer import HTTPServer
# Server behaviour toggles, mutated by the command-line parsing below.
chunked_encoding = False
keepalive = True

# Pre-compress the test fixture so gzip responses can be served.
# Best effort: if 'test_file' is absent we simply skip it.
# Context managers close both handles even when writelines fails
# (the original leaked them on the error path).
try:
    with open('test_file', 'rb') as fin:
        with gzip.open('test_file.gz', 'wb') as fout:
            fout.writelines(fin)
except BaseException:
    pass
class http_server_with_timeout(HTTPServer):
    """HTTPServer variant that rebinds freely and aborts on inactivity.

    A 190-second idle window raises instead of blocking forever, so a
    wedged test run fails loudly rather than hanging.
    """

    # allow quick restarts without waiting for TIME_WAIT to expire
    allow_reuse_address = True
    # seconds of inactivity before handle_timeout fires
    timeout = 190

    def handle_timeout(self):
        raise Exception('timeout')
def do_GET(s):
print 'INCOMING-REQUEST: ', s.requestline class http_handler(BaseHTTPRequestHandler):
print s.headers
global chunked_encoding def do_GET(s):
global keepalive
# if the request contains the hostname and port. strip it print('INCOMING-REQUEST: ', s.requestline)
if s.path.startswith('http://') or s.path.startswith('https://'): print(s.headers)
s.path = s.path[8:]
s.path = s.path[s.path.find('/'):]
file_path = os.path.normpath(s.path) global chunked_encoding
print file_path global keepalive
print s.path
if s.path == '/password_protected': # if the request contains the hostname and port. strip it
passed = False if s.path.startswith('http://') or s.path.startswith('https://'):
if 'Authorization' in s.headers: s.path = s.path[8:]
auth = s.headers['Authorization'] s.path = s.path[s.path.find('/'):]
passed = auth == 'Basic %s' % base64.b64encode('testuser:testpass')
if not passed: file_path = os.path.normpath(s.path)
s.send_response(401) print(file_path)
s.send_header("Connection", "close") print(s.path)
s.end_headers()
return
s.path = '/test_file' if s.path == '/password_protected':
file_path = os.path.normpath('/test_file') passed = False
if 'Authorization' in s.headers:
auth = s.headers['Authorization']
passed = auth == 'Basic %s' % base64.b64encode('testuser:testpass')
if s.path == '/redirect': if not passed:
s.send_response(301) s.send_response(401)
s.send_header("Location", "/test_file") s.send_header("Connection", "close")
s.send_header("Connection", "close") s.end_headers()
s.end_headers() return
elif s.path == '/infinite_redirect':
s.send_response(301)
s.send_header("Location", "/infinite_redirect")
s.send_header("Connection", "close")
s.end_headers()
elif s.path == '/relative/redirect':
s.send_response(301)
s.send_header("Location", "../test_file")
s.send_header("Connection", "close")
s.end_headers()
elif s.path.startswith('/announce'):
s.send_response(200)
response = 'd8:intervali1800e8:completei1e10:incompletei1e' + \
'12:min intervali' + min_interval + 'e' + \
'5:peers12:AAAABBCCCCDD' + \
'6:peers618:EEEEEEEEEEEEEEEEFF' + \
'e'
s.send_header("Content-Length", "%d" % len(response))
s.send_header("Connection", "close")
s.end_headers()
s.wfile.write(response)
elif os.path.split(s.path)[1].startswith('seed?'):
query = s.path[6:]
args_raw = query.split('&')
args = {}
for a in args_raw:
kvp = a.split('=')
args[kvp[0]] = kvp[1]
piece = int(args['piece'])
ranges = args['ranges'].split('-')
filename = '' s.path = '/test_file'
try: file_path = os.path.normpath('/test_file')
filename = os.path.normpath(s.path[1:s.path.find('seed?') + 4])
print 'filename = %s' % filename
f = open(filename, 'rb')
f.seek(piece * 32 * 1024 + int(ranges[0]))
data = f.read(int(ranges[1]) - int(ranges[0]) + 1)
f.close()
s.send_response(200) if s.path == '/redirect':
print 'sending %d bytes' % len(data) s.send_response(301)
s.send_header("Content-Length", "%d" % len(data)) s.send_header("Location", "/test_file")
s.end_headers() s.send_header("Connection", "close")
s.wfile.write(data); s.end_headers()
except Exception, e: elif s.path == '/infinite_redirect':
print 'FILE ERROR: ', filename, e s.send_response(301)
s.send_response(404) s.send_header("Location", "/infinite_redirect")
s.send_header("Content-Length", "0") s.send_header("Connection", "close")
s.end_headers() s.end_headers()
else: elif s.path == '/relative/redirect':
filename = '' s.send_response(301)
try: s.send_header("Location", "../test_file")
filename = os.path.normpath(file_path[1:]) s.send_header("Connection", "close")
# serve file by invoking default handler s.end_headers()
f = open(filename, 'rb') elif s.path.startswith('/announce'):
size = int(os.stat(filename).st_size) s.send_response(200)
start_range = 0 response = 'd8:intervali1800e8:completei1e10:incompletei1e' + \
end_range = size '12:min intervali' + min_interval + 'e' + \
if 'Range' in s.headers: '5:peers12:AAAABBCCCCDD' + \
s.send_response(206) '6:peers618:EEEEEEEEEEEEEEEEFF' + \
st, e = s.headers['range'][6:].split('-', 1) 'e'
sl = len(st) s.send_header("Content-Length", "%d" % len(response))
el = len(e) s.send_header("Connection", "close")
if sl > 0: s.end_headers()
start_range = int(st) s.wfile.write(response)
if el > 0: elif os.path.split(s.path)[1].startswith('seed?'):
end_range = int(e) + 1 query = s.path[6:]
elif el > 0: args_raw = query.split('&')
ei = int(e) args = {}
if ei < size: for a in args_raw:
start_range = size - ei kvp = a.split('=')
s.send_header('Content-Range', 'bytes ' + str(start_range) \ args[kvp[0]] = kvp[1]
+ '-' + str(end_range - 1) + '/' + str(size)) piece = int(args['piece'])
else: ranges = args['ranges'].split('-')
s.send_response(200)
s.send_header('Accept-Ranges', 'bytes')
if chunked_encoding:
s.send_header('Transfer-Encoding', 'chunked')
s.send_header('Content-Length', end_range - start_range)
if filename.endswith('.gz'):
s.send_header('Content-Encoding', 'gzip')
if not keepalive:
s.send_header("Connection", "close")
try:
s.request.shutdown();
except Exception, e:
print 'Failed to shutdown read-channel of socket: ', e
s.end_headers() filename = ''
try:
filename = os.path.normpath(s.path[1:s.path.find('seed?') + 4])
print('filename = %s' % filename)
f = open(filename, 'rb')
f.seek(piece * 32 * 1024 + int(ranges[0]))
data = f.read(int(ranges[1]) - int(ranges[0]) + 1)
f.close()
s.send_response(200)
print('sending %d bytes' % len(data))
s.send_header("Content-Length", "%d" % len(data))
s.end_headers()
s.wfile.write(data)
except Exception as e:
print('FILE ERROR: ', filename, e)
s.send_response(404)
s.send_header("Content-Length", "0")
s.end_headers()
else:
filename = ''
try:
filename = os.path.normpath(file_path[1:])
# serve file by invoking default handler
f = open(filename, 'rb')
size = int(os.stat(filename).st_size)
start_range = 0
end_range = size
if 'Range' in s.headers:
s.send_response(206)
st, e = s.headers['range'][6:].split('-', 1)
sl = len(st)
el = len(e)
if sl > 0:
start_range = int(st)
if el > 0:
end_range = int(e) + 1
elif el > 0:
ei = int(e)
if ei < size:
start_range = size - ei
s.send_header('Content-Range', 'bytes ' + str(start_range)
+ '-' + str(end_range - 1) + '/' + str(size))
else:
s.send_response(200)
s.send_header('Accept-Ranges', 'bytes')
if chunked_encoding:
s.send_header('Transfer-Encoding', 'chunked')
s.send_header('Content-Length', end_range - start_range)
if filename.endswith('.gz'):
s.send_header('Content-Encoding', 'gzip')
if not keepalive:
s.send_header("Connection", "close")
try:
s.request.shutdown()
except Exception as e:
print('Failed to shutdown read-channel of socket: ', e)
s.end_headers()
f.seek(start_range)
length = end_range - start_range
while length > 0:
to_send = min(length, 0x900)
if chunked_encoding:
s.wfile.write('%x\r\n' % to_send)
data = f.read(to_send)
print('read %d bytes' % to_send)
s.wfile.write(data)
if chunked_encoding:
s.wfile.write('\r\n')
length -= to_send
print('sent %d bytes (%d bytes left)' % (len(data), length))
if chunked_encoding:
s.wfile.write('0\r\n\r\n')
except Exception as e:
print('FILE ERROR: ', filename, e)
s.send_response(404)
s.send_header("Content-Length", "0")
s.end_headers()
f.seek(start_range)
length = end_range - start_range
while length > 0:
to_send = min(length, 0x900)
if chunked_encoding:
s.wfile.write('%x\r\n' % to_send)
data = f.read(to_send)
print 'read %d bytes' % to_send
s.wfile.write(data)
if chunked_encoding:
s.wfile.write('\r\n')
length -= to_send
print 'sent %d bytes (%d bytes left)' % (len(data), length)
if chunked_encoding:
s.wfile.write('0\r\n\r\n')
except Exception, e:
print 'FILE ERROR: ', filename, e
s.send_response(404)
s.send_header("Content-Length", "0")
s.end_headers()
if __name__ == '__main__': if __name__ == '__main__':
port = int(sys.argv[1]) port = int(sys.argv[1])
chunked_encoding = sys.argv[2] != '0' chunked_encoding = sys.argv[2] != '0'
use_ssl = sys.argv[3] != '0' use_ssl = sys.argv[3] != '0'
keepalive = sys.argv[4] != '0' keepalive = sys.argv[4] != '0'
min_interval = sys.argv[5] min_interval = sys.argv[5]
http_handler.protocol_version = 'HTTP/1.1' http_handler.protocol_version = 'HTTP/1.1'
httpd = http_server_with_timeout(('127.0.0.1', port), http_handler) httpd = http_server_with_timeout(('127.0.0.1', port), http_handler)
if use_ssl: if use_ssl:
httpd.socket = ssl.wrap_socket(httpd.socket, certfile='../ssl/server.pem', server_side=True) httpd.socket = ssl.wrap_socket(httpd.socket, certfile='../ssl/server.pem', server_side=True)
while True: while True:
httpd.handle_request() httpd.handle_request()

View File

@ -4,78 +4,79 @@ import os
import shutil import shutil
import glob import glob
def clean(): def clean():
to_delete = [ to_delete = [
'session_stats', 'session_stats',
'libtorrent_logs*', 'libtorrent_logs*',
'round_trip_ms.log', 'round_trip_ms.log',
'dht.log', 'dht.log',
'upnp.log', 'upnp.log',
'natpmp.log', 'natpmp.log',
'bin', 'bin',
'build-aux', 'build-aux',
'.deps', '.deps',
'test_tmp_*', 'test_tmp_*',
'bjam_build.*.xml' 'bjam_build.*.xml'
'*.exe', '*.exe',
'*.pdb', '*.pdb',
'*.pyd', '*.pyd',
'dist', 'dist',
'build', 'build',
'.libs', '.libs',
'*.cpp.orig', '*.cpp.orig',
'*.cpp.rej', '*.cpp.rej',
'*.hpp.orig', '*.hpp.orig',
'*.hpp.rej', '*.hpp.rej',
'*.hpp.gcov', '*.hpp.gcov',
'*.cpp.gcov', '*.cpp.gcov',
'Makefile.in', 'Makefile.in',
'Makefile', 'Makefile',
'lib*.a', 'lib*.a',
'Jamfile.rej', 'Jamfile.rej',
'Jamfile.orig', 'Jamfile.orig',
'*.o', '*.o',
'*.lo', '*.lo',
'autom4te.cache', 'autom4te.cache',
'configure', 'configure',
'config.report', 'config.report',
'config.log', 'config.log',
'.lib', '.lib',
] ]
directories = [ directories = [
'examples', 'examples',
'test', 'test',
'.', '.',
'tools', 'tools',
'src', 'src',
'simulation', 'simulation',
os.path.join('src', 'kademlia'), os.path.join('src', 'kademlia'),
os.path.join('include', 'libtorrent'), os.path.join('include', 'libtorrent'),
os.path.join('include', os.path.join('libtorrent', '_aux')), os.path.join('include', os.path.join('libtorrent', '_aux')),
os.path.join('include', os.path.join('libtorrent', 'kademlia')), os.path.join('include', os.path.join('libtorrent', 'kademlia')),
os.path.join('bindings', 'python'), os.path.join('bindings', 'python'),
os.path.join('bindings', os.path.join('python', 'src')), os.path.join('bindings', os.path.join('python', 'src')),
os.path.join('bindings', 'c'), os.path.join('bindings', 'c'),
os.path.join('bindings', os.path.join('c', 'src')), os.path.join('bindings', os.path.join('c', 'src')),
os.path.join('simulation', 'libsimulator') os.path.join('simulation', 'libsimulator')
] ]
for d in directories: for d in directories:
for f in to_delete: for f in to_delete:
path = os.path.join(d, f) path = os.path.join(d, f)
entries = glob.glob(path) entries = glob.glob(path)
for p in entries: for p in entries:
try: try:
shutil.rmtree(p) shutil.rmtree(p)
print p print(p)
except Exception, e: except Exception as e:
try: try:
os.remove(p) os.remove(p)
print p print(p)
except Exception, e: except Exception as e:
print p, e print(p, e)
if __name__ == "__main__":
clean()
if __name__ == "__main__":
clean()

View File

@ -8,30 +8,36 @@ import random
port = int(sys.argv[1]) port = int(sys.argv[1])
# from BitTorrent 4.3.0 # from BitTorrent 4.3.0
def encode_bencached(x,r):
def encode_bencached(x, r):
r.append(x.bencoded) r.append(x.bencoded)
def encode_int(x, r): def encode_int(x, r):
r.extend(('i', str(x), 'e')) r.extend(('i', str(x), 'e'))
def encode_string(x, r): def encode_string(x, r):
r.extend((str(len(x)), ':', x)) r.extend((str(len(x)), ':', x))
def encode_list(x, r): def encode_list(x, r):
r.append('l') r.append('l')
for i in x: for i in x:
encode_func[type(i)](i, r) encode_func[type(i)](i, r)
r.append('e') r.append('e')
def encode_dict(x,r):
def encode_dict(x, r):
r.append('d') r.append('d')
ilist = x.items() ilist = sorted(x.items())
ilist.sort()
for k, v in ilist: for k, v in ilist:
r.extend((str(len(k)), ':', k)) r.extend((str(len(k)), ':', k))
encode_func[type(v)](v, r) encode_func[type(v)](v, r)
r.append('e') r.append('e')
encode_func = {} encode_func = {}
encode_func[IntType] = encode_int encode_func[IntType] = encode_int
encode_func[LongType] = encode_int encode_func[LongType] = encode_int
@ -40,29 +46,32 @@ encode_func[ListType] = encode_list
encode_func[TupleType] = encode_list encode_func[TupleType] = encode_list
encode_func[DictType] = encode_dict encode_func[DictType] = encode_dict
def bencode(x): def bencode(x):
r = [] r = []
encode_func[type(x)](x, r) encode_func[type(x)](x, r)
return ''.join(r) return ''.join(r)
def send_dht_message(msg): def send_dht_message(msg):
s.sendto(bencode(msg), 0, ('127.0.0.1', port)) s.sendto(bencode(msg), 0, ('127.0.0.1', port))
def random_key(): def random_key():
ret = '' ret = ''
for i in range(0, 20): for i in range(0, 20):
ret += chr(random.randint(0, 255)) ret += chr(random.randint(0, 255))
return ret return ret
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
node_id = '1' * 20; node_id = '1' * 20
query = 'get_peers' query = 'get_peers'
print 'test random info-hashes' print('test random info-hashes')
for i in xrange(1, 30000): for i in range(1, 30000):
send_dht_message({'a': {'id': node_id, 'info_hash': random_key()}, 'q': query, 'y': 'q', 't': '%d' % i}) send_dht_message({'a': {'id': node_id, 'info_hash': random_key()}, 'q': query, 'y': 'q', 't': '%d' % i})
print 'test random peer-ids'
for i in xrange(1, 30000):
send_dht_message({'a': {'id': random_key(), 'info_hash': random_key()}, 'q': query, 'y': 'q', 't': '%d' % i})
print('test random peer-ids')
for i in range(1, 30000):
send_dht_message({'a': {'id': random_key(), 'info_hash': random_key()}, 'q': query, 'y': 'q', 't': '%d' % i})

View File

@ -1,9 +1,8 @@
#!/usr/bin/env python #!/usr/bin/env python
import os import os
import sys
file_header ='''/* file_header = '''/*
Copyright (c) 2017, Arvid Norberg Copyright (c) 2017, Arvid Norberg
All rights reserved. All rights reserved.
@ -52,79 +51,83 @@ namespace lt = libtorrent;
#endif // TORRENT_FWD_HPP #endif // TORRENT_FWD_HPP
''' '''
classes = os.popen('git grep "\(TORRENT_EXPORT\|TORRENT_DEPRECATED_EXPORT\|^TORRENT_[A-Z0-9]\+_NAMESPACE\)"').read().split('\n') classes = os.popen(
r'git grep "\(TORRENT_EXPORT\|TORRENT_DEPRECATED_EXPORT\|^TORRENT_[A-Z0-9]\+_NAMESPACE\)"').read().split('\n')
def print_classes(out, classes, keyword): def print_classes(out, classes, keyword):
current_file = '' current_file = ''
ret = ''
dht_ret = ''
# [(file, decl), ...] # [(file, decl), ...]
classes = [(l.split(':')[0].strip(), ':'.join(l.split(':')[1:]).strip()) for l in classes] classes = [(l.split(':')[0].strip(), ':'.join(l.split(':')[1:]).strip()) for l in classes]
# we only care about header files # we only care about header files
# ignore the forward header itself, that's the one we're generating # ignore the forward header itself, that's the one we're generating
# also ignore any header in the aux_ directory, those are private # also ignore any header in the aux_ directory, those are private
classes = [l for l in classes if l[0].endswith('.hpp') and not l[0].endswith('/fwd.hpp') and '/aux_/' not in l[0]] classes = [l for l in classes if l[0].endswith('.hpp') and not l[0].endswith('/fwd.hpp') and '/aux_/' not in l[0]]
namespaces = ['TORRENT_VERSION_NAMESPACE_2', 'TORRENT_IPV6_NAMESPACE', namespaces = ['TORRENT_VERSION_NAMESPACE_2', 'TORRENT_IPV6_NAMESPACE',
'TORRENT_VERSION_NAMESPACE_2_END', 'TORRENT_IPV6_NAMESPACE_END'] 'TORRENT_VERSION_NAMESPACE_2_END', 'TORRENT_IPV6_NAMESPACE_END']
# only include classes with the right kind of export # only include classes with the right kind of export
classes = [l for l in classes if l[1] in namespaces or (l[1].split(' ')[0] in ['class', 'struct'] and l[1].split(' ')[1] == keyword)] classes = [
l for l in classes if l[1] in namespaces or (
l[1].split(' ')[0] in [
'class',
'struct'] and l[1].split(' ')[1] == keyword)]
# collapse empty namespaces # collapse empty namespaces
classes2 = [] classes2 = []
skip = 0 skip = 0
for i in xrange(len(classes)): for i in range(len(classes)):
if skip > 0: if skip > 0:
skip -= 1 skip -= 1
continue continue
if classes[i][1] in namespaces \ if classes[i][1] in namespaces \
and len(classes) > i+1 \ and len(classes) > i + 1 \
and classes[i+1][1] == ('%s_END' % classes[i][1]): and classes[i + 1][1] == ('%s_END' % classes[i][1]):
skip = 1 skip = 1
else: else:
classes2.append(classes[i]) classes2.append(classes[i])
classes = classes2 classes = classes2
idx = -1 idx = -1
for line in classes: for line in classes:
idx += 1 idx += 1
this_file = line[0] this_file = line[0]
decl = line[1].split(' ') decl = line[1].split(' ')
content = '' content = ''
if this_file != current_file: if this_file != current_file:
out.write('\n// ' + this_file + '\n') out.write('\n// ' + this_file + '\n')
current_file = this_file; current_file = this_file
if len(decl) > 2 and decl[0] in ['struct', 'class']: if len(decl) > 2 and decl[0] in ['struct', 'class']:
decl = decl[0] + ' ' + decl[2] decl = decl[0] + ' ' + decl[2]
if not decl.endswith(';'): decl += ';' if not decl.endswith(';'):
content = decl + '\n' decl += ';'
else: content = decl + '\n'
content = line[1] + '\n' else:
content = line[1] + '\n'
if 'kademlia' in this_file:
out.write('namespace dht {\n')
out.write(content)
out.write('}\n')
else:
out.write(content)
if 'kademlia' in this_file:
out.write('namespace dht {\n')
out.write(content)
out.write('}\n')
else:
out.write(content)
os.remove('include/libtorrent/fwd.hpp') os.remove('include/libtorrent/fwd.hpp')
with open('include/libtorrent/fwd.hpp', 'w+') as f: with open('include/libtorrent/fwd.hpp', 'w+') as f:
f.write(file_header) f.write(file_header)
print_classes(f, classes, 'TORRENT_EXPORT'); print_classes(f, classes, 'TORRENT_EXPORT')
f.write('\n#if TORRENT_ABI_VERSION == 1\n') f.write('\n#if TORRENT_ABI_VERSION == 1\n')
print_classes(f, classes, 'TORRENT_DEPRECATED_EXPORT'); print_classes(f, classes, 'TORRENT_DEPRECATED_EXPORT')
f.write('\n#endif // TORRENT_ABI_VERSION')
f.write(file_footer)
f.write('\n#endif // TORRENT_ABI_VERSION')
f.write(file_footer)

View File

@ -35,53 +35,57 @@ import locale
# Python 2.x/3.x compatibility # Python 2.x/3.x compatibility
if sys.version_info[0] >= 3: if sys.version_info[0] >= 3:
PYTHON_3 = True PYTHON_3 = True
def compat_iteritems(x): return x.items() # No iteritems() in Python 3 def compat_iteritems(x): return x.items() # No iteritems() in Python 3
def compat_itervalues(x): return x.values() # No itervalues() in Python 3 def compat_itervalues(x): return x.values() # No itervalues() in Python 3
def compat_keys(x): return list(x.keys()) # keys() is a generator in Python 3 def compat_keys(x): return list(x.keys()) # keys() is a generator in Python 3
basestring = str # No class basestring in Python 3 basestring = str # No class basestring in Python 3
unichr = chr # No unichr in Python 3 unichr = chr # No unichr in Python 3
xrange = range # No xrange in Python 3 xrange = range # No xrange in Python 3
else: else:
PYTHON_3 = False PYTHON_3 = False
def compat_iteritems(x): return x.iteritems() def compat_iteritems(x): return x.iteritems()
def compat_itervalues(x): return x.itervalues() def compat_itervalues(x): return x.itervalues()
def compat_keys(x): return x.keys() def compat_keys(x): return x.keys()
try:
# Debugging helper module
import debug
except ImportError:
pass
MULTIPLICATION_SIGN = unichr(0xd7) MULTIPLICATION_SIGN = unichr(0xd7)
def times(x): def times(x):
return "%u%s" % (x, MULTIPLICATION_SIGN) return "%u%s" % (x, MULTIPLICATION_SIGN)
def percentage(p): def percentage(p):
return "%.02f%%" % (p*100.0,) return "%.02f%%" % (p * 100.0,)
def add(a, b): def add(a, b):
return a + b return a + b
def equal(a, b): def equal(a, b):
if a == b: if a == b:
return a return a
else: else:
return None return None
def fail(a, b): def fail(a, b):
assert False assert False
tol = 2 ** -23 tol = 2 ** -23
def ratio(numerator, denominator): def ratio(numerator, denominator):
try: try:
ratio = float(numerator)/float(denominator) ratio = float(numerator) / float(denominator)
except ZeroDivisionError: except ZeroDivisionError:
# 0/0 is undefined, but 1.0 yields more useful results # 0/0 is undefined, but 1.0 yields more useful results
return 1.0 return 1.0
@ -110,7 +114,7 @@ class UndefinedEvent(Exception):
class Event(object): class Event(object):
"""Describe a kind of event, and its basic operations.""" """Describe a kind of event, and its basic operations."""
def __init__(self, name, null, aggregator, formatter = str): def __init__(self, name, null, aggregator, formatter=str):
self.name = name self.name = name
self._null = null self._null = null
self._aggregator = aggregator self._aggregator = aggregator
@ -224,7 +228,7 @@ class Function(Object):
self.calls[call.callee_id] = call self.calls[call.callee_id] = call
def get_call(self, callee_id): def get_call(self, callee_id):
if not callee_id in self.calls: if callee_id not in self.calls:
call = Call(callee_id) call = Call(callee_id)
call[SAMPLES] = 0 call[SAMPLES] = 0
call[SAMPLES2] = 0 call[SAMPLES2] = 0
@ -306,7 +310,9 @@ class Profile(Object):
for callee_id in compat_keys(function.calls): for callee_id in compat_keys(function.calls):
assert function.calls[callee_id].callee_id == callee_id assert function.calls[callee_id].callee_id == callee_id
if callee_id not in self.functions: if callee_id not in self.functions:
sys.stderr.write('warning: call to undefined function %s from function %s\n' % (str(callee_id), function.name)) sys.stderr.write(
'warning: call to undefined function %s from function %s\n' %
(str(callee_id), function.name))
del function.calls[callee_id] del function.calls[callee_id]
def find_cycles(self): def find_cycles(self):
@ -368,7 +374,6 @@ class Profile(Object):
pathFunctions[n] = f pathFunctions[n] = f
self.functions = pathFunctions self.functions = pathFunctions
def getFunctionId(self, funcName): def getFunctionId(self, funcName):
for f in self.functions: for f in self.functions:
if self.functions[f].name == funcName: if self.functions[f].name == funcName:
@ -496,7 +501,7 @@ class Profile(Object):
assert outevent not in call assert outevent not in call
assert call.ratio is not None assert call.ratio is not None
callee = self.functions[call.callee_id] callee = self.functions[call.callee_id]
subtotal = call.ratio *self._integrate_function(callee, outevent, inevent) subtotal = call.ratio * self._integrate_function(callee, outevent, inevent)
call[outevent] = subtotal call[outevent] = subtotal
return subtotal return subtotal
@ -535,9 +540,10 @@ class Profile(Object):
partials = {} partials = {}
self._rank_cycle_function(cycle, callee, 0, ranks) self._rank_cycle_function(cycle, callee, 0, ranks)
self._call_ratios_cycle(cycle, callee, ranks, call_ratios, set()) self._call_ratios_cycle(cycle, callee, ranks, call_ratios, set())
partial = self._integrate_cycle_function(cycle, callee, call_ratio, partials, ranks, call_ratios, outevent, inevent) partial = self._integrate_cycle_function(
cycle, callee, call_ratio, partials, ranks, call_ratios, outevent, inevent)
assert partial == max(partials.values()) assert partial == max(partials.values())
assert not total or abs(1.0 - partial/(call_ratio*total)) <= 0.001 assert not total or abs(1.0 - partial / (call_ratio * total)) <= 0.001
return cycle[outevent] return cycle[outevent]
@ -561,20 +567,30 @@ class Profile(Object):
call_ratios[callee] = call_ratios.get(callee, 0.0) + call.ratio call_ratios[callee] = call_ratios.get(callee, 0.0) + call.ratio
self._call_ratios_cycle(cycle, callee, ranks, call_ratios, visited) self._call_ratios_cycle(cycle, callee, ranks, call_ratios, visited)
def _integrate_cycle_function(self, cycle, function, partial_ratio, partials, ranks, call_ratios, outevent, inevent): def _integrate_cycle_function(
self,
cycle,
function,
partial_ratio,
partials,
ranks,
call_ratios,
outevent,
inevent):
if function not in partials: if function not in partials:
partial = partial_ratio*function[inevent] partial = partial_ratio * function[inevent]
for call in compat_itervalues(function.calls): for call in compat_itervalues(function.calls):
if call.callee_id != function.id: if call.callee_id != function.id:
callee = self.functions[call.callee_id] callee = self.functions[call.callee_id]
if callee.cycle is not cycle: if callee.cycle is not cycle:
assert outevent in call assert outevent in call
partial += partial_ratio*call[outevent] partial += partial_ratio * call[outevent]
else: else:
if ranks[callee] > ranks[function]: if ranks[callee] > ranks[function]:
callee_partial = self._integrate_cycle_function(cycle, callee, partial_ratio, partials, ranks, call_ratios, outevent, inevent) callee_partial = self._integrate_cycle_function(
cycle, callee, partial_ratio, partials, ranks, call_ratios, outevent, inevent)
call_ratio = ratio(call.ratio, call_ratios[callee]) call_ratio = ratio(call.ratio, call_ratios[callee])
call_partial = call_ratio*callee_partial call_partial = call_ratio * callee_partial
try: try:
call[outevent] += call_partial call[outevent] += call_partial
except UndefinedEvent: except UndefinedEvent:
@ -670,7 +686,7 @@ class Profile(Object):
class Struct: class Struct:
"""Masquerade a dictionary with a structure-like behavior.""" """Masquerade a dictionary with a structure-like behavior."""
def __init__(self, attrs = None): def __init__(self, attrs=None):
if attrs is None: if attrs is None:
attrs = {} attrs = {}
self.__dict__['_attrs'] = attrs self.__dict__['_attrs'] = attrs
@ -761,7 +777,7 @@ XML_ELEMENT_START, XML_ELEMENT_END, XML_CHARACTER_DATA, XML_EOF = range(4)
class XmlToken: class XmlToken:
def __init__(self, type, name_or_data, attrs = None, line = None, column = None): def __init__(self, type, name_or_data, attrs=None, line=None, column=None):
assert type in (XML_ELEMENT_START, XML_ELEMENT_END, XML_CHARACTER_DATA, XML_EOF) assert type in (XML_ELEMENT_START, XML_ELEMENT_END, XML_CHARACTER_DATA, XML_EOF)
self.type = type self.type = type
self.name_or_data = name_or_data self.name_or_data = name_or_data
@ -784,7 +800,7 @@ class XmlToken:
class XmlTokenizer: class XmlTokenizer:
"""Expat based XML tokenizer.""" """Expat based XML tokenizer."""
def __init__(self, fp, skip_ws = True): def __init__(self, fp, skip_ws=True):
self.fp = fp self.fp = fp
self.tokens = [] self.tokens = []
self.index = 0 self.index = 0
@ -795,8 +811,8 @@ class XmlTokenizer:
self.character_data = '' self.character_data = ''
self.parser = xml.parsers.expat.ParserCreate() self.parser = xml.parsers.expat.ParserCreate()
self.parser.StartElementHandler = self.handle_element_start self.parser.StartElementHandler = self.handle_element_start
self.parser.EndElementHandler = self.handle_element_end self.parser.EndElementHandler = self.handle_element_end
self.parser.CharacterDataHandler = self.handle_character_data self.parser.CharacterDataHandler = self.handle_character_data
def handle_element_start(self, name, attributes): def handle_element_start(self, name, attributes):
@ -825,7 +841,7 @@ class XmlTokenizer:
self.character_data = '' self.character_data = ''
def next(self): def next(self):
size = 16*1024 size = 16 * 1024
while self.index >= len(self.tokens) and not self.final: while self.index >= len(self.tokens) and not self.final:
self.tokens = [] self.tokens = []
self.index = 0 self.index = 0
@ -834,7 +850,7 @@ class XmlTokenizer:
try: try:
self.parser.Parse(data, self.final) self.parser.Parse(data, self.final)
except xml.parsers.expat.ExpatError as e: except xml.parsers.expat.ExpatError as e:
#if e.code == xml.parsers.expat.errors.XML_ERROR_NO_ELEMENTS: # if e.code == xml.parsers.expat.errors.XML_ERROR_NO_ELEMENTS:
if e.code == 3: if e.code == 3:
pass pass
else: else:
@ -858,7 +874,8 @@ class XmlTokenMismatch(Exception):
self.found = found self.found = found
def __str__(self): def __str__(self):
return '%u:%u: %s expected, %s found' % (self.found.line, self.found.column, str(self.expected), str(self.found)) return '%u:%u: %s expected, %s found' % (
self.found.line, self.found.column, str(self.expected), str(self.found))
class XmlParser(Parser): class XmlParser(Parser):
@ -898,7 +915,7 @@ class XmlParser(Parser):
raise XmlTokenMismatch(XmlToken(XML_ELEMENT_END, name), self.token) raise XmlTokenMismatch(XmlToken(XML_ELEMENT_END, name), self.token)
self.consume() self.consume()
def character_data(self, strip = True): def character_data(self, strip=True):
data = '' data = ''
while self.token.type == XML_CHARACTER_DATA: while self.token.type == XML_CHARACTER_DATA:
data += self.token.name_or_data data += self.token.name_or_data
@ -1095,7 +1112,7 @@ class GprofParser(Parser):
# process call graph entries # process call graph entries
entry_lines = [] entry_lines = []
while line != '\014': # form feed while line != '\014': # form feed
if line and not line.isspace(): if line and not line.isspace():
if self._cg_sep_re.match(line): if self._cg_sep_re.match(line):
self.parse_cg_entry(entry_lines) self.parse_cg_entry(entry_lines)
@ -1227,7 +1244,7 @@ class AXEParser(Parser):
'^-----+ ' '^-----+ '
) )
_cg_footer_re = re.compile('^Index\s+Function\s*$') _cg_footer_re = re.compile(r'^Index\s+Function\s*$')
_cg_primary_re = re.compile( _cg_primary_re = re.compile(
r'^\[(?P<index>\d+)\]?' + r'^\[(?P<index>\d+)\]?' +
@ -1378,7 +1395,8 @@ class AXEParser(Parser):
line = self.readline() line = self.readline()
def parse(self): def parse(self):
sys.stderr.write('warning: for axe format, edge weights are unreliable estimates derived from\nfunction total times.\n') sys.stderr.write(
'warning: for axe format, edge weights are unreliable estimates derived from\nfunction total times.\n')
self.parse_cg() self.parse_cg()
self.fp.close() self.fp.close()
@ -1440,7 +1458,7 @@ class AXEParser(Parser):
for call in compat_itervalues(function.calls): for call in compat_itervalues(function.calls):
if call.ratio is not None: if call.ratio is not None:
callee = profile.functions[call.callee_id] callee = profile.functions[call.callee_id]
call[TOTAL_TIME_RATIO] = call.ratio * callee[TOTAL_TIME_RATIO]; call[TOTAL_TIME_RATIO] = call.ratio * callee[TOTAL_TIME_RATIO]
return profile return profile
@ -1452,7 +1470,7 @@ class CallgrindParser(LineParser):
- http://valgrind.org/docs/manual/cl-format.html - http://valgrind.org/docs/manual/cl-format.html
""" """
_call_re = re.compile('^calls=\s*(\d+)\s+((\d+|\+\d+|-\d+|\*)\s+)+$') _call_re = re.compile(r'^calls=\s*(\d+)\s+((\d+|\+\d+|-\d+|\*)\s+)+$')
def __init__(self, infile): def __init__(self, infile):
LineParser.__init__(self, infile) LineParser.__init__(self, infile)
@ -1541,7 +1559,7 @@ class CallgrindParser(LineParser):
if key == 'positions': if key == 'positions':
self.num_positions = len(items) self.num_positions = len(items)
self.cost_positions = items self.cost_positions = items
self.last_positions = [0]*self.num_positions self.last_positions = [0] * self.num_positions
return True return True
def parse_cost_summary(self): def parse_cost_summary(self):
@ -1560,9 +1578,9 @@ class CallgrindParser(LineParser):
__subpos_re = r'(0x[0-9a-fA-F]+|\d+|\+\d+|-\d+|\*)' __subpos_re = r'(0x[0-9a-fA-F]+|\d+|\+\d+|-\d+|\*)'
_cost_re = re.compile(r'^' + _cost_re = re.compile(r'^' +
__subpos_re + r'( +' + __subpos_re + r')*' + __subpos_re + r'( +' + __subpos_re + r')*' +
r'( +\d+)*' + r'( +\d+)*' +
'$') '$')
def parse_cost_line(self, calls=None): def parse_cost_line(self, calls=None):
line = self.lookahead().rstrip() line = self.lookahead().rstrip()
@ -1584,9 +1602,9 @@ class CallgrindParser(LineParser):
values = line.split() values = line.split()
assert len(values) <= self.num_positions + self.num_events assert len(values) <= self.num_positions + self.num_events
positions = values[0 : self.num_positions] positions = values[0: self.num_positions]
events = values[self.num_positions : ] events = values[self.num_positions:]
events += ['0']*(self.num_events - len(events)) events += ['0'] * (self.num_events - len(events))
for i in range(self.num_positions): for i in range(self.num_positions):
position = positions[i] position = positions[i]
@ -1631,14 +1649,13 @@ class CallgrindParser(LineParser):
_, values = line.split('=', 1) _, values = line.split('=', 1)
values = values.strip().split() values = values.strip().split()
calls = int(values[0]) calls = int(values[0])
call_position = values[1:]
self.consume() self.consume()
self.parse_cost_line(calls) self.parse_cost_line(calls)
return True return True
_position_re = re.compile('^(?P<position>[cj]?(?:ob|fl|fi|fe|fn))=\s*(?:\((?P<id>\d+)\))?(?:\s*(?P<name>.+))?') _position_re = re.compile(r'^(?P<position>[cj]?(?:ob|fl|fi|fe|fn))=\s*(?:\((?P<id>\d+)\))?(?:\s*(?P<name>.+))?')
_position_table_map = { _position_table_map = {
'ob': 'ob', 'ob': 'ob',
@ -1730,7 +1747,7 @@ class CallgrindParser(LineParser):
def make_function(self, module, filename, name): def make_function(self, module, filename, name):
# FIXME: module and filename are not being tracked reliably # FIXME: module and filename are not being tracked reliably
#id = '|'.join((module, filename, name)) # id = '|'.join((module, filename, name))
id = name id = name
try: try:
function = self.profile.functions[id] function = self.profile.functions[id]
@ -1802,7 +1819,7 @@ class PerfParser(LineParser):
for call in compat_itervalues(function.calls): for call in compat_itervalues(function.calls):
if call.ratio is not None: if call.ratio is not None:
callee = profile.functions[call.callee_id] callee = profile.functions[call.callee_id]
call[TOTAL_TIME_RATIO] = call.ratio * callee[TOTAL_TIME_RATIO]; call[TOTAL_TIME_RATIO] = call.ratio * callee[TOTAL_TIME_RATIO]
else: else:
assert False assert False
@ -1931,8 +1948,6 @@ class OprofileParser(LineParser):
profile = Profile() profile = Profile()
reverse_call_samples = {}
# populate the profile # populate the profile
profile[SAMPLES] = 0 profile[SAMPLES] = 0
for _callers, _function, _callees in compat_itervalues(self.entries): for _callers, _function, _callees in compat_itervalues(self.entries):
@ -2016,7 +2031,7 @@ class OprofileParser(LineParser):
if entry.symbol.startswith('"') and entry.symbol.endswith('"'): if entry.symbol.startswith('"') and entry.symbol.endswith('"'):
entry.symbol = entry.symbol[1:-1] entry.symbol = entry.symbol[1:-1]
entry.id = ':'.join((entry.application, entry.image, source, entry.symbol)) entry.id = ':'.join((entry.application, entry.image, source, entry.symbol))
entry.self = fields.get('self', None) != None entry.self = fields.get('self', None) is not None
if entry.self: if entry.self:
entry.id += ':self' entry.id += ':self'
if entry.symbol: if entry.symbol:
@ -2036,7 +2051,7 @@ class OprofileParser(LineParser):
def match_separator(self): def match_separator(self):
line = self.lookahead() line = self.lookahead()
return line == '-'*len(line) return line == '-' * len(line)
def match_primary(self): def match_primary(self):
line = self.lookahead() line = self.lookahead()
@ -2066,8 +2081,10 @@ class HProfParser(LineParser):
# read lookahead # read lookahead
self.readline() self.readline()
while not self.lookahead().startswith('------'): self.consume() while not self.lookahead().startswith('------'):
while not self.lookahead().startswith('TRACE '): self.consume() self.consume()
while not self.lookahead().startswith('TRACE '):
self.consume()
self.parse_traces() self.parse_traces()
@ -2084,12 +2101,13 @@ class HProfParser(LineParser):
# build up callgraph # build up callgraph
for id, trace in compat_iteritems(self.traces): for id, trace in compat_iteritems(self.traces):
if not id in self.samples: continue if id not in self.samples:
continue
mtime = self.samples[id][0] mtime = self.samples[id][0]
last = None last = None
for func, file, line in trace: for func, file, line in trace:
if not func in functions: if func not in functions:
function = Function(func, func) function = Function(func, func)
function[SAMPLES] = 0 function[SAMPLES] = 0
profile.add_function(function) profile.add_function(function)
@ -2120,17 +2138,16 @@ class HProfParser(LineParser):
self.parse_trace() self.parse_trace()
def parse_trace(self): def parse_trace(self):
l = self.consume() consume = self.consume()
mo = self.trace_id_re.match(l) mo = self.trace_id_re.match(consume)
tid = mo.group(1) tid = mo.group(1)
last = None
trace = [] trace = []
while self.lookahead().startswith('\t'): while self.lookahead().startswith('\t'):
l = self.consume() consume = self.consume()
match = self.trace_re.search(l) match = self.trace_re.search(consume)
if not match: if not match:
#sys.stderr.write('Invalid line: %s\n' % l) # sys.stderr.write('Invalid line: %s\n' % consume)
break break
else: else:
function_name, file, line = match.groups() function_name, file, line = match.groups()
@ -2280,13 +2297,13 @@ class XPerfParser(Parser):
import csv import csv
reader = csv.reader( reader = csv.reader(
self.stream, self.stream,
delimiter = ',', delimiter=',',
quotechar = None, quotechar=None,
escapechar = None, escapechar=None,
doublequote = False, doublequote=False,
skipinitialspace = True, skipinitialspace=True,
lineterminator = '\r\n', lineterminator='\r\n',
quoting = csv.QUOTE_NONE) quoting=csv.QUOTE_NONE)
header = True header = True
for row in reader: for row in reader:
if header: if header:
@ -2548,7 +2565,6 @@ class AQtimeParser(XmlParser):
return table return table
def parse_data(self): def parse_data(self):
rows = []
attrs = self.element_start('DATA') attrs = self.element_start('DATA')
table_id = int(attrs['TABLE_ID']) table_id = int(attrs['TABLE_ID'])
table_name, field_types, field_names = self.tables[table_id] table_name, field_types, field_names = self.tables[table_id]
@ -2560,7 +2576,7 @@ class AQtimeParser(XmlParser):
return table return table
def parse_row(self, field_types): def parse_row(self, field_types):
row = [None]*len(field_types) row = [None] * len(field_types)
children = [] children = []
self.element_start('ROW') self.element_start('ROW')
while self.token.type == XML_ELEMENT_START: while self.token.type == XML_ELEMENT_START:
@ -2622,16 +2638,16 @@ class AQtimeParser(XmlParser):
function = Function(self.build_id(fields), self.build_name(fields)) function = Function(self.build_id(fields), self.build_name(fields))
function[TIME] = fields['Time'] function[TIME] = fields['Time']
function[TOTAL_TIME] = fields['Time with Children'] function[TOTAL_TIME] = fields['Time with Children']
#function[TIME_RATIO] = fields['% Time']/100.0 # function[TIME_RATIO] = fields['% Time']/100.0
#function[TOTAL_TIME_RATIO] = fields['% with Children']/100.0 # function[TOTAL_TIME_RATIO] = fields['% with Children']/100.0
return function return function
def build_call(self, fields): def build_call(self, fields):
call = Call(self.build_id(fields)) call = Call(self.build_id(fields))
call[TIME] = fields['Time'] call[TIME] = fields['Time']
call[TOTAL_TIME] = fields['Time with Children'] call[TOTAL_TIME] = fields['Time with Children']
#call[TIME_RATIO] = fields['% Time']/100.0 # call[TIME_RATIO] = fields['% Time']/100.0
#call[TOTAL_TIME_RATIO] = fields['% with Children']/100.0 # call[TOTAL_TIME_RATIO] = fields['% with Children']/100.0
return call return call
def build_id(self, fields): def build_id(self, fields):
@ -2694,7 +2710,7 @@ class PstatsParser:
call = Call(callee.id) call = Call(callee.id)
if isinstance(value, tuple): if isinstance(value, tuple):
for i in xrange(0, len(value), 4): for i in xrange(0, len(value), 4):
nc, cc, tt, ct = value[i:i+4] nc, cc, tt, ct = value[i:i + 4]
if CALLS in call: if CALLS in call:
call[CALLS] += cc call[CALLS] += cc
else: else:
@ -2707,11 +2723,11 @@ class PstatsParser:
else: else:
call[CALLS] = value call[CALLS] = value
call[TOTAL_TIME] = ratio(value, nc)*ct call[TOTAL_TIME] = ratio(value, nc) * ct
caller.add_call(call) caller.add_call(call)
#self.stats.print_stats() # self.stats.print_stats()
#self.stats.print_callees() # self.stats.print_callees()
# Compute derived events # Compute derived events
self.profile.validate() self.profile.validate()
@ -2724,18 +2740,18 @@ class PstatsParser:
class Theme: class Theme:
def __init__(self, def __init__(self,
bgcolor = (0.0, 0.0, 1.0), bgcolor=(0.0, 0.0, 1.0),
mincolor = (0.0, 0.0, 0.0), mincolor=(0.0, 0.0, 0.0),
maxcolor = (0.0, 0.0, 1.0), maxcolor=(0.0, 0.0, 1.0),
fontname = "Arial", fontname="Arial",
fontcolor = "white", fontcolor="white",
nodestyle = "filled", nodestyle="filled",
minfontsize = 10.0, minfontsize=10.0,
maxfontsize = 10.0, maxfontsize=10.0,
minpenwidth = 0.5, minpenwidth=0.5,
maxpenwidth = 4.0, maxpenwidth=4.0,
gamma = 2.2, gamma=2.2,
skew = 1.0): skew=1.0):
self.bgcolor = bgcolor self.bgcolor = bgcolor
self.mincolor = mincolor self.mincolor = mincolor
self.maxcolor = maxcolor self.maxcolor = maxcolor
@ -2783,7 +2799,7 @@ class Theme:
return self.fontsize(weight) return self.fontsize(weight)
def edge_penwidth(self, weight): def edge_penwidth(self, weight):
return max(weight*self.maxpenwidth, self.minpenwidth) return max(weight * self.maxpenwidth, self.minpenwidth)
def edge_arrowsize(self, weight): def edge_arrowsize(self, weight):
return 0.5 * math.sqrt(self.edge_penwidth(weight)) return 0.5 * math.sqrt(self.edge_penwidth(weight))
@ -2800,14 +2816,14 @@ class Theme:
if self.skew < 0: if self.skew < 0:
raise ValueError("Skew must be greater than 0") raise ValueError("Skew must be greater than 0")
elif self.skew == 1.0: elif self.skew == 1.0:
h = hmin + weight*(hmax - hmin) h = hmin + weight * (hmax - hmin)
s = smin + weight*(smax - smin) s = smin + weight * (smax - smin)
l = lmin + weight*(lmax - lmin) l = lmin + weight * (lmax - lmin) # noqa
else: else:
base = self.skew base = self.skew
h = hmin + ((hmax-hmin)*(-1.0 + (base ** weight)) / (base - 1.0)) h = hmin + ((hmax - hmin) * (-1.0 + (base ** weight)) / (base - 1.0))
s = smin + ((smax-smin)*(-1.0 + (base ** weight)) / (base - 1.0)) s = smin + ((smax - smin) * (-1.0 + (base ** weight)) / (base - 1.0))
l = lmin + ((lmax-lmin)*(-1.0 + (base ** weight)) / (base - 1.0)) l = lmin + ((lmax - lmin) * (-1.0 + (base ** weight)) / (base - 1.0)) # noqa
return self.hsl_to_rgb(h, s, l) return self.hsl_to_rgb(h, s, l)
@ -2820,16 +2836,16 @@ class Theme:
h = h % 1.0 h = h % 1.0
s = min(max(s, 0.0), 1.0) s = min(max(s, 0.0), 1.0)
l = min(max(l, 0.0), 1.0) l = min(max(l, 0.0), 1.0) # noqa
if l <= 0.5: if l <= 0.5: # noqa
m2 = l*(s + 1.0) m2 = l * (s + 1.0)
else: else:
m2 = l + s - l*s m2 = l + s - l * s
m1 = l*2.0 - m2 m1 = l * 2.0 - m2
r = self._hue_to_rgb(m1, m2, h + 1.0/3.0) r = self._hue_to_rgb(m1, m2, h + 1.0 / 3.0)
g = self._hue_to_rgb(m1, m2, h) g = self._hue_to_rgb(m1, m2, h)
b = self._hue_to_rgb(m1, m2, h - 1.0/3.0) b = self._hue_to_rgb(m1, m2, h - 1.0 / 3.0)
# Apply gamma correction # Apply gamma correction
r **= self.gamma r **= self.gamma
@ -2843,50 +2859,50 @@ class Theme:
h += 1.0 h += 1.0
elif h > 1.0: elif h > 1.0:
h -= 1.0 h -= 1.0
if h*6 < 1.0: if h * 6 < 1.0:
return m1 + (m2 - m1)*h*6.0 return m1 + (m2 - m1) * h * 6.0
elif h*2 < 1.0: elif h * 2 < 1.0:
return m2 return m2
elif h*3 < 2.0: elif h * 3 < 2.0:
return m1 + (m2 - m1)*(2.0/3.0 - h)*6.0 return m1 + (m2 - m1) * (2.0 / 3.0 - h) * 6.0
else: else:
return m1 return m1
TEMPERATURE_COLORMAP = Theme( TEMPERATURE_COLORMAP = Theme(
mincolor = (2.0/3.0, 0.80, 0.25), # dark blue mincolor=(2.0 / 3.0, 0.80, 0.25), # dark blue
maxcolor = (0.0, 1.0, 0.5), # satured red maxcolor=(0.0, 1.0, 0.5), # satured red
gamma = 1.0 gamma=1.0
) )
PINK_COLORMAP = Theme( PINK_COLORMAP = Theme(
mincolor = (0.0, 1.0, 0.90), # pink mincolor=(0.0, 1.0, 0.90), # pink
maxcolor = (0.0, 1.0, 0.5), # satured red maxcolor=(0.0, 1.0, 0.5), # satured red
) )
GRAY_COLORMAP = Theme( GRAY_COLORMAP = Theme(
mincolor = (0.0, 0.0, 0.85), # light gray mincolor=(0.0, 0.0, 0.85), # light gray
maxcolor = (0.0, 0.0, 0.0), # black maxcolor=(0.0, 0.0, 0.0), # black
) )
BW_COLORMAP = Theme( BW_COLORMAP = Theme(
minfontsize = 8.0, minfontsize=8.0,
maxfontsize = 24.0, maxfontsize=24.0,
mincolor = (0.0, 0.0, 0.0), # black mincolor=(0.0, 0.0, 0.0), # black
maxcolor = (0.0, 0.0, 0.0), # black maxcolor=(0.0, 0.0, 0.0), # black
minpenwidth = 0.1, minpenwidth=0.1,
maxpenwidth = 8.0, maxpenwidth=8.0,
) )
PRINT_COLORMAP = Theme( PRINT_COLORMAP = Theme(
minfontsize = 18.0, minfontsize=18.0,
maxfontsize = 30.0, maxfontsize=30.0,
fontcolor = "black", fontcolor="black",
nodestyle = "solid", nodestyle="solid",
mincolor = (0.0, 0.0, 0.0), # black mincolor=(0.0, 0.0, 0.0), # black
maxcolor = (0.0, 0.0, 0.0), # black maxcolor=(0.0, 0.0, 0.0), # black
minpenwidth = 0.1, minpenwidth=0.1,
maxpenwidth = 8.0, maxpenwidth=8.0,
) )
@ -2908,16 +2924,16 @@ class DotWriter:
"""Split the function name on multiple lines.""" """Split the function name on multiple lines."""
if len(name) > 32: if len(name) > 32:
ratio = 2.0/3.0 ratio = 2.0 / 3.0
height = max(int(len(name)/(1.0 - ratio) + 0.5), 1) height = max(int(len(name) / (1.0 - ratio) + 0.5), 1)
width = max(len(name)/height, 32) width = max(len(name) / height, 32)
# TODO: break lines in symbols # TODO: break lines in symbols
name = textwrap.fill(name, width, break_long_words=False) name = textwrap.fill(name, width, break_long_words=False)
# Take away spaces # Take away spaces
name = name.replace(", ", ",") name = name.replace(", ", ",")
name = name.replace("> >", ">>") name = name.replace("> >", ">>")
name = name.replace("> >", ">>") # catch consecutive name = name.replace("> >", ">>") # catch consecutive
return name return name
@ -2964,11 +2980,11 @@ class DotWriter:
label = '\n'.join(labels) label = '\n'.join(labels)
self.node(function.id, self.node(function.id,
label = label, label=label,
color = self.color(theme.node_bgcolor(weight)), color=self.color(theme.node_bgcolor(weight)),
fontcolor = self.color(theme.node_fgcolor(weight)), fontcolor=self.color(theme.node_fgcolor(weight)),
fontsize = "%.2f" % theme.node_fontsize(weight), fontsize="%.2f" % theme.node_fontsize(weight),
) )
for call in compat_itervalues(function.calls): for call in compat_itervalues(function.calls):
callee = profile.functions[call.callee_id] callee = profile.functions[call.callee_id]
@ -2989,14 +3005,14 @@ class DotWriter:
label = '\n'.join(labels) label = '\n'.join(labels)
self.edge(function.id, call.callee_id, self.edge(function.id, call.callee_id,
label = label, label=label,
color = self.color(theme.edge_color(weight)), color=self.color(theme.edge_color(weight)),
fontcolor = self.color(theme.edge_color(weight)), fontcolor=self.color(theme.edge_color(weight)),
fontsize = "%.2f" % theme.edge_fontsize(weight), fontsize="%.2f" % theme.edge_fontsize(weight),
penwidth = "%.2f" % theme.edge_penwidth(weight), penwidth="%.2f" % theme.edge_penwidth(weight),
labeldistance = "%.2f" % theme.edge_penwidth(weight), labeldistance="%.2f" % theme.edge_penwidth(weight),
arrowsize = "%.2f" % theme.edge_arrowsize(weight), arrowsize="%.2f" % theme.edge_arrowsize(weight),
) )
self.end_graph() self.end_graph()
@ -3061,7 +3077,7 @@ class DotWriter:
return 0 return 0
if f >= 1.0: if f >= 1.0:
return 255 return 255
return int(255.0*f + 0.5) return int(255.0 * f + 0.5)
return "#" + "".join(["%02x" % float2int(c) for c in (r, g, b)]) return "#" + "".join(["%02x" % float2int(c) for c in (r, g, b)])
@ -3082,11 +3098,11 @@ class Main:
"""Main program.""" """Main program."""
themes = { themes = {
"color": TEMPERATURE_COLORMAP, "color": TEMPERATURE_COLORMAP,
"pink": PINK_COLORMAP, "pink": PINK_COLORMAP,
"gray": GRAY_COLORMAP, "gray": GRAY_COLORMAP,
"bw": BW_COLORMAP, "bw": BW_COLORMAP,
"print": PRINT_COLORMAP, "print": PRINT_COLORMAP,
} }
formats = { formats = {
@ -3115,8 +3131,7 @@ class Main:
global totalMethod global totalMethod
formatNames = list(self.formats.keys()) formatNames = sorted(self.formats.keys())
formatNames.sort()
optparser = optparse.OptionParser( optparser = optparse.OptionParser(
usage="\n\t%prog [options] [file] ...") usage="\n\t%prog [options] [file] ...")
@ -3139,19 +3154,27 @@ class Main:
help="profile format: %s [default: %%default]" % self.naturalJoin(formatNames)) help="profile format: %s [default: %%default]" % self.naturalJoin(formatNames))
optparser.add_option( optparser.add_option(
'--total', '--total',
type="choice", choices=('callratios', 'callstacks'), type="choice",
dest="totalMethod", default=totalMethod, choices=(
help="preferred method of calculating total time: callratios or callstacks (currently affects only perf format) [default: %default]") 'callratios',
'callstacks'),
dest="totalMethod",
default=totalMethod,
help=("preferred method of calculating total time: callratios or callstacks"
" (currently affects only perf format) [default: %default]"))
optparser.add_option( optparser.add_option(
'-c', '--colormap', '-c', '--colormap',
type="choice", choices=('color', 'pink', 'gray', 'bw', 'print'), type="choice", choices=('color', 'pink', 'gray', 'bw', 'print'),
dest="theme", default="color", dest="theme", default="color",
help="color map: color, pink, gray, bw, or print [default: %default]") help="color map: color, pink, gray, bw, or print [default: %default]")
optparser.add_option( optparser.add_option(
'-s', '--strip', '-s',
'--strip',
action="store_true", action="store_true",
dest="strip", default=False, dest="strip",
help="strip function parameters, template parameters, and const modifiers from demangled C++ function names") default=False,
help=("strip function parameters, template parameters, and const modifiers from"
" demangled C++ function names"))
optparser.add_option( optparser.add_option(
'-w', '--wrap', '-w', '--wrap',
action="store_true", action="store_true",
@ -3176,8 +3199,11 @@ class Main:
# add a new option to control skew of the colorization curve # add a new option to control skew of the colorization curve
optparser.add_option( optparser.add_option(
'--skew', '--skew',
type="float", dest="theme_skew", default=1.0, type="float",
help="skew the colorization curve. Values < 1.0 give more variety to lower percentages. Values > 1.0 give less variety to lower percentages") dest="theme_skew",
default=1.0,
help=("skew the colorization curve. Values < 1.0 give more variety to lower"
" percentages. Values > 1.0 give less variety to lower percentages"))
(self.options, self.args) = optparser.parse_args(sys.argv[1:]) (self.options, self.args) = optparser.parse_args(sys.argv[1:])
if len(self.args) > 1 and self.options.format != 'pstats': if len(self.args) > 1 and self.options.format != 'pstats':
@ -3234,18 +3260,24 @@ class Main:
dot.show_function_events.append(SAMPLES) dot.show_function_events.append(SAMPLES)
profile = self.profile profile = self.profile
profile.prune(self.options.node_thres/100.0, self.options.edge_thres/100.0) profile.prune(self.options.node_thres / 100.0, self.options.edge_thres / 100.0)
if self.options.root: if self.options.root:
rootId = profile.getFunctionId(self.options.root) rootId = profile.getFunctionId(self.options.root)
if not rootId: if not rootId:
sys.stderr.write('root node ' + self.options.root + ' not found (might already be pruned : try -e0 -n0 flags)\n') sys.stderr.write(
'root node ' +
self.options.root +
' not found (might already be pruned : try -e0 -n0 flags)\n')
sys.exit(1) sys.exit(1)
profile.prune_root(rootId) profile.prune_root(rootId)
if self.options.leaf: if self.options.leaf:
leafId = profile.getFunctionId(self.options.leaf) leafId = profile.getFunctionId(self.options.leaf)
if not leafId: if not leafId:
sys.stderr.write('leaf node ' + self.options.leaf + ' not found (maybe already pruned : try -e0 -n0 flags)\n') sys.stderr.write(
'leaf node ' +
self.options.leaf +
' not found (maybe already pruned : try -e0 -n0 flags)\n')
sys.exit(1) sys.exit(1)
profile.prune_leaf(leafId) profile.prune_leaf(leafId)

View File

@ -1,24 +1,26 @@
#!/usr/bin/env python #!/usr/bin/env python
import os, sys, time from __future__ import print_function
import os
import sys
keys = [['upload rate', 'x1y1', 6], ['history entries', 'x1y2', 10], ['queue', 'x1y2', 4]] keys = [['upload rate', 'x1y1', 6], ['history entries', 'x1y2', 10], ['queue', 'x1y2', 4]]
out = open('bandwidth.gnuplot', 'wb') out = open('bandwidth.gnuplot', 'wb')
print >>out, "set term png size 1200,700" print("set term png size 1200,700", file=out)
print >>out, 'set output "bandwidth_manager.png"' print('set output "bandwidth_manager.png"', file=out)
print >>out, 'set xrange [0:*]' print('set xrange [0:*]', file=out)
print >>out, 'set xlabel "time (ms)"' print('set xlabel "time (ms)"', file=out)
print >>out, 'set ylabel "Rate (B/s)"' print('set ylabel "Rate (B/s)"', file=out)
print >>out, 'set ytics 10000' print('set ytics 10000', file=out)
print >>out, 'set y2label "number"' print('set y2label "number"', file=out)
print >>out, 'set y2range [0:*]' print('set y2range [0:*]', file=out)
#print >>out, "set style data lines" # print("set style data lines", file=out)
print >>out, "set key box" print("set key box", file=out)
print >>out, 'plot', print('plot', end=' ', file=out)
for k, a, c in keys: for k, a, c in keys:
print >>out, ' "%s" using 1:%d title "%s" axes %s with steps,' % (sys.argv[1], c, k, a), print(' "%s" using 1:%d title "%s" axes %s with steps,' % (sys.argv[1], c, k, a), end=' ', file=out)
print >>out, 'x=0' print('x=0', file=out)
out.close() out.close()
os.system('gnuplot bandwidth.gnuplot'); os.system('gnuplot bandwidth.gnuplot')

View File

@ -3,14 +3,18 @@
# subject to the Boost Software License, Version 1.0. (See accompanying # subject to the Boost Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) # file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
import os, sys, time from __future__ import print_function
import os
import sys
lines = open(sys.argv[1], 'rb').readlines() lines = open(sys.argv[1], 'rb').readlines()
#keys = ['send_buffer_utilization'] # keys = ['send_buffer_utilization']
keys = ['send_buffer_size', 'used_send_buffer', 'protocol_buffer'] keys = ['send_buffer_size', 'used_send_buffer', 'protocol_buffer']
#keys = ['send_buffer_alloc', 'send_buffer', 'allocate_buffer_alloc', 'allocate_buffer', 'protocol_buffer'] # keys = ['send_buffer_alloc', 'send_buffer', 'allocate_buffer_alloc', 'allocate_buffer', 'protocol_buffer']
#keys = ['send_buffer_alloc', 'send_buffer', 'allocate_buffer_alloc', 'allocate_buffer', 'protocol_buffer', 'append_send_buffer'] # keys = ['send_buffer_alloc', 'send_buffer', 'allocate_buffer_alloc', 'allocate_buffer', 'protocol_buffer',
# 'append_send_buffer']
average = ['send_buffer_utilization', 'send_buffer_size', 'used_send_buffer'] average = ['send_buffer_utilization', 'send_buffer_size', 'used_send_buffer']
average_interval = 120000 average_interval = 120000
@ -18,7 +22,7 @@ render = 'lines'
time_limit = -1 time_limit = -1
if len(sys.argv) > 2: if len(sys.argv) > 2:
time_limit = long(sys.argv[2]) time_limit = int(sys.argv[2])
# logfile format: # logfile format:
@ -27,64 +31,68 @@ if len(sys.argv) > 2:
# 16434 allocate_buffer: 17 # 16434 allocate_buffer: 17
for k in keys: for k in keys:
last_sample = 0 last_sample = 0
average_accumulator = 0 average_accumulator = 0
average_samples = 0 average_samples = 0
peak = 0 peak = 0
out = open(k + '.dat', 'wb') out = open(k + '.dat', 'wb')
eval_average = False eval_average = False
if k in average: if k in average:
eval_average = True eval_average = True
peak_out = open(k + '_peak.dat', 'wb') peak_out = open(k + '_peak.dat', 'wb')
for l in lines: for line in lines:
l = l.split(' ') line = line.split(' ')
if len(l) != 3: if len(line) != 3:
print l print(line)
continue continue
try: try:
if l[1] == k + ':': if line[1] == k + ':':
if time_limit != -1 and long(l[0]) > time_limit: break if time_limit != -1 and int(line[0]) > time_limit:
time = l[0] break
value = l[2] time = line[0]
if eval_average: value = line[2]
while long(time) > last_sample + average_interval: if eval_average:
last_sample = last_sample + average_interval while int(time) > last_sample + average_interval:
if average_samples < 1: average_samples = 1 last_sample = last_sample + average_interval
print >>out, '%d %f' % (last_sample, average_accumulator / average_samples) if average_samples < 1:
print >>peak_out, '%d %f' % (last_sample, peak) average_samples = 1
average_accumulator = 0 print('%d %f' % (last_sample, average_accumulator / average_samples), file=out)
average_samples = 0 print('%d %f' % (last_sample, peak), file=peak_out)
peak = 0 average_accumulator = 0
average_accumulator = average_accumulator + float(value) average_samples = 0
average_samples = average_samples + 1 peak = 0
if float(value) > peak: peak = float(value) average_accumulator = average_accumulator + float(value)
else: average_samples = average_samples + 1
print >>out, time + ' ' + value, if float(value) > peak:
except: peak = float(value)
print l else:
print(time + ' ' + value, end=' ', file=out)
except BaseException:
print(line)
out.close() out.close()
peak_out.close() peak_out.close()
out = open('send_buffer.gnuplot', 'wb') out = open('send_buffer.gnuplot', 'wb')
print >>out, "set term png size 1200,700" print("set term png size 1200,700", file=out)
print >>out, 'set output "send_buffer.png"' print('set output "send_buffer.png"', file=out)
print >>out, 'set xrange [0:*]' print('set xrange [0:*]', file=out)
print >>out, 'set xlabel "time (ms)"' print('set xlabel "time (ms)"', file=out)
print >>out, 'set ylabel "bytes (B)"' print('set ylabel "bytes (B)"', file=out)
print >>out, "set style data lines" print("set style data lines", file=out)
print >>out, "set key box" print("set key box", file=out)
print >>out, 'plot', print('plot', end=' ', file=out)
for k in keys: for k in keys:
if k in average: if k in average:
print >>out, ' "%s.dat" using 1:2 title "%s %d seconds average" with %s,' % (k, k, average_interval / 1000., render), print(' "%s.dat" using 1:2 title "%s %d seconds average" with %s,' %
print >>out, ' "%s_peak.dat" using 1:2 title "%s %d seconds peak" with %s,' % (k, k, average_interval / 1000., render), (k, k, average_interval / 1000., render), end=' ', file=out)
else: print(' "%s_peak.dat" using 1:2 title "%s %d seconds peak" with %s,' %
print >>out, ' "%s.dat" using 1:2 title "%s" with %s,' % (k, k, render), (k, k, average_interval / 1000., render), end=' ', file=out)
print >>out, 'x=0' else:
print(' "%s.dat" using 1:2 title "%s" with %s,' % (k, k, render), end=' ', file=out)
print('x=0', file=out)
out.close() out.close()
os.system('gnuplot send_buffer.gnuplot') os.system('gnuplot send_buffer.gnuplot')

View File

@ -1,8 +1,8 @@
#!/usr/bin/env python #!/usr/bin/env python
from __future__ import print_function
import sys import sys
import os import os
import time
import calendar
import pprint import pprint
pp = pprint.PrettyPrinter(indent=4) pp = pprint.PrettyPrinter(indent=4)
@ -13,11 +13,11 @@ f = open(sys.argv[1])
announce_histogram = {} announce_histogram = {}
#TODO: make this histogram into a CDF # TODO: make this histogram into a CDF
node_uptime_histogram = {} node_uptime_histogram = {}
counter = 0; counter = 0
# maps search_id to a list of events. Each event is a dict containing: # maps search_id to a list of events. Each event is a dict containing:
# t: timestamp # t: timestamp
@ -32,10 +32,12 @@ outstanding_searches = {}
# list of completed searches # list of completed searches
searches = [] searches = []
def convert_timestamp(t): def convert_timestamp(t):
parts = t.split('.') parts = t.split('.')
hms = parts[0].split(':') hms = parts[0].split(':')
return (int(hms[0]) * 3600 + int(hms[1]) * 60 + int(hms[2])) * 1000 + int(parts[1]) return (int(hms[0]) * 3600 + int(hms[1]) * 60 + int(hms[2])) * 1000 + int(parts[1])
last_incoming = '' last_incoming = ''
@ -46,111 +48,117 @@ client_version_histogram = {}
client_histogram = {} client_histogram = {}
for line in f: for line in f:
counter += 1 counter += 1
# if counter % 1000 == 0: # if counter % 1000 == 0:
# print '\r%d' % counter, # print '\r%d' % counter,
try: try:
l = line.split(' ') ls = line.split(' ')
if 'starting DHT tracker with node id:' in line: if 'starting DHT tracker with node id:' in line:
our_node_id = l[l.index('id:') + 1].strip() our_node_id = ls[ls.index('id:') + 1].strip()
try: try:
if len(l) > 4 and l[2] == '<==' and l[1] == '[dht_tracker]': if len(ls) > 4 and ls[2] == '<==' and ls[1] == '[dht_tracker]':
ip = l[3].split(':')[0] ip = ls[3].split(':')[0]
if ip not in unique_ips: if ip not in unique_ips:
unique_ips.add(ip) unique_ips.add(ip)
json_blob = line.split(l[3])[1] json_blob = line.split(ls[3])[1]
version = json_blob.split("'v': '")[1].split("'")[0] version = json_blob.split("'v': '")[1].split("'")[0]
if len(version) == 4: if len(version) == 4:
v = '%s-%d' % (version[0:2], (ord(version[2]) << 8) + ord(version[3])) v = '%s-%d' % (version[0:2], (ord(version[2]) << 8) + ord(version[3]))
elif len(version) == 8: elif len(version) == 8:
v = '%c%c-%d' % (chr(int(version[0:2], 16)), chr(int(version[2:4], 16)), int(version[4:8], 16)) v = '%c%c-%d' % (chr(int(version[0:2], 16)), chr(int(version[2:4], 16)), int(version[4:8], 16))
else: else:
v = 'unknown' v = 'unknown'
if not v in client_version_histogram: if v not in client_version_histogram:
client_version_histogram[v] = 1 client_version_histogram[v] = 1
else: else:
client_version_histogram[v] += 1 client_version_histogram[v] += 1
if not v[0:2] in client_histogram: if not v[0:2] in client_histogram:
client_histogram[v[0:2]] = 1 client_histogram[v[0:2]] = 1
else: else:
client_histogram[v[0:2]] += 1 client_histogram[v[0:2]] += 1
except: pass except BaseException:
pass
if 'announce-distance:' in line: if 'announce-distance:' in line:
idx = l.index('announce-distance:') idx = ls.index('announce-distance:')
d = int(l[idx+1].strip()) d = int(ls[idx + 1].strip())
if not d in announce_histogram: announce_histogram[d] = 0 if d not in announce_histogram:
announce_histogram[d] += 1 announce_histogram[d] = 0
if 'NODE FAILED' in line: announce_histogram[d] += 1
idx = l.index('fails:') if 'NODE FAILED' in line:
if int(l[idx+1].strip()) != 1: continue; idx = ls.index('fails:')
idx = l.index('up-time:') if int(ls[idx + 1].strip()) != 1:
d = int(l[idx+1].strip()) continue
# quantize idx = ls.index('up-time:')
d = d - (d % up_time_quanta) d = int(ls[idx + 1].strip())
if not d in node_uptime_histogram: node_uptime_histogram[d] = 0 # quantize
node_uptime_histogram[d] += 1 d = d - (d % up_time_quanta)
if d not in node_uptime_histogram:
node_uptime_histogram[d] = 0
node_uptime_histogram[d] += 1
search_id = l[2] search_id = ls[2]
ts = l[0] ts = ls[0]
event = l[3] event = ls[3]
if event == 'RESPONSE': if event == 'RESPONSE':
outstanding = int(l[l.index('invoke-count:')+1]) outstanding = int(ls[ls.index('invoke-count:') + 1])
nid = l[l.index('id:')+1] distance = int(ls[ls.index('distance:') + 1])
addr = l[l.index('addr:')+1] nid = ls[ls.index('id:') + 1]
last_response = addr addr = ls[ls.index('addr:') + 1]
outstanding_searches[search_id].append({ 't': ts, 'd': distance, last_response = addr
'o': outstanding + 1, 'a':addr, 'e': event,'i':nid, 's':source}) outstanding_searches[search_id].append({'t': ts, 'd': distance,
elif event == 'NEW': 'o': outstanding + 1, 'a': addr, 'e': event, 'i': nid})
nid = l[l.index('target:')+1] elif event == 'NEW':
outstanding_searches[search_id] = [{ 't': ts, 'd': 0, 'o': 0, \ nid = ls[ls.index('target:') + 1]
'e': event, 'abstime': ts, 'i': nid}] outstanding_searches[search_id] = [{'t': ts, 'd': 0, 'o': 0,
last_response = '' 'e': event, 'abstime': ts, 'i': nid}]
elif event == 'INVOKE' or event == 'ADD' or event == '1ST_TIMEOUT' or \ last_response = ''
event == 'TIMEOUT' or event == 'PEERS': elif event == 'INVOKE' or event == 'ADD' or event == '1ST_TIMEOUT' or \
if not search_id in outstanding_searches: event == 'TIMEOUT' or event == 'PEERS':
print 'orphaned event: %s' % line if search_id not in outstanding_searches:
else: print('orphaned event: %s' % line)
outstanding = int(l[l.index('invoke-count:')+1]) else:
distance = int(l[l.index('distance:')+1]) outstanding = int(ls[ls.index('invoke-count:') + 1])
nid = l[l.index('id:')+1] distance = int(ls[ls.index('distance:') + 1])
addr = l[l.index('addr:')+1] nid = ls[ls.index('id:') + 1]
source = '' addr = ls[ls.index('addr:') + 1]
if event == 'ADD': source = ''
if last_response == '': continue if event == 'ADD':
source = last_response if last_response == '':
continue
source = last_response
outstanding_searches[search_id].append({ 't': ts, 'd': distance, outstanding_searches[search_id].append(
'o': outstanding + 1, 'a':addr, 'e': event,'i':nid, 's':source}) {'t': ts, 'd': distance, 'o': outstanding + 1, 'a': addr, 'e': event, 'i': nid, 's': source})
elif event == 'ABORTED': elif event == 'ABORTED':
outstanding_searches[search_id].append({ 't': ts, 'e': event}) outstanding_searches[search_id].append({'t': ts, 'e': event})
elif event == 'COMPLETED': elif event == 'COMPLETED':
distance = int(l[l.index('distance:')+1]) distance = int(ls[ls.index('distance:') + 1])
lookup_type = l[l.index('type:')+1].strip() lookup_type = ls[ls.index('type:') + 1].strip()
outstanding_searches[search_id].append({ 't': ts, 'd': distance, outstanding_searches[search_id].append({'t': ts, 'd': distance,
'o': 0, 'e': event,'i':''}) 'o': 0, 'e': event, 'i': ''})
outstanding_searches[search_id][0]['type'] = lookup_type outstanding_searches[search_id][0]['type'] = lookup_type
s = outstanding_searches[search_id] s = outstanding_searches[search_id]
try: try:
start_time = convert_timestamp(s[0]['t']) start_time = convert_timestamp(s[0]['t'])
for i in range(len(s)): for i in range(len(s)):
s[i]['t'] = convert_timestamp(s[i]['t']) - start_time s[i]['t'] = convert_timestamp(s[i]['t']) - start_time
except: except BaseException:
pass pass
searches.append(s) searches.append(s)
del outstanding_searches[search_id] del outstanding_searches[search_id]
except Exception, e: except Exception as e:
print e print(e)
print line.split(' ') print(line.split(' '))
lookup_times_min = [] lookup_times_min = []
lookup_times_max = [] lookup_times_max = []
@ -159,26 +167,29 @@ lookup_times_max = []
# to target boundaries # to target boundaries
lookup_distance = [] lookup_distance = []
for i in range(0, 15): for i in range(0, 15):
lookup_distance.append([]) lookup_distance.append([])
for s in searches: for s in searches:
for i in s: for i in s:
if not 'last_dist' in i: if 'last_dist' not in i:
i['last_dist'] = -1 i['last_dist'] = -1
cur_dist = 160 - i['d'] cur_dist = 160 - i['d']
last_dist = i['last_dist'] last_dist = i['last_dist']
if cur_dist > last_dist: if cur_dist > last_dist:
for j in range(last_dist + 1, cur_dist + 1): for j in range(last_dist + 1, cur_dist + 1):
if j >= len(lookup_distance): break if j >= len(lookup_distance):
lookup_distance[j].append(i['t']) break
i['last_dist'] = cur_dist lookup_distance[j].append(i['t'])
if i['e'] != 'PEERS': continue i['last_dist'] = cur_dist
lookup_times_min.append(i['t']) if i['e'] != 'PEERS':
break continue
for i in reversed(s): lookup_times_min.append(i['t'])
if i['e'] != 'PEERS': continue break
lookup_times_max.append(i['t']) for i in reversed(s):
break if i['e'] != 'PEERS':
continue
lookup_times_max.append(i['t'])
break
lookup_times_min.sort() lookup_times_min.sort()
@ -186,74 +197,74 @@ lookup_times_max.sort()
out = open('dht_lookup_times_cdf.txt', 'w+') out = open('dht_lookup_times_cdf.txt', 'w+')
counter = 0 counter = 0
for i in range(len(lookup_times_min)): for i in range(len(lookup_times_min)):
counter += 1 counter += 1
print >>out, '%d\t%d\t%f' % (lookup_times_min[i], lookup_times_max[i], counter / float(len(lookup_times_min))) print('%d\t%d\t%f' % (lookup_times_min[i], lookup_times_max[i], counter / float(len(lookup_times_min))), file=out)
out.close() out.close()
for i in lookup_distance: for i in lookup_distance:
i.sort() i.sort()
dist = 0 dist = 0
for i in lookup_distance: for i in lookup_distance:
out = open('dht_lookup_distance_%d.txt' % dist, 'w+') out = open('dht_lookup_distance_%d.txt' % dist, 'w+')
dist += 1 dist += 1
counter = 0 counter = 0
for j in i: for j in i:
counter += 1 counter += 1
print >>out, '%d\t%f' % (j, counter / float(len(i))) print('%d\t%f' % (j, counter / float(len(i))), file=out)
out.close() out.close()
out = open('dht_lookups.txt', 'w+') out = open('dht_lookups.txt', 'w+')
for s in searches: for s in searches:
for i in s: for i in s:
if i['e'] == 'INVOKE': if i['e'] == 'INVOKE':
print >>out, ' ->', i['t'], 160 - i['d'], i['i'], i['a'] print(' ->', i['t'], 160 - i['d'], i['i'], i['a'], file=out)
elif i['e'] == '1ST_TIMEOUT': elif i['e'] == '1ST_TIMEOUT':
print >>out, ' x ', i['t'], 160 - i['d'], i['i'], i['a'] print(' x ', i['t'], 160 - i['d'], i['i'], i['a'], file=out)
elif i['e'] == 'TIMEOUT': elif i['e'] == 'TIMEOUT':
print >>out, ' X ', i['t'], 160 - i['d'], i['i'], i['a'] print(' X ', i['t'], 160 - i['d'], i['i'], i['a'], file=out)
elif i['e'] == 'ADD': elif i['e'] == 'ADD':
print >>out, ' + ', i['t'], 160 - i['d'], i['i'], i['a'], i['s'] print(' + ', i['t'], 160 - i['d'], i['i'], i['a'], i['s'], file=out)
elif i['e'] == 'RESPONSE': elif i['e'] == 'RESPONSE':
print >>out, ' <-', i['t'], 160 - i['d'], i['i'], i['a'] print(' <-', i['t'], 160 - i['d'], i['i'], i['a'], file=out)
elif i['e'] == 'PEERS': elif i['e'] == 'PEERS':
print >>out, ' <-', i['t'], 160 - i['d'], i['i'], i['a'] print(' <-', i['t'], 160 - i['d'], i['i'], i['a'], file=out)
elif i['e'] == 'ABORTED': elif i['e'] == 'ABORTED':
print >>out, 'abort' print('abort', file=out)
elif i['e'] == 'COMPLETED': elif i['e'] == 'COMPLETED':
print >>out, '***', i['t'], 160 - i['d'], '\n' print('***', i['t'], 160 - i['d'], '\n', file=out)
elif i['e'] == 'NEW': elif i['e'] == 'NEW':
print >>out, '===', i['abstime'], i['type'], '===' print('===', i['abstime'], i['type'], '===', file=out)
print >>out, '<> ', 0, our_node_id, i['i'] print('<> ', 0, our_node_id, i['i'], file=out)
out.close() out.close()
out = open('dht_announce_distribution.dat', 'w+') out = open('dht_announce_distribution.dat', 'w+')
print 'announce distribution items: %d' % len(announce_histogram) print('announce distribution items: %d' % len(announce_histogram))
for k,v in announce_histogram.items(): for k, v in list(announce_histogram.items()):
print >>out, '%d %d' % (k, v) print('%d %d' % (k, v), file=out)
print '%d %d' % (k, v) print('%d %d' % (k, v))
out.close() out.close()
out = open('dht_node_uptime_cdf.txt', 'w+') out = open('dht_node_uptime_cdf.txt', 'w+')
s = 0 s = 0
total_uptime_nodes = 0 total_uptime_nodes = 0
for k,v in node_uptime_histogram.items(): for k, v in list(node_uptime_histogram.items()):
total_uptime_nodes += v total_uptime_nodes += v
for k,v in sorted(node_uptime_histogram.items()): for k, v in sorted(node_uptime_histogram.items()):
s += v s += v
print >>out, '%f %f' % (k / float(60), s / float(total_uptime_nodes)) print('%f %f' % (k / float(60), s / float(total_uptime_nodes)), file=out)
print '%f %f' % (k / float(60), s / float(total_uptime_nodes)) print('%f %f' % (k / float(60), s / float(total_uptime_nodes)))
out.close() out.close()
print 'clients by version' print('clients by version')
client_version_histogram = sorted(client_version_histogram.items(), key=lambda x: x[1], reverse=True) client_version_histogram = sorted(list(client_version_histogram.items()), key=lambda x: x[1], reverse=True)
pp.pprint(client_version_histogram) pp.pprint(client_version_histogram)
print 'clients' print('clients')
client_histogram = sorted(client_histogram.items(), key=lambda x: x[1], reverse=True) client_histogram = sorted(list(client_histogram.items()), key=lambda x: x[1], reverse=True)
pp.pprint(client_histogram) pp.pprint(client_histogram)
out = open('dht.gnuplot', 'w+') out = open('dht.gnuplot', 'w+')
@ -265,7 +276,7 @@ set ylabel "portion of lookups"
set xlabel "time from start of lookup (ms)" set xlabel "time from start of lookup (ms)"
set grid set grid
plot "dht_lookup_times_cdf.txt" using 1:3 with lines title "time to first result", \ plot "dht_lookup_times_cdf.txt" using 1:3 with lines title "time to first result", \
"dht_lookup_times_cdf.txt" using 2:3 with lines title "time to last result" "dht_lookup_times_cdf.txt" using 2:3 with lines title "time to last result"
set terminal postscript set terminal postscript
set output "dht_lookup_times_cdf.ps" set output "dht_lookup_times_cdf.ps"
@ -318,12 +329,11 @@ plot ''')
dist = 0 dist = 0
for i in lookup_distance: for i in lookup_distance:
if dist > 0: out.write(', ') if dist > 0:
out.write('"dht_lookup_distance_%d.txt" using 1:2 title "%d" with lines' % (dist, dist)) out.write(', ')
dist += 1 out.write('"dht_lookup_distance_%d.txt" using 1:2 title "%d" with lines' % (dist, dist))
dist += 1
out.close() out.close()
os.system('gnuplot dht.gnuplot'); os.system('gnuplot dht.gnuplot')

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env python
from __future__ import print_function
import sys import sys
import os import os
@ -11,21 +12,22 @@ distribution = {}
num_messages = 0 num_messages = 0
for i in range(0, max_rtt, quantize): for i in range(0, max_rtt, quantize):
distribution[i] = 0 distribution[i] = 0
for line in f: for line in f:
time = int(line.split('\t')[1]) time = int(line.split('\t')[1])
if (time < 0 or time > max_rtt - quantize): continue if (time < 0 or time > max_rtt - quantize):
num_messages += 1 continue
time /= quantize num_messages += 1
time *= quantize time /= quantize
distribution[time] += 1 time *= quantize
distribution[time] += 1
f = open('round_trip_distribution.log', 'w+') f = open('round_trip_distribution.log', 'w+')
for k, v in distribution.items(): for k, v in list(distribution.items()):
print >>f, '%f %d' % ((k + (quantize / 2)) / 1000.0, v) print('%f %d' % ((k + (quantize / 2)) / 1000.0, v), file=f)
f.close(); f.close()
f = open('round_trip_distribution.gnuplot', 'w+') f = open('round_trip_distribution.gnuplot', 'w+')
@ -48,5 +50,4 @@ replot
''' % (float(quantize) / 1000.0)) ''' % (float(quantize) / 1000.0))
f.close() f.close()
os.system('gnuplot round_trip_distribution.gnuplot'); os.system('gnuplot round_trip_distribution.gnuplot')

View File

@ -4,50 +4,59 @@ import os
gnuplot_scripts = [] gnuplot_scripts = []
def gen_stats_gnuplot(name, y, lines): def gen_stats_gnuplot(name, y, lines):
global gnuplot_scripts global gnuplot_scripts
stat = open(sys.argv[1]) stat = open(sys.argv[1])
line = stat.readline() line = stat.readline()
while not 'minute:' in line: while 'minute:' not in line:
line = stat.readline() line = stat.readline()
names = line.strip().split(':') names = line.strip().split(':')
counter = 1 counter = 1
for i in names: for i in names:
print '%d: %s' % (counter, i) print('%d: %s' % (counter, i))
counter += 1 counter += 1
out = open('%s.gnuplot' % name, 'w+') out = open('%s.gnuplot' % name, 'w+')
out.write(''' out.write('''
set term png size 1200,700 small set term png size 1200,700 small
set output "%s.png" set output "%s.png"
set title "%s" set title "%s"
set ylabel "%s" set ylabel "%s"
set xlabel "time (minutes)" set xlabel "time (minutes)"
plot ''' % (name, name.strip('_'), y)) plot ''' % (name, name.strip('_'), y))
first = True first = True
for i in lines: for i in lines:
if not first: if not first:
out.write(', \\\n') out.write(', \\\n')
first = False first = False
out.write('"%s" using 1:%d title "%s" with lines' % (sys.argv[1], names.index(i)+1, i)) out.write('"%s" using 1:%d title "%s" with lines' % (sys.argv[1], names.index(i) + 1, i))
out.write('\n') out.write('\n')
out.write('''set terminal postscript out.write('''set terminal postscript
set output "%s.ps" set output "%s.ps"
replot replot
''' % (name)) ''' % (name))
out.close() out.close()
gnuplot_scripts += [name] gnuplot_scripts += [name]
gen_stats_gnuplot('dht_routing_table_size', 'nodes', ['active nodes','passive nodes', 'confirmed nodes'])
gen_stats_gnuplot('dht_routing_table_size', 'nodes', ['active nodes', 'passive nodes', 'confirmed nodes'])
gen_stats_gnuplot('dht_tracker_table_size', '', ['num torrents', 'num peers']) gen_stats_gnuplot('dht_tracker_table_size', '', ['num torrents', 'num peers'])
gen_stats_gnuplot('dht_announces', 'messages per minute', ['announces per min', 'failed announces per min']) gen_stats_gnuplot('dht_announces', 'messages per minute', ['announces per min', 'failed announces per min'])
gen_stats_gnuplot('dht_clients', 'messages per minute', ['total msgs per min', 'az msgs per min', 'ut msgs per min', 'lt msgs per min', 'mp msgs per min', 'gr msgs per min']) gen_stats_gnuplot('dht_clients',
gen_stats_gnuplot('dht_rate', 'bytes per second', ['bytes in per sec', 'bytes out per sec']) 'messages per minute',
gen_stats_gnuplot('dht_errors', 'messages per minute', ['error replies sent', 'error queries recvd']) ['total msgs per min',
'az msgs per min',
'ut msgs per min',
'lt msgs per min',
'mp msgs per min',
'gr msgs per min'])
gen_stats_gnuplot('dht_rate', 'bytes per second', ['bytes in per sec', 'bytes out per sec'])
gen_stats_gnuplot('dht_errors', 'messages per minute', ['error replies sent', 'error queries recvd'])
for i in gnuplot_scripts: for i in gnuplot_scripts:
os.system('gnuplot %s.gnuplot' % i); os.system('gnuplot %s.gnuplot' % i)

View File

@ -1,6 +1,8 @@
#!/usr/bin/env python #!/usr/bin/env python
from __future__ import print_function
import os, sys, time import os
import sys
lines = open(sys.argv[1], 'rb').readlines() lines = open(sys.argv[1], 'rb').readlines()
@ -10,11 +12,11 @@ lines = open(sys.argv[1], 'rb').readlines()
# 16434 read cache: 17 # 16434 read cache: 17
key_order = ['receive buffer', 'send buffer', 'released send buffer', 'posted send buffer', key_order = ['receive buffer', 'send buffer', 'released send buffer', 'posted send buffer',
'received send buffer', 'dispatched send buffer', 'queued send buffer', 'received send buffer', 'dispatched send buffer', 'queued send buffer',
'write cache', 'read cache', 'hash temp'] 'write cache', 'read cache', 'hash temp']
colors = ['30f030', '001070', '101080', '2040a0', colors = ['30f030', '001070', '101080', '2040a0',
'4070d0', '80a0f0', 'f03030', '4070d0', '80a0f0', 'f03030',
'80f080', 'f08080', '4040ff'] '80f080', 'f08080', '4040ff']
keys = [] keys = []
fields = {} fields = {}
@ -26,67 +28,70 @@ field_num_samples = {}
field_timestamp = {} field_timestamp = {}
for c in key_order: for c in key_order:
keys.append(c) keys.append(c)
fields[c] = 0 fields[c] = 0
maximum[c] = 0 maximum[c] = 0
field_sum[c] = 0 field_sum[c] = 0
field_num_samples[c] = 0 field_num_samples[c] = 0
field_timestamp[c] = 0 field_timestamp[c] = 0
last_t = 0 last_t = 0
for l in lines: for l in lines:
try: try:
t = int(l[0:l.find(' ')]) t = int(l[0:l.find(' ')])
c = l[l.find(' ')+1:l.find(':')] c = l[l.find(' ') + 1:l.find(':')]
n = int(l[l.find(':')+1:-1]) n = int(l[l.find(':') + 1:-1])
except: except BaseException:
print l print(l)
continue continue
if last_t != t: if last_t != t:
print >>out, '%d\t' % last_t, print('%d\t' % last_t, end=' ', file=out)
for i in keys: for i in keys:
print >>out, '%d\t' % maximum[i], print('%d\t' % maximum[i], end=' ', file=out)
print >>out, '\n', print('\n', end=' ', file=out)
if not c in keys: continue if c not in keys:
continue
field_sum[c] += fields[c] * float(t - field_timestamp[c]) field_sum[c] += fields[c] * float(t - field_timestamp[c])
field_timestamp[c] = t field_timestamp[c] = t
fields[c] = n fields[c] = n
if n > maximum[c]: maximum[c] = n if n > maximum[c]:
maximum[c] = n
if last_t != t: if last_t != t:
last_t = t last_t = t
maximum = fields maximum = fields
for i in keys: for i in keys:
print '%s: avg: %f' % (i, field_sum[i] / last_t) print('%s: avg: %f' % (i, field_sum[i] / last_t))
print print()
out.close() out.close()
out = open('disk_buffer.gnuplot', 'wb') out = open('disk_buffer.gnuplot', 'wb')
print >>out, "set term png size 1200,700" print("set term png size 1200,700", file=out)
print >>out, 'set output "disk_buffer.png"' print('set output "disk_buffer.png"', file=out)
print >>out, 'set xrange [0:*]' print('set xrange [0:*]', file=out)
print >>out, 'set xlabel "time (ms)"' print('set xlabel "time (ms)"', file=out)
print >>out, 'set ylabel "buffers"' print('set ylabel "buffers"', file=out)
print >>out, "set style data lines" print("set style data lines", file=out)
print >>out, "set key box" print("set key box", file=out)
print >>out, 'plot', print('plot', end=' ', file=out)
count = 1 + len(keys) count = 1 + len(keys)
keys.reverse() keys.reverse()
comma = '' comma = ''
for k in keys: for k in keys:
expr = "$%d" % count expr = "$%d" % count
for i in xrange(2, count): expr += "+$%d" % i for i in range(2, count):
count -= 1 expr += "+$%d" % i
print >>out, ' %s"disk_buffer_log.dat" using 1:(%s) title "%s" with filledcurves x1 lt rgb "#%s"' % (comma, expr, k, colors[count-1]), count -= 1
comma = ',' print(' %s"disk_buffer_log.dat" using 1:(%s) title "%s" with filledcurves x1 lt rgb "#%s"' %
(comma, expr, k, colors[count - 1]), end=' ', file=out)
comma = ','
out.close() out.close()
os.system('gnuplot disk_buffer.gnuplot') os.system('gnuplot disk_buffer.gnuplot')

View File

@ -1,119 +1,129 @@
#!/usr/bin/env python #!/usr/bin/env python
# this is meant to parse the dht_lookups.log generated by parse_dht_log.py # this is meant to parse the dht_lookups.log generated by parse_dht_log.py
from __future__ import print_function
import os import os
nodes = {} nodes = {}
def get_origin(n): def get_origin(n):
if n in nodes: if n in nodes:
return list(nodes[n]['conns']) return list(nodes[n]['conns'])
else: else:
return ['0.0.0.0'] return ['0.0.0.0']
def calculate_pos(nid, dist): def calculate_pos(nid, dist):
nid = int(nid[0:7], 16) nid = int(nid[0:7], 16)
x = 0 x = 0
y = 0 y = 0
for i in range(0, 28, 2): for i in range(0, 28, 2):
x |= (nid & (1 << i)) >> (i / 2) x |= (nid & (1 << i)) >> (i / 2)
y |= (nid & (2 << i)) >> (i / 2 + 1) y |= (nid & (2 << i)) >> (i / 2 + 1)
# print '%d -> %d %d' % (dist, x, y) # print '%d -> %d %d' % (dist, x, y)
return (x / 3, y / 3)
return (x / 3, y / 3)
def plot_nodes(nodes, frame): def plot_nodes(nodes, frame):
try: os.mkdir('dht_frames') try:
except: pass os.mkdir('dht_frames')
except BaseException:
pass
out = open('dht_frames/plot-%02d.dot' % frame, 'w+') out = open('dht_frames/plot-%02d.dot' % frame, 'w+')
edges = set() edges = set()
print >>out, 'graph swarm {' print('graph swarm {', file=out)
# print >>out, '"tl" [shape=point pos="0,0!"];' # print >>out, '"tl" [shape=point pos="0,0!"];'
# print >>out, '"tr" [shape=point pos="1638,0!"];' # print >>out, '"tr" [shape=point pos="1638,0!"];'
# print >>out, '"ll" [shape=point pos="1638,1638!"];' # print >>out, '"ll" [shape=point pos="1638,1638!"];'
# print >>out, '"tr" [shape=point pos="0,1638!"];' # print >>out, '"tr" [shape=point pos="0,1638!"];'
for dst, n in nodes.items(): for dst, n in list(nodes.items()):
shape = 'point' shape = 'point'
if 's' in n: shape = n['s'] if 's' in n:
shape = n['s']
print >>out, '"%s" [shape=%s fillcolor="%s" label="" pos="%d,%d!"];' % (dst, shape, n['c'], n['p'][0], n['p'][1]) print('"%s" [shape=%s fillcolor="%s" label="" pos="%d,%d!"];' %
for e in n['conns']: (dst, shape, n['c'], n['p'][0], n['p'][1]), file=out)
if (e, dst) in edges: continue for e in n['conns']:
if (e, dst) in edges:
continue
# only add an edge once to the .dot file # only add an edge once to the .dot file
edges.add((e, dst)) edges.add((e, dst))
edges.add((dst, e)) edges.add((dst, e))
style = 'solid' style = 'solid'
col = 'gray' col = 'gray'
if nodes[dst]['c'] != 'white' and nodes[e]['c'] != 'white': if nodes[dst]['c'] != 'white' and nodes[e]['c'] != 'white':
style = 'solid' style = 'solid'
col = 'black' col = 'black'
print >>out, '"%s" -- "%s" [style="%s" color="%s"];' % (e, dst, style, col) print('"%s" -- "%s" [style="%s" color="%s"];' % (e, dst, style, col), file=out)
print >>out, '}' print('}', file=out)
out.close() out.close()
os.system('neato -n dht_frames/plot-%02d.dot -Tpng -o dht_frames/frame-%02d.png' % (frame, frame)) os.system('neato -n dht_frames/plot-%02d.dot -Tpng -o dht_frames/frame-%02d.png' % (frame, frame))
frame = 0 frame = 0
next_render_time = 100 next_render_time = 100
f = open('dht_lookups.txt') f = open('dht_lookups.txt')
for l in f: for line in f:
if l.startswith('***'): break if line.startswith('***'):
break
kind = l[0:3].strip() kind = line[0:3].strip()
l = l[3:].strip().split(' ') line = line[3:].strip().split(' ')
if kind == '===': continue if kind == '===':
continue
t = int(l[0]) t = int(line[0])
if t > next_render_time: if t > next_render_time:
plot_nodes(nodes, frame) plot_nodes(nodes, frame)
frame += 1 frame += 1
next_render_time += 100 next_render_time += 100
# sys.exit(0) # sys.exit(0)
if kind == '<>': if kind == '<>':
p = calculate_pos(l[1], 0) p = calculate_pos(line[1], 0)
dst = '0.0.0.0' dst = '0.0.0.0'
if not dst in nodes: if dst not in nodes:
nodes[dst] = { 'conns': set(), 'p': p, 'c': 'blue', 's': 'circle'} nodes[dst] = {'conns': set(), 'p': p, 'c': 'blue', 's': 'circle'}
p = calculate_pos(l[2], 25) p = calculate_pos(line[2], 25)
dst = '255.255.255.255' dst = '255.255.255.255'
if not dst in nodes: if dst not in nodes:
nodes[dst] = { 'conns': set(), 'p': p, 'c': 'yellow', 's': 'circle'} nodes[dst] = {'conns': set(), 'p': p, 'c': 'yellow', 's': 'circle'}
elif kind == '->': elif kind == '->':
dst = l[3] dst = line[3]
if not dst in nodes: if dst not in nodes:
src = get_origin(dst) src = get_origin(dst)
p = calculate_pos(l[2], int(l[1])) p = calculate_pos(line[2], int(line[1]))
nodes[dst] = { 'conns': set(src), 'p': p, 'c': 'grey'} nodes[dst] = {'conns': set(src), 'p': p, 'c': 'grey'}
nodes[dst]['c'] = 'grey' nodes[dst]['c'] = 'grey'
elif kind == '+': elif kind == '+':
dst = l[3] dst = line[3]
src = l[4] src = line[4]
p = calculate_pos(l[2], int(l[1])) p = calculate_pos(line[2], int(line[1]))
if not dst in nodes: if dst not in nodes:
nodes[dst] = { 'conns': set(), 'p': p, 'c': 'white'} nodes[dst] = {'conns': set(), 'p': p, 'c': 'white'}
nodes[dst]['conns'].add(src) nodes[dst]['conns'].add(src)
elif kind == '<-': elif kind == '<-':
dst = l[3] dst = line[3]
nodes[dst]['c'] = 'green' nodes[dst]['c'] = 'green'
elif kind == 'x': elif kind == 'x':
dst = l[3] dst = line[3]
nodes[dst]['c'] = 'orange' nodes[dst]['c'] = 'orange'
elif kind == 'X': elif kind == 'X':
dst = l[3] dst = line[3]
nodes[dst]['c'] = 'red' nodes[dst]['c'] = 'red'
f.close() f.close()

View File

@ -1,5 +1,8 @@
#!/usr/bin/env python #!/usr/bin/env python
import os, sys, time from __future__ import print_function
import os
import sys
# usage: memory.log memory_index.log # usage: memory.log memory_index.log
@ -13,58 +16,60 @@ index = open(sys.argv[2], 'rb').readlines()
allocation_points_to_print = 30 allocation_points_to_print = 30
def print_allocation_point(ap): def print_allocation_point(ap):
print 'space_time: %d kBms' % (ap['spacetime'] / 1024) print('space_time: %d kBms' % (ap['spacetime'] / 1024))
print 'allocations: %d' % ap['allocations'] print('allocations: %d' % ap['allocations'])
print 'peak: %d kB' % (ap['peak'] / 1024) print('peak: %d kB' % (ap['peak'] / 1024))
print 'stack: ' print('stack: ')
counter = 0 counter = 0
for e in ap['stack']: for e in ap['stack']:
print '#%d %s' % (counter, e) print('#%d %s' % (counter, e))
counter += 1 counter += 1
allocation_points = [] allocation_points = []
for l in index: for line in index:
l = l.split('#') line = line.split('#')
l.pop(0) line.pop(0)
ap = { 'allocations': 0, 'peak': 0, 'spacetime': 0, 'allocation_point': len(allocation_points), 'stack': l} ap = {'allocations': 0, 'peak': 0, 'spacetime': 0, 'allocation_point': len(allocation_points), 'stack': line}
allocation_points.append(ap); allocation_points.append(ap)
for l in lines: for line in lines:
l = l.lstrip('#').rstrip('\n').split(' ') line = line.lstrip('#').rstrip('\n').split(' ')
if len(l) != 8: if len(line) != 8:
print l print(line)
continue continue
try: try:
ap = int(l[0]) ap = int(line[0])
allocation_points[ap]['allocations'] += 1 allocation_points[ap]['allocations'] += 1
allocation_points[ap]['peak'] = int(l[7]) allocation_points[ap]['peak'] = int(line[7])
allocation_points[ap]['spacetime'] = int(l[6]) allocation_points[ap]['spacetime'] = int(line[6])
except Exception, e: except Exception as e:
print type(e), e, l print(type(e), e, line)
print '=== space time ===' print('=== space time ===')
hot_ap = [] hot_ap = []
allocation_points.sort(key = lambda x:x['spacetime'], reverse=True); allocation_points.sort(key=lambda x: x['spacetime'], reverse=True)
counter = 0 counter = 0
for ap in allocation_points[0:allocation_points_to_print]: for ap in allocation_points[0:allocation_points_to_print]:
print '== %d ==' % counter print('== %d ==' % counter)
counter += 1 counter += 1
print_allocation_point(ap) print_allocation_point(ap)
hot_ap.append(ap['allocation_point']); hot_ap.append(ap['allocation_point'])
print '=== allocations ===' print('=== allocations ===')
allocation_points.sort(key = lambda x:x['allocations'], reverse=True); allocation_points.sort(key=lambda x: x['allocations'], reverse=True)
for ap in allocation_points[0:allocation_points_to_print]: for ap in allocation_points[0:allocation_points_to_print]:
print_allocation_point(ap) print_allocation_point(ap)
print '=== peak ===' print('=== peak ===')
allocation_points.sort(key = lambda x:x['peak'], reverse=True); allocation_points.sort(key=lambda x: x['peak'], reverse=True)
for ap in allocation_points[0:allocation_points_to_print]: for ap in allocation_points[0:allocation_points_to_print]:
print_allocation_point(ap) print_allocation_point(ap)
# generate graph # generate graph
lines = open(sys.argv[1], 'rb').readlines() lines = open(sys.argv[1], 'rb').readlines()
@ -74,53 +79,54 @@ cur_line = [0] * allocation_points_to_print
prev_line = [0] * allocation_points_to_print prev_line = [0] * allocation_points_to_print
last_time = 0 last_time = 0
for l in lines: for line in lines:
l = l.lstrip('#').rstrip('\n').split(' ') line = line.lstrip('#').rstrip('\n').split(' ')
if len(l) != 8: if len(line) != 8:
print l print(line)
continue continue
try: try:
time = int(l[1]) time = int(line[1])
if time != last_time: if time != last_time:
print >>out, last_time, '\t', print(last_time, '\t', end=' ', file=out)
for i in range(allocation_points_to_print): for i in range(allocation_points_to_print):
if cur_line[i] == -1: if cur_line[i] == -1:
print >>out, prev_line[i], '\t', print(prev_line[i], '\t', end=' ', file=out)
else: else:
print >>out, cur_line[i], '\t', print(cur_line[i], '\t', end=' ', file=out)
prev_line[i] = cur_line[i] prev_line[i] = cur_line[i]
print >>out print(file=out)
cur_line = [-1] * allocation_points_to_print cur_line = [-1] * allocation_points_to_print
last_time = time last_time = time
size = int(l[5]) size = int(line[5])
ap = int(l[0]) ap = int(line[0])
if ap in hot_ap: if ap in hot_ap:
index = hot_ap.index(ap) index = hot_ap.index(ap)
cur_line[index] = max(cur_line[index], size) cur_line[index] = max(cur_line[index], size)
except Exception, e: except Exception as e:
print type(e), e, l print(type(e), e, line)
out.close() out.close()
out = open('memory.gnuplot', 'wb') out = open('memory.gnuplot', 'wb')
print >>out, "set term png size 1200,700" print("set term png size 1200,700", file=out)
print >>out, 'set output "memory.png"' print('set output "memory.png"', file=out)
print >>out, 'set xrange [0:*]' print('set xrange [0:*]', file=out)
print >>out, 'set xlabel "time (ms)"' print('set xlabel "time (ms)"', file=out)
print >>out, 'set ylabel "bytes (B)"' print('set ylabel "bytes (B)"', file=out)
print >>out, "set style data lines" print("set style data lines", file=out)
print >>out, "set key box" print("set key box", file=out)
print >>out, 'plot', print('plot', end=' ', file=out)
for k in range(allocation_points_to_print): for k in range(allocation_points_to_print):
print >>out, ' "memory.dat" using 1:(', print(' "memory.dat" using 1:(', end=' ', file=out)
for i in range(k, allocation_points_to_print): for i in range(k, allocation_points_to_print):
if i == k: print >>out, '$%d' % (i + 2), if i == k:
else: print >>out, '+$%d' % (i + 2), print('$%d' % (i + 2), end=' ', file=out)
print >>out, ') title "%d" with filledcurves x1, \\' % k else:
print >>out, 'x=0' print('+$%d' % (i + 2), end=' ', file=out)
print(') title "%d" with filledcurves x1, \\' % k, file=out)
print('x=0', file=out)
out.close() out.close()
os.system('gnuplot memory.gnuplot'); os.system('gnuplot memory.gnuplot')

View File

@ -1,5 +1,7 @@
#!/usr/bin/env python #!/usr/bin/env python
from __future__ import print_function
import glob import glob
import os import os
import sys import sys
@ -9,64 +11,64 @@ import sys
log_files = [] log_files = []
for p in glob.iglob(os.path.join(sys.argv[1], '*.log')): for p in glob.iglob(os.path.join(sys.argv[1], '*.log')):
name = os.path.split(p)[1] name = os.path.split(p)[1]
if name == 'main_session.log': continue if name == 'main_session.log':
print name continue
f = open(p, 'r') print(name)
out_file = p + '.dat' f = open(p, 'r')
log_files.append(out_file) out_file = p + '.dat'
out = open(out_file, 'w+') log_files.append(out_file)
out = open(out_file, 'w+')
uploaded_blocks = 0; uploaded_blocks = 0
downloaded_blocks = 0; downloaded_blocks = 0
for l in f: for l in f:
t = l.split(': ')[0].split('.')[0] t = l.split(': ')[0].split('.')[0]
log_line = False log_line = False
if ' ==> PIECE' in l: if ' ==> PIECE' in l:
uploaded_blocks+= 1 uploaded_blocks += 1
log_line = True log_line = True
if ' <== PIECE' in l: if ' <== PIECE' in l:
downloaded_blocks+= 1 downloaded_blocks += 1
log_line = True log_line = True
if log_line: if log_line:
print >>out, '%s\t%d\t%d' % (t, uploaded_blocks, downloaded_blocks) print('%s\t%d\t%d' % (t, uploaded_blocks, downloaded_blocks), file=out)
out.close() out.close()
f.close() f.close()
out = open('peers.gnuplot', 'wb') out = open('peers.gnuplot', 'wb')
print >>out, "set term png size 1200,700" print("set term png size 1200,700", file=out)
print >>out, 'set xrange [0:*]' print('set xrange [0:*]', file=out)
print >>out, 'set xlabel "time"' print('set xlabel "time"', file=out)
print >>out, 'set ylabel "blocks"' print('set ylabel "blocks"', file=out)
print >>out, 'set key box' print('set key box', file=out)
print >>out, 'set xdata time' print('set xdata time', file=out)
print >>out, 'set timefmt "%H:%M:%S"' print('set timefmt "%H:%M:%S"', file=out)
print >>out, 'set title "uploaded blocks"' print('set title "uploaded blocks"', file=out)
print >>out, 'set output "peers_upload.png"' print('set output "peers_upload.png"', file=out)
print >>out, 'plot', print('plot', end=' ', file=out)
first = True first = True
for n in log_files: for n in log_files:
if not first: if not first:
print >>out, ',', print(',', end=' ', file=out)
first = False first = False
print >>out, ' "%s" using 1:2 title "%s" with steps' % (n, os.path.split(n)[1].split('.log')[0]), print(' "%s" using 1:2 title "%s" with steps' % (n, os.path.split(n)[1].split('.log')[0]), end=' ', file=out)
print >>out, '' print('', file=out)
print >>out, 'set title "downloaded blocks"' print('set title "downloaded blocks"', file=out)
print >>out, 'set output "peers_download.png"' print('set output "peers_download.png"', file=out)
print >>out, 'plot', print('plot', end=' ', file=out)
first = True first = True
for n in log_files: for n in log_files:
if not first: if not first:
print >>out, ',', print(',', end=' ', file=out)
first = False first = False
print >>out, ' "%s" using 1:3 title "%s" with steps' % (n, os.path.split(n)[1].split('.log')[0]), print(' "%s" using 1:3 title "%s" with steps' % (n, os.path.split(n)[1].split('.log')[0]), end=' ', file=out)
print >>out, '' print('', file=out)
out.close() out.close()
os.system('gnuplot peers.gnuplot'); os.system('gnuplot peers.gnuplot')

View File

@ -7,103 +7,155 @@ import sys
# relevant depths and to filter out low sample counts # relevant depths and to filter out low sample counts
f = open(sys.argv[1]) f = open(sys.argv[1])
def parse_line(line):
    """Parse one line of 'sample' profiler output.

    Returns a (indentation, samples, function-name) tuple. A line with no
    leading spaces is not part of a stack trace and yields (0, 0, '').
    """
    indentation = 0
    while indentation < len(line) and line[indentation] == ' ':
        indentation += 1
    if indentation == 0:
        return (0, 0, '')

    fields = line.strip().split(' ')
    samples = int(fields[0])
    # the function name may itself contain spaces (template arguments etc.)
    fun = ' '.join(fields[1:])

    return (indentation, samples, fun)
fold = -1
try:
    sample_limit = int(sys.argv[2])
except (IndexError, ValueError):
    # no (or non-numeric) limit given on the command line
    sample_limit = 5

# stack frames containing any of these substrings fold (hide) everything
# deeper than them in the trace
_fold_if_contains = (
    'invariant_checker_impl', 'free_multiple_buffers',
    'libtorrent::condition::wait', 'allocate_buffer', '::find_POD',
    'SHA1_Update', 'boost::detail::function::basic_vtable', 'operator new',
    'std::_Rb_tree', 'pthread_cond_wait', '::check_invariant()', '_sigtramp',
    'time_now_hires', 'libtorrent::sleep',
    'boost::asio::basic_stream_socket',
    'boost::detail::atomic_count::operator', 'std::string::append',
    '__gnu_debug::_Safe_iterator<std::',
)
# frames exactly matching any of these names fold as well
_fold_if_equals = (
    'malloc', 'free', 'mp_exptmod', 'puts', 'recvmsg', 'sendmsg',
    'semaphore_signal_trap', 'pthread_mutex_lock', 'pthread_mutex_unlock',
    '>::~vector()', 'szone_free_definite_size', 'snprintf', 'usleep',
    'getipnodebyname', 'fflush', 'vfprintf', 'fprintf', 'BN_mod_exp',
    'BN_CTX_free', 'cerror', '0xffffffff',
)

fun_samples = {}

for l in f:
    if 'Sort by top of stack' in l:
        break

    indentation, samples, fun = parse_line(l)
    if samples < sample_limit:
        continue
    if fold != -1 and indentation > fold:
        # still inside a folded sub-tree; skip
        continue
    fold = -1

    if '__gnu_cxx::__normal_iterator<' in fun:
        # fold iterator wrappers together with their parent frame
        fold = indentation - 1
        continue

    # boost bind/mem_fn plumbing frames are noise; skip but don't fold
    if ('boost::_bi::bind_t' in fun or 'boost::_bi::list' in fun
            or 'boost::_mfi::mf' in fun or 'boost::_bi::storage' in fun):
        continue

    # should only add leaves
    fun_samples[fun] = fun_samples.get(fun, 0) + samples

    # // 2 keeps this integer under Python 3 as well
    output = '%s%-4d %s' % (' ' * (indentation // 2), samples, fun)
    if len(output) > 200:
        output = output[:200]
    print(output)

    if any(s in fun for s in _fold_if_contains) or fun in _fold_if_equals:
        fold = indentation

# print functions ranked by total samples (don't shadow builtin `list`)
ranked = []
for k in fun_samples:
    ranked.append((fun_samples[k], k))
ranked = sorted(ranked, reverse=True)

for i in ranked:
    print('%-4d %s' % (i[0], i[1]))

File diff suppressed because it is too large Load Diff

View File

@ -1,54 +1,59 @@
#!/usr/bin/env python #!/usr/bin/env python
from __future__ import print_function
import os, sys, time import os
import sys
from functools import reduce
# usage: parse_log.py log-file [socket-index to focus on] # usage: parse_log.py log-file [socket-index to focus on]
socket_filter = None socket_filter = None
if len(sys.argv) >= 3: if len(sys.argv) >= 3:
socket_filter = sys.argv[2].strip() socket_filter = sys.argv[2].strip()
if socket_filter == None: if socket_filter is None:
print "scanning for socket with the most packets" print("scanning for socket with the most packets")
file = open(sys.argv[1], 'rb') file = open(sys.argv[1], 'rb')
sockets = {} sockets = {}
for l in file: for l in file:
if not 'our_delay' in l: continue if 'our_delay' not in l:
continue
try: try:
a = l.strip().split(" ") a = l.strip().split(" ")
socket_index = a[1][:-1] socket_index = a[1][:-1]
except: except BaseException:
continue continue
# msvc's runtime library doesn't prefix pointers # msvc's runtime library doesn't prefix pointers
# with '0x' # with '0x'
# if socket_index[:2] != '0x': # if socket_index[:2] != '0x':
# continue # continue
if socket_index in sockets: if socket_index in sockets:
sockets[socket_index] += 1 sockets[socket_index] += 1
else: else:
sockets[socket_index] = 1 sockets[socket_index] = 1
items = sockets.items() items = list(sockets.items())
items.sort(lambda x, y: y[1] - x[1]) items.sort(lambda x, y: y[1] - x[1])
count = 0 count = 0
for i in items: for i in items:
print '%s: %d' % (i[0], i[1]) print('%s: %d' % (i[0], i[1]))
count += 1 count += 1
if count > 5: break if count > 5:
break
file.close() file.close()
socket_filter = items[0][0] socket_filter = items[0][0]
print '\nfocusing on socket %s' % socket_filter print('\nfocusing on socket %s' % socket_filter)
file = open(sys.argv[1], 'rb') file = open(sys.argv[1], 'rb')
out_file = 'utp.out%s' % socket_filter; out_file = 'utp.out%s' % socket_filter
out = open(out_file, 'wb') out = open(out_file, 'wb')
delay_samples = 'points lc rgb "blue"' delay_samples = 'points lc rgb "blue"'
@ -61,27 +66,27 @@ rtt = 'lines lc rgb "light-blue"'
send_buffer = 'lines lc rgb "light-red"'

# log-field name -> [axis label, which y-axis, gnuplot plot style]
metrics = {
    'our_delay': ['our delay (ms)', 'x1y2', delay_samples],
    'upload_rate': ['send rate (B/s)', 'x1y1', 'lines'],
    'max_window': ['cwnd (B)', 'x1y1', cwnd],
    'target_delay': ['target delay (ms)', 'x1y2', target_delay],
    'cur_window': ['bytes in-flight (B)', 'x1y1', window_size],
    'cur_window_packets': ['number of packets in-flight', 'x1y2', 'steps'],
    'packet_size': ['current packet size (B)', 'x1y2', 'steps'],
    'rtt': ['rtt (ms)', 'x1y2', rtt],
    'off_target': ['off-target (ms)', 'x1y2', off_target],
    'delay_sum': ['delay sum (ms)', 'x1y2', 'steps'],
    'their_delay': ['their delay (ms)', 'x1y2', delay_samples],
    'get_microseconds': ['clock (us)', 'x1y1', 'steps'],
    'wnduser': ['advertised window size (B)', 'x1y1', 'steps'],
    'ssthres': ['slow-start threshold (B)', 'x1y1', 'steps'],
    'delay_base': ['delay base (us)', 'x1y1', delay_base],
    'their_delay_base': ['their delay base (us)', 'x1y1', delay_base],
    'their_actual_delay': ['their actual delay (us)', 'x1y1', delay_samples],
    'actual_delay': ['actual_delay (us)', 'x1y1', delay_samples],
    'send_buffer': ['send buffer size (B)', 'x1y1', send_buffer],
    'recv_buffer': ['receive buffer size (B)', 'x1y1', 'lines']
}

histogram_quantization = 1
@ -99,49 +104,51 @@ delay_histogram = {}
packet_size_histogram = {}
window_size = {'0': 0, '1': 0}

# example log line:
# [35301484] 0x00ec1190: actual_delay:1021583 our_delay:102 their_delay:-1021345 off_target:297 max_window:2687
#   upload_rate:18942 delay_base:1021481154 delay_sum:-1021242 target_delay:400 acked_bytes:1441 cur_window:2882
#   scaled_gain:2.432

counter = 0

print("reading log file")

# NOTE(review): this diff hunk ends mid-loop; the loop body continues in
# a suppressed hunk of the original file
for line in file:
    if "UTP_Connect" in line:
        # remember the connect line as the plot title
        title = line[:-2]
        if socket_filter is not None:
            title += ' socket: %s' % socket_filter
        else:
            title += ' sum of all sockets'
        continue

    try:
        a = line.strip().split(" ")
        t = a[0][1:-1]
        socket_index = a[1][:-1]
    except (IndexError, ValueError):
        continue
    # msvc's runtime library doesn't prefix pointers with '0x'
    if socket_filter is not None and socket_index != socket_filter:
        continue

    counter += 1
    if counter % 300 == 0:
        # progress indicator
        print("\r%d " % counter, end=' ')

    if "lost." in line:
        packet_loss = packet_loss + 1
        continue
    if "Packet timeout" in line:
        packet_timeout = packet_timeout + 1
        continue
    if "sending packet" in line:
        v = line.split('size:')[1].split(' ')[0]
        packet_size_histogram[v] = 1 + packet_size_histogram.get(v, 0)

    if "our_delay:" not in line:
        continue
    # used for Logf timestamps
@ -161,159 +168,161 @@ for l in file:
# NOTE(review): this hunk starts mid-loop in the original file; the
# statements up to the first out.close() run once per matching log line
begin = t
t = t - begin
# print time. Convert from milliseconds to seconds
print('%f\t' % (float(t) / 1000.), end=' ', file=out)

# if t > 200000:
#     break

# on the first data line, record the column order we emit
fill_columns = not columns
for i in a[2:]:
    try:
        n, v = i.split(':')
    except ValueError:
        continue
    v = float(v)
    if n == "our_delay":
        bucket = int(v / histogram_quantization)
        delay_histogram[bucket] = 1 + delay_histogram.get(bucket, 0)
    if n not in metrics:
        continue
    if fill_columns:
        columns.append(n)
    if n == "max_window":
        # plot the sum of both sockets' windows
        window_size[socket_index] = v
        print('%f\t' % int(reduce(lambda x, y: x + y, window_size.values())), end=' ', file=out)
    else:
        print('%f\t' % v, end=' ', file=out)
print(float(packet_loss * 8000), float(packet_timeout * 8000), file=out)
packet_loss = 0
packet_timeout = 0

out.close()

out = open('%s.histogram' % out_file, 'wb')
for d, cnt in delay_histogram.items():
    print(float(d * histogram_quantization) + histogram_quantization / 2, cnt, file=out)
out.close()

out = open('%s_packet_size.histogram' % out_file, 'wb')
for d, cnt in packet_size_histogram.items():
    print(d, cnt, file=out)
out.close()
# the set of graphs to render: which metrics go on each, the output
# file's title, and the labels of the two y axes
plot = [
    {
        'data': ['max_window', 'send_buffer', 'cur_window', 'rtt'],
        'title': 'send-packet-size',
        'y1': 'Bytes',
        'y2': 'Time (ms)'
    },
    {
        'data': ['upload_rate', 'max_window', 'cur_window', 'wnduser', 'cur_window_packets', 'packet_size', 'rtt'],
        'title': 'slow-start',
        'y1': 'Bytes',
        'y2': 'Time (ms)'
    },
    {
        'data': ['max_window', 'cur_window', 'our_delay', 'target_delay', 'ssthres'],
        'title': 'cwnd',
        'y1': 'Bytes',
        'y2': 'Time (ms)'
    },
    {
        'data': ['our_delay', 'max_window', 'target_delay', 'cur_window', 'wnduser', 'cur_window_packets'],
        'title': 'uploading',
        'y1': 'Bytes',
        'y2': 'Time (ms)'
    },
    {
        'data': ['our_delay', 'max_window', 'target_delay', 'cur_window', 'send_buffer'],
        'title': 'uploading_packets',
        'y1': 'Bytes',
        'y2': 'Time (ms)'
    },
    {
        'data': ['their_delay', 'target_delay', 'rtt'],
        'title': 'their_delay',
        'y1': '',
        'y2': 'Time (ms)'
    },
    {
        'data': ['their_actual_delay', 'their_delay_base'],
        'title': 'their_delay_base',
        'y1': 'Time (us)',
        'y2': ''
    },
    {
        'data': ['our_delay', 'target_delay', 'rtt'],
        'title': 'our-delay',
        'y1': '',
        'y2': 'Time (ms)'
    },
    {
        'data': ['actual_delay', 'delay_base'],
        'title': 'our_delay_base',
        'y1': 'Time (us)',
        'y2': ''
    }
]
out = open('utp.gnuplot', 'w+')
files = ''

# delay histogram
# print('set xtics 0, 20', file=out)
print("set term png size 1280,800", file=out)
print('set output "%s.delays.png"' % out_file, file=out)
print('set xrange [0:200]', file=out)
print('set xlabel "delay (ms)"', file=out)
print('set boxwidth 1', file=out)
print('set ylabel "number of packets"', file=out)
print('plot "%s.histogram" using 1:2 with boxes fs solid 0.3' % out_file, file=out)
files += out_file + '.delays.png '

# packet size histogram (log scale, packet counts vary by orders of magnitude)
print('set output "%s.packet_sizes.png"' % out_file, file=out)
print('set xrange [0:*]', file=out)
print('set xlabel "packet size (B)"', file=out)
print('set boxwidth 1', file=out)
print('set ylabel "number of packets sent"', file=out)
print('set logscale y', file=out)
print('plot "%s_packet_size.histogram" using 1:2 with boxes fs solid 0.3' % out_file, file=out)
print('set nologscale y', file=out)
files += out_file + '.packet_sizes.png '

print("set style data steps", file=out)
# print("set yrange [0:*]", file=out)
print("set y2range [*:*]", file=out)

# set hidden3d
# set title "Peer bandwidth distribution"
# set xlabel "Ratio"

# one png per configured graph
for p in plot:
    print('set title "%s %s"' % (p['title'], title), file=out)
    print('set xlabel "time (s)"', file=out)
    print('set ylabel "%s"' % p['y1'], file=out)
    print("set tics nomirror", file=out)
    print('set y2tics', file=out)
    print('set y2label "%s"' % p['y2'], file=out)
    print('set xrange [0:*]', file=out)
    print("set key box", file=out)
    print("set term png size 1280,800", file=out)
    print('set output "%s-%s.png"' % (out_file, p['title']), file=out)
    files += '%s-%s.png ' % (out_file, p['title'])
    comma = ''
    print("plot", end=' ', file=out)
    for c in p['data']:
        if c not in metrics:
            continue
        # column 1 is the timestamp, data columns start at 2
        i = columns.index(c)
        print('%s"%s" using ($1/1000):%d title "%s-%s" axes %s with %s' %
              (comma, out_file, i + 2, metrics[c][0], metrics[c][1], metrics[c][1], metrics[c][2]),
              end=' ', file=out)
        comma = ', '
    print('', file=out)

out.close()

os.system("gnuplot utp.gnuplot")
os.system("open %s" % files)

View File

@ -6,89 +6,114 @@ import shutil
import subprocess
import sys

cache_size = 300  # in MiB

toolset = ''
if len(sys.argv) > 1:
    toolset = sys.argv[1]

# build the two executables the benchmark drives
ret = os.system('cd ../examples && bjam profile statistics=on %s stage_client_test' % toolset)
if ret != 0:
    print('ERROR: build failed: %d' % ret)
    sys.exit(1)

ret = os.system('cd ../examples && bjam release %s stage_connection_tester' % toolset)
if ret != 0:
    print('ERROR: build failed: %d' % ret)
    sys.exit(1)

# best-effort cleanup of state from a previous run; report but don't abort
try:
    os.remove('.ses_state')
except Exception as e:
    print(e)
try:
    shutil.rmtree('.resume')
except Exception as e:
    print(e)
try:
    shutil.rmtree('cpu_benchmark')
except Exception as e:
    print(e)

if not os.path.exists('cpu_benchmark.torrent'):
    ret = os.system('../examples/connection_tester gen-torrent -s 10000 -n 15 -t cpu_benchmark.torrent')
    if ret != 0:
        print('ERROR: connection_tester failed: %d' % ret)
        sys.exit(1)

try:
    shutil.rmtree('t')
except OSError:
    pass
def run_test(name, test_cmd, client_arg, num_peers):
    """Run one benchmark pass.

    Starts client_test seeding/downloading cpu_benchmark.torrent, drives it
    with connection_tester, then collects gprof output, a profile graph and
    session stats under logs_<name>/.
    """
    output_dir = 'logs_%s' % name

    # fresh output directory; ignore errors if it doesn't exist yet
    try:
        shutil.rmtree(output_dir)
    except OSError:
        pass
    try:
        os.mkdir(output_dir)
    except OSError:
        pass

    port = (int(time.time()) % 50000) + 2000

    try:
        shutil.rmtree('session_stats')
    except OSError:
        pass
    try:
        shutil.rmtree('session_stats_report')
    except OSError:
        pass

    start = time.time()
    # fix: the template uses %-style placeholders, so it must be
    # interpolated with %, not str.format() (which ignores positional
    # args it has no {} fields for and returns the template unchanged);
    # also keep a space between the concatenated literal pieces
    client_cmd = ('../examples/client_test -p %d cpu_benchmark.torrent -k -z -H -X -q 120 %s '
                  '-h -c %d -T %d -C %d -f %s/events.log') % (
        port, client_arg, num_peers * 2, num_peers * 2, cache_size * 16, output_dir)
    test_cmd = '../examples/connection_tester %s -c %d -d 127.0.0.1 -p %d -t cpu_benchmark.torrent' % (
        test_cmd, num_peers, port)

    client_out = open('%s/client.out' % output_dir, 'w+')
    test_out = open('%s/test.out' % output_dir, 'w+')
    print(client_cmd)
    c = subprocess.Popen(client_cmd.split(' '), stdout=client_out, stderr=client_out, stdin=subprocess.PIPE)
    time.sleep(2)
    print(test_cmd)
    t = subprocess.Popen(test_cmd.split(' '), stdout=test_out, stderr=test_out)
    t.wait()
    end = time.time()

    # ask the client to quit gracefully, then wait for it
    try:
        c.communicate('q')
    except BaseException:
        pass
    c.wait()

    client_out.close()
    test_out.close()

    print('runtime %d seconds' % (end - start))
    print('analyzing proile...')
    os.system('gprof ../examples/client_test >%s/gprof.out' % output_dir)
    print('generating profile graph...')
    os.system('python gprof2dot.py --strip <%s/gprof.out | dot -Tpng -o %s/cpu_profile.png' % (output_dir, output_dir))

    os.system('python parse_session_stats.py session_stats/*.log')
    try:
        shutil.move('session_stats_report', '%s/session_stats_report' % output_dir)
    except OSError:
        pass
    try:
        shutil.move('session_stats', '%s/session_stats' % output_dir)
    except OSError:
        pass


run_test('download', 'upload', '', 50)
run_test('upload', 'download', '-G', 20)

View File

@ -7,48 +7,54 @@ import re
version = (int(sys.argv[1]), int(sys.argv[2]), int(sys.argv[3]), int(sys.argv[4])) version = (int(sys.argv[1]), int(sys.argv[2]), int(sys.argv[3]), int(sys.argv[4]))
def v(version):
    """Encode each version component as a single peer-id character.

    0-9 map to '0'-'9', 10 and up map to 'A', 'B', ... Returns a tuple of
    one-character strings, one per component.
    """
    ret = ()
    for i in version:
        # fix off-by-one: the original `i < 9` sent 9 down the letter
        # branch, encoding it as '@' instead of '9'
        if i < 10:
            ret = ret + (chr(ord('0') + i),)
        else:
            ret = ret + (chr(ord('A') + i - 10),)
    return ret
# short hash of the current git HEAD, embedded as LIBTORRENT_REVISION
revision = os.popen('git log -1 --format=format:%h').read().strip()


def substitute_file(name):
    """Rewrite version/revision lines in the given file, in place.

    Which lines are patched depends on the file's extension: C++ headers,
    configure.ac, CMake files, reST/python docs, Jamfiles, setup.py and
    settings_pack.cpp each have their own pattern. Uses the module-level
    `version` tuple and `revision` string.
    """
    subst = ''
    f = open(name)
    for line in f:
        if '#define LIBTORRENT_VERSION_MAJOR' in line and name.endswith('.hpp'):
            line = '#define LIBTORRENT_VERSION_MAJOR %d\n' % version[0]
        elif '#define LIBTORRENT_VERSION_MINOR' in line and name.endswith('.hpp'):
            line = '#define LIBTORRENT_VERSION_MINOR %d\n' % version[1]
        elif '#define LIBTORRENT_VERSION_TINY' in line and name.endswith('.hpp'):
            line = '#define LIBTORRENT_VERSION_TINY %d\n' % version[2]
        elif '#define LIBTORRENT_VERSION ' in line and name.endswith('.hpp'):
            line = '#define LIBTORRENT_VERSION "%d.%d.%d.%d"\n' % (version[0], version[1], version[2], version[3])
        elif '#define LIBTORRENT_REVISION ' in line and name.endswith('.hpp'):
            line = '#define LIBTORRENT_REVISION "%s"\n' % revision
        elif 'AC_INIT([libtorrent-rasterbar]' in line and name.endswith('.ac'):
            line = 'AC_INIT([libtorrent-rasterbar],[%d.%d.%d],[arvid@libtorrent.org],\n' % (
                version[0], version[1], version[2])
        elif 'set (VERSION ' in line and name.endswith('.txt'):
            line = 'set (VERSION "%d.%d.%d")\n' % (version[0], version[1], version[2])
        elif ':Version: ' in line and (name.endswith('.rst') or name.endswith('.py')):
            line = ':Version: %d.%d.%d\n' % (version[0], version[1], version[2])
        elif 'VERSION = ' in line and name.endswith('Jamfile'):
            line = 'VERSION = %d.%d.%d ;\n' % (version[0], version[1], version[2])
        elif 'version=' in line and name.endswith('setup.py'):
            line = "\tversion = '%d.%d.%d',\n" % (version[0], version[1], version[2])
        elif "version = '" in line and name.endswith('setup.py'):
            line = "\tversion = '%d.%d.%d',\n" % (version[0], version[1], version[2])
        elif '"-LT' in line and name.endswith('settings_pack.cpp'):
            # peer-id fingerprint, e.g. "-LT1230-"
            line = re.sub('"-LT[0-9A-Za-z]{4}-"', '"-LT%c%c%c%c-"' % v(version), line)

        subst += line

    f.close()
    open(name, 'w+').write(subst)


substitute_file('include/libtorrent/version.hpp')
substitute_file('docs/gen_reference_doc.py')
substitute_file('src/settings_pack.cpp')
for i in glob.glob('docs/*.rst'):
    substitute_file(i)
substitute_file('Jamfile')

View File

@ -1,36 +1,35 @@
#!/usr/bin/env python

import glob
import datetime

this_year = datetime.date.today().year
print('current year: %d' % this_year)


def update_file(name):
    """Extend the 'Copyright (c) <year> ... Arvid Norberg' line in the
    given file to cover the current year, rewriting the file in place.

    Assumes the year starts exactly 14 characters after 'Copyright (c) '
    and, for ranges, that the end year is 4 digits long.
    """
    subst = ''
    f = open(name)
    for line in f:
        if 'Copyright (c) ' in line and 'Arvid Norberg' in line:
            year_idx = line.index('Copyright (c) ')
            first_year = int(line[year_idx + 14: year_idx + 18])
            if first_year != this_year:
                if line[year_idx + 18] == '-':
                    # already a range: replace the end year
                    line = line[:year_idx + 19] + str(this_year) + line[year_idx + 23:]
                else:
                    # single year: turn it into a range
                    line = line[:year_idx + 18] + '-' + str(this_year) + line[year_idx + 18:]
        subst += line

    f.close()
    open(name, 'w+').write(subst)


for i in glob.glob('src/*.cpp') + \
        glob.glob('include/libtorrent/*.hpp') + \
        glob.glob('include/libtorrent/extensions/*.hpp') + \
        glob.glob('include/libtorrent/kademlia/*.hpp') + \
        glob.glob('src/kademlia/*.cpp') + \
        ['COPYING', 'LICENSE', 'AUTHORS']:
    update_file(i)