merged RC_1_1

This commit is contained in:
arvidn 2017-09-12 00:22:55 +02:00
commit 686e3ed27b
6 changed files with 141 additions and 110 deletions

View File

@ -79,6 +79,7 @@
1.1.5 release
* fix python binding for torrent_handle, to be hashable
* fix IPv6 tracker support by performing the second announce in more cases
* fix utf-8 encoding check in torrent parser
* fix infinite loop when parsing maliciously crafted torrents

View File

@ -105,30 +105,12 @@ def print_peer_info(console, peers):
out += '%2d ' % p.download_queue_length
out += '%2d ' % p.upload_queue_length
if p.flags & lt.peer_info.interesting:
out += 'I'
else:
out += '.'
if p.flags & lt.peer_info.choked:
out += 'C'
else:
out += '.'
if p.flags & lt.peer_info.remote_interested:
out += 'i'
else:
out += '.'
if p.flags & lt.peer_info.remote_choked:
out += 'c'
else:
out += '.'
if p.flags & lt.peer_info.supports_extensions:
out += 'e'
else:
out += '.'
if p.flags & lt.peer_info.local_connection:
out += 'l'
else:
out += 'r'
out += 'I' if p.flags & lt.peer_info.interesting else '.'
out += 'C' if p.flags & lt.peer_info.choked else '.'
out += 'i' if p.flags & lt.peer_info.remote_interested else '.'
out += 'c' if p.flags & lt.peer_info.remote_choked else '.'
out += 'e' if p.flags & lt.peer_info.supports_extensions else '.'
out += 'l' if p.flags & lt.peer_info.local_connection else 'r'
out += ' '
if p.downloading_piece_index >= 0:
@ -171,6 +153,23 @@ def print_download_queue(console, download_queue):
write_line(console, out)
def add_torrent(ses, filename, options):
    """Build an add_torrent_params for *filename* (a .torrent path or a
    magnet URI) and queue it on the session *ses* asynchronously.

    options must provide ``save_path`` (directory used for storage and
    for locating a previously saved ``<name>.fastresume`` file).
    """
    if filename.startswith('magnet:'):
        # parse_magnet_uri (fixed typo: was parse_magnet_uti) returns a
        # fully populated add_torrent_params for the magnet link.
        atp = lt.parse_magnet_uri(filename)
    else:
        atp = lt.add_torrent_params()
        atp.ti = lt.torrent_info(filename)
        # Best-effort: load fast-resume data saved by a previous run.
        # (Fixed: 'at.resume_data' and undefined 'info' meant the original
        # always raised and silently skipped this under a bare except.)
        try:
            atp.resume_data = open(os.path.join(
                options.save_path,
                atp.ti.name() + '.fastresume'), 'rb').read()
        except OSError:
            # missing/unreadable resume file is expected on first run
            pass
    atp.save_path = options.save_path
    atp.storage_mode = lt.storage_mode_t.storage_mode_sparse
    # duplicate_is_error was listed twice in the original; once suffices.
    atp.flags |= lt.torrent_flags.duplicate_is_error \
        | lt.torrent_flags.auto_managed
    ses.async_add_torrent(atp)
def main():
from optparse import OptionParser
@ -216,51 +215,26 @@ def main():
if options.max_download_rate <= 0:
options.max_download_rate = -1
ses = lt.session({'user_agent': 'python_client/' + lt.__version__,
'listen_interfaces':'0.0.0.0:' + str(options.port),
settings = { 'user_agent': 'python_client/' + lt.__version__,
'listen_interfaces': '0.0.0.0:%d' % options.port,
'download_rate_limit': int(options.max_download_rate),
'upload_rate_limit': int(options.max_upload_rate)
})
'upload_rate_limit': int(options.max_upload_rate),
'alert_mask': lt.alert.category_t.all_categories
}
if options.proxy_host != '':
ps = lt.proxy_settings()
ps.type = lt.proxy_type.http
ps.hostname = options.proxy_host.split(':')[0]
ps.port = int(options.proxy_host.split(':')[1])
ses.set_proxy(ps)
settings['proxy_hostname'] = options.proxy_host.split(':')[0]
settings['proxy_type'] = lt.proxy_type_t.http
settings['proxy_port'] = options.proxy_host.split(':')[1]
handles = []
alerts = []
ses = lt.session(settings)
# map torrent_handle to torrent_status
torrents = {}
alerts_log = []
for f in args:
atp = {}
atp["save_path"] = options.save_path
atp["storage_mode"] = lt.storage_mode_t.storage_mode_sparse
atp["flags"] = lt.torrent_flags.duplicate_is_error \
| lt.torrent_flags.auto_managed
if f.startswith('magnet:') or f.startswith(
'http://') or f.startswith('https://'):
atp["url"] = f
else:
info = lt.torrent_info(f)
print('Adding \'%s\'...' % info.name())
try:
atp["resume_data"] = open(os.path.join(
options.save_path,
info.name() + '.fastresume'), 'rb').read()
except:
pass
atp["ti"] = info
h = ses.add_torrent(atp)
handles.append(h)
h.set_max_connections(60)
h.set_max_uploads(-1)
add_torrent(ses, f, options)
if os.name == 'nt':
console = WindowsConsole()
@ -273,49 +247,44 @@ def main():
out = ''
for h in handles:
s = h.status()
if s.has_metadata:
name = h.torrent_file().name()[:40]
else:
name = '-'
out += 'name: %-40s\n' % name
for h,t in torrents.items():
out += 'name: %-40s\n' % t.name[:40]
if s.state != lt.torrent_status.seeding:
state_str = ['queued', 'checking', 'downloading metadata',
'downloading', 'finished', 'seeding',
if t.state != lt.torrent_status.seeding:
state_str = ['queued', 'checking', 'downloading metadata', \
'downloading', 'finished', 'seeding', \
'allocating', 'checking fastresume']
out += state_str[s.state] + ' '
out += state_str[t.state] + ' '
out += '%5.4f%% ' % (s.progress*100)
out += progress_bar(s.progress, 49)
out += '%5.4f%% ' % (t.progress*100)
out += progress_bar(t.progress, 49)
out += '\n'
out += 'total downloaded: %d Bytes\n' % s.total_done
out += 'total downloaded: %d Bytes\n' % t.total_done
out += 'peers: %d seeds: %d distributed copies: %d\n' % \
(s.num_peers, s.num_seeds, s.distributed_copies)
(t.num_peers, t.num_seeds, t.distributed_copies)
out += '\n'
out += 'download: %s/s (%s) ' \
% (add_suffix(s.download_rate), add_suffix(s.total_download))
% (add_suffix(t.download_rate), add_suffix(t.total_download))
out += 'upload: %s/s (%s) ' \
% (add_suffix(s.upload_rate), add_suffix(s.total_upload))
% (add_suffix(t.upload_rate), add_suffix(t.total_upload))
if s.state != lt.torrent_status.seeding:
out += 'info-hash: %s\n' % h.info_hash()
out += 'next announce: %s\n' % s.next_announce
out += 'tracker: %s\n' % s.current_tracker
if t.state != lt.torrent_status.seeding:
out += 'info-hash: %s\n' % t.info_hash
out += 'next announce: %s\n' % t.next_announce
out += 'tracker: %s\n' % t.current_tracker
write_line(console, out)
print_peer_info(console, h.get_peer_info())
print_download_queue(console, h.get_download_queue())
print_peer_info(console, t.handle.get_peer_info())
print_download_queue(console, t.handle.get_download_queue())
if s.state != lt.torrent_status.seeding:
if t.state != lt.torrent_status.seeding:
try:
out = '\n'
fp = h.file_progress()
ti = h.get_torrent_info()
ti = t.torrent_file
for f, p in zip(ti.files(), fp):
out += progress_bar(p / float(f.size), 20)
out += ' ' + f.path + '\n'
@ -327,39 +296,65 @@ def main():
write_line(console, '(q)uit), (p)ause), (u)npause), (r)eannounce\n')
write_line(console, 76 * '-' + '\n')
# only print the last 20 alerts
alerts = ses.pop_alerts()[-20:]
alerts = ses.pop_alerts()
for a in alerts:
if type(a) == str:
write_line(console, a + '\n')
else:
write_line(console, a.message() + '\n')
alerts_log.append(a.message())
# add new torrents to our list of torrent_status
if type(a) == lt.add_torrent_alert:
h = a.handle
h.set_max_connections(60)
h.set_max_uploads(-1)
torrents[h] = h.status()
# update our torrent_status array for torrents that have
# changed some of their state
if type(a) == lt.state_update_alert:
for s in a.status:
torrents[s.handle] = s
if len(alerts_log) > 20:
alerts_log = alerts_log[-20:]
for a in alerts_log:
write_line(console, a + '\n')
c = console.sleep_and_input(0.5)
if not c:
continue
ses.post_torrent_updates()
if not c: continue
if c == 'r':
for h in handles:
h.force_reannounce()
for h in torrents.keys(): h.force_reannounce()
elif c == 'q':
alive = False
elif c == 'p':
for h in handles:
h.pause()
for h in torrents.keys(): h.pause()
elif c == 'u':
for h in handles:
h.resume()
for h in torrents.keys(): h.resume()
ses.pause()
for h in handles:
if not h.is_valid() or not s.has_metadata:
for h,t in torrents.items():
if not h.is_valid() or not t.has_metadata:
continue
data = lt.bencode(h.write_resume_data())
open(os.path.join(options.save_path, h.get_torrent_info().name() +
'.fastresume'), 'wb').write(data)
h.save_resume_data()
while len(torrents) > 0:
alerts = ses.pop_alerts()
for a in alerts:
if type(a) == lt.save_resume_data_alert:
print(a)
data = lt.bencode(a.resume_data)
h = a.handle
if h in torrents:
open(os.path.join(options.save_path, torrents[h].name + '.fastresume'), 'wb').write(data)
del torrents[h]
if type(a) == lt.save_resume_data_failed_alert:
h = a.handle
if h in torrents:
print('failed to save resume data for ', torrents[h].name)
del torrents[h]
time.sleep(0.5)
main()

View File

@ -337,7 +337,7 @@ void bind_alert()
class_<torrent_alert, bases<alert>, noncopyable>(
"torrent_alert", no_init)
.def_readonly("handle", &torrent_alert::handle)
.add_property("handle", make_getter(&torrent_alert::handle, by_value()))
;
class_<tracker_alert, bases<torrent_alert>, noncopyable>(

View File

@ -14,6 +14,7 @@
#include <libtorrent/bdecode.hpp>
#include <libtorrent/bencode.hpp>
#include <libtorrent/read_resume_data.hpp>
#include <libtorrent/write_resume_data.hpp>
#include <libtorrent/torrent_info.hpp>
#include <libtorrent/kademlia/item.hpp> // for sign_mutable_item
#include <libtorrent/alert.hpp>
@ -671,7 +672,7 @@ void bind_session()
// .def_readwrite("storage", &add_torrent_params::storage)
.add_property("file_priorities", PROP(&add_torrent_params::file_priorities))
.def_readwrite("trackerid", &add_torrent_params::trackerid)
.def_readwrite("flags", &add_torrent_params::flags)
.add_property("flags", PROP(&add_torrent_params::flags))
.def_readwrite("info_hash", &add_torrent_params::info_hash)
.def_readwrite("max_uploads", &add_torrent_params::max_uploads)
.def_readwrite("max_connections", &add_torrent_params::max_connections)
@ -991,6 +992,8 @@ void bind_session()
def("min_memory_usage", min_memory_usage_wrapper);
def("default_settings", default_settings_wrapper);
def("read_resume_data", read_resume_data_wrapper);
def("write_resume_data", write_resume_data);
def("write_resume_data_buf", write_resume_data_buf);
class_<stats_metric>("stats_metric")
.def_readonly("name", &stats_metric::name)

View File

@ -481,6 +481,7 @@ void bind_torrent_handle()
.def(self == self)
.def(self != self)
.def(self < self)
.def("__hash__", (std::size_t (*)(torrent_handle const&))&libtorrent::hash_value)
.def("get_peer_info", get_peer_info)
.def("status", _(&torrent_handle::status), arg("flags") = 0xffffffff)
.def("get_download_queue", get_download_queue)

View File

@ -76,6 +76,37 @@ class test_torrent_handle(unittest.TestCase):
# also test the overload that takes a list of piece->priority mappings
self.h.prioritize_pieces([(0, 1)])
self.assertEqual(self.h.piece_priorities(), [1])
def test_torrent_handle_in_set(self):
self.setup()
torrents = set()
torrents.add(self.h)
# get another instance of a torrent_handle that represents the same
# torrent. Make sure that when we add it to a set, it just replaces the
# existing object
t = self.ses.get_torrents()
self.assertEqual(len(t), 1)
for h in t:
torrents.add(h)
self.assertEqual(len(torrents), 1)
def test_torrent_handle_in_dict(self):
self.setup()
torrents = {}
torrents[self.h] = 'foo'
# get another instance of a torrent_handle that represents the same
# torrent. Make sure that when we add it to a dict, it just replaces the
# existing object
t = self.ses.get_torrents()
self.assertEqual(len(t), 1)
for h in t:
torrents[h] = 'bar'
self.assertEqual(len(torrents), 1)
self.assertEqual(torrents[self.h], 'bar')
def test_replace_trackers(self):
self.setup()