remove rss_reader.py, since RSS support has been deprecated. fix python binding for torrent_handle to be hashable. update client_test.py to not use deprecated APIs and to follow best (libtorrent) practice
parent 1cd40ee7e3
commit ccbd6cbcfe
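The "deprecated APIs" referred to in the message are mainly the pre-1.1 session calls (session_settings(), set_settings(), listen_on(), set_alert_mask(), pop_alert()). A rough, non-authoritative sketch of the 1.1-style replacements used throughout the updated client:

    import libtorrent as lt

    # configure the session with a settings dict at construction time,
    # instead of session_settings()/set_settings()/listen_on()/set_alert_mask()
    ses = lt.session({'listen_interfaces': '0.0.0.0:6881',
                      'alert_mask': lt.alert.category_t.all_categories})

    # drain alerts in batches with pop_alerts(), instead of pop_alert() one at a time
    for a in ses.pop_alerts():
        print(a.message())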
@@ -1,6 +1,7 @@
 1.1.5 release
 
+* fix python binding for torrent_handle, to be hashable
 * fix IPv6 tracker support by performing the second announce in more cases
 * fix utf-8 encoding check in torrent parser
 * fix infinite loop when parsing maliciously crafted torrents
@@ -3,7 +3,6 @@ EXTRA_DIST = \
   Jamfile \
   setup.py \
   client.py \
-  rss_reader.py \
   simple_client.py \
   make_torrent.py \
   src/alert.cpp \
@@ -96,18 +96,12 @@ def print_peer_info(console, peers):
         out += '%2d ' % p.download_queue_length
         out += '%2d ' % p.upload_queue_length
 
-        if p.flags & lt.peer_info.interesting: out += 'I'
-        else: out += '.'
-        if p.flags & lt.peer_info.choked: out += 'C'
-        else: out += '.'
-        if p.flags & lt.peer_info.remote_interested: out += 'i'
-        else: out += '.'
-        if p.flags & lt.peer_info.remote_choked: out += 'c'
-        else: out += '.'
-        if p.flags & lt.peer_info.supports_extensions: out += 'e'
-        else: out += '.'
-        if p.flags & lt.peer_info.local_connection: out += 'l'
-        else: out += 'r'
+        out += 'I' if p.flags & lt.peer_info.interesting else '.'
+        out += 'C' if p.flags & lt.peer_info.choked else '.'
+        out += 'i' if p.flags & lt.peer_info.remote_interested else '.'
+        out += 'c' if p.flags & lt.peer_info.remote_choked else '.'
+        out += 'e' if p.flags & lt.peer_info.supports_extensions else '.'
+        out += 'l' if p.flags & lt.peer_info.local_connection else 'r'
         out += ' '
 
         if p.downloading_piece_index >= 0:
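The if/else chains above are collapsed into conditional expressions; an equivalent, table-driven formulation (a sketch, not part of the patch) would map each flag bit to its status character:

    # inside print_peer_info's loop, given a peer_info object p and the string out:
    flag_chars = [
        (lt.peer_info.interesting, 'I', '.'),
        (lt.peer_info.choked, 'C', '.'),
        (lt.peer_info.remote_interested, 'i', '.'),
        (lt.peer_info.remote_choked, 'c', '.'),
        (lt.peer_info.supports_extensions, 'e', '.'),
        (lt.peer_info.local_connection, 'l', 'r'),
    ]
    out += ''.join(on if p.flags & flag else off for flag, on, off in flag_chars)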
@@ -149,6 +143,24 @@ def print_download_queue(console, download_queue):
 
     write_line(console, out)
 
+def add_torrent(ses, filename, options):
+    atp = {}
+    if filename.startswith('magnet:'):
+        atp = lt.parse_magnet_uri(filename)
+    else:
+        atp['ti'] = lt.torrent_info(filename)
+        try:
+            atp["resume_data"] = open(os.path.join(options.save_path, atp['ti'].name() + '.fastresume'), 'rb').read()
+        except:
+            pass
+
+    atp["save_path"] = options.save_path
+    atp["storage_mode"] = lt.storage_mode_t.storage_mode_sparse
+    atp["paused"] = False
+    atp["auto_managed"] = True
+    atp["duplicate_is_error"] = True
+    ses.async_add_torrent(atp)
+
 def main():
     from optparse import OptionParser
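async_add_torrent() only queues the add; the resulting handle is delivered later through an add_torrent_alert. A minimal sketch of that round trip (the file name 'test.torrent' is a placeholder, not from the patch):

    import libtorrent as lt

    ses = lt.session({'alert_mask': lt.alert.category_t.all_categories})
    ses.async_add_torrent({'ti': lt.torrent_info('test.torrent'),  # placeholder file
                           'save_path': '.'})

    handle = None
    while handle is None:
        ses.wait_for_alert(1000)                     # block up to 1 s for alerts
        for a in ses.pop_alerts():
            if isinstance(a, lt.add_torrent_alert):  # carries the new handle
                handle = a.handle
    print('added:', handle.status().name)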
@@ -190,53 +202,26 @@ def main():
     if options.max_download_rate <= 0:
         options.max_download_rate = -1
 
-    settings = lt.session_settings()
-    settings.user_agent = 'python_client/' + lt.version
-
-    ses = lt.session()
-    ses.set_download_rate_limit(int(options.max_download_rate))
-    ses.set_upload_rate_limit(int(options.max_upload_rate))
-    ses.listen_on(options.port, options.port + 10)
-    ses.set_settings(settings)
-    ses.set_alert_mask(0xfffffff)
+    settings = { 'user_agent': 'python_client/' + lt.__version__,
+        'download_rate_limit': int(options.max_download_rate),
+        'upload_rate_limit': int(options.max_upload_rate),
+        'listen_interfaces': '0.0.0.0:%d' % options.port,
+        'alert_mask': lt.alert.category_t.all_categories
+    }
 
     if options.proxy_host != '':
-        ps = lt.proxy_settings()
-        ps.type = lt.proxy_type.http
-        ps.hostname = options.proxy_host.split(':')[0]
-        ps.port = int(options.proxy_host.split(':')[1])
-        ses.set_proxy(ps)
-
-    handles = []
-    alerts = []
+        settings['proxy_hostname'] = options.proxy_host.split(':')[0]
+        settings['proxy_type'] = lt.proxy_type_t.http
+        settings['proxy_port'] = options.proxy_host.split(':')[1]
+
+    ses = lt.session(settings)
+
+    # map torrent_handle to torrent_status
+    torrents = {}
+    alerts_log = []
 
     for f in args:
-        atp = {}
-        atp["save_path"] = options.save_path
-        atp["storage_mode"] = lt.storage_mode_t.storage_mode_sparse
-        atp["paused"] = False
-        atp["auto_managed"] = True
-        atp["duplicate_is_error"] = True
-        if f.startswith('magnet:') or f.startswith('http://') or f.startswith('https://'):
-            atp["url"] = f
-        else:
-            info = lt.torrent_info(f)
-            print('Adding \'%s\'...' % info.name())
-
-            try:
-                atp["resume_data"] = open(os.path.join(options.save_path, info.name() + '.fastresume'), 'rb').read()
-            except:
-                pass
-
-            atp["ti"] = info
-
-        h = ses.add_torrent(atp)
-
-        handles.append(h)
-
-        h.set_max_connections(60)
-        h.set_max_uploads(-1)
+        add_torrent(ses, f, options)
 
     if os.name == 'nt':
         console = WindowsConsole()
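With the settings expressed as a plain dict, later adjustments go through apply_settings() using the same keys. A small sketch of that pattern (the listen interface here is a placeholder):

    import libtorrent as lt

    ses = lt.session({'user_agent': 'python_client/' + lt.__version__,
                      'listen_interfaces': '0.0.0.0:6881'})

    # rate limits can be changed at runtime using the same setting names
    ses.apply_settings({'download_rate_limit': 0,          # 0 means unlimited
                        'upload_rate_limit': 100 * 1024})  # 100 kiB/s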
@@ -249,50 +234,44 @@ def main():
 
         out = ''
 
-        for h in handles:
-            if h.has_metadata():
-                name = h.get_torrent_info().name()[:40]
-            else:
-                name = '-'
-            out += 'name: %-40s\n' % name
-
-            s = h.status()
-
-            if s.state != lt.torrent_status.seeding:
+        for h, t in torrents.items():
+            out += 'name: %-40s\n' % t.name[:40]
+
+            if t.state != lt.torrent_status.seeding:
                 state_str = ['queued', 'checking', 'downloading metadata', \
                     'downloading', 'finished', 'seeding', \
                     'allocating', 'checking fastresume']
-                out += state_str[s.state] + ' '
+                out += state_str[t.state] + ' '
 
-                out += '%5.4f%% ' % (s.progress*100)
-                out += progress_bar(s.progress, 49)
+                out += '%5.4f%% ' % (t.progress*100)
+                out += progress_bar(t.progress, 49)
                 out += '\n'
 
-                out += 'total downloaded: %d Bytes\n' % s.total_done
+                out += 'total downloaded: %d Bytes\n' % t.total_done
                 out += 'peers: %d seeds: %d distributed copies: %d\n' % \
-                    (s.num_peers, s.num_seeds, s.distributed_copies)
+                    (t.num_peers, t.num_seeds, t.distributed_copies)
                 out += '\n'
 
             out += 'download: %s/s (%s) ' \
-                % (add_suffix(s.download_rate), add_suffix(s.total_download))
+                % (add_suffix(t.download_rate), add_suffix(t.total_download))
             out += 'upload: %s/s (%s) ' \
-                % (add_suffix(s.upload_rate), add_suffix(s.total_upload))
+                % (add_suffix(t.upload_rate), add_suffix(t.total_upload))
 
-            if s.state != lt.torrent_status.seeding:
-                out += 'info-hash: %s\n' % h.info_hash()
-                out += 'next announce: %s\n' % s.next_announce
-                out += 'tracker: %s\n' % s.current_tracker
+            if t.state != lt.torrent_status.seeding:
+                out += 'info-hash: %s\n' % t.info_hash
+                out += 'next announce: %s\n' % t.next_announce
+                out += 'tracker: %s\n' % t.current_tracker
 
             write_line(console, out)
 
-            print_peer_info(console, h.get_peer_info())
-            print_download_queue(console, h.get_download_queue())
+            print_peer_info(console, t.handle.get_peer_info())
+            print_download_queue(console, t.handle.get_download_queue())
 
-            if s.state != lt.torrent_status.seeding:
+            if t.state != lt.torrent_status.seeding:
                 try:
                     out = '\n'
-                    fp = h.file_progress()
-                    ti = h.get_torrent_info()
+                    fp = t.handle.file_progress()
+                    ti = t.torrent_file
                     for f,p in zip(ti.files(), fp):
                         out += progress_bar(p / float(f.size), 20)
                         out += ' ' + f.path + '\n'
@@ -304,40 +283,65 @@ def main():
 
         write_line(console, '(q)uit), (p)ause), (u)npause), (r)eannounce\n')
         write_line(console, 76 * '-' + '\n')
 
-        while 1:
-            a = ses.pop_alert()
-            if not a: break
-            alerts.append(a)
-
-        if len(alerts) > 8:
-            del alerts[:len(alerts) - 8]
+        alerts = ses.pop_alerts()
 
         for a in alerts:
-            if type(a) == str:
-                write_line(console, a + '\n')
-            else:
-                write_line(console, a.message() + '\n')
+            alerts_log.append(a.message())
+
+            # add new torrents to our list of torrent_status
+            if type(a) == lt.add_torrent_alert:
+                h = a.handle
+                h.set_max_connections(60)
+                h.set_max_uploads(-1)
+                torrents[h] = h.status()
+
+            # update our torrent_status array for torrents that have
+            # changed some of their state
+            if type(a) == lt.state_update_alert:
+                for s in a.status:
+                    torrents[s.handle] = s
+
+        if len(alerts_log) > 8:
+            del alerts_log[:len(alerts_log) - 8]
+
+        for a in alerts_log:
+            write_line(console, a + '\n')
 
         c = console.sleep_and_input(0.5)
 
-        if not c:
-            continue
+        ses.post_torrent_updates()
+        if not c: continue
 
         if c == 'r':
-            for h in handles: h.force_reannounce()
+            for h in torrents.keys(): h.force_reannounce()
         elif c == 'q':
             alive = False
         elif c == 'p':
-            for h in handles: h.pause()
+            for h in torrents.keys(): h.pause()
         elif c == 'u':
-            for h in handles: h.resume()
+            for h in torrents.keys(): h.resume()
 
     ses.pause()
-    for h in handles:
-        if not h.is_valid() or not h.has_metadata():
+    for h, t in torrents.items():
+        if not h.is_valid() or not t.has_metadata:
             continue
-        data = lt.bencode(h.write_resume_data())
-        open(os.path.join(options.save_path, h.get_torrent_info().name() + '.fastresume'), 'wb').write(data)
+        h.save_resume_data()
+
+    while len(torrents) > 0:
+        alerts = ses.pop_alerts()
+        for a in alerts:
+            if type(a) == lt.save_resume_data_alert:
+                data = lt.bencode(a.resume_data)
+                h = a.handle
+                if h in torrents:
+                    open(os.path.join(options.save_path, torrents[h].name + '.fastresume'), 'wb').write(data)
+                    del torrents[h]
+
+            if type(a) == lt.save_resume_data_failed_alert:
+                h = a.handle
+                if h in torrents:
+                    print('failed to save resume data for ', torrents[h].name)
+                    del torrents[h]
+        time.sleep(0.5)
 
 main()
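The heart of the new loop is the post_torrent_updates()/state_update_alert contract: ask the session for status deltas, then fold them into the handle-keyed dict. Stripped to its essentials (a sketch; error handling omitted):

    import libtorrent as lt

    ses = lt.session({'alert_mask': lt.alert.category_t.status_notification})
    torrents = {}                        # torrent_handle -> latest torrent_status

    ses.post_torrent_updates()           # request deltas since the last call
    ses.wait_for_alert(500)              # wait up to 0.5 s
    for a in ses.pop_alerts():
        if isinstance(a, lt.state_update_alert):
            for st in a.status:          # only torrents whose status changed
                torrents[st.handle] = st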
@@ -1,35 +0,0 @@
-#!/usr/bin/env python
-
-import sys
-import libtorrent as lt
-import time
-
-if len(sys.argv) != 2:
-    print('usage: rss_reader.py rss-feed-url')
-    sys.exit(1)
-
-ses = lt.session()
-
-h = ses.add_feed({'url': sys.argv[1], 'auto_download': False})
-f = h.get_feed_status()
-spinner = ['|', '/', '-', '\\']
-i = 0
-while f['updating']:
-    time.sleep(0.1)
-    i = (i + 1) % 4
-    print('\b%s' % spinner[i]),
-    sys.stdout.flush()
-    f = h.get_feed_status()
-
-print('\n\nFEED: %s' % f['url'])
-if len(f['error']) > 0:
-    print('ERROR: %s' % f['error'])
-
-print(' %s\n %s\n' % (f['title'], f['description']))
-print(' ttl: %d minutes' % f['ttl'])
-
-for item in f['items']:
-    print('\n%s\n------------------------------------------------------' % item['title'])
-    print(' url: %s\n size: %d\n uuid: %s\n description: %s' % (item['url'], item['size'], item['uuid'], item['description']))
-    print(' comment: %s\n category: %s' % (item['comment'], item['category']))
@@ -426,6 +426,7 @@ void bind_torrent_handle()
         .def(self == self)
         .def(self != self)
         .def(self < self)
+        .def("__hash__", (std::size_t (*)(torrent_handle const&))&libtorrent::hash_value)
         .def("get_peer_info", get_peer_info)
         .def("status", _(&torrent_handle::status), arg("flags") = 0xffffffff)
         .def("get_download_queue", get_download_queue)
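With __hash__ delegating to libtorrent's hash_value(), two handle objects that refer to the same torrent collapse to a single dict or set entry, which is exactly what the updated client's handle-keyed torrents dict relies on. A small sketch ('test.torrent' is a placeholder):

    import libtorrent as lt

    ses = lt.session()
    h1 = ses.add_torrent({'ti': lt.torrent_info('test.torrent'),  # placeholder file
                          'save_path': '.'})
    h2 = ses.get_torrents()[0]   # a second handle object for the same torrent

    seen = {h1: 'first'}
    seen[h2] = 'second'          # replaces the entry: hash(h1) == hash(h2) and h1 == h2
    assert len(seen) == 1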
@@ -59,6 +59,37 @@ class test_torrent_handle(unittest.TestCase):
         self.h.prioritize_pieces([(0, 1)])
         self.assertEqual(self.h.piece_priorities(), [1])
 
+    def test_torrent_handle_in_set(self):
+        self.setup()
+        torrents = set()
+        torrents.add(self.h)
+
+        # get another instance of a torrent_handle that represents the same
+        # torrent. Make sure that when we add it to a set, it just replaces the
+        # existing object
+        t = self.ses.get_torrents()
+        self.assertEqual(len(t), 1)
+        for h in t:
+            torrents.add(h)
+
+        self.assertEqual(len(torrents), 1)
+
+    def test_torrent_handle_in_dict(self):
+        self.setup()
+        torrents = {}
+        torrents[self.h] = 'foo'
+
+        # get another instance of a torrent_handle that represents the same
+        # torrent. Make sure that when we add it to a dict, it just replaces the
+        # existing object
+        t = self.ses.get_torrents()
+        self.assertEqual(len(t), 1)
+        for h in t:
+            torrents[h] = 'bar'
+
+        self.assertEqual(len(torrents), 1)
+        self.assertEqual(torrents[self.h], 'bar')
+
     def test_replace_trackers(self):
         self.setup()
         trackers = []
@@ -214,7 +214,7 @@ namespace libtorrent
     };
 
     // for boost::hash (and to support using this type in unordered_map etc.)
-    std::size_t hash_value(torrent_handle const& h);
+    TORRENT_EXPORT std::size_t hash_value(torrent_handle const& h);
 
     // You will usually have to store your torrent handles somewhere, since it's
     // the object through which you retrieve information about the torrent and
@@ -4871,8 +4871,7 @@ retry:
 
     std::pair<boost::shared_ptr<torrent>, bool>
     session_impl::add_torrent_impl(
-        add_torrent_params& params
-        , error_code& ec)
+        add_torrent_params& params, error_code& ec)
     {
         TORRENT_ASSERT(!params.save_path.empty());
 