#!/usr/bin/env python

# Copyright Daniel Wallin 2006. Use, modification and distribution is
# subject to the Boost Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
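
# A simple command-line bittorrent client built on the libtorrent python
# bindings. Each argument is a .torrent file, a magnet link or an HTTP(S)
# URL to download; see the OptionParser setup in main() for the available
# options (listen port, rate limits, save path and HTTP proxy).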

from __future__ import print_function

import sys
import atexit
import libtorrent as lt
import time
import os.path
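

# Console helper for Windows: uses the third-party Console module and msvcrt
# (imported below when os.name == 'nt') to clear the screen, write output,
# and check for a pending keystroke after sleeping.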
class WindowsConsole:
    def __init__(self):
        self.console = Console.getconsole()

    def clear(self):
        self.console.page()

    def write(self, str):
        self.console.write(str)

    def sleep_and_input(self, seconds):
        time.sleep(seconds)
        if msvcrt.kbhit():
            return msvcrt.getch()
        return None
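

# Console helper for POSIX terminals: puts stdin into non-canonical mode via
# termios so single keystrokes can be read without waiting for Enter, and
# restores the original terminal settings at exit.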
class UnixConsole:
    def __init__(self):
        self.fd = sys.stdin
        self.old = termios.tcgetattr(self.fd.fileno())
        new = termios.tcgetattr(self.fd.fileno())
        new[3] = new[3] & ~termios.ICANON
        new[6][termios.VTIME] = 0
        new[6][termios.VMIN] = 1
        termios.tcsetattr(self.fd.fileno(), termios.TCSADRAIN, new)

        atexit.register(self._onexit)

    def _onexit(self):
        termios.tcsetattr(self.fd.fileno(), termios.TCSADRAIN, self.old)

    def clear(self):
        sys.stdout.write('\033[2J\033[0;0H')
        sys.stdout.flush()

    def write(self, str):
        sys.stdout.write(str)
        sys.stdout.flush()

    def sleep_and_input(self, seconds):
        read, __, __ = select.select(
            [self.fd.fileno()], [], [], seconds)
        if len(read) > 0:
            return self.fd.read(1)
        return None
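

# Pull in the platform-specific console dependencies; only the modules
# available on the current platform are imported.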
if os.name == 'nt':
    import Console
    import msvcrt
else:
    import termios
    import select


def write_line(console, line):
    console.write(line)
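

# Format a byte count with an SI suffix (B, kB, MB, GB, TB, PB), dividing by
# 1000 per step (e.g. add_suffix(1234567) is '1.23MB' under Python 3's true
# division).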
def add_suffix(val):
    prefix = ['B', 'kB', 'MB', 'GB', 'TB']
    for i in range(len(prefix)):
        if abs(val) < 1000:
            if i == 0:
                return '%5.3g%s' % (val, prefix[i])
            else:
                return '%4.3g%s' % (val, prefix[i])
        val /= 1000

    return '%6.3gPB' % val
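

# Render a fixed-width ASCII progress bar; progress is a fraction in [0, 1].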
def progress_bar(progress, width):
    assert(progress <= 1)
    progress_chars = int(progress * width + 0.5)
    return progress_chars * '#' + (width - progress_chars) * '-'
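

# Print one line per connected peer: transfer rates and totals, request
# queue lengths, a flags column (interest and choke state in both directions,
# 'e' for extension support, 'l'/'r' for locally/remotely initiated
# connections), the progress of the block currently being downloaded from
# that peer, and the peer's client name.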
def print_peer_info(console, peers):

    out = (' down (total ) up (total )'
           ' q r flags block progress client\n')

    for p in peers:

        out += '%s/s ' % add_suffix(p.down_speed)
        out += '(%s) ' % add_suffix(p.total_download)
        out += '%s/s ' % add_suffix(p.up_speed)
        out += '(%s) ' % add_suffix(p.total_upload)
        out += '%2d ' % p.download_queue_length
        out += '%2d ' % p.upload_queue_length

        if p.flags & lt.peer_info.interesting:
            out += 'I'
        else:
            out += '.'
        if p.flags & lt.peer_info.choked:
            out += 'C'
        else:
            out += '.'
        if p.flags & lt.peer_info.remote_interested:
            out += 'i'
        else:
            out += '.'
        if p.flags & lt.peer_info.remote_choked:
            out += 'c'
        else:
            out += '.'
        if p.flags & lt.peer_info.supports_extensions:
            out += 'e'
        else:
            out += '.'
        if p.flags & lt.peer_info.local_connection:
            out += 'l'
        else:
            out += 'r'
        out += ' '

        if p.downloading_piece_index >= 0:
            assert(p.downloading_progress <= p.downloading_total)
            out += progress_bar(float(p.downloading_progress) /
                                p.downloading_total, 15)
        else:
            out += progress_bar(0, 15)
        out += ' '

        if p.flags & lt.peer_info.handshake:
            id = 'waiting for handshake'
        elif p.flags & lt.peer_info.connecting:
            id = 'connecting to peer'
        else:
            id = p.client

        out += '%s\n' % id[:10]

    write_line(console, out)
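

# Print the pieces currently being downloaded: one row per piece in the
# download queue, with one character per block ('#', '=', '-' or ' ' for
# block states 3, 2, 1 and anything else, which roughly correspond to
# finished, writing, requested and not-yet-requested blocks).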
def print_download_queue(console, download_queue):

    out = ""

    for e in download_queue:
        out += '%4d: [' % e['piece_index']
        for b in e['blocks']:
            s = b['state']
            if s == 3:
                out += '#'
            elif s == 2:
                out += '='
            elif s == 1:
                out += '-'
            else:
                out += ' '
        out += ']\n'

    write_line(console, out)
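

# Parse the command line, create the libtorrent session, add one torrent per
# argument and run the interactive status loop until the user quits.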
def main():
    from optparse import OptionParser

    parser = OptionParser()

    parser.add_option('-p', '--port', type='int', help='set listening port')

    parser.add_option(
        '-d', '--max-download-rate', type='float',
        help='the maximum download rate given in kB/s. 0 means infinite.')

    parser.add_option(
        '-u', '--max-upload-rate', type='float',
        help='the maximum upload rate given in kB/s. 0 means infinite.')

    parser.add_option(
        '-s', '--save-path', type='string',
        help='the path where the downloaded file/folder should be placed.')

    parser.add_option(
        '-r', '--proxy-host', type='string',
        help='sets HTTP proxy host and port (separated by \':\')')

    parser.set_defaults(
        port=6881,
        max_download_rate=0,
        max_upload_rate=0,
        save_path='.',
        proxy_host=''
    )

    (options, args) = parser.parse_args()

    if options.port < 0 or options.port > 65535:
        options.port = 6881

    options.max_upload_rate *= 1000
    options.max_download_rate *= 1000

    if options.max_upload_rate <= 0:
        options.max_upload_rate = -1
    if options.max_download_rate <= 0:
        options.max_download_rate = -1
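
    # Create the session, passing a settings dict: user agent, the interface
    # and port to listen on, and the rate limits computed above (converted
    # from kB/s to bytes/s, with -1 used when no limit was requested).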
    ses = lt.session({'user_agent': 'python_client/' + lt.__version__,
                      'listen_interfaces': '0.0.0.0:' + str(options.port),
                      'download_rate_limit': int(options.max_download_rate),
                      'upload_rate_limit': int(options.max_upload_rate)
                      })

    if options.proxy_host != '':
        ps = lt.proxy_settings()
        ps.type = lt.proxy_type.http
        ps.hostname = options.proxy_host.split(':')[0]
        ps.port = int(options.proxy_host.split(':')[1])
        ses.set_proxy(ps)

    handles = []
    alerts = []
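
    # Each argument is either a magnet / HTTP(S) URL or a path to a .torrent
    # file. Build an add_torrent_params dict for it and try to load any
    # previously saved .fastresume data from the save path.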
    for f in args:

        atp = {}
        atp["save_path"] = options.save_path
        atp["storage_mode"] = lt.storage_mode_t.storage_mode_sparse
        atp["flags"] = lt.add_torrent_params_flags_t.flag_duplicate_is_error \
            | lt.add_torrent_params_flags_t.flag_auto_managed
        if f.startswith('magnet:') or f.startswith(
                'http://') or f.startswith('https://'):
            atp["url"] = f
        else:
            info = lt.torrent_info(f)
            print('Adding \'%s\'...' % info.name())

            try:
                atp["resume_data"] = open(os.path.join(
                    options.save_path,
                    info.name() + '.fastresume'), 'rb').read()
            except Exception:
                pass
atp["ti"] = info
|
2008-11-21 18:54:04 +01:00
|
|
|
|
2008-10-02 08:49:52 +02:00
|
|
|
h = ses.add_torrent(atp)
|
2007-01-10 17:11:43 +01:00
|
|
|
|
|
|
|
handles.append(h)
|
|
|
|
|
|
|
|
h.set_max_connections(60)
|
|
|
|
h.set_max_uploads(-1)
|
|
|
|
|
2007-01-10 20:58:54 +01:00
|
|
|
if os.name == 'nt':
|
|
|
|
console = WindowsConsole()
|
|
|
|
else:
|
|
|
|
console = UnixConsole()
|
2007-01-10 17:11:43 +01:00
|
|
|
|
|
|
|
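
    # Main UI loop: clear the screen, print the status of every torrent
    # together with its peers and download queue, show the most recent
    # alerts, then wait up to half a second for a keystroke ('q' quit,
    # 'p' pause, 'u' unpause, 'r' force a tracker reannounce).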
    alive = True
    while alive:
        console.clear()

        out = ''

        for h in handles:
            s = h.status()
            if s.has_metadata:
                name = h.torrent_file().name()[:40]
            else:
                name = '-'
            out += 'name: %-40s\n' % name

            if s.state != lt.torrent_status.seeding:
                state_str = ['queued', 'checking', 'downloading metadata',
                             'downloading', 'finished', 'seeding',
                             'allocating', 'checking fastresume']
                out += state_str[s.state] + ' '

                out += '%5.4f%% ' % (s.progress * 100)
                out += progress_bar(s.progress, 49)
                out += '\n'

                out += 'total downloaded: %d Bytes\n' % s.total_done
                out += 'peers: %d seeds: %d distributed copies: %d\n' % \
                    (s.num_peers, s.num_seeds, s.distributed_copies)
                out += '\n'

            out += 'download: %s/s (%s) ' \
                % (add_suffix(s.download_rate), add_suffix(s.total_download))
            out += 'upload: %s/s (%s) ' \
                % (add_suffix(s.upload_rate), add_suffix(s.total_upload))

            if s.state != lt.torrent_status.seeding:
                out += 'info-hash: %s\n' % h.info_hash()
                out += 'next announce: %s\n' % s.next_announce
                out += 'tracker: %s\n' % s.current_tracker

            write_line(console, out)

            print_peer_info(console, h.get_peer_info())
            print_download_queue(console, h.get_download_queue())

            if s.state != lt.torrent_status.seeding:
                try:
                    out = '\n'
                    fp = h.file_progress()
                    ti = h.get_torrent_info()
                    for f, p in zip(ti.files(), fp):
                        out += progress_bar(p / float(f.size), 20)
                        out += ' ' + f.path + '\n'
                    write_line(console, out)
                except Exception:
                    pass

        write_line(console, 76 * '-' + '\n')
        write_line(console, '(q)uit, (p)ause, (u)npause, (r)eannounce\n')
        write_line(console, 76 * '-' + '\n')

        # only print the last 20 alerts
        alerts = ses.pop_alerts()[-20:]

        for a in alerts:
            if type(a) == str:
                write_line(console, a + '\n')
            else:
                write_line(console, a.message() + '\n')

        c = console.sleep_and_input(0.5)

        if not c:
            continue

        if c == 'r':
            for h in handles:
                h.force_reannounce()
        elif c == 'q':
            alive = False
        elif c == 'p':
            for h in handles:
                h.pause()
        elif c == 'u':
            for h in handles:
                h.resume()
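
    # On exit, pause the session and write a .fastresume file for every valid
    # torrent that has metadata, so the next run can pick up where this one
    # left off.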
    ses.pause()
    for h in handles:
        if not h.is_valid() or not h.status().has_metadata:
            continue
        data = lt.bencode(h.write_resume_data())
        open(os.path.join(options.save_path, h.get_torrent_info().name() +
                          '.fastresume'), 'wb').write(data)


main()