add file_status to python binding (#1448)

add more missing functions to python bindings
Arvid Norberg 2016-12-23 13:57:10 -08:00 committed by GitHub
parent 820fd29bff
commit a6de426d1e
5 changed files with 106 additions and 23 deletions
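The headline addition is a file_status() method on torrent_handle, returning the per-file open state tracked by libtorrent's file pool. A minimal usage sketch (not part of the commit; the session settings and the "test.torrent" filename are illustrative only):

import libtorrent as lt

# minimal sketch, assuming a file named "test.torrent" exists in the working directory
ses = lt.session({'enable_dht': False})
h = ses.add_torrent({'ti': lt.torrent_info('test.torrent'), 'save_path': '.'})

# file_status() returns a list of pool_file_status objects (bound further down);
# each entry describes one file the torrent currently holds open
for fs in h.file_status():
    print(fs.file_index, fs.open_mode)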


@@ -6,7 +6,8 @@
#include "libtorrent/address.hpp"
#include "libtorrent/socket.hpp"
#include "libtorrent/error_code.hpp"
#include "libtorrent/session_stats.hpp"
#include "libtorrent/session_stats.hpp" // for stats_metric
#include "libtorrent/file_pool.hpp" // for file_pool_status
using namespace boost::python;
namespace bp = boost::python;
@@ -91,4 +92,7 @@ void bind_converters()
	tuple_to_pair<int, int>();
	to_python_converter<std::vector<lt::stats_metric>, vector_to_list<lt::stats_metric>>();
	to_python_converter<std::vector<lt::pool_file_status>, vector_to_list<lt::pool_file_status>>();
	to_python_converter<std::vector<std::string>, vector_to_list<std::string>>();
	to_python_converter<std::vector<lt::sha1_hash>, vector_to_list<lt::sha1_hash>>();
}


@@ -12,6 +12,7 @@
#include <libtorrent/peer_info.hpp>
#include "libtorrent/announce_entry.hpp"
#include <libtorrent/storage.hpp>
#include <libtorrent/file_pool.hpp>
#include <boost/lexical_cast.hpp>
#include "gil.hpp"
@@ -314,6 +315,13 @@ void connect_peer(torrent_handle& th, tuple ip, int source)
	th.connect_peer(tuple_to_endpoint(ip), source);
}

std::vector<pool_file_status> file_status(torrent_handle const& h)
{
	std::vector<pool_file_status> ret;
	h.file_status(ret);
	return ret;
}

#ifndef TORRENT_NO_DEPRECATE
#if BOOST_VERSION > 104200
@@ -444,6 +452,7 @@ void bind_torrent_handle()
		.def("set_piece_deadline", _(&torrent_handle::set_piece_deadline)
			, (arg("index"), arg("deadline"), arg("flags") = 0))
		.def("reset_piece_deadline", _(&torrent_handle::reset_piece_deadline), (arg("index")))
		.def("clear_piece_deadlines", _(&torrent_handle::clear_piece_deadlines), (arg("index")))
		.def("piece_availability", &piece_availability)
		.def("piece_priority", _(piece_priority0))
		.def("piece_priority", _(piece_priority1))
@@ -453,6 +462,7 @@ void bind_torrent_handle()
		.def("file_priorities", &file_priorities)
		.def("file_priority", &file_prioritity0)
		.def("file_priority", &file_prioritity1)
		.def("file_status", &::file_status)
		.def("save_resume_data", _(&torrent_handle::save_resume_data), arg("flags") = 0)
		.def("need_save_resume_data", _(&torrent_handle::need_save_resume_data))
		.def("force_reannounce", _(force_reannounce0)
@@ -495,16 +505,27 @@ void bind_torrent_handle()
#endif
		;

	class_<pool_file_status>("pool_file_status")
		.def_readonly("file_index", &pool_file_status::file_index)
		.def_readonly("last_use", &pool_file_status::last_use)
		.def_readonly("open_mode", &pool_file_status::open_mode)
		;

	enum_<torrent_handle::file_progress_flags_t>("file_progress_flags")
		.value("piece_granularity", torrent_handle::piece_granularity)
		;

	enum_<torrent_handle::flags_t>("add_piece_flags_t")
		.value("overwrite_existing", torrent_handle::overwrite_existing)
		;

	enum_<torrent_handle::pause_flags_t>("pause_flags_t")
		.value("graceful_pause", torrent_handle::graceful_pause)
		;

	enum_<torrent_handle::save_resume_flags_t>("save_resume_flags_t")
		.value("flush_disk_cache", torrent_handle::flush_disk_cache)
		.value("save_info_dict", torrent_handle::save_info_dict)
		.value("only_if_modified", torrent_handle::only_if_modified)
		;

	enum_<torrent_handle::deadline_flags>("deadline_flags")
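Besides pool_file_status, this hunk exposes several flag enums to Python. A hedged sketch of how they might be used; it assumes the enum types land at module level (as plain Boost.Python enum_ declarations outside a class scope usually do) and that "test.torrent" exists:

import libtorrent as lt

ses = lt.session({'enable_dht': False})
h = ses.add_torrent({'ti': lt.torrent_info('test.torrent'), 'save_path': '.'})

# newly bound call, also exercised by test.py below
h.clear_piece_deadlines()

# save_resume_data() already accepted an int flags argument (see the binding above);
# the new enum names its values: flush_disk_cache, save_info_dict, only_if_modified
h.save_resume_data(lt.save_resume_flags_t.save_info_dict)

# pause_flags_t.graceful_pause is exposed the same way, intended for torrent_handle.pause()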


@@ -62,13 +62,28 @@ namespace
		d["url"] = i->url;
		d["type"] = i->type;
		d["auth"] = i->auth;
		d["extra_headers"] = i->extra_headers;
		ret.append(d);
	}
	return ret;
}

void set_web_seeds(torrent_info& ti, list ws)
{
	std::vector<web_seed_entry> web_seeds;
	int const len = boost::python::len(ws);
	for (int i = 0; i < len; i++)
	{
		dict e = extract<dict>(ws[i]);
		int const type = extract<int>(e["type"]);
		web_seeds.push_back(web_seed_entry(
			extract<std::string>(e["url"])
			, static_cast<web_seed_entry::type_t>(type)
			, extract<std::string>(e["auth"])));
	}
	ti.set_web_seeds(web_seeds);
}

list get_merkle_tree(torrent_info const& ti)
{
	std::vector<sha1_hash> const& mt = ti.merkle_tree();
@@ -228,6 +243,7 @@ void bind_torrent_info()
		.def("add_url_seed", &torrent_info::add_url_seed)
		.def("add_http_seed", &torrent_info::add_http_seed)
		.def("web_seeds", get_web_seeds)
		.def("set_web_seeds", set_web_seeds)
		.def("name", &torrent_info::name, copy)
		.def("comment", &torrent_info::comment, copy)
@@ -241,6 +257,9 @@ void bind_torrent_info()
		.def("set_merkle_tree", set_merkle_tree)
		.def("piece_size", &torrent_info::piece_size)
		.def("similar_torrents", &torrent_info::similar_torrents)
		.def("collections", &torrent_info::collections)
		.def("ssl_cert", &torrent_info::ssl_cert)
		.def("num_files", &torrent_info::num_files)
		.def("rename_file", rename_file0)
		.def("remap_files", &torrent_info::remap_files)
@@ -254,7 +273,10 @@ void bind_torrent_info()
#endif // TORRENT_USE_WSTRING
#endif // TORRENT_NO_DEPRECATE
		.def("is_valid", &torrent_info::is_valid)
		.def("priv", &torrent_info::priv)
		.def("is_i2p", &torrent_info::is_i2p)
		.def("is_merkle_torrent", &torrent_info::is_merkle_torrent)
		.def("trackers", range(begin_trackers, end_trackers))
		.def("creation_date", &torrent_info::creation_date)


@@ -22,10 +22,18 @@ object bitfield_to_list(bitfield const& bf)
object pieces(torrent_status const& s) { return bitfield_to_list(s.pieces); }
object verified_pieces(torrent_status const& s) { return bitfield_to_list(s.verified_pieces); }

boost::shared_ptr<const torrent_info> get_torrent_file(torrent_status const& st)
{
	return st.torrent_file.lock();
}

void bind_torrent_status()
{
	scope status = class_<torrent_status>("torrent_status")
		.def(self == self)
		.def_readonly("handle", &torrent_status::handle)
		.def_readonly("info_hash", &torrent_status::info_hash)
		.add_property("torrent_file", &get_torrent_file)
		.def_readonly("state", &torrent_status::state)
		.def_readonly("paused", &torrent_status::paused)
		.def_readonly("stop_when_ready", &torrent_status::stop_when_ready)


@@ -37,38 +37,53 @@ class test_session_stats(unittest.TestCase):

class test_torrent_handle(unittest.TestCase):

    def setup(self):
        self.ses = lt.session({'alert_mask': lt.alert.category_t.all_categories, 'enable_dht': False})
        self.ti = lt.torrent_info('url_seed_multi.torrent');
        self.h = self.ses.add_torrent({'ti': self.ti, 'save_path': os.getcwd()})

    def test_torrent_handle(self):
        ses = lt.session({'alert_mask': lt.alert.category_t.all_categories, 'enable_dht': False})
        ti = lt.torrent_info('url_seed_multi.torrent');
        h = ses.add_torrent({'ti': ti, 'save_path': os.getcwd()})
        self.setup()
        self.assertEqual(self.h.file_priorities(), [4,4])
        self.assertEqual(self.h.piece_priorities(), [4])
        self.assertEqual(h.file_priorities(), [4,4])
        self.assertEqual(h.piece_priorities(), [4])

        self.h.prioritize_files([0,1])
        self.assertEqual(self.h.file_priorities(), [0,1])
        h.prioritize_files([0,1])
        self.assertEqual(h.file_priorities(), [0,1])

        self.h.prioritize_pieces([0])
        self.assertEqual(self.h.piece_priorities(), [0])
        h.prioritize_pieces([0])
        self.assertEqual(h.piece_priorities(), [0])

        # also test the overload that takes a list of piece->priority mappings
        self.h.prioritize_pieces([(0, 1)])
        self.assertEqual(self.h.piece_priorities(), [1])
        # also test the overload that takes a list of piece->priority mappings
        h.prioritize_pieces([(0, 1)])
        self.assertEqual(h.piece_priorities(), [1])

    def test_file_status(self):
        self.setup()
        l = self.h.file_status()
        print(l)

    def test_piece_deadlines(self):
        self.setup()
        self.h.clear_piece_deadlines()

    def test_torrent_status(self):
        self.setup()
        st = self.h.status()
        ti = st.handle;
        self.assertEqual(ti.info_hash(), self.ti.info_hash())
        # make sure we can compare torrent_status objects
        st2 = self.h.status()
        self.assertEqual(st2, st)

    def test_scrape(self):
        ses = lt.session({'alert_mask': lt.alert.category_t.all_categories, 'enable_dht': False})
        ti = lt.torrent_info('url_seed_multi.torrent');
        h = ses.add_torrent({'ti': ti, 'save_path': os.getcwd()})
        self.setup()
        # this is just to make sure this function can be called like this
        # from python
        h.scrape_tracker()
        self.h.scrape_tracker()

    def test_cache_info(self):
        ses = lt.session({'alert_mask': lt.alert.category_t.all_categories, 'enable_dht': False})
        ti = lt.torrent_info('url_seed_multi.torrent');
        h = ses.add_torrent({'ti': ti, 'save_path': os.getcwd()})
        cs = ses.get_cache_info(h)
        self.setup()
        cs = self.ses.get_cache_info(self.h)
        self.assertEqual(cs.pieces, [])
class test_torrent_info(unittest.TestCase):
@@ -91,6 +106,19 @@ class test_torrent_info(unittest.TestCase):
        self.assertTrue(len(ti.metadata()) != 0)
        self.assertTrue(len(ti.hash_for_piece(0)) != 0)

    def test_web_seeds(self):
        ti = lt.torrent_info('base.torrent');
        ws = [{'url': 'http://foo/test', 'auth': '', 'type': 0},
              {'url': 'http://bar/test', 'auth': '', 'type': 1} ]
        ti.set_web_seeds(ws)
        web_seeds = ti.web_seeds()
        self.assertEqual(len(ws), len(web_seeds))
        for i in range(len(web_seeds)):
            self.assertEqual(web_seeds[i]["url"], ws[i]["url"])
            self.assertEqual(web_seeds[i]["auth"], ws[i]["auth"])
            self.assertEqual(web_seeds[i]["type"], ws[i]["type"])

    def test_iterable_files(self):
        # this detects whether libtorrent was built with deprecated APIs