Merge pull request #576 from arvidn/python3-readpiece-1.1
fix python3 binding for read_piece::buffer
commit 55c35332ba
@@ -8,14 +8,15 @@
 #include <libtorrent/piece_picker.hpp> // for piece_block
 #include <libtorrent/session_stats.hpp>
 #include <memory>
+#include "bytes.hpp"
 
 using namespace boost::python;
 using namespace libtorrent;
 
-std::string get_buffer(read_piece_alert const& rpa)
+bytes get_buffer(read_piece_alert const& rpa)
 {
-    return rpa.buffer ? std::string(rpa.buffer.get(), rpa.size)
-        : std::string();
+    return rpa.buffer ? bytes(rpa.buffer.get(), rpa.size)
+        : bytes();
 }
 
 tuple endpoint_to_tuple(tcp::endpoint const& ep)
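The hunk above changes get_buffer() to return the bindings' bytes wrapper instead of std::string. Boost.Python converts std::string to a Python str, which under Python 3 is a text type, so raw piece data could not be passed back faithfully; the bytes wrapper converts to a Python bytes object instead. A minimal sketch of how the fixed attribute is consumed from Python 3 (the torrent file name, piece index, and polling loop are illustrative assumptions, and the piece is assumed to be downloaded already):

import time
import libtorrent as lt

ses = lt.session({'alert_mask': lt.alert.category_t.all_categories})
ti = lt.torrent_info('base.torrent')               # placeholder torrent file
h = ses.add_torrent({'ti': ti, 'save_path': '.'})

h.read_piece(0)                                    # request piece 0; a read_piece_alert follows

piece_data = None
while piece_data is None:
    for a in ses.pop_alerts():
        if isinstance(a, lt.read_piece_alert):
            piece_data = a.buffer                  # with this fix: a bytes object, not str
    time.sleep(0.1)

assert isinstance(piece_data, bytes)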
@@ -267,7 +268,7 @@ void bind_alert()
 
     class_<torrent_removed_alert, bases<torrent_alert>, noncopyable>(
         "torrent_removed_alert", no_init)
-        .def_readonly("info_hash", &torrent_removed_alert::info_hash)
+        .def_readonly("info_hash", &torrent_removed_alert::info_hash)
         ;
 
     class_<read_piece_alert, bases<torrent_alert>, noncopyable>(
@@ -4,6 +4,7 @@
 
 #include <boost/python.hpp>
 #include <boost/python/tuple.hpp>
+#include <boost/python/stl_iterator.hpp>
 #include <libtorrent/torrent_handle.hpp>
 #include <libtorrent/torrent_info.hpp>
 #include <libtorrent/torrent_status.hpp>
@@ -163,25 +164,9 @@ void prioritize_pieces(torrent_handle& info, object o)
 void prioritize_files(torrent_handle& info, object o)
 {
     std::vector<int> result;
-    try
-    {
-        object iter_obj = object( handle<>( PyObject_GetIter( o.ptr() ) ));
-        while( 1 )
-        {
-#if PY_MAJOR_VERSION >= 3
-            object obj = extract<object>( iter_obj.attr( "__next__" )() );
-#else
-            object obj = extract<object>( iter_obj.attr( "next" )() );
-#endif
-            result.push_back(extract<int const>( obj ));
-        }
-    }
-    catch( error_already_set )
-    {
-        PyErr_Clear();
-        info.prioritize_files(result);
-        return;
-    }
+    stl_input_iterator<int const> begin(o), end;
+    result.insert(result.begin(), begin, end);
+    info.prioritize_files(result);
 }
 
 list file_priorities(torrent_handle& handle)
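The rewritten prioritize_files() drops the hand-rolled PyObject_GetIter loop, which needed a PY_MAJOR_VERSION switch between "next" and "__next__", and instead uses boost::python::stl_input_iterator (hence the new <boost/python/stl_iterator.hpp> include above) to walk any Python iterable and convert each element to int on either interpreter version. The Python-side call is unchanged; a small sketch, assuming a torrent_handle h and its torrent_info ti for a multi-file torrent:

n = ti.num_files()

h.prioritize_files([1] * n)                               # a plain list of ints
h.prioritize_files(4 if i == 0 else 1 for i in range(n))  # any iterable of ints, e.g. a generator

print(h.file_priorities())                                # read back the per-file priorities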
@@ -12,7 +12,6 @@ class test_torrent_handle(unittest.TestCase):
 
     def test_torrent_handle(self):
         ses = lt.session({'alert_mask': lt.alert.category_t.all_categories})
-        shutil.copy(os.path.join('..', '..', 'test', 'test_torrents', 'url_seed_multi.torrent'), '.')
         ti = lt.torrent_info('url_seed_multi.torrent');
         h = ses.add_torrent({'ti': ti, 'save_path': os.getcwd()})
 
@@ -37,7 +36,6 @@ class test_torrent_info(unittest.TestCase):
         self.assertEqual(info.total_size(), 1234)
 
     def test_metadata(self):
-        shutil.copy(os.path.join('..', '..', 'test', 'test_torrents', 'base.torrent'), '.')
         ti = lt.torrent_info('base.torrent');
 
         self.assertTrue(len(ti.metadata()) != 0)
@@ -48,7 +46,6 @@ class test_alerts(unittest.TestCase):
     def test_alert(self):
 
         ses = lt.session({'alert_mask': lt.alert.category_t.all_categories})
-        shutil.copy(os.path.join('..', '..', 'test', 'test_torrents', 'base.torrent'), '.')
         ti = lt.torrent_info('base.torrent');
         h = ses.add_torrent({'ti': ti, 'save_path': os.getcwd()})
         st = h.status()
@@ -106,5 +103,7 @@ class test_session(unittest.TestCase):
 
 if __name__ == '__main__':
     print(lt.__version__)
+    shutil.copy(os.path.join('..', '..', 'test', 'test_torrents', 'url_seed_multi.torrent'), '.')
+    shutil.copy(os.path.join('..', '..', 'test', 'test_torrents', 'base.torrent'), '.')
     unittest.main()
 
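The test hunks move the shutil.copy calls for the two fixture .torrent files out of the individual test methods and into the __main__ block, so each fixture is copied into the working directory once per run instead of once per test. As an aside, the same copy-once behaviour could also be expressed with unittest's module-level fixture hook; a hedged sketch of that alternative (setUpModule is standard unittest, but using it here is not what this commit does):

import os
import shutil

def setUpModule():
    # runs once before any test in this module, even under an external test runner
    for name in ('url_seed_multi.torrent', 'base.torrent'):
        shutil.copy(os.path.join('..', '..', 'test', 'test_torrents', name), '.')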