*** empty log message ***
parent 522016da37
commit 3dac95965e
@@ -185,10 +185,9 @@ namespace libtorrent
     }

     template<class InIt>
-    entry bdecode_recursive(InIt& in, InIt end)
+    void bdecode_recursive(InIt& in, InIt end, entry& ret)
     {
         if (in == end) throw invalid_encoding();
-        entry ret;
         switch (*in)
         {

@@ -212,7 +211,9 @@ namespace libtorrent
             ++in; // 'l'
             while (*in != 'e')
             {
-                ret.list().push_back(bdecode_recursive(in, end));
+                ret.list().push_back(entry());
+                entry& e = ret.list().back();
+                bdecode_recursive(in, end, e);
                 if (in == end) throw invalid_encoding();
             }
             assert(*in == 'e');
@@ -227,8 +228,10 @@ namespace libtorrent
             ++in; // 'd'
             while (*in != 'e')
             {
-                entry key = bdecode_recursive(in, end);
-                ret.dict()[key.string()] = bdecode_recursive(in, end);
+                entry key;
+                bdecode_recursive(in, end, key);
+                entry& e = ret[key.string()];
+                bdecode_recursive(in, end, e);
                 if (in == end) throw invalid_encoding();
             }
             assert(*in == 'e');
@@ -252,9 +255,7 @@ namespace libtorrent
                 throw invalid_encoding();
             }
         }
-        return ret;
-    }

     }

     template<class OutIt>
@@ -268,7 +269,9 @@ namespace libtorrent
     {
         try
         {
-            return detail::bdecode_recursive(start, end);
+            entry e;
+            detail::bdecode_recursive(start, end, e);
+            return e;
         }
         catch(type_error&)
         {

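Note on the change above: bdecode_recursive no longer returns the decoded entry by value, it fills in an entry passed by reference, and the public bdecode() wrapper now allocates the result, forwards to the recursive helper and returns it. A minimal caller sketch (the bencoded buffer and its contents are invented for illustration; only bdecode(), entry and invalid_encoding from this diff are assumed):

    #include "libtorrent/bencode.hpp"
    #include "libtorrent/entry.hpp"
    #include <string>

    // decode a small bencoded dictionary; bdecode() throws
    // invalid_encoding on malformed input
    std::string buf = "d3:foo3:bare";
    libtorrent::entry e = libtorrent::bdecode(buf.begin(), buf.end());
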
@@ -192,6 +192,32 @@ namespace libtorrent
             return *reinterpret_cast<const dictionary_type*>(data);
         }

+        entry& operator[](const char* key)
+        {
+            return dict()[key];
+        }
+
+        entry& operator[](const std::string& key)
+        {
+            return dict()[key.c_str()];
+        }
+
+        const entry& operator[](const char* key) const
+        {
+            dictionary_type::const_iterator i =
+                dict().find(key);
+            if (i == dict().end()) throw type_error("key not found");
+            return i->second;
+        }
+
+        const entry& operator[](const std::string& key) const
+        {
+            dictionary_type::const_iterator i =
+                dict().find(key);
+            if (i == dict().end()) throw type_error("key not found");
+            return i->second;
+        }
+
         void print(std::ostream& os, int indent = 0) const;

     private:

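Note on the operator[] overloads added above: they replace the verbose entry::dict().find() lookups used in the old code throughout this commit. The non-const overloads insert a default-constructed entry for a missing key (like std::map::operator[]), while the const overloads throw type_error("key not found"). A short usage sketch (the keys mirror the resume-file fields written later in this commit, the values are made up):

    libtorrent::entry e(libtorrent::entry::dictionary_t);

    // non-const: creates the key if it does not exist yet
    e["file-format"] = "libtorrent resume file";
    e["file-version"] = 1;

    // const: throws type_error if the key is missing
    const libtorrent::entry& ce = e;
    try
    {
        int version = (int)ce["file-version"].integer();
        (void)version;
    }
    catch (const libtorrent::type_error&)
    {
        // key not found (or the entry is not a dictionary)
    }
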
@@ -42,6 +42,9 @@ POSSIBILITY OF SUCH DAMAGE.
 #endif

 #include <boost/date_time/posix_time/posix_time.hpp>
+#include <boost/date_time/gregorian/gregorian_types.hpp>
+#include <boost/optional.hpp>
+#include <boost/filesystem/path.hpp>

 #ifdef _MSC_VER
 #pragma warning(pop)
@@ -53,21 +56,12 @@ POSSIBILITY OF SUCH DAMAGE.
 #include "libtorrent/size_type.hpp"


-/*
- * This file declares the following functions:
- *
- *
- *
- *
- */
-
 namespace libtorrent
 {

     struct file_entry
     {
-        std::string path;
-        std::string filename;
+        boost::filesystem::path path;
         size_type size;
     };

@@ -94,8 +88,14 @@ namespace libtorrent
         torrent_info(const entry& torrent_file);

         torrent_info(
-            const std::vector<file_entry>& files
-            , int piece_size);
+            int piece_size
+            , const char* name
+            , const char* comment = 0);

+        entry create_torrent(const char* created_by = 0) const;
+        void set_hash(int index, const sha1_hash& h);
+        void add_tracker(std::string const& url, int tier = 0);
+        void add_file(boost::filesystem::path file, size_type size);
+
         typedef std::vector<file_entry>::const_iterator file_iterator;
         typedef std::vector<file_entry>::const_reverse_iterator reverse_file_iterator;
@@ -124,21 +124,7 @@ namespace libtorrent
         void print(std::ostream& os) const;

         void convert_file_names();

-        int piece_size(int index) const
-        {
-            assert(index >= 0 && index < num_pieces());
-            if (index == num_pieces()-1)
-            {
-                int s = static_cast<int>(total_size()
-                    - (size_type)(num_pieces() - 1) * piece_length());
-                assert(s > 0);
-                assert(s <= piece_length());
-                return s;
-            }
-            else
-                return piece_length();
-        }
-
+        int piece_size(int index) const;
         const sha1_hash& hash_for_piece(int index) const
         {
@@ -147,8 +133,8 @@ namespace libtorrent
             return m_piece_hash[index];
         }

-        boost::posix_time::ptime creation_date() const
-        { return m_creation_date; }
+        boost::optional<boost::posix_time::ptime>
+            creation_date() const;

         const std::string& comment() const
         { return m_comment; }

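Note on the new torrent_info members declared above: add_file(), add_tracker(), set_hash() and create_torrent() make it possible to build a torrent from scratch rather than only parse one. A rough sketch of how they could be combined (piece size, file names, tracker URL and the placeholder hashes are invented; real code would hash the actual file contents and pass the digests to set_hash()):

    #include "libtorrent/torrent_info.hpp"

    using namespace libtorrent;

    torrent_info info(256 * 1024, "example_content");
    info.add_file("example_content/a.txt", 4000);
    info.add_file("example_content/b.bin", 1200000);
    info.add_tracker("http://tracker.example.com/announce");

    // one hash per piece is expected before create_torrent() is called
    for (int i = 0; i < info.num_pieces(); ++i)
        info.set_hash(i, sha1_hash()); // placeholder hashes, illustrative only

    entry torrent = info.create_torrent("example creator");
    // 'torrent' can then be bencoded to a .torrent file with bencode()
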
@@ -493,24 +493,18 @@ namespace libtorrent
     {
         // parse the response

-        entry::dictionary_type::const_iterator i = e.dict().find("failure reason");
-        if (i != e.dict().end())
+        try
         {
-            throw std::runtime_error(i->second.string().c_str());
+            const entry& failure = e["failure reason"];
+            throw std::runtime_error(failure.string().c_str());
         }
+        catch (const type_error&) {}

-        const entry::dictionary_type& msg = e.dict();
-        i = msg.find("interval");
-        if (i == msg.end()) throw std::runtime_error("invalid response from tracker (no interval)");
-
-        int interval = (int)i->second.integer();
-
-        i = msg.find("peers");
-        if (i == msg.end()) throw std::runtime_error("invalid response from tracker (no peers)");
+        int interval = e["interval"].integer();

         peer_list.clear();

-        const entry::list_type& l = i->second.list();
+        const entry::list_type& l = e["peers"].list();
         for(entry::list_type::const_iterator i = l.begin(); i != l.end(); ++i)
         {
             peer_entry p = extract_peer_info(*i);

@@ -865,11 +865,7 @@ namespace libtorrent
             try
             {
                 entry d = bdecode(m_recv_buffer.begin()+5, m_recv_buffer.end());
-                entry::dictionary_type::const_iterator i = d.dict().find("msg");
-                if (i == d.dict().end())
-                    throw protocol_error("CHAT message did not contain any 'msg'");
-
-                const std::string& str = i->second.string();
+                const std::string& str = d["msg"].string();

                 if (m_torrent->alerts().should_post(alert::critical))
                 {

@@ -72,14 +72,6 @@ namespace

     using namespace libtorrent;

-    float to_seconds(const boost::posix_time::time_duration& d)
-    {
-        return d.hours() * 60.f * 60.f
-            + d.minutes() * 60.f
-            + d.seconds()
-            + d.fractional_seconds() / 1000.f;
-    }
-
     // the case where ignore_peer is motivated is if two peers
     // have only one piece that we don't have, and it's the
     // same piece for both peers. Then they might get into an

@@ -162,6 +162,14 @@ namespace

 namespace libtorrent { namespace detail
 {
+
+    // This is the checker thread
+    // it is looping in an infinite loop
+    // until the session is aborted. It will
+    // normally just block in a wait() call,
+    // waiting for a signal from session that
+    // there's a new torrent to check.
+
     void checker_impl::operator()()
     {
         eh_initializer();
@@ -1013,21 +1021,21 @@ namespace libtorrent

         try
         {
-            if (rd.dict()["file-format"].string() != "libtorrent resume file")
+            if (rd["file-format"].string() != "libtorrent resume file")
                 return;

-            if (rd.dict()["file-version"].integer() != 1)
+            if (rd["file-version"].integer() != 1)
                 return;

             // verify info_hash
-            const std::string &hash = rd.dict()["info-hash"].string();
+            const std::string &hash = rd["info-hash"].string();
             std::string real_hash((char*)info.info_hash().begin(), (char*)info.info_hash().end());
             if (hash != real_hash)
                 return;

             // the peers

-            entry::list_type& peer_list = rd.dict()["peers"].list();
+            entry::list_type& peer_list = rd["peers"].list();

             std::vector<address> tmp_peers;
             tmp_peers.reserve(peer_list.size());
@@ -1036,8 +1044,8 @@ namespace libtorrent
             ++i)
         {
             address a(
-                i->dict()["ip"].string().c_str()
-                , (unsigned short)i->dict()["port"].integer());
+                (*i)["ip"].string().c_str()
+                , (unsigned short)(*i)["port"].integer());
             tmp_peers.push_back(a);
         }

@@ -1046,7 +1054,7 @@ namespace libtorrent


         // read piece map
-        const entry::list_type& slots = rd.dict()["slots"].list();
+        const entry::list_type& slots = rd["slots"].list();
         if ((int)slots.size() > info.num_pieces())
             return;

@@ -1117,13 +1125,11 @@ namespace libtorrent
                 , torrent_ptr->block_size()
                 , p.finished_blocks);

-            entry::dictionary_type::iterator ad = i->dict().find("adler32");
-            if (ad != i->dict().end())
-            {
-                // crc's didn't match, don't use the resume data
-                if (ad->second.integer() != adler)
-                    return;
-            }
+            const entry& ad = (*i)["adler32"];
+
+            // crc's didn't match, don't use the resume data
+            if (ad.integer() != adler)
+                return;

             tmp_unfinished.push_back(p);
         }
@@ -1131,7 +1137,7 @@ namespace libtorrent
         // verify file sizes

         std::vector<size_type> file_sizes;
-        entry::list_type& l = rd.dict()["file sizes"].list();
+        entry::list_type& l = rd["file sizes"].list();

 #if defined(_MSC_VER) && _MSC_VER < 1300
         for (entry::list_type::iterator i = l.begin();

@@ -130,7 +130,7 @@ namespace libtorrent
         size_type file_size;
         try
         {
-            file f(p / i->path / i->filename, file::in);
+            file f(p / t.name() / i->path, file::in);
             f.seek(0, file::end);
             file_size = f.tell();
         }
@@ -158,7 +158,7 @@ namespace libtorrent
         size_type file_size;
         try
         {
-            file f(p / i->path / i->filename, file::in);
+            file f(p / t.name() / i->path, file::in);
             f.seek(0, file::end);
             file_size = f.tell();
         }
@@ -297,7 +297,7 @@ namespace libtorrent
     }

     file in(
-        m_pimpl->save_path / file_iter->path / file_iter->filename
+        m_pimpl->save_path / m_pimpl->info.name() / file_iter->path
         , file::in);

     assert(file_offset < file_iter->size);
@@ -351,7 +351,7 @@ namespace libtorrent
         if (left_to_read > 0)
         {
             ++file_iter;
-            fs::path path = m_pimpl->save_path / file_iter->path / file_iter->filename;
+            fs::path path = m_pimpl->save_path / m_pimpl->info.name() / file_iter->path;

             file_offset = 0;
             in.open(path, file::in);
@@ -391,7 +391,7 @@ namespace libtorrent
         ++file_iter;
     }

-    fs::path path(m_pimpl->save_path / file_iter->path / file_iter->filename);
+    fs::path path(m_pimpl->save_path / m_pimpl->info.name() / file_iter->path);
     file out(path, file::out);

     assert(file_offset < file_iter->size);
@@ -447,7 +447,7 @@ namespace libtorrent

         assert(file_iter != m_pimpl->info.end_files());

-        fs::path path = m_pimpl->save_path / file_iter->path / file_iter->filename;
+        fs::path path = m_pimpl->save_path / m_pimpl->info.name() / file_iter->path;

         file_offset = 0;
         /*
@@ -962,7 +962,7 @@ namespace libtorrent
         file_iter != end_iter;
         ++file_iter)
     {
-        fs::path dir = m_save_path / file_iter->path;
+        fs::path dir = m_save_path / m_info.name() / file_iter->path;
         fs::create_directories(dir);
     }

@@ -303,13 +303,13 @@ namespace libtorrent

         entry ret(entry::dictionary_t);

-        ret.dict()["file-format"] = "libtorrent resume file";
-        ret.dict()["file-version"] = 1;
+        ret["file-format"] = "libtorrent resume file";
+        ret["file-version"] = 1;

         const sha1_hash& info_hash = t->torrent_file().info_hash();
-        ret.dict()["info-hash"] = std::string((char*)info_hash.begin(), (char*)info_hash.end());
+        ret["info-hash"] = std::string((char*)info_hash.begin(), (char*)info_hash.end());

-        ret.dict()["slots"] = entry(entry::list_t);
+        ret["slots"] = entry(entry::list_t);
         entry::list_type& slots = ret.dict()["slots"].list();
         std::copy(piece_index.begin(), piece_index.end(), std::back_inserter(slots));

@@ -321,12 +321,12 @@ namespace libtorrent
         // blocks per piece
         int num_blocks_per_piece =
             t->torrent_file().piece_length() / t->block_size();
-        ret.dict()["blocks per piece"] = num_blocks_per_piece;
+        ret["blocks per piece"] = num_blocks_per_piece;

         // num unfinished pieces
         int num_unfinished = (int)q.size();
-        ret.dict()["unfinished"] = entry::list_type();
-        entry::list_type& up = ret.dict()["unfinished"].list();
+        ret["unfinished"] = entry::list_type();
+        entry::list_type& up = ret["unfinished"].list();

         // info for each unfinished piece
         for (std::vector<piece_picker::downloading_piece>::const_iterator i
@@ -369,8 +369,8 @@ namespace libtorrent

         // write local peers

-        ret.dict()["peers"] = entry::list_type();
-        entry::list_type& peer_list = ret.dict()["peers"].list();
+        ret["peers"] = entry::list_type();
+        entry::list_type& peer_list = ret["peers"].list();

         for (torrent::const_peer_iterator i = t->begin();
             i != t->end();
@@ -390,11 +390,11 @@ namespace libtorrent
         std::vector<size_type> file_sizes
             = get_filesizes(t->torrent_file(), t->save_path());

-        ret.dict()["file sizes"] = entry::list_type();
+        ret["file sizes"] = entry::list_type();
         std::copy(
             file_sizes.begin()
             , file_sizes.end()
-            , std::back_inserter(ret.dict()["file sizes"].list()));
+            , std::back_inserter(ret["file sizes"].list()));

         return ret;
     }

@@ -44,6 +44,8 @@ POSSIBILITY OF SUCH DAMAGE.

 #include <boost/lexical_cast.hpp>
 #include <boost/date_time/time.hpp>
+#include <boost/date_time/gregorian/gregorian_types.hpp>
+#include <boost/filesystem/path.hpp>

 #ifdef _MSC_VER
 #pragma warning(pop)
@@ -58,44 +60,44 @@ using namespace libtorrent;

 namespace
 {
-    void extract_single_file(const entry::dictionary_type& dict, file_entry& target)
+    void extract_single_file(const entry& dict, file_entry& target)
     {
-        entry::dictionary_type::const_iterator i = dict.find("length");
-        if (i == dict.end()) throw invalid_torrent_file();
-        target.size = i->second.integer();
-
-        i = dict.find("path");
-        if (i == dict.end()) throw invalid_torrent_file();
-
-        const entry::list_type& list = i->second.list();
-        for (entry::list_type::const_iterator i = list.begin(); i != list.end()-1; ++i)
+        target.size = dict["length"].integer();
+        const entry::list_type& list = dict["path"].list();
+        for (entry::list_type::const_iterator i = list.begin();
+            i != list.end();
+            ++i)
         {
-            target.path += '/';
-            target.path += i->string();
+            target.path /= i->string();
         }
-        target.path += '/';
-        target.filename = list.back().string();
     }

-    void extract_files(const entry::list_type& list, std::vector<file_entry>& target, const std::string& root_directory)
+    void extract_files(const entry::list_type& list, std::vector<file_entry>& target)
     {
         for (entry::list_type::const_iterator i = list.begin(); i != list.end(); ++i)
         {
             target.push_back(file_entry());
-            target.back().path = root_directory;
-            extract_single_file(i->dict(), target.back());
+            extract_single_file(*i, target.back());
         }
     }
+
+    size_type to_seconds(const boost::posix_time::time_duration& d)
+    {
+        return d.hours() * 60 * 60
+            + d.minutes() * 60
+            + d.seconds();
+    }
 }

 namespace libtorrent
 {

+    using namespace boost::gregorian;
+    using namespace boost::posix_time;
+
     // standard constructor that parses a torrent file
     torrent_info::torrent_info(const entry& torrent_file)
-        : m_creation_date(boost::gregorian::date(1970
-        , boost::gregorian::Jan
-        , 1))
+        : m_creation_date(date(not_a_date_time))
     {
         try
         {
@@ -111,21 +113,17 @@ namespace libtorrent
     // will not contain any hashes, comments, creation date
     // just the necessary to use it with piece manager
     torrent_info::torrent_info(
-        const std::vector<file_entry>& files
-        , int piece_size)
+        int piece_size
+        , const char* name
+        , const char* comment)
         : m_piece_length(piece_size)
-        , m_files(files)
-        , m_creation_date(boost::gregorian::date(1970
-        , boost::gregorian::Jan
-        , 1))
+        , m_total_size(0)
+        , m_creation_date(second_clock::local_time())
     {
-        // calculate total size of all pieces
-        m_total_size = 0;
-        for (std::vector<file_entry>::iterator i = m_files.begin(); i != m_files.end(); ++i)
-            m_total_size += i->size;
-
-        int num_pieces = static_cast<int>((m_total_size + m_piece_length - 1) / m_piece_length);
         m_info_hash.clear();
+
+        if (comment)
+            m_comment = comment;
     }


@@ -182,8 +180,8 @@ namespace libtorrent
         if (i != dict.end() && i->second.type() == entry::int_t)
         {
             m_creation_date
-                = m_creation_date
-                + boost::posix_time::seconds((long)i->second.integer());
+                = ptime(date(1970, Jan, 1))
+                + seconds((long)i->second.integer());
         }

         // extract comment
@@ -205,14 +203,10 @@ namespace libtorrent
         m_info_hash = h.final();

         // extract piece length
-        i = info.dict().find("piece length");
-        if (i == info.dict().end()) throw invalid_torrent_file();
-        m_piece_length = (int)i->second.integer();
+        m_piece_length = (int)info["piece length"].integer();

         // extract file name (or the directory name if it's a multifile libtorrent)
-        i = info.dict().find("name");
-        if (i == info.dict().end()) throw invalid_torrent_file();
-        m_name = i->second.string();
+        m_name = info["name"].string();

         // extract file list
         i = info.dict().find("files");
@@ -220,16 +214,14 @@ namespace libtorrent
         {
             // if there's no list of files, there has to be a length
             // field.
-            i = info.dict().find("length");
-            if (i == info.dict().end()) throw invalid_torrent_file();
-
-            m_files.push_back(file_entry());
-            m_files.back().filename = m_name;
-            m_files.back().size = i->second.integer();
+            file_entry e;
+            e.path = m_name;
+            e.size = info["length"].integer();
+            m_files.push_back(e);
         }
         else
         {
-            extract_files(i->second.list(), m_files, m_name);
+            extract_files(i->second.list(), m_files);
         }

         // calculate total size of all pieces
@@ -240,13 +232,11 @@ namespace libtorrent
         // extract sha-1 hashes for all pieces
        // we want this division to round upwards, that's why we have the
         // extra addition

         int num_pieces = static_cast<int>((m_total_size + m_piece_length - 1) / m_piece_length);
-        i = info.dict().find("pieces");
-        if (i == info.dict().end()) throw invalid_torrent_file();

         m_piece_hash.resize(num_pieces);
+        const std::string& hash_string = info["pieces"].string();

-        const std::string& hash_string = i->second.string();
         if ((int)hash_string.length() != num_pieces * 20)
             throw invalid_torrent_file();
@@ -254,33 +244,157 @@ namespace libtorrent
             std::copy(hash_string.begin() + i*20, hash_string.begin() + (i+1)*20, m_piece_hash[i].begin());
     }

-    void torrent_info::convert_file_names()
-    {
-        for (std::vector<file_entry>::iterator i = m_files.begin(); i != m_files.end(); ++i)
-        {
-            // replace all dots in directory names with underscores
-            std::string& path = i->path;
-            std::string& filename = i->filename;
-            for (std::string::iterator c = path.begin(); c != path.end(); ++c)
-            {
-                if (*c == '.') *c = '_';
-                if (*c == ' ') *c = '_';
-                if (*c == '[') *c = '_';
-                if (*c == ']') *c = '_';
-            }
-
-            // replace all dots, but the last one,
-            // in file names with underscores
-            std::string::reverse_iterator last_dot
-                = std::find(filename.rbegin(), filename.rend(), '.');
-            for (std::string::reverse_iterator c = filename.rbegin(); c != filename.rend(); ++c)
-            {
-                if (c != last_dot && *c == '.') *c = '_';
-                if (*c == ' ') *c = '_';
-                if (*c == '[') *c = '_';
-                if (*c == ']') *c = '_';
-            }
-        }
-    }
+    boost::optional<boost::posix_time::ptime>
+    torrent_info::creation_date() const
+    {
+        if (m_creation_date !=
+            boost::posix_time::ptime(
+                boost::gregorian::date(
+                    boost::date_time::not_a_date_time)))
+        {
+            return m_creation_date;
+        }
+        return boost::optional<boost::posix_time::ptime>();
+    }
+
+    int torrent_info::piece_size(int index) const
+    {
+        assert(index >= 0 && index < num_pieces());
+        if (index == num_pieces()-1)
+        {
+            int s = static_cast<int>(total_size()
+                - (size_type)(num_pieces() - 1) * piece_length());
+            assert(s > 0);
+            assert(s <= piece_length());
+            return s;
+        }
+        else
+            return piece_length();
+    }
+
+    void torrent_info::add_tracker(std::string const& url, int tier)
+    {
+        announce_entry e;
+        e.url = url;
+        e.tier = tier;
+        m_urls.push_back(e);
+    }
+
+    void torrent_info::add_file(boost::filesystem::path file, size_type size)
+    {
+        file_entry e;
+        e.path = file;
+        e.size = size;
+        m_files.push_back(e);
+
+        m_total_size += size;
+
+        int num_pieces = static_cast<int>((m_total_size + m_piece_length - 1) / m_piece_length);
+
+        int old_num_pieces = m_piece_hash.size();
+
+        m_piece_hash.resize(num_pieces);
+        for (std::vector<sha1_hash>::iterator i = m_piece_hash.begin() + old_num_pieces;
+            i != m_piece_hash.end();
+            ++i)
+        {
+            i->clear();
+        }
+
+    }
+
+    entry torrent_info::create_torrent(const char* created_by) const
+    {
+        using namespace boost::gregorian;
+        using namespace boost::posix_time;
+
+        namespace fs = boost::filesystem;
+
+        entry dict(entry::dictionary_t);
+
+        if (m_urls.empty() || m_files.empty())
+        {
+            // TODO: throw something here
+            // throw
+            return entry();
+        }
+
+        dict["announce"] = m_urls.front().url;
+        if (!m_comment.empty())
+            dict["comment"] = m_comment;
+
+        dict["creation date"] =
+            to_seconds(m_creation_date - ptime(date(1970, Jan, 1)));
+
+        if (created_by)
+            dict["created by"] = std::string(created_by);
+
+        entry& info = dict["info"];
+        info = entry(entry::dictionary_t);
+
+        info["length"] = m_total_size;
+
+        if (m_files.size() == 1)
+        {
+            info["name"] = m_files.front().path.string();
+        }
+        else
+        {
+            info["name"] = m_name;
+        }
+
+        if (m_files.size() > 1)
+        {
+            entry& files = info["files"];
+            files = entry(entry::list_t);
+
+            for (std::vector<file_entry>::const_iterator i = m_files.begin();
+                i != m_files.end();
+                ++i)
+            {
+                files.list().push_back(entry(entry::dictionary_t));
+                entry& file_e = files.list().back();
+                file_e["length"] = i->size;
+                entry& path_e = file_e["path"];
+                path_e = entry(entry::list_t);
+
+                fs::path file_path(i->path);
+
+                for (fs::path::iterator j = file_path.begin();
+                    j != file_path.end();
+                    ++j)
+                {
+                    path_e.list().push_back(*j);
+                }
+            }
+        }
+
+        info["piece length"] = piece_length();
+        entry& pieces = info["pieces"];
+        pieces = entry(entry::string_t);
+
+        std::string& p = pieces.string();
+
+        for (std::vector<sha1_hash>::const_iterator i = m_piece_hash.begin();
+            i != m_piece_hash.end();
+            ++i)
+        {
+            p.append((char*)i->begin(), (char*)i->end());
+        }
+
+        return dict;
+    }
+
+    void torrent_info::set_hash(int index, const sha1_hash& h)
+    {
+        assert(index >= 0);
+        assert(index < m_piece_hash.size());
+        m_piece_hash[index] = h;
+    }
+
+    void torrent_info::convert_file_names()
+    {
+        assert(false);
+    }

     int torrent_info::prioritize_tracker(int index)
@@ -313,7 +427,7 @@ namespace libtorrent
         os << "piece length: " << piece_length() << "\n";
         os << "files:\n";
         for (file_iterator i = begin_files(); i != end_files(); ++i)
-            os << " " << std::setw(11) << i->size << " " << i->path << " " << i->filename << "\n";
+            os << " " << std::setw(11) << i->size << " " << i->path.string() << "\n";
     }

 }