use of std::unordered_multimap and fix in resolve_links::match (#1067)

parent 49ebef6eeb
commit 7a0da72813

@@ -33,12 +33,11 @@ POSSIBILITY OF SUCH DAMAGE.
 #ifndef TORRENT_RESOLVE_LINKS_HPP
 #define TORRENT_RESOLVE_LINKS_HPP
 
-#include "libtorrent/aux_/disable_warnings_push.hpp"
-#include <boost/unordered_map.hpp>
-#include "libtorrent/aux_/disable_warnings_pop.hpp"
-
 #include <vector>
 #include <utility>
+#include <unordered_map>
+#include <memory>
+#include <string>
 
 #include "libtorrent/export.hpp"

@@ -77,7 +76,7 @@ namespace libtorrent
 		std::vector<link_t> m_links;
 
 		// maps file size to file index, in m_torrent_file
-		boost::unordered_multimap<std::int64_t, int> m_file_sizes;
+		std::unordered_multimap<std::int64_t, int> m_file_sizes;
 	};
 
 #endif // TORRENT_DISABLE_MUTABLE_TORRENTS
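
m_file_sizes is keyed by file size, and nothing stops two files in a torrent from having the same size, so duplicate keys are expected. The point of the multimap (and of iterating equal_range() instead of calling find(), as the .cpp change below does) is that find() only ever hands back one of the entries with a given key. A minimal standalone sketch, not part of the patch, illustrating the difference:

// illustration only -- not libtorrent code
#include <cstdint>
#include <cstdio>
#include <unordered_map>

int main()
{
	std::unordered_multimap<std::int64_t, int> file_sizes;
	file_sizes.emplace(1024, 0); // two files with the same size
	file_sizes.emplace(1024, 1);

	// find() returns a single, unspecified entry with key 1024
	auto it = file_sizes.find(1024);
	std::printf("find: file index %d\n", it->second);

	// equal_range() exposes every entry with key 1024
	auto range = file_sizes.equal_range(1024);
	for (auto i = range.first; i != range.second; ++i)
		std::printf("equal_range: file index %d\n", i->second);
}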

@@ -84,48 +84,48 @@ void resolve_links::match(std::shared_ptr<const torrent_info> const& ti
 		std::int64_t file_size = fs.file_size(i);
 
-		auto iter = m_file_sizes.find(file_size);
-		// we don't have a file whose size matches, look at the next one
-		if (iter == m_file_sizes.end()) continue;
-
-		TORRENT_ASSERT(iter->second < m_torrent_file->files().num_files());
-		TORRENT_ASSERT(iter->second >= 0);
-
-		// if we already have found a duplicate for this file, no need
-		// to keep looking
-		if (m_links[iter->second].ti) continue;
-
-		// files are aligned and have the same size, now start comparing
-		// piece hashes, to see if the files are identical
-
-		// the pieces of the incoming file
-		int their_piece = fs.map_file(i, 0, 0).piece;
-		// the pieces of "this" file (from m_torrent_file)
-		int our_piece = m_torrent_file->files().map_file(
-			iter->second, 0, 0).piece;
-
-		int num_pieces = (file_size + piece_size - 1) / piece_size;
-
-		bool match = true;
-		for (int p = 0; p < num_pieces; ++p, ++their_piece, ++our_piece)
+		auto range = m_file_sizes.equal_range(file_size);
+		for (auto iter = range.first; iter != range.second; ++iter)
 		{
-			if (m_torrent_file->hash_for_piece(our_piece)
-				!= ti->hash_for_piece(their_piece))
+			TORRENT_ASSERT(iter->second < m_torrent_file->files().num_files());
+			TORRENT_ASSERT(iter->second >= 0);
+
+			// if we already have found a duplicate for this file, no need
+			// to keep looking
+			if (m_links[iter->second].ti) continue;
+
+			// files are aligned and have the same size, now start comparing
+			// piece hashes, to see if the files are identical
+
+			// the pieces of the incoming file
+			int their_piece = fs.map_file(i, 0, 0).piece;
+			// the pieces of "this" file (from m_torrent_file)
+			int our_piece = m_torrent_file->files().map_file(
+				iter->second, 0, 0).piece;
+
+			int num_pieces = (file_size + piece_size - 1) / piece_size;
+
+			bool match = true;
+			for (int p = 0; p < num_pieces; ++p, ++their_piece, ++our_piece)
 			{
-				match = false;
-				break;
+				if (m_torrent_file->hash_for_piece(our_piece)
+					!= ti->hash_for_piece(their_piece))
+				{
+					match = false;
+					break;
+				}
 			}
-		}
-		if (!match) continue;
+			if (!match) continue;
 
-		m_links[iter->second].ti = ti;
-		m_links[iter->second].save_path = save_path;
-		m_links[iter->second].file_idx = i;
+			m_links[iter->second].ti = ti;
+			m_links[iter->second].save_path = save_path;
+			m_links[iter->second].file_idx = i;
 
-		// since we have a duplicate for this file, we may as well remove
-		// it from the file-size map, so we won't find it again.
-		m_file_sizes.erase(iter);
+			// since we have a duplicate for this file, we may as well remove
+			// it from the file-size map, so we won't find it again.
+			m_file_sizes.erase(iter);
+			break;
+		}
 	}
 }
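
A side note on the loop added above: std::unordered_multimap::erase(iterator) invalidates the erased iterator, which is why the patch breaks out of the loop right after m_file_sizes.erase(iter) instead of letting the for-statement increment a dangling iterator. A minimal sketch of that pattern on the same container type (illustration only, not libtorrent code):

// illustration only -- erase the matched entry, then leave the loop
#include <cstdint>
#include <unordered_map>

void claim_first_match(std::unordered_multimap<std::int64_t, int>& sizes
	, std::int64_t key, int wanted)
{
	auto range = sizes.equal_range(key);
	for (auto iter = range.first; iter != range.second; ++iter)
	{
		if (iter->second != wanted) continue;
		sizes.erase(iter); // 'iter' is invalid from here on
		break;             // so stop iterating rather than ++iter
	}
}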

@@ -31,10 +31,14 @@ POSSIBILITY OF SUCH DAMAGE.
 */
 
 #include "test.hpp"
 
+#ifndef TORRENT_DISABLE_MUTABLE_TORRENTS
+
 #include "libtorrent/torrent_info.hpp"
 #include "libtorrent/resolve_links.hpp"
 #include "libtorrent/file.hpp" // for combine_path
 #include "libtorrent/hex.hpp" // to_hex
+#include "libtorrent/create_torrent.hpp"
 
 #include <functional>

@@ -87,8 +91,6 @@ static test_torrent_t test_torrents[] = {
 
 TORRENT_TEST(resolve_links)
 {
-#ifndef TORRENT_DISABLE_MUTABLE_TORRENTS
-
 	std::string path = combine_path(parent_path(current_working_directory())
 		, "mutable_test_torrents");
 
@@ -131,6 +133,48 @@ TORRENT_TEST(resolve_links)
 	TEST_EQUAL(num_matches, e.expected_matches);
 
 	}
-#endif // TORRENT_DISABLE_MUTABLE_TORRENTS
 }
 
+// this ensures that internally there is a range lookup,
+// since the zero-hash piece is in the second place
+TORRENT_TEST(range_lookup_duplicated_files)
+{
+	file_storage fs1;
+	file_storage fs2;
+
+	fs1.add_file("test_resolve_links_dir/tmp1", 1024);
+	fs1.add_file("test_resolve_links_dir/tmp2", 1024);
+	fs2.add_file("test_resolve_links_dir/tmp1", 1024);
+	fs2.add_file("test_resolve_links_dir/tmp2", 1024);
+
+	libtorrent::create_torrent t1(fs1, 1024);
+	libtorrent::create_torrent t2(fs2, 1024);
+
+	t1.set_hash(0, sha1_hash::max());
+
+	std::vector<char> tmp1;
+	std::vector<char> tmp2;
+	bencode(std::back_inserter(tmp1), t1.generate());
+	bencode(std::back_inserter(tmp2), t2.generate());
+	error_code ec;
+	auto ti1 = std::make_shared<torrent_info>(&tmp1[0], int(tmp1.size()), ec);
+	auto ti2 = std::make_shared<torrent_info>(&tmp2[0], int(tmp2.size()), ec);
+
+	std::fprintf(stderr, "resolving\n");
+	resolve_links l(ti1);
+	l.match(ti2, ".");
+
+	std::vector<resolve_links::link_t> const& links = l.get_links();
+
+	std::string::size_type num_matches = std::count_if(links.begin(), links.end()
+		, std::bind(&resolve_links::link_t::ti, _1));
+
+	TEST_EQUAL(num_matches, 1);
+}
+
+#else
+TORRENT_TEST(empty)
+{
+	TEST_CHECK(true);
+}
+#endif // TORRENT_DISABLE_MUTABLE_TORRENTS