From e4a27c0b4bf7ee3272b90fece91db9522b8be7c5 Mon Sep 17 00:00:00 2001
From: Alden Torres
Date: Tue, 6 Sep 2016 18:30:30 -0400
Subject: [PATCH] backport of fix in resolve_links::match (#1068)

backport of fix in resolve_links::match
---
 ChangeLog             |  1 +
 src/resolve_links.cpp | 76 ++++++++++++++++++++++---------------
 2 files changed, 40 insertions(+), 37 deletions(-)

diff --git a/ChangeLog b/ChangeLog
index da4c2fa53..669e78277 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,4 @@
+	* fix internal resolve links lookup for mutable torrents
 	* hint DHT bootstrap nodes of actual bootstrap request
 
 1.1.1 release

diff --git a/src/resolve_links.cpp b/src/resolve_links.cpp
index 510998e3e..b0fc40bfb 100644
--- a/src/resolve_links.cpp
+++ b/src/resolve_links.cpp
@@ -90,48 +90,50 @@ void resolve_links::match(boost::shared_ptr<const torrent_info> const& ti
 		boost::int64_t file_size = fs.file_size(i);
 
 		typedef boost::unordered_multimap<boost::int64_t, int>::iterator iterator;
-		iterator iter = m_file_sizes.find(file_size);
+		typedef std::pair<iterator, iterator> range_iterator;
 
-		// we don't have a file whose size matches, look at the next one
-		if (iter == m_file_sizes.end()) continue;
-
-		TORRENT_ASSERT(iter->second < m_torrent_file->files().num_files());
-		TORRENT_ASSERT(iter->second >= 0);
-
-		// if we already have found a duplicate for this file, no need
-		// to keep looking
-		if (m_links[iter->second].ti) continue;
-
-		// files are aligned and have the same size, now start comparing
-		// piece hashes, to see if the files are identical
-
-		// the pieces of the incoming file
-		int their_piece = fs.map_file(i, 0, 0).piece;
-		// the pieces of "this" file (from m_torrent_file)
-		int our_piece = m_torrent_file->files().map_file(
-			iter->second, 0, 0).piece;
-
-		int num_pieces = (file_size + piece_size - 1) / piece_size;
-
-		bool match = true;
-		for (int p = 0; p < num_pieces; ++p, ++their_piece, ++our_piece)
+		range_iterator range = m_file_sizes.equal_range(file_size);
+		for (iterator iter = range.first; iter != range.second; ++iter)
 		{
-			if (m_torrent_file->hash_for_piece(our_piece)
-				!= ti->hash_for_piece(their_piece))
+			TORRENT_ASSERT(iter->second < m_torrent_file->files().num_files());
+			TORRENT_ASSERT(iter->second >= 0);
+
+			// if we already have found a duplicate for this file, no need
+			// to keep looking
+			if (m_links[iter->second].ti) continue;
+
+			// files are aligned and have the same size, now start comparing
+			// piece hashes, to see if the files are identical
+
+			// the pieces of the incoming file
+			int their_piece = fs.map_file(i, 0, 0).piece;
+			// the pieces of "this" file (from m_torrent_file)
+			int our_piece = m_torrent_file->files().map_file(
+				iter->second, 0, 0).piece;
+
+			int num_pieces = (file_size + piece_size - 1) / piece_size;
+
+			bool match = true;
+			for (int p = 0; p < num_pieces; ++p, ++their_piece, ++our_piece)
 			{
-				match = false;
-				break;
+				if (m_torrent_file->hash_for_piece(our_piece)
+					!= ti->hash_for_piece(their_piece))
+				{
+					match = false;
+					break;
+				}
 			}
+			if (!match) continue;
+
+			m_links[iter->second].ti = ti;
+			m_links[iter->second].save_path = save_path;
+			m_links[iter->second].file_idx = i;
+
+			// since we have a duplicate for this file, we may as well remove
+			// it from the file-size map, so we won't find it again.
+			m_file_sizes.erase(iter);
+			break;
 		}
-		if (!match) continue;
-
-		m_links[iter->second].ti = ti;
-		m_links[iter->second].save_path = save_path;
-		m_links[iter->second].file_idx = i;
-
-		// since we have a duplicate for this file, we may as well remove
-		// it from the file-size map, so we won't find it again.
-		m_file_sizes.erase(iter);
 	}
 }
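
Note on the change (reviewer sketch, not part of the patch): m_file_sizes is a
multimap from file size to file index. The old code looked a size up with
find(), which returns at most one of possibly several entries with that key.
If that single candidate was already linked, or its piece hashes did not
match, the remaining same-size files were never considered, which is what
broke the resolve-links lookup for mutable torrents. The fix walks the whole
equal_range() instead, and erases only the entry that actually matched.

A minimal standalone sketch of the difference, using std::unordered_multimap
in place of the boost container, with invented sizes and indices:

	// find_vs_equal_range.cpp -- illustration only, not libtorrent code
	#include <cstdint>
	#include <iostream>
	#include <unordered_map>

	int main()
	{
		// like m_file_sizes: file size -> file index, duplicates allowed
		std::unordered_multimap<std::int64_t, int> file_sizes;
		file_sizes.emplace(1024, 0); // two distinct files that happen
		file_sizes.emplace(1024, 1); // to have the same size

		// old behavior: find() yields a single arbitrary entry per key;
		// if that one candidate fails, the other same-size file is skipped
		auto iter = file_sizes.find(1024);
		std::cout << "find() candidate: " << iter->second << "\n";

		// new behavior: equal_range() yields every entry with the key,
		// so each same-size file gets a chance to match
		auto range = file_sizes.equal_range(1024);
		for (auto it = range.first; it != range.second; ++it)
			std::cout << "equal_range() candidate: " << it->second << "\n";
	}

Compiles with any C++11 compiler (e.g. g++ -std=c++11): find() prints one
index, while the equal_range() loop prints both.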