fixed another fast resume bug

This commit is contained in:
Arvid Norberg 2007-10-09 07:26:57 +00:00
parent bd0a997854
commit 0681d3fd61
2 changed files with 5 additions and 9 deletions

View File

@@ -1092,14 +1092,14 @@ namespace libtorrent
for (last = m_slot_to_piece.rbegin();
last != m_slot_to_piece.rend(); ++last)
{
- if (*last != unallocated) break;
+ if (*last != unallocated && have[*last]) break;
}
for (std::vector<int>::const_iterator i =
m_slot_to_piece.begin();
i != last.base(); ++i)
{
- p.push_back(*i);
+ p.push_back(have[*i] ? *i : unassigned);
}
}
else
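
The hunk above sits inside the storage layer's export_piece_map loop; the change makes both loops consult the have bitfield, so a trailing slot is only kept if it holds a piece we actually have, and every exported slot either names a piece we have or is reported as unassigned. A minimal self-contained sketch of that behaviour follows (the class name, sentinel value and surrounding code are assumptions for illustration, not libtorrent's real declarations):

#include <vector>

namespace {

int const unassigned = -2; // assumed sentinel: slot holds data, but not a piece we have

struct piece_map_sketch
{
	std::vector<int> m_slot_to_piece; // slot -> piece index, or a negative sentinel

	// 'have' marks the pieces whose data has actually been verified
	void export_piece_map(std::vector<int>& p, std::vector<bool> const& have) const
	{
		// trim trailing slots that do not hold a piece we have
		std::vector<int>::const_reverse_iterator last = m_slot_to_piece.rbegin();
		for (; last != m_slot_to_piece.rend(); ++last)
		{
			if (*last >= 0 && have[*last]) break;
		}

		// every remaining slot either names a piece we have, or is reported
		// as unassigned so the exported map never claims unverified pieces
		for (std::vector<int>::const_iterator i = m_slot_to_piece.begin();
			i != last.base(); ++i)
		{
			p.push_back((*i >= 0 && have[*i]) ? *i : unassigned);
		}
	}
};

} // anonymous namespace

With this filtering, the exported map can no longer name a piece whose data was written to a slot but never verified, which is the kind of stale entry that makes fast resume data inconsistent.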

View File

@@ -661,7 +661,7 @@ namespace libtorrent
if (!t->valid_metadata()) return entry();
- t->filesystem().export_piece_map(piece_index, t->pieces());
+ std::vector<bool> have_pieces = t->pieces();
entry ret(entry::dictionary_t);
@@ -702,12 +702,7 @@ namespace libtorrent
// the unfinished piece's index
piece_struct["piece"] = i->index;
- // if the unfinished piece is not in the exported piece map, it means
- // we're in full or sparse storage mode, in which case we have
- // to insert the unfinished piece where it's stored
- if (std::find(piece_index.begin(), piece_index.end(), i->index)
- == piece_index.end() && i->index < int(piece_index.size()))
- piece_index[i->index] = i->index;
+ have_pieces[i->index] = true;
std::string bitmask;
const int num_bitmask_bytes
@@ -739,6 +734,7 @@ namespace libtorrent
}
}
+ t->filesystem().export_piece_map(piece_index, have_pieces);
entry::list_type& slots = ret["slots"].list();
std::copy(piece_index.begin(), piece_index.end(), std::back_inserter(slots));
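
Judging by the unfinished-piece handling and the "slots" list, this second file is the resume-data writer. The fix replaces the removed piece_index patching with a have_pieces bitfield: unfinished pieces are marked as present first, and export_piece_map is only called afterwards, with that bitfield. A sketch of that order of operations, using stand-in types and a simplified export helper (all names below are illustrative assumptions, not libtorrent's API):

#include <cstddef>
#include <vector>

namespace {

int const unassigned = -2; // assumed sentinel, as in the sketch above

struct unfinished_piece { int index; }; // stand-in for a partially downloaded piece

// simplified stand-in for piece_manager::export_piece_map(p, have)
void export_piece_map(std::vector<int>& p
	, std::vector<int> const& slot_to_piece
	, std::vector<bool> const& have)
{
	for (std::size_t s = 0; s < slot_to_piece.size(); ++s)
	{
		int const piece = slot_to_piece[s];
		p.push_back((piece >= 0 && have[piece]) ? piece : unassigned);
	}
}

// builds the "slots" list of the resume data: mark unfinished pieces as
// present first, then export the slot map against that combined bitfield
std::vector<int> resume_slots(std::vector<int> const& slot_to_piece
	, std::vector<bool> have_pieces // copy of t->pieces(), the verified pieces
	, std::vector<unfinished_piece> const& unfinished)
{
	// an unfinished piece still occupies its slot on disk, so it has to be
	// counted as present when the slot map is exported
	for (std::vector<unfinished_piece>::const_iterator i = unfinished.begin();
		i != unfinished.end(); ++i)
	{
		have_pieces[i->index] = true;
	}

	std::vector<int> piece_index;
	export_piece_map(piece_index, slot_to_piece, have_pieces);
	return piece_index;
}

} // anonymous namespace

The point of the reordering is that the "slots" list and the unfinished-piece entries are both derived from the same have_pieces set, instead of piece_index being patched up separately as the removed lines did.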