forked from premiere/premiere-libtorrent
factor out removing chunk headers from http streams and fix chunked encoding support when adding torrents by url
This commit is contained in:
parent f2238169c9
commit 403c49ac29
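For background: HTTP chunked transfer encoding frames the response body as a series of chunks, each introduced by a hexadecimal size line and terminated by CRLF, with a zero-size chunk marking the end. Before the downloaded body (here, a .torrent file or tracker response) can be parsed, that framing has to be stripped out. The snippet below is an illustration only, not code from this commit; it just shows what the framing looks like on the wire and what the payload should collapse to.

// Illustration only (not libtorrent code): a chunked HTTP body as received.
// Each chunk is a hex size line, CRLF, payload, CRLF; a zero-size chunk
// terminates the body. Stripping the framing yields "d8:announce",
// the start of a bencoded torrent dictionary.
#include <cstdio>

int main()
{
	char const chunked_body[] =
		"3\r\n"       // chunk header: 3 bytes follow
		"d8:"         // payload
		"\r\n"
		"8\r\n"       // chunk header: 8 bytes follow
		"announce"    // payload
		"\r\n"
		"0\r\n\r\n";  // terminating zero-size chunk
	std::puts(chunked_body);
	return 0;
}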
@@ -98,6 +98,13 @@ namespace libtorrent
 		// the start of the body.
 		bool chunked_encoding() const { return m_chunked_encoding; }
 
+		// removes the chunk headers from the supplied buffer. The buffer
+		// must be the stream received from the http server this parser
+		// instance parsed. It will use the internal chunk list to determine
+		// where the chunks are in the buffer. It returns the new length of
+		// the buffer
+		int collapse_chunk_headers(char* buffer, int size) const;
+
 		// returns false if the buffer doesn't contain a complete
 		// chunk header. In this case, call the function again with
 		// a bigger buffer once more bytes have been received.
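The new member compacts the payload in place, walking the parser's list of (start, end) chunk offsets and moving each chunk's bytes toward a write pointer. The sketch below re-implements that compaction outside libtorrent so it can be compiled and run on the example body shown above; the chunk list is written by hand here, whereas in libtorrent it comes from http_parser::chunks(), and the offsets here are already buffer-relative (the real implementation subtracts body_start() because its offsets are measured from the start of the full HTTP response).

// Standalone sketch of the compaction collapse_chunk_headers performs.
// Names and the hand-written chunk list are illustrative only.
#include <cstdio>
#include <cstring>
#include <utility>
#include <vector>

// moves every chunk's payload to the front of the buffer and returns
// the new (smaller) length
int collapse(char* buffer, int size, std::vector<std::pair<int, int> > const& chunks)
{
	char* write_ptr = buffer;
	for (std::vector<std::pair<int, int> >::const_iterator i = chunks.begin()
		, end(chunks.end()); i != end; ++i)
	{
		int len = i->second - i->first;
		// clamp against a truncated buffer
		if (i->first + len > size) len = size - i->first;
		std::memmove(write_ptr, buffer + i->first, len);
		write_ptr += len;
	}
	return int(write_ptr - buffer);
}

int main()
{
	// "3\r\n" "d8:" "\r\n" "8\r\n" "announce" "\r\n" "0\r\n\r\n"
	char buf[] = "3\r\nd8:\r\n8\r\nannounce\r\n0\r\n\r\n";
	std::vector<std::pair<int, int> > chunks;
	chunks.push_back(std::make_pair(3, 6));   // "d8:"
	chunks.push_back(std::make_pair(11, 19)); // "announce"
	int new_size = collapse(buf, int(sizeof(buf)) - 1, chunks);
	std::printf("%.*s\n", new_size, buf); // prints "d8:announce"
	return 0;
}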
@@ -621,28 +621,7 @@ void http_connection::callback(error_code e, char const* data, int size)
 	std::vector<char> buf;
 	if (data && m_bottled && m_parser.header_finished())
 	{
-		if (m_parser.chunked_encoding())
-		{
-			// go through all chunks and compact them
-			// since we're bottled, and the buffer is our after all
-			// it's OK to mutate it
-			char* write_ptr = (char*)data;
-			// the offsets in the array are from the start of the
-			// buffer, not start of the body, so subtract the size
-			// of the HTTP header from them
-			int offset = m_parser.body_start();
-			std::vector<std::pair<size_type, size_type> > const& chunks = m_parser.chunks();
-			for (std::vector<std::pair<size_type, size_type> >::const_iterator i = chunks.begin()
-				, end(chunks.end()); i != end; ++i)
-			{
-				TORRENT_ASSERT(i->second - i->first < INT_MAX);
-				int len = int(i->second - i->first);
-				if (i->first - offset + len > size) len = size - int(i->first) + offset;
-				memmove(write_ptr, data + i->first - offset, len);
-				write_ptr += len;
-			}
-			size = write_ptr - data;
-		}
+		size = m_parser.collapse_chunk_headers((char*)data, size);
 
 		std::string const& encoding = m_parser.header("content-encoding");
 		if ((encoding == "gzip" || encoding == "x-gzip") && size > 0 && data)
@@ -456,5 +456,31 @@ restart_response:
 		m_partial_chunk_header = 0;
 	}
 
+	int http_parser::collapse_chunk_headers(char* buffer, int size) const
+	{
+		if (!chunked_encoding()) return size;
+
+		// go through all chunks and compact them
+		// since we're bottled, and the buffer is ours after all
+		// it's OK to mutate it
+		char* write_ptr = (char*)buffer;
+		// the offsets in the array are from the start of the
+		// buffer, not start of the body, so subtract the size
+		// of the HTTP header from them
+		int offset = body_start();
+		std::vector<std::pair<size_type, size_type> > const& c = chunks();
+		for (std::vector<std::pair<size_type, size_type> >::const_iterator i = c.begin()
+			, end(c.end()); i != end; ++i)
+		{
+			TORRENT_ASSERT(i->second - i->first < INT_MAX);
+			TORRENT_ASSERT(i->second - offset <= size);
+			int len = int(i->second - i->first);
+			if (i->first - offset + len > size) len = size - int(i->first) + offset;
+			memmove(write_ptr, buffer + i->first - offset, len);
+			write_ptr += len;
+		}
+		size = write_ptr - buffer;
+		return size;
+	}
 }
 
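The offset comment in the implementation is worth a concrete example. The parser records chunk boundaries as offsets from the start of the received stream (HTTP header included), while the buffer handed to collapse_chunk_headers begins at the body, so body_start() has to be subtracted. The numbers below are illustrative only.

// Worked example of the offset translation (illustrative numbers only):
// a 150-byte HTTP header and a chunk spanning stream offsets [155, 190).
#include <cassert>

int main()
{
	int const body_start = 150;     // size of the HTTP header
	int const chunk_first = 155;    // stream offset where the payload begins
	int const chunk_second = 190;   // stream offset one past the payload
	int const len = chunk_second - chunk_first;
	assert(len == 35);
	assert(chunk_first - body_start == 5);    // start within the body buffer
	assert(chunk_second - body_start == 40);  // end within the body buffer
	return 0;
}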
@@ -582,7 +582,7 @@ namespace libtorrent
 		prioritize_udp_trackers();
 	}
 
-#if 1
+#if 0
 
 	// NON BOTTLED VERSION. SUPPORTS PROGRESS REPORTING
 
@@ -619,6 +619,11 @@ namespace libtorrent
 
 		if (!ec) return;
 
+		// if this was received with chunked encoding, we need to strip out
+		// the chunk headers
+		size = parser.collapse_chunk_headers((char*)&m_torrent_file_buf[0], m_torrent_file_buf.size());
+		m_torrent_file_buf.resize(size);
+
 		std::string const& encoding = parser.header("content-encoding");
 		if ((encoding == "gzip" || encoding == "x-gzip") && m_torrent_file_buf.size())
 		{
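This hunk is the "adding torrents by url" half of the fix: the chunk framing is removed before the optional gzip inflation and before the torrent data is decoded, otherwise the decoder would see hex chunk-size lines interleaved with the bencoded payload. The sketch below only illustrates that ordering; the helpers are hypothetical stubs, not libtorrent's actual API.

// Processing order in the add-by-URL path. strip_chunk_framing, gunzip and
// looks_like_bencoded_dict are stand-ins (hypothetical), kept trivial so
// the sketch compiles; only the order of the three steps matters.
#include <cstdio>
#include <string>
#include <vector>

static std::vector<char> strip_chunk_framing(std::vector<char> buf) { return buf; } // stub
static std::vector<char> gunzip(std::vector<char> buf) { return buf; }              // stub
static bool looks_like_bencoded_dict(std::vector<char> const& buf)
{ return !buf.empty() && buf[0] == 'd'; }                                            // stub

static bool parse_downloaded_torrent(std::vector<char> buf
	, bool chunked, std::string const& content_encoding)
{
	if (chunked) buf = strip_chunk_framing(buf);           // 1. transfer encoding
	if (content_encoding == "gzip" || content_encoding == "x-gzip")
		buf = gunzip(buf);                                 // 2. content encoding
	return looks_like_bencoded_dict(buf);                  // 3. parse the torrent
}

int main()
{
	char const raw[] = "d8:announce";
	std::vector<char> buf(raw, raw + sizeof(raw) - 1);
	std::printf("%s\n", parse_downloaded_torrent(buf, false, "") ? "ok" : "failed");
	return 0;
}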
@@ -764,7 +769,13 @@ namespace libtorrent
 #if defined TORRENT_DEBUG || TORRENT_RELEASE_ASSERTS
 		int num_torrents = m_ses.m_torrents.size();
 #endif
 		m_ses.m_torrents.erase(m_torrent_file->info_hash());
 
+		// we're about to erase the session's reference to this
+		// torrent, create another reference
+		boost::shared_ptr<torrent> me(shared_from_this());
+
+		m_ses.remove_torrent_impl(me, 0);
+
 		m_torrent_file = tf;
 		m_ses.m_torrents.insert(std::make_pair(m_torrent_file->info_hash(), shared_from_this()));
@@ -857,7 +868,7 @@ namespace libtorrent
 		boost::shared_ptr<http_connection> conn(
 			new http_connection(m_ses.m_io_service, m_ses.m_half_open
 				, boost::bind(&torrent::on_torrent_download, shared_from_this()
-				, _1, _2, _3, _4), false));
+				, _1, _2, _3, _4)));
 		conn->get(m_url, seconds(30), 0, 0, 5, m_ses.m_settings.user_agent);
 		set_state(torrent_status::downloading_metadata);
 	}