2014-09-24 14:16:56 +02:00
|
|
|
from __future__ import unicode_literals
|
|
|
|
|
|
|
|
import re
|
2021-04-28 12:47:30 +02:00
|
|
|
import io
|
2016-01-10 20:09:53 +01:00
|
|
|
import binascii
|
2013-09-23 17:59:27 +02:00
|
|
|
|
2021-02-08 17:46:01 +01:00
|
|
|
from ..downloader import _get_real_downloader
|
2021-06-21 20:59:50 +02:00
|
|
|
from .fragment import FragmentFD, can_decrypt_frag
|
2016-05-01 09:56:51 +02:00
|
|
|
from .external import FFmpegFD
|
2015-07-28 22:28:30 +02:00
|
|
|
|
2016-01-10 20:09:53 +01:00
|
|
|
from ..compat import (
|
|
|
|
compat_urlparse,
|
|
|
|
)
|
2014-12-13 12:24:42 +01:00
|
|
|
from ..utils import (
|
2016-01-10 20:09:53 +01:00
|
|
|
parse_m3u8_attributes,
|
2016-08-13 23:53:07 +02:00
|
|
|
update_url_query,
|
2021-04-28 12:47:30 +02:00
|
|
|
bug_reports_message,
|
2013-09-23 17:59:27 +02:00
|
|
|
)
|
2021-04-28 12:47:30 +02:00
|
|
|
from .. import webvtt
|
2013-09-23 17:59:27 +02:00
|
|
|
|
|
|
|
|
2016-02-19 19:29:24 +01:00
|
|
|
class HlsFD(FragmentFD):
    """
    Download segments in a m3u8 manifest. External downloaders can take over
    the fragment downloads by supporting the 'm3u8_frag_urls' protocol and
    re-defining 'supports_manifest' function
    """

    FD_NAME = 'hlsnative'

    @staticmethod
    def can_download(manifest, info_dict, allow_unplayable_formats=False, with_crypto=can_decrypt_frag):
        """
        Decide whether the native HLS downloader can handle this manifest.

        manifest -- raw m3u8 manifest text
        info_dict -- extraction info; only 'is_live' is consulted here
        allow_unplayable_formats -- if truthy, encrypted streams are not rejected
        with_crypto -- whether an AES-128 decryption backend is available

        Returns True when no unsupported feature is detected.
        """
        # Regexes matched against the manifest; any hit rules out native download.
        # References:
        # 1. https://tools.ietf.org/html/draft-pantos-http-live-streaming-17#section-4.3.2.4
        # 2. https://tools.ietf.org/html/draft-pantos-http-live-streaming-17#section-4.3.2.2
        # 3. https://tools.ietf.org/html/draft-pantos-http-live-streaming-17#section-4.3.3.2
        # 4. https://tools.ietf.org/html/draft-pantos-http-live-streaming-17#section-4.3.3.5
        # 5. https://tools.ietf.org/html/draft-pantos-http-live-streaming-17#section-4.3.2.5
        blockers = []
        if not allow_unplayable_formats:
            blockers.append(
                r'#EXT-X-KEY:METHOD=(?!NONE|AES-128)')  # encrypted streams [1]

        # Live streams are not supported natively.  (Heuristics such as
        # EXT-X-MEDIA-SEQUENCE != 0 or EXT-X-PLAYLIST-TYPE:EVENT proved
        # unreliable in the past, e.g. for geo-restricted streams or Twitch
        # VODs, so only the extractor-provided flag is trusted here.)
        if info_dict.get('is_live'):
            return False

        # AES-128 streams need a crypto backend, and AES-128 combined with
        # byte-range segments is not supported either.
        is_aes128_enc = '#EXT-X-KEY:METHOD=AES-128' in manifest
        if is_aes128_enc and not with_crypto:
            return False
        if is_aes128_enc and '#EXT-X-BYTERANGE' in manifest:
            return False

        return not any(re.search(pattern, manifest) for pattern in blockers)
|
2016-05-01 09:56:51 +02:00
|
|
|
|
2014-09-24 14:16:56 +02:00
|
|
|
    def real_download(self, filename, info_dict):
        """
        Download an HLS stream natively: parse the m3u8 media playlist into a
        fragment list, then either delegate the fragment downloads to an
        external downloader or fetch and append them here (re-packing WebVTT
        subtitle fragments on the fly).

        Returns True on success, False on failure.
        """
        man_url = info_dict['url']
        self.to_screen('[%s] Downloading m3u8 manifest' % self.FD_NAME)

        urlh = self.ydl.urlopen(self._prepare_url(info_dict, man_url))
        # Use the post-redirect URL so relative fragment URLs resolve correctly
        man_url = urlh.geturl()
        s = urlh.read().decode('utf-8', 'ignore')

        if not self.can_download(s, info_dict, self.params.get('allow_unplayable_formats')):
            if info_dict.get('extra_param_to_segment_url') or info_dict.get('_decryption_key_url'):
                self.report_error('pycryptodome not found. Please install')
                return False
            if self.can_download(s, info_dict, with_crypto=True):
                self.report_warning('pycryptodome is needed to download this file natively')
            # Unsupported features present: hand the whole download to ffmpeg
            fd = FFmpegFD(self.ydl, self.params)
            self.report_warning(
                '%s detected unsupported features; extraction will be delegated to %s' % (self.FD_NAME, fd.get_basename()))
            # TODO: Make progress updates work without hooking twice
            # for ph in self._progress_hooks:
            #     fd.add_progress_hook(ph)
            return fd.real_download(filename, info_dict)

        is_webvtt = info_dict['ext'] == 'vtt'
        if is_webvtt:
            real_downloader = None  # Packing the fragments is not currently supported for external downloader
        else:
            real_downloader = _get_real_downloader(info_dict, 'm3u8_frag_urls', self.params, None)
        if real_downloader and not real_downloader.supports_manifest(s):
            real_downloader = None
        if real_downloader:
            self.to_screen(
                '[%s] Fragment downloads will be delegated to %s' % (self.FD_NAME, real_downloader.get_basename()))

        # Ad-break markers used by Anvato and Uplynk manifests
        def is_ad_fragment_start(s):
            return (s.startswith('#ANVATO-SEGMENT-INFO') and 'type=ad' in s
                    or s.startswith('#UPLYNK-SEGMENT') and s.endswith(',ad'))

        def is_ad_fragment_end(s):
            return (s.startswith('#ANVATO-SEGMENT-INFO') and 'type=master' in s
                    or s.startswith('#UPLYNK-SEGMENT') and s.endswith(',segment'))

        fragments = []

        # First pass over the playlist: count media vs ad fragments so the
        # progress reporting in the fragment context is accurate
        media_frags = 0
        ad_frags = 0
        ad_frag_next = False
        for line in s.splitlines():
            line = line.strip()
            if not line:
                continue
            if line.startswith('#'):
                if is_ad_fragment_start(line):
                    ad_frag_next = True
                elif is_ad_fragment_end(line):
                    ad_frag_next = False
                continue
            if ad_frag_next:
                ad_frags += 1
                continue
            media_frags += 1

        ctx = {
            'filename': filename,
            'total_frags': media_frags,
            'ad_frags': ad_frags,
        }

        if real_downloader:
            self._prepare_external_frag_download(ctx)
        else:
            self._prepare_and_start_frag_download(ctx)

        # State shared across fragments (used by the WebVTT packer below)
        extra_state = ctx.setdefault('extra_state', {})

        format_index = info_dict.get('format_index')
        extra_query = None
        extra_param_to_segment_url = info_dict.get('extra_param_to_segment_url')
        if extra_param_to_segment_url:
            extra_query = compat_urlparse.parse_qs(extra_param_to_segment_url)
        i = 0
        media_sequence = 0
        decrypt_info = {'METHOD': 'NONE'}
        byte_range = {}
        discontinuity_count = 0
        frag_index = 0
        ad_frag_next = False
        # Second pass: build the fragment list while threading through the
        # current decryption key, byte range, media sequence number and
        # discontinuity count
        for line in s.splitlines():
            line = line.strip()
            if line:
                if not line.startswith('#'):
                    # A media segment URL
                    if format_index and discontinuity_count != format_index:
                        continue
                    if ad_frag_next:
                        continue
                    frag_index += 1
                    if frag_index <= ctx['fragment_index']:
                        # already downloaded (resuming an earlier run)
                        continue
                    frag_url = (
                        line
                        if re.match(r'^https?://', line)
                        else compat_urlparse.urljoin(man_url, line))
                    if extra_query:
                        frag_url = update_url_query(frag_url, extra_query)

                    fragments.append({
                        'frag_index': frag_index,
                        'url': frag_url,
                        'decrypt_info': decrypt_info,
                        'byte_range': byte_range,
                        'media_sequence': media_sequence,
                    })

                elif line.startswith('#EXT-X-MAP'):
                    # Media initialization section; must precede all segments
                    if format_index and discontinuity_count != format_index:
                        continue
                    if frag_index > 0:
                        self.report_error(
                            'Initialization fragment found after media fragments, unable to download')
                        return False
                    frag_index += 1
                    map_info = parse_m3u8_attributes(line[11:])
                    frag_url = (
                        map_info.get('URI')
                        if re.match(r'^https?://', map_info.get('URI'))
                        else compat_urlparse.urljoin(man_url, map_info.get('URI')))
                    if extra_query:
                        frag_url = update_url_query(frag_url, extra_query)

                    fragments.append({
                        'frag_index': frag_index,
                        'url': frag_url,
                        'decrypt_info': decrypt_info,
                        'byte_range': byte_range,
                        'media_sequence': media_sequence
                    })

                    if map_info.get('BYTERANGE'):
                        # BYTERANGE is "<length>[@<offset>]"; without an
                        # offset the range starts where the previous one ended
                        splitted_byte_range = map_info.get('BYTERANGE').split('@')
                        sub_range_start = int(splitted_byte_range[1]) if len(splitted_byte_range) == 2 else byte_range['end']
                        byte_range = {
                            'start': sub_range_start,
                            'end': sub_range_start + int(splitted_byte_range[0]),
                        }

                elif line.startswith('#EXT-X-KEY'):
                    decrypt_url = decrypt_info.get('URI')
                    decrypt_info = parse_m3u8_attributes(line[11:])
                    if decrypt_info['METHOD'] == 'AES-128':
                        if 'IV' in decrypt_info:
                            # IV is a 0x-prefixed hex string; pad to 128 bits
                            decrypt_info['IV'] = binascii.unhexlify(decrypt_info['IV'][2:].zfill(32))
                        if not re.match(r'^https?://', decrypt_info['URI']):
                            decrypt_info['URI'] = compat_urlparse.urljoin(
                                man_url, decrypt_info['URI'])
                        if extra_query:
                            decrypt_info['URI'] = update_url_query(decrypt_info['URI'], extra_query)
                        if decrypt_url != decrypt_info['URI']:
                            # Key URL changed: invalidate the cached key
                            decrypt_info['KEY'] = None

                elif line.startswith('#EXT-X-MEDIA-SEQUENCE'):
                    media_sequence = int(line[22:])
                elif line.startswith('#EXT-X-BYTERANGE'):
                    splitted_byte_range = line[17:].split('@')
                    sub_range_start = int(splitted_byte_range[1]) if len(splitted_byte_range) == 2 else byte_range['end']
                    byte_range = {
                        'start': sub_range_start,
                        'end': sub_range_start + int(splitted_byte_range[0]),
                    }
                elif is_ad_fragment_start(line):
                    ad_frag_next = True
                elif is_ad_fragment_end(line):
                    ad_frag_next = False
                elif line.startswith('#EXT-X-DISCONTINUITY'):
                    discontinuity_count += 1
                # NOTE(review): these counters advance for every non-blank
                # line that was not skipped via 'continue' above, i.e. also
                # for tag lines -- confirm this matches the intended
                # media-sequence accounting
                i += 1
                media_sequence += 1

        # We only download the first fragment during the test
        if self.params.get('test', False):
            fragments = [fragments[0] if fragments else None]

        if real_downloader:
            info_copy = info_dict.copy()
            info_copy['fragments'] = fragments
            fd = real_downloader(self.ydl, self.params)
            # TODO: Make progress updates work without hooking twice
            # for ph in self._progress_hooks:
            #     fd.add_progress_hook(ph)
            success = fd.real_download(filename, info_copy)
            if not success:
                return False
        else:
            if is_webvtt:
                def pack_fragment(frag_content, frag_index):
                    # Re-assemble a WebVTT fragment for appending: shift cue
                    # timings by the offset derived from X-TIMESTAMP-MAP,
                    # drop cues duplicated across fragment boundaries, and
                    # keep header/magic blocks only from the first fragment.
                    output = io.StringIO()
                    adjust = 0
                    for block in webvtt.parse_fragment(frag_content):
                        if isinstance(block, webvtt.CueBlock):
                            block.start += adjust
                            block.end += adjust

                            dedup_window = extra_state.setdefault('webvtt_dedup_window', [])
                            cue = block.as_json

                            # skip the cue if an identical one appears
                            # in the window of potential duplicates
                            # and prune the window of unviable candidates
                            i = 0
                            skip = True
                            while i < len(dedup_window):
                                window_cue = dedup_window[i]
                                if window_cue == cue:
                                    break
                                if window_cue['end'] >= cue['start']:
                                    i += 1
                                    continue
                                del dedup_window[i]
                            else:
                                # loop completed without break: no duplicate
                                skip = False

                            if skip:
                                continue

                            # add the cue to the window
                            dedup_window.append(cue)
                        elif isinstance(block, webvtt.Magic):
                            # take care of MPEG PES timestamp overflow
                            # (the PES clock wraps at 2**33 ticks)
                            if block.mpegts is None:
                                block.mpegts = 0
                            extra_state.setdefault('webvtt_mpegts_adjust', 0)
                            block.mpegts += extra_state['webvtt_mpegts_adjust'] << 33
                            if block.mpegts < extra_state.get('webvtt_mpegts_last', 0):
                                extra_state['webvtt_mpegts_adjust'] += 1
                                block.mpegts += 1 << 33
                            extra_state['webvtt_mpegts_last'] = block.mpegts

                            if frag_index == 1:
                                extra_state['webvtt_mpegts'] = block.mpegts or 0
                                extra_state['webvtt_local'] = block.local or 0
                                # XXX: block.local = block.mpegts = None ?
                            else:
                                if block.mpegts is not None and block.local is not None:
                                    adjust = (
                                        (block.mpegts - extra_state.get('webvtt_mpegts', 0))
                                        - (block.local - extra_state.get('webvtt_local', 0))
                                    )
                                # only the first fragment's magic is emitted
                                continue
                        elif isinstance(block, webvtt.HeaderBlock):
                            if frag_index != 1:
                                # XXX: this should probably be silent as well
                                # or verify that all segments contain the same data
                                self.report_warning(bug_reports_message(
                                    'Discarding a %s block found in the middle of the stream; '
                                    'if the subtitles display incorrectly,'
                                    % (type(block).__name__)))
                                continue
                        block.write_into(output)

                    return output.getvalue().encode('utf-8')
            else:
                pack_fragment = None
            self.download_and_append_fragments(ctx, fragments, info_dict, pack_fragment)
        return True
|