Add option `--throttled-rate` below which video data is re-extracted

Currently only for HTTP downloads

Closes #430, workaround for https://github.com/ytdl-org/youtube-dl/issues/29326
pukkandan 4 years ago
parent 4c7853de14
commit 51d9739f80
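
For orientation: on the command line the new option is used as e.g. `yt-dlp --throttled-rate 100K URL`; when embedding, the same limit is passed as the `throttledratelimit` downloader parameter (in bytes/sec), as documented in the diff below. A minimal sketch with a placeholder URL:

    from yt_dlp import YoutubeDL

    # Re-extract whenever the measured download speed stays below 100 KiB/s
    with YoutubeDL({'throttledratelimit': 100 * 1024}) as ydl:
        ydl.download(['https://www.youtube.com/watch?v=XXXXXXXXXXX'])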

@@ -101,6 +101,7 @@ from .utils import (
     str_or_none,
     strftime_or_none,
     subtitles_filename,
+    ThrottledDownload,
     to_high_limit_path,
     traverse_obj,
     UnavailableVideoError,
@@ -398,10 +399,9 @@ class YoutubeDL(object):
     The following parameters are not used by YoutubeDL itself, they are used by
     the downloader (see yt_dlp/downloader/common.py):
-    nopart, updatetime, buffersize, ratelimit, min_filesize, max_filesize, test,
-    noresizebuffer, retries, continuedl, noprogress, consoletitle,
-    xattr_set_filesize, external_downloader_args, hls_use_mpegts,
-    http_chunk_size.
+    nopart, updatetime, buffersize, ratelimit, throttledratelimit, min_filesize,
+    max_filesize, test, noresizebuffer, retries, continuedl, noprogress, consoletitle,
+    xattr_set_filesize, external_downloader_args, hls_use_mpegts, http_chunk_size.

     The following options are used by the post processors:
     prefer_ffmpeg: If False, use avconv instead of ffmpeg if both are available,
@@ -1145,6 +1145,10 @@ class YoutubeDL(object):
                 self.report_error(msg)
             except ExtractorError as e:  # An error we somewhat expected
                 self.report_error(compat_str(e), e.format_traceback())
+            except ThrottledDownload:
+                self.to_stderr('\r')
+                self.report_warning('The download speed is below throttle limit. Re-extracting data')
+                return wrapper(self, *args, **kwargs)
             except (MaxDownloadsReached, ExistingVideoReached, RejectedVideoReached):
                 raise
             except Exception as e:
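
The handler above retries by calling `wrapper` again, so a throttled HTTP download triggers a fresh extraction (and therefore fresh download URLs) instead of aborting. The same idea, written as a standalone loop rather than recursion; `extract_and_download` is an illustrative stand-in for the wrapped method, not a real function:

    from yt_dlp.utils import ThrottledDownload

    def run_with_reextraction(extract_and_download, url):
        """Keep re-running extraction + download while the speed stays throttled."""
        while True:
            try:
                return extract_and_download(url)
            except ThrottledDownload:
                # A fresh extraction usually yields different media URLs,
                # which is what makes retrying worthwhile
                continue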

@@ -151,6 +151,11 @@ def _real_main(argv=None):
         if numeric_limit is None:
             parser.error('invalid rate limit specified')
         opts.ratelimit = numeric_limit
+    if opts.throttledratelimit is not None:
+        numeric_limit = FileDownloader.parse_bytes(opts.throttledratelimit)
+        if numeric_limit is None:
+            parser.error('invalid rate limit specified')
+        opts.throttledratelimit = numeric_limit
     if opts.min_filesize is not None:
         numeric_limit = FileDownloader.parse_bytes(opts.min_filesize)
         if numeric_limit is None:
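
The value is run through the same `FileDownloader.parse_bytes` helper as `--limit-rate`, so it accepts the usual human-readable suffixes. A quick illustration (expected results shown as comments, assuming parse_bytes' usual 1024-based multipliers):

    from yt_dlp.downloader.common import FileDownloader

    # parse_bytes() converts a rate string into bytes/sec, or None if it is invalid
    print(FileDownloader.parse_bytes('100K'))        # 102400
    print(FileDownloader.parse_bytes('4.2M'))        # 4404019
    print(FileDownloader.parse_bytes('not-a-rate'))  # None
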
@@ -552,6 +557,7 @@ def _real_main(argv=None):
         'ignoreerrors': opts.ignoreerrors,
         'force_generic_extractor': opts.force_generic_extractor,
         'ratelimit': opts.ratelimit,
+        'throttledratelimit': opts.throttledratelimit,
         'overwrites': opts.overwrites,
         'retries': opts.retries,
         'fragment_retries': opts.fragment_retries,

@@ -14,6 +14,7 @@ from ..utils import (
     format_bytes,
     shell_quote,
     timeconvert,
+    ThrottledDownload,
 )
@@ -32,6 +33,7 @@ class FileDownloader(object):
     verbose: Print additional info to stdout.
     quiet: Do not print messages to stdout.
     ratelimit: Download speed limit, in bytes/sec.
+    throttledratelimit: Assume the download is being throttled below this speed (bytes/sec)
     retries: Number of times to retry for HTTP error 5xx
     buffersize: Size of download buffer in bytes.
     noresizebuffer: Do not automatically resize the download buffer.
@@ -170,7 +172,7 @@ class FileDownloader(object):
     def slow_down(self, start_time, now, byte_counter):
         """Sleep if the download speed is over the rate limit."""
         rate_limit = self.params.get('ratelimit')
-        if rate_limit is None or byte_counter == 0:
+        if byte_counter == 0:
             return
         if now is None:
             now = time.time()
@@ -178,7 +180,7 @@ class FileDownloader(object):
         if elapsed <= 0.0:
             return
         speed = float(byte_counter) / elapsed
-        if speed > rate_limit:
+        if rate_limit is not None and speed > rate_limit:
             sleep_time = float(byte_counter) / rate_limit - elapsed
             if sleep_time > 0:
                 time.sleep(sleep_time)
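
The two edits above only move the `rate_limit` guard: `slow_down()` now returns early only when nothing has been downloaded yet, and the `None` check sits next to the comparison, so the method is safe to call whether or not `--limit-rate` is set. The sleep formula itself is unchanged; a worked instance of it:

    # Sleep just long enough that byte_counter / (elapsed + sleep_time) == rate_limit
    byte_counter = 4 * 1024 * 1024      # 4 MiB received since start_time
    elapsed = 3.0                       # seconds spent so far
    rate_limit = 1024 * 1024            # --limit-rate 1M, i.e. 1 MiB/s

    speed = float(byte_counter) / elapsed                     # ~1.33 MiB/s, over the limit
    sleep_time = float(byte_counter) / rate_limit - elapsed   # 4.0 - 3.0 = 1.0 second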

@@ -18,6 +18,7 @@ from ..utils import (
     int_or_none,
     sanitize_open,
     sanitized_Request,
+    ThrottledDownload,
     write_xattr,
     XAttrMetadataError,
     XAttrUnavailableError,
@@ -223,6 +224,7 @@ class HttpFD(FileDownloader):
             # measure time over whole while-loop, so slow_down() and best_block_size() work together properly
             now = None  # needed for slow_down() in the first loop run
             before = start  # start measuring
+            throttle_start = None

             def retry(e):
                 to_stdout = ctx.tmpfilename == '-'
@@ -313,6 +315,18 @@ class HttpFD(FileDownloader):
                 if data_len is not None and byte_counter == data_len:
                     break

+                if speed and speed < (self.params.get('throttledratelimit') or 0):
+                    # The speed must stay below the limit for 3 seconds
+                    # This prevents raising error when the speed temporarily goes down
+                    if throttle_start is None:
+                        throttle_start = now
+                    elif now - throttle_start > 3:
+                        if ctx.stream is not None and ctx.tmpfilename != '-':
+                            ctx.stream.close()
+                        raise ThrottledDownload()
+                else:
+                    throttle_start = None
+
             if not is_test and ctx.chunk_size and ctx.data_len is not None and byte_counter < ctx.data_len:
                 ctx.resume_len = byte_counter
                 # ctx.block_size = block_size
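
In words: the loop remembers when the measured speed first dropped below `--throttled-rate` and raises `ThrottledDownload` only once it has stayed below the limit for more than 3 seconds, so momentary dips do not abort the download. A compact standalone sketch of that state machine (the class and method names are illustrative, not part of the codebase):

    import time

    from yt_dlp.utils import ThrottledDownload

    class ThrottleDetector:
        """Raise ThrottledDownload once speed stays below `limit` for `window` seconds."""

        def __init__(self, limit, window=3):
            self.limit, self.window = limit, window
            self.below_since = None

        def feed(self, speed, now=None):
            now = time.time() if now is None else now
            if speed and self.limit and speed < self.limit:
                if self.below_since is None:
                    self.below_since = now  # first sample under the limit
                elif now - self.below_since > self.window:
                    raise ThrottledDownload()  # sustained throttling: give up and re-extract
            else:
                self.below_since = None  # speed recovered, reset the window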

@@ -599,6 +599,10 @@ def parseOpts(overrideArguments=None):
         '-r', '--limit-rate', '--rate-limit',
         dest='ratelimit', metavar='RATE',
         help='Maximum download rate in bytes per second (e.g. 50K or 4.2M)')
+    downloader.add_option(
+        '--throttled-rate',
+        dest='throttledratelimit', metavar='RATE',
+        help='Minimum download rate in bytes per second below which throttling is assumed and the video data is re-extracted (e.g. 100K)')
     downloader.add_option(
         '-R', '--retries',
         dest='retries', metavar='RETRIES', default=10,

@@ -2504,6 +2504,11 @@ class RejectedVideoReached(YoutubeDLError):
     pass


+class ThrottledDownload(YoutubeDLError):
+    """ Download speed below --throttled-rate. """
+    pass
+
+
 class MaxDownloadsReached(YoutubeDLError):
     """ --max-downloads limit has been reached. """
     pass
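
Because the new exception derives from `YoutubeDLError`, embedding code that already catches the base class keeps working unchanged; only callers that want the re-extraction behaviour need to handle it specifically. A minimal illustration:

    from yt_dlp.utils import ThrottledDownload, YoutubeDLError

    try:
        raise ThrottledDownload()
    except YoutubeDLError:
        # ThrottledDownload is a YoutubeDLError, so broad handlers still see it
        pass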
