Add option `--file-access-retries` (#2066)

Closes #517
Authored by: ehoogeveen-medweb (Emanuel Hoogeveen)
parent 663949f825
commit 205a0654c0

README.md
@@ -463,6 +463,8 @@ You can also fork the project on github and run your fork's [build workflow](.gi
                                      video data is re-extracted (e.g. 100K)
     -R, --retries RETRIES            Number of retries (default is 10), or
                                      "infinite"
+    --file-access-retries RETRIES    Number of times to retry on file access error
+                                     (default is 10), or "infinite"
     --fragment-retries RETRIES       Number of retries for a fragment (default
                                      is 10), or "infinite" (DASH, hlsnative and
                                      ISM)
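
In practice the new flag slots in next to the existing retry options. A minimal usage sketch (the URL is a placeholder; `yt_dlp.main` is the same entry point the `yt-dlp` executable uses):

    # Equivalent command lines:
    #   yt-dlp --file-access-retries 5 "https://example.com/video"
    #   yt-dlp --file-access-retries infinite "https://example.com/video"
    import yt_dlp

    yt_dlp.main(['--file-access-retries', '5', 'https://example.com/video'])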

yt_dlp/YoutubeDL.py
@@ -450,8 +450,8 @@ class YoutubeDL(object):
     The following parameters are not used by YoutubeDL itself, they are used by
     the downloader (see yt_dlp/downloader/common.py):
     nopart, updatetime, buffersize, ratelimit, throttledratelimit, min_filesize,
-    max_filesize, test, noresizebuffer, retries, fragment_retries, continuedl,
-    noprogress, xattr_set_filesize, hls_use_mpegts, http_chunk_size,
+    max_filesize, test, noresizebuffer, retries, file_access_retries, fragment_retries,
+    continuedl, noprogress, xattr_set_filesize, hls_use_mpegts, http_chunk_size,
     external_downloader_args, concurrent_fragment_downloads.

     The following options are used by the post processors:

yt_dlp/__init__.py
@@ -222,6 +222,8 @@ def _real_main(argv=None):
         return parsed_retries
     if opts.retries is not None:
         opts.retries = parse_retries(opts.retries)
+    if opts.file_access_retries is not None:
+        opts.file_access_retries = parse_retries(opts.file_access_retries, 'file access ')
     if opts.fragment_retries is not None:
         opts.fragment_retries = parse_retries(opts.fragment_retries, 'fragment ')
     if opts.extractor_retries is not None:
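
For context, `parse_retries` is defined just above this hunk (the `return parsed_retries` context line is its tail). A rough sketch of what it does, inferred from the surrounding code (`parser` is the option parser in scope inside `_real_main`):

    def parse_retries(retries, name=''):
        # 'inf'/'infinite' map to float('inf'); anything else must parse as an int
        if retries in ('inf', 'infinite'):
            return float('inf')
        try:
            return int(retries)
        except (TypeError, ValueError):
            parser.error('invalid %sretry count specified' % name)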
@@ -673,6 +675,7 @@ def _real_main(argv=None):
         'throttledratelimit': opts.throttledratelimit,
         'overwrites': opts.overwrites,
         'retries': opts.retries,
+        'file_access_retries': opts.file_access_retries,
         'fragment_retries': opts.fragment_retries,
         'extractor_retries': opts.extractor_retries,
         'skip_unavailable_fragments': opts.skip_unavailable_fragments,
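
Because the value is forwarded verbatim into the YoutubeDL params, embedders can use the option too; pass a number (or float('inf')) directly, since the CLI-side parse_retries conversion does not run for API use. A minimal sketch:

    import yt_dlp

    ydl_opts = {
        'retries': 10,             # existing HTTP 5xx retry count
        'file_access_retries': 5,  # new: retry count for EACCES on file open
    }
    with yt_dlp.YoutubeDL(ydl_opts) as ydl:
        ydl.download(['https://example.com/video'])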

yt_dlp/downloader/common.py
@@ -4,12 +4,14 @@ import os
 import re
 import time
 import random
+import errno

 from ..utils import (
     decodeArgument,
     encodeFilename,
     error_to_compat_str,
     format_bytes,
+    sanitize_open,
     shell_quote,
     timeconvert,
     timetuple_from_msec,
@@ -39,6 +41,7 @@ class FileDownloader(object):
     ratelimit:          Download speed limit, in bytes/sec.
     throttledratelimit: Assume the download is being throttled below this speed (bytes/sec)
     retries:            Number of times to retry for HTTP error 5xx
+    file_access_retries:   Number of times to retry on file access error
     buffersize:         Size of download buffer in bytes.
     noresizebuffer:     Do not automatically resize the download buffer.
     continuedl:         Try to continue downloads if possible.
@@ -207,6 +210,21 @@ class FileDownloader(object):
     def ytdl_filename(self, filename):
         return filename + '.ytdl'

+    def sanitize_open(self, filename, open_mode):
+        file_access_retries = self.params.get('file_access_retries', 10)
+        retry = 0
+        while True:
+            try:
+                return sanitize_open(filename, open_mode)
+            except (IOError, OSError) as err:
+                retry = retry + 1
+                if retry > file_access_retries or err.errno not in (errno.EACCES,):
+                    raise
+                self.to_screen(
+                    '[download] Got file access error. Retrying (attempt %d of %s) ...'
+                    % (retry, self.format_retries(file_access_retries)))
+                time.sleep(0.01)
+
     def try_rename(self, old_filename, new_filename):
         if old_filename == new_filename:
             return
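
Note how narrow the retry condition is: only EACCES (typically a transient lock held by antivirus or indexing software, most common on Windows) is retried, with a 10 ms sleep between attempts; any other OSError propagates immediately. A standalone sketch of the same loop's behavior, simulating an open that fails twice before succeeding (names here are illustrative, not from the diff):

    import errno

    attempts = {'n': 0}

    def flaky_open():
        # Raise EACCES twice, then succeed, mimicking a transient file lock
        attempts['n'] += 1
        if attempts['n'] <= 2:
            raise OSError(errno.EACCES, 'Permission denied')
        return 'handle'

    retry, file_access_retries = 0, 10
    while True:
        try:
            print(flaky_open())  # prints 'handle' on the third attempt
            break
        except OSError as err:
            retry += 1
            if retry > file_access_retries or err.errno not in (errno.EACCES,):
                raise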

yt_dlp/downloader/external.py
@@ -22,7 +22,6 @@ from ..utils import (
     handle_youtubedl_headers,
     check_executable,
     Popen,
-    sanitize_open,
 )
@@ -144,11 +143,11 @@ class ExternalFD(FragmentFD):
             return -1

         decrypt_fragment = self.decrypter(info_dict)
-        dest, _ = sanitize_open(tmpfilename, 'wb')
+        dest, _ = self.sanitize_open(tmpfilename, 'wb')
         for frag_index, fragment in enumerate(info_dict['fragments']):
             fragment_filename = '%s-Frag%d' % (tmpfilename, frag_index)
             try:
-                src, _ = sanitize_open(fragment_filename, 'rb')
+                src, _ = self.sanitize_open(fragment_filename, 'rb')
             except IOError as err:
                 if skip_unavailable_fragments and frag_index > 1:
                     self.report_skip_fragment(frag_index, err)
@@ -290,7 +289,7 @@ class Aria2cFD(ExternalFD):
             for frag_index, fragment in enumerate(info_dict['fragments']):
                 fragment_filename = '%s-Frag%d' % (os.path.basename(tmpfilename), frag_index)
                 url_list.append('%s\n\tout=%s' % (fragment['url'], fragment_filename))
-            stream, _ = sanitize_open(url_list_file, 'wb')
+            stream, _ = self.sanitize_open(url_list_file, 'wb')
             stream.write('\n'.join(url_list).encode('utf-8'))
             stream.close()
             cmd += ['-i', url_list_file]

yt_dlp/downloader/fragment.py
@@ -24,7 +24,6 @@ from ..utils import (
     DownloadError,
     error_to_compat_str,
     encodeFilename,
-    sanitize_open,
     sanitized_Request,
 )
@@ -96,7 +95,7 @@ class FragmentFD(FileDownloader):
     def _read_ytdl_file(self, ctx):
         assert 'ytdl_corrupt' not in ctx
-        stream, _ = sanitize_open(self.ytdl_filename(ctx['filename']), 'r')
+        stream, _ = self.sanitize_open(self.ytdl_filename(ctx['filename']), 'r')
         try:
             ytdl_data = json.loads(stream.read())
             ctx['fragment_index'] = ytdl_data['downloader']['current_fragment']['index']
@@ -108,7 +107,7 @@ class FragmentFD(FileDownloader):
         stream.close()

     def _write_ytdl_file(self, ctx):
-        frag_index_stream, _ = sanitize_open(self.ytdl_filename(ctx['filename']), 'w')
+        frag_index_stream, _ = self.sanitize_open(self.ytdl_filename(ctx['filename']), 'w')
         try:
             downloader = {
                 'current_fragment': {
@@ -140,7 +139,7 @@ class FragmentFD(FileDownloader):
         return True, self._read_fragment(ctx)

     def _read_fragment(self, ctx):
-        down, frag_sanitized = sanitize_open(ctx['fragment_filename_sanitized'], 'rb')
+        down, frag_sanitized = self.sanitize_open(ctx['fragment_filename_sanitized'], 'rb')
         ctx['fragment_filename_sanitized'] = frag_sanitized
         frag_content = down.read()
         down.close()
@@ -216,7 +215,7 @@ class FragmentFD(FileDownloader):
             self._write_ytdl_file(ctx)
             assert ctx['fragment_index'] == 0

-        dest_stream, tmpfilename = sanitize_open(tmpfilename, open_mode)
+        dest_stream, tmpfilename = self.sanitize_open(tmpfilename, open_mode)

         ctx.update({
             'dl': dl,
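
For context on what these wrapped opens touch: the `.ytdl` file is a small JSON resume record. Its implied shape, as read and written above (a sketch; other keys may exist):

    # Contents of '<filename>.ytdl', per _read_ytdl_file/_write_ytdl_file:
    ytdl_data = {
        'downloader': {
            'current_fragment': {
                'index': 42,  # restored into ctx['fragment_index'] on resume
            },
        },
    }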

yt_dlp/downloader/http.py
@@ -16,7 +16,6 @@ from ..utils import (
     ContentTooShortError,
     encodeFilename,
     int_or_none,
-    sanitize_open,
     sanitized_Request,
     ThrottledDownload,
     write_xattr,
@@ -263,7 +262,7 @@ class HttpFD(FileDownloader):
         # Open destination file just in time
         if ctx.stream is None:
             try:
-                ctx.stream, ctx.tmpfilename = sanitize_open(
+                ctx.stream, ctx.tmpfilename = self.sanitize_open(
                     ctx.tmpfilename, ctx.open_mode)
                 assert ctx.stream is not None
                 ctx.filename = self.undo_temp_name(ctx.tmpfilename)

yt_dlp/options.py
@@ -681,6 +681,10 @@ def parseOpts(overrideArguments=None):
         '-R', '--retries',
         dest='retries', metavar='RETRIES', default=10,
         help='Number of retries (default is %default), or "infinite"')
+    downloader.add_option(
+        '--file-access-retries',
+        dest='file_access_retries', metavar='RETRIES', default=10,
+        help='Number of times to retry on file access error (default is %default), or "infinite"')
     downloader.add_option(
         '--fragment-retries',
         dest='fragment_retries', metavar='RETRIES', default=10,
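
A quick way to check the new option's plumbing; treat the `(parser, opts, args)` return shape of `parseOpts` as an assumption here:

    from yt_dlp.options import parseOpts

    parser, opts, args = parseOpts(['--file-access-retries', 'infinite'])
    print(opts.file_access_retries)                # 'infinite' (parse_retries converts it later)
    print(parser.defaults['file_access_retries'])  # 10, the %default in the help string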
