@@ -20,6 +20,8 @@ from youtube_dl.utils import *
 DEF_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'tests.json')
 PARAMETERS_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "parameters.json")
 
+RETRIES = 3
+
 # General configuration (from __init__, not very elegant...)
 jar = compat_cookiejar.CookieJar()
 cookie_processor = compat_urllib_request.HTTPCookieProcessor(jar)
@@ -94,7 +96,19 @@ def generator(test_case):
             _try_rm(tc['file'] + '.part')
             _try_rm(tc['file'] + '.info.json')
         try:
-            fd.download([test_case['url']])
+            for retry in range(1, RETRIES + 1):
+                try:
+                    fd.download([test_case['url']])
+                except (DownloadError, ExtractorError) as err:
+                    if retry == RETRIES: raise
+
+                    # Check if the exception is not a network related one
+                    if not err.exc_info[0] in (ZeroDivisionError, compat_urllib_error.URLError, socket.timeout):
+                        raise
+
+                    print('Retrying: {0} failed tries\n\n##########\n\n'.format(retry))
+                else:
+                    break
 
             for tc in test_cases:
                 if not test_case.get('params', {}).get('skip_download', False):
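
For reference, the change above boils down to a bounded retry loop that re-raises immediately on anything that is not a transient network failure. A minimal standalone sketch of the same pattern follows; it is illustrative only: call_with_retries and the plain urllib/socket exception tuple are simplifications introduced here, not youtube_dl's wrapped DownloadError/ExtractorError handling.

    import socket
    import urllib.error

    RETRIES = 3
    # Assumed set of transient, network-related errors worth retrying.
    NETWORK_ERRORS = (urllib.error.URLError, socket.timeout)

    def call_with_retries(func, *args, **kwargs):
        for retry in range(1, RETRIES + 1):
            try:
                return func(*args, **kwargs)
            except NETWORK_ERRORS:
                # Give up after the last allowed attempt; otherwise report and retry.
                if retry == RETRIES:
                    raise
                print('Retrying: {0} failed tries'.format(retry))

    # Hypothetical usage:
    # call_with_retries(urllib.request.urlopen, 'https://example.com/', timeout=10)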