Pull request #14422 (merge ref pull/14422/merge)
commit 02b29ee4aa by Andy Huang, committed via GitHub 2 days ago

@@ -0,0 +1,61 @@
import json
import unittest

from test.helper import FakeYDL
from yt_dlp.extractor.acfun import AcFunVideoIE


class TestAcFunPlaylist(unittest.TestCase):
    def setUp(self):
        self.ie = AcFunVideoIE()
        self.ie.set_downloader(FakeYDL({'noplaylist': False}))

    def test_playlist_entries_are_generated_for_multi_part_videos(self):
        video_info = {
            'title': 'Sample Playlist',
            'description': 'Sample description',
            'coverUrl': 'https://example.com/thumb.jpg',
            'user': {
                'name': 'Uploader Name',
                'href': 'uploader-id',
            },
            'videoList': [
                {
                    'id': 'part-1',
                    'title': 'Episode 1',
                },
                {
                    'id': 'part-2',
                    'title': 'Episode 2',
                },
            ],
            'currentVideoInfo': {
                'id': 'part-1',
            },
        }
        webpage = f'<script>window.videoInfo = {json.dumps(video_info)};</script>'
        self.ie._download_webpage = lambda url, video_id: webpage

        result = self.ie._real_extract('https://www.acfun.cn/v/ac12345?foo=bar')

        self.assertEqual(result['_type'], 'playlist')
        self.assertEqual(result['id'], '12345')
        self.assertEqual(result['title'], 'Sample Playlist')
        self.assertEqual(result['description'], 'Sample description')
        self.assertEqual(result['uploader'], 'Uploader Name')
        self.assertEqual(result['uploader_id'], 'uploader-id')

        entry_urls = [entry['url'] for entry in result['entries']]
        entry_ids = [entry['id'] for entry in result['entries']]
        entry_titles = [entry['title'] for entry in result['entries']]
        self.assertEqual(
            entry_urls,
            [
                'https://www.acfun.cn/v/ac12345?foo=bar',
                'https://www.acfun.cn/v/ac12345_2?foo=bar',
            ],
        )
        self.assertEqual(entry_ids, ['12345', '12345_2'])
        self.assertEqual(entry_titles, ['Episode 1', 'Episode 2'])
        self.assertTrue(all(entry['ie_key'] == 'AcFunVideo' for entry in result['entries']))
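
The test constructs FakeYDL with 'noplaylist': False so the extractor's playlist gate opts into multi-part extraction. Below is a rough, illustrative sketch of that decision only; the authoritative logic is InfoExtractor._yes_playlist() in yt_dlp/extractor/common.py, and this helper is not part of the PR.

# Rough sketch only, not the real implementation.
def yes_playlist(params, playlist_id, video_id):
    # With --no-playlist (a truthy 'noplaylist' param) and a concrete video
    # target, yt-dlp prefers the single video; otherwise it takes the playlist.
    if params.get('noplaylist') and video_id:
        return False
    return True

assert yes_playlist({'noplaylist': False}, '12345', '12345') is True
assert yes_playlist({'noplaylist': True}, '12345', '12345') is False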

@@ -13,8 +13,23 @@ from yt_dlp import YoutubeDL
 from yt_dlp.utils import DownloadError


+def _is_expected_error(err):
+    if not err.exc_info:
+        return False
+    exc = err.exc_info[1]
+    if getattr(exc, 'expected', False):
+        return True
+    cause = getattr(exc, 'exc_info', None)
+    if not cause:
+        return False
+    return getattr(cause[1], 'expected', False)
+
+
 def _download_restricted(url, filename, age):
-    """ Returns true if the file has been downloaded """
+    """Attempt to download ``url`` while respecting ``age`` restrictions."""
     params = {
         'age_limit': age,
@@ -26,21 +41,35 @@ def _download_restricted(url, filename, age):
     ydl.add_default_info_extractors()
     json_filename = os.path.splitext(filename)[0] + '.info.json'
     try_rm(json_filename)
+    downloaded = False
+    error = None
     try:
         ydl.download([url])
-    except DownloadError:
-        pass
-    else:
-        return os.path.exists(json_filename)
+        downloaded = os.path.exists(json_filename)
+    except DownloadError as err:
+        error = err
     finally:
         try_rm(json_filename)
+    return downloaded, error


 @is_download_test
 class TestAgeRestriction(unittest.TestCase):
     def _assert_restricted(self, url, filename, age, old_age=None):
-        self.assertTrue(_download_restricted(url, filename, old_age))
-        self.assertFalse(_download_restricted(url, filename, age))
+        can_download, err = _download_restricted(url, filename, old_age)
+        if err:
+            if _is_expected_error(err):
+                self.fail(f'Expected unrestricted download but got: {err}')
+            self.skipTest(f'Download failed: {err}')
+        self.assertTrue(can_download)
+
+        restricted, err = _download_restricted(url, filename, age)
+        if err:
+            if _is_expected_error(err):
+                self.assertFalse(restricted)
+                return
+            self.skipTest(f'Download failed: {err}')
+        self.assertFalse(restricted)

     def test_youtube(self):
         self._assert_restricted('HtVdAasjOgU', 'HtVdAasjOgU.mp4', 10)
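
As a quick, hedged illustration of the error chain the new _is_expected_error() helper walks: YoutubeDL reports extraction failures as a DownloadError whose exc_info holds the underlying exception, and ExtractorError carries an expected flag for anticipated failures such as age gates. The snippet assumes the current DownloadError(msg, exc_info) and ExtractorError(msg, expected=...) constructors in yt_dlp.utils.

import sys

from yt_dlp.utils import DownloadError, ExtractorError

try:
    # An "expected" failure, e.g. an age-gated video that cannot be unlocked.
    raise ExtractorError('Sign in to confirm your age', expected=True)
except ExtractorError:
    wrapped = DownloadError('ERROR: Sign in to confirm your age', sys.exc_info())

# This is the attribute walk the helper performs: DownloadError.exc_info[1]
# is the ExtractorError, and its `expected` flag marks the failure as anticipated.
inner = wrapped.exc_info[1]
print(getattr(inner, 'expected', False))  # True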

@@ -1,12 +1,17 @@
+import urllib.parse
+
 from .common import InfoExtractor
 from ..utils import (
     float_or_none,
     format_field,
     int_or_none,
     parse_codecs,
-    parse_qs,
     str_or_none,
     traverse_obj,
+    update_url_query,
+)
+from ..utils import (
+    parse_qs as compat_parse_qs,
 )
@@ -78,6 +83,13 @@ class AcFunVideoIE(AcFunVideoBaseIE):
             'thumbnail': r're:^https?://.*\.(jpg|jpeg)',
             'description': 'md5:67583aaf3a0f933bd606bc8a2d3ebb17',
         },
+    }, {
+        'url': 'https://www.acfun.cn/v/ac35468952',
+        'info_dict': {
+            'id': '35468952',
+            'title': 're:.+',
+        },
+        'playlist_mincount': 2,
     }]

     def _real_extract(self, url):
@@ -89,6 +101,26 @@ class AcFunVideoIE(AcFunVideoBaseIE):
         title = json_all.get('title')
         video_list = json_all.get('videoList') or []
         video_internal_id = traverse_obj(json_all, ('currentVideoInfo', 'id'))
+
+        playlist_id = video_id.partition('_')[0]
+        if video_id == playlist_id and len(video_list) > 1 and self._yes_playlist(playlist_id, video_id):
+            entries = []
+            parsed_url = urllib.parse.urlparse(url)
+            query = urllib.parse.parse_qs(parsed_url.query, keep_blank_values=True)
+            for idx, part_video_info in enumerate(video_list, start=1):
+                part_suffix = '' if idx == 1 else f'_{idx}'
+                part_id = f'{playlist_id}{part_suffix}'
+                entry_url = update_url_query(f'https://www.acfun.cn/v/ac{part_id}', query)
+                entries.append(self.url_result(
+                    entry_url, ie=self.ie_key(), video_id=part_id,
+                    video_title=traverse_obj(part_video_info, 'title')))
+            return self.playlist_result(
+                entries, playlist_id, title,
+                description=json_all.get('description'),
+                thumbnail=json_all.get('coverUrl'),
+                uploader=traverse_obj(json_all, ('user', 'name')),
+                uploader_id=traverse_obj(json_all, ('user', 'href')))
+
         if video_internal_id and len(video_list) > 1:
             part_idx, part_video_info = next(
                 (idx + 1, v) for (idx, v) in enumerate(video_list)
@@ -160,7 +192,7 @@ class AcFunBangumiIE(AcFunVideoBaseIE):

     def _real_extract(self, url):
         video_id = self._match_id(url)
-        ac_idx = parse_qs(url).get('ac', [None])[-1]
+        ac_idx = compat_parse_qs(url).get('ac', [None])[-1]
         video_id = f'{video_id}{format_field(ac_idx, None, "__%s")}'

         webpage = self._download_webpage(url, video_id)
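
A small, hedged illustration of how the generated part URLs in the playlist branch preserve the caller's query string: urllib.parse.parse_qs() yields a mapping of lists, and yt_dlp.utils.update_url_query() merges that mapping back into the bare part URL, assuming its current merge-into-existing-query behaviour.

import urllib.parse

from yt_dlp.utils import update_url_query

url = 'https://www.acfun.cn/v/ac12345?foo=bar'
query = urllib.parse.parse_qs(urllib.parse.urlparse(url).query, keep_blank_values=True)

# Part 1 keeps the original id; later parts get an _<idx> suffix.
print(update_url_query('https://www.acfun.cn/v/ac12345', query))    # ...ac12345?foo=bar
print(update_url_query('https://www.acfun.cn/v/ac12345_2', query))  # ...ac12345_2?foo=bar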
