Merge branch 'yt-dlp:master' into json_ld_article

pull/13395/head
doe1080, committed by GitHub
commit 117300e1bd

@@ -0,0 +1,28 @@
self-hosted-runner:
labels:
# Workaround for the outdated runner list in actionlint v1.7.7
# Ref: https://github.com/rhysd/actionlint/issues/533
- windows-11-arm
config-variables:
- KEEP_CACHE_WARM
- PUSH_VERSION_COMMIT
- UPDATE_TO_VERIFICATION
- PYPI_PROJECT
- PYPI_SUFFIX
- NIGHTLY_PYPI_PROJECT
- NIGHTLY_PYPI_SUFFIX
- NIGHTLY_ARCHIVE_REPO
- BUILD_NIGHTLY
- MASTER_PYPI_PROJECT
- MASTER_PYPI_SUFFIX
- MASTER_ARCHIVE_REPO
- BUILD_MASTER
- ISSUE_LOCKDOWN
- SANITIZE_COMMENT
paths:
.github/workflows/build.yml:
ignore:
# SC1090 "Can't follow non-constant source": ignore when using `source` to activate venv
- '.+SC1090.+'

@@ -9,6 +9,9 @@ on:
required: false
default: stable
type: string
origin:
required: true
type: string
unix:
default: true
type: boolean
@@ -27,10 +30,6 @@ on:
windows:
default: true
type: boolean
origin:
required: false
default: ''
type: string
secrets:
GPG_SIGNING_KEY:
required: false
@@ -74,13 +73,6 @@ on:
description: yt-dlp.exe, yt-dlp_win.zip, yt-dlp_x86.exe, yt-dlp_win_x86.zip, yt-dlp_arm64.exe, yt-dlp_win_arm64.zip
default: true
type: boolean
origin:
description: Origin
required: false
default: 'current repo'
type: choice
options:
- 'current repo'
permissions:
contents: read
@@ -89,25 +81,24 @@ jobs:
process:
runs-on: ubuntu-latest
outputs:
origin: ${{ steps.process_origin.outputs.origin }}
origin: ${{ steps.process_inputs.outputs.origin }}
timestamp: ${{ steps.process_origin.outputs.timestamp }}
timestamp: ${{ steps.process_inputs.outputs.timestamp }}
version: ${{ steps.process_origin.outputs.version }}
version: ${{ steps.process_inputs.outputs.version }}
steps:
- name: Process origin
- name: Process inputs
id: process_origin
id: process_inputs
env:
ORIGIN: ${{ inputs.origin }}
INPUTS: ${{ toJSON(inputs) }}
REPOSITORY: ${{ github.repository }}
VERSION: ${{ inputs.version }}
shell: python
run: |
import datetime as dt
import json
import os
import re
origin = os.environ['ORIGIN']
INPUTS = json.loads(os.environ['INPUTS'])
timestamp = dt.datetime.now(tz=dt.timezone.utc).strftime('%Y.%m.%d.%H%M%S.%f')
version = os.getenv('VERSION')
version = INPUTS.get('version')
if version and '.' not in version:
# build.yml was dispatched with only a revision as the version input value
version_parts = [*timestamp.split('.')[:3], version]
@@ -119,7 +110,7 @@ jobs:
version_parts = version.split('.')
assert all(re.fullmatch(r'[0-9]+', part) for part in version_parts), 'Version must be numeric'
outputs = {
'origin': os.environ['REPOSITORY'] if origin == 'current repo' else origin,
'origin': INPUTS.get('origin') or os.environ['REPOSITORY'],
'timestamp': timestamp,
'version': '.'.join(version_parts),
}
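For context, a rough sketch of what the new "Process inputs" step computes when build.yml is dispatched with only a revision as the version input; the values below are hypothetical, the real step takes `INPUTS` from the workflow context:

```python
import datetime as dt

# Hypothetical dispatch: the version input is just a revision number
timestamp = dt.datetime(2025, 9, 20, 12, 34, 56, tzinfo=dt.timezone.utc).strftime('%Y.%m.%d.%H%M%S.%f')
version = '123'
if version and '.' not in version:
    # Date part comes from the timestamp, the revision is appended
    version_parts = [*timestamp.split('.')[:3], version]
print('.'.join(version_parts))  # 2025.09.20.123
```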
@@ -135,6 +126,7 @@ jobs:
CHANNEL: ${{ inputs.channel }}
ORIGIN: ${{ needs.process.outputs.origin }}
VERSION: ${{ needs.process.outputs.version }}
UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
steps:
- uses: actions/checkout@v4
with:
@@ -159,7 +151,7 @@ jobs:
chmod +x ./yt-dlp
cp ./yt-dlp ./yt-dlp_downgraded
version="$(./yt-dlp --version)"
./yt-dlp_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
./yt-dlp_downgraded -v --update-to "${UPDATE_TO}"
downgraded_version="$(./yt-dlp_downgraded --version)"
[[ "${version}" != "${downgraded_version}" ]]
- name: Upload artifacts
@@ -190,6 +182,7 @@ jobs:
ORIGIN: ${{ needs.process.outputs.origin }}
VERSION: ${{ needs.process.outputs.version }}
EXE_NAME: ${{ matrix.exe }}
UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
steps:
- uses: actions/checkout@v4
- name: Build executable
@@ -215,7 +208,7 @@ jobs:
mkdir -p ~/testing
cp "./dist/${EXE_NAME}" ~/testing/"${EXE_NAME}_downgraded"
version="$("./dist/${EXE_NAME}" --version)"
~/testing/"${EXE_NAME}_downgraded" -v --update-to yt-dlp/yt-dlp@2023.03.04
~/testing/"${EXE_NAME}_downgraded" -v --update-to "${UPDATE_TO}"
downgraded_version="$(~/testing/"${EXE_NAME}_downgraded" --version)"
[[ "${version}" != "${downgraded_version}" ]]
- name: Upload artifacts
@@ -333,6 +326,7 @@ jobs:
CHANNEL: ${{ inputs.channel }}
ORIGIN: ${{ needs.process.outputs.origin }}
VERSION: ${{ needs.process.outputs.version }}
UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
steps:
- uses: actions/checkout@v4
@@ -409,7 +403,7 @@ jobs:
chmod +x ./dist/yt-dlp_macos
cp ./dist/yt-dlp_macos ./dist/yt-dlp_macos_downgraded
version="$(./dist/yt-dlp_macos --version)"
./dist/yt-dlp_macos_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
./dist/yt-dlp_macos_downgraded -v --update-to "${UPDATE_TO}"
downgraded_version="$(./dist/yt-dlp_macos_downgraded --version)"
[[ "$version" != "$downgraded_version" ]]
@@ -449,6 +443,7 @@ jobs:
ORIGIN: ${{ needs.process.outputs.origin }}
VERSION: ${{ needs.process.outputs.version }}
SUFFIX: ${{ matrix.suffix }}
UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
BASE_CACHE_KEY: cache-reqs-${{ github.job }}_${{ matrix.arch }}-${{ matrix.python_version }}
# Use custom PyInstaller built with https://github.com/yt-dlp/Pyinstaller-builds
PYINSTALLER_URL: https://yt-dlp.github.io/Pyinstaller-Builds/${{ matrix.arch }}/pyinstaller-6.15.0-py3-none-any.whl
@@ -510,7 +505,7 @@ jobs:
$name = "yt-dlp${Env:SUFFIX}"
Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe"
$version = & "./dist/${name}.exe" --version
& "./dist/${name}_downgraded.exe" -v --update-to yt-dlp/yt-dlp@2025.08.20
& "./dist/${name}_downgraded.exe" -v --update-to "${Env:UPDATE_TO}"
$downgraded_version = & "./dist/${name}_downgraded.exe" --version
if ($version -eq $downgraded_version) {
exit 1

@@ -12,6 +12,7 @@ jobs:
with:
version: '999999'
channel: stable
origin: ${{ github.repository }}
unix: false
linux: false
linux_armv7l: true

@@ -81,7 +81,7 @@ jobs:
- uses: actions/setup-python@v5
with:
python-version: "3.10"
python-version: "3.10" # Keep this in sync with test-workflows.yml
- name: Process inputs
id: process_inputs
@@ -269,12 +269,11 @@ jobs:
"[![Master](https://img.shields.io/badge/Master%20builds-lightblue.svg?style=for-the-badge)]" \
"(https://github.com/${MASTER_REPO}/releases/latest \"Master builds\")" >> ./RELEASE_NOTES
fi
printf '\n\n' >> ./RELEASE_NOTES
cat >> ./RELEASE_NOTES << EOF
#### A description of the various files is in the [README](https://github.com/${REPOSITORY}#release-files)
---
$(python ./devscripts/make_changelog.py -vv --collapsible)
EOF
printf '\n\n%s\n\n%s%s\n\n---\n' \
"#### A description of the various files is in the [README](https://github.com/${REPOSITORY}#release-files)" \
"The PyInstaller-bundled executables are subject to the licenses described in " \
"[THIRD_PARTY_LICENSES.txt](https://github.com/${BASE_REPO}/blob/master/THIRD_PARTY_LICENSES.txt)" >> ./RELEASE_NOTES
python ./devscripts/make_changelog.py -vv --collapsible >> ./RELEASE_NOTES
printf '%s\n\n' '**This is a pre-release build**' >> ./PRERELEASE_NOTES
cat ./RELEASE_NOTES >> ./PRERELEASE_NOTES
printf '%s\n\n' "Generated from: https://github.com/${REPOSITORY}/commit/${HEAD_SHA}" >> ./ARCHIVE_NOTES

@@ -0,0 +1,46 @@
name: Test and lint workflows
on:
push:
paths:
- .github/workflows/*
- devscripts/setup_variables.py
- devscripts/setup_variables_tests.py
- devscripts/utils.py
pull_request:
paths:
- .github/workflows/*
- devscripts/setup_variables.py
- devscripts/setup_variables_tests.py
- devscripts/utils.py
permissions:
contents: read
env:
ACTIONLINT_VERSION: "1.7.7"
ACTIONLINT_SHA256SUM: 023070a287cd8cccd71515fedc843f1985bf96c436b7effaecce67290e7e0757
ACTIONLINT_REPO: https://github.com/rhysd/actionlint
jobs:
check:
name: Check workflows
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: "3.10" # Keep this in sync with release.yml's prepare job
- name: Install requirements
env:
ACTIONLINT_TARBALL: ${{ format('actionlint_{0}_linux_amd64.tar.gz', env.ACTIONLINT_VERSION) }}
run: |
sudo apt -y install shellcheck
python -m pip install -U pyflakes
curl -LO "${ACTIONLINT_REPO}/releases/download/v${ACTIONLINT_VERSION}/${ACTIONLINT_TARBALL}"
printf '%s %s' "${ACTIONLINT_SHA256SUM}" "${ACTIONLINT_TARBALL}" | sha256sum -c -
tar xvzf "${ACTIONLINT_TARBALL}" actionlint
chmod +x actionlint
- name: Run actionlint
run: |
./actionlint -color
- name: Test GHA devscripts
run: |
python -m devscripts.setup_variables_tests

@@ -138,6 +138,17 @@ curl -L https://github.com/yt-dlp/yt-dlp/raw/master/public.key | gpg --import
gpg --verify SHA2-256SUMS.sig SHA2-256SUMS
gpg --verify SHA2-512SUMS.sig SHA2-512SUMS
```
#### Licensing
While yt-dlp is licensed under the [Unlicense](LICENSE), many of the release files contain code from other projects with different licenses.
Most notably, the PyInstaller-bundled executables include GPLv3+ licensed code, and as such the combined work is licensed under [GPLv3+](https://www.gnu.org/licenses/gpl-3.0.html).
See [THIRD_PARTY_LICENSES.txt](THIRD_PARTY_LICENSES.txt) for details.
The zipimport binary (`yt-dlp`), the source tarball (`yt-dlp.tar.gz`), and the PyPI source distribution & wheel only contain code licensed under the [Unlicense](LICENSE).
<!-- MANPAGE: END EXCLUDED SECTION -->
**Note**: The manpages, shell completion (autocomplete) files etc. are available inside the [source tarball](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp.tar.gz)

(File diff suppressed because it is too large)

@@ -26,6 +26,7 @@ services:
VERIFYIMAGE: quay.io/pypa/manylinux2014_x86_64:latest
environment:
EXE_NAME: ${EXE_NAME:?}
UPDATE_TO:
volumes:
- ../../dist:/build
@@ -55,7 +56,7 @@ services:
VERIFYIMAGE: quay.io/pypa/manylinux2014_aarch64:latest
environment:
EXE_NAME: ${EXE_NAME:?}
SKIP_UPDATE_TO: "1" # TODO: remove when there is a glibc2.17 aarch64 release to --update-to
UPDATE_TO:
volumes:
- ../../dist:/build
@@ -87,6 +88,7 @@ services:
VERIFYIMAGE: arm32v7/debian:bullseye
environment:
EXE_NAME: ${EXE_NAME:?}
UPDATE_TO:
TEST_ONEDIR_BUILD: "1"
volumes:
- ../../dist:/build
@@ -117,7 +119,7 @@ services:
VERIFYIMAGE: alpine:3.22
environment:
EXE_NAME: ${EXE_NAME:?}
SKIP_UPDATE_TO: "1" # TODO: remove when there is a musllinux_aarch64 release to --update-to
UPDATE_TO:
volumes:
- ../../dist:/build
@@ -148,6 +150,6 @@ services:
VERIFYIMAGE: alpine:3.22
environment:
EXE_NAME: ${EXE_NAME:?}
SKIP_UPDATE_TO: "1" # TODO: remove when there is a musllinux_aarch64 release to --update-to
UPDATE_TO:
volumes:
- ../../dist:/build

@@ -35,9 +35,13 @@ if [ -n "${SKIP_UPDATE_TO:-}" ] || [ -n "${TEST_ONEDIR_BUILD:-}" ]; then
exit 0
fi
if [ -z "${UPDATE_TO:-}" ]; then
UPDATE_TO="yt-dlp/yt-dlp@2025.09.05"
fi
cp "./${EXE_NAME}" "./${EXE_NAME}_downgraded"
version="$("./${EXE_NAME}" --version)"
"./${EXE_NAME}_downgraded" -v --update-to yt-dlp/yt-dlp@2023.03.04
"./${EXE_NAME}_downgraded" -v --update-to "${UPDATE_TO}"
downgraded_version="$("./${EXE_NAME}_downgraded" --version)"
if [ "${version}" = "${downgraded_version}" ]; then
exit 1

@@ -129,7 +129,6 @@ def windows_set_version(exe, version):
StringStruct('FileDescription', 'yt-dlp%s' % (MACHINE and f' ({MACHINE})')),
StringStruct('FileVersion', version),
StringStruct('InternalName', f'yt-dlp{suffix}'),
StringStruct('LegalCopyright', 'pukkandan.ytdlp@gmail.com | UNLICENSE'),
StringStruct('OriginalFilename', f'yt-dlp{suffix}.exe'),
StringStruct('ProductName', f'yt-dlp{suffix}'),
StringStruct(

@@ -0,0 +1,316 @@
import requests
from dataclasses import dataclass
from pathlib import Path
import hashlib
DEFAULT_OUTPUT = 'THIRD_PARTY_LICENSES.txt'
CACHE_LOCATION = '.license_cache'
HEADER = '''THIRD-PARTY LICENSES
This file aggregates license texts of third-party components included with the yt-dlp PyInstaller-bundled executables.
yt-dlp itself is licensed under the Unlicense (see LICENSE file).
Source code for bundled third-party components is available from the original projects.
If you cannot obtain it, the maintainers will provide it as per license obligation; maintainer emails are listed in pyproject.toml.'''
@dataclass(frozen=True)
class Dependency:
name: str
license_url: str
project_url: str = ''
license: str = ''
comment: str = ''
DEPENDENCIES: list[Dependency] = [
# Core runtime environment components
Dependency(
name='Python',
license='PSF-2.0',
license_url='https://raw.githubusercontent.com/python/cpython/refs/heads/main/LICENSE',
project_url='https://www.python.org/',
),
Dependency(
name='Microsoft Distributable Code',
license_url='https://raw.githubusercontent.com/python/cpython/refs/heads/main/PC/crtlicense.txt',
comment='Only included in Windows builds',
),
Dependency(
name='bzip2',
license='bzip2-1.0.6',
license_url='https://gitlab.com/federicomenaquintero/bzip2/-/raw/master/COPYING',
project_url='https://sourceware.org/bzip2/',
),
Dependency(
name='libffi',
license='MIT',
license_url='https://raw.githubusercontent.com/libffi/libffi/refs/heads/master/LICENSE',
project_url='https://sourceware.org/libffi/',
),
Dependency(
name='OpenSSL 3.0+',
license='Apache-2.0',
license_url='https://raw.githubusercontent.com/openssl/openssl/refs/heads/master/LICENSE.txt',
project_url='https://www.openssl.org/',
),
Dependency(
name='SQLite',
license='Public Domain', # Technically does not need to be included
license_url='https://sqlite.org/src/raw/e108e1e69ae8e8a59e93c455654b8ac9356a11720d3345df2a4743e9590fb20d?at=LICENSE.md',
project_url='https://www.sqlite.org/',
),
Dependency(
name='liblzma',
license='0BSD', # Technically does not need to be included
license_url='https://raw.githubusercontent.com/tukaani-project/xz/refs/heads/master/COPYING',
project_url='https://tukaani.org/xz/',
),
Dependency(
name='mpdecimal',
license='BSD-2-Clause',
# No official repo URL
license_url='https://gist.githubusercontent.com/seproDev/9e5dbfc08af35c3f2463e64eb9b27161/raw/61f5a98bc1a4ad7d48b1c793fc3314d4d43c2ab1/mpdecimal_COPYRIGHT.txt',
project_url='https://www.bytereef.org/mpdecimal/',
),
Dependency(
name='zlib',
license='zlib',
license_url='https://raw.githubusercontent.com/madler/zlib/refs/heads/develop/LICENSE',
project_url='https://zlib.net/',
),
Dependency(
name='Expat',
license='MIT',
license_url='https://raw.githubusercontent.com/libexpat/libexpat/refs/heads/master/COPYING',
project_url='https://libexpat.github.io/',
),
Dependency(
name='ncurses',
license='X11-distribute-modifications-variant',
license_url='https://raw.githubusercontent.com/mirror/ncurses/refs/heads/master/COPYING',
comment='Only included in Linux/macOS builds',
project_url='https://invisible-island.net/ncurses/',
),
Dependency(
name='GNU Readline',
license='GPL-3.0-or-later',
license_url='https://tiswww.case.edu/php/chet/readline/COPYING',
comment='Only included in Linux builds',
project_url='https://www.gnu.org/software/readline/',
),
Dependency(
name='libstdc++',
license='GPL-3.0-with-GCC-exception',
license_url='https://raw.githubusercontent.com/gcc-mirror/gcc/refs/heads/master/COPYING.RUNTIME',
comment='Only included in Linux builds',
project_url='https://gcc.gnu.org/onlinedocs/libstdc++/',
),
Dependency(
name='libgcc',
license='GPL-3.0-with-GCC-exception',
license_url='https://raw.githubusercontent.com/gcc-mirror/gcc/refs/heads/master/COPYING.RUNTIME',
comment='Only included in Linux builds',
project_url='https://gcc.gnu.org/',
),
Dependency(
name='libuuid',
license='BSD-3-Clause',
license_url='https://git.kernel.org/pub/scm/fs/ext2/e2fsprogs.git/plain/lib/uuid/COPYING',
comment='Only included in Linux builds',
project_url='https://git.kernel.org/pub/scm/fs/ext2/e2fsprogs.git/tree/lib/uuid',
),
Dependency(
name='libintl',
license='LGPL-2.1-or-later',
license_url='https://raw.githubusercontent.com/autotools-mirror/gettext/refs/heads/master/gettext-runtime/intl/COPYING.LIB',
comment='Only included in macOS builds',
project_url='https://www.gnu.org/software/gettext/',
),
Dependency(
name='libidn2',
license='LGPL-3.0-or-later',
license_url='https://gitlab.com/libidn/libidn2/-/raw/master/COPYING.LESSERv3',
comment='Only included in macOS builds',
project_url='https://www.gnu.org/software/libidn/',
),
Dependency(
name='libidn2 (Unicode character data files)',
license='Unicode-TOU AND Unicode-DFS-2016',
license_url='https://gitlab.com/libidn/libidn2/-/raw/master/COPYING.unicode',
comment='Only included in macOS builds',
project_url='https://www.gnu.org/software/libidn/',
),
Dependency(
name='libunistring',
license='LGPL-3.0-or-later',
license_url='https://gitweb.git.savannah.gnu.org/gitweb/?p=libunistring.git;a=blob_plain;f=COPYING.LIB;hb=HEAD',
comment='Only included in macOS builds',
project_url='https://www.gnu.org/software/libunistring/',
),
Dependency(
name='librtmp',
license='LGPL-2.1-or-later',
# No official repo URL
license_url='https://gist.githubusercontent.com/seproDev/31d8c691ccddebe37b8b379307cb232d/raw/053408e98547ea8c7d9ba3a80c965f33e163b881/librtmp_COPYING.txt',
comment='Only included in macOS builds',
project_url='https://rtmpdump.mplayerhq.hu/',
),
Dependency(
name='zstd',
license='BSD-3-Clause',
license_url='https://raw.githubusercontent.com/facebook/zstd/refs/heads/dev/LICENSE',
comment='Only included in macOS builds',
project_url='https://facebook.github.io/zstd/',
),
# Python packages
Dependency(
name='brotli',
license='MIT',
license_url='https://raw.githubusercontent.com/google/brotli/refs/heads/master/LICENSE',
project_url='https://brotli.org/',
),
Dependency(
name='curl_cffi',
license='MIT',
license_url='https://raw.githubusercontent.com/lexiforest/curl_cffi/refs/heads/main/LICENSE',
comment='Not included in `yt-dlp_x86` and `yt-dlp_musllinux_aarch64` builds',
project_url='https://curl-cffi.readthedocs.io/',
),
# Dependency of curl_cffi
Dependency(
name='curl-impersonate',
license='MIT',
license_url='https://raw.githubusercontent.com/lexiforest/curl-impersonate/refs/heads/main/LICENSE',
comment='Not included in `yt-dlp_x86` and `yt-dlp_musllinux_aarch64` builds',
project_url='https://github.com/lexiforest/curl-impersonate',
),
Dependency(
name='cffi',
license='MIT-0', # Technically does not need to be included
license_url='https://raw.githubusercontent.com/python-cffi/cffi/refs/heads/main/LICENSE',
project_url='https://cffi.readthedocs.io/',
),
# Dependency of cffi
Dependency(
name='pycparser',
license='BSD-3-Clause',
license_url='https://raw.githubusercontent.com/eliben/pycparser/refs/heads/main/LICENSE',
project_url='https://github.com/eliben/pycparser',
),
Dependency(
name='mutagen',
license='GPL-2.0-or-later',
license_url='https://raw.githubusercontent.com/quodlibet/mutagen/refs/heads/main/COPYING',
project_url='https://mutagen.readthedocs.io/',
),
Dependency(
name='PyCryptodome',
license='Public Domain and BSD-2-Clause',
license_url='https://raw.githubusercontent.com/Legrandin/pycryptodome/refs/heads/master/LICENSE.rst',
project_url='https://www.pycryptodome.org/',
),
Dependency(
name='certifi',
license='MPL-2.0',
license_url='https://raw.githubusercontent.com/certifi/python-certifi/refs/heads/master/LICENSE',
project_url='https://github.com/certifi/python-certifi',
),
Dependency(
name='requests',
license='Apache-2.0',
license_url='https://raw.githubusercontent.com/psf/requests/refs/heads/main/LICENSE',
project_url='https://requests.readthedocs.io/',
),
# Dependency of requests
Dependency(
name='charset-normalizer',
license='MIT',
license_url='https://raw.githubusercontent.com/jawah/charset_normalizer/refs/heads/master/LICENSE',
project_url='https://charset-normalizer.readthedocs.io/',
),
# Dependency of requests
Dependency(
name='idna',
license='BSD-3-Clause',
license_url='https://raw.githubusercontent.com/kjd/idna/refs/heads/master/LICENSE.md',
project_url='https://github.com/kjd/idna',
),
Dependency(
name='urllib3',
license='MIT',
license_url='https://raw.githubusercontent.com/urllib3/urllib3/refs/heads/main/LICENSE.txt',
project_url='https://urllib3.readthedocs.io/',
),
Dependency(
name='SecretStorage',
license='BSD-3-Clause',
license_url='https://raw.githubusercontent.com/mitya57/secretstorage/refs/heads/master/LICENSE',
comment='Only included in Linux builds',
project_url='https://secretstorage.readthedocs.io/',
),
# Dependency of SecretStorage
Dependency(
name='cryptography',
license='Apache-2.0', # Also available as BSD-3-Clause
license_url='https://raw.githubusercontent.com/pyca/cryptography/refs/heads/main/LICENSE.APACHE',
comment='Only included in Linux builds',
project_url='https://cryptography.io/',
),
# Dependency of SecretStorage
Dependency(
name='Jeepney',
license='MIT',
license_url='https://gitlab.com/takluyver/jeepney/-/raw/master/LICENSE',
comment='Only included in Linux builds',
project_url='https://jeepney.readthedocs.io/',
),
Dependency(
name='websockets',
license='BSD-3-Clause',
license_url='https://raw.githubusercontent.com/python-websockets/websockets/refs/heads/main/LICENSE',
project_url='https://websockets.readthedocs.io/',
),
]
def fetch_text(dep: Dependency) -> str:
cache_dir = Path(CACHE_LOCATION)
cache_dir.mkdir(exist_ok=True)
url_hash = hashlib.sha256(dep.license_url.encode('utf-8')).hexdigest()
cache_file = cache_dir / f'{url_hash}.txt'
if cache_file.exists():
return cache_file.read_text()
# UA needed since some domains block requests default UA
req = requests.get(dep.license_url, headers={'User-Agent': 'yt-dlp license fetcher'})
req.raise_for_status()
text = req.text
cache_file.write_text(text)
return text
def build_output() -> str:
lines = [HEADER]
for d in DEPENDENCIES:
lines.append('\n')
lines.append('-' * 80)
header = f'{d.name}'
if d.license:
header += f' | {d.license}'
if d.comment:
header += f'\nNote: {d.comment}'
if d.project_url:
header += f'\nURL: {d.project_url}'
lines.append(header)
lines.append('-' * 80)
text = fetch_text(d)
lines.append(text.strip('\n') + '\n')
return '\n'.join(lines)
if __name__ == '__main__':
content = build_output()
Path(DEFAULT_OUTPUT).write_text(content)

@@ -1,5 +1,5 @@
[build-system]
requires = ["hatchling"]
requires = ["hatchling>=1.27.0"]
build-backend = "hatchling.build"
[project]
@@ -22,7 +22,8 @@ keywords = [
"sponsorblock",
"yt-dlp",
]
license = {file = "LICENSE"}
license = "Unlicense"
license-files = ["LICENSE"]
classifiers = [
"Topic :: Multimedia :: Video",
"Development Status :: 5 - Production/Stable",
@@ -37,7 +38,6 @@ classifiers = [
"Programming Language :: Python :: Implementation",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"License :: OSI Approved :: The Unlicense (Unlicense)",
"Operating System :: OS Independent",
]
dynamic = ["version"]
@@ -63,7 +63,7 @@ secretstorage = [
]
build = [
"build",
"hatchling",
"hatchling>=1.27.0",
"pip",
"setuptools>=71.0.2,<81", # See https://github.com/pyinstaller/pyinstaller/issues/9149
"wheel",

@@ -1,6 +1,7 @@
#!/usr/bin/env python3
# Allow direct execution
import datetime as dt
import os
import sys
import unittest
@@ -12,7 +13,7 @@ import struct
from yt_dlp import compat
from yt_dlp.compat import urllib # isort: split
from yt_dlp.compat import compat_etree_fromstring, compat_expanduser
from yt_dlp.compat import compat_etree_fromstring, compat_expanduser, compat_datetime_from_timestamp
from yt_dlp.compat.urllib.request import getproxies
@@ -59,6 +60,45 @@ class TestCompat(unittest.TestCase):
def test_struct_unpack(self):
self.assertEqual(struct.unpack('!B', b'\x00'), (0,))
def test_compat_datetime_from_timestamp(self):
self.assertEqual(
compat_datetime_from_timestamp(0),
dt.datetime(1970, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc))
self.assertEqual(
compat_datetime_from_timestamp(1),
dt.datetime(1970, 1, 1, 0, 0, 1, tzinfo=dt.timezone.utc))
self.assertEqual(
compat_datetime_from_timestamp(3600),
dt.datetime(1970, 1, 1, 1, 0, 0, tzinfo=dt.timezone.utc))
self.assertEqual(
compat_datetime_from_timestamp(-1),
dt.datetime(1969, 12, 31, 23, 59, 59, tzinfo=dt.timezone.utc))
self.assertEqual(
compat_datetime_from_timestamp(-86400),
dt.datetime(1969, 12, 31, 0, 0, 0, tzinfo=dt.timezone.utc))
self.assertEqual(
compat_datetime_from_timestamp(0.5),
dt.datetime(1970, 1, 1, 0, 0, 0, 500000, tzinfo=dt.timezone.utc))
self.assertEqual(
compat_datetime_from_timestamp(1.000001),
dt.datetime(1970, 1, 1, 0, 0, 1, 1, tzinfo=dt.timezone.utc))
self.assertEqual(
compat_datetime_from_timestamp(-1.25),
dt.datetime(1969, 12, 31, 23, 59, 58, 750000, tzinfo=dt.timezone.utc))
self.assertEqual(
compat_datetime_from_timestamp(-1577923200),
dt.datetime(1920, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc))
self.assertEqual(
compat_datetime_from_timestamp(4102444800),
dt.datetime(2100, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc))
self.assertEqual(
compat_datetime_from_timestamp(173568960000),
dt.datetime(7470, 3, 8, 0, 0, 0, tzinfo=dt.timezone.utc))
if __name__ == '__main__':
unittest.main()

@@ -12,6 +12,7 @@ import datetime as dt
import io
import itertools
import json
import ntpath
import pickle
import subprocess
import unittest
@@ -101,11 +102,13 @@ from yt_dlp.utils import (
remove_start,
render_table,
replace_extension,
datetime_round,
rot47,
sanitize_filename,
sanitize_path,
sanitize_url,
shell_quote,
strftime_or_none,
smuggle_url,
str_to_int,
strip_jsonp,
@@ -251,12 +254,6 @@ class TestUtil(unittest.TestCase):
self.assertEqual(sanitize_path('abc.../def...'), 'abc..#\\def..#')
self.assertEqual(sanitize_path('C:\\abc:%(title)s.%(ext)s'), 'C:\\abc#%(title)s.%(ext)s')
# Check with nt._path_normpath if available
try:
from nt import _path_normpath as nt_path_normpath
except ImportError:
nt_path_normpath = None
for test, expected in [
('C:\\', 'C:\\'),
('../abc', '..\\abc'),
@@ -274,8 +271,7 @@
result = sanitize_path(test)
assert result == expected, f'{test} was incorrectly resolved'
assert result == sanitize_path(result), f'{test} changed after sanitizing again'
if nt_path_normpath:
assert result == nt_path_normpath(test), f'{test} does not match nt._path_normpath'
assert result == ntpath.normpath(test), f'{test} does not match ntpath.normpath'
def test_sanitize_url(self):
self.assertEqual(sanitize_url('//foo.bar'), 'http://foo.bar')
@@ -409,6 +405,25 @@
self.assertEqual(datetime_from_str('now+1day', precision='hour'), datetime_from_str('now+24hours', precision='auto'))
self.assertEqual(datetime_from_str('now+23hours', precision='hour'), datetime_from_str('now+23hours', precision='auto'))
def test_datetime_round(self):
self.assertEqual(datetime_round(dt.datetime.strptime('1820-05-12T01:23:45Z', '%Y-%m-%dT%H:%M:%SZ')),
dt.datetime(1820, 5, 12, tzinfo=dt.timezone.utc))
self.assertEqual(datetime_round(dt.datetime.strptime('1969-12-31T23:34:45Z', '%Y-%m-%dT%H:%M:%SZ'), 'hour'),
dt.datetime(1970, 1, 1, 0, tzinfo=dt.timezone.utc))
self.assertEqual(datetime_round(dt.datetime.strptime('2024-12-25T01:23:45Z', '%Y-%m-%dT%H:%M:%SZ'), 'minute'),
dt.datetime(2024, 12, 25, 1, 24, tzinfo=dt.timezone.utc))
self.assertEqual(datetime_round(dt.datetime.strptime('2024-12-25T01:23:45.123Z', '%Y-%m-%dT%H:%M:%S.%fZ'), 'second'),
dt.datetime(2024, 12, 25, 1, 23, 45, tzinfo=dt.timezone.utc))
self.assertEqual(datetime_round(dt.datetime.strptime('2024-12-25T01:23:45.678Z', '%Y-%m-%dT%H:%M:%S.%fZ'), 'second'),
dt.datetime(2024, 12, 25, 1, 23, 46, tzinfo=dt.timezone.utc))
def test_strftime_or_none(self):
self.assertEqual(strftime_or_none(-4722192000), '18200512')
self.assertEqual(strftime_or_none(0), '19700101')
self.assertEqual(strftime_or_none(1735084800), '20241225')
# Throws OverflowError
self.assertEqual(strftime_or_none(1735084800000), None)
def test_daterange(self):
_20century = DateRange('19000101', '20000101')
self.assertFalse('17890714' in _20century)

@@ -2717,11 +2717,7 @@ class YoutubeDL:
('modified_timestamp', 'modified_date'),
):
if info_dict.get(date_key) is None and info_dict.get(ts_key) is not None:
# Working around out-of-range timestamp values (e.g. negative ones on Windows,
# see http://bugs.python.org/issue1646728)
with contextlib.suppress(ValueError, OverflowError, OSError):
upload_date = dt.datetime.fromtimestamp(info_dict[ts_key], dt.timezone.utc)
info_dict[date_key] = upload_date.strftime('%Y%m%d')
info_dict[date_key] = strftime_or_none(info_dict[ts_key])
if not info_dict.get('release_year'):
info_dict['release_year'] = traverse_obj(info_dict, ('release_date', {lambda x: int(x[:4])}))

@@ -1,3 +1,4 @@
import datetime as dt
import os
import xml.etree.ElementTree as etree
@@ -27,6 +28,13 @@ def compat_ord(c):
return c if isinstance(c, int) else ord(c)
def compat_datetime_from_timestamp(timestamp):
# Calling dt.datetime.fromtimestamp with negative timestamps throws error in Windows
# Ref: https://github.com/yt-dlp/yt-dlp/issues/5185, https://github.com/python/cpython/issues/81708,
# https://github.com/yt-dlp/yt-dlp/issues/6706#issuecomment-1496842642
return (dt.datetime.fromtimestamp(0, dt.timezone.utc) + dt.timedelta(seconds=timestamp))
# Python 3.8+ does not honor %HOME% on windows, but this breaks compatibility with youtube-dl
# See https://github.com/yt-dlp/yt-dlp/issues/792
# https://docs.python.org/3/library/os.path.html#os.path.expanduser
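A quick illustration of why the new helper adds a timedelta to the epoch instead of calling `dt.datetime.fromtimestamp(timestamp)` directly: the direct call can fail for negative timestamps on Windows (see the issues referenced in the comment above), while the addition form handles them. The expected values below match the new test cases in test_compat.py:

```python
import datetime as dt

def compat_datetime_from_timestamp(timestamp):
    # Anchor at the epoch and add a timedelta, so negative and fractional
    # timestamps resolve without relying on the platform's fromtimestamp()
    return dt.datetime.fromtimestamp(0, dt.timezone.utc) + dt.timedelta(seconds=timestamp)

print(compat_datetime_from_timestamp(-86400))  # 1969-12-31 00:00:00+00:00
print(compat_datetime_from_timestamp(0.5))     # 1970-01-01 00:00:00.500000+00:00
```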

@@ -2,7 +2,14 @@ import itertools
from .common import InfoExtractor
from ..networking import HEADRequest
from ..utils import int_or_none, traverse_obj, url_or_none, urljoin
from ..utils import (
ExtractorError,
int_or_none,
update_url_query,
url_or_none,
urljoin,
)
from ..utils.traversal import traverse_obj
class TenPlayIE(InfoExtractor):
@@ -102,14 +109,19 @@ class TenPlayIE(InfoExtractor):
video_data = self._download_json(
f'https://vod.ten.com.au/api/videos/bcquery?command=find_videos_by_id&video_id={data["altId"]}',
content_id, 'Downloading video JSON')
# Dash URL 403s, changing the m3u8 format works
m3u8_url = self._request_webpage(
HEADRequest(video_data['items'][0]['HLSURL']),
HEADRequest(update_url_query(video_data['items'][0]['dashManifestUrl'], {
'manifest': 'm3u',
})),
content_id, 'Checking stream URL').url
if '10play-not-in-oz' in m3u8_url:
self.raise_geo_restricted(countries=['AU'])
if '10play_unsupported' in m3u8_url:
raise ExtractorError('Unable to extract stream')
# Attempt to get a higher quality stream
formats = self._extract_m3u8_formats(
m3u8_url.replace(',150,75,55,0000', ',300,150,75,55,0000'),
m3u8_url.replace(',150,75,55,0000', ',500,300,150,75,55,0000'),
content_id, 'mp4', fatal=False)
if not formats:
formats = self._extract_m3u8_formats(m3u8_url, content_id, 'mp4')

@@ -47,6 +47,7 @@ import xml.etree.ElementTree
from . import traversal
from ..compat import (
compat_datetime_from_timestamp,
compat_etree_fromstring,
compat_expanduser,
compat_HTMLParseError,
@@ -1376,6 +1377,7 @@ def datetime_round(dt_, precision='day'):
if precision == 'microsecond':
return dt_
time_scale = 1_000_000
unit_seconds = {
'day': 86400,
'hour': 3600,
@@ -1383,8 +1385,8 @@
'second': 1,
}
roundto = lambda x, n: ((x + n / 2) // n) * n
timestamp = roundto(calendar.timegm(dt_.timetuple()), unit_seconds[precision])
timestamp = roundto(calendar.timegm(dt_.timetuple()) + dt_.microsecond / time_scale, unit_seconds[precision])
return dt.datetime.fromtimestamp(timestamp, dt.timezone.utc)
return compat_datetime_from_timestamp(timestamp)
def hyphenate_date(date_str):
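A minimal check of the new rounding arithmetic: with `time_scale`, the microsecond fraction now participates in the rounding instead of being discarded. This mirrors the `2024-12-25T01:23:45.678Z` case in the new test_datetime_round test:

```python
import calendar
import datetime as dt

dt_ = dt.datetime(2024, 12, 25, 1, 23, 45, 678000, tzinfo=dt.timezone.utc)
roundto = lambda x, n: ((x + n / 2) // n) * n
# Seconds since epoch including the microsecond fraction, rounded to the nearest second
timestamp = roundto(calendar.timegm(dt_.timetuple()) + dt_.microsecond / 1_000_000, 1)
print(dt.datetime.fromtimestamp(0, dt.timezone.utc) + dt.timedelta(seconds=timestamp))
# 2024-12-25 01:23:46+00:00 -- the .678 fraction rounds up to the next second
```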
@@ -2056,18 +2058,13 @@ def strftime_or_none(timestamp, date_format='%Y%m%d', default=None):
datetime_object = None
try:
if isinstance(timestamp, (int, float)): # unix timestamp
# Using naive datetime here can break timestamp() in Windows
# Ref: https://github.com/yt-dlp/yt-dlp/issues/5185, https://github.com/python/cpython/issues/94414
# Also, dt.datetime.fromtimestamp breaks for negative timestamps
# Ref: https://github.com/yt-dlp/yt-dlp/issues/6706#issuecomment-1496842642
datetime_object = (dt.datetime.fromtimestamp(0, dt.timezone.utc)
+ dt.timedelta(seconds=timestamp))
datetime_object = compat_datetime_from_timestamp(timestamp)
elif isinstance(timestamp, str): # assume YYYYMMDD
datetime_object = dt.datetime.strptime(timestamp, '%Y%m%d')
date_format = re.sub( # Support %s on windows
r'(?<!%)(%%)*%s', rf'\g<1>{int(datetime_object.timestamp())}', date_format)
return datetime_object.strftime(date_format)
except (ValueError, TypeError, AttributeError):
except (ValueError, TypeError, AttributeError, OverflowError, OSError):
return default
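As a side note on the unchanged `%s` handling kept in this hunk: the regex splices the integer timestamp into the format string before `strftime` is called, since Windows' `strftime` does not support `%s`, while leaving an escaped `%%s` alone. A small standalone illustration with an assumed format string:

```python
import datetime as dt
import re

datetime_object = dt.datetime(2024, 12, 25, tzinfo=dt.timezone.utc)
date_format = '%Y%m%d-%s'  # hypothetical format using %s
date_format = re.sub(  # Support %s on windows
    r'(?<!%)(%%)*%s', rf'\g<1>{int(datetime_object.timestamp())}', date_format)
print(date_format)  # '%Y%m%d-1735084800'
```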
