[build] Overhaul Linux builds and refactor release workflow (#13997)

- Use `manylinux-shared` images for Linux builds
- Discontinue `yt-dlp_linux_armv7l`/`linux_armv7l_exe` release binary
- Add `yt-dlp_linux_armv7l.zip`/`linux_armv7l_dir` release binary
- Add `yt-dlp_musllinux` and `yt-dlp_musllinux_aarch64` release binaries
- Migrate `linux_exe` build strategy from staticx+musl to manylinux2014/glibc2.17
- Rewrite release.yml's "unholy bash monstrosity" as devscripts/setup_variables.py

Closes #10072, Closes #10630, Closes #10578, Closes #13976, Closes #13977, Closes #14106
Authored by: bashonly

@@ -12,10 +12,13 @@ on:
unix:
default: true
type: boolean
linux_static:
linux:
default: true
type: boolean
linux_arm:
linux_armv7l:
default: true
type: boolean
musllinux:
default: true
type: boolean
macos:
@@ -37,7 +40,9 @@ on:
version:
description: |
VERSION: yyyy.mm.dd[.rev] or rev
required: true
(default: auto-generated)
required: false
default: ''
type: string
channel:
description: |
@@ -49,12 +54,16 @@ on:
description: yt-dlp, yt-dlp.tar.gz
default: true
type: boolean
linux_static:
description: yt-dlp_linux
linux:
description: yt-dlp_linux, yt-dlp_linux.zip, yt-dlp_linux_aarch64, yt-dlp_linux_aarch64.zip
default: true
type: boolean
linux_arm:
description: yt-dlp_linux_aarch64, yt-dlp_linux_armv7l
linux_armv7l:
description: yt-dlp_linux_armv7l.zip
default: true
type: boolean
musllinux:
description: yt-dlp_musllinux, yt-dlp_musllinux.zip, yt-dlp_musllinux_aarch64, yt-dlp_musllinux_aarch64.zip
default: true
type: boolean
macos:
@@ -81,16 +90,51 @@ jobs:
runs-on: ubuntu-latest
outputs:
origin: ${{ steps.process_origin.outputs.origin }}
timestamp: ${{ steps.process_origin.outputs.timestamp }}
version: ${{ steps.process_origin.outputs.version }}
steps:
- name: Process origin
id: process_origin
env:
ORIGIN: ${{ inputs.origin }}
REPOSITORY: ${{ github.repository }}
VERSION: ${{ inputs.version }}
shell: python
run: |
echo "origin=${{ inputs.origin == 'current repo' && github.repository || inputs.origin }}" | tee "$GITHUB_OUTPUT"
import datetime as dt
import json
import os
import re
origin = os.environ['ORIGIN']
timestamp = dt.datetime.now(tz=dt.timezone.utc).strftime('%Y.%m.%d.%H%M%S.%f')
version = os.getenv('VERSION')
if version and '.' not in version:
# build.yml was dispatched with only a revision as the version input value
version_parts = [*timestamp.split('.')[:3], version]
elif not version:
# build.yml was dispatched without any version input value, so include .HHMMSS revision
version_parts = timestamp.split('.')[:4]
else:
# build.yml was called or dispatched with a complete version input value
version_parts = version.split('.')
assert all(re.fullmatch(r'[0-9]+', part) for part in version_parts), 'Version must be numeric'
outputs = {
'origin': os.environ['REPOSITORY'] if origin == 'current repo' else origin,
'timestamp': timestamp,
'version': '.'.join(version_parts),
}
print(json.dumps(outputs, indent=2))
with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
f.write('\n'.join(f'{key}={value}' for key, value in outputs.items()))
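For illustration, the three branches above derive versions as follows (a minimal standalone sketch, not part of the diff; the timestamp value is assumed):

import datetime as dt

def derive_version(version, timestamp):
    # Mirrors the "Process origin" step: a bare revision gets the date prepended,
    # an empty input gets an auto-generated .HHMMSS revision, and a complete
    # version string passes through unchanged
    if version and '.' not in version:
        return '.'.join([*timestamp.split('.')[:3], version])
    if not version:
        return '.'.join(timestamp.split('.')[:4])
    return version

timestamp = dt.datetime(2025, 9, 5, 21, 29, 10, tzinfo=dt.timezone.utc).strftime('%Y.%m.%d.%H%M%S.%f')
assert derive_version('123', timestamp) == '2025.09.05.123'
assert derive_version('', timestamp) == '2025.09.05.212910'
assert derive_version('2025.09.05.1', timestamp) == '2025.09.05.1'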
unix:
needs: process
if: inputs.unix
runs-on: ubuntu-latest
env:
CHANNEL: ${{ inputs.channel }}
ORIGIN: ${{ needs.process.outputs.origin }}
VERSION: ${{ needs.process.outputs.version }}
steps:
- uses: actions/checkout@v4
with:
@@ -103,7 +147,7 @@ jobs:
sudo apt -y install zip pandoc man sed
- name: Prepare
run: |
python devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
python devscripts/update-version.py -c "${CHANNEL}" -r "${ORIGIN}" "${VERSION}"
python devscripts/update_changelog.py -vv
python devscripts/make_lazy_extractors.py
- name: Build Unix platform-independent binary
@@ -117,7 +161,7 @@ jobs:
version="$(./yt-dlp --version)"
./yt-dlp_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
downgraded_version="$(./yt-dlp_downgraded --version)"
[[ "$version" != "$downgraded_version" ]]
[[ "${version}" != "${downgraded_version}" ]]
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
@@ -127,99 +171,156 @@ jobs:
yt-dlp.tar.gz
compression-level: 0
linux_static:
linux:
needs: process
if: inputs.linux_static
runs-on: ubuntu-latest
if: inputs.linux
runs-on: ${{ matrix.runner }}
strategy:
fail-fast: false
matrix:
include:
- exe: yt-dlp_linux
platform: x86_64
runner: ubuntu-24.04
- exe: yt-dlp_linux_aarch64
platform: aarch64
runner: ubuntu-24.04-arm
env:
CHANNEL: ${{ inputs.channel }}
ORIGIN: ${{ needs.process.outputs.origin }}
VERSION: ${{ needs.process.outputs.version }}
EXE_NAME: ${{ matrix.exe }}
steps:
- uses: actions/checkout@v4
- name: Build static executable
- name: Build executable
env:
channel: ${{ inputs.channel }}
origin: ${{ needs.process.outputs.origin }}
version: ${{ inputs.version }}
SERVICE: linux_${{ matrix.platform }}
run: |
mkdir -p ./dist
pushd bundle/docker
docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"
popd
sudo chown "${USER}:docker" "./dist/${EXE_NAME}"
- name: Verify executable in container
if: vars.UPDATE_TO_VERIFICATION
env:
SERVICE: linux_${{ matrix.platform }}_verify
run: |
mkdir ~/build
cd bundle/docker
docker compose up --build static
sudo chown "${USER}:docker" ~/build/yt-dlp_linux
docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"
- name: Verify --update-to
if: vars.UPDATE_TO_VERIFICATION
run: |
chmod +x ~/build/yt-dlp_linux
cp ~/build/yt-dlp_linux ~/build/yt-dlp_linux_downgraded
version="$(~/build/yt-dlp_linux --version)"
~/build/yt-dlp_linux_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
downgraded_version="$(~/build/yt-dlp_linux_downgraded --version)"
[[ "$version" != "$downgraded_version" ]]
chmod +x "./dist/${EXE_NAME}"
mkdir -p ~/testing
cp "./dist/${EXE_NAME}" ~/testing/"${EXE_NAME}_downgraded"
version="$("./dist/${EXE_NAME}" --version)"
~/testing/"${EXE_NAME}_downgraded" -v --update-to yt-dlp/yt-dlp@2023.03.04
downgraded_version="$(~/testing/"${EXE_NAME}_downgraded" --version)"
[[ "${version}" != "${downgraded_version}" ]]
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: build-bin-${{ github.job }}
name: build-bin-${{ github.job }}_${{ matrix.platform }}
path: |
~/build/yt-dlp_linux
dist/${{ matrix.exe }}*
compression-level: 0
linux_arm:
linux_armv7l:
needs: process
if: inputs.linux_arm
if: inputs.linux_armv7l
permissions:
contents: read
packages: write # for creating cache
runs-on: ubuntu-latest
strategy:
matrix:
architecture:
- armv7
- aarch64
runs-on: ubuntu-24.04-arm
env:
CHANNEL: ${{ inputs.channel }}
ORIGIN: ${{ needs.process.outputs.origin }}
VERSION: ${{ needs.process.outputs.version }}
EXE_NAME: yt-dlp_linux_armv7l
steps:
- uses: actions/checkout@v4
- name: Cache requirements
id: cache-venv
uses: actions/cache@v4
env:
SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
with:
path: ./repo
- name: Virtualized Install, Prepare & Build
uses: yt-dlp/run-on-arch-action@v3
path: |
~/yt-dlp-build-venv
key: cache-reqs-${{ github.job }}-${{ github.ref }}-${{ needs.process.outputs.timestamp }}
restore-keys: |
cache-reqs-${{ github.job }}-${{ github.ref }}-
cache-reqs-${{ github.job }}-
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
with:
# Ref: https://github.com/uraimo/run-on-arch-action/issues/55
env: |
GITHUB_WORKFLOW: build
githubToken: ${{ github.token }} # To cache image
arch: ${{ matrix.architecture }}
distro: ubuntu20.04 # Standalone executable should be built on minimum supported OS
dockerRunArgs: --volume "${PWD}/repo:/repo"
install: | # Installing Python 3.10 from the Deadsnakes repo raises errors
apt update
apt -y install zlib1g-dev libffi-dev python3.9 python3.9-dev python3.9-distutils python3-pip \
python3-secretstorage # Cannot build cryptography wheel in virtual armv7 environment
python3.9 -m pip install -U pip wheel 'setuptools>=71.0.2'
# XXX: Keep this in sync with pyproject.toml (it can't be accessed at this stage) and exclude secretstorage
python3.9 -m pip install -U Pyinstaller mutagen pycryptodomex brotli certifi cffi \
'requests>=2.32.2,<3' 'urllib3>=2.0.2,<3' 'websockets>=13.0'
run: |
cd repo
python3.9 devscripts/install_deps.py -o --include build
python3.9 devscripts/install_deps.py --include pyinstaller # Cached versions may be out of date
python3.9 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
python3.9 devscripts/make_lazy_extractors.py
python3.9 -m bundle.pyinstaller
if ${{ vars.UPDATE_TO_VERIFICATION && 'true' || 'false' }}; then
arch="${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}"
chmod +x ./dist/yt-dlp_linux_${arch}
cp ./dist/yt-dlp_linux_${arch} ./dist/yt-dlp_linux_${arch}_downgraded
version="$(./dist/yt-dlp_linux_${arch} --version)"
./dist/yt-dlp_linux_${arch}_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
downgraded_version="$(./dist/yt-dlp_linux_${arch}_downgraded --version)"
[[ "$version" != "$downgraded_version" ]]
fi
platforms: linux/arm/v7
- name: Build executable
env:
SERVICE: linux_armv7l
run: |
mkdir -p ./dist
mkdir -p ~/yt-dlp-build-venv
cd bundle/docker
docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"
- name: Verify executable in container
if: vars.UPDATE_TO_VERIFICATION
env:
SERVICE: linux_armv7l_verify
run: |
cd bundle/docker
docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: build-bin-${{ github.job }}
path: |
dist/yt-dlp_linux_armv7l.zip
compression-level: 0
musllinux:
needs: process
if: inputs.musllinux
runs-on: ${{ matrix.runner }}
strategy:
fail-fast: false
matrix:
include:
- exe: yt-dlp_musllinux
platform: x86_64
runner: ubuntu-24.04
- exe: yt-dlp_musllinux_aarch64
platform: aarch64
runner: ubuntu-24.04-arm
env:
CHANNEL: ${{ inputs.channel }}
ORIGIN: ${{ needs.process.outputs.origin }}
VERSION: ${{ needs.process.outputs.version }}
EXE_NAME: ${{ matrix.exe }}
steps:
- uses: actions/checkout@v4
- name: Build executable
env:
SERVICE: musllinux_${{ matrix.platform }}
run: |
mkdir -p ./dist
pushd bundle/docker
docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"
popd
sudo chown "${USER}:docker" "./dist/${EXE_NAME}"
- name: Verify executable in container
if: vars.UPDATE_TO_VERIFICATION
env:
SERVICE: musllinux_${{ matrix.platform }}_verify
run: |
cd bundle/docker
docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: build-bin-linux_${{ matrix.architecture }}
path: | # run-on-arch-action designates armv7l as armv7
repo/dist/yt-dlp_linux_${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}
name: build-bin-${{ github.job }}_${{ matrix.platform }}
path: |
dist/${{ matrix.exe }}*
compression-level: 0
macos:
@@ -227,22 +328,28 @@ jobs:
if: inputs.macos
permissions:
contents: read
actions: write # For cleaning up cache
runs-on: macos-14
env:
CHANNEL: ${{ inputs.channel }}
ORIGIN: ${{ needs.process.outputs.origin }}
VERSION: ${{ needs.process.outputs.version }}
steps:
- uses: actions/checkout@v4
# NB: Building universal2 does not work with python from actions/setup-python
- name: Restore cached requirements
id: restore-cache
uses: actions/cache/restore@v4
- name: Cache requirements
id: cache-venv
uses: actions/cache@v4
env:
SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
with:
path: |
~/yt-dlp-build-venv
key: cache-reqs-${{ github.job }}-${{ github.ref }}
key: cache-reqs-${{ github.job }}-${{ github.ref }}-${{ needs.process.outputs.timestamp }}
restore-keys: |
cache-reqs-${{ github.job }}-${{ github.ref }}-
cache-reqs-${{ github.job }}-
- name: Install Requirements
run: |
@@ -287,7 +394,7 @@ jobs:
- name: Prepare
run: |
python3 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
python3 devscripts/update-version.py -c "${CHANNEL}" -r "${ORIGIN}" "${VERSION}"
python3 devscripts/make_lazy_extractors.py
- name: Build
run: |
@@ -315,27 +422,11 @@ jobs:
dist/yt-dlp_macos.zip
compression-level: 0
- name: Cleanup cache
if: steps.restore-cache.outputs.cache-hit == 'true'
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
cache_key: cache-reqs-${{ github.job }}-${{ github.ref }}
run: |
gh cache delete "${cache_key}"
- name: Cache requirements
uses: actions/cache/save@v4
with:
path: |
~/yt-dlp-build-venv
key: cache-reqs-${{ github.job }}-${{ github.ref }}
windows:
needs: process
if: inputs.windows
permissions:
contents: read
actions: write # For cleaning up cache
runs-on: ${{ matrix.runner }}
strategy:
fail-fast: false
@@ -353,6 +444,14 @@ jobs:
runner: windows-11-arm
python_version: '3.13' # arm64 only has Python >= 3.11 available
suffix: '_arm64'
env:
CHANNEL: ${{ inputs.channel }}
ORIGIN: ${{ needs.process.outputs.origin }}
VERSION: ${{ needs.process.outputs.version }}
SUFFIX: ${{ matrix.suffix }}
BASE_CACHE_KEY: cache-reqs-${{ github.job }}_${{ matrix.arch }}-${{ matrix.python_version }}
# Use custom PyInstaller built with https://github.com/yt-dlp/Pyinstaller-builds
PYINSTALLER_URL: https://yt-dlp.github.io/Pyinstaller-Builds/${{ matrix.arch }}/pyinstaller-6.15.0-py3-none-any.whl
steps:
- uses: actions/checkout@v4
@@ -361,49 +460,60 @@ jobs:
python-version: ${{ matrix.python_version }}
architecture: ${{ matrix.arch }}
- name: Restore cached requirements
id: restore-cache
- name: Cache requirements
id: cache-venv
if: matrix.arch == 'arm64'
uses: actions/cache/restore@v4
uses: actions/cache@v4
env:
SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
with:
path: |
/yt-dlp-build-venv
key: cache-reqs-${{ github.job }}_${{ matrix.arch }}-${{ matrix.python_version }}-${{ github.ref }}
key: ${{ env.BASE_CACHE_KEY }}-${{ github.ref }}-${{ needs.process.outputs.timestamp }}
restore-keys: |
${{ env.BASE_CACHE_KEY }}-${{ github.ref }}-
${{ env.BASE_CACHE_KEY }}-
- name: Install Requirements
env:
ARCH: ${{ matrix.arch }}
shell: pwsh
run: |
python -m venv /yt-dlp-build-venv
/yt-dlp-build-venv/Scripts/Activate.ps1
python devscripts/install_deps.py -o --include build
python devscripts/install_deps.py ${{ (matrix.arch != 'x86' && '--include curl-cffi') || '' }}
# Use custom pyinstaller built with https://github.com/yt-dlp/pyinstaller-builds
python -m pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/${{ matrix.arch }}/pyinstaller-6.15.0-py3-none-any.whl"
if ("${Env:ARCH}" -eq "x86") {
python devscripts/install_deps.py
} else {
python devscripts/install_deps.py --include curl-cffi
}
python -m pip install -U "${Env:PYINSTALLER_URL}"
- name: Prepare
shell: pwsh
run: |
python devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
python devscripts/update-version.py -c "${Env:CHANNEL}" -r "${Env:ORIGIN}" "${Env:VERSION}"
python devscripts/make_lazy_extractors.py
- name: Build
shell: pwsh
run: |
/yt-dlp-build-venv/Scripts/Activate.ps1
python -m bundle.pyinstaller
python -m bundle.pyinstaller --onedir
Compress-Archive -Path ./dist/yt-dlp${{ matrix.suffix }}/* -DestinationPath ./dist/yt-dlp_win${{ matrix.suffix }}.zip
Compress-Archive -Path ./dist/yt-dlp${Env:SUFFIX}/* -DestinationPath ./dist/yt-dlp_win${Env:SUFFIX}.zip
- name: Verify --update-to
if: vars.UPDATE_TO_VERIFICATION
shell: pwsh
run: |
foreach ($name in @("yt-dlp${{ matrix.suffix }}")) {
Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe"
$version = & "./dist/${name}.exe" --version
& "./dist/${name}_downgraded.exe" -v --update-to yt-dlp/yt-dlp@2025.08.20
$downgraded_version = & "./dist/${name}_downgraded.exe" --version
if ($version -eq $downgraded_version) {
exit 1
}
$name = "yt-dlp${Env:SUFFIX}"
Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe"
$version = & "./dist/${name}.exe" --version
& "./dist/${name}_downgraded.exe" -v --update-to yt-dlp/yt-dlp@2025.08.20
$downgraded_version = & "./dist/${name}_downgraded.exe" --version
if ($version -eq $downgraded_version) {
exit 1
}
- name: Upload artifacts
@@ -415,30 +525,14 @@ jobs:
dist/yt-dlp_win${{ matrix.suffix }}.zip
compression-level: 0
- name: Cleanup cache
if: |
matrix.arch == 'arm64' && steps.restore-cache.outputs.cache-hit == 'true'
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
cache_key: cache-reqs-${{ github.job }}_${{ matrix.arch }}-${{ matrix.python_version }}-${{ github.ref }}
run: |
gh cache delete "${cache_key}"
- name: Cache requirements
if: matrix.arch == 'arm64'
uses: actions/cache/save@v4
with:
path: |
/yt-dlp-build-venv
key: cache-reqs-${{ github.job }}_${{ matrix.arch }}-${{ matrix.python_version }}-${{ github.ref }}
meta_files:
if: always() && !cancelled()
needs:
- process
- unix
- linux_static
- linux_arm
- linux
- linux_armv7l
- musllinux
- macos
- windows
runs-on: ubuntu-latest
@@ -469,38 +563,38 @@ jobs:
lock 2023.11.16 (?!win_x86_exe).+ Python 3\.7
lock 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
lock 2024.10.22 py2exe .+
lock 2024.10.22 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
lock 2024.10.22 zip Python 3\.8
lock 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
lock 2025.08.11 darwin_legacy_exe .+
lock 2025.08.27 linux_armv7l_exe .+
lockV2 yt-dlp/yt-dlp 2022.08.18.36 .+ Python 3\.6
lockV2 yt-dlp/yt-dlp 2023.11.16 (?!win_x86_exe).+ Python 3\.7
lockV2 yt-dlp/yt-dlp 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
lockV2 yt-dlp/yt-dlp 2024.10.22 py2exe .+
lockV2 yt-dlp/yt-dlp 2024.10.22 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
lockV2 yt-dlp/yt-dlp 2024.10.22 zip Python 3\.8
lockV2 yt-dlp/yt-dlp 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
lockV2 yt-dlp/yt-dlp 2025.08.11 darwin_legacy_exe .+
lockV2 yt-dlp/yt-dlp 2025.08.27 linux_armv7l_exe .+
lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 (?!win_x86_exe).+ Python 3\.7
lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 win_x86_exe .+ Windows-(?:Vista|2008Server)
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 py2exe .+
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 zip Python 3\.8
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
lockV2 yt-dlp/yt-dlp-nightly-builds 2025.08.12.233030 darwin_legacy_exe .+
lockV2 yt-dlp/yt-dlp-nightly-builds 2025.08.30.232839 linux_armv7l_exe .+
lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 (?!win_x86_exe).+ Python 3\.7
lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 win_x86_exe .+ Windows-(?:Vista|2008Server)
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.045052 py2exe .+
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 zip Python 3\.8
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
lockV2 yt-dlp/yt-dlp-master-builds 2025.08.12.232447 darwin_legacy_exe .+
lockV2 yt-dlp/yt-dlp-master-builds 2025.09.05.212910 linux_armv7l_exe .+
EOF
- name: Sign checksum files
env:
GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
if: env.GPG_SIGNING_KEY != ''
if: env.GPG_SIGNING_KEY
run: |
gpg --batch --import <<< "${{ secrets.GPG_SIGNING_KEY }}"
for signfile in ./SHA*SUMS; do
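The glibc lock patterns in the rules above can be sanity-checked in isolation. A small sketch, assuming the updater matches each pattern against a "<variant> <system description>" string:

import re

# Locks onefile armv7l/aarch64 builds on legacy glibc (< 2.31) to 2024.10.22
pattern = re.compile(r'linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b')
assert pattern.fullmatch('linux_aarch64_exe Python 3.9 Linux-5.4.0-aarch64-with-glibc2.17')
assert not pattern.fullmatch('linux_aarch64_exe Python 3.13 Linux-6.8.0-aarch64-with-glibc2.35')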

@@ -0,0 +1,22 @@
name: Keep cache warm
on:
workflow_dispatch:
schedule:
- cron: '0 22 1,6,11,16,21,27 * *'
jobs:
build:
if: |
vars.KEEP_CACHE_WARM || github.event_name == 'workflow_dispatch'
uses: ./.github/workflows/build.yml
with:
version: '999999'
channel: stable
unix: false
linux: false
linux_armv7l: true
musllinux: false
macos: true
windows: true
permissions:
contents: read

@@ -6,10 +6,12 @@ on:
paths:
- "yt_dlp/**.py"
- "!yt_dlp/version.py"
- "bundle/*.py"
- "bundle/**"
- "pyproject.toml"
- "Makefile"
- ".github/workflows/build.yml"
- ".github/workflows/release.yml"
- ".github/workflows/release-master.yml"
concurrency:
group: release-master
permissions:
@@ -17,21 +19,20 @@ permissions:
jobs:
release:
if: vars.BUILD_MASTER != ''
if: vars.BUILD_MASTER
uses: ./.github/workflows/release.yml
with:
prerelease: true
source: master
source: ${{ (github.repository != 'yt-dlp/yt-dlp' && vars.MASTER_ARCHIVE_REPO) || 'master' }}
target: 'master'
permissions:
contents: write
packages: write # For package cache
actions: write # For cleaning up cache
id-token: write # mandatory for trusted publishing
secrets: inherit
publish_pypi:
needs: [release]
if: vars.MASTER_PYPI_PROJECT != ''
if: vars.MASTER_PYPI_PROJECT
runs-on: ubuntu-latest
permissions:
id-token: write # mandatory for trusted publishing

@@ -7,7 +7,7 @@ permissions:
jobs:
check_nightly:
if: vars.BUILD_NIGHTLY != ''
if: vars.BUILD_NIGHTLY
runs-on: ubuntu-latest
outputs:
commit: ${{ steps.check_for_new_commits.outputs.commit }}
@@ -22,9 +22,13 @@ jobs:
"yt_dlp/*.py"
':!yt_dlp/version.py'
"bundle/*.py"
"bundle/docker/compose.yml"
"bundle/docker/linux/*"
"pyproject.toml"
"Makefile"
".github/workflows/build.yml"
".github/workflows/release.yml"
".github/workflows/release-nightly.yml"
)
echo "commit=$(git log --format=%H -1 --since="24 hours ago" -- "${relevant_files[@]}")" | tee "$GITHUB_OUTPUT"
@@ -34,17 +38,16 @@ jobs:
uses: ./.github/workflows/release.yml
with:
prerelease: true
source: nightly
source: ${{ (github.repository != 'yt-dlp/yt-dlp' && vars.NIGHTLY_ARCHIVE_REPO) || 'nightly' }}
target: 'nightly'
permissions:
contents: write
packages: write # For package cache
actions: write # For cleaning up cache
id-token: write # mandatory for trusted publishing
secrets: inherit
publish_pypi:
needs: [release]
if: vars.NIGHTLY_PYPI_PROJECT != ''
if: vars.NIGHTLY_PYPI_PROJECT
runs-on: ubuntu-latest
permissions:
id-token: write # mandatory for trusted publishing

@@ -14,6 +14,10 @@ on:
required: false
default: ''
type: string
linux_armv7l:
required: false
default: false
type: boolean
prerelease:
required: false
default: true
@@ -43,6 +47,10 @@ on:
required: false
default: ''
type: string
linux_armv7l:
description: Include linux_armv7l
default: true
type: boolean
prerelease:
description: Pre-release
default: false
@@ -77,135 +85,57 @@ jobs:
- name: Process inputs
id: process_inputs
env:
INPUTS: ${{ toJSON(inputs) }}
run: |
cat << EOF
::group::Inputs
prerelease=${{ inputs.prerelease }}
source=${{ inputs.source }}
target=${{ inputs.target }}
version=${{ inputs.version }}
::endgroup::
EOF
IFS='@' read -r source_repo source_tag <<<"${{ inputs.source }}"
IFS='@' read -r target_repo target_tag <<<"${{ inputs.target }}"
cat << EOF >> "$GITHUB_OUTPUT"
source_repo=${source_repo}
source_tag=${source_tag}
target_repo=${target_repo}
target_tag=${target_tag}
EOF
python -m devscripts.setup_variables process_inputs
- name: Setup variables
id: setup_variables
env:
source_repo: ${{ steps.process_inputs.outputs.source_repo }}
source_tag: ${{ steps.process_inputs.outputs.source_tag }}
target_repo: ${{ steps.process_inputs.outputs.target_repo }}
target_tag: ${{ steps.process_inputs.outputs.target_tag }}
INPUTS: ${{ toJSON(inputs) }}
PROCESSED: ${{ toJSON(steps.process_inputs.outputs) }}
REPOSITORY: ${{ github.repository }}
PUSH_VERSION_COMMIT: ${{ vars.PUSH_VERSION_COMMIT }}
PYPI_PROJECT: ${{ vars.PYPI_PROJECT }}
SOURCE_PYPI_PROJECT: ${{ vars[format('{0}_pypi_project', steps.process_inputs.outputs.source_repo)] }}
SOURCE_PYPI_SUFFIX: ${{ vars[format('{0}_pypi_suffix', steps.process_inputs.outputs.source_repo)] }}
TARGET_PYPI_PROJECT: ${{ vars[format('{0}_pypi_project', steps.process_inputs.outputs.target_repo)] }}
TARGET_PYPI_SUFFIX: ${{ vars[format('{0}_pypi_suffix', steps.process_inputs.outputs.target_repo)] }}
SOURCE_ARCHIVE_REPO: ${{ vars[format('{0}_archive_repo', steps.process_inputs.outputs.source_repo)] }}
TARGET_ARCHIVE_REPO: ${{ vars[format('{0}_archive_repo', steps.process_inputs.outputs.target_repo)] }}
HAS_SOURCE_ARCHIVE_REPO_TOKEN: ${{ !!secrets[format('{0}_archive_repo_token', steps.process_inputs.outputs.source_repo)] }}
HAS_TARGET_ARCHIVE_REPO_TOKEN: ${{ !!secrets[format('{0}_archive_repo_token', steps.process_inputs.outputs.target_repo)] }}
HAS_ARCHIVE_REPO_TOKEN: ${{ !!secrets.ARCHIVE_REPO_TOKEN }}
run: |
# unholy bash monstrosity (sincere apologies)
fallback_token () {
if ${{ !secrets.ARCHIVE_REPO_TOKEN }}; then
echo "::error::Repository access secret ${target_repo_token^^} not found"
exit 1
fi
target_repo_token=ARCHIVE_REPO_TOKEN
return 0
}
source_is_channel=0
[[ "${source_repo}" == 'stable' ]] && source_repo='yt-dlp/yt-dlp'
if [[ -z "${source_repo}" ]]; then
source_repo='${{ github.repository }}'
elif [[ '${{ vars[format('{0}_archive_repo', env.source_repo)] }}' ]]; then
source_is_channel=1
source_channel='${{ vars[format('{0}_archive_repo', env.source_repo)] }}'
elif [[ -z "${source_tag}" && "${source_repo}" != */* ]]; then
source_tag="${source_repo}"
source_repo='${{ github.repository }}'
fi
resolved_source="${source_repo}"
if [[ "${source_tag}" ]]; then
resolved_source="${resolved_source}@${source_tag}"
elif [[ "${source_repo}" == 'yt-dlp/yt-dlp' ]]; then
resolved_source='stable'
fi
revision="${{ (inputs.prerelease || !vars.PUSH_VERSION_COMMIT) && '$(date -u +"%H%M%S")' || '' }}"
version="$(
python devscripts/update-version.py \
-c "${resolved_source}" -r "${{ github.repository }}" ${{ inputs.version || '$revision' }} | \
grep -Po "version=\K\d+\.\d+\.\d+(\.\d+)?")"
python -m devscripts.setup_variables
if [[ "${target_repo}" ]]; then
if [[ -z "${target_tag}" ]]; then
if [[ '${{ vars[format('{0}_archive_repo', env.target_repo)] }}' ]]; then
target_tag="${source_tag:-${version}}"
else
target_tag="${target_repo}"
target_repo='${{ github.repository }}'
fi
fi
if [[ "${target_repo}" != '${{ github.repository}}' ]]; then
target_repo='${{ vars[format('{0}_archive_repo', env.target_repo)] }}'
target_repo_token='${{ env.target_repo }}_archive_repo_token'
${{ !!secrets[format('{0}_archive_repo_token', env.target_repo)] }} || fallback_token
pypi_project='${{ vars[format('{0}_pypi_project', env.target_repo)] }}'
pypi_suffix='${{ vars[format('{0}_pypi_suffix', env.target_repo)] }}'
fi
else
target_tag="${source_tag:-${version}}"
if ((source_is_channel)); then
target_repo="${source_channel}"
target_repo_token='${{ env.source_repo }}_archive_repo_token'
${{ !!secrets[format('{0}_archive_repo_token', env.source_repo)] }} || fallback_token
pypi_project='${{ vars[format('{0}_pypi_project', env.source_repo)] }}'
pypi_suffix='${{ vars[format('{0}_pypi_suffix', env.source_repo)] }}'
else
target_repo='${{ github.repository }}'
fi
fi
if [[ "${target_repo}" == '${{ github.repository }}' ]] && ${{ !inputs.prerelease }}; then
pypi_project='${{ vars.PYPI_PROJECT }}'
fi
echo "::group::Output variables"
cat << EOF | tee -a "$GITHUB_OUTPUT"
channel=${resolved_source}
version=${version}
target_repo=${target_repo}
target_repo_token=${target_repo_token}
target_tag=${target_tag}
pypi_project=${pypi_project}
pypi_suffix=${pypi_suffix}
EOF
echo "::endgroup::"
- name: Update documentation
- name: Update version & documentation
env:
version: ${{ steps.setup_variables.outputs.version }}
target_repo: ${{ steps.setup_variables.outputs.target_repo }}
if: |
!inputs.prerelease && env.target_repo == github.repository
CHANNEL: ${{ steps.setup_variables.outputs.channel }}
# Use base repo since this could be committed; build jobs will call this again with true origin
REPOSITORY: ${{ github.repository }}
VERSION: ${{ steps.setup_variables.outputs.version }}
run: |
python devscripts/update-version.py -c "${CHANNEL}" -r "${REPOSITORY}" "${VERSION}"
python devscripts/update_changelog.py -vv
make doc
- name: Push to release
id: push_release
env:
version: ${{ steps.setup_variables.outputs.version }}
target_repo: ${{ steps.setup_variables.outputs.target_repo }}
VERSION: ${{ steps.setup_variables.outputs.version }}
GITHUB_EVENT_SENDER_LOGIN: ${{ github.event.sender.login }}
GITHUB_EVENT_REF: ${{ github.event.ref }}
if: |
!inputs.prerelease && env.target_repo == github.repository
!inputs.prerelease && steps.setup_variables.outputs.target_repo == github.repository
run: |
git config --global user.name "github-actions[bot]"
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git add -u
git commit -m "Release ${{ env.version }}" \
-m "Created by: ${{ github.event.sender.login }}" -m ":ci skip all"
git push origin --force ${{ github.event.ref }}:release
git commit -m "Release ${VERSION}" \
-m "Created by: ${GITHUB_EVENT_SENDER_LOGIN}" -m ":ci skip all"
git push origin --force "${GITHUB_EVENT_REF}:release"
- name: Get target commitish
id: get_target
@@ -214,10 +144,10 @@ jobs:
- name: Update master
env:
target_repo: ${{ steps.setup_variables.outputs.target_repo }}
GITHUB_EVENT_REF: ${{ github.event.ref }}
if: |
vars.PUSH_VERSION_COMMIT != '' && !inputs.prerelease && env.target_repo == github.repository
run: git push origin ${{ github.event.ref }}
vars.PUSH_VERSION_COMMIT && !inputs.prerelease && steps.setup_variables.outputs.target_repo == github.repository
run: git push origin "${GITHUB_EVENT_REF}"
build:
needs: prepare
@@ -226,10 +156,9 @@ jobs:
version: ${{ needs.prepare.outputs.version }}
channel: ${{ needs.prepare.outputs.channel }}
origin: ${{ needs.prepare.outputs.target_repo }}
linux_armv7l: ${{ inputs.linux_armv7l }}
permissions:
contents: read
packages: write # For package cache
actions: write # For cleaning up cache
secrets:
GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
@@ -255,16 +184,16 @@ jobs:
- name: Prepare
env:
version: ${{ needs.prepare.outputs.version }}
suffix: ${{ needs.prepare.outputs.pypi_suffix }}
channel: ${{ needs.prepare.outputs.channel }}
target_repo: ${{ needs.prepare.outputs.target_repo }}
pypi_project: ${{ needs.prepare.outputs.pypi_project }}
VERSION: ${{ needs.prepare.outputs.version }}
SUFFIX: ${{ needs.prepare.outputs.pypi_suffix }}
CHANNEL: ${{ needs.prepare.outputs.channel }}
TARGET_REPO: ${{ needs.prepare.outputs.target_repo }}
PYPI_PROJECT: ${{ needs.prepare.outputs.pypi_project }}
run: |
python devscripts/update-version.py -c "${{ env.channel }}" -r "${{ env.target_repo }}" -s "${{ env.suffix }}" "${{ env.version }}"
python devscripts/update-version.py -c "${CHANNEL}" -r "${TARGET_REPO}" -s "${SUFFIX}" "${VERSION}"
python devscripts/update_changelog.py -vv
python devscripts/make_lazy_extractors.py
sed -i -E '0,/(name = ")[^"]+(")/s//\1${{ env.pypi_project }}\2/' pyproject.toml
sed -i -E '0,/(name = ")[^"]+(")/s//\1'"${PYPI_PROJECT}"'\2/' pyproject.toml
- name: Build
run: |
@@ -298,7 +227,11 @@ jobs:
permissions:
contents: write
runs-on: ubuntu-latest
env:
TARGET_REPO: ${{ needs.prepare.outputs.target_repo }}
TARGET_TAG: ${{ needs.prepare.outputs.target_tag }}
VERSION: ${{ needs.prepare.outputs.version }}
HEAD_SHA: ${{ needs.prepare.outputs.head_sha }}
steps:
- uses: actions/checkout@v4
with:
@@ -314,81 +247,80 @@ jobs:
- name: Generate release notes
env:
head_sha: ${{ needs.prepare.outputs.head_sha }}
target_repo: ${{ needs.prepare.outputs.target_repo }}
target_tag: ${{ needs.prepare.outputs.target_tag }}
REPOSITORY: ${{ github.repository }}
BASE_REPO: yt-dlp/yt-dlp
NIGHTLY_REPO: yt-dlp/yt-dlp-nightly-builds
MASTER_REPO: yt-dlp/yt-dlp-master-builds
DOCS_PATH: ${{ env.TARGET_REPO == github.repository && format('/tree/{0}', env.TARGET_TAG) || '' }}
run: |
printf '%s' \
'[![Installation](https://img.shields.io/badge/-Which%20file%20to%20download%3F-white.svg?style=for-the-badge)]' \
'(https://github.com/${{ github.repository }}#installation "Installation instructions") ' \
'[![Discord](https://img.shields.io/discord/807245652072857610?color=blue&labelColor=555555&label=&logo=discord&style=for-the-badge)]' \
'(https://discord.gg/H5MNcFW63r "Discord") ' \
'[![Donate](https://img.shields.io/badge/_-Donate-red.svg?logo=githubsponsors&labelColor=555555&style=for-the-badge)]' \
'(https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators "Donate") ' \
'[![Documentation](https://img.shields.io/badge/-Docs-brightgreen.svg?style=for-the-badge&logo=GitBook&labelColor=555555)]' \
'(https://github.com/${{ github.repository }}' \
'${{ env.target_repo == github.repository && format('/tree/{0}', env.target_tag) || '' }}#readme "Documentation") ' \
${{ env.target_repo == 'yt-dlp/yt-dlp' && '\
"[![Installation](https://img.shields.io/badge/-Which%20file%20to%20download%3F-white.svg?style=for-the-badge)]" \
"(https://github.com/${REPOSITORY}#installation \"Installation instructions\") " \
"[![Discord](https://img.shields.io/discord/807245652072857610?color=blue&labelColor=555555&label=&logo=discord&style=for-the-badge)]" \
"(https://discord.gg/H5MNcFW63r \"Discord\") " \
"[![Donate](https://img.shields.io/badge/_-Donate-red.svg?logo=githubsponsors&labelColor=555555&style=for-the-badge)]" \
"(https://github.com/${BASE_REPO}/blob/master/Collaborators.md#collaborators \"Donate\") " \
"[![Documentation](https://img.shields.io/badge/-Docs-brightgreen.svg?style=for-the-badge&logo=GitBook&labelColor=555555)]" \
"(https://github.com/${REPOSITORY}${DOCS_PATH}#readme \"Documentation\") " > ./RELEASE_NOTES
if [[ "${TARGET_REPO}" == "${BASE_REPO}" ]]; then
printf '%s' \
"[![Nightly](https://img.shields.io/badge/Nightly%20builds-purple.svg?style=for-the-badge)]" \
"(https://github.com/yt-dlp/yt-dlp-nightly-builds/releases/latest \"Nightly builds\") " \
"(https://github.com/${NIGHTLY_REPO}/releases/latest \"Nightly builds\") " \
"[![Master](https://img.shields.io/badge/Master%20builds-lightblue.svg?style=for-the-badge)]" \
"(https://github.com/yt-dlp/yt-dlp-master-builds/releases/latest \"Master builds\")"' || '' }} > ./RELEASE_NOTES
"(https://github.com/${MASTER_REPO}/releases/latest \"Master builds\")" >> ./RELEASE_NOTES
fi
printf '\n\n' >> ./RELEASE_NOTES
cat >> ./RELEASE_NOTES << EOF
#### A description of the various files is in the [README](https://github.com/${{ github.repository }}#release-files)
#### A description of the various files is in the [README](https://github.com/${REPOSITORY}#release-files)
---
$(python ./devscripts/make_changelog.py -vv --collapsible)
EOF
printf '%s\n\n' '**This is a pre-release build**' >> ./PRERELEASE_NOTES
cat ./RELEASE_NOTES >> ./PRERELEASE_NOTES
printf '%s\n\n' 'Generated from: https://github.com/${{ github.repository }}/commit/${{ env.head_sha }}' >> ./ARCHIVE_NOTES
printf '%s\n\n' "Generated from: https://github.com/${REPOSITORY}/commit/${HEAD_SHA}" >> ./ARCHIVE_NOTES
cat ./RELEASE_NOTES >> ./ARCHIVE_NOTES
- name: Publish to archive repo
env:
GH_TOKEN: ${{ secrets[needs.prepare.outputs.target_repo_token] }}
GH_REPO: ${{ needs.prepare.outputs.target_repo }}
version: ${{ needs.prepare.outputs.version }}
channel: ${{ needs.prepare.outputs.channel }}
TITLE_PREFIX: ${{ startswith(env.TARGET_REPO, 'yt-dlp/') && 'yt-dlp ' || '' }}
TITLE: ${{ inputs.target != env.TARGET_REPO && inputs.target || needs.prepare.outputs.channel }}
if: |
inputs.prerelease && env.GH_TOKEN != '' && env.GH_REPO != '' && env.GH_REPO != github.repository
inputs.prerelease && env.GH_TOKEN && env.GH_REPO && env.GH_REPO != github.repository
run: |
title="${{ startswith(env.GH_REPO, 'yt-dlp/') && 'yt-dlp ' || '' }}${{ env.channel }}"
gh release create \
--notes-file ARCHIVE_NOTES \
--title "${title} ${{ env.version }}" \
${{ env.version }} \
--title "${TITLE_PREFIX}${TITLE} ${VERSION}" \
"${VERSION}" \
artifact/*
- name: Prune old release
env:
GH_TOKEN: ${{ github.token }}
version: ${{ needs.prepare.outputs.version }}
target_repo: ${{ needs.prepare.outputs.target_repo }}
target_tag: ${{ needs.prepare.outputs.target_tag }}
if: |
env.target_repo == github.repository && env.target_tag != env.version
env.TARGET_REPO == github.repository && env.TARGET_TAG != env.VERSION
run: |
gh release delete --yes --cleanup-tag "${{ env.target_tag }}" || true
git tag --delete "${{ env.target_tag }}" || true
gh release delete --yes --cleanup-tag "${TARGET_TAG}" || true
git tag --delete "${TARGET_TAG}" || true
sleep 5 # Enough time to cover deletion race condition
- name: Publish release
env:
GH_TOKEN: ${{ github.token }}
version: ${{ needs.prepare.outputs.version }}
target_repo: ${{ needs.prepare.outputs.target_repo }}
target_tag: ${{ needs.prepare.outputs.target_tag }}
head_sha: ${{ needs.prepare.outputs.head_sha }}
NOTES_FILE: ${{ inputs.prerelease && 'PRERELEASE_NOTES' || 'RELEASE_NOTES' }}
TITLE_PREFIX: ${{ github.repository == 'yt-dlp/yt-dlp' && 'yt-dlp ' || '' }}
TITLE: ${{ env.TARGET_TAG != env.VERSION && format('{0} ', env.TARGET_TAG) || '' }}
PRERELEASE: ${{ inputs.prerelease && '1' || '0' }}
if: |
env.target_repo == github.repository
env.TARGET_REPO == github.repository
run: |
title="${{ github.repository == 'yt-dlp/yt-dlp' && 'yt-dlp ' || '' }}"
title+="${{ env.target_tag != env.version && format('{0} ', env.target_tag) || '' }}"
gh release create \
--notes-file ${{ inputs.prerelease && 'PRERELEASE_NOTES' || 'RELEASE_NOTES' }} \
--target ${{ env.head_sha }} \
--title "${title}${{ env.version }}" \
${{ inputs.prerelease && '--prerelease' || '' }} \
${{ env.target_tag }} \
artifact/*
gh_options=(
--notes-file "${NOTES_FILE}"
--target "${HEAD_SHA}"
--title "${TITLE_PREFIX}${TITLE}${VERSION}"
)
if ((PRERELEASE)); then
gh_options+=(--prerelease)
fi
gh release create "${gh_options[@]}" "${TARGET_TAG}" artifact/*

@@ -105,14 +105,20 @@ File|Description
File|Description
:---|:---
[yt-dlp_linux](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux)|Linux (glibc 2.17+) standalone x86_64 binary
[yt-dlp_linux.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux.zip)|Unpackaged Linux (glibc 2.17+) x86_64 executable (no auto-update)
[yt-dlp_linux_aarch64](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_aarch64)|Linux (glibc 2.17+) standalone aarch64 binary
[yt-dlp_linux_aarch64.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_aarch64.zip)|Unpackaged Linux (glibc 2.17+) aarch64 executable (no auto-update)
[yt-dlp_linux_armv7l.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_armv7l.zip)|Unpackaged Linux (glibc 2.31+) armv7l executable (no auto-update)
[yt-dlp_musllinux](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_musllinux)|Linux (musl 1.2+) standalone x86_64 binary
[yt-dlp_musllinux.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_musllinux.zip)|Unpackaged Linux (musl 1.2+) x86_64 executable (no auto-update)
[yt-dlp_musllinux_aarch64](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_musllinux_aarch64)|Linux (musl 1.2+) standalone aarch64 binary
[yt-dlp_musllinux_aarch64.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_musllinux_aarch64.zip)|Unpackaged Linux (musl 1.2+) aarch64 executable (no auto-update)
[yt-dlp_x86.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_x86.exe)|Windows (Win8+) standalone x86 (32-bit) binary
[yt-dlp_arm64.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_arm64.exe)|Windows (Win10+) standalone arm64 (64-bit) binary
[yt-dlp_linux](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux)|Linux standalone x64 binary
[yt-dlp_linux_armv7l](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_armv7l)|Linux standalone armv7l (32-bit) binary
[yt-dlp_linux_aarch64](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_aarch64)|Linux standalone aarch64 (64-bit) binary
[yt-dlp_win_x86.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win_x86.zip)|Unpackaged Windows (Win8+) x86 (32-bit) executable (no auto-update)
[yt-dlp_arm64.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_arm64.exe)|Windows (Win10+) standalone ARM64 binary
[yt-dlp_win_arm64.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win_arm64.zip)|Unpackaged Windows (Win10+) ARM64 executable (no auto-update)
[yt-dlp_win.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win.zip)|Unpackaged Windows (Win8+) x64 executable (no auto-update)
[yt-dlp_win_x86.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win_x86.zip)|Unpackaged Windows (Win8+) x86 executable (no auto-update)
[yt-dlp_win_arm64.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win_arm64.zip)|Unpackaged Windows (Win10+) arm64 executable (no auto-update)
[yt-dlp_macos.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_macos.zip)|Unpackaged MacOS (10.15+) executable (no auto-update)
#### Misc
@@ -206,7 +212,7 @@ The following provide support for impersonating browser requests. This may be re
* [**curl_cffi**](https://github.com/lexiforest/curl_cffi) (recommended) - Python binding for [curl-impersonate](https://github.com/lexiforest/curl-impersonate). Provides impersonation targets for Chrome, Edge and Safari. Licensed under [MIT](https://github.com/lexiforest/curl_cffi/blob/main/LICENSE)
* Can be installed with the `curl-cffi` group, e.g. `pip install "yt-dlp[default,curl-cffi]"`
* Currently included in `yt-dlp.exe`, `yt-dlp_linux` and `yt-dlp_macos` builds
* Currently included in most builds *except* `yt-dlp` (Unix zipimport binary), `yt-dlp_x86` (Windows 32-bit) and `yt-dlp_musllinux_aarch64`
### Metadata

@@ -1,10 +1,153 @@
services:
static:
build: static
linux_x86_64:
build:
context: linux
target: build
platforms:
- "linux/amd64"
args:
BUILDIMAGE: ghcr.io/yt-dlp/manylinux2014_x86_64-shared:latest
environment:
channel: ${channel}
origin: ${origin}
version: ${version}
EXE_NAME: ${EXE_NAME:?}
CHANNEL: ${CHANNEL:?}
ORIGIN: ${ORIGIN:?}
VERSION:
volumes:
- ~/build:/build
- ../..:/yt-dlp
linux_x86_64_verify:
build:
context: linux
target: verify
platforms:
- "linux/amd64"
args:
VERIFYIMAGE: quay.io/pypa/manylinux2014_x86_64:latest
environment:
EXE_NAME: ${EXE_NAME:?}
volumes:
- ../../dist:/build
linux_aarch64:
build:
context: linux
target: build
platforms:
- "linux/arm64"
args:
BUILDIMAGE: ghcr.io/yt-dlp/manylinux2014_aarch64-shared:latest
environment:
EXE_NAME: ${EXE_NAME:?}
CHANNEL: ${CHANNEL:?}
ORIGIN: ${ORIGIN:?}
VERSION:
volumes:
- ../..:/yt-dlp
linux_aarch64_verify:
build:
context: linux
target: verify
platforms:
- "linux/arm64"
args:
VERIFYIMAGE: quay.io/pypa/manylinux2014_aarch64:latest
environment:
EXE_NAME: ${EXE_NAME:?}
SKIP_UPDATE_TO: "1" # TODO: remove when there is a glibc2.17 aarch64 release to --update-to
volumes:
- ../../dist:/build
linux_armv7l:
build:
context: linux
target: build
platforms:
- "linux/arm/v7"
args:
BUILDIMAGE: ghcr.io/yt-dlp/manylinux_2_31_armv7l-shared:latest
environment:
EXE_NAME: ${EXE_NAME:?}
CHANNEL: ${CHANNEL:?}
ORIGIN: ${ORIGIN:?}
VERSION:
SKIP_ONEFILE_BUILD: "1"
volumes:
- ../..:/yt-dlp
- ~/yt-dlp-build-venv:/yt-dlp-build-venv
linux_armv7l_verify:
build:
context: linux
target: verify
platforms:
- "linux/arm/v7"
args:
VERIFYIMAGE: arm32v7/debian:bullseye
environment:
EXE_NAME: ${EXE_NAME:?}
TEST_ONEDIR_BUILD: "1"
volumes:
- ../../dist:/build
musllinux_x86_64:
build:
context: linux
target: build
platforms:
- "linux/amd64"
args:
BUILDIMAGE: ghcr.io/yt-dlp/musllinux_1_2_x86_64-shared:latest
environment:
EXE_NAME: ${EXE_NAME:?}
CHANNEL: ${CHANNEL:?}
ORIGIN: ${ORIGIN:?}
VERSION:
volumes:
- ../..:/yt-dlp
musllinux_x86_64_verify:
build:
context: linux
target: verify
platforms:
- "linux/amd64"
args:
VERIFYIMAGE: alpine:3.22
environment:
EXE_NAME: ${EXE_NAME:?}
SKIP_UPDATE_TO: "1" # TODO: remove when there is a musllinux_aarch64 release to --update-to
volumes:
- ../../dist:/build
musllinux_aarch64:
build:
context: linux
target: build
platforms:
- "linux/arm64"
args:
BUILDIMAGE: ghcr.io/yt-dlp/musllinux_1_2_aarch64-shared:latest
environment:
EXE_NAME: ${EXE_NAME:?}
CHANNEL: ${CHANNEL:?}
ORIGIN: ${ORIGIN:?}
VERSION:
EXCLUDE_CURL_CFFI: "1"
volumes:
- ../..:/yt-dlp
musllinux_aarch64_verify:
build:
context: linux
target: verify
platforms:
- "linux/arm64"
args:
VERIFYIMAGE: alpine:3.22
environment:
EXE_NAME: ${EXE_NAME:?}
SKIP_UPDATE_TO: "1" # TODO: remove when there is a musllinux_aarch64 release to --update-to
volumes:
- ../../dist:/build

@@ -0,0 +1,16 @@
ARG BUILDIMAGE=ghcr.io/yt-dlp/manylinux2014_x86_64-shared:latest
ARG VERIFYIMAGE=alpine:3.22
FROM $BUILDIMAGE AS build
WORKDIR /yt-dlp
COPY build.sh /build.sh
ENTRYPOINT ["/build.sh"]
FROM $VERIFYIMAGE AS verify
WORKDIR /testing
COPY verify.sh /verify.sh
ENTRYPOINT ["/verify.sh"]

@@ -0,0 +1,46 @@
#!/bin/bash
set -exuo pipefail
if [[ -z "${USE_PYTHON_VERSION:-}" ]]; then
USE_PYTHON_VERSION="3.13"
fi
function runpy {
"/opt/shared-cpython-${USE_PYTHON_VERSION}/bin/python${USE_PYTHON_VERSION}" "$@"
}
function venvpy {
"python${USE_PYTHON_VERSION}" "$@"
}
INCLUDES=(
--include pyinstaller
--include secretstorage
)
if [[ -z "${EXCLUDE_CURL_CFFI:-}" ]]; then
INCLUDES+=(--include curl-cffi)
fi
runpy -m venv /yt-dlp-build-venv
source /yt-dlp-build-venv/bin/activate
# Inside the venv we use venvpy instead of runpy
venvpy -m ensurepip --upgrade --default-pip
venvpy -m devscripts.install_deps -o --include build
venvpy -m devscripts.install_deps "${INCLUDES[@]}"
venvpy -m devscripts.make_lazy_extractors
venvpy devscripts/update-version.py -c "${CHANNEL}" -r "${ORIGIN}" "${VERSION}"
if [[ -z "${SKIP_ONEDIR_BUILD:-}" ]]; then
mkdir -p /build
venvpy -m bundle.pyinstaller --onedir --distpath=/build
pushd "/build/${EXE_NAME}"
chmod +x "${EXE_NAME}"
venvpy -m zipfile -c "/yt-dlp/dist/${EXE_NAME}.zip" ./
popd
fi
if [[ -z "${SKIP_ONEFILE_BUILD:-}" ]]; then
venvpy -m bundle.pyinstaller
chmod +x "./dist/${EXE_NAME}"
fi
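Note that build.sh zips the onedir bundle with the stdlib zipfile CLI, so the build image needs no external zip tool; verify.sh below extracts it with the matching -e flag. A rough programmatic equivalent (paths assumed for illustration):

import zipfile

# Same as `python -m zipfile -c /yt-dlp/dist/yt-dlp_linux.zip ./`
zipfile.main(['-c', 'dist/yt-dlp_linux.zip', './'])
# Same as `python3 -m zipfile -e /build/yt-dlp_linux.zip ./`
zipfile.main(['-e', 'dist/yt-dlp_linux.zip', 'extracted/'])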

@@ -0,0 +1,44 @@
#!/bin/sh
set -eu
if [ -n "${TEST_ONEDIR_BUILD:-}" ]; then
echo "Extracting zip to verify onedir build"
if command -v python3 >/dev/null 2>&1; then
python3 -m zipfile -e "/build/${EXE_NAME}.zip" ./
else
echo "Attempting to install unzip"
if command -v dnf >/dev/null 2>&1; then
dnf -y install --allowerasing unzip
elif command -v yum >/dev/null 2>&1; then
yum -y install unzip
elif command -v apt-get >/dev/null 2>&1; then
DEBIAN_FRONTEND=noninteractive apt-get update -qq
DEBIAN_FRONTEND=noninteractive apt-get install -qq -y --no-install-recommends unzip
elif command -v apk >/dev/null 2>&1; then
apk add --no-cache unzip
else
echo "Unsupported image"
exit 1
fi
unzip "/build/${EXE_NAME}.zip" -d ./
fi
else
echo "Verifying onefile build"
cp "/build/${EXE_NAME}" ./
fi
chmod +x "./${EXE_NAME}"
if [ -n "${SKIP_UPDATE_TO:-}" ] || [ -n "${TEST_ONEDIR_BUILD:-}" ]; then
"./${EXE_NAME}" -v || true
"./${EXE_NAME}" --version
exit 0
fi
cp "./${EXE_NAME}" "./${EXE_NAME}_downgraded"
version="$("./${EXE_NAME}" --version)"
"./${EXE_NAME}_downgraded" -v --update-to yt-dlp/yt-dlp@2023.03.04
downgraded_version="$("./${EXE_NAME}_downgraded" --version)"
if [ "${version}" = "${downgraded_version}" ]; then
exit 1
fi

@@ -1,21 +0,0 @@
FROM alpine:3.19 as base
RUN apk --update add --no-cache \
build-base \
python3 \
pipx \
;
RUN pipx install pyinstaller
# Requires above step to prepare the shared venv
RUN ~/.local/share/pipx/shared/bin/python -m pip install -U wheel
RUN apk --update add --no-cache \
scons \
patchelf \
binutils \
;
RUN pipx install staticx
WORKDIR /yt-dlp
COPY entrypoint.sh /entrypoint.sh
ENTRYPOINT /entrypoint.sh

@@ -1,14 +0,0 @@
#!/bin/ash
set -e
source ~/.local/share/pipx/venvs/pyinstaller/bin/activate
python -m devscripts.install_deps -o --include build
python -m devscripts.install_deps --include secretstorage --include curl-cffi
python -m devscripts.make_lazy_extractors
python devscripts/update-version.py -c "${channel}" -r "${origin}" "${version}"
python -m bundle.pyinstaller
deactivate
source ~/.local/share/pipx/venvs/staticx/bin/activate
staticx /yt-dlp/dist/yt-dlp_linux /build/yt-dlp_linux
deactivate

@@ -13,6 +13,8 @@ from PyInstaller.__main__ import run as run_pyinstaller
from devscripts.utils import read_version
OS_NAME, MACHINE, ARCH = sys.platform, platform.machine().lower(), platform.architecture()[0][:2]
if OS_NAME == 'linux' and platform.libc_ver()[0] != 'glibc':
OS_NAME = 'musllinux'
if MACHINE in ('x86', 'x86_64', 'amd64', 'i386', 'i686'):
MACHINE = 'x86' if ARCH == '32' else ''
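The new musllinux branch keys off platform.libc_ver(), which reports the libc the running interpreter is linked against. A quick check (outputs are assumptions that vary by build):

import platform

# ('glibc', '2.17') inside a manylinux2014 image; musl-linked builds report no
# glibc version (e.g. ('', '')), so OS_NAME is switched to 'musllinux'
print(platform.libc_ver())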

@@ -0,0 +1,157 @@
# Allow direct execution
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import datetime as dt
import json
from devscripts.utils import calculate_version
STABLE_REPOSITORY = 'yt-dlp/yt-dlp'
def setup_variables(environment):
"""
`environment` must contain these keys:
REPOSITORY, INPUTS, PROCESSED,
PUSH_VERSION_COMMIT, PYPI_PROJECT,
SOURCE_PYPI_PROJECT, SOURCE_PYPI_SUFFIX,
TARGET_PYPI_PROJECT, TARGET_PYPI_SUFFIX,
SOURCE_ARCHIVE_REPO, TARGET_ARCHIVE_REPO,
HAS_SOURCE_ARCHIVE_REPO_TOKEN,
HAS_TARGET_ARCHIVE_REPO_TOKEN,
HAS_ARCHIVE_REPO_TOKEN
`INPUTS` must contain these keys:
prerelease
`PROCESSED` must contain these keys:
source_repo, source_tag,
target_repo, target_tag
"""
REPOSITORY = environment['REPOSITORY']
INPUTS = json.loads(environment['INPUTS'])
PROCESSED = json.loads(environment['PROCESSED'])
source_channel = None
does_not_have_needed_token = False
target_repo_token = None
pypi_project = None
pypi_suffix = None
source_repo = PROCESSED['source_repo']
source_tag = PROCESSED['source_tag']
if source_repo == 'stable':
source_repo = STABLE_REPOSITORY
if not source_repo:
source_repo = REPOSITORY
elif environment['SOURCE_ARCHIVE_REPO']:
source_channel = environment['SOURCE_ARCHIVE_REPO']
elif not source_tag and '/' not in source_repo:
source_tag = source_repo
source_repo = REPOSITORY
resolved_source = source_repo
if source_tag:
resolved_source = f'{resolved_source}@{source_tag}'
elif source_repo == STABLE_REPOSITORY:
resolved_source = 'stable'
revision = None
if INPUTS['prerelease'] or not environment['PUSH_VERSION_COMMIT']:
revision = dt.datetime.now(tz=dt.timezone.utc).strftime('%H%M%S')
version = calculate_version(INPUTS.get('version') or revision)
target_repo = PROCESSED['target_repo']
target_tag = PROCESSED['target_tag']
if target_repo:
if target_repo == 'stable':
target_repo = STABLE_REPOSITORY
if not target_tag:
if target_repo == STABLE_REPOSITORY:
target_tag = version
elif environment['TARGET_ARCHIVE_REPO']:
target_tag = source_tag or version
else:
target_tag = target_repo
target_repo = REPOSITORY
if target_repo != REPOSITORY:
target_repo = environment['TARGET_ARCHIVE_REPO']
target_repo_token = f'{PROCESSED["target_repo"].upper()}_ARCHIVE_REPO_TOKEN'
if not json.loads(environment['HAS_TARGET_ARCHIVE_REPO_TOKEN']):
does_not_have_needed_token = True
pypi_project = environment['TARGET_PYPI_PROJECT'] or None
pypi_suffix = environment['TARGET_PYPI_SUFFIX'] or None
else:
target_tag = source_tag or version
if source_channel:
target_repo = source_channel
target_repo_token = f'{PROCESSED["source_repo"].upper()}_ARCHIVE_REPO_TOKEN'
if not json.loads(environment['HAS_SOURCE_ARCHIVE_REPO_TOKEN']):
does_not_have_needed_token = True
pypi_project = environment['SOURCE_PYPI_PROJECT'] or None
pypi_suffix = environment['SOURCE_PYPI_SUFFIX'] or None
else:
target_repo = REPOSITORY
if does_not_have_needed_token:
if not json.loads(environment['HAS_ARCHIVE_REPO_TOKEN']):
print(f'::error::Repository access secret {target_repo_token} not found')
return None
target_repo_token = 'ARCHIVE_REPO_TOKEN'
if target_repo == REPOSITORY and not INPUTS['prerelease']:
pypi_project = environment['PYPI_PROJECT'] or None
return {
'channel': resolved_source,
'version': version,
'target_repo': target_repo,
'target_repo_token': target_repo_token,
'target_tag': target_tag,
'pypi_project': pypi_project,
'pypi_suffix': pypi_suffix,
}
def process_inputs(inputs):
outputs = {}
for key in ('source', 'target'):
repo, _, tag = inputs.get(key, '').partition('@')
outputs[f'{key}_repo'] = repo
outputs[f'{key}_tag'] = tag
return outputs
if __name__ == '__main__':
if not os.getenv('GITHUB_OUTPUT'):
print('This script is only intended for use with GitHub Actions', file=sys.stderr)
sys.exit(1)
if 'process_inputs' in sys.argv:
inputs = json.loads(os.environ['INPUTS'])
print('::group::Inputs')
print(json.dumps(inputs, indent=2))
print('::endgroup::')
outputs = process_inputs(inputs)
print('::group::Processed')
print(json.dumps(outputs, indent=2))
print('::endgroup::')
with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
f.write('\n'.join(f'{key}={value}' for key, value in outputs.items()))
sys.exit(0)
outputs = setup_variables(dict(os.environ))
if not outputs:
sys.exit(1)
print('::group::Output variables')
print(json.dumps(outputs, indent=2))
print('::endgroup::')
with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
f.write('\n'.join(f'{key}={value or ""}' for key, value in outputs.items()))
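As a quick end-to-end sketch of the new module, run from the repository root (the variable/secret values here are assumed, mirroring the nightly case in the tests below):

import json
from devscripts.setup_variables import process_inputs, setup_variables

inputs = {'source': 'nightly', 'target': 'nightly', 'prerelease': True}
processed = process_inputs(inputs)  # {'source_repo': 'nightly', 'source_tag': '', ...}
outputs = setup_variables({
    # Keys mirror the env block of the "Setup variables" step in release.yml
    'INPUTS': json.dumps(inputs),
    'PROCESSED': json.dumps(processed),
    'REPOSITORY': 'yt-dlp/yt-dlp',
    'PUSH_VERSION_COMMIT': '1',
    'PYPI_PROJECT': 'yt-dlp',
    'SOURCE_PYPI_PROJECT': 'yt-dlp', 'SOURCE_PYPI_SUFFIX': 'dev',
    'TARGET_PYPI_PROJECT': 'yt-dlp', 'TARGET_PYPI_SUFFIX': 'dev',
    'SOURCE_ARCHIVE_REPO': 'yt-dlp/yt-dlp-nightly-builds',
    'TARGET_ARCHIVE_REPO': 'yt-dlp/yt-dlp-nightly-builds',
    'HAS_SOURCE_ARCHIVE_REPO_TOKEN': 'false',
    'HAS_TARGET_ARCHIVE_REPO_TOKEN': 'false',
    'HAS_ARCHIVE_REPO_TOKEN': 'true',
})
assert outputs['channel'] == 'nightly'
assert outputs['target_repo'] == 'yt-dlp/yt-dlp-nightly-builds'
assert outputs['target_repo_token'] == 'ARCHIVE_REPO_TOKEN'
assert outputs['pypi_suffix'] == 'dev'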

@@ -0,0 +1,331 @@
# Allow direct execution
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import datetime as dt
import json
from devscripts.setup_variables import STABLE_REPOSITORY, process_inputs, setup_variables
from devscripts.utils import calculate_version
def _test(github_repository, note, repo_vars, repo_secrets, inputs, expected=None, ignore_revision=False):
inp = inputs.copy()
inp.setdefault('linux_armv7l', True)
inp.setdefault('prerelease', False)
processed = process_inputs(inp)
source_repo = processed['source_repo'].upper()
target_repo = processed['target_repo'].upper()
variables = {k.upper(): v for k, v in repo_vars.items()}
secrets = {k.upper(): v for k, v in repo_secrets.items()}
env = {
# Keep this in sync with prepare.setup_variables in release.yml
'INPUTS': json.dumps(inp),
'PROCESSED': json.dumps(processed),
'REPOSITORY': github_repository,
'PUSH_VERSION_COMMIT': variables.get('PUSH_VERSION_COMMIT') or '',
'PYPI_PROJECT': variables.get('PYPI_PROJECT') or '',
'SOURCE_PYPI_PROJECT': variables.get(f'{source_repo}_PYPI_PROJECT') or '',
'SOURCE_PYPI_SUFFIX': variables.get(f'{source_repo}_PYPI_SUFFIX') or '',
'TARGET_PYPI_PROJECT': variables.get(f'{target_repo}_PYPI_PROJECT') or '',
'TARGET_PYPI_SUFFIX': variables.get(f'{target_repo}_PYPI_SUFFIX') or '',
'SOURCE_ARCHIVE_REPO': variables.get(f'{source_repo}_ARCHIVE_REPO') or '',
'TARGET_ARCHIVE_REPO': variables.get(f'{target_repo}_ARCHIVE_REPO') or '',
'HAS_SOURCE_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get(f'{source_repo}_ARCHIVE_REPO_TOKEN'))),
'HAS_TARGET_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get(f'{target_repo}_ARCHIVE_REPO_TOKEN'))),
'HAS_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get('ARCHIVE_REPO_TOKEN'))),
}
result = setup_variables(env)
if not expected:
print(' {\n' + '\n'.join(f' {k!r}: {v!r},' for k, v in result.items()) + '\n }')
return
exp = expected.copy()
if ignore_revision:
assert len(result['version']) == len(exp['version']), f'revision missing: {github_repository} {note}'
version_is_tag = result['version'] == result['target_tag']
for dct in (result, exp):
dct['version'] = '.'.join(dct['version'].split('.')[:3])
if version_is_tag:
dct['target_tag'] = dct['version']
assert result == exp, f'unexpected result: {github_repository} {note}'
def main():
DEFAULT_VERSION_WITH_REVISION = dt.datetime.now(tz=dt.timezone.utc).strftime('%Y.%m.%d.%H%M%S')
DEFAULT_VERSION = calculate_version()
BASE_REPO_VARS = {
'MASTER_ARCHIVE_REPO': 'yt-dlp/yt-dlp-master-builds',
'NIGHTLY_ARCHIVE_REPO': 'yt-dlp/yt-dlp-nightly-builds',
'NIGHTLY_PYPI_PROJECT': 'yt-dlp',
'NIGHTLY_PYPI_SUFFIX': 'dev',
'PUSH_VERSION_COMMIT': '1',
'PYPI_PROJECT': 'yt-dlp',
}
BASE_REPO_SECRETS = {
'ARCHIVE_REPO_TOKEN': '1',
}
FORK_REPOSITORY = 'fork/yt-dlp'
FORK_ORG = FORK_REPOSITORY.partition('/')[0]
_test(
STABLE_REPOSITORY, 'official vars/secrets, stable',
BASE_REPO_VARS, BASE_REPO_SECRETS, {}, {
'channel': 'stable',
'version': DEFAULT_VERSION,
'target_repo': STABLE_REPOSITORY,
'target_repo_token': None,
'target_tag': DEFAULT_VERSION,
'pypi_project': 'yt-dlp',
'pypi_suffix': None,
})
_test(
STABLE_REPOSITORY, 'official vars/secrets, nightly (w/o target)',
BASE_REPO_VARS, BASE_REPO_SECRETS, {
'source': 'nightly',
'prerelease': True,
}, {
'channel': 'nightly',
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': 'yt-dlp/yt-dlp-nightly-builds',
'target_repo_token': 'ARCHIVE_REPO_TOKEN',
'target_tag': DEFAULT_VERSION_WITH_REVISION,
'pypi_project': 'yt-dlp',
'pypi_suffix': 'dev',
}, ignore_revision=True)
_test(
STABLE_REPOSITORY, 'official vars/secrets, nightly',
BASE_REPO_VARS, BASE_REPO_SECRETS, {
'source': 'nightly',
'target': 'nightly',
'prerelease': True,
}, {
'channel': 'nightly',
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': 'yt-dlp/yt-dlp-nightly-builds',
'target_repo_token': 'ARCHIVE_REPO_TOKEN',
'target_tag': DEFAULT_VERSION_WITH_REVISION,
'pypi_project': 'yt-dlp',
'pypi_suffix': 'dev',
}, ignore_revision=True)
_test(
STABLE_REPOSITORY, 'official vars/secrets, master (w/o target)',
BASE_REPO_VARS, BASE_REPO_SECRETS, {
'source': 'master',
'prerelease': True,
}, {
'channel': 'master',
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': 'yt-dlp/yt-dlp-master-builds',
'target_repo_token': 'ARCHIVE_REPO_TOKEN',
'target_tag': DEFAULT_VERSION_WITH_REVISION,
'pypi_project': None,
'pypi_suffix': None,
}, ignore_revision=True)
_test(
STABLE_REPOSITORY, 'official vars/secrets, master',
BASE_REPO_VARS, BASE_REPO_SECRETS, {
'source': 'master',
'target': 'master',
'prerelease': True,
}, {
'channel': 'master',
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': 'yt-dlp/yt-dlp-master-builds',
'target_repo_token': 'ARCHIVE_REPO_TOKEN',
'target_tag': DEFAULT_VERSION_WITH_REVISION,
'pypi_project': None,
'pypi_suffix': None,
}, ignore_revision=True)
_test(
STABLE_REPOSITORY, 'official vars/secrets, special tag, updates to stable',
BASE_REPO_VARS, BASE_REPO_SECRETS, {
'target': f'{STABLE_REPOSITORY}@experimental',
'prerelease': True,
}, {
'channel': 'stable',
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': STABLE_REPOSITORY,
'target_repo_token': None,
'target_tag': 'experimental',
'pypi_project': None,
'pypi_suffix': None,
}, ignore_revision=True)
_test(
STABLE_REPOSITORY, 'official vars/secrets, special tag, "stable" as target repo',
BASE_REPO_VARS, BASE_REPO_SECRETS, {
'target': 'stable@experimental',
'prerelease': True,
}, {
'channel': 'stable',
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': STABLE_REPOSITORY,
'target_repo_token': None,
'target_tag': 'experimental',
'pypi_project': None,
'pypi_suffix': None,
}, ignore_revision=True)
_test(
FORK_REPOSITORY, 'fork w/o vars/secrets, stable',
{}, {}, {}, {
'channel': FORK_REPOSITORY,
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': FORK_REPOSITORY,
'target_repo_token': None,
'target_tag': DEFAULT_VERSION_WITH_REVISION,
'pypi_project': None,
'pypi_suffix': None,
}, ignore_revision=True)
_test(
FORK_REPOSITORY, 'fork w/o vars/secrets, prerelease',
{}, {}, {'prerelease': True}, {
'channel': FORK_REPOSITORY,
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': FORK_REPOSITORY,
'target_repo_token': None,
'target_tag': DEFAULT_VERSION_WITH_REVISION,
'pypi_project': None,
'pypi_suffix': None,
}, ignore_revision=True)
_test(
FORK_REPOSITORY, 'fork w/o vars/secrets, nightly',
{}, {}, {
'prerelease': True,
'source': 'nightly',
'target': 'nightly',
}, {
'channel': f'{FORK_REPOSITORY}@nightly',
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': FORK_REPOSITORY,
'target_repo_token': None,
'target_tag': 'nightly',
'pypi_project': None,
'pypi_suffix': None,
}, ignore_revision=True)
_test(
FORK_REPOSITORY, 'fork w/o vars/secrets, master',
{}, {}, {
'prerelease': True,
'source': 'master',
'target': 'master',
}, {
'channel': f'{FORK_REPOSITORY}@master',
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': FORK_REPOSITORY,
'target_repo_token': None,
'target_tag': 'master',
'pypi_project': None,
'pypi_suffix': None,
}, ignore_revision=True)
_test(
FORK_REPOSITORY, 'fork w/o vars/secrets, revision',
{}, {}, {'version': '123'}, {
'channel': FORK_REPOSITORY,
'version': f'{DEFAULT_VERSION[:10]}.123',
'target_repo': FORK_REPOSITORY,
'target_repo_token': None,
'target_tag': f'{DEFAULT_VERSION[:10]}.123',
'pypi_project': None,
'pypi_suffix': None,
})
_test(
FORK_REPOSITORY, 'fork w/ PUSH_VERSION_COMMIT, stable',
{'PUSH_VERSION_COMMIT': '1'}, {}, {}, {
'channel': FORK_REPOSITORY,
'version': DEFAULT_VERSION,
'target_repo': FORK_REPOSITORY,
'target_repo_token': None,
'target_tag': DEFAULT_VERSION,
'pypi_project': None,
'pypi_suffix': None,
})
_test(
FORK_REPOSITORY, 'fork w/ PUSH_VERSION_COMMIT, prerelease',
{'PUSH_VERSION_COMMIT': '1'}, {}, {'prerelease': True}, {
'channel': FORK_REPOSITORY,
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': FORK_REPOSITORY,
'target_repo_token': None,
'target_tag': DEFAULT_VERSION_WITH_REVISION,
'pypi_project': None,
'pypi_suffix': None,
}, ignore_revision=True)
_test(
        FORK_REPOSITORY, 'fork w/ NIGHTLY_ARCHIVE_REPO_TOKEN, nightly', {
'NIGHTLY_ARCHIVE_REPO': f'{FORK_ORG}/yt-dlp-nightly-builds',
'PYPI_PROJECT': 'yt-dlp-test',
}, {
'NIGHTLY_ARCHIVE_REPO_TOKEN': '1',
}, {
'source': f'{FORK_ORG}/yt-dlp-nightly-builds',
'target': 'nightly',
'prerelease': True,
}, {
'channel': f'{FORK_ORG}/yt-dlp-nightly-builds',
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': f'{FORK_ORG}/yt-dlp-nightly-builds',
'target_repo_token': 'NIGHTLY_ARCHIVE_REPO_TOKEN',
'target_tag': DEFAULT_VERSION_WITH_REVISION,
'pypi_project': None,
'pypi_suffix': None,
}, ignore_revision=True)
_test(
        FORK_REPOSITORY, 'fork w/ MASTER_ARCHIVE_REPO_TOKEN, master', {
'MASTER_ARCHIVE_REPO': f'{FORK_ORG}/yt-dlp-master-builds',
'MASTER_PYPI_PROJECT': 'yt-dlp-test',
'MASTER_PYPI_SUFFIX': 'dev',
}, {
'MASTER_ARCHIVE_REPO_TOKEN': '1',
}, {
'source': f'{FORK_ORG}/yt-dlp-master-builds',
'target': 'master',
'prerelease': True,
}, {
'channel': f'{FORK_ORG}/yt-dlp-master-builds',
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': f'{FORK_ORG}/yt-dlp-master-builds',
'target_repo_token': 'MASTER_ARCHIVE_REPO_TOKEN',
'target_tag': DEFAULT_VERSION_WITH_REVISION,
'pypi_project': 'yt-dlp-test',
'pypi_suffix': 'dev',
}, ignore_revision=True)
_test(
FORK_REPOSITORY, 'fork, non-numeric tag',
{}, {}, {'source': 'experimental'}, {
'channel': f'{FORK_REPOSITORY}@experimental',
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': FORK_REPOSITORY,
'target_repo_token': None,
'target_tag': 'experimental',
'pypi_project': None,
'pypi_suffix': None,
}, ignore_revision=True)
_test(
FORK_REPOSITORY, 'fork, non-numeric tag, updates to stable',
{}, {}, {
'prerelease': True,
'source': 'stable',
'target': 'experimental',
}, {
'channel': 'stable',
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': FORK_REPOSITORY,
'target_repo_token': None,
'target_tag': 'experimental',
'pypi_project': None,
'pypi_suffix': None,
}, ignore_revision=True)
print('all tests passed')
if __name__ == '__main__':
main()

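As a point of reference, here is a minimal sketch (not part of the diff) of driving the new helper the same way these self-tests do. The module path and the PROCESSED payload are assumptions inferred from this PR; setup_variables() is shown returning the channel/version/target fields asserted above.

import json

from devscripts.setup_variables import setup_variables  # module added by this PR (path assumed)

# Mirrors the env mapping built by _test above for the 'official vars/secrets, nightly' case;
# PROCESSED is assumed to carry the "process" job's outputs
env = {
    'INPUTS': json.dumps({'source': 'nightly', 'target': 'nightly', 'prerelease': True}),
    'PROCESSED': json.dumps({'origin': 'yt-dlp/yt-dlp', 'version': '2025.09.05.123456'}),
    'REPOSITORY': 'yt-dlp/yt-dlp',
    'PUSH_VERSION_COMMIT': '1',
    'PYPI_PROJECT': 'yt-dlp',
    'SOURCE_PYPI_PROJECT': 'yt-dlp',
    'SOURCE_PYPI_SUFFIX': 'dev',
    'TARGET_PYPI_PROJECT': 'yt-dlp',
    'TARGET_PYPI_SUFFIX': 'dev',
    'SOURCE_ARCHIVE_REPO': 'yt-dlp/yt-dlp-nightly-builds',
    'TARGET_ARCHIVE_REPO': 'yt-dlp/yt-dlp-nightly-builds',
    'HAS_SOURCE_ARCHIVE_REPO_TOKEN': 'false',
    'HAS_TARGET_ARCHIVE_REPO_TOKEN': 'false',
    'HAS_ARCHIVE_REPO_TOKEN': 'true',
}
result = setup_variables(env)
print(result['channel'])      # 'nightly'
print(result['target_repo'])  # 'yt-dlp/yt-dlp-nightly-builds'
print(result['target_tag'])   # '2025.09.05.123456'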
@ -9,24 +9,9 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import argparse
import contextlib
import datetime as dt
import sys
from devscripts.utils import read_version, run_process, write_file
def get_new_version(version, revision):
if not version:
version = dt.datetime.now(dt.timezone.utc).strftime('%Y.%m.%d')
if revision:
assert revision.isdecimal(), 'Revision must be a number'
else:
old_version = read_version().split('.')
if version.split('.') == old_version[:3]:
revision = str(int(([*old_version, 0])[3]) + 1)
return f'{version}.{revision}' if revision else version
from devscripts.utils import calculate_version, run_process, write_file
def get_git_head():
@ -72,9 +57,7 @@ if __name__ == '__main__':
args = parser.parse_args()
git_head = get_git_head()
version = (
args.version if args.version and '.' in args.version
else get_new_version(None, args.version))
version = calculate_version(args.version)
write_file(args.output, VERSION_TEMPLATE.format(
version=version, git_head=git_head, channel=args.channel, origin=args.origin,
package_version=f'{version}{args.suffix}'))

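update_version.py now delegates version resolution to the shared helper, so passing a bare revision or nothing at all behaves exactly like calculate_version() below. A sketch, assuming the script's existing positional version argument:

python devscripts/update_version.py                # version = <today>, or <today>.<rev+1> on same-day rebuilds
python devscripts/update_version.py 123            # version = <today>.123
python devscripts/update_version.py 2025.09.02     # complete version used as-is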
@ -20,7 +20,9 @@ if __name__ == '__main__':
'--changelog-path', type=Path, default=Path(__file__).parent.parent / 'Changelog.md',
help='path to the Changelog file')
args = parser.parse_args()
new_entry = create_changelog(args)
header, sep, changelog = read_file(args.changelog_path).partition('\n### ')
write_file(args.changelog_path, f'{header}{sep}{read_version()}\n{new_entry}\n{sep}{changelog}')
current_version = read_version()
if current_version != changelog.splitlines()[0]:
new_entry = create_changelog(args)
write_file(args.changelog_path, f'{header}{sep}{current_version}\n{new_entry}\n{sep}{changelog}')

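The new guard only prepends a changelog entry when the version has actually changed. A quick illustration of the partition logic with sample data (assuming Changelog.md sections start with '### <version>'):

text = '# Changelog\n\n### 2025.09.05\n- earlier entry\n'
header, sep, changelog = text.partition('\n### ')
# header == '# Changelog\n', sep == '\n### '
# changelog.splitlines()[0] == '2025.09.05', i.e. the most recently released version;
# if read_version() still returns '2025.09.05', no duplicate entry is written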
@ -1,5 +1,7 @@
import argparse
import datetime as dt
import functools
import re
import subprocess
@ -20,6 +22,23 @@ def read_version(fname='yt_dlp/version.py', varname='__version__'):
return items[varname]
def calculate_version(version=None, fname='yt_dlp/version.py'):
if version and '.' in version:
return version
revision = version
version = dt.datetime.now(dt.timezone.utc).strftime('%Y.%m.%d')
if revision:
assert re.fullmatch(r'[0-9]+', revision), 'Revision must be numeric'
else:
old_version = read_version(fname=fname).split('.')
if version.split('.') == old_version[:3]:
revision = str(int(([*old_version, 0])[3]) + 1)
return f'{version}.{revision}' if revision else version
def get_filename_args(has_infile=False, default_outfile=None):
parser = argparse.ArgumentParser()
if has_infile:

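The consolidated calculate_version() above covers three cases; sketched outputs assuming today is 2025.09.05 and yt_dlp/version.py currently holds 2025.09.05.1:

from devscripts.utils import calculate_version

calculate_version('2025.09.02')  # -> '2025.09.02'     (complete versions pass through unchanged)
calculate_version('123')         # -> '2025.09.05.123' (bare numeric revision appended to today's date)
calculate_version()              # -> '2025.09.05.2'   (revision auto-incremented when today's date
                                 #                      matches the current version's date)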
@ -9,7 +9,7 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from test.helper import FakeYDL, report_warning
from yt_dlp.update import UpdateInfo, Updater
from yt_dlp.update import UpdateInfo, Updater, UPDATE_SOURCES, _make_label
# XXX: Keep in sync with yt_dlp.update.UPDATE_SOURCES
@ -280,6 +280,26 @@ class TestUpdate(unittest.TestCase):
test('testing', None, current_commit='9' * 40)
test('testing', UpdateInfo('testing', commit='9' * 40))
def test_make_label(self):
STABLE_REPO = UPDATE_SOURCES['stable']
NIGHTLY_REPO = UPDATE_SOURCES['nightly']
MASTER_REPO = UPDATE_SOURCES['master']
for inputs, expected in [
([STABLE_REPO, '2025.09.02', '2025.09.02'], f'stable@2025.09.02 from {STABLE_REPO}'),
([NIGHTLY_REPO, '2025.09.02.123456', '2025.09.02.123456'], f'nightly@2025.09.02.123456 from {NIGHTLY_REPO}'),
([MASTER_REPO, '2025.09.02.987654', '2025.09.02.987654'], f'master@2025.09.02.987654 from {MASTER_REPO}'),
(['fork/yt-dlp', 'experimental', '2025.12.31.000000'], 'fork/yt-dlp@experimental build 2025.12.31.000000'),
(['fork/yt-dlp', '2025.09.02', '2025.09.02'], 'fork/yt-dlp@2025.09.02'),
([STABLE_REPO, 'experimental', '2025.12.31.000000'], f'{STABLE_REPO}@experimental build 2025.12.31.000000'),
([STABLE_REPO, 'experimental'], f'{STABLE_REPO}@experimental'),
(['fork/yt-dlp', 'experimental'], 'fork/yt-dlp@experimental'),
]:
result = _make_label(*inputs)
self.assertEqual(
result, expected,
f'{inputs!r} returned {result!r} instead of {expected!r}')
if __name__ == '__main__':
unittest.main()

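Since test_update.py installs its own sys.path entry and calls unittest.main(), the new test_make_label cases can be run directly:

python test/test_update.py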
@ -58,15 +58,28 @@ def _get_variant_and_executable_path():
"""@returns (variant, executable_path)"""
if getattr(sys, 'frozen', False):
path = sys.executable
# py2exe is unsupported but we should still correctly identify it for debugging purposes
# py2exe: No longer officially supported, but still identify it to block updates
if not hasattr(sys, '_MEIPASS'):
return 'py2exe', path
if sys._MEIPASS == os.path.dirname(path):
return f'{sys.platform}_dir', path
if sys.platform == 'darwin':
# staticx builds: sys.executable returns a /tmp/ path
# No longer officially supported, but still identify them to block updates
# Ref: https://staticx.readthedocs.io/en/latest/usage.html#run-time-information
if static_exe_path := os.getenv('STATICX_PROG_PATH'):
return 'linux_static_exe', static_exe_path
# We know it's a PyInstaller bundle, but is it "onedir" or "onefile"?
suffix = 'dir' if sys._MEIPASS == os.path.dirname(path) else 'exe'
system_platform = remove_end(sys.platform, '32')
if system_platform == 'darwin':
# darwin_legacy_exe is no longer supported, but still identify it to block updates
machine = '_legacy' if version_tuple(platform.mac_ver()[0]) < (10, 15) else ''
return f'darwin{machine}_exe', path
return f'darwin{machine}_{suffix}', path
if system_platform == 'linux' and platform.libc_ver()[0] != 'glibc':
system_platform = 'musllinux'
machine = f'_{platform.machine().lower()}'
is_64bits = sys.maxsize > 2**32
@ -77,12 +90,8 @@ def _get_variant_and_executable_path():
# See: https://github.com/yt-dlp/yt-dlp/issues/11813
elif machine[1:] == 'aarch64' and not is_64bits:
machine = '_armv7l'
# sys.executable returns a /tmp/ path for staticx builds (linux_static)
# Ref: https://staticx.readthedocs.io/en/latest/usage.html#run-time-information
if static_exe_path := os.getenv('STATICX_PROG_PATH'):
path = static_exe_path
return f'{remove_end(sys.platform, "32")}{machine}_exe', path
return f'{system_platform}{machine}_{suffix}', path
path = os.path.dirname(__file__)
if isinstance(__loader__, zipimporter):
@ -118,7 +127,8 @@ _FILE_SUFFIXES = {
'darwin_exe': '_macos',
'linux_exe': '_linux',
'linux_aarch64_exe': '_linux_aarch64',
'linux_armv7l_exe': '_linux_armv7l',
'musllinux_exe': '_musllinux',
'musllinux_aarch64_exe': '_musllinux_aarch64',
}
_NON_UPDATEABLE_REASONS = {
@ -146,21 +156,6 @@ def _get_binary_name():
def _get_system_deprecation():
MIN_SUPPORTED, MIN_RECOMMENDED = (3, 9), (3, 10)
EXE_MSG_TMPL = ('Support for {} has been deprecated. '
'See https://github.com/yt-dlp/yt-dlp/{} for details.\n{}')
STOP_MSG = 'You may stop receiving updates on this version at any time!'
variant = detect_variant()
# Temporary until linux_armv7l executable builds are discontinued
if variant == 'linux_armv7l_exe':
return EXE_MSG_TMPL.format(
f'{variant} (the PyInstaller-bundled executable for the Linux armv7l platform)',
'issues/13976', STOP_MSG)
# Temporary until linux_aarch64_exe is built with Python >=3.10 instead of Python 3.9
if variant == 'linux_aarch64_exe':
return None
if sys.version_info > MIN_RECOMMENDED:
return None
@ -199,16 +194,14 @@ def _sha256_file(path):
def _make_label(origin, tag, version=None):
if '/' in origin:
channel = _INVERSE_UPDATE_SOURCES.get(origin, origin)
else:
channel = origin
label = f'{channel}@{tag}'
if version and version != tag:
label += f' build {version}'
if channel != origin:
label += f' from {origin}'
return label
if tag != version:
if version:
return f'{origin}@{tag} build {version}'
return f'{origin}@{tag}'
if channel := _INVERSE_UPDATE_SOURCES.get(origin):
return f'{channel}@{tag} from {origin}'
return f'{origin}@{tag}'
@dataclass

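For reference: with the detection rewrite above, PyInstaller variants now uniformly encode platform, libc flavor, machine, and bundle type (e.g. 'linux_aarch64_exe', 'musllinux_exe', 'linux_armv7l_dir'), and the simplified _make_label() yields labels consistent with the test_make_label expectations. A few sketched calls, assuming _INVERSE_UPDATE_SOURCES mirrors UPDATE_SOURCES:

_make_label('yt-dlp/yt-dlp-nightly-builds', '2025.09.02.123456', '2025.09.02.123456')
# -> 'nightly@2025.09.02.123456 from yt-dlp/yt-dlp-nightly-builds'
_make_label('fork/yt-dlp', 'experimental', '2025.12.31.000000')
# -> 'fork/yt-dlp@experimental build 2025.12.31.000000'
_make_label('fork/yt-dlp', '2025.09.02', '2025.09.02')
# -> 'fork/yt-dlp@2025.09.02'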