Mirror of https://github.com/yt-dlp/yt-dlp.git (synced 2025-12-06 06:45:00 +01:00)

Merge branch 'master' into browser_settings_improvements

This commit is contained in: commit 2f5663cd2f

241 changed files with 15509 additions and 5857 deletions
2 .github/FUNDING.yml vendored

@@ -10,4 +10,4 @@ liberapay: # Replace with a single Liberapay username
 issuehunt: # Replace with a single IssueHunt username
 otechie: # Replace with a single Otechie username
-custom: ['https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators']
+custom: ['https://github.com/yt-dlp/yt-dlp/blob/master/Maintainers.md#maintainers']
2 .github/ISSUE_TEMPLATE/1_broken_site.yml vendored

@@ -24,6 +24,8 @@ body:
           required: true
         - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
+          required: true
         - label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
   - type: input
     id: region
2 .github/ISSUE_TEMPLATE/2_site_support_request.yml vendored

@@ -24,6 +24,8 @@ body:
           required: true
         - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar requests **including closed ones**. DO NOT post duplicates
           required: true
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
+          required: true
         - label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and am willing to share it if required
   - type: input
     id: region
2 .github/ISSUE_TEMPLATE/3_site_feature_request.yml vendored

@@ -22,6 +22,8 @@ body:
           required: true
         - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar requests **including closed ones**. DO NOT post duplicates
           required: true
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
+          required: true
         - label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
   - type: input
     id: region
2 .github/ISSUE_TEMPLATE/4_bug_report.yml vendored

@@ -20,6 +20,8 @@ body:
           required: true
         - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
+          required: true
   - type: textarea
     id: description
     attributes:
2 .github/ISSUE_TEMPLATE/5_feature_request.yml vendored

@@ -22,6 +22,8 @@ body:
           required: true
         - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar requests **including closed ones**. DO NOT post duplicates
           required: true
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
+          required: true
   - type: textarea
     id: description
     attributes:
2 .github/ISSUE_TEMPLATE/6_question.yml vendored

@@ -28,6 +28,8 @@ body:
           required: true
         - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar questions **including closed ones**. DO NOT post duplicates
           required: true
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
+          required: true
   - type: textarea
     id: question
     attributes:
2 .github/ISSUE_TEMPLATE_tmpl/1_broken_site.yml vendored

@@ -20,6 +20,8 @@ body:
           required: true
         - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
+          required: true
         - label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
   - type: input
     id: region
2 .github/ISSUE_TEMPLATE_tmpl/2_site_support_request.yml vendored

@@ -20,6 +20,8 @@ body:
           required: true
         - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar requests **including closed ones**. DO NOT post duplicates
           required: true
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
+          required: true
         - label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and am willing to share it if required
   - type: input
     id: region
2 .github/ISSUE_TEMPLATE_tmpl/3_site_feature_request.yml vendored

@@ -18,6 +18,8 @@ body:
           required: true
         - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar requests **including closed ones**. DO NOT post duplicates
           required: true
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
+          required: true
         - label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
   - type: input
     id: region
2 .github/ISSUE_TEMPLATE_tmpl/4_bug_report.yml vendored

@@ -16,6 +16,8 @@ body:
           required: true
         - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
+          required: true
   - type: textarea
     id: description
     attributes:
2 .github/ISSUE_TEMPLATE_tmpl/5_feature_request.yml vendored

@@ -18,6 +18,8 @@ body:
           required: true
         - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar requests **including closed ones**. DO NOT post duplicates
           required: true
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
+          required: true
   - type: textarea
     id: description
     attributes:
2 .github/ISSUE_TEMPLATE_tmpl/6_question.yml vendored

@@ -24,6 +24,8 @@ body:
           required: true
         - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar questions **including closed ones**. DO NOT post duplicates
           required: true
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
+          required: true
   - type: textarea
     id: question
     attributes:
1 .github/PULL_REQUEST_TEMPLATE.md vendored

@@ -33,6 +33,7 @@ Fixes #
 ### In order to be accepted and merged into yt-dlp each piece of code must be in public domain or released under [Unlicense](http://unlicense.org/). Check those that apply and remove the others:
 - [ ] I am the original author of the code in this PR, and I am willing to release it under [Unlicense](http://unlicense.org/)
 - [ ] I am not the original author of the code in this PR, but it is in the public domain or released under [Unlicense](http://unlicense.org/) (provide reliable evidence)
+- [ ] I have read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated

 ### What is the purpose of your *pull request*? Check those that apply and remove the others:
 - [ ] Fix or improvement to an extractor (Make sure to add/update tests)
22 .github/actionlint.yml vendored Normal file

@@ -0,0 +1,22 @@
+config-variables:
+  - KEEP_CACHE_WARM
+  - PUSH_VERSION_COMMIT
+  - UPDATE_TO_VERIFICATION
+  - PYPI_PROJECT
+  - PYPI_SUFFIX
+  - NIGHTLY_PYPI_PROJECT
+  - NIGHTLY_PYPI_SUFFIX
+  - NIGHTLY_ARCHIVE_REPO
+  - BUILD_NIGHTLY
+  - MASTER_PYPI_PROJECT
+  - MASTER_PYPI_SUFFIX
+  - MASTER_ARCHIVE_REPO
+  - BUILD_MASTER
+  - ISSUE_LOCKDOWN
+  - SANITIZE_COMMENT
+
+paths:
+  .github/workflows/build.yml:
+    ignore:
+      # SC1090 "Can't follow non-constant source": ignore when using `source` to activate venv
+      - '.+SC1090.+'
480 .github/workflows/build.yml vendored

@@ -9,13 +9,19 @@ on:
       required: false
       default: stable
       type: string
+    origin:
+      required: true
+      type: string
     unix:
       default: true
       type: boolean
-    linux_static:
+    linux:
       default: true
       type: boolean
-    linux_arm:
+    linux_armv7l:
+      default: true
+      type: boolean
+    musllinux:
       default: true
       type: boolean
     macos:
@@ -24,10 +30,6 @@ on:
     windows:
       default: true
       type: boolean
-    origin:
-      required: false
-      default: ''
-      type: string
   secrets:
     GPG_SIGNING_KEY:
       required: false
@@ -37,7 +39,9 @@ on:
       version:
         description: |
          VERSION: yyyy.mm.dd[.rev] or rev
-        required: true
+          (default: auto-generated)
+        required: false
+        default: ''
         type: string
       channel:
         description: |
@@ -49,12 +53,16 @@ on:
         description: yt-dlp, yt-dlp.tar.gz
         default: true
         type: boolean
-      linux_static:
-        description: yt-dlp_linux
+      linux:
+        description: yt-dlp_linux, yt-dlp_linux.zip, yt-dlp_linux_aarch64, yt-dlp_linux_aarch64.zip
         default: true
         type: boolean
-      linux_arm:
-        description: yt-dlp_linux_aarch64, yt-dlp_linux_armv7l
+      linux_armv7l:
+        description: yt-dlp_linux_armv7l.zip
+        default: true
+        type: boolean
+      musllinux:
+        description: yt-dlp_musllinux, yt-dlp_musllinux.zip, yt-dlp_musllinux_aarch64, yt-dlp_musllinux_aarch64.zip
         default: true
         type: boolean
       macos:
@@ -65,13 +73,6 @@ on:
         description: yt-dlp.exe, yt-dlp_win.zip, yt-dlp_x86.exe, yt-dlp_win_x86.zip, yt-dlp_arm64.exe, yt-dlp_win_arm64.zip
         default: true
         type: boolean
-      origin:
-        description: Origin
-        required: false
-        default: 'current repo'
-        type: choice
-        options:
-        - 'current repo'

 permissions:
   contents: read
@@ -80,44 +81,153 @@ jobs:
   process:
     runs-on: ubuntu-latest
     outputs:
-      origin: ${{ steps.process_origin.outputs.origin }}
+      origin: ${{ steps.process_inputs.outputs.origin }}
+      timestamp: ${{ steps.process_inputs.outputs.timestamp }}
+      version: ${{ steps.process_inputs.outputs.version }}
+      linux_matrix: ${{ steps.linux_matrix.outputs.matrix }}

     steps:
-      - name: Process origin
-        id: process_origin
+      - name: Process inputs
+        id: process_inputs
+        env:
+          INPUTS: ${{ toJSON(inputs) }}
+          REPOSITORY: ${{ github.repository }}
+        shell: python
         run: |
-          echo "origin=${{ inputs.origin == 'current repo' && github.repository || inputs.origin }}" | tee "$GITHUB_OUTPUT"
+          import datetime as dt
+          import json
+          import os
+          import re
+          INPUTS = json.loads(os.environ['INPUTS'])
+          timestamp = dt.datetime.now(tz=dt.timezone.utc).strftime('%Y.%m.%d.%H%M%S.%f')
+          version = INPUTS.get('version')
+          if version and '.' not in version:
+              # build.yml was dispatched with only a revision as the version input value
+              version_parts = [*timestamp.split('.')[:3], version]
+          elif not version:
+              # build.yml was dispatched without any version input value, so include .HHMMSS revision
+              version_parts = timestamp.split('.')[:4]
+          else:
+              # build.yml was called or dispatched with a complete version input value
+              version_parts = version.split('.')
+          assert all(re.fullmatch(r'[0-9]+', part) for part in version_parts), 'Version must be numeric'
+          outputs = {
+              'origin': INPUTS.get('origin') or os.environ['REPOSITORY'],
+              'timestamp': timestamp,
+              'version': '.'.join(version_parts),
+          }
+          print(json.dumps(outputs, indent=2))
+          with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
+              f.write('\n'.join(f'{key}={value}' for key, value in outputs.items()))
+
+      - name: Build Linux matrix
+        id: linux_matrix
+        env:
+          INPUTS: ${{ toJSON(inputs) }}
+          PYTHON_VERSION: '3.13'
+          UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
+        shell: python
+        run: |
+          import json
+          import os
+          EXE_MAP = {
+              'linux': [{
+                  'os': 'linux',
+                  'arch': 'x86_64',
+                  'runner': 'ubuntu-24.04',
+              }, {
+                  'os': 'linux',
+                  'arch': 'aarch64',
+                  'runner': 'ubuntu-24.04-arm',
+              }],
+              'linux_armv7l': [{
+                  'os': 'linux',
+                  'arch': 'armv7l',
+                  'runner': 'ubuntu-24.04-arm',
+                  'qemu_platform': 'linux/arm/v7',
+                  'onefile': False,
+                  'cache_requirements': True,
+                  'update_to': 'yt-dlp/yt-dlp@2023.03.04',
+              }],
+              'musllinux': [{
+                  'os': 'musllinux',
+                  'arch': 'x86_64',
+                  'runner': 'ubuntu-24.04',
+                  'python_version': '3.14',
+              }, {
+                  'os': 'musllinux',
+                  'arch': 'aarch64',
+                  'runner': 'ubuntu-24.04-arm',
+                  'python_version': '3.14',
+              }],
+          }
+          INPUTS = json.loads(os.environ['INPUTS'])
+          matrix = [exe for key, group in EXE_MAP.items() for exe in group if INPUTS.get(key)]
+          if not matrix:
+              # If we send an empty matrix when no linux inputs are given, the entire workflow fails
+              matrix = [EXE_MAP['linux'][0]]
+          for exe in matrix:
+              exe['exe'] = '_'.join(filter(None, (
+                  'yt-dlp',
+                  exe['os'],
+                  exe['arch'] != 'x86_64' and exe['arch'],
+              )))
+              exe.setdefault('qemu_platform', None)
+              exe.setdefault('onefile', True)
+              exe.setdefault('onedir', True)
+              exe.setdefault('cache_requirements', False)
+              exe.setdefault('python_version', os.environ['PYTHON_VERSION'])
+              exe.setdefault('update_to', os.environ['UPDATE_TO'])
+          if not any(INPUTS.get(key) for key in EXE_MAP):
+              print('skipping linux job')
+          else:
+              print(json.dumps(matrix, indent=2))
+              with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
+                  f.write(f'matrix={json.dumps(matrix)}')

   unix:
     needs: process
     if: inputs.unix
     runs-on: ubuntu-latest
+    env:
+      CHANNEL: ${{ inputs.channel }}
+      ORIGIN: ${{ needs.process.outputs.origin }}
+      VERSION: ${{ needs.process.outputs.version }}
+      UPDATE_TO: yt-dlp/yt-dlp@2025.09.05

     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
        with:
         fetch-depth: 0 # Needed for changelog
-      - uses: actions/setup-python@v5
+
+      - uses: actions/setup-python@v6
        with:
         python-version: "3.10"

      - name: Install Requirements
        run: |
         sudo apt -y install zip pandoc man sed

      - name: Prepare
        run: |
-         python devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
+         python devscripts/update-version.py -c "${CHANNEL}" -r "${ORIGIN}" "${VERSION}"
         python devscripts/update_changelog.py -vv
         python devscripts/make_lazy_extractors.py

      - name: Build Unix platform-independent binary
        run: |
-         make all tar
+         make all-extra tar

      - name: Verify --update-to
        if: vars.UPDATE_TO_VERIFICATION
        run: |
         chmod +x ./yt-dlp
         cp ./yt-dlp ./yt-dlp_downgraded
         version="$(./yt-dlp --version)"
-         ./yt-dlp_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
+         ./yt-dlp_downgraded -v --update-to "${UPDATE_TO}"
         downgraded_version="$(./yt-dlp_downgraded --version)"
-         [[ "$version" != "$downgraded_version" ]]
+         [[ "${version}" != "${downgraded_version}" ]]

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
|
|||
yt-dlp.tar.gz
|
||||
compression-level: 0
|
||||
|
||||
linux_static:
|
||||
linux:
|
||||
name: ${{ matrix.os }} (${{ matrix.arch }})
|
||||
if: inputs.linux || inputs.linux_armv7l || inputs.musllinux
|
||||
needs: process
|
||||
if: inputs.linux_static
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Build static executable
|
||||
env:
|
||||
channel: ${{ inputs.channel }}
|
||||
origin: ${{ needs.process.outputs.origin }}
|
||||
version: ${{ inputs.version }}
|
||||
run: |
|
||||
mkdir ~/build
|
||||
cd bundle/docker
|
||||
docker compose up --build static
|
||||
sudo chown "${USER}:docker" ~/build/yt-dlp_linux
|
||||
- name: Verify --update-to
|
||||
if: vars.UPDATE_TO_VERIFICATION
|
||||
run: |
|
||||
chmod +x ~/build/yt-dlp_linux
|
||||
cp ~/build/yt-dlp_linux ~/build/yt-dlp_linux_downgraded
|
||||
version="$(~/build/yt-dlp_linux --version)"
|
||||
~/build/yt-dlp_linux_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
|
||||
downgraded_version="$(~/build/yt-dlp_linux_downgraded --version)"
|
||||
[[ "$version" != "$downgraded_version" ]]
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: build-bin-${{ github.job }}
|
||||
path: |
|
||||
~/build/yt-dlp_linux
|
||||
compression-level: 0
|
||||
|
||||
linux_arm:
|
||||
needs: process
|
||||
if: inputs.linux_arm
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write # for creating cache
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ${{ matrix.runner }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
architecture:
|
||||
- armv7
|
||||
- aarch64
|
||||
include: ${{ fromJSON(needs.process.outputs.linux_matrix) }}
|
||||
env:
|
||||
CHANNEL: ${{ inputs.channel }}
|
||||
ORIGIN: ${{ needs.process.outputs.origin }}
|
||||
VERSION: ${{ needs.process.outputs.version }}
|
||||
EXE_NAME: ${{ matrix.exe }}
|
||||
PYTHON_VERSION: ${{ matrix.python_version }}
|
||||
UPDATE_TO: ${{ (vars.UPDATE_TO_VERIFICATION && matrix.update_to) || '' }}
|
||||
SKIP_ONEDIR_BUILD: ${{ (!matrix.onedir && '1') || '' }}
|
||||
SKIP_ONEFILE_BUILD: ${{ (!matrix.onefile && '1') || '' }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
path: ./repo
|
||||
- name: Virtualized Install, Prepare & Build
|
||||
uses: yt-dlp/run-on-arch-action@v3
|
||||
with:
|
||||
# Ref: https://github.com/uraimo/run-on-arch-action/issues/55
|
||||
env: |
|
||||
GITHUB_WORKFLOW: build
|
||||
githubToken: ${{ github.token }} # To cache image
|
||||
arch: ${{ matrix.architecture }}
|
||||
distro: ubuntu20.04 # Standalone executable should be built on minimum supported OS
|
||||
dockerRunArgs: --volume "${PWD}/repo:/repo"
|
||||
install: | # Installing Python 3.10 from the Deadsnakes repo raises errors
|
||||
apt update
|
||||
apt -y install zlib1g-dev libffi-dev python3.9 python3.9-dev python3.9-distutils python3-pip \
|
||||
python3-secretstorage # Cannot build cryptography wheel in virtual armv7 environment
|
||||
python3.9 -m pip install -U pip wheel 'setuptools>=71.0.2'
|
||||
# XXX: Keep this in sync with pyproject.toml (it can't be accessed at this stage) and exclude secretstorage
|
||||
python3.9 -m pip install -U Pyinstaller mutagen pycryptodomex brotli certifi cffi \
|
||||
'requests>=2.32.2,<3' 'urllib3>=2.0.2,<3' 'websockets>=13.0'
|
||||
- uses: actions/checkout@v5
|
||||
|
||||
run: |
|
||||
cd repo
|
||||
python3.9 devscripts/install_deps.py -o --include build
|
||||
python3.9 devscripts/install_deps.py --include pyinstaller # Cached versions may be out of date
|
||||
python3.9 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
|
||||
python3.9 devscripts/make_lazy_extractors.py
|
||||
python3.9 -m bundle.pyinstaller
|
||||
- name: Cache requirements
|
||||
if: matrix.cache_requirements
|
||||
id: cache-venv
|
||||
uses: actions/cache@v4
|
||||
env:
|
||||
SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
|
||||
with:
|
||||
path: |
|
||||
venv
|
||||
key: cache-reqs-${{ matrix.os }}_${{ matrix.arch }}-${{ github.ref }}-${{ needs.process.outputs.timestamp }}
|
||||
restore-keys: |
|
||||
cache-reqs-${{ matrix.os }}_${{ matrix.arch }}-${{ github.ref }}-
|
||||
cache-reqs-${{ matrix.os }}_${{ matrix.arch }}-
|
||||
|
||||
if ${{ vars.UPDATE_TO_VERIFICATION && 'true' || 'false' }}; then
|
||||
arch="${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}"
|
||||
chmod +x ./dist/yt-dlp_linux_${arch}
|
||||
cp ./dist/yt-dlp_linux_${arch} ./dist/yt-dlp_linux_${arch}_downgraded
|
||||
version="$(./dist/yt-dlp_linux_${arch} --version)"
|
||||
./dist/yt-dlp_linux_${arch}_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
|
||||
downgraded_version="$(./dist/yt-dlp_linux_${arch}_downgraded --version)"
|
||||
[[ "$version" != "$downgraded_version" ]]
|
||||
fi
|
||||
- name: Set up QEMU
|
||||
if: matrix.qemu_platform
|
||||
uses: docker/setup-qemu-action@v3
|
||||
with:
|
||||
platforms: ${{ matrix.qemu_platform }}
|
||||
|
||||
- name: Build executable
|
||||
env:
|
||||
SERVICE: ${{ matrix.os }}_${{ matrix.arch }}
|
||||
run: |
|
||||
mkdir -p ./venv
|
||||
mkdir -p ./dist
|
||||
pushd bundle/docker
|
||||
docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"
|
||||
popd
|
||||
if [[ -z "${SKIP_ONEFILE_BUILD}" ]]; then
|
||||
sudo chown "${USER}:docker" "./dist/${EXE_NAME}"
|
||||
fi
|
||||
|
||||
- name: Verify executable in container
|
||||
env:
|
||||
SERVICE: ${{ matrix.os }}_${{ matrix.arch }}_verify
|
||||
run: |
|
||||
cd bundle/docker
|
||||
docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"
|
||||
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: build-bin-linux_${{ matrix.architecture }}
|
||||
path: | # run-on-arch-action designates armv7l as armv7
|
||||
repo/dist/yt-dlp_linux_${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}
|
||||
name: build-bin-${{ matrix.os }}_${{ matrix.arch }}
|
||||
path: |
|
||||
dist/${{ matrix.exe }}*
|
||||
compression-level: 0
|
||||
|
||||
macos:
|
||||
|
|
@@ -227,22 +312,29 @@ jobs:
     if: inputs.macos
     permissions:
       contents: read
-      actions: write # For cleaning up cache
     runs-on: macos-14
+    env:
+      CHANNEL: ${{ inputs.channel }}
+      ORIGIN: ${{ needs.process.outputs.origin }}
+      VERSION: ${{ needs.process.outputs.version }}
+      UPDATE_TO: yt-dlp/yt-dlp@2025.09.05

     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
       # NB: Building universal2 does not work with python from actions/setup-python

-      - name: Restore cached requirements
-        id: restore-cache
-        uses: actions/cache/restore@v4
+      - name: Cache requirements
+        id: cache-venv
+        uses: actions/cache@v4
+        env:
+          SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
         with:
           path: |
             ~/yt-dlp-build-venv
-          key: cache-reqs-${{ github.job }}-${{ github.ref }}
+          key: cache-reqs-${{ github.job }}-${{ github.ref }}-${{ needs.process.outputs.timestamp }}
+          restore-keys: |
+            cache-reqs-${{ github.job }}-${{ github.ref }}-
+            cache-reqs-${{ github.job }}-

       - name: Install Requirements
         run: |
@@ -251,14 +343,14 @@ jobs:
           brew uninstall --ignore-dependencies python3
           python3 -m venv ~/yt-dlp-build-venv
           source ~/yt-dlp-build-venv/bin/activate
-          python3 devscripts/install_deps.py -o --include build
-          python3 devscripts/install_deps.py --print --include pyinstaller > requirements.txt
+          python3 devscripts/install_deps.py --only-optional-groups --include-group build
+          python3 devscripts/install_deps.py --print --include-group pyinstaller > requirements.txt
           # We need to ignore wheels otherwise we break universal2 builds
           python3 -m pip install -U --no-binary :all: -r requirements.txt
           # We need to fuse our own universal2 wheels for curl_cffi
           python3 -m pip install -U 'delocate==0.11.0'
           mkdir curl_cffi_whls curl_cffi_universal2
-          python3 devscripts/install_deps.py --print -o --include curl-cffi > requirements.txt
+          python3 devscripts/install_deps.py --print --only-optional-groups --include-group curl-cffi > requirements.txt
           for platform in "macosx_11_0_arm64" "macosx_11_0_x86_64"; do
             python3 -m pip download \
               --only-binary=:all: \
@@ -287,7 +379,7 @@ jobs:

       - name: Prepare
         run: |
-          python3 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
+          python3 devscripts/update-version.py -c "${CHANNEL}" -r "${ORIGIN}" "${VERSION}"
           python3 devscripts/make_lazy_extractors.py
       - name: Build
         run: |
@@ -302,7 +394,7 @@ jobs:
           chmod +x ./dist/yt-dlp_macos
           cp ./dist/yt-dlp_macos ./dist/yt-dlp_macos_downgraded
           version="$(./dist/yt-dlp_macos --version)"
-          ./dist/yt-dlp_macos_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
+          ./dist/yt-dlp_macos_downgraded -v --update-to "${UPDATE_TO}"
           downgraded_version="$(./dist/yt-dlp_macos_downgraded --version)"
           [[ "$version" != "$downgraded_version" ]]
@@ -315,27 +407,12 @@ jobs:
           dist/yt-dlp_macos.zip
         compression-level: 0

-      - name: Cleanup cache
-        if: steps.restore-cache.outputs.cache-hit == 'true'
-        env:
-          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          cache_key: cache-reqs-${{ github.job }}-${{ github.ref }}
-        run: |
-          gh cache delete "${cache_key}"
-
-      - name: Cache requirements
-        uses: actions/cache/save@v4
-        with:
-          path: |
-            ~/yt-dlp-build-venv
-          key: cache-reqs-${{ github.job }}-${{ github.ref }}
-
   windows:
+    name: windows (${{ matrix.arch }})
     needs: process
     if: inputs.windows
     permissions:
       contents: read
-      actions: write # For cleaning up cache
     runs-on: ${{ matrix.runner }}
     strategy:
       fail-fast: false
@@ -344,66 +421,101 @@ jobs:
         - arch: 'x64'
           runner: windows-2025
           python_version: '3.10'
-          suffix: ''
+          platform_tag: win_amd64
+          pyi_version: '6.17.0'
+          pyi_tag: '2025.11.29.054325'
+          pyi_hash: e28cc13e4ad0cc74330d832202806d0c1976e9165da6047309348ca663c0ed3d
         - arch: 'x86'
           runner: windows-2025
           python_version: '3.10'
-          suffix: '_x86'
+          platform_tag: win32
+          pyi_version: '6.17.0'
+          pyi_tag: '2025.11.29.054325'
+          pyi_hash: c00f600c17de3bdd589f043f60ab64fc34fcba6dd902ad973af9c8afc74f80d1
         - arch: 'arm64'
           runner: windows-11-arm
           python_version: '3.13' # arm64 only has Python >= 3.11 available
-          suffix: '_arm64'
+          platform_tag: win_arm64
+          pyi_version: '6.17.0'
+          pyi_tag: '2025.11.29.054325'
+          pyi_hash: a2033b18b4f7bc6108b5fd76a92c6c1de0a12ec4fe98a23396a9f978cb4b7d7b
+    env:
+      CHANNEL: ${{ inputs.channel }}
+      ORIGIN: ${{ needs.process.outputs.origin }}
+      VERSION: ${{ needs.process.outputs.version }}
+      SUFFIX: ${{ (matrix.arch != 'x64' && format('_{0}', matrix.arch)) || '' }}
+      UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
+      BASE_CACHE_KEY: cache-reqs-${{ github.job }}_${{ matrix.arch }}-${{ matrix.python_version }}
+      PYI_REPO: https://github.com/yt-dlp/Pyinstaller-Builds
+      PYI_WHEEL: pyinstaller-${{ matrix.pyi_version }}-py3-none-${{ matrix.platform_tag }}.whl

     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-python@v5
+      - uses: actions/checkout@v5
+      - uses: actions/setup-python@v6
         with:
           python-version: ${{ matrix.python_version }}
           architecture: ${{ matrix.arch }}

-      - name: Restore cached requirements
-        id: restore-cache
+      - name: Cache requirements
+        id: cache-venv
         if: matrix.arch == 'arm64'
-        uses: actions/cache/restore@v4
+        uses: actions/cache@v4
+        env:
+          SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
         with:
           path: |
             /yt-dlp-build-venv
-          key: cache-reqs-${{ github.job }}_${{ matrix.arch }}-${{ matrix.python_version }}-${{ github.ref }}
+          key: ${{ env.BASE_CACHE_KEY }}-${{ github.ref }}-${{ needs.process.outputs.timestamp }}
+          restore-keys: |
+            ${{ env.BASE_CACHE_KEY }}-${{ github.ref }}-
+            ${{ env.BASE_CACHE_KEY }}-

       - name: Install Requirements
+        env:
+          ARCH: ${{ matrix.arch }}
+          PYI_URL: ${{ env.PYI_REPO }}/releases/download/${{ matrix.pyi_tag }}/${{ env.PYI_WHEEL }}
+          PYI_HASH: ${{ matrix.pyi_hash }}
         shell: pwsh
         run: |
           python -m venv /yt-dlp-build-venv
           /yt-dlp-build-venv/Scripts/Activate.ps1
-          python devscripts/install_deps.py -o --include build
-          python devscripts/install_deps.py ${{ (matrix.arch != 'x86' && '--include curl-cffi') || '' }}
-          # Use custom pyinstaller built with https://github.com/yt-dlp/pyinstaller-builds
-          python -m pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/${{ matrix.arch }}/pyinstaller-6.15.0-py3-none-any.whl"
+          python -m pip install -U pip
+          # Install custom PyInstaller build and verify hash
+          mkdir /pyi-wheels
+          python -m pip download -d /pyi-wheels --no-deps --require-hashes "pyinstaller@${Env:PYI_URL}#sha256=${Env:PYI_HASH}"
+          python -m pip install --force-reinstall -U "/pyi-wheels/${Env:PYI_WHEEL}"
+          python devscripts/install_deps.py --only-optional-groups --include-group build
+          if ("${Env:ARCH}" -eq "x86") {
+            python devscripts/install_deps.py
+          } else {
+            python devscripts/install_deps.py --include-group curl-cffi
+          }

       - name: Prepare
         shell: pwsh
         run: |
-          python devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
+          python devscripts/update-version.py -c "${Env:CHANNEL}" -r "${Env:ORIGIN}" "${Env:VERSION}"
           python devscripts/make_lazy_extractors.py

       - name: Build
         shell: pwsh
         run: |
           /yt-dlp-build-venv/Scripts/Activate.ps1
           python -m bundle.pyinstaller
           python -m bundle.pyinstaller --onedir
-          Compress-Archive -Path ./dist/yt-dlp${{ matrix.suffix }}/* -DestinationPath ./dist/yt-dlp_win${{ matrix.suffix }}.zip
+          Compress-Archive -Path ./dist/yt-dlp${Env:SUFFIX}/* -DestinationPath ./dist/yt-dlp_win${Env:SUFFIX}.zip

       - name: Verify --update-to
         if: vars.UPDATE_TO_VERIFICATION
         shell: pwsh
         run: |
-          foreach ($name in @("yt-dlp${{ matrix.suffix }}")) {
-            Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe"
-            $version = & "./dist/${name}.exe" --version
-            & "./dist/${name}_downgraded.exe" -v --update-to yt-dlp/yt-dlp@2025.08.20
-            $downgraded_version = & "./dist/${name}_downgraded.exe" --version
-            if ($version -eq $downgraded_version) {
-              exit 1
-            }
-          }
+          $name = "yt-dlp${Env:SUFFIX}"
+          Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe"
+          $version = & "./dist/${name}.exe" --version
+          & "./dist/${name}_downgraded.exe" -v --update-to "${Env:UPDATE_TO}"
+          $downgraded_version = & "./dist/${name}_downgraded.exe" --version
+          if ($version -eq $downgraded_version) {
+            exit 1
+          }

       - name: Upload artifacts
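The rewritten Install Requirements step above stops pulling PyInstaller from a mutable URL and instead downloads a wheel pinned by release tag and sha256, so pip's --require-hashes mode rejects anything that does not match. A minimal sketch of the same integrity check in Python (illustrative names; assumes the wheel was already downloaded locally):

    import hashlib

    def sha256_of(path):
        # Stream the file so large wheels do not need to fit in memory
        digest = hashlib.sha256()
        with open(path, 'rb') as f:
            for chunk in iter(lambda: f.read(1 << 20), b''):
                digest.update(chunk)
        return digest.hexdigest()

    # 'expected' stands in for the matrix.pyi_hash value of the current
    # platform, and 'wheel_path' is a hypothetical downloaded-wheel path:
    #     assert sha256_of(wheel_path) == expected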
@@ -411,40 +523,22 @@ jobs:
         with:
           name: build-bin-${{ github.job }}-${{ matrix.arch }}
           path: |
-            dist/yt-dlp${{ matrix.suffix }}.exe
-            dist/yt-dlp_win${{ matrix.suffix }}.zip
+            dist/yt-dlp${{ env.SUFFIX }}.exe
+            dist/yt-dlp_win${{ env.SUFFIX }}.zip
           compression-level: 0

-      - name: Cleanup cache
-        if: |
-          matrix.arch == 'arm64' && steps.restore-cache.outputs.cache-hit == 'true'
-        env:
-          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          cache_key: cache-reqs-${{ github.job }}_${{ matrix.arch }}-${{ matrix.python_version }}-${{ github.ref }}
-        run: |
-          gh cache delete "${cache_key}"
-
-      - name: Cache requirements
-        if: matrix.arch == 'arm64'
-        uses: actions/cache/save@v4
-        with:
-          path: |
-            /yt-dlp-build-venv
-          key: cache-reqs-${{ github.job }}_${{ matrix.arch }}-${{ matrix.python_version }}-${{ github.ref }}
-
   meta_files:
     if: always() && !cancelled()
     needs:
       - process
       - unix
-      - linux_static
-      - linux_arm
+      - linux
       - macos
       - windows
     runs-on: ubuntu-latest
     steps:
       - name: Download artifacts
-        uses: actions/download-artifact@v4
+        uses: actions/download-artifact@v5
         with:
           path: artifact
           pattern: build-bin-*
@@ -466,41 +560,45 @@ jobs:
           cat >> _update_spec << EOF
           # This file is used for regulating self-update
           lock 2022.08.18.36 .+ Python 3\.6
-          lock 2023.11.16 (?!win_x86_exe).+ Python 3\.7
+          lock 2023.11.16 zip Python 3\.7
           lock 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
           lock 2024.10.22 py2exe .+
           lock 2024.10.22 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
           lock 2024.10.22 zip Python 3\.8
           lock 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
           lock 2025.08.11 darwin_legacy_exe .+
+          lock 2025.08.27 linux_armv7l_exe .+
+          lock 2025.10.14 zip Python 3\.9
           lockV2 yt-dlp/yt-dlp 2022.08.18.36 .+ Python 3\.6
-          lockV2 yt-dlp/yt-dlp 2023.11.16 (?!win_x86_exe).+ Python 3\.7
+          lockV2 yt-dlp/yt-dlp 2023.11.16 zip Python 3\.7
           lockV2 yt-dlp/yt-dlp 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
           lockV2 yt-dlp/yt-dlp 2024.10.22 py2exe .+
           lockV2 yt-dlp/yt-dlp 2024.10.22 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
           lockV2 yt-dlp/yt-dlp 2024.10.22 zip Python 3\.8
           lockV2 yt-dlp/yt-dlp 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
           lockV2 yt-dlp/yt-dlp 2025.08.11 darwin_legacy_exe .+
+          lockV2 yt-dlp/yt-dlp 2025.08.27 linux_armv7l_exe .+
+          lockV2 yt-dlp/yt-dlp 2025.10.14 zip Python 3\.9
-          lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 (?!win_x86_exe).+ Python 3\.7
+          lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 zip Python 3\.7
           lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 win_x86_exe .+ Windows-(?:Vista|2008Server)
           lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 py2exe .+
           lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
           lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 zip Python 3\.8
           lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
           lockV2 yt-dlp/yt-dlp-nightly-builds 2025.08.12.233030 darwin_legacy_exe .+
+          lockV2 yt-dlp/yt-dlp-nightly-builds 2025.08.30.232839 linux_armv7l_exe .+
+          lockV2 yt-dlp/yt-dlp-nightly-builds 2025.10.14.232845 zip Python 3\.9
-          lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 (?!win_x86_exe).+ Python 3\.7
+          lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 zip Python 3\.7
           lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 win_x86_exe .+ Windows-(?:Vista|2008Server)
           lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.045052 py2exe .+
           lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
           lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 zip Python 3\.8
           lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
           lockV2 yt-dlp/yt-dlp-master-builds 2025.08.12.232447 darwin_legacy_exe .+
+          lockV2 yt-dlp/yt-dlp-master-builds 2025.09.05.212910 linux_armv7l_exe .+
+          lockV2 yt-dlp/yt-dlp-master-builds 2025.10.14.232330 zip Python 3\.9
           EOF

       - name: Sign checksum files
         env:
           GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
-        if: env.GPG_SIGNING_KEY != ''
+        if: env.GPG_SIGNING_KEY
         run: |
           gpg --batch --import <<< "${{ secrets.GPG_SIGNING_KEY }}"
           for signfile in ./SHA*SUMS; do
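The lock/lockV2 lines written above regulate self-update: each line names the last version available to targets whose variant-and-system string matches the trailing regex, so builds for legacy platforms stop updating past their final supported release. A rough sketch of that capping rule as this listing reads it (simplified and illustrative; the real updater logic lives in yt_dlp/update.py and is more involved):

    import re

    # Two of the spec lines above, as (last_allowed_version, pattern) pairs
    LOCKS = [
        ('2024.10.22', r'zip Python 3\.8'),
        ('2025.10.14', r'zip Python 3\.9'),
    ]

    def cap_version(variant_and_system, requested):
        # Zero-padded yyyy.mm.dd versions compare correctly as strings here
        allowed = requested
        for version, pattern in LOCKS:
            if re.match(pattern, variant_and_system) and version < allowed:
                allowed = version
        return allowed

    print(cap_version('zip Python 3.9', '2025.12.01'))  # -> 2025.10.14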
23 .github/workflows/cache-warmer.yml vendored Normal file

@@ -0,0 +1,23 @@
+name: Keep cache warm
+on:
+  workflow_dispatch:
+  schedule:
+    - cron: '0 22 1,6,11,16,21,27 * *'
+
+jobs:
+  build:
+    if: |
+      vars.KEEP_CACHE_WARM || github.event_name == 'workflow_dispatch'
+    uses: ./.github/workflows/build.yml
+    with:
+      version: '999999'
+      channel: stable
+      origin: ${{ github.repository }}
+      unix: false
+      linux: false
+      linux_armv7l: true
+      musllinux: false
+      macos: true
+      windows: true
+    permissions:
+      contents: read
77 .github/workflows/challenge-tests.yml vendored Normal file

@@ -0,0 +1,77 @@
+name: Challenge Tests
+on:
+  push:
+    paths:
+      - .github/workflows/challenge-tests.yml
+      - test/test_jsc/*.py
+      - yt_dlp/extractor/youtube/jsc/**.js
+      - yt_dlp/extractor/youtube/jsc/**.py
+      - yt_dlp/extractor/youtube/pot/**.py
+      - yt_dlp/utils/_jsruntime.py
+  pull_request:
+    paths:
+      - .github/workflows/challenge-tests.yml
+      - test/test_jsc/*.py
+      - yt_dlp/extractor/youtube/jsc/**.js
+      - yt_dlp/extractor/youtube/jsc/**.py
+      - yt_dlp/extractor/youtube/pot/**.py
+      - yt_dlp/utils/_jsruntime.py
+permissions:
+  contents: read
+
+concurrency:
+  group: challenge-tests-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: ${{ github.event_name == 'pull_request' }}
+
+jobs:
+  tests:
+    name: Challenge Tests
+    runs-on: ${{ matrix.os }}
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [ubuntu-latest, windows-latest]
+        python-version: ['3.10', '3.11', '3.12', '3.13', '3.14', pypy-3.11]
+    env:
+      QJS_VERSION: '2025-04-26' # Earliest version with rope strings
+    steps:
+      - uses: actions/checkout@v5
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v6
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Install Deno
+        uses: denoland/setup-deno@v2
+        with:
+          deno-version: '2.0.0' # minimum supported version
+      - name: Install Bun
+        uses: oven-sh/setup-bun@v2
+        with:
+          # minimum supported version is 1.0.31 but earliest available Windows version is 1.1.0
+          bun-version: ${{ (matrix.os == 'windows-latest' && '1.1.0') || '1.0.31' }}
+      - name: Install Node
+        uses: actions/setup-node@v6
+        with:
+          node-version: '20.0' # minimum supported version
+      - name: Install QuickJS (Linux)
+        if: matrix.os == 'ubuntu-latest'
+        run: |
+          wget "https://bellard.org/quickjs/binary_releases/quickjs-linux-x86_64-${QJS_VERSION}.zip" -O quickjs.zip
+          unzip quickjs.zip qjs
+          sudo install qjs /usr/local/bin/qjs
+      - name: Install QuickJS (Windows)
+        if: matrix.os == 'windows-latest'
+        shell: pwsh
+        run: |
+          Invoke-WebRequest "https://bellard.org/quickjs/binary_releases/quickjs-win-x86_64-${Env:QJS_VERSION}.zip" -OutFile quickjs.zip
+          unzip quickjs.zip
+      - name: Install test requirements
+        run: |
+          python ./devscripts/install_deps.py --print --only-optional-groups --include-group test > requirements.txt
+          python ./devscripts/install_deps.py --print -c certifi -c requests -c urllib3 -c yt-dlp-ejs >> requirements.txt
+          python -m pip install -U -r requirements.txt
+      - name: Run tests
+        timeout-minutes: 15
+        run: |
+          python -m yt_dlp -v --js-runtimes node --js-runtimes bun --js-runtimes quickjs || true
+          python ./devscripts/run_tests.py test/test_jsc -k download
2 .github/workflows/codeql.yml vendored

@@ -29,7 +29,7 @@ jobs:

     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v5

       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
41 .github/workflows/core.yml vendored

@@ -7,6 +7,7 @@ on:
       - test/**
       - yt_dlp/**.py
       - '!yt_dlp/extractor/**.py'
+      - yt_dlp/extractor/youtube/**.py
       - yt_dlp/extractor/__init__.py
       - yt_dlp/extractor/common.py
       - yt_dlp/extractor/extractors.py

@@ -17,6 +18,7 @@ on:
       - test/**
      - yt_dlp/**.py
      - '!yt_dlp/extractor/**.py'
+      - yt_dlp/extractor/youtube/**.py
      - yt_dlp/extractor/__init__.py
      - yt_dlp/extractor/common.py
      - yt_dlp/extractor/extractors.py

@@ -36,31 +38,54 @@ jobs:
       fail-fast: false
       matrix:
         os: [ubuntu-latest]
-        # CPython 3.9 is in quick-test
-        python-version: ['3.10', '3.11', '3.12', '3.13', pypy-3.11]
+        # CPython 3.10 is in quick-test
+        python-version: ['3.11', '3.12', '3.13', '3.14', pypy-3.11]
         include:
           # atleast one of each CPython/PyPy tests must be in windows
-          - os: windows-latest
-            python-version: '3.9'
           - os: windows-latest
             python-version: '3.10'
+          - os: windows-latest
+            python-version: '3.11'
+          - os: windows-latest
+            python-version: '3.12'
+          - os: windows-latest
+            python-version: '3.13'
+          - os: windows-latest
+            python-version: '3.14'
           - os: windows-latest
             python-version: pypy-3.11
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
+        with:
+          fetch-depth: 0
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v6
        with:
         python-version: ${{ matrix.python-version }}
      - name: Install test requirements
-        run: python3 ./devscripts/install_deps.py --include test --include curl-cffi
+        run: python ./devscripts/install_deps.py --include-group test --include-group curl-cffi
      - name: Run tests
        timeout-minutes: 15
        continue-on-error: False
+        env:
+          source: ${{ (github.event_name == 'push' && github.event.before) || 'origin/master' }}
+          target: ${{ (github.event_name == 'push' && github.event.after) || 'HEAD' }}
+        shell: bash
        run: |
+          flags=()
+          # Check if a networking file is involved
+          patterns="\
+          ^yt_dlp/networking/
+          ^yt_dlp/utils/networking\.py$
+          ^test/test_http_proxy\.py$
+          ^test/test_networking\.py$
+          ^test/test_networking_utils\.py$
+          ^test/test_socks\.py$
+          ^test/test_websockets\.py$
+          ^pyproject\.toml$
+          "
+          if git diff --name-only "${source}" "${target}" | grep -Ef <(printf '%s' "${patterns}"); then
+            flags+=(--flaky)
+          fi
          python3 -m yt_dlp -v || true  # Print debug head
-          python3 ./devscripts/run_tests.py --pytest-args '--reruns 2 --reruns-delay 3.0' core
+          python3 -m devscripts.run_tests "${flags[@]}" --pytest-args '--reruns 2 --reruns-delay 3.0' core
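The new test step above decides whether to pass --flaky by grepping the changed-file list between the push's before/after commits against anchored networking patterns. The same decision expressed in Python (an illustrative equivalent of the shell step, not code from the repository):

    import re
    import subprocess

    NETWORKING_PATTERNS = (
        r'^yt_dlp/networking/',
        r'^yt_dlp/utils/networking\.py$',
        r'^test/test_http_proxy\.py$',
        r'^test/test_networking\.py$',
        r'^test/test_networking_utils\.py$',
        r'^test/test_socks\.py$',
        r'^test/test_websockets\.py$',
        r'^pyproject\.toml$',
    )

    def needs_flaky(source='origin/master', target='HEAD'):
        # Did any networking-related file change between source and target?
        changed = subprocess.run(
            ['git', 'diff', '--name-only', source, target],
            capture_output=True, text=True, check=True,
        ).stdout.splitlines()
        return any(re.search(pattern, name)
                   for name in changed for pattern in NETWORKING_PATTERNS)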
22 .github/workflows/download.yml vendored

@@ -9,16 +9,16 @@ jobs:
     if: "contains(github.event.head_commit.message, 'ci run dl')"
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
       - name: Set up Python
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v6
         with:
-          python-version: 3.9
+          python-version: '3.10'
       - name: Install test requirements
-        run: python3 ./devscripts/install_deps.py --include dev
+        run: python ./devscripts/install_deps.py --include-group dev
       - name: Run tests
         continue-on-error: true
-        run: python3 ./devscripts/run_tests.py download
+        run: python ./devscripts/run_tests.py download

   full:
     name: Full Download Tests

@@ -28,21 +28,21 @@ jobs:
       fail-fast: true
       matrix:
         os: [ubuntu-latest]
-        python-version: ['3.10', '3.11', '3.12', '3.13', pypy-3.11]
+        python-version: ['3.11', '3.12', '3.13', '3.14', pypy-3.11]
         include:
           # atleast one of each CPython/PyPy tests must be in windows
           - os: windows-latest
-            python-version: '3.9'
+            python-version: '3.10'
           - os: windows-latest
             python-version: pypy-3.11
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v6
         with:
           python-version: ${{ matrix.python-version }}
       - name: Install test requirements
-        run: python3 ./devscripts/install_deps.py --include dev
+        run: python ./devscripts/install_deps.py --include-group dev
       - name: Run tests
         continue-on-error: true
-        run: python3 ./devscripts/run_tests.py download
+        run: python ./devscripts/run_tests.py download
20 .github/workflows/quick-test.yml vendored

@@ -9,13 +9,13 @@ jobs:
     if: "!contains(github.event.head_commit.message, 'ci skip all')"
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
-      - name: Set up Python 3.9
-        uses: actions/setup-python@v5
+      - uses: actions/checkout@v5
+      - name: Set up Python 3.10
+        uses: actions/setup-python@v6
         with:
-          python-version: '3.9'
+          python-version: '3.10'
       - name: Install test requirements
-        run: python3 ./devscripts/install_deps.py -o --include test
+        run: python ./devscripts/install_deps.py --only-optional-groups --include-group test
       - name: Run tests
         timeout-minutes: 15
         run: |

@@ -26,14 +26,14 @@ jobs:
     if: "!contains(github.event.head_commit.message, 'ci skip all')"
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-python@v5
+      - uses: actions/checkout@v5
+      - uses: actions/setup-python@v6
         with:
-          python-version: '3.9'
+          python-version: '3.10'
       - name: Install dev dependencies
-        run: python3 ./devscripts/install_deps.py -o --include static-analysis
+        run: python ./devscripts/install_deps.py --only-optional-groups --include-group static-analysis
       - name: Make lazy extractors
-        run: python3 ./devscripts/make_lazy_extractors.py
+        run: python ./devscripts/make_lazy_extractors.py
       - name: Run ruff
         run: ruff check --output-format github .
       - name: Run autopep8
15 .github/workflows/release-master.yml vendored

@@ -6,10 +6,12 @@ on:
     paths:
       - "yt_dlp/**.py"
       - "!yt_dlp/version.py"
-      - "bundle/*.py"
+      - "bundle/**"
       - "pyproject.toml"
       - "Makefile"
       - ".github/workflows/build.yml"
+      - ".github/workflows/release.yml"
+      - ".github/workflows/release-master.yml"
 concurrency:
   group: release-master
 permissions:

@@ -17,27 +19,26 @@ permissions:

 jobs:
   release:
-    if: vars.BUILD_MASTER != ''
+    if: vars.BUILD_MASTER
     uses: ./.github/workflows/release.yml
     with:
       prerelease: true
-      source: master
+      source: ${{ (github.repository != 'yt-dlp/yt-dlp' && vars.MASTER_ARCHIVE_REPO) || 'master' }}
+      target: 'master'
     permissions:
       contents: write
       packages: write # For package cache
-      actions: write # For cleaning up cache
       id-token: write # mandatory for trusted publishing
     secrets: inherit

   publish_pypi:
     needs: [release]
-    if: vars.MASTER_PYPI_PROJECT != ''
+    if: vars.MASTER_PYPI_PROJECT
     runs-on: ubuntu-latest
     permissions:
       id-token: write # mandatory for trusted publishing
     steps:
       - name: Download artifacts
-        uses: actions/download-artifact@v4
+        uses: actions/download-artifact@v5
         with:
           path: dist
           name: build-pypi
17 .github/workflows/release-nightly.yml vendored

@@ -7,12 +7,12 @@ permissions:

 jobs:
   check_nightly:
-    if: vars.BUILD_NIGHTLY != ''
+    if: vars.BUILD_NIGHTLY
     runs-on: ubuntu-latest
     outputs:
       commit: ${{ steps.check_for_new_commits.outputs.commit }}
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
         with:
           fetch-depth: 0
       - name: Check for new commits

@@ -22,9 +22,13 @@ jobs:
             "yt_dlp/*.py"
             ':!yt_dlp/version.py'
             "bundle/*.py"
+            "bundle/docker/compose.yml"
+            "bundle/docker/linux/*"
             "pyproject.toml"
             "Makefile"
             ".github/workflows/build.yml"
+            ".github/workflows/release.yml"
+            ".github/workflows/release-nightly.yml"
           )
           echo "commit=$(git log --format=%H -1 --since="24 hours ago" -- "${relevant_files[@]}")" | tee "$GITHUB_OUTPUT"

@@ -34,23 +38,22 @@ jobs:
     uses: ./.github/workflows/release.yml
     with:
       prerelease: true
-      source: nightly
+      source: ${{ (github.repository != 'yt-dlp/yt-dlp' && vars.NIGHTLY_ARCHIVE_REPO) || 'nightly' }}
+      target: 'nightly'
     permissions:
       contents: write
       packages: write # For package cache
-      actions: write # For cleaning up cache
       id-token: write # mandatory for trusted publishing
     secrets: inherit

   publish_pypi:
     needs: [release]
-    if: vars.NIGHTLY_PYPI_PROJECT != ''
+    if: vars.NIGHTLY_PYPI_PROJECT
     runs-on: ubuntu-latest
     permissions:
       id-token: write # mandatory for trusted publishing
     steps:
       - name: Download artifacts
-        uses: actions/download-artifact@v4
+        uses: actions/download-artifact@v5
         with:
           path: dist
           name: build-pypi
290
.github/workflows/release.yml
vendored
290
.github/workflows/release.yml
vendored
|
|
@ -14,6 +14,10 @@ on:
|
|||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
linux_armv7l:
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
prerelease:
|
||||
required: false
|
||||
default: true
|
||||
|
|
@ -43,6 +47,10 @@ on:
|
|||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
linux_armv7l:
|
||||
description: Include linux_armv7l
|
||||
default: true
|
||||
type: boolean
|
||||
prerelease:
|
||||
description: Pre-release
|
||||
default: false
|
||||
|
|
@ -67,145 +75,67 @@ jobs:
|
|||
head_sha: ${{ steps.get_target.outputs.head_sha }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- uses: actions/setup-python@v5
|
||||
- uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: "3.10"
|
||||
python-version: "3.10" # Keep this in sync with test-workflows.yml
|
||||
|
||||
- name: Process inputs
|
||||
id: process_inputs
|
||||
env:
|
||||
INPUTS: ${{ toJSON(inputs) }}
|
||||
run: |
|
||||
cat << EOF
|
||||
::group::Inputs
|
||||
prerelease=${{ inputs.prerelease }}
|
||||
source=${{ inputs.source }}
|
||||
target=${{ inputs.target }}
|
||||
version=${{ inputs.version }}
|
||||
::endgroup::
|
||||
EOF
|
||||
IFS='@' read -r source_repo source_tag <<<"${{ inputs.source }}"
|
||||
IFS='@' read -r target_repo target_tag <<<"${{ inputs.target }}"
|
||||
cat << EOF >> "$GITHUB_OUTPUT"
|
||||
source_repo=${source_repo}
|
||||
source_tag=${source_tag}
|
||||
target_repo=${target_repo}
|
||||
target_tag=${target_tag}
|
||||
EOF
|
||||
python -m devscripts.setup_variables process_inputs

- name: Setup variables
id: setup_variables
env:
source_repo: ${{ steps.process_inputs.outputs.source_repo }}
source_tag: ${{ steps.process_inputs.outputs.source_tag }}
target_repo: ${{ steps.process_inputs.outputs.target_repo }}
target_tag: ${{ steps.process_inputs.outputs.target_tag }}
INPUTS: ${{ toJSON(inputs) }}
PROCESSED: ${{ toJSON(steps.process_inputs.outputs) }}
REPOSITORY: ${{ github.repository }}
PUSH_VERSION_COMMIT: ${{ vars.PUSH_VERSION_COMMIT }}
PYPI_PROJECT: ${{ vars.PYPI_PROJECT }}
SOURCE_PYPI_PROJECT: ${{ vars[format('{0}_pypi_project', steps.process_inputs.outputs.source_repo)] }}
SOURCE_PYPI_SUFFIX: ${{ vars[format('{0}_pypi_suffix', steps.process_inputs.outputs.source_repo)] }}
TARGET_PYPI_PROJECT: ${{ vars[format('{0}_pypi_project', steps.process_inputs.outputs.target_repo)] }}
TARGET_PYPI_SUFFIX: ${{ vars[format('{0}_pypi_suffix', steps.process_inputs.outputs.target_repo)] }}
SOURCE_ARCHIVE_REPO: ${{ vars[format('{0}_archive_repo', steps.process_inputs.outputs.source_repo)] }}
TARGET_ARCHIVE_REPO: ${{ vars[format('{0}_archive_repo', steps.process_inputs.outputs.target_repo)] }}
HAS_SOURCE_ARCHIVE_REPO_TOKEN: ${{ !!secrets[format('{0}_archive_repo_token', steps.process_inputs.outputs.source_repo)] }}
HAS_TARGET_ARCHIVE_REPO_TOKEN: ${{ !!secrets[format('{0}_archive_repo_token', steps.process_inputs.outputs.target_repo)] }}
HAS_ARCHIVE_REPO_TOKEN: ${{ !!secrets.ARCHIVE_REPO_TOKEN }}
run: |
# unholy bash monstrosity (sincere apologies)
fallback_token () {
if ${{ !secrets.ARCHIVE_REPO_TOKEN }}; then
echo "::error::Repository access secret ${target_repo_token^^} not found"
exit 1
fi
target_repo_token=ARCHIVE_REPO_TOKEN
return 0
}
python -m devscripts.setup_variables

source_is_channel=0
[[ "${source_repo}" == 'stable' ]] && source_repo='yt-dlp/yt-dlp'
if [[ -z "${source_repo}" ]]; then
source_repo='${{ github.repository }}'
elif [[ '${{ vars[format('{0}_archive_repo', env.source_repo)] }}' ]]; then
source_is_channel=1
source_channel='${{ vars[format('{0}_archive_repo', env.source_repo)] }}'
elif [[ -z "${source_tag}" && "${source_repo}" != */* ]]; then
source_tag="${source_repo}"
source_repo='${{ github.repository }}'
fi
resolved_source="${source_repo}"
if [[ "${source_tag}" ]]; then
resolved_source="${resolved_source}@${source_tag}"
elif [[ "${source_repo}" == 'yt-dlp/yt-dlp' ]]; then
resolved_source='stable'
fi

revision="${{ (inputs.prerelease || !vars.PUSH_VERSION_COMMIT) && '$(date -u +"%H%M%S")' || '' }}"
version="$(
python devscripts/update-version.py \
-c "${resolved_source}" -r "${{ github.repository }}" ${{ inputs.version || '$revision' }} | \
grep -Po "version=\K\d+\.\d+\.\d+(\.\d+)?")"

if [[ "${target_repo}" ]]; then
if [[ -z "${target_tag}" ]]; then
if [[ '${{ vars[format('{0}_archive_repo', env.target_repo)] }}' ]]; then
target_tag="${source_tag:-${version}}"
else
target_tag="${target_repo}"
target_repo='${{ github.repository }}'
fi
fi
if [[ "${target_repo}" != '${{ github.repository}}' ]]; then
target_repo='${{ vars[format('{0}_archive_repo', env.target_repo)] }}'
target_repo_token='${{ env.target_repo }}_archive_repo_token'
${{ !!secrets[format('{0}_archive_repo_token', env.target_repo)] }} || fallback_token
pypi_project='${{ vars[format('{0}_pypi_project', env.target_repo)] }}'
pypi_suffix='${{ vars[format('{0}_pypi_suffix', env.target_repo)] }}'
fi
else
target_tag="${source_tag:-${version}}"
if ((source_is_channel)); then
target_repo="${source_channel}"
target_repo_token='${{ env.source_repo }}_archive_repo_token'
${{ !!secrets[format('{0}_archive_repo_token', env.source_repo)] }} || fallback_token
pypi_project='${{ vars[format('{0}_pypi_project', env.source_repo)] }}'
pypi_suffix='${{ vars[format('{0}_pypi_suffix', env.source_repo)] }}'
else
target_repo='${{ github.repository }}'
fi
fi

if [[ "${target_repo}" == '${{ github.repository }}' ]] && ${{ !inputs.prerelease }}; then
pypi_project='${{ vars.PYPI_PROJECT }}'
fi

echo "::group::Output variables"
cat << EOF | tee -a "$GITHUB_OUTPUT"
channel=${resolved_source}
version=${version}
target_repo=${target_repo}
target_repo_token=${target_repo_token}
target_tag=${target_tag}
pypi_project=${pypi_project}
pypi_suffix=${pypi_suffix}
EOF
echo "::endgroup::"

- name: Update documentation
- name: Update version & documentation
env:
version: ${{ steps.setup_variables.outputs.version }}
target_repo: ${{ steps.setup_variables.outputs.target_repo }}
if: |
!inputs.prerelease && env.target_repo == github.repository
CHANNEL: ${{ steps.setup_variables.outputs.channel }}
# Use base repo since this could be committed; build jobs will call this again with true origin
REPOSITORY: ${{ github.repository }}
VERSION: ${{ steps.setup_variables.outputs.version }}
run: |
python devscripts/update-version.py -c "${CHANNEL}" -r "${REPOSITORY}" "${VERSION}"
python devscripts/update_changelog.py -vv
make doc

- name: Push to release
id: push_release
env:
version: ${{ steps.setup_variables.outputs.version }}
target_repo: ${{ steps.setup_variables.outputs.target_repo }}
VERSION: ${{ steps.setup_variables.outputs.version }}
GITHUB_EVENT_SENDER_LOGIN: ${{ github.event.sender.login }}
GITHUB_EVENT_REF: ${{ github.event.ref }}
if: |
!inputs.prerelease && env.target_repo == github.repository
!inputs.prerelease && steps.setup_variables.outputs.target_repo == github.repository
run: |
git config --global user.name "github-actions[bot]"
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git add -u
git commit -m "Release ${{ env.version }}" \
-m "Created by: ${{ github.event.sender.login }}" -m ":ci skip all"
git push origin --force ${{ github.event.ref }}:release
git commit -m "Release ${VERSION}" \
-m "Created by: ${GITHUB_EVENT_SENDER_LOGIN}" -m ":ci skip all"
git push origin --force "${GITHUB_EVENT_REF}:release"

- name: Get target commitish
id: get_target

@@ -214,10 +144,10 @@ jobs:

- name: Update master
env:
target_repo: ${{ steps.setup_variables.outputs.target_repo }}
GITHUB_EVENT_REF: ${{ github.event.ref }}
if: |
vars.PUSH_VERSION_COMMIT != '' && !inputs.prerelease && env.target_repo == github.repository
run: git push origin ${{ github.event.ref }}
vars.PUSH_VERSION_COMMIT && !inputs.prerelease && steps.setup_variables.outputs.target_repo == github.repository
run: git push origin "${GITHUB_EVENT_REF}"

build:
needs: prepare

@@ -226,10 +156,9 @@ jobs:
version: ${{ needs.prepare.outputs.version }}
channel: ${{ needs.prepare.outputs.channel }}
origin: ${{ needs.prepare.outputs.target_repo }}
linux_armv7l: ${{ inputs.linux_armv7l }}
permissions:
contents: read
packages: write # For package cache
actions: write # For cleaning up cache
secrets:
GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}

@@ -241,30 +170,30 @@ jobs:
id-token: write # mandatory for trusted publishing

steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
with:
fetch-depth: 0
- uses: actions/setup-python@v5
- uses: actions/setup-python@v6
with:
python-version: "3.10"

- name: Install Requirements
run: |
sudo apt -y install pandoc man
python devscripts/install_deps.py -o --include build
python devscripts/install_deps.py --only-optional-groups --include-group build

- name: Prepare
env:
version: ${{ needs.prepare.outputs.version }}
suffix: ${{ needs.prepare.outputs.pypi_suffix }}
channel: ${{ needs.prepare.outputs.channel }}
target_repo: ${{ needs.prepare.outputs.target_repo }}
pypi_project: ${{ needs.prepare.outputs.pypi_project }}
VERSION: ${{ needs.prepare.outputs.version }}
SUFFIX: ${{ needs.prepare.outputs.pypi_suffix }}
CHANNEL: ${{ needs.prepare.outputs.channel }}
TARGET_REPO: ${{ needs.prepare.outputs.target_repo }}
PYPI_PROJECT: ${{ needs.prepare.outputs.pypi_project }}
run: |
python devscripts/update-version.py -c "${{ env.channel }}" -r "${{ env.target_repo }}" -s "${{ env.suffix }}" "${{ env.version }}"
python devscripts/update-version.py -c "${CHANNEL}" -r "${TARGET_REPO}" -s "${SUFFIX}" "${VERSION}"
python devscripts/update_changelog.py -vv
python devscripts/make_lazy_extractors.py
sed -i -E '0,/(name = ")[^"]+(")/s//\1${{ env.pypi_project }}\2/' pyproject.toml
sed -i -E '0,/(name = ")[^"]+(")/s//\1'"${PYPI_PROJECT}"'\2/' pyproject.toml
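
The `sed` range `0,/pattern/s//…/` rewrites only the first `name = "…"` assignment in `pyproject.toml`. For readers less fluent in GNU sed ranges, a Python equivalent of that first-match-only substitution (illustrative only; the workflow itself uses `sed`):

```python
import os
import re
from pathlib import Path

# Rewrite only the first `name = "..."` occurrence, keeping the quotes,
# mirroring sed's `0,/pattern/s//replacement/` first-match range
pyproject = Path('pyproject.toml')
project_name = os.environ['PYPI_PROJECT']
patched = re.sub(
    r'(name = ")[^"]+(")',
    lambda m: m.group(1) + project_name + m.group(2),
    pyproject.read_text(encoding='utf-8'),
    count=1,
)
pyproject.write_text(patched, encoding='utf-8')
```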

- name: Build
run: |

@@ -298,97 +227,100 @@ jobs:
permissions:
contents: write
runs-on: ubuntu-latest

env:
TARGET_REPO: ${{ needs.prepare.outputs.target_repo }}
TARGET_TAG: ${{ needs.prepare.outputs.target_tag }}
VERSION: ${{ needs.prepare.outputs.version }}
HEAD_SHA: ${{ needs.prepare.outputs.head_sha }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
with:
fetch-depth: 0
- uses: actions/download-artifact@v4
- uses: actions/download-artifact@v5
with:
path: artifact
pattern: build-*
merge-multiple: true
- uses: actions/setup-python@v5
- uses: actions/setup-python@v6
with:
python-version: "3.10"

- name: Generate release notes
env:
head_sha: ${{ needs.prepare.outputs.head_sha }}
target_repo: ${{ needs.prepare.outputs.target_repo }}
target_tag: ${{ needs.prepare.outputs.target_tag }}
REPOSITORY: ${{ github.repository }}
BASE_REPO: yt-dlp/yt-dlp
NIGHTLY_REPO: yt-dlp/yt-dlp-nightly-builds
MASTER_REPO: yt-dlp/yt-dlp-master-builds
DOCS_PATH: ${{ env.TARGET_REPO == github.repository && format('/tree/{0}', env.TARGET_TAG) || '' }}
run: |
printf '%s' \
'[]' \
'(https://github.com/${{ github.repository }}#installation "Installation instructions") ' \
'[]' \
'(https://discord.gg/H5MNcFW63r "Discord") ' \
'[]' \
'(https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators "Donate") ' \
'[]' \
'(https://github.com/${{ github.repository }}' \
'${{ env.target_repo == github.repository && format('/tree/{0}', env.target_tag) || '' }}#readme "Documentation") ' \
${{ env.target_repo == 'yt-dlp/yt-dlp' && '\
"[]" \
"(https://github.com/${REPOSITORY}#installation \"Installation instructions\") " \
"[]" \
"(https://discord.gg/H5MNcFW63r \"Discord\") " \
"[]" \
"(https://github.com/${BASE_REPO}/blob/master/Maintainers.md#maintainers \"Donate\") " \
"[]" \
"(https://github.com/${REPOSITORY}${DOCS_PATH}#readme \"Documentation\") " > ./RELEASE_NOTES
if [[ "${TARGET_REPO}" == "${BASE_REPO}" ]]; then
printf '%s' \
"[]" \
"(https://github.com/yt-dlp/yt-dlp-nightly-builds/releases/latest \"Nightly builds\") " \
"(https://github.com/${NIGHTLY_REPO}/releases/latest \"Nightly builds\") " \
"[]" \
"(https://github.com/yt-dlp/yt-dlp-master-builds/releases/latest \"Master builds\")"' || '' }} > ./RELEASE_NOTES
printf '\n\n' >> ./RELEASE_NOTES
cat >> ./RELEASE_NOTES << EOF
#### A description of the various files is in the [README](https://github.com/${{ github.repository }}#release-files)
---
$(python ./devscripts/make_changelog.py -vv --collapsible)
EOF
"(https://github.com/${MASTER_REPO}/releases/latest \"Master builds\")" >> ./RELEASE_NOTES
fi
printf '\n\n%s\n\n%s%s%s\n\n---\n' \
"#### A description of the various files is in the [README](https://github.com/${REPOSITORY}#release-files)" \
"The zipimport Unix executable contains code licensed under ISC and MIT. " \
"The PyInstaller-bundled executables are subject to these and other licenses, all of which are compiled in " \
"[THIRD_PARTY_LICENSES.txt](https://github.com/${BASE_REPO}/blob/${HEAD_SHA}/THIRD_PARTY_LICENSES.txt)" >> ./RELEASE_NOTES
python ./devscripts/make_changelog.py -vv --collapsible >> ./RELEASE_NOTES
printf '%s\n\n' '**This is a pre-release build**' >> ./PRERELEASE_NOTES
cat ./RELEASE_NOTES >> ./PRERELEASE_NOTES
printf '%s\n\n' 'Generated from: https://github.com/${{ github.repository }}/commit/${{ env.head_sha }}' >> ./ARCHIVE_NOTES
printf '%s\n\n' "Generated from: https://github.com/${REPOSITORY}/commit/${HEAD_SHA}" >> ./ARCHIVE_NOTES
cat ./RELEASE_NOTES >> ./ARCHIVE_NOTES

- name: Publish to archive repo
env:
GH_TOKEN: ${{ secrets[needs.prepare.outputs.target_repo_token] }}
GH_REPO: ${{ needs.prepare.outputs.target_repo }}
version: ${{ needs.prepare.outputs.version }}
channel: ${{ needs.prepare.outputs.channel }}
TITLE_PREFIX: ${{ startswith(env.TARGET_REPO, 'yt-dlp/') && 'yt-dlp ' || '' }}
TITLE: ${{ inputs.target != env.TARGET_REPO && inputs.target || needs.prepare.outputs.channel }}
if: |
inputs.prerelease && env.GH_TOKEN != '' && env.GH_REPO != '' && env.GH_REPO != github.repository
inputs.prerelease && env.GH_TOKEN && env.GH_REPO && env.GH_REPO != github.repository
run: |
title="${{ startswith(env.GH_REPO, 'yt-dlp/') && 'yt-dlp ' || '' }}${{ env.channel }}"
gh release create \
--notes-file ARCHIVE_NOTES \
--title "${title} ${{ env.version }}" \
${{ env.version }} \
--title "${TITLE_PREFIX}${TITLE} ${VERSION}" \
"${VERSION}" \
artifact/*

- name: Prune old release
env:
GH_TOKEN: ${{ github.token }}
version: ${{ needs.prepare.outputs.version }}
target_repo: ${{ needs.prepare.outputs.target_repo }}
target_tag: ${{ needs.prepare.outputs.target_tag }}
if: |
env.target_repo == github.repository && env.target_tag != env.version
env.TARGET_REPO == github.repository && env.TARGET_TAG != env.VERSION
run: |
gh release delete --yes --cleanup-tag "${{ env.target_tag }}" || true
git tag --delete "${{ env.target_tag }}" || true
gh release delete --yes --cleanup-tag "${TARGET_TAG}" || true
git tag --delete "${TARGET_TAG}" || true
sleep 5 # Enough time to cover deletion race condition

- name: Publish release
env:
GH_TOKEN: ${{ github.token }}
version: ${{ needs.prepare.outputs.version }}
target_repo: ${{ needs.prepare.outputs.target_repo }}
target_tag: ${{ needs.prepare.outputs.target_tag }}
head_sha: ${{ needs.prepare.outputs.head_sha }}
NOTES_FILE: ${{ inputs.prerelease && 'PRERELEASE_NOTES' || 'RELEASE_NOTES' }}
TITLE_PREFIX: ${{ github.repository == 'yt-dlp/yt-dlp' && 'yt-dlp ' || '' }}
TITLE: ${{ env.TARGET_TAG != env.VERSION && format('{0} ', env.TARGET_TAG) || '' }}
PRERELEASE: ${{ inputs.prerelease && '1' || '0' }}
if: |
env.target_repo == github.repository
env.TARGET_REPO == github.repository
run: |
title="${{ github.repository == 'yt-dlp/yt-dlp' && 'yt-dlp ' || '' }}"
title+="${{ env.target_tag != env.version && format('{0} ', env.target_tag) || '' }}"
gh release create \
--notes-file ${{ inputs.prerelease && 'PRERELEASE_NOTES' || 'RELEASE_NOTES' }} \
--target ${{ env.head_sha }} \
--title "${title}${{ env.version }}" \
${{ inputs.prerelease && '--prerelease' || '' }} \
${{ env.target_tag }} \
artifact/*
gh_options=(
--notes-file "${NOTES_FILE}"
--target "${HEAD_SHA}"
--title "${TITLE_PREFIX}${TITLE}${VERSION}"
)
if ((PRERELEASE)); then
gh_options+=(--prerelease)
fi
gh release create "${gh_options[@]}" "${TARGET_TAG}" artifact/*

41
.github/workflows/signature-tests.yml
vendored
@@ -1,41 +0,0 @@
name: Signature Tests
on:
push:
paths:
- .github/workflows/signature-tests.yml
- test/test_youtube_signature.py
- yt_dlp/jsinterp.py
pull_request:
paths:
- .github/workflows/signature-tests.yml
- test/test_youtube_signature.py
- yt_dlp/jsinterp.py
permissions:
contents: read

concurrency:
group: signature-tests-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: ${{ github.event_name == 'pull_request' }}

jobs:
tests:
name: Signature Tests
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest]
python-version: ['3.9', '3.10', '3.11', '3.12', '3.13', pypy-3.11]
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Install test requirements
run: python3 ./devscripts/install_deps.py --only-optional --include test
- name: Run tests
timeout-minutes: 15
run: |
python3 -m yt_dlp -v || true # Print debug head
python3 ./devscripts/run_tests.py test/test_youtube_signature.py

52
.github/workflows/test-workflows.yml
vendored
Normal file

@@ -0,0 +1,52 @@
name: Test and lint workflows
on:
push:
paths:
- .github/workflows/*
- bundle/docker/linux/*.sh
- devscripts/setup_variables.py
- devscripts/setup_variables_tests.py
- devscripts/utils.py
pull_request:
paths:
- .github/workflows/*
- bundle/docker/linux/*.sh
- devscripts/setup_variables.py
- devscripts/setup_variables_tests.py
- devscripts/utils.py
permissions:
contents: read
env:
ACTIONLINT_VERSION: "1.7.8"
ACTIONLINT_SHA256SUM: be92c2652ab7b6d08425428797ceabeb16e31a781c07bc388456b4e592f3e36a
ACTIONLINT_REPO: https://github.com/rhysd/actionlint

jobs:
check:
name: Check workflows
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: actions/setup-python@v6
with:
python-version: "3.10" # Keep this in sync with release.yml's prepare job
- name: Install requirements
env:
ACTIONLINT_TARBALL: ${{ format('actionlint_{0}_linux_amd64.tar.gz', env.ACTIONLINT_VERSION) }}
run: |
python -m devscripts.install_deps --only-optional-groups --include-group test
sudo apt -y install shellcheck
python -m pip install -U pyflakes
curl -LO "${ACTIONLINT_REPO}/releases/download/v${ACTIONLINT_VERSION}/${ACTIONLINT_TARBALL}"
printf '%s %s' "${ACTIONLINT_SHA256SUM}" "${ACTIONLINT_TARBALL}" | sha256sum -c -
tar xvzf "${ACTIONLINT_TARBALL}" actionlint
chmod +x actionlint
- name: Run actionlint
run: |
./actionlint -color
- name: Check Docker shell scripts
run: |
shellcheck bundle/docker/linux/*.sh
- name: Test GHA devscripts
run: |
pytest -Werror --tb=short --color=yes devscripts/setup_variables_tests.py
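
The install step above gates the actionlint download on a pinned SHA-256 via `sha256sum -c`. A minimal Python equivalent of that verification, should a contributor want to reproduce the check outside the workflow (the file name and digest are the workflow's own values):

```python
import hashlib
from pathlib import Path

EXPECTED_SHA256 = 'be92c2652ab7b6d08425428797ceabeb16e31a781c07bc388456b4e592f3e36a'
TARBALL = Path('actionlint_1.7.8_linux_amd64.tar.gz')

# Hash the downloaded tarball and compare against the pinned digest,
# which is what `sha256sum -c` verifies in the workflow step above
digest = hashlib.sha256(TARBALL.read_bytes()).hexdigest()
if digest != EXPECTED_SHA256:
    raise SystemExit(f'checksum mismatch for {TARBALL}: {digest}')
print(f'{TARBALL}: OK')
```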

4
.gitignore
vendored

@@ -107,6 +107,7 @@ README.txt
test/testdata/sigs/player-*.js
test/testdata/thumbnails/empty.webp
test/testdata/thumbnails/foo\ %d\ bar/foo_%d.*
.ejs-*

# Binary
/youtube-dl

@@ -129,3 +130,6 @@ yt-dlp.zip
# Plugins
ytdlp_plugins/
yt-dlp-plugins

# Packages
yt_dlp_ejs/

CONTRIBUTING.md

@@ -12,6 +12,7 @@
- [Is your question about yt-dlp?](#is-your-question-about-yt-dlp)
- [Are you willing to share account details if needed?](#are-you-willing-to-share-account-details-if-needed)
- [Is the website primarily used for piracy](#is-the-website-primarily-used-for-piracy)
- [AUTOMATED CONTRIBUTIONS (AI / LLM) POLICY](#automated-contributions-ai--llm-policy)
- [DEVELOPER INSTRUCTIONS](#developer-instructions)
- [Adding new feature or making overarching changes](#adding-new-feature-or-making-overarching-changes)
- [Adding support for a new site](#adding-support-for-a-new-site)

@@ -134,6 +135,17 @@ While these steps won't necessarily ensure that no misuse of the account takes p
We follow [youtube-dl's policy](https://github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free) of not supporting services that are primarily used for infringing copyright. Additionally, it has been decided not to support porn sites that specialize in fakes. We also cannot support any service that serves only [DRM protected content](https://en.wikipedia.org/wiki/Digital_rights_management).


# AUTOMATED CONTRIBUTIONS (AI / LLM) POLICY

Please refrain from submitting issues or pull requests that have been generated by an LLM or other fully-automated tools. Any submission that is in violation of this policy will be closed, and the submitter may be blocked from this repository without warning.

If you submit an issue, you need to understand what your issue description is saying. You need to be able to answer questions about your bug report or feature request. Using an AI tool to *proofread* your issue/comment text is acceptable. Using an AI tool to *write* your issue/comment text is unacceptable.

If you submit a pull request, you need to understand what every line of code you've changed does. If you can't explain why your PR is doing something, then do not submit it. Using an AI tool to generate entire lines of code is unacceptable.

The rationale behind this policy is that automated contributions are a waste of the maintainers' time. Humans spend their time and brainpower reviewing every submission. Issues or pull requests generated by automation tools create an imbalance of effort between the submitter and the reviewer. Nobody learns anything when a maintainer reviews code written by an LLM.

Additionally, AI-generated code conflicts with this project's license (Unlicense), since you cannot truly release code into the public domain if you didn't author it yourself.


# DEVELOPER INSTRUCTIONS

@@ -165,7 +177,7 @@ While it is strongly recommended to use `hatch` for yt-dlp development, if you a

```shell
# To only install development dependencies:
$ python -m devscripts.install_deps --include dev
$ python -m devscripts.install_deps --include-group dev

# Or, for an editable install plus dev dependencies:
$ python -m pip install -e ".[default,dev]"

@@ -272,7 +284,7 @@ After you have ensured this site is distributing its content legally, you can fo

You can use `hatch fmt` to automatically fix problems. Rules that the linter/formatter enforces should not be disabled with `# noqa` unless a maintainer requests it. The only exception allowed is for old/printf-style string formatting in GraphQL query templates (use `# noqa: UP031`).
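
For illustration, a hypothetical extractor snippet (the query and function names are invented here, not taken from the codebase) showing the one permitted suppression:

```python
# Hypothetical example of the sole allowed `# noqa`: printf-style
# formatting inside a GraphQL query template
_VIDEO_QUERY = '''query {
  video(id: "%s") {
    title
    duration
  }
}'''


def build_video_query(video_id):
    return _VIDEO_QUERY % video_id  # noqa: UP031
```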

1. Make sure your code works under all [Python](https://www.python.org/) versions supported by yt-dlp, namely CPython >=3.9 and PyPy >=3.11. Backward compatibility is not required for even older versions of Python.
1. Make sure your code works under all [Python](https://www.python.org/) versions supported by yt-dlp, namely CPython >=3.10 and PyPy >=3.11. Backward compatibility is not required for even older versions of Python.
1. When the tests pass, [add](https://git-scm.com/docs/git-add) the new files, [commit](https://git-scm.com/docs/git-commit) them and [push](https://git-scm.com/docs/git-push) the result, like this:

```shell

@@ -768,12 +780,10 @@ view_count = int_or_none(video.get('views'))
```

# My pull request is labeled pending-fixes
## My pull request is labeled pending-fixes

The `pending-fixes` label is added when there are changes requested to a PR. When the necessary changes are made, the label should be removed. However, despite our best efforts, it may sometimes happen that the maintainer did not see the changes or forgot to remove the label. If your PR is still marked as `pending-fixes` a few days after all requested changes have been made, feel free to ping the maintainer who labeled your issue and ask them to re-review and remove the label.


# EMBEDDING YT-DLP
See [README.md#embedding-yt-dlp](README.md#embedding-yt-dlp) for instructions on how to embed yt-dlp in another Python program

40
CONTRIBUTORS

@@ -1,10 +1,10 @@
pukkandan (owner)
shirt-dev (collaborator)
coletdjnz/colethedj (collaborator)
Ashish0804 (collaborator)
bashonly (collaborator)
Grub4K (collaborator)
seproDev (collaborator)
shirt-dev (maintainer)
coletdjnz (maintainer)
Ashish0804 (maintainer)
bashonly (maintainer)
Grub4K (maintainer)
seproDev (maintainer)
h-h-h-h
pauldubois98
nixxo

@@ -806,3 +806,31 @@ junyilou
PierreMesure
Randalix
runarmod
gitchasing
zakaryan2004
cdce8p
nicolaasjan
willsmillie
CasualYT31
cecilia-sanare
dhwz
robin-mu
shssoichiro
thanhtaivtt
uoag
CaramelConnoisseur
ctengel
einstein95
evilpie
i3p9
JrM2628
krystophny
matyb08
pha1n0q
PierceLBrooks
sepro
TheQWERTYCodr
thomasmllt
w4grfw
WeidiDeng
Zer0spectrum

212
Changelog.md

@@ -4,6 +4,218 @@
# To create a release, dispatch the https://github.com/yt-dlp/yt-dlp/actions/workflows/release.yml workflow on master
-->
### 2025.11.12
#### Important changes
- **An external JavaScript runtime is now required for full YouTube support**
yt-dlp now requires users to have an external JavaScript runtime (e.g. Deno) installed in order to solve the JavaScript challenges presented by YouTube. [Read more](https://github.com/yt-dlp/yt-dlp/issues/15012)
#### Core changes
- **cookies**
- [Allow `--cookies-from-browser` for Safari on iOS](https://github.com/yt-dlp/yt-dlp/commit/e6414d64e73d86d65bb357e5ad59d0ca080d5812) ([#14950](https://github.com/yt-dlp/yt-dlp/issues/14950)) by [pha1n0q](https://github.com/pha1n0q)
- [Support Firefox cookies database v17](https://github.com/yt-dlp/yt-dlp/commit/bf7e04e9d8bd3c4a4614b67ce617b7ae5d17d62a) ([#15010](https://github.com/yt-dlp/yt-dlp/issues/15010)) by [Grub4K](https://github.com/Grub4K)
- **sponsorblock**: [Add `hook` category](https://github.com/yt-dlp/yt-dlp/commit/52f3c56e83bbb25eec2496b0499768753732a093) ([#14845](https://github.com/yt-dlp/yt-dlp/issues/14845)) by [seproDev](https://github.com/seproDev)
- **update**: [Fix PyInstaller onedir variant detection](https://github.com/yt-dlp/yt-dlp/commit/1c2ad94353d1c9e03615d20b6bbfc293286c7a32) ([#14800](https://github.com/yt-dlp/yt-dlp/issues/14800)) by [bashonly](https://github.com/bashonly)
#### Extractor changes
- **1tv**: live: [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/19c5d7c53013440ec4f3f56ebbb067531b272f3f) ([#14299](https://github.com/yt-dlp/yt-dlp/issues/14299)) by [swayll](https://github.com/swayll)
- **ardaudiothek**: [Add extractors](https://github.com/yt-dlp/yt-dlp/commit/0046fbcbfceee32fa2f68a8ea00cca02765470b6) ([#14309](https://github.com/yt-dlp/yt-dlp/issues/14309)) by [evilpie](https://github.com/evilpie), [marieell](https://github.com/marieell)
- **bunnycdn**
- [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/228ae9f0f2b441fa1296db2ed2b7afbd4a9a62a1) ([#14954](https://github.com/yt-dlp/yt-dlp/issues/14954)) by [seproDev](https://github.com/seproDev)
- [Support player subdomain URLs](https://github.com/yt-dlp/yt-dlp/commit/3ef867451cd9604b4195dfee00db768619629b2d) ([#14979](https://github.com/yt-dlp/yt-dlp/issues/14979)) by [einstein95](https://github.com/einstein95)
- **discoverynetworksde**: [Fix extraction](https://github.com/yt-dlp/yt-dlp/commit/10dea209d2460daf924c93835ddc2f0301cf2cd4) ([#14818](https://github.com/yt-dlp/yt-dlp/issues/14818)) by [dirkf](https://github.com/dirkf), [w4grfw](https://github.com/w4grfw) (With fixes in [f3c255b](https://github.com/yt-dlp/yt-dlp/commit/f3c255b63bd26069151fc3d3ba6dc626bb62ad6e) by [bashonly](https://github.com/bashonly))
- **floatplane**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/1ac7e6005cd3be9fff0b28be189c3a68ecd4c593) ([#14984](https://github.com/yt-dlp/yt-dlp/issues/14984)) by [i3p9](https://github.com/i3p9)
- **googledrive**
- [Fix subtitles extraction](https://github.com/yt-dlp/yt-dlp/commit/6d05cee4df30774ddce5c5c751fd2118f40c24fe) ([#14809](https://github.com/yt-dlp/yt-dlp/issues/14809)) by [seproDev](https://github.com/seproDev)
- [Rework extractor](https://github.com/yt-dlp/yt-dlp/commit/70f1098312fe53bc85358f7bd624370878b2fa28) ([#14746](https://github.com/yt-dlp/yt-dlp/issues/14746)) by [seproDev](https://github.com/seproDev)
- **kika**: [Do not extract non-existent subtitles](https://github.com/yt-dlp/yt-dlp/commit/79f9232ffbd57dde91c372b673b42801edaa9e53) ([#14813](https://github.com/yt-dlp/yt-dlp/issues/14813)) by [InvalidUsernameException](https://github.com/InvalidUsernameException)
- **mux**: [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/a0bda3b78609593ce1127215fc035c1a308a89b6) ([#14914](https://github.com/yt-dlp/yt-dlp/issues/14914)) by [PierceLBrooks](https://github.com/PierceLBrooks), [seproDev](https://github.com/seproDev)
- **nascarclassics**: [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/e8a6b1ca92f2a0ce2c187668165be23dc5506aab) ([#14866](https://github.com/yt-dlp/yt-dlp/issues/14866)) by [JrM2628](https://github.com/JrM2628)
- **nbc**: [Detect and discard DRM formats](https://github.com/yt-dlp/yt-dlp/commit/ee3a106f34124f0e2d28f062f5302863fd7639be) ([#14844](https://github.com/yt-dlp/yt-dlp/issues/14844)) by [bashonly](https://github.com/bashonly)
- **ntv.ru**: [Rework extractor](https://github.com/yt-dlp/yt-dlp/commit/5dde0d0c9fcef2ce57e486b2e563e0dff9b2845a) ([#14934](https://github.com/yt-dlp/yt-dlp/issues/14934)) by [anlar](https://github.com/anlar), [seproDev](https://github.com/seproDev) (With fixes in [a86eeaa](https://github.com/yt-dlp/yt-dlp/commit/a86eeaadf236ceaf6bb232eb410cf21572538aa6) by [seproDev](https://github.com/seproDev))
- **play.tv**: [Update extractor for new domain](https://github.com/yt-dlp/yt-dlp/commit/73fd850d170e01c47c31aaa6aa8fe90856d9ad18) ([#14905](https://github.com/yt-dlp/yt-dlp/issues/14905)) by [thomasmllt](https://github.com/thomasmllt)
- **tubetugraz**: [Support alternate URL format](https://github.com/yt-dlp/yt-dlp/commit/f3597cfafcab4d7d4c6d41bff3647681301f1e6b) ([#14718](https://github.com/yt-dlp/yt-dlp/issues/14718)) by [krystophny](https://github.com/krystophny)
- **twitch**
- [Fix playlist extraction](https://github.com/yt-dlp/yt-dlp/commit/cb78440e468608fd55546280b537387d375335f2) ([#15008](https://github.com/yt-dlp/yt-dlp/issues/15008)) by [bashonly](https://github.com/bashonly), [ctengel](https://github.com/ctengel)
- stream: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/7eff676183518175ce495ae63291c89f9b39f02a) ([#14988](https://github.com/yt-dlp/yt-dlp/issues/14988)) by [seproDev](https://github.com/seproDev)
- vod: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/b46c572b26be15683584102c5fb7e7bfde0c9821) ([#14999](https://github.com/yt-dlp/yt-dlp/issues/14999)) by [Zer0spectrum](https://github.com/Zer0spectrum)
- **urplay**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/808b1fed76fbd07840cc23a346c11334e3d34f43) ([#14785](https://github.com/yt-dlp/yt-dlp/issues/14785)) by [seproDev](https://github.com/seproDev)
- **web.archive**: youtube: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/d9e3011fd1c3a75871a50e78533afe78ad427ce3) ([#14753](https://github.com/yt-dlp/yt-dlp/issues/14753)) by [seproDev](https://github.com/seproDev)
- **xhamster**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/a1d6351c3fc82c07fa0ee70811ed84807f6bbb58) ([#14948](https://github.com/yt-dlp/yt-dlp/issues/14948)) by [CaramelConnoisseur](https://github.com/CaramelConnoisseur), [dhwz](https://github.com/dhwz)
- **youtube**
- [Add `tv_downgraded` client](https://github.com/yt-dlp/yt-dlp/commit/61cf34f5447177a73ba25ea9a47d7df516ca3b3b) ([#14887](https://github.com/yt-dlp/yt-dlp/issues/14887)) by [seproDev](https://github.com/seproDev) (With fixes in [fa35eb2](https://github.com/yt-dlp/yt-dlp/commit/fa35eb27eaf27df7b5854f527a89fc828c9e0ec0))
- [Fix `web_embedded` client extraction](https://github.com/yt-dlp/yt-dlp/commit/d6ee67725397807bbb5edcd0b2c94f5bca62d3f4) ([#14843](https://github.com/yt-dlp/yt-dlp/issues/14843)) by [bashonly](https://github.com/bashonly), [seproDev](https://github.com/seproDev)
- [Fix auto-generated metadata extraction](https://github.com/yt-dlp/yt-dlp/commit/a56217f9f6c594f6c419ce8dce9134198a9d90d0) ([#13896](https://github.com/yt-dlp/yt-dlp/issues/13896)) by [TheQWERTYCodr](https://github.com/TheQWERTYCodr)
- [Fix original language detection](https://github.com/yt-dlp/yt-dlp/commit/afc44022d0b736b2b3e87b52490bd35c53c53632) ([#14919](https://github.com/yt-dlp/yt-dlp/issues/14919)) by [bashonly](https://github.com/bashonly)
- [Implement external n/sig solver](https://github.com/yt-dlp/yt-dlp/commit/6224a3898821965a7d6a2cb9cc2de40a0fd6e6bc) ([#14157](https://github.com/yt-dlp/yt-dlp/issues/14157)) by [bashonly](https://github.com/bashonly), [coletdjnz](https://github.com/coletdjnz), [Grub4K](https://github.com/Grub4K), [seproDev](https://github.com/seproDev) (With fixes in [4b4223b](https://github.com/yt-dlp/yt-dlp/commit/4b4223b436fb03a12628679daed32ae4fc15ae4b), [ee98be4](https://github.com/yt-dlp/yt-dlp/commit/ee98be4ad767b77e4d8dd9bfd3c7d10f2e8397ff), [c0c9f30](https://github.com/yt-dlp/yt-dlp/commit/c0c9f30695db314df084e8701a7c376eb54f283c), [cacd163](https://github.com/yt-dlp/yt-dlp/commit/cacd1630a1a59e92f857d0d175c8730cffbf9801), [8636a9b](https://github.com/yt-dlp/yt-dlp/commit/8636a9bac3bed99984c1e297453660468ecf504b))
- [Support collaborators](https://github.com/yt-dlp/yt-dlp/commit/f87cfadb5c3cba8e9dc4231c9554548e9edb3882) ([#14677](https://github.com/yt-dlp/yt-dlp/issues/14677)) by [seproDev](https://github.com/seproDev)
- tab: [Fix duration extraction for feeds](https://github.com/yt-dlp/yt-dlp/commit/1d2f0edaf978a5541cfb8f7e83fec433c65c1011) ([#14668](https://github.com/yt-dlp/yt-dlp/issues/14668)) by [WeidiDeng](https://github.com/WeidiDeng)
#### Downloader changes
- **ffmpeg**
- [Apply `ffmpeg_args` for each format](https://github.com/yt-dlp/yt-dlp/commit/ffb7b7f446b6c67a28c66598ae91f4f2263e0d75) ([#14886](https://github.com/yt-dlp/yt-dlp/issues/14886)) by [bashonly](https://github.com/bashonly)
- [Limit read rate for DASH livestreams](https://github.com/yt-dlp/yt-dlp/commit/7af6d81f35aea8832023daa30ada10e6673a0529) ([#14918](https://github.com/yt-dlp/yt-dlp/issues/14918)) by [bashonly](https://github.com/bashonly)
#### Networking changes
- [Ensure underlying file object is closed when fully read](https://github.com/yt-dlp/yt-dlp/commit/5767fb4ab108dddb07fc839a3b0f4d323a7c4bea) ([#14935](https://github.com/yt-dlp/yt-dlp/issues/14935)) by [coletdjnz](https://github.com/coletdjnz)
#### Misc. changes
- [Fix zsh path argument completion](https://github.com/yt-dlp/yt-dlp/commit/c96e9291ab7bd6e7da66d33424982c8b0b4431c7) ([#14953](https://github.com/yt-dlp/yt-dlp/issues/14953)) by [matyb08](https://github.com/matyb08)
- **build**: [Bump musllinux Python version to 3.14](https://github.com/yt-dlp/yt-dlp/commit/646904cd3a79429ec5fdc43f904b3f57ae213f34) ([#14623](https://github.com/yt-dlp/yt-dlp/issues/14623)) by [bashonly](https://github.com/bashonly)
- **cleanup**
- Miscellaneous
- [c63b4e2](https://github.com/yt-dlp/yt-dlp/commit/c63b4e2a2b81cc78397c8709ef53ffd29bada213) by [bashonly](https://github.com/bashonly), [matyb08](https://github.com/matyb08), [sepro](https://github.com/sepro)
- [335653b](https://github.com/yt-dlp/yt-dlp/commit/335653be82d5ef999cfc2879d005397402eebec1) by [bashonly](https://github.com/bashonly), [seproDev](https://github.com/seproDev)
- **devscripts**: [Improve `install_deps` script](https://github.com/yt-dlp/yt-dlp/commit/73922e66e437fb4bb618bdc119a96375081bf508) ([#14766](https://github.com/yt-dlp/yt-dlp/issues/14766)) by [bashonly](https://github.com/bashonly)
- **test**: [Skip flaky tests if source unchanged](https://github.com/yt-dlp/yt-dlp/commit/ade8c2b36ff300edef87d48fd1ba835ac35c5b63) ([#14970](https://github.com/yt-dlp/yt-dlp/issues/14970)) by [bashonly](https://github.com/bashonly), [Grub4K](https://github.com/Grub4K)
### 2025.10.22
#### Important changes
- **A stopgap release with a *TEMPORARY partial* fix for YouTube support**
Some formats may still be unavailable, especially if cookies are passed to yt-dlp. The ***NEXT*** release, expected very soon, **will require an external JS runtime (e.g. Deno)** in order for YouTube downloads to work properly. [Read more](https://github.com/yt-dlp/yt-dlp/issues/14404)
- **The minimum *required* Python version has been raised to 3.10**
Python 3.9 has reached its end-of-life as of October 2025, and yt-dlp has now removed support for it. [Read more](https://github.com/yt-dlp/yt-dlp/issues/13858)
#### Core changes
- [Remove Python 3.9 support](https://github.com/yt-dlp/yt-dlp/commit/4e6a693057cfaf1ce1f07b019ed3bfce2bf936f6) ([#13861](https://github.com/yt-dlp/yt-dlp/issues/13861)) by [bashonly](https://github.com/bashonly)
#### Extractor changes
- **appleconnect**: [Rework extractor](https://github.com/yt-dlp/yt-dlp/commit/78748b506f0dca8236ac0045ed7f72f7cf334b62) ([#13229](https://github.com/yt-dlp/yt-dlp/issues/13229)) by [doe1080](https://github.com/doe1080)
- **idagio**: [Support URLs with country codes](https://github.com/yt-dlp/yt-dlp/commit/c9356f308dd3c5f9f494cb40ed14c5df017b4fe0) ([#14655](https://github.com/yt-dlp/yt-dlp/issues/14655)) by [robin-mu](https://github.com/robin-mu)
- **tvnoe**: [Rework Extractor](https://github.com/yt-dlp/yt-dlp/commit/fe5ae54a7b08ebe679f03afdeafbe1cee5784d5b) ([#13369](https://github.com/yt-dlp/yt-dlp/issues/13369)) by [doe1080](https://github.com/doe1080)
- **youtube**: [Use temporary player client workaround](https://github.com/yt-dlp/yt-dlp/commit/2c9091e355a7ba5d1edb69796ecdca48199b77fb) ([#14693](https://github.com/yt-dlp/yt-dlp/issues/14693)) by [gamer191](https://github.com/gamer191)
#### Misc. changes
- **cleanup**
- Miscellaneous
- [c7bda21](https://github.com/yt-dlp/yt-dlp/commit/c7bda2192aa24afce40fdbbbe056d269aa3b2872) by [bashonly](https://github.com/bashonly), [seproDev](https://github.com/seproDev)
- [de7b3c0](https://github.com/yt-dlp/yt-dlp/commit/de7b3c0705022cb777c5b4b7f0c69c59ad6ff538) by [bashonly](https://github.com/bashonly)
- **docs**: [Update list of maintainers](https://github.com/yt-dlp/yt-dlp/commit/dfc0a84c192a7357dd1768cc345d590253a14fe5) ([#14148](https://github.com/yt-dlp/yt-dlp/issues/14148)) by [bashonly](https://github.com/bashonly), [coletdjnz](https://github.com/coletdjnz), [seproDev](https://github.com/seproDev)
### 2025.10.14
#### Core changes
- [Fix `prefer-vp9-sort` compat option](https://github.com/yt-dlp/yt-dlp/commit/a6673a8e82276ea529c1773ed09e5bc4a22e822a) ([#14603](https://github.com/yt-dlp/yt-dlp/issues/14603)) by [seproDev](https://github.com/seproDev)
#### Extractor changes
- **10play**
- [Handle geo-restriction errors](https://github.com/yt-dlp/yt-dlp/commit/ad55bfcfb700fbfc1364c04e3425761d6f95c0a7) ([#14618](https://github.com/yt-dlp/yt-dlp/issues/14618)) by [bashonly](https://github.com/bashonly)
- [Rework extractor](https://github.com/yt-dlp/yt-dlp/commit/eafedc21817bb0de20e9aaccd7151a1d4c4e1ebd) ([#14417](https://github.com/yt-dlp/yt-dlp/issues/14417)) by [seproDev](https://github.com/seproDev), [Sipherdrakon](https://github.com/Sipherdrakon)
- **abc.net.au**: [Support listen URLs](https://github.com/yt-dlp/yt-dlp/commit/0ea5d5882def84415f946907cfc00ab431c18fed) ([#14389](https://github.com/yt-dlp/yt-dlp/issues/14389)) by [uoag](https://github.com/uoag)
- **cbc.ca**: listen: [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/df160ab18db523f6629f2e7e20123d7a3551df28) ([#14391](https://github.com/yt-dlp/yt-dlp/issues/14391)) by [uoag](https://github.com/uoag)
- **dropout**: [Update extractor for new domain](https://github.com/yt-dlp/yt-dlp/commit/8eb8695139dece6351aac10463df63b87b45b000) ([#14531](https://github.com/yt-dlp/yt-dlp/issues/14531)) by [cecilia-sanare](https://github.com/cecilia-sanare)
- **idagio**: [Add extractors](https://github.com/yt-dlp/yt-dlp/commit/a98e7f9f58a9492d2cb216baa59c890ed8ce02f3) ([#14586](https://github.com/yt-dlp/yt-dlp/issues/14586)) by [robin-mu](https://github.com/robin-mu)
- **musescore**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/87be1bb96ac47abaaa4cfc6d7dd651e511b74551) ([#14598](https://github.com/yt-dlp/yt-dlp/issues/14598)) by [seproDev](https://github.com/seproDev)
- **prankcastpost**: [Rework extractor](https://github.com/yt-dlp/yt-dlp/commit/5d7678195a7d0c045a9fe0418383171a71a7ea43) ([#14445](https://github.com/yt-dlp/yt-dlp/issues/14445)) by [columndeeply](https://github.com/columndeeply)
- **slideslive**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/c2e124881f9aa02097589e853b3d3505e78372c4) ([#14619](https://github.com/yt-dlp/yt-dlp/issues/14619)) by [bashonly](https://github.com/bashonly)
- **soundcloud**: [Support new API URLs](https://github.com/yt-dlp/yt-dlp/commit/6d41aaf21c61a87e74564646abd0a8ee887e888d) ([#14449](https://github.com/yt-dlp/yt-dlp/issues/14449)) by [seproDev](https://github.com/seproDev)
- **tiktok**
- [Support browser impersonation](https://github.com/yt-dlp/yt-dlp/commit/5513036104ed9710f624c537fb3644b07a0680db) ([#14473](https://github.com/yt-dlp/yt-dlp/issues/14473)) by [bashonly](https://github.com/bashonly), [thanhtaivtt](https://github.com/thanhtaivtt)
- user: [Fix private account extraction](https://github.com/yt-dlp/yt-dlp/commit/cdc533b114c35ceb8a2e9dd3eb9c172a8737ae5e) ([#14585](https://github.com/yt-dlp/yt-dlp/issues/14585)) by [CasualYT31](https://github.com/CasualYT31)
- **vidyard**: [Extract chapters](https://github.com/yt-dlp/yt-dlp/commit/5f94f054907c12e68129cd9ac2508ed8aba1b223) ([#14478](https://github.com/yt-dlp/yt-dlp/issues/14478)) by [exterrestris](https://github.com/exterrestris)
- **xhamster**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/739125d40f8ede3beb7be68fc4df55bec0d226fd) ([#14446](https://github.com/yt-dlp/yt-dlp/issues/14446)) by [dhwz](https://github.com/dhwz), [dirkf](https://github.com/dirkf), [shssoichiro](https://github.com/shssoichiro)
- **youtube**
- [Detect experiment binding GVS PO Token to video id](https://github.com/yt-dlp/yt-dlp/commit/bd5ed90419eea18adfb2f0d8efa9d22b2029119f) ([#14471](https://github.com/yt-dlp/yt-dlp/issues/14471)) by [coletdjnz](https://github.com/coletdjnz)
- tab: [Fix approximate timestamp extraction for feeds](https://github.com/yt-dlp/yt-dlp/commit/ccc25d6710a4aa373b7e15c558e07f8a2ffae5f3) ([#14539](https://github.com/yt-dlp/yt-dlp/issues/14539)) by [coletdjnz](https://github.com/coletdjnz)
### 2025.09.26
#### Extractor changes
- **twitch**: vod: [Fix `live_status` detection](https://github.com/yt-dlp/yt-dlp/commit/50e452fd7dfb8a648bd3b9aaabc8f94f37ce2051) ([#14457](https://github.com/yt-dlp/yt-dlp/issues/14457)) by [bashonly](https://github.com/bashonly)
- **youtube**
- [Fix player JS overrides](https://github.com/yt-dlp/yt-dlp/commit/b7b7910d96359a539b7997890342ab4a59dd685d) ([#14430](https://github.com/yt-dlp/yt-dlp/issues/14430)) by [bashonly](https://github.com/bashonly), [seproDev](https://github.com/seproDev)
- [Improve PO token logging](https://github.com/yt-dlp/yt-dlp/commit/7df5acc546dccd32213c3a125d721e32b06d71b0) ([#14447](https://github.com/yt-dlp/yt-dlp/issues/14447)) by [seproDev](https://github.com/seproDev)
- [Player client maintenance](https://github.com/yt-dlp/yt-dlp/commit/94c5622be96474ca3c637e52898c4daee4d8fb69) ([#14448](https://github.com/yt-dlp/yt-dlp/issues/14448)) by [seproDev](https://github.com/seproDev)
- [Replace `tv_simply` with `web_safari` in default clients](https://github.com/yt-dlp/yt-dlp/commit/12b57d2858845c0c7fb33bf9aa8ed7be6905535d) ([#14465](https://github.com/yt-dlp/yt-dlp/issues/14465)) by [bashonly](https://github.com/bashonly)
### 2025.09.23
#### Important changes
- **Several options have been deprecated**
In order to simplify the codebase and reduce maintenance burden, various options have been deprecated. Please remove them from your commands/configurations. [Read more](https://github.com/yt-dlp/yt-dlp/issues/14198)
#### Core changes
- **compat**: [Add `compat_datetime_from_timestamp`](https://github.com/yt-dlp/yt-dlp/commit/6a763a55d8a93b2a964ecf7699248ad342485412) ([#11902](https://github.com/yt-dlp/yt-dlp/issues/11902)) by [pzhlkj6612](https://github.com/pzhlkj6612), [seproDev](https://github.com/seproDev)
- **utils**
- `mimetype2ext`: [Recognize `vnd.dlna.mpeg-tts`](https://github.com/yt-dlp/yt-dlp/commit/98b6b0d339130e955f9d45ce67c0357c633c1627) ([#14388](https://github.com/yt-dlp/yt-dlp/issues/14388)) by [seproDev](https://github.com/seproDev)
- `random_user_agent`: [Bump versions](https://github.com/yt-dlp/yt-dlp/commit/f3829463c728a5b5e62b3fc157e71c99b26edac7) ([#14317](https://github.com/yt-dlp/yt-dlp/issues/14317)) by [seproDev](https://github.com/seproDev)
#### Extractor changes
- **10play**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/067062bb87ac057e453ce9efdac7ca117a6a7da0) ([#14242](https://github.com/yt-dlp/yt-dlp/issues/14242)) by [Sipherdrakon](https://github.com/Sipherdrakon)
- **applepodcast**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/b2c01d0498653e0239c7226c5a7fcb614dd4dbc8) ([#14372](https://github.com/yt-dlp/yt-dlp/issues/14372)) by [seproDev](https://github.com/seproDev)
- **loco**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/f5cb721185e8725cf4eb4080e86aa9aa73ef25b3) ([#14256](https://github.com/yt-dlp/yt-dlp/issues/14256)) by [seproDev](https://github.com/seproDev)
- **mitele**: [Remove extractor](https://github.com/yt-dlp/yt-dlp/commit/820c6e244571557fcfc127d4b3680e2d07c04dca) ([#14348](https://github.com/yt-dlp/yt-dlp/issues/14348)) by [bashonly](https://github.com/bashonly)
- **newspicks**: [Warn when only preview is available](https://github.com/yt-dlp/yt-dlp/commit/9def9a4b0e958285e055eb350e5dd43b5c423336) ([#14197](https://github.com/yt-dlp/yt-dlp/issues/14197)) by [doe1080](https://github.com/doe1080)
- **onsen**: [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/17bfaa53edf5c52fce73cf0cef4592f929c2462d) ([#10971](https://github.com/yt-dlp/yt-dlp/issues/10971)) by [doe1080](https://github.com/doe1080)
- **pixivsketch**: [Remove extractors](https://github.com/yt-dlp/yt-dlp/commit/3d9a88bd8ef149d781c7e569e48e61551eda395e) ([#14196](https://github.com/yt-dlp/yt-dlp/issues/14196)) by [doe1080](https://github.com/doe1080)
- **smotrim**: [Rework extractors](https://github.com/yt-dlp/yt-dlp/commit/8cb037c0b06c2815080f87d61ea2e95c412785fc) ([#14200](https://github.com/yt-dlp/yt-dlp/issues/14200)) by [doe1080](https://github.com/doe1080), [swayll](https://github.com/swayll)
- **telecinco**: [Support browser impersonation](https://github.com/yt-dlp/yt-dlp/commit/e123a48f1155703d8709a4221a42bd45c0a2b3ce) ([#14351](https://github.com/yt-dlp/yt-dlp/issues/14351)) by [bashonly](https://github.com/bashonly)
- **tiktok**: live: [Fix room ID extraction](https://github.com/yt-dlp/yt-dlp/commit/5c1abcdc49b9d23e1dcb77b95d063cf2bf93e352) ([#14287](https://github.com/yt-dlp/yt-dlp/issues/14287)) by [bashonly](https://github.com/bashonly)
- **ttinglive**: [Adapt FlexTV extractor to new domain](https://github.com/yt-dlp/yt-dlp/commit/4bc19adc8798e7564513898cf34adc432c6c5709) ([#14375](https://github.com/yt-dlp/yt-dlp/issues/14375)) by [seproDev](https://github.com/seproDev)
- **tunein**: [Fix extractors](https://github.com/yt-dlp/yt-dlp/commit/7d9e48b22a780c2e8d2d2d68940d49fd2029ab70) ([#13981](https://github.com/yt-dlp/yt-dlp/issues/13981)) by [doe1080](https://github.com/doe1080)
- **twitch**: clips: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/f8750504c2f71b54586fb857d60dce4e354a13ea) ([#14397](https://github.com/yt-dlp/yt-dlp/issues/14397)) by [seproDev](https://github.com/seproDev)
- **vimeo**: [Fix login error handling](https://github.com/yt-dlp/yt-dlp/commit/679587dac7cd011a1472255e1f06efb017ba91b6) ([#14280](https://github.com/yt-dlp/yt-dlp/issues/14280)) by [bashonly](https://github.com/bashonly)
- **vk**
- [Support vksport URLs](https://github.com/yt-dlp/yt-dlp/commit/b81e9272dce5844e8fba371cb4b4fd95ad3ed819) ([#14341](https://github.com/yt-dlp/yt-dlp/issues/14341)) by [seproDev](https://github.com/seproDev)
- uservideos: [Support alternate URL format](https://github.com/yt-dlp/yt-dlp/commit/bf5d18016b03a3f2fd5d3494d9efe85d3f8beeac) ([#14376](https://github.com/yt-dlp/yt-dlp/issues/14376)) by [seproDev](https://github.com/seproDev)
- **xhamster**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/a1c98226a4e869a34cc764a9dcf7a4558516308e) ([#14286](https://github.com/yt-dlp/yt-dlp/issues/14286)) by [nicolaasjan](https://github.com/nicolaasjan), [willsmillie](https://github.com/willsmillie) (With fixes in [677997d](https://github.com/yt-dlp/yt-dlp/commit/677997d84eaec0037397f7d935386daa3025b004) by [arand](https://github.com/arand), [thegymguy](https://github.com/thegymguy))
- **youtube**: [Force player `0004de42`](https://github.com/yt-dlp/yt-dlp/commit/7f5d9f8543d19590eeec9473d54fa00151afa78a) ([#14398](https://github.com/yt-dlp/yt-dlp/issues/14398)) by [seproDev](https://github.com/seproDev)
#### Misc. changes
- **build**
- [Fix cache warmer](https://github.com/yt-dlp/yt-dlp/commit/8597a4331e8535a246d777bb8397bdcab251766c) ([#14261](https://github.com/yt-dlp/yt-dlp/issues/14261)) by [bashonly](https://github.com/bashonly)
- [Post-release workflow cleanup](https://github.com/yt-dlp/yt-dlp/commit/cd94e7004036e0149d7d3fa236c7dd44cf460788) ([#14250](https://github.com/yt-dlp/yt-dlp/issues/14250)) by [bashonly](https://github.com/bashonly)
- [Refactor Linux build jobs](https://github.com/yt-dlp/yt-dlp/commit/e2d37bcc8e84be9ce0f67fc24cb830c13963d10f) ([#14275](https://github.com/yt-dlp/yt-dlp/issues/14275)) by [bashonly](https://github.com/bashonly)
- [Use PyInstaller 6.16 for Windows](https://github.com/yt-dlp/yt-dlp/commit/df4b4e8ccf3385be6d2ad65465a0704c223dfdfb) ([#14318](https://github.com/yt-dlp/yt-dlp/issues/14318)) by [bashonly](https://github.com/bashonly)
- [Use SPDX license identifier](https://github.com/yt-dlp/yt-dlp/commit/48a214bef4bfd5984362d3d24b09dce50ba449ea) ([#14260](https://github.com/yt-dlp/yt-dlp/issues/14260)) by [cdce8p](https://github.com/cdce8p)
- [Use new PyInstaller builds for Windows](https://github.com/yt-dlp/yt-dlp/commit/c8ede5f34d6c95c442b936bb01ecbcb724aefdef) ([#14273](https://github.com/yt-dlp/yt-dlp/issues/14273)) by [bashonly](https://github.com/bashonly)
- **ci**
- [Bump actions/setup-python to v6](https://github.com/yt-dlp/yt-dlp/commit/22ea0688ed6bcdbe4c51401a84239cda3decfc9c) ([#14282](https://github.com/yt-dlp/yt-dlp/issues/14282)) by [bashonly](https://github.com/bashonly)
- [Improve workflow checks](https://github.com/yt-dlp/yt-dlp/commit/ae3923b6b23bc62115be55510d6b5842f7a46b5f) ([#14316](https://github.com/yt-dlp/yt-dlp/issues/14316)) by [bashonly](https://github.com/bashonly)
- [Test and lint workflows](https://github.com/yt-dlp/yt-dlp/commit/7c9b10ebc83907d37f9f65ea9d4bd6f5e3bd1371) ([#14249](https://github.com/yt-dlp/yt-dlp/issues/14249)) by [bashonly](https://github.com/bashonly)
- [Test with Python 3.14](https://github.com/yt-dlp/yt-dlp/commit/83b8409366d0f9554eaeae56394b244dab64a2cb) ([#13468](https://github.com/yt-dlp/yt-dlp/issues/13468)) by [bashonly](https://github.com/bashonly)
- **cleanup**
- [Bump ruff to 0.13.x](https://github.com/yt-dlp/yt-dlp/commit/ba8044685537e8e14adc6826fb4d730856fd2e2b) ([#14293](https://github.com/yt-dlp/yt-dlp/issues/14293)) by [bashonly](https://github.com/bashonly)
- [Deprecate various options](https://github.com/yt-dlp/yt-dlp/commit/08d78996831bd8e1e3c2592d740c3def00bbf548) ([#13821](https://github.com/yt-dlp/yt-dlp/issues/13821)) by [seproDev](https://github.com/seproDev)
- [Remove broken extractors](https://github.com/yt-dlp/yt-dlp/commit/65e90aea29cf3bfc9d1ae3e009fbf9a8db3a23c9) ([#14305](https://github.com/yt-dlp/yt-dlp/issues/14305)) by [bashonly](https://github.com/bashonly)
- [Remove setup.cfg](https://github.com/yt-dlp/yt-dlp/commit/eb4b3a5fc7765a6cd0370ca44ccee0d7d5111dd7) ([#14314](https://github.com/yt-dlp/yt-dlp/issues/14314)) by [seproDev](https://github.com/seproDev) (With fixes in [8ab262c](https://github.com/yt-dlp/yt-dlp/commit/8ab262c66bd3e1d8874fb2d070068ba1f0d48f16) by [bashonly](https://github.com/bashonly))
- Miscellaneous: [2e81e29](https://github.com/yt-dlp/yt-dlp/commit/2e81e298cdce23afadb06a95836284acb38f7018) by [bashonly](https://github.com/bashonly), [doe1080](https://github.com/doe1080), [seproDev](https://github.com/seproDev)
- **docs**
- [Clarify license of PyInstaller-bundled executables](https://github.com/yt-dlp/yt-dlp/commit/e6e6b512141e66b1b36058966804fe59c02a2b4d) ([#14257](https://github.com/yt-dlp/yt-dlp/issues/14257)) by [seproDev](https://github.com/seproDev)
- [Establish AI/LLM contribution policy](https://github.com/yt-dlp/yt-dlp/commit/8821682f15af59047bc1f92724ef8a9ba30d6f7e) ([#14194](https://github.com/yt-dlp/yt-dlp/issues/14194)) by [bashonly](https://github.com/bashonly), [seproDev](https://github.com/seproDev)
- **test**: utils: [Fix `sanitize_path` test for Windows CPython 3.11](https://github.com/yt-dlp/yt-dlp/commit/a183837ec8bb5e28fe6eb3a9d77ea2d0d7a106bd) ([#13878](https://github.com/yt-dlp/yt-dlp/issues/13878)) by [Grub4K](https://github.com/Grub4K)
### 2025.09.05
#### Core changes
- [Fix `--id` deprecation warning](https://github.com/yt-dlp/yt-dlp/commit/76bb46002c9a9655f2b1d29d4840e75e79037cfa) ([#14190](https://github.com/yt-dlp/yt-dlp/issues/14190)) by [seproDev](https://github.com/seproDev)
#### Extractor changes
|
||||
- **charlierose**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/603acdff07f0226088916886002d2ad8309ff9d3) ([#14231](https://github.com/yt-dlp/yt-dlp/issues/14231)) by [gitchasing](https://github.com/gitchasing)
|
||||
- **googledrive**: [Fix subtitles extraction](https://github.com/yt-dlp/yt-dlp/commit/18fe696df9d60804a8f5cb8cd74f38111d6eb711) ([#14139](https://github.com/yt-dlp/yt-dlp/issues/14139)) by [zakaryan2004](https://github.com/zakaryan2004)
|
||||
- **itvbtcc**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/0b51005b4819e7cea222fcbaf8e60391db4f732c) ([#14161](https://github.com/yt-dlp/yt-dlp/issues/14161)) by [garret1317](https://github.com/garret1317)
|
||||
- **kick**: vod: [Support ongoing livestream VODs](https://github.com/yt-dlp/yt-dlp/commit/1e28f6bf743627b909135bb9a88537ad2deccaf0) ([#14154](https://github.com/yt-dlp/yt-dlp/issues/14154)) by [InvalidUsernameException](https://github.com/InvalidUsernameException)
|
||||
- **lrt**: [Fix extractors](https://github.com/yt-dlp/yt-dlp/commit/ed24640943872c4cf30d7cc4601bec87b50ba03c) ([#14193](https://github.com/yt-dlp/yt-dlp/issues/14193)) by [seproDev](https://github.com/seproDev)
|
||||
- **tver**: [Extract more metadata](https://github.com/yt-dlp/yt-dlp/commit/223baa81f6637dcdef108f817180d8d1ae9fa213) ([#14165](https://github.com/yt-dlp/yt-dlp/issues/14165)) by [arabcoders](https://github.com/arabcoders)
|
||||
- **vevo**: [Restore extractors](https://github.com/yt-dlp/yt-dlp/commit/d925e92b710153d0d51d030f115b3c87226bc0f0) ([#14203](https://github.com/yt-dlp/yt-dlp/issues/14203)) by [seproDev](https://github.com/seproDev)
|
||||
|
||||
#### Misc. changes
|
||||
- **build**: [Overhaul Linux builds and refactor release workflow](https://github.com/yt-dlp/yt-dlp/commit/50136eeeb3767289b236f140b759f23b39b00888) ([#13997](https://github.com/yt-dlp/yt-dlp/issues/13997)) by [bashonly](https://github.com/bashonly)
|
||||
|
||||
### 2025.08.27
|
||||
|
||||
#### Extractor changes
|
||||
- **generic**
|
||||
- [Simplify invalid URL error message](https://github.com/yt-dlp/yt-dlp/commit/1ddbd033f0fd65917526b1271cea66913ac8647f) ([#14167](https://github.com/yt-dlp/yt-dlp/issues/14167)) by [seproDev](https://github.com/seproDev)
|
||||
- [Use https as fallback protocol](https://github.com/yt-dlp/yt-dlp/commit/fec30c56f0e97e573ace659104ff0d72c4cc9809) ([#14160](https://github.com/yt-dlp/yt-dlp/issues/14160)) by [seproDev](https://github.com/seproDev)
|
||||
- **skeb**: [Support wav files](https://github.com/yt-dlp/yt-dlp/commit/d6950c27af31908363c5c815e3b7eb4f9ff41643) ([#14147](https://github.com/yt-dlp/yt-dlp/issues/14147)) by [seproDev](https://github.com/seproDev)
|
||||
- **youtube**
|
||||
- [Add `tcc` player JS variant](https://github.com/yt-dlp/yt-dlp/commit/8f4a908300f55054bc96814bceeaa1034fdf4110) ([#14134](https://github.com/yt-dlp/yt-dlp/issues/14134)) by [bashonly](https://github.com/bashonly)
|
||||
- [Deprioritize `web_safari` m3u8 formats](https://github.com/yt-dlp/yt-dlp/commit/5c7ad68ff1643ad80d18cef8be9db8fcab05ee6c) ([#14168](https://github.com/yt-dlp/yt-dlp/issues/14168)) by [bashonly](https://github.com/bashonly)
|
||||
- [Player client maintenance](https://github.com/yt-dlp/yt-dlp/commit/3bd91544122142a87863d79e54e995c26cfd7f92) ([#14135](https://github.com/yt-dlp/yt-dlp/issues/14135)) by [bashonly](https://github.com/bashonly)
|
||||
- [Use alternative `tv` user-agent when authenticated](https://github.com/yt-dlp/yt-dlp/commit/8cd37b85d492edb56a4f7506ea05527b85a6b02b) ([#14169](https://github.com/yt-dlp/yt-dlp/issues/14169)) by [bashonly](https://github.com/bashonly)
|
||||
|
||||
### 2025.08.22
|
||||
|
||||
#### Core changes
|
||||
|
|
|
|||
|
|
Collaborators.md → Maintainers.md (renamed)

@@ -1,59 +1,36 @@
# Collaborators
# Maintainers

This is a list of the collaborators of the project and their major contributions. See the [Changelog](Changelog.md) for more details.
This file lists the maintainers of yt-dlp and their major contributions. See the [Changelog](Changelog.md) for more details.

You can also find lists of all [contributors of yt-dlp](CONTRIBUTORS) and [authors of youtube-dl](https://github.com/ytdl-org/youtube-dl/blob/master/AUTHORS)

## Core Maintainers

## [pukkandan](https://github.com/pukkandan)

Core Maintainers are responsible for reviewing and merging contributions, publishing releases, and steering the overall direction of the project.

[![ko-fi](https://img.shields.io/badge/_-Ko--fi-red.svg?logo=kofi&labelColor=555555&style=for-the-badge)](https://ko-fi.com/pukkandan)
[![gh-sponsor](https://img.shields.io/badge/_-Github-white.svg?logo=github&labelColor=555555&style=for-the-badge)](https://github.com/sponsors/pukkandan)

**You can contact the core maintainers via `maintainers@yt-dlp.org`.**

* Owner of the fork

This is **NOT** a support channel. [Open an issue](https://github.com/yt-dlp/yt-dlp/issues/new/choose) if you need help or want to report a bug.

## [shirt](https://github.com/shirt-dev)

[![ko-fi](https://img.shields.io/badge/_-Ko--fi-red.svg?logo=kofi&labelColor=555555&style=for-the-badge)](https://ko-fi.com/shirt)

* Multithreading (`-N`) and aria2c support for fragment downloads
* Support for media initialization and discontinuity in HLS
* The self-updater (`-U`)

## [coletdjnz](https://github.com/coletdjnz)
### [coletdjnz](https://github.com/coletdjnz)

[![gh-sponsor](https://img.shields.io/badge/_-Github-white.svg?logo=github&labelColor=555555&style=for-the-badge)](https://github.com/sponsors/coletdjnz)

* Improved plugin architecture
* Rewrote the networking infrastructure, implemented support for `requests`
* YouTube improvements including: age-gate bypass, private playlists, multiple-clients (to avoid throttling) and a lot of under-the-hood improvements
* Added support for new websites YoutubeWebArchive, MainStreaming, PRX, nzherald, Mediaklikk, StarTV etc
* Improved/fixed support for Patreon, panopto, gfycat, itv, pbs, SouthParkDE etc
* Overhauled the networking stack and implemented support for `requests` and `curl_cffi` (`--impersonate`) HTTP clients
* Reworked the plugin architecture to support installing plugins across all yt-dlp distributions (exe, pip, etc.)
* Maintains support for YouTube
* Added and fixed support for various other sites

### [bashonly](https://github.com/bashonly)

* Rewrote and maintains the build/release workflows and the self-updater: executables, automated/nightly/master releases, `--update-to`
* Overhauled external downloader cookie handling
* Added `--cookies-from-browser` support for Firefox containers
* Overhauled and maintains support for sites like Youtube, Vimeo, Twitter, TikTok, etc
* Added support for sites like Dacast, Kick, Loom, SproutVideo, Triller, Weverse, etc

## [Ashish0804](https://github.com/Ashish0804) <sub><sup>[Inactive]</sup></sub>

[![ko-fi](https://img.shields.io/badge/_-Ko--fi-red.svg?logo=kofi&labelColor=555555&style=for-the-badge)](https://ko-fi.com/ashish0804)

* Added support for new websites BiliIntl, DiscoveryPlusIndia, OlympicsReplay, PlanetMarathi, ShemarooMe, Utreon, Zee5 etc
* Added playlist/series downloads for Hotstar, ParamountPlus, Rumble, SonyLIV, Trovo, TubiTv, Voot etc
* Improved/fixed support for HiDive, HotStar, Hungama, LBRY, LinkedInLearning, Mxplayer, SonyLiv, TV2, Vimeo, VLive etc

## [bashonly](https://github.com/bashonly)

* `--update-to`, self-updater rewrite, automated/nightly/master releases
* `--cookies-from-browser` support for Firefox containers, external downloader cookie handling overhaul
* Added support for new websites like Dacast, Kick, NBCStations, Triller, VideoKen, Weverse, WrestleUniverse etc
* Improved/fixed support for Anvato, Brightcove, Reddit, SlidesLive, TikTok, Twitter, Vimeo etc

## [Grub4K](https://github.com/Grub4K)
### [Grub4K](https://github.com/Grub4K)

[![gh-sponsor](https://img.shields.io/badge/_-Github-white.svg?logo=github&labelColor=555555&style=for-the-badge)](https://github.com/sponsors/Grub4K) [![ko-fi](https://img.shields.io/badge/_-Ko--fi-red.svg?logo=kofi&labelColor=555555&style=for-the-badge)](https://ko-fi.com/Grub4K)

@@ -63,8 +40,48 @@ You can also find lists of all [contributors of yt-dlp](CONTRIBUTORS) and [autho
* Improved/fixed/added Bundestag, crunchyroll, pr0gramm, Twitter, WrestleUniverse etc

## [sepro](https://github.com/seproDev)
### [sepro](https://github.com/seproDev)

* UX improvements: Warn when ffmpeg is missing, warn when double-clicking exe
* Code cleanup: Remove dead extractors, mark extractors as broken, enable/apply ruff rules
* Improved/fixed/added ArdMediathek, DRTV, Floatplane, MagentaMusik, Naver, Nebula, OnDemandKorea, Vbox7 etc

## Inactive Core Maintainers

### [pukkandan](https://github.com/pukkandan)

[![ko-fi](https://img.shields.io/badge/_-Ko--fi-red.svg?logo=kofi&labelColor=555555&style=for-the-badge)](https://ko-fi.com/pukkandan)
[![gh-sponsor](https://img.shields.io/badge/_-Github-white.svg?logo=github&labelColor=555555&style=for-the-badge)](https://github.com/sponsors/pukkandan)

* Founder of the fork
* Lead Maintainer from 2021-2024

### [shirt](https://github.com/shirt-dev)

[![ko-fi](https://img.shields.io/badge/_-Ko--fi-red.svg?logo=kofi&labelColor=555555&style=for-the-badge)](https://ko-fi.com/shirt)

* Multithreading (`-N`) and aria2c support for fragment downloads
* Support for media initialization and discontinuity in HLS
* The self-updater (`-U`)

### [Ashish0804](https://github.com/Ashish0804)

[![ko-fi](https://img.shields.io/badge/_-Ko--fi-red.svg?logo=kofi&labelColor=555555&style=for-the-badge)](https://ko-fi.com/ashish0804)

* Added support for new websites BiliIntl, DiscoveryPlusIndia, OlympicsReplay, PlanetMarathi, ShemarooMe, Utreon, Zee5 etc
* Added playlist/series downloads for Hotstar, ParamountPlus, Rumble, SonyLIV, Trovo, TubiTv, Voot etc
* Improved/fixed support for HiDive, HotStar, Hungama, LBRY, LinkedInLearning, Mxplayer, SonyLiv, TV2, Vimeo, VLive etc

## Triage Maintainers

Triage Maintainers are frequent contributors who can manage issues and pull requests.

- [gamer191](https://github.com/gamer191)
- [garret1317](https://github.com/garret1317)
- [pzhlkj6612](https://github.com/pzhlkj6612)
- [DTrombett](https://github.com/dtrombett)
- [doe1080](https://github.com/doe1080)
- [grqz](https://github.com/grqz)
Makefile (109 changes)

@@ -1,4 +1,5 @@
all: lazy-extractors yt-dlp doc pypi-files
all-extra: lazy-extractors yt-dlp-extra doc pypi-files
clean: clean-test clean-dist
clean-all: clean clean-cache
completions: completion-bash completion-fish completion-zsh

@@ -10,12 +11,16 @@ tar: yt-dlp.tar.gz
# intended use: when building a source distribution,
# make pypi-files && python3 -m build -sn .
pypi-files: AUTHORS Changelog.md LICENSE README.md README.txt supportedsites \
	completions yt-dlp.1 pyproject.toml setup.cfg devscripts/* test/*
	completions yt-dlp.1 pyproject.toml devscripts/* test/*

.PHONY: all clean clean-all clean-test clean-dist clean-cache \
	completions completion-bash completion-fish completion-zsh \
	doc issuetemplates supportedsites ot offlinetest codetest test \
	tar pypi-files lazy-extractors install uninstall
	tar pypi-files lazy-extractors install uninstall \
	all-extra yt-dlp-extra current-ejs-version

.IGNORE: current-ejs-version
.SILENT: current-ejs-version

clean-test:
	rm -rf tmp/ *.annotations.xml *.aria2 *.description *.dump *.frag \

@@ -25,7 +30,8 @@ clean-test:
	test/testdata/sigs/player-*.js test/testdata/thumbnails/empty.webp "test/testdata/thumbnails/foo %d bar/foo_%d."*
clean-dist:
	rm -rf yt-dlp.1.temp.md yt-dlp.1 README.txt MANIFEST build/ dist/ .coverage cover/ yt-dlp.tar.gz completions/ \
	yt_dlp/extractor/lazy_extractors.py *.spec CONTRIBUTING.md.tmp yt-dlp yt-dlp.exe yt_dlp.egg-info/ AUTHORS
	yt_dlp/extractor/lazy_extractors.py *.spec CONTRIBUTING.md.tmp yt-dlp yt-dlp.exe yt_dlp.egg-info/ AUTHORS \
	yt-dlp.zip .ejs-* yt_dlp_ejs/
clean-cache:
	find . \( \
	-type d -name ".*_cache" -o -type d -name __pycache__ -o -name "*.pyc" -o -name "*.class" \

@@ -81,28 +87,49 @@ test:
offlinetest: codetest
	$(PYTHON) -m pytest -Werror -m "not download"

CODE_FOLDERS_CMD = find yt_dlp -type f -name '__init__.py' | sed 's,/__init__.py,,' | grep -v '/__' | sort
CODE_FOLDERS != $(CODE_FOLDERS_CMD)
CODE_FOLDERS ?= $(shell $(CODE_FOLDERS_CMD))
CODE_FILES_CMD = for f in $(CODE_FOLDERS) ; do echo "$$f" | sed 's,$$,/*.py,' ; done
CODE_FILES != $(CODE_FILES_CMD)
CODE_FILES ?= $(shell $(CODE_FILES_CMD))
yt-dlp: $(CODE_FILES)
PY_CODE_FOLDERS_CMD = find yt_dlp -type f -name '__init__.py' | sed 's|/__init__\.py||' | grep -v '/__' | sort
PY_CODE_FOLDERS != $(PY_CODE_FOLDERS_CMD)
PY_CODE_FOLDERS ?= $(shell $(PY_CODE_FOLDERS_CMD))

PY_CODE_FILES_CMD = for f in $(PY_CODE_FOLDERS) ; do echo "$$f" | sed 's|$$|/*.py|' ; done
PY_CODE_FILES != $(PY_CODE_FILES_CMD)
PY_CODE_FILES ?= $(shell $(PY_CODE_FILES_CMD))

JS_CODE_FOLDERS_CMD = find yt_dlp -type f -name '*.js' | sed 's|/[^/]\{1,\}\.js$$||' | uniq
JS_CODE_FOLDERS != $(JS_CODE_FOLDERS_CMD)
JS_CODE_FOLDERS ?= $(shell $(JS_CODE_FOLDERS_CMD))

JS_CODE_FILES_CMD = for f in $(JS_CODE_FOLDERS) ; do echo "$$f" | sed 's|$$|/*.js|' ; done
JS_CODE_FILES != $(JS_CODE_FILES_CMD)
JS_CODE_FILES ?= $(shell $(JS_CODE_FILES_CMD))

yt-dlp.zip: $(PY_CODE_FILES) $(JS_CODE_FILES)
	mkdir -p zip
	for d in $(CODE_FOLDERS) ; do \
	for d in $(PY_CODE_FOLDERS) ; do \
		mkdir -p zip/$$d ;\
		cp -pPR $$d/*.py zip/$$d/ ;\
	done
	(cd zip && touch -t 200001010101 $(CODE_FILES))
	mv zip/yt_dlp/__main__.py zip/
	(cd zip && zip -q ../yt-dlp $(CODE_FILES) __main__.py)
	for d in $(JS_CODE_FOLDERS) ; do \
		mkdir -p zip/$$d ;\
		cp -pPR $$d/*.js zip/$$d/ ;\
	done
	(cd zip && touch -t 200001010101 $(PY_CODE_FILES) $(JS_CODE_FILES))
	rm -f zip/yt_dlp/__main__.py
	(cd zip && zip -q ../yt-dlp.zip $(PY_CODE_FILES) $(JS_CODE_FILES))
	rm -rf zip

yt-dlp: yt-dlp.zip
	mkdir -p zip
	cp -pP yt_dlp/__main__.py zip/
	touch -t 200001010101 zip/__main__.py
	(cd zip && zip -q ../yt-dlp.zip __main__.py)
	echo '#!$(PYTHON)' > yt-dlp
	cat yt-dlp.zip >> yt-dlp
	rm yt-dlp.zip
	chmod a+x yt-dlp
	rm -rf zip

README.md: $(CODE_FILES) devscripts/make_readme.py
README.md: $(PY_CODE_FILES) devscripts/make_readme.py
	COLUMNS=80 $(PYTHON) yt_dlp/__main__.py --ignore-config --help | $(PYTHON) devscripts/make_readme.py

CONTRIBUTING.md: README.md devscripts/make_contributing.py

@@ -127,15 +154,15 @@ yt-dlp.1: README.md devscripts/prepare_manpage.py
	pandoc -s -f $(MARKDOWN) -t man yt-dlp.1.temp.md -o yt-dlp.1
	rm -f yt-dlp.1.temp.md

completions/bash/yt-dlp: $(CODE_FILES) devscripts/bash-completion.in
completions/bash/yt-dlp: $(PY_CODE_FILES) devscripts/bash-completion.in
	mkdir -p completions/bash
	$(PYTHON) devscripts/bash-completion.py

completions/zsh/_yt-dlp: $(CODE_FILES) devscripts/zsh-completion.in
completions/zsh/_yt-dlp: $(PY_CODE_FILES) devscripts/zsh-completion.in
	mkdir -p completions/zsh
	$(PYTHON) devscripts/zsh-completion.py

completions/fish/yt-dlp.fish: $(CODE_FILES) devscripts/fish-completion.in
completions/fish/yt-dlp.fish: $(PY_CODE_FILES) devscripts/fish-completion.in
	mkdir -p completions/fish
	$(PYTHON) devscripts/fish-completion.py

@@ -157,9 +184,9 @@ yt-dlp.tar.gz: all
	--exclude '.git' \
	-- \
	README.md supportedsites.md Changelog.md LICENSE \
	CONTRIBUTING.md Collaborators.md CONTRIBUTORS AUTHORS \
	CONTRIBUTING.md Maintainers.md CONTRIBUTORS AUTHORS \
	Makefile yt-dlp.1 README.txt completions .gitignore \
	setup.cfg yt-dlp yt_dlp pyproject.toml devscripts test
	yt-dlp yt_dlp pyproject.toml devscripts test

AUTHORS: Changelog.md
	@if [ -d '.git' ] && command -v git > /dev/null ; then \

@@ -172,3 +199,45 @@ CONTRIBUTORS: Changelog.md
	echo 'Updating $@ from git commit history' ; \
	$(PYTHON) devscripts/make_changelog.py -v -c > /dev/null ; \
	fi

# The following EJS_-prefixed variables are auto-generated by devscripts/update_ejs.py
# DO NOT EDIT!
EJS_VERSION = 0.3.1
EJS_WHEEL_NAME = yt_dlp_ejs-0.3.1-py3-none-any.whl
EJS_WHEEL_HASH = sha256:a6e3548874db7c774388931752bb46c7f4642c044b2a189e56968f3d5ecab622
EJS_PY_FOLDERS = yt_dlp_ejs yt_dlp_ejs/yt yt_dlp_ejs/yt/solver
EJS_PY_FILES = yt_dlp_ejs/__init__.py yt_dlp_ejs/_version.py yt_dlp_ejs/yt/__init__.py yt_dlp_ejs/yt/solver/__init__.py
EJS_JS_FOLDERS = yt_dlp_ejs/yt/solver
EJS_JS_FILES = yt_dlp_ejs/yt/solver/core.min.js yt_dlp_ejs/yt/solver/lib.min.js

yt-dlp-extra: current-ejs-version .ejs-$(EJS_VERSION) $(EJS_PY_FILES) $(EJS_JS_FILES) yt-dlp.zip
	mkdir -p zip
	for d in $(EJS_PY_FOLDERS) ; do \
		mkdir -p zip/$$d ;\
		cp -pPR $$d/*.py zip/$$d/ ;\
	done
	for d in $(EJS_JS_FOLDERS) ; do \
		mkdir -p zip/$$d ;\
		cp -pPR $$d/*.js zip/$$d/ ;\
	done
	(cd zip && touch -t 200001010101 $(EJS_PY_FILES) $(EJS_JS_FILES))
	(cd zip && zip -q ../yt-dlp.zip $(EJS_PY_FILES) $(EJS_JS_FILES))
	cp -pP yt_dlp/__main__.py zip/
	touch -t 200001010101 zip/__main__.py
	(cd zip && zip -q ../yt-dlp.zip __main__.py)
	echo '#!$(PYTHON)' > yt-dlp
	cat yt-dlp.zip >> yt-dlp
	rm yt-dlp.zip
	chmod a+x yt-dlp
	rm -rf zip

.ejs-$(EJS_VERSION):
	@echo Downloading yt-dlp-ejs
	@echo "yt-dlp-ejs==$(EJS_VERSION) --hash $(EJS_WHEEL_HASH)" > .ejs-requirements.txt
	$(PYTHON) -m pip download -d ./build --no-deps --require-hashes -r .ejs-requirements.txt
	unzip -o build/$(EJS_WHEEL_NAME) "yt_dlp_ejs/*"
	@touch .ejs-$(EJS_VERSION)

current-ejs-version:
	rm -rf .ejs-*
	touch .ejs-$$($(PYTHON) -c 'import sys; sys.path = [""]; from yt_dlp_ejs import version; print(version)' 2>/dev/null)
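The new `all-extra`/`yt-dlp-extra` targets above bundle the pinned `yt-dlp-ejs` wheel into the zipimport binary. A minimal usage sketch (assuming GNU make, `zip`, `unzip` and `pip` are available on the build host):

```
# Like `make`, but bakes yt_dlp_ejs into the zipimport `yt-dlp` binary
make all-extra
./yt-dlp --version
```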
README.md (154 changes)

@@ -5,7 +5,7 @@

[![Installation](https://img.shields.io/badge/-Which%20file%20to%20download%3F-white.svg?style=for-the-badge)](#installation "Installation")
[![PyPI](https://img.shields.io/badge/-PyPI-blue.svg?logo=pypi&labelColor=555555&style=for-the-badge)](https://pypi.org/project/yt-dlp "PyPI")
[![Donate](https://img.shields.io/badge/_-Donate-red.svg?logo=githubsponsors&labelColor=555555&style=for-the-badge)](Collaborators.md#collaborators "Donate")
[![Donate](https://img.shields.io/badge/_-Donate-red.svg?logo=githubsponsors&labelColor=555555&style=for-the-badge)](Maintainers.md#maintainers "Donate")
[![Discord](https://img.shields.io/discord/807245652072857610?color=blue&labelColor=555555&label=&logo=discord&style=for-the-badge)](https://discord.gg/H5MNcFW63r "Discord")
[![Supported Sites](https://img.shields.io/badge/-Supported_Sites-brightgreen.svg?style=for-the-badge)](supportedsites.md "Supported Sites")
[![License: Unlicense](https://img.shields.io/badge/-Unlicense-blue.svg?style=for-the-badge)](LICENSE "License")

@@ -105,14 +105,20 @@ File|Description

File|Description
:---|:---
[yt-dlp_linux](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux)|Linux (glibc 2.17+) standalone x86_64 binary
[yt-dlp_linux.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux.zip)|Unpackaged Linux (glibc 2.17+) x86_64 executable (no auto-update)
[yt-dlp_linux_aarch64](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_aarch64)|Linux (glibc 2.17+) standalone aarch64 binary
[yt-dlp_linux_aarch64.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_aarch64.zip)|Unpackaged Linux (glibc 2.17+) aarch64 executable (no auto-update)
[yt-dlp_linux_armv7l.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_armv7l.zip)|Unpackaged Linux (glibc 2.31+) armv7l executable (no auto-update)
[yt-dlp_musllinux](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_musllinux)|Linux (musl 1.2+) standalone x86_64 binary
[yt-dlp_musllinux.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_musllinux.zip)|Unpackaged Linux (musl 1.2+) x86_64 executable (no auto-update)
[yt-dlp_musllinux_aarch64](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_musllinux_aarch64)|Linux (musl 1.2+) standalone aarch64 binary
[yt-dlp_musllinux_aarch64.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_musllinux_aarch64.zip)|Unpackaged Linux (musl 1.2+) aarch64 executable (no auto-update)
[yt-dlp_x86.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_x86.exe)|Windows (Win8+) standalone x86 (32-bit) binary
[yt-dlp_arm64.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_arm64.exe)|Windows (Win10+) standalone arm64 (64-bit) binary
[yt-dlp_linux](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux)|Linux standalone x64 binary
[yt-dlp_linux_armv7l](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_armv7l)|Linux standalone armv7l (32-bit) binary
[yt-dlp_linux_aarch64](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_aarch64)|Linux standalone aarch64 (64-bit) binary
[yt-dlp_win_x86.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win_x86.zip)|Unpackaged Windows (Win8+) x86 (32-bit) executable (no auto-update)
[yt-dlp_arm64.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_arm64.exe)|Windows (Win10+) standalone ARM64 binary
[yt-dlp_win_arm64.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win_arm64.zip)|Unpackaged Windows (Win10+) ARM64 executable (no auto-update)
[yt-dlp_win.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win.zip)|Unpackaged Windows (Win8+) x64 executable (no auto-update)
[yt-dlp_win_x86.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win_x86.zip)|Unpackaged Windows (Win8+) x86 executable (no auto-update)
[yt-dlp_win_arm64.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win_arm64.zip)|Unpackaged Windows (Win10+) arm64 executable (no auto-update)
[yt-dlp_macos.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_macos.zip)|Unpackaged MacOS (10.15+) executable (no auto-update)

#### Misc

@@ -132,6 +138,19 @@ curl -L https://github.com/yt-dlp/yt-dlp/raw/master/public.key | gpg --import
gpg --verify SHA2-256SUMS.sig SHA2-256SUMS
gpg --verify SHA2-512SUMS.sig SHA2-512SUMS
```

#### Licensing

While yt-dlp is licensed under the [Unlicense](LICENSE), many of the release files contain code from other projects with different licenses.

Most notably, the PyInstaller-bundled executables include GPLv3+ licensed code, and as such the combined work is licensed under [GPLv3+](https://www.gnu.org/licenses/gpl-3.0.html).

The zipimport Unix executable (`yt-dlp`) contains [ISC](https://github.com/meriyah/meriyah/blob/main/LICENSE.md) licensed code from [`meriyah`](https://github.com/meriyah/meriyah) and [MIT](https://github.com/davidbonnet/astring/blob/main/LICENSE) licensed code from [`astring`](https://github.com/davidbonnet/astring).

See [THIRD_PARTY_LICENSES.txt](THIRD_PARTY_LICENSES.txt) for more details.

The git repository, the source tarball (`yt-dlp.tar.gz`), the PyPI source distribution and the PyPI built distribution (wheel) only contain code licensed under the [Unlicense](LICENSE).

<!-- MANPAGE: END EXCLUDED SECTION -->

**Note**: The manpages, shell completion (autocomplete) files etc. are available inside the [source tarball](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp.tar.gz)

@@ -170,21 +189,21 @@ Example usage:
yt-dlp --update-to nightly

# To install nightly with pip:
python3 -m pip install -U --pre "yt-dlp[default]"
python -m pip install -U --pre "yt-dlp[default]"
```

When running a yt-dlp version that is older than 90 days, you will see a warning message suggesting to update to the latest version.
You can suppress this warning by adding `--no-update` to your command or configuration file.

## DEPENDENCIES
Python versions 3.9+ (CPython) and 3.11+ (PyPy) are supported. Other versions and implementations may or may not work correctly.
Python versions 3.10+ (CPython) and 3.11+ (PyPy) are supported. Other versions and implementations may or may not work correctly.

<!-- Python 3.5+ uses VC++14 and it is already embedded in the binary created
<!x-- https://www.microsoft.com/en-us/download/details.aspx?id=26999 --x>
On Windows, [Microsoft Visual C++ 2010 SP1 Redistributable Package (x86)](https://download.microsoft.com/download/1/6/5/165255E7-1014-4D0A-B094-B6A430A6BFFC/vcredist_x86.exe) is also necessary to run yt-dlp. You probably already have this, but if the executable throws an error due to missing `MSVCR100.dll` you need to install it manually.
-->

While all the other dependencies are optional, `ffmpeg` and `ffprobe` are highly recommended
While all the other dependencies are optional, `ffmpeg`, `ffprobe`, `yt-dlp-ejs` and a JavaScript runtime are highly recommended

### Strongly recommended

@@ -194,6 +213,10 @@ While all the other dependencies are optional, `ffmpeg` and `ffprobe` are highly

**Important**: What you need is ffmpeg *binary*, **NOT** [the Python package of the same name](https://pypi.org/project/ffmpeg)

* [**yt-dlp-ejs**](https://github.com/yt-dlp/ejs) - Required for deciphering YouTube n/sig values. Licensed under [Unlicense](https://github.com/yt-dlp/ejs/blob/main/LICENSE), bundles [MIT](https://github.com/davidbonnet/astring/blob/main/LICENSE) and [ISC](https://github.com/meriyah/meriyah/blob/main/LICENSE.md) components.

A JavaScript runtime like [**deno**](https://deno.land) (recommended), [**node.js**](https://nodejs.org), [**bun**](https://bun.sh), or [**QuickJS**](https://bellard.org/quickjs/) is also required to run yt-dlp-ejs. See [the wiki](https://github.com/yt-dlp/yt-dlp/wiki/EJS).

### Networking
* [**certifi**](https://github.com/certifi/python-certifi)\* - Provides Mozilla's root certificate bundle. Licensed under [MPLv2](https://github.com/certifi/python-certifi/blob/master/LICENSE)
* [**brotli**](https://github.com/google/brotli)\* or [**brotlicffi**](https://github.com/python-hyper/brotlicffi) - [Brotli](https://en.wikipedia.org/wiki/Brotli) content encoding support. Both licensed under MIT <sup>[1](https://github.com/google/brotli/blob/master/LICENSE) [2](https://github.com/python-hyper/brotlicffi/blob/master/LICENSE) </sup>

@@ -206,7 +229,7 @@ The following provide support for impersonating browser requests. This may be re

* [**curl_cffi**](https://github.com/lexiforest/curl_cffi) (recommended) - Python binding for [curl-impersonate](https://github.com/lexiforest/curl-impersonate). Provides impersonation targets for Chrome, Edge and Safari. Licensed under [MIT](https://github.com/lexiforest/curl_cffi/blob/main/LICENSE)
    * Can be installed with the `curl-cffi` group, e.g. `pip install "yt-dlp[default,curl-cffi]"`
    * Currently included in `yt-dlp.exe`, `yt-dlp_linux` and `yt-dlp_macos` builds
    * Currently included in most builds *except* `yt-dlp` (Unix zipimport binary), `yt-dlp_x86` (Windows 32-bit) and `yt-dlp_musllinux_aarch64`

### Metadata

@@ -218,14 +241,12 @@ The following provide support for impersonating browser requests. This may be re
### Misc

* [**pycryptodomex**](https://github.com/Legrandin/pycryptodome)\* - For decrypting AES-128 HLS streams and various other data. Licensed under [BSD-2-Clause](https://github.com/Legrandin/pycryptodome/blob/master/LICENSE.rst)
* [**phantomjs**](https://github.com/ariya/phantomjs) - Used in extractors where javascript needs to be run. Licensed under [BSD-3-Clause](https://github.com/ariya/phantomjs/blob/master/LICENSE.BSD)
* [**phantomjs**](https://github.com/ariya/phantomjs) - Used in some extractors where JavaScript needs to be run. No longer used for YouTube. To be deprecated in the near future. Licensed under [BSD-3-Clause](https://github.com/ariya/phantomjs/blob/master/LICENSE.BSD)
* [**secretstorage**](https://github.com/mitya57/secretstorage)\* - For `--cookies-from-browser` to access the **Gnome** keyring while decrypting cookies of **Chromium**-based browsers on **Linux**. Licensed under [BSD-3-Clause](https://github.com/mitya57/secretstorage/blob/master/LICENSE)
* Any external downloader that you want to use with `--downloader`

### Deprecated

* [**avconv** and **avprobe**](https://www.libav.org) - Now **deprecated** alternative to ffmpeg. License [depends on the build](https://libav.org/legal)
* [**sponskrub**](https://github.com/faissaloo/SponSkrub) - For using the now **deprecated** [sponskrub options](#sponskrub-options). Licensed under [GPLv3+](https://github.com/faissaloo/SponSkrub/blob/master/LICENCE.md)
* [**rtmpdump**](http://rtmpdump.mplayerhq.hu) - For downloading `rtmp` streams. ffmpeg can be used instead with `--downloader ffmpeg`. Licensed under [GPLv2+](http://rtmpdump.mplayerhq.hu)
* [**mplayer**](http://mplayerhq.hu/design7/info.html) or [**mpv**](https://mpv.io) - For downloading `rstp`/`mms` streams. ffmpeg can be used instead with `--downloader ffmpeg`. Licensed under [GPLv2+](https://github.com/mpv-player/mpv/blob/master/Copyright)

@@ -244,12 +265,12 @@ To build the standalone executable, you must have Python and `pyinstaller` (plus
You can run the following commands:

```
python3 devscripts/install_deps.py --include pyinstaller
python3 devscripts/make_lazy_extractors.py
python3 -m bundle.pyinstaller
python devscripts/install_deps.py --include-group pyinstaller
python devscripts/make_lazy_extractors.py
python -m bundle.pyinstaller
```

On some systems, you may need to use `py` or `python` instead of `python3`.
On some systems, you may need to use `py` or `python3` instead of `python`.

`python -m bundle.pyinstaller` accepts any arguments that can be passed to `pyinstaller`, such as `--onefile/-F` or `--onedir/-D`, which is further [documented here](https://pyinstaller.org/en/stable/usage.html#what-to-generate).

@@ -258,7 +279,7 @@ On some systems, you may need to use `py` or `python` instead of `python3`.
**Important**: Running `pyinstaller` directly **instead of** using `python -m bundle.pyinstaller` is **not** officially supported. This may or may not work correctly.

### Platform-independent Binary (UNIX)
You will need the build tools `python` (3.9+), `zip`, `make` (GNU), `pandoc`\* and `pytest`\*.
You will need the build tools `python` (3.10+), `zip`, `make` (GNU), `pandoc`\* and `pytest`\*.

After installing these, simply run `make`.

@@ -305,7 +326,6 @@ Tip: Use `CTRL`+`F` (or `Command`+`F`) to search by keywords
                                    playlist (default)
    --abort-on-error                Abort downloading of further videos if an
                                    error occurs (Alias: --no-ignore-errors)
    --dump-user-agent               Display the current user-agent and exit
    --list-extractors               List all supported extractors and exit
    --extractor-descriptions        Output descriptions of all supported
                                    extractors and exit

@@ -340,7 +360,7 @@ Tip: Use `CTRL`+`F` (or `Command`+`F`) to search by keywords
                                    containing directory ("-" for stdin). Can be
                                    used multiple times and inside other
                                    configuration files
    --plugin-dirs PATH              Path to an additional directory to search
    --plugin-dirs DIR               Path to an additional directory to search
                                    for plugins. This option can be used
                                    multiple times to add multiple directories.
                                    Use "default" to search the default plugin

@@ -348,6 +368,37 @@ Tip: Use `CTRL`+`F` (or `Command`+`F`) to search by keywords
    --no-plugin-dirs                Clear plugin directories to search,
                                    including defaults and those provided by
                                    previous --plugin-dirs
    --js-runtimes RUNTIME[:PATH]    Additional JavaScript runtime to enable,
                                    with an optional location for the runtime
                                    (either the path to the binary or its
                                    containing directory). This option can be
                                    used multiple times to enable multiple
                                    runtimes. Supported runtimes are (in order
                                    of priority, from highest to lowest): deno,
                                    node, quickjs, bun. Only "deno" is enabled
                                    by default. The highest priority runtime
                                    that is both enabled and available will be
                                    used. In order to use a lower priority
                                    runtime when "deno" is available, --no-js-
                                    runtimes needs to be passed before enabling
                                    other runtimes
    --no-js-runtimes                Clear JavaScript runtimes to enable,
                                    including defaults and those provided by
                                    previous --js-runtimes
    --remote-components COMPONENT   Remote components to allow yt-dlp to fetch
                                    when required. This option is currently not
                                    needed if you are using an official
                                    executable or have the requisite version of
                                    the yt-dlp-ejs package installed. You can
                                    use this option multiple times to allow
                                    multiple components. Supported values:
                                    ejs:npm (external JavaScript components from
                                    npm), ejs:github (external JavaScript
                                    components from yt-dlp-ejs GitHub). By
                                    default, no remote components are allowed
    --no-remote-components          Disallow fetching of all remote components,
                                    including any previously allowed by
                                    --remote-components or defaults.
    --flat-playlist                 Do not extract a playlist's URL result
                                    entries; some entry metadata may be missing
                                    and downloading may be bypassed
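A sketch of how the new runtime and remote-component flags combine on the command line (`URL` is a placeholder; flag semantics are as described in the help text above):

```
# Prefer node over the default deno runtime
yt-dlp --no-js-runtimes --js-runtimes node URL

# Allow fetching the EJS components from the yt-dlp-ejs GitHub when not installed locally
yt-dlp --remote-components ejs:github URL
```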
@@ -556,8 +607,6 @@ Tip: Use `CTRL`+`F` (or `Command`+`F`) to search by keywords
                                    --playlist-random and --playlist-reverse
    --no-lazy-playlist              Process videos in the playlist only after
                                    the entire playlist is parsed (default)
    --xattr-set-filesize            Set file xattribute ytdl.filesize with
                                    expected file size
    --hls-use-mpegts                Use the mpegts container for HLS videos;
                                    allowing some players to play the video
                                    while downloading, and reducing the chance

@@ -581,9 +630,9 @@ Tip: Use `CTRL`+`F` (or `Command`+`F`) to search by keywords
                                    use (optionally) prefixed by the protocols
                                    (http, ftp, m3u8, dash, rstp, rtmp, mms) to
                                    use it for. Currently supports native,
                                    aria2c, avconv, axel, curl, ffmpeg, httpie,
                                    wget. You can use this option multiple times
                                    to set different downloaders for different
                                    aria2c, axel, curl, ffmpeg, httpie, wget.
                                    You can use this option multiple times to
                                    set different downloaders for different
                                    protocols. E.g. --downloader aria2c
                                    --downloader "dash,m3u8:native" will use
                                    aria2c for http/ftp downloads, and the

@@ -1067,11 +1116,12 @@ Make chapter entries for, or remove various segments (sponsor,
                                    for, separated by commas. Available
                                    categories are sponsor, intro, outro,
                                    selfpromo, preview, filler, interaction,
                                    music_offtopic, poi_highlight, chapter, all
                                    and default (=all). You can prefix the
                                    category with a "-" to exclude it. See [1]
                                    for descriptions of the categories. E.g.
                                    --sponsorblock-mark all,-preview
                                    music_offtopic, hook, poi_highlight,
                                    chapter, all and default (=all). You can
                                    prefix the category with a "-" to exclude
                                    it. See [1] for descriptions of the
                                    categories. E.g. --sponsorblock-mark
                                    all,-preview
                                    [1] https://wiki.sponsor.ajay.app/w/Segment_Categories
    --sponsorblock-remove CATS      SponsorBlock categories to be removed from
                                    the video file, separated by commas. If a

@@ -1136,7 +1186,7 @@ Predefined aliases for convenience and ease of use. Note that future

You can configure yt-dlp by placing any supported command line option in a configuration file. The configuration is loaded from the following locations:

1. **Main Configuration**:
    * The file given to `--config-location`
    * The file given to `--config-locations`
1. **Portable Configuration**: (Recommended for portable installations)
    * If using a binary, `yt-dlp.conf` in the same directory as the binary
    * If running from source-code, `yt-dlp.conf` in the parent directory of `yt_dlp`
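For example, pointing yt-dlp at a custom configuration file now uses the pluralized flag (a sketch; the path and `URL` are placeholders):

```
yt-dlp --config-locations /path/to/yt-dlp.conf URL
```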
@@ -1218,7 +1268,7 @@ yt-dlp --netrc-cmd 'gpg --decrypt ~/.authinfo.gpg' 'https://www.youtube.com/watc

### Notes about environment variables
* Environment variables are normally specified as `${VARIABLE}`/`$VARIABLE` on UNIX and `%VARIABLE%` on Windows; but is always shown as `${VARIABLE}` in this documentation
* yt-dlp also allows using UNIX-style variables on Windows for path-like options; e.g. `--output`, `--config-location`
* yt-dlp also allows using UNIX-style variables on Windows for path-like options; e.g. `--output`, `--config-locations`
* If unset, `${XDG_CONFIG_HOME}` defaults to `~/.config` and `${XDG_CACHE_HOME}` to `~/.cache`
* On Windows, `~` points to `${HOME}` if present; or, `${USERPROFILE}` or `${HOMEDRIVE}${HOMEPATH}` otherwise
* On Windows, `${USERPROFILE}` generally points to `C:\Users\<user name>` and `${APPDATA}` to `${USERPROFILE}\AppData\Roaming`
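As a sketch of the note above, a UNIX-style variable in a path-like option works even on Windows (`URL` is a placeholder):

```
yt-dlp --output "${USERPROFILE}/Videos/%(title)s.%(ext)s" URL
```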
@@ -1802,11 +1852,12 @@ The following extractors use this feature:
#### youtube
* `lang`: Prefer translated metadata (`title`, `description` etc) of this language code (case-sensitive). By default, the video primary language metadata is preferred, with a fallback to `en` translated. See [youtube/_base.py](https://github.com/yt-dlp/yt-dlp/blob/415b4c9f955b1a0391204bd24a7132590e7b3bdb/yt_dlp/extractor/youtube/_base.py#L402-L409) for the list of supported content language codes
* `skip`: One or more of `hls`, `dash` or `translated_subs` to skip extraction of the m3u8 manifests, dash manifests and [auto-translated subtitles](https://github.com/yt-dlp/yt-dlp/issues/4090#issuecomment-1158102032) respectively
* `player_client`: Clients to extract video data from. The currently available clients are `web`, `web_safari`, `web_embedded`, `web_music`, `web_creator`, `mweb`, `ios`, `android`, `android_vr`, `tv`, `tv_simply` and `tv_embedded`. By default, `tv_simply,tv,web` is used, but `tv,web_safari,web` is used when authenticating with cookies and `tv,web_creator,web` is used with premium accounts. The `web_music` client is added for `music.youtube.com` URLs when logged-in cookies are used. The `web_embedded` client is added for age-restricted videos but only works if the video is embeddable. The `tv_embedded` and `web_creator` clients are added for age-restricted videos if account age-verification is required. Some clients, such as `web` and `web_music`, require a `po_token` for their formats to be downloadable. Some clients, such as `web_creator`, will only work with authentication. Not all clients support authentication via cookies. You can use `default` for the default clients, or you can use `all` for all clients (not recommended). You can prefix a client with `-` to exclude it, e.g. `youtube:player_client=default,-ios`
* `player_client`: Clients to extract video data from. The currently available clients are `web`, `web_safari`, `web_embedded`, `web_music`, `web_creator`, `mweb`, `ios`, `android`, `android_sdkless`, `android_vr`, `tv`, `tv_simply`, `tv_downgraded`, and `tv_embedded`. By default, `tv,android_sdkless,web` is used. If no JavaScript runtime is available, then `android_sdkless,web_safari,web` is used. If logged-in cookies are passed to yt-dlp, then `tv_downgraded,web_safari,web` is used for free accounts and `tv_downgraded,web_creator,web` is used for premium accounts. The `web_music` client is added for `music.youtube.com` URLs when logged-in cookies are used. The `web_embedded` client is added for age-restricted videos but only works if the video is embeddable. The `tv_embedded` and `web_creator` clients are added for age-restricted videos if account age-verification is required. Some clients, such as `web` and `web_music`, require a `po_token` for their formats to be downloadable. Some clients, such as `web_creator`, will only work with authentication. Not all clients support authentication via cookies. You can use `default` for the default clients, or you can use `all` for all clients (not recommended). You can prefix a client with `-` to exclude it, e.g. `youtube:player_client=default,-ios`
* `player_skip`: Skip some network requests that are generally needed for robust extraction. One or more of `configs` (skip client configs), `webpage` (skip initial webpage), `js` (skip js player), `initial_data` (skip initial data/next ep request). While these options can help reduce the number of requests needed or avoid some rate-limiting, they could cause issues such as missing formats or metadata. See [#860](https://github.com/yt-dlp/yt-dlp/pull/860) and [#12826](https://github.com/yt-dlp/yt-dlp/issues/12826) for more details
* `webpage_skip`: Skip extraction of embedded webpage data. One or both of `player_response`, `initial_data`. These options are for testing purposes and don't skip any network requests
* `player_params`: YouTube player parameters to use for player requests. Will overwrite any default ones set by yt-dlp.
* `player_js_variant`: The player javascript variant to use for signature and nsig deciphering. The known variants are: `main`, `tce`, `tv`, `tv_es6`, `phone`, `tablet`. The default is `main`, and the others are for debugging purposes. You can use `actual` to go with what is prescribed by the site
* `player_js_variant`: The player javascript variant to use for n/sig deciphering. The known variants are: `main`, `tcc`, `tce`, `es5`, `es6`, `tv`, `tv_es6`, `phone`, `tablet`. The default is `main`, and the others are for debugging purposes. You can use `actual` to go with what is prescribed by the site
* `player_js_version`: The player javascript version to use for n/sig deciphering, in the format of `signature_timestamp@hash` (e.g. `20348@0004de42`). The default is to use what is prescribed by the site, and can be selected with `actual`
* `comment_sort`: `top` or `new` (default) - choose comment sorting mode (on YouTube's side)
* `max_comments`: Limit the amount of comments to gather. Comma-separated list of integers representing `max-comments,max-parents,max-replies,max-replies-per-thread`. Default is `all,all,all,all`
    * E.g. `all,all,1000,10` will get a maximum of 1000 replies total, with up to 10 replies per thread. `1000,all,100` will get a maximum of 1000 comments, with a maximum of 100 replies total

@@ -1819,7 +1870,11 @@ The following extractors use this feature:
* `po_token`: Proof of Origin (PO) Token(s) to use. Comma seperated list of PO Tokens in the format `CLIENT.CONTEXT+PO_TOKEN`, e.g. `youtube:po_token=web.gvs+XXX,web.player=XXX,web_safari.gvs+YYY`. Context can be any of `gvs` (Google Video Server URLs), `player` (Innertube player request) or `subs` (Subtitles)
* `pot_trace`: Enable debug logging for PO Token fetching. Either `true` or `false` (default)
* `fetch_pot`: Policy to use for fetching a PO Token from providers. One of `always` (always try fetch a PO Token regardless if the client requires one for the given context), `never` (never fetch a PO Token), or `auto` (default; only fetch a PO Token if the client requires one for the given context)
* `playback_wait`: Duration (in seconds) to wait inbetween the extraction and download stages in order to ensure the formats are available. The default is `6` seconds
* `jsc_trace`: Enable debug logging for JS Challenge fetching. Either `true` or `false` (default)
* `use_ad_playback_context`: Skip preroll ads to eliminate the mandatory wait period before download. Do NOT use this when passing premium account cookies to yt-dlp, as it will result in a loss of premium formats. Only effective with the `web`, `web_safari`, `web_music` and `mweb` player clients. Either `true` or `false` (default)

#### youtube-ejs
* `jitless`: Run suported Javascript engines in JIT-less mode. Supported runtimes are `deno`, `node` and `bun`. Provides better security at the cost of performance/speed. Do note that `node` and `bun` are still considered unsecure. Either `true` or `false` (default)

#### youtubepot-webpo
* `bind_to_visitor_id`: Whether to use the Visitor ID instead of Visitor Data for caching WebPO tokens. Either `true` (default) or `false`
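A sketch combining extractor-args from the sections above (`URL` is a placeholder):

```
# Use the default clients minus ios, and run the EJS solver in JIT-less mode
yt-dlp --extractor-args "youtube:player_client=default,-ios" \
       --extractor-args "youtube-ejs:jitless=true" URL
```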
@@ -2201,7 +2256,6 @@ with yt_dlp.YoutubeDL(ydl_opts) as ydl:
* Fix for [n-sig based throttling](https://github.com/ytdl-org/youtube-dl/issues/29326) **\***
* Download livestreams from the start using `--live-from-start` (*experimental*)
* Channel URLs download all uploads of the channel, including shorts and live
* Support for [logging in with OAuth](https://github.com/yt-dlp/yt-dlp/wiki/Extractors#logging-in-with-oauth)

* **Cookies from browser**: Cookies can be automatically extracted from all major web browsers using `--cookies-from-browser BROWSER[+KEYRING][:PROFILE][::CONTAINER]`

@@ -2243,7 +2297,7 @@ Features marked with a **\*** have been back-ported to youtube-dl

Some of yt-dlp's default options are different from that of youtube-dl and youtube-dlc:

* yt-dlp supports only [Python 3.9+](## "Windows 8"), and will remove support for more versions as they [become EOL](https://devguide.python.org/versions/#python-release-cycle); while [youtube-dl still supports Python 2.6+ and 3.2+](https://github.com/ytdl-org/youtube-dl/issues/30568#issue-1118238743)
* yt-dlp supports only [Python 3.10+](## "Windows 8"), and will remove support for more versions as they [become EOL](https://devguide.python.org/versions/#python-release-cycle); while [youtube-dl still supports Python 2.6+ and 3.2+](https://github.com/ytdl-org/youtube-dl/issues/30568#issue-1118238743)
* The options `--auto-number` (`-A`), `--title` (`-t`) and `--literal` (`-l`), no longer work. See [removed options](#Removed) for details
* `avconv` is not supported as an alternative to `ffmpeg`
* yt-dlp stores config files in slightly different locations to youtube-dl. See [CONFIGURATION](#configuration) for a list of correct locations

@@ -2345,11 +2399,7 @@ While these options still work, their use is not recommended since there are oth
    --hls-prefer-native             --downloader "m3u8:native"
    --hls-prefer-ffmpeg             --downloader "m3u8:ffmpeg"
    --list-formats-old              --compat-options list-formats (Alias: --no-list-formats-as-table)
    --list-formats-as-table         --compat-options -list-formats [Default] (Alias: --no-list-formats-old)
    --youtube-skip-dash-manifest    --extractor-args "youtube:skip=dash" (Alias: --no-youtube-include-dash-manifest)
    --youtube-skip-hls-manifest     --extractor-args "youtube:skip=hls" (Alias: --no-youtube-include-hls-manifest)
    --youtube-include-dash-manifest Default (Alias: --no-youtube-skip-dash-manifest)
    --youtube-include-hls-manifest  Default (Alias: --no-youtube-skip-hls-manifest)
    --list-formats-as-table         --compat-options -list-formats [Default]
    --geo-bypass                    --xff "default"
    --no-geo-bypass                 --xff "never"
    --geo-bypass-country CODE       --xff CODE

@@ -2360,18 +2410,13 @@ These options are not intended to be used by the end-user

    --test                          Download only part of video for testing extractors
    --load-pages                    Load pages dumped by --write-pages
    --youtube-print-sig-code        For testing youtube signatures
    --allow-unplayable-formats      List unplayable formats also
    --no-allow-unplayable-formats   Default

#### Old aliases
These are aliases that are no longer documented for various reasons

    --avconv-location               --ffmpeg-location
    --clean-infojson                --clean-info-json
    --cn-verification-proxy URL     --geo-verification-proxy URL
    --dump-headers                  --print-traffic
    --dump-intermediate-pages       --dump-pages
    --force-write-download-archive  --force-write-archive
    --no-clean-infojson             --no-clean-info-json
    --no-split-tracks               --no-split-chapters

@@ -2385,7 +2430,7 @@ These are aliases that are no longer documented for various reasons
    --yes-overwrites                --force-overwrites

#### Sponskrub Options
Support for [SponSkrub](https://github.com/faissaloo/SponSkrub) has been deprecated in favor of the `--sponsorblock` options
Support for [SponSkrub](https://github.com/faissaloo/SponSkrub) has been removed in favor of the `--sponsorblock` options

    --sponskrub                     --sponsorblock-mark all
    --no-sponskrub                  --no-sponsorblock

@@ -2407,6 +2452,17 @@ These options may no longer work as intended
    --no-include-ads                Default
    --write-annotations             No supported site has annotations now
    --no-write-annotations          Default
    --avconv-location               Removed alias for --ffmpeg-location
    --cn-verification-proxy URL     Removed alias for --geo-verification-proxy URL
    --dump-headers                  Removed alias for --print-traffic
    --dump-intermediate-pages       Removed alias for --dump-pages
    --youtube-skip-dash-manifest    Removed alias for --extractor-args "youtube:skip=dash" (Alias: --no-youtube-include-dash-manifest)
    --youtube-skip-hls-manifest     Removed alias for --extractor-args "youtube:skip=hls" (Alias: --no-youtube-include-hls-manifest)
    --youtube-include-dash-manifest Default (Alias: --no-youtube-skip-dash-manifest)
    --youtube-include-hls-manifest  Default (Alias: --no-youtube-skip-hls-manifest)
    --youtube-print-sig-code        Removed testing functionality
    --dump-user-agent               No longer supported
    --xattr-set-filesize            No longer supported
    --compat-options seperate-video-versions No longer needed
    --compat-options no-youtube-prefer-utc-upload-date No longer supported
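A sketch of migrating from removed options to their current equivalents, per the tables above (`URL` is a placeholder):

```
# formerly: --geo-bypass-country US
yt-dlp --xff "US" URL

# formerly: --youtube-skip-dash-manifest
yt-dlp --extractor-args "youtube:skip=dash" URL
```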
THIRD_PARTY_LICENSES.txt (new file, 4473 lines; diff suppressed because it is too large)
@@ -1,10 +1,178 @@
services:
  static:
    build: static

  linux_x86_64:
    build:
      context: linux
      target: build
      platforms:
        - "linux/amd64"
      args:
        BUILDIMAGE: ghcr.io/yt-dlp/manylinux2014_x86_64-shared:latest
    environment:
      channel: ${channel}
      origin: ${origin}
      version: ${version}
      EXE_NAME: ${EXE_NAME:?}
      CHANNEL: ${CHANNEL:?}
      ORIGIN: ${ORIGIN:?}
      VERSION:
      PYTHON_VERSION:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ~/build:/build
      - ../..:/yt-dlp

  linux_x86_64_verify:
    build:
      context: linux
      target: verify
      platforms:
        - "linux/amd64"
      args:
        VERIFYIMAGE: quay.io/pypa/manylinux2014_x86_64:latest
    environment:
      EXE_NAME: ${EXE_NAME:?}
      UPDATE_TO:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../../dist:/build

  linux_aarch64:
    build:
      context: linux
      target: build
      platforms:
        - "linux/arm64"
      args:
        BUILDIMAGE: ghcr.io/yt-dlp/manylinux2014_aarch64-shared:latest
    environment:
      EXE_NAME: ${EXE_NAME:?}
      CHANNEL: ${CHANNEL:?}
      ORIGIN: ${ORIGIN:?}
      VERSION:
      PYTHON_VERSION:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../..:/yt-dlp

  linux_aarch64_verify:
    build:
      context: linux
      target: verify
      platforms:
        - "linux/arm64"
      args:
        VERIFYIMAGE: quay.io/pypa/manylinux2014_aarch64:latest
    environment:
      EXE_NAME: ${EXE_NAME:?}
      UPDATE_TO:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../../dist:/build

  linux_armv7l:
    build:
      context: linux
      target: build
      platforms:
        - "linux/arm/v7"
      args:
        BUILDIMAGE: ghcr.io/yt-dlp/manylinux_2_31_armv7l-shared:latest
    environment:
      EXE_NAME: ${EXE_NAME:?}
      CHANNEL: ${CHANNEL:?}
      ORIGIN: ${ORIGIN:?}
      VERSION:
      PYTHON_VERSION:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../..:/yt-dlp
      - ../../venv:/yt-dlp-build-venv

  linux_armv7l_verify:
    build:
      context: linux
      target: verify
      platforms:
        - "linux/arm/v7"
      args:
        VERIFYIMAGE: arm32v7/debian:bullseye
    environment:
      EXE_NAME: ${EXE_NAME:?}
      UPDATE_TO:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../../dist:/build

  musllinux_x86_64:
    build:
      context: linux
      target: build
      platforms:
        - "linux/amd64"
      args:
        BUILDIMAGE: ghcr.io/yt-dlp/musllinux_1_2_x86_64-shared:latest
    environment:
      EXE_NAME: ${EXE_NAME:?}
      CHANNEL: ${CHANNEL:?}
      ORIGIN: ${ORIGIN:?}
      VERSION:
      PYTHON_VERSION:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../..:/yt-dlp

  musllinux_x86_64_verify:
    build:
      context: linux
      target: verify
      platforms:
        - "linux/amd64"
      args:
        VERIFYIMAGE: alpine:3.22
    environment:
      EXE_NAME: ${EXE_NAME:?}
      UPDATE_TO:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../../dist:/build

  musllinux_aarch64:
    build:
      context: linux
      target: build
      platforms:
        - "linux/arm64"
      args:
        BUILDIMAGE: ghcr.io/yt-dlp/musllinux_1_2_aarch64-shared:latest
    environment:
      EXE_NAME: ${EXE_NAME:?}
      CHANNEL: ${CHANNEL:?}
      ORIGIN: ${ORIGIN:?}
      VERSION:
      PYTHON_VERSION:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
      EXCLUDE_CURL_CFFI: "1"
    volumes:
      - ../..:/yt-dlp

  musllinux_aarch64_verify:
    build:
      context: linux
      target: verify
      platforms:
        - "linux/arm64"
      args:
        VERIFYIMAGE: alpine:3.22
    environment:
      EXE_NAME: ${EXE_NAME:?}
      UPDATE_TO:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../../dist:/build
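How these services might be driven locally (a sketch; the release workflow sets these variables itself, and the exact values shown here are assumptions):

```
EXE_NAME=yt-dlp_linux CHANNEL=stable ORIGIN=yt-dlp/yt-dlp \
    docker compose up --build linux_x86_64
# then verify the produced binary
EXE_NAME=yt-dlp_linux docker compose up --build linux_x86_64_verify
```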
bundle/docker/linux/Dockerfile (new file, 16 lines)

@@ -0,0 +1,16 @@
ARG BUILDIMAGE=ghcr.io/yt-dlp/manylinux2014_x86_64-shared:latest
ARG VERIFYIMAGE=alpine:3.22


FROM $BUILDIMAGE AS build

WORKDIR /yt-dlp
COPY build.sh /build.sh
ENTRYPOINT ["/build.sh"]


FROM $VERIFYIMAGE AS verify

WORKDIR /testing
COPY verify.sh /verify.sh
ENTRYPOINT ["/verify.sh"]
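The two stages can also be built directly with `docker build` (a sketch, assuming the build context is the directory containing this Dockerfile and the two scripts):

```
docker build --target build  -t yt-dlp-linux-build  bundle/docker/linux
docker build --target verify -t yt-dlp-linux-verify bundle/docker/linux
```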
bundle/docker/linux/build.sh (new executable file, 48 lines)

@@ -0,0 +1,48 @@
#!/bin/bash
set -exuo pipefail

if [[ -z "${PYTHON_VERSION:-}" ]]; then
    PYTHON_VERSION="3.13"
    echo "Defaulting to using Python ${PYTHON_VERSION}"
fi

function runpy {
    "/opt/shared-cpython-${PYTHON_VERSION}/bin/python${PYTHON_VERSION}" "$@"
}

function venvpy {
    "python${PYTHON_VERSION}" "$@"
}

INCLUDES=(
    --include-group pyinstaller
    --include-group secretstorage
)

if [[ -z "${EXCLUDE_CURL_CFFI:-}" ]]; then
    INCLUDES+=(--include-group curl-cffi)
fi

runpy -m venv /yt-dlp-build-venv
# shellcheck disable=SC1091
source /yt-dlp-build-venv/bin/activate
# Inside the venv we use venvpy instead of runpy
venvpy -m ensurepip --upgrade --default-pip
venvpy -m devscripts.install_deps --only-optional-groups --include-group build
venvpy -m devscripts.install_deps "${INCLUDES[@]}"
venvpy -m devscripts.make_lazy_extractors
venvpy devscripts/update-version.py -c "${CHANNEL}" -r "${ORIGIN}" "${VERSION}"

if [[ -z "${SKIP_ONEDIR_BUILD:-}" ]]; then
    mkdir -p /build
    venvpy -m bundle.pyinstaller --onedir --distpath=/build
    pushd "/build/${EXE_NAME}"
    chmod +x "${EXE_NAME}"
    venvpy -m zipfile -c "/yt-dlp/dist/${EXE_NAME}.zip" ./
    popd
fi

if [[ -z "${SKIP_ONEFILE_BUILD:-}" ]]; then
    venvpy -m bundle.pyinstaller
    chmod +x "./dist/${EXE_NAME}"
fi
51  bundle/docker/linux/verify.sh  Executable file
@@ -0,0 +1,51 @@
#!/bin/sh
set -eu

if [ -n "${SKIP_ONEFILE_BUILD:-}" ]; then
    if [ -n "${SKIP_ONEDIR_BUILD:-}" ]; then
        echo "All executable builds were skipped"
        exit 1
    fi
    echo "Extracting zip to verify onedir build"
    if command -v python3 >/dev/null 2>&1; then
        python3 -m zipfile -e "/build/${EXE_NAME}.zip" ./
    else
        echo "Attempting to install unzip"
        if command -v dnf >/dev/null 2>&1; then
            dnf -y install --allowerasing unzip
        elif command -v yum >/dev/null 2>&1; then
            yum -y install unzip
        elif command -v apt-get >/dev/null 2>&1; then
            DEBIAN_FRONTEND=noninteractive apt-get update -qq
            DEBIAN_FRONTEND=noninteractive apt-get install -qq -y --no-install-recommends unzip
        elif command -v apk >/dev/null 2>&1; then
            apk add --no-cache unzip
        else
            echo "Unsupported image"
            exit 1
        fi
        unzip "/build/${EXE_NAME}.zip" -d ./
    fi
    chmod +x "./${EXE_NAME}"
    "./${EXE_NAME}" -v || true
    "./${EXE_NAME}" --version
    exit 0
fi

echo "Verifying onefile build"
cp "/build/${EXE_NAME}" ./
chmod +x "./${EXE_NAME}"

if [ -z "${UPDATE_TO:-}" ]; then
    "./${EXE_NAME}" -v || true
    "./${EXE_NAME}" --version
    exit 0
fi

cp "./${EXE_NAME}" "./${EXE_NAME}_downgraded"
version="$("./${EXE_NAME}" --version)"
"./${EXE_NAME}_downgraded" -v --update-to "${UPDATE_TO}"
downgraded_version="$("./${EXE_NAME}_downgraded" --version)"
if [ "${version}" = "${downgraded_version}" ]; then
    exit 1
fi
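Correspondingly, a hedged sketch of exercising the verify target against a previously built artifact in dist/ (service and variable names are taken from the compose file above; the UPDATE_TO value is only an example of a valid --update-to target and triggers the downgrade check):

    # Assumed invocation; UPDATE_TO is optional
    EXE_NAME=yt-dlp_musllinux UPDATE_TO=stable@latest \
        docker compose run --rm musllinux_x86_64_verify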
@@ -1,21 +0,0 @@
FROM alpine:3.19 as base

RUN apk --update add --no-cache \
    build-base \
    python3 \
    pipx \
    ;

RUN pipx install pyinstaller
# Requires above step to prepare the shared venv
RUN ~/.local/share/pipx/shared/bin/python -m pip install -U wheel
RUN apk --update add --no-cache \
    scons \
    patchelf \
    binutils \
    ;
RUN pipx install staticx

WORKDIR /yt-dlp
COPY entrypoint.sh /entrypoint.sh
ENTRYPOINT /entrypoint.sh
@@ -1,14 +0,0 @@
#!/bin/ash
set -e

source ~/.local/share/pipx/venvs/pyinstaller/bin/activate
python -m devscripts.install_deps -o --include build
python -m devscripts.install_deps --include secretstorage --include curl-cffi
python -m devscripts.make_lazy_extractors
python devscripts/update-version.py -c "${channel}" -r "${origin}" "${version}"
python -m bundle.pyinstaller
deactivate

source ~/.local/share/pipx/venvs/staticx/bin/activate
staticx /yt-dlp/dist/yt-dlp_linux /build/yt-dlp_linux
deactivate
@@ -13,6 +13,8 @@ from PyInstaller.__main__ import run as run_pyinstaller
 from devscripts.utils import read_version

 OS_NAME, MACHINE, ARCH = sys.platform, platform.machine().lower(), platform.architecture()[0][:2]
+if OS_NAME == 'linux' and platform.libc_ver()[0] != 'glibc':
+    OS_NAME = 'musllinux'
 if MACHINE in ('x86', 'x86_64', 'amd64', 'i386', 'i686'):
     MACHINE = 'x86' if ARCH == '32' else ''

@@ -127,7 +129,6 @@ def windows_set_version(exe, version):
         StringStruct('FileDescription', 'yt-dlp%s' % (MACHINE and f' ({MACHINE})')),
         StringStruct('FileVersion', version),
         StringStruct('InternalName', f'yt-dlp{suffix}'),
-        StringStruct('LegalCopyright', 'pukkandan.ytdlp@gmail.com | UNLICENSE'),
         StringStruct('OriginalFilename', f'yt-dlp{suffix}.exe'),
         StringStruct('ProductName', f'yt-dlp{suffix}'),
         StringStruct(
@@ -293,5 +293,31 @@
"action": "add",
|
||||
"when": "c76ce28e06c816eb5b261dfb6aff6e69dd9b7382",
|
||||
"short": "[priority] **linux_armv7l_exe builds are being discontinued**\nThis release's `yt-dlp_linux_armv7l` binary could be the last one. [Read more](https://github.com/yt-dlp/yt-dlp/issues/13976)"
|
||||
},
|
||||
{
|
||||
"action": "add",
|
||||
"when": "08d78996831bd8e1e3c2592d740c3def00bbf548",
|
||||
"short": "[priority] **Several options have been deprecated**\nIn order to simplify the codebase and reduce maintenance burden, various options have been deprecated. Please remove them from your commands/configurations. [Read more](https://github.com/yt-dlp/yt-dlp/issues/14198)"
|
||||
},
|
||||
{
|
||||
"action": "add",
|
||||
"when": "4e6a693057cfaf1ce1f07b019ed3bfce2bf936f6",
|
||||
"short": "[priority] **The minimum *required* Python version has been raised to 3.10**\nPython 3.9 has reached its end-of-life as of October 2025, and yt-dlp has now removed support for it. [Read more](https://github.com/yt-dlp/yt-dlp/issues/13858)"
|
||||
},
|
||||
{
|
||||
"action": "add",
|
||||
"when": "2c9091e355a7ba5d1edb69796ecdca48199b77fb",
|
||||
"short": "[priority] **A stopgap release with a *TEMPORARY partial* fix for YouTube support**\nSome formats may still be unavailable, especially if cookies are passed to yt-dlp. The ***NEXT*** release, expected very soon, **will require an external JS runtime (e.g. Deno)** in order for YouTube downloads to work properly. [Read more](https://github.com/yt-dlp/yt-dlp/issues/14404)"
|
||||
},
|
||||
{
|
||||
"action": "change",
|
||||
"when": "8636a9bac3bed99984c1e297453660468ecf504b",
|
||||
"short": "Fix 6224a3898821965a7d6a2cb9cc2de40a0fd6e6bc",
|
||||
"authors": ["Grub4K"]
|
||||
},
|
||||
{
|
||||
"action": "add",
|
||||
"when": "6224a3898821965a7d6a2cb9cc2de40a0fd6e6bc",
|
||||
"short": "[priority] **An external JavaScript runtime is now required for full YouTube support**\nyt-dlp now requires users to have an external JavaScript runtime (e.g. Deno) installed in order to solve the JavaScript challenges presented by YouTube. [Read more](https://github.com/yt-dlp/yt-dlp/issues/15012)"
|
||||
}
|
||||
]
|
||||
|
|
329  devscripts/generate_third_party_licenses.py  Normal file
@@ -0,0 +1,329 @@
import hashlib
from dataclasses import dataclass
from pathlib import Path

import requests

DEFAULT_OUTPUT = 'THIRD_PARTY_LICENSES.txt'
CACHE_LOCATION = '.license_cache'
HEADER = '''THIRD-PARTY LICENSES

This file aggregates license texts of third-party components included with the yt-dlp PyInstaller-bundled executables.
yt-dlp itself is licensed under the Unlicense (see LICENSE file).
Source code for bundled third-party components is available from the original projects.
If you cannot obtain it, the maintainers will provide it as per license obligation; maintainer emails are listed in pyproject.toml.'''


@dataclass(frozen=True)
class Dependency:
    name: str
    license_url: str
    project_url: str = ''
    license: str = ''
    comment: str = ''


DEPENDENCIES: list[Dependency] = [
    # Core runtime environment components
    Dependency(
        name='Python',
        license='PSF-2.0',
        license_url='https://raw.githubusercontent.com/python/cpython/refs/heads/main/LICENSE',
        project_url='https://www.python.org/',
    ),
    Dependency(
        name='Microsoft Distributable Code',
        license_url='https://raw.githubusercontent.com/python/cpython/refs/heads/main/PC/crtlicense.txt',
        comment='Only included in Windows builds',
    ),
    Dependency(
        name='bzip2',
        license='bzip2-1.0.6',
        license_url='https://gitlab.com/federicomenaquintero/bzip2/-/raw/master/COPYING',
        project_url='https://sourceware.org/bzip2/',
    ),
    Dependency(
        name='libffi',
        license='MIT',
        license_url='https://raw.githubusercontent.com/libffi/libffi/refs/heads/master/LICENSE',
        project_url='https://sourceware.org/libffi/',
    ),
    Dependency(
        name='OpenSSL 3.0+',
        license='Apache-2.0',
        license_url='https://raw.githubusercontent.com/openssl/openssl/refs/heads/master/LICENSE.txt',
        project_url='https://www.openssl.org/',
    ),
    Dependency(
        name='SQLite',
        license='Public Domain',  # Technically does not need to be included
        license_url='https://sqlite.org/src/raw/e108e1e69ae8e8a59e93c455654b8ac9356a11720d3345df2a4743e9590fb20d?at=LICENSE.md',
        project_url='https://www.sqlite.org/',
    ),
    Dependency(
        name='liblzma',
        license='0BSD',  # Technically does not need to be included
        license_url='https://raw.githubusercontent.com/tukaani-project/xz/refs/heads/master/COPYING',
        project_url='https://tukaani.org/xz/',
    ),
    Dependency(
        name='mpdecimal',
        license='BSD-2-Clause',
        # No official repo URL
        license_url='https://gist.githubusercontent.com/seproDev/9e5dbfc08af35c3f2463e64eb9b27161/raw/61f5a98bc1a4ad7d48b1c793fc3314d4d43c2ab1/mpdecimal_COPYRIGHT.txt',
        project_url='https://www.bytereef.org/mpdecimal/',
    ),
    Dependency(
        name='zlib',
        license='zlib',
        license_url='https://raw.githubusercontent.com/madler/zlib/refs/heads/develop/LICENSE',
        project_url='https://zlib.net/',
    ),
    Dependency(
        name='Expat',
        license='MIT',
        license_url='https://raw.githubusercontent.com/libexpat/libexpat/refs/heads/master/COPYING',
        project_url='https://libexpat.github.io/',
    ),
    Dependency(
        name='ncurses',
        license='X11-distribute-modifications-variant',
        license_url='https://raw.githubusercontent.com/mirror/ncurses/refs/heads/master/COPYING',
        comment='Only included in Linux/macOS builds',
        project_url='https://invisible-island.net/ncurses/',
    ),
    Dependency(
        name='GNU Readline',
        license='GPL-3.0-or-later',
        license_url='https://tiswww.case.edu/php/chet/readline/COPYING',
        comment='Only included in Linux builds',
        project_url='https://www.gnu.org/software/readline/',
    ),
    Dependency(
        name='libstdc++',
        license='GPL-3.0-with-GCC-exception',
        license_url='https://raw.githubusercontent.com/gcc-mirror/gcc/refs/heads/master/COPYING.RUNTIME',
        comment='Only included in Linux builds',
        project_url='https://gcc.gnu.org/onlinedocs/libstdc++/',
    ),
    Dependency(
        name='libgcc',
        license='GPL-3.0-with-GCC-exception',
        license_url='https://raw.githubusercontent.com/gcc-mirror/gcc/refs/heads/master/COPYING.RUNTIME',
        comment='Only included in Linux builds',
        project_url='https://gcc.gnu.org/',
    ),
    Dependency(
        name='libuuid',
        license='BSD-3-Clause',
        license_url='https://git.kernel.org/pub/scm/fs/ext2/e2fsprogs.git/plain/lib/uuid/COPYING',
        comment='Only included in Linux builds',
        project_url='https://git.kernel.org/pub/scm/fs/ext2/e2fsprogs.git/tree/lib/uuid',
    ),
    Dependency(
        name='libintl',
        license='LGPL-2.1-or-later',
        license_url='https://raw.githubusercontent.com/autotools-mirror/gettext/refs/heads/master/gettext-runtime/intl/COPYING.LIB',
        comment='Only included in macOS builds',
        project_url='https://www.gnu.org/software/gettext/',
    ),
    Dependency(
        name='libidn2',
        license='LGPL-3.0-or-later',
        license_url='https://gitlab.com/libidn/libidn2/-/raw/master/COPYING.LESSERv3',
        comment='Only included in macOS builds',
        project_url='https://www.gnu.org/software/libidn/',
    ),
    Dependency(
        name='libidn2 (Unicode character data files)',
        license='Unicode-TOU AND Unicode-DFS-2016',
        license_url='https://gitlab.com/libidn/libidn2/-/raw/master/COPYING.unicode',
        comment='Only included in macOS builds',
        project_url='https://www.gnu.org/software/libidn/',
    ),
    Dependency(
        name='libunistring',
        license='LGPL-3.0-or-later',
        license_url='https://gitweb.git.savannah.gnu.org/gitweb/?p=libunistring.git;a=blob_plain;f=COPYING.LIB;hb=HEAD',
        comment='Only included in macOS builds',
        project_url='https://www.gnu.org/software/libunistring/',
    ),
    Dependency(
        name='librtmp',
        license='LGPL-2.1-or-later',
        # No official repo URL
        license_url='https://gist.githubusercontent.com/seproDev/31d8c691ccddebe37b8b379307cb232d/raw/053408e98547ea8c7d9ba3a80c965f33e163b881/librtmp_COPYING.txt',
        comment='Only included in macOS builds',
        project_url='https://rtmpdump.mplayerhq.hu/',
    ),
    Dependency(
        name='zstd',
        license='BSD-3-Clause',
        license_url='https://raw.githubusercontent.com/facebook/zstd/refs/heads/dev/LICENSE',
        comment='Only included in macOS builds',
        project_url='https://facebook.github.io/zstd/',
    ),

    # Python packages
    Dependency(
        name='brotli',
        license='MIT',
        license_url='https://raw.githubusercontent.com/google/brotli/refs/heads/master/LICENSE',
        project_url='https://brotli.org/',
    ),
    Dependency(
        name='curl_cffi',
        license='MIT',
        license_url='https://raw.githubusercontent.com/lexiforest/curl_cffi/refs/heads/main/LICENSE',
        comment='Not included in `yt-dlp_x86` and `yt-dlp_musllinux_aarch64` builds',
        project_url='https://curl-cffi.readthedocs.io/',
    ),
    # Dependency of curl_cffi
    Dependency(
        name='curl-impersonate',
        license='MIT',
        license_url='https://raw.githubusercontent.com/lexiforest/curl-impersonate/refs/heads/main/LICENSE',
        comment='Not included in `yt-dlp_x86` and `yt-dlp_musllinux_aarch64` builds',
        project_url='https://github.com/lexiforest/curl-impersonate',
    ),
    Dependency(
        name='cffi',
        license='MIT-0',  # Technically does not need to be included
        license_url='https://raw.githubusercontent.com/python-cffi/cffi/refs/heads/main/LICENSE',
        project_url='https://cffi.readthedocs.io/',
    ),
    # Dependency of cffi
    Dependency(
        name='pycparser',
        license='BSD-3-Clause',
        license_url='https://raw.githubusercontent.com/eliben/pycparser/refs/heads/main/LICENSE',
        project_url='https://github.com/eliben/pycparser',
    ),
    Dependency(
        name='mutagen',
        license='GPL-2.0-or-later',
        license_url='https://raw.githubusercontent.com/quodlibet/mutagen/refs/heads/main/COPYING',
        project_url='https://mutagen.readthedocs.io/',
    ),
    Dependency(
        name='PyCryptodome',
        license='Public Domain and BSD-2-Clause',
        license_url='https://raw.githubusercontent.com/Legrandin/pycryptodome/refs/heads/master/LICENSE.rst',
        project_url='https://www.pycryptodome.org/',
    ),
    Dependency(
        name='certifi',
        license='MPL-2.0',
        license_url='https://raw.githubusercontent.com/certifi/python-certifi/refs/heads/master/LICENSE',
        project_url='https://github.com/certifi/python-certifi',
    ),
    Dependency(
        name='requests',
        license='Apache-2.0',
        license_url='https://raw.githubusercontent.com/psf/requests/refs/heads/main/LICENSE',
        project_url='https://requests.readthedocs.io/',
    ),
    # Dependency of requests
    Dependency(
        name='charset-normalizer',
        license='MIT',
        license_url='https://raw.githubusercontent.com/jawah/charset_normalizer/refs/heads/master/LICENSE',
        project_url='https://charset-normalizer.readthedocs.io/',
    ),
    # Dependency of requests
    Dependency(
        name='idna',
        license='BSD-3-Clause',
        license_url='https://raw.githubusercontent.com/kjd/idna/refs/heads/master/LICENSE.md',
        project_url='https://github.com/kjd/idna',
    ),
    Dependency(
        name='urllib3',
        license='MIT',
        license_url='https://raw.githubusercontent.com/urllib3/urllib3/refs/heads/main/LICENSE.txt',
        project_url='https://urllib3.readthedocs.io/',
    ),
    Dependency(
        name='SecretStorage',
        license='BSD-3-Clause',
        license_url='https://raw.githubusercontent.com/mitya57/secretstorage/refs/heads/master/LICENSE',
        comment='Only included in Linux builds',
        project_url='https://secretstorage.readthedocs.io/',
    ),
    # Dependency of SecretStorage
    Dependency(
        name='cryptography',
        license='Apache-2.0',  # Also available as BSD-3-Clause
        license_url='https://raw.githubusercontent.com/pyca/cryptography/refs/heads/main/LICENSE.APACHE',
        comment='Only included in Linux builds',
        project_url='https://cryptography.io/',
    ),
    # Dependency of SecretStorage
    Dependency(
        name='Jeepney',
        license='MIT',
        license_url='https://gitlab.com/takluyver/jeepney/-/raw/master/LICENSE',
        comment='Only included in Linux builds',
        project_url='https://jeepney.readthedocs.io/',
    ),
    Dependency(
        name='websockets',
        license='BSD-3-Clause',
        license_url='https://raw.githubusercontent.com/python-websockets/websockets/refs/heads/main/LICENSE',
        project_url='https://websockets.readthedocs.io/',
    ),
    # Dependencies of yt-dlp-ejs
    Dependency(
        name='Meriyah',
        license='ISC',
        license_url='https://raw.githubusercontent.com/meriyah/meriyah/refs/heads/main/LICENSE.md',
        project_url='https://github.com/meriyah/meriyah',
    ),
    Dependency(
        name='Astring',
        license='MIT',
        license_url='https://raw.githubusercontent.com/davidbonnet/astring/refs/heads/main/LICENSE',
        project_url='https://github.com/davidbonnet/astring/',
    ),
]


def fetch_text(dep: Dependency) -> str:
    cache_dir = Path(CACHE_LOCATION)
    cache_dir.mkdir(exist_ok=True)
    url_hash = hashlib.sha256(dep.license_url.encode('utf-8')).hexdigest()
    cache_file = cache_dir / f'{url_hash}.txt'

    if cache_file.exists():
        return cache_file.read_text()

    # UA needed since some domains block requests default UA
    req = requests.get(dep.license_url, headers={'User-Agent': 'yt-dlp license fetcher'})
    req.raise_for_status()
    text = req.text
    cache_file.write_text(text)
    return text


def build_output() -> str:
    lines = [HEADER]
    for d in DEPENDENCIES:
        lines.append('\n')
        lines.append('-' * 80)
        header = f'{d.name}'
        if d.license:
            header += f' | {d.license}'
        if d.comment:
            header += f'\nNote: {d.comment}'
        if d.project_url:
            header += f'\nURL: {d.project_url}'
        lines.append(header)
        lines.append('-' * 80)

        text = fetch_text(d)
        lines.append(text.strip('\n') + '\n')
    return '\n'.join(lines)


if __name__ == '__main__':
    content = build_output()
    Path(DEFAULT_OUTPUT).write_text(content)
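As the __main__ block shows, the generator is run directly and writes its aggregate file into the current directory, caching each fetched license text under .license_cache keyed by URL hash:

    # Requires the `requests` package; writes THIRD_PARTY_LICENSES.txt in the CWD
    python devscripts/generate_third_party_licenses.py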
@@ -22,14 +22,19 @@ def parse_args():
         'input', nargs='?', metavar='TOMLFILE', default=Path(__file__).parent.parent / 'pyproject.toml',
         help='input file (default: %(default)s)')
     parser.add_argument(
-        '-e', '--exclude', metavar='DEPENDENCY', action='append',
-        help='exclude a dependency')
+        '-e', '--exclude-dependency', metavar='DEPENDENCY', action='append',
+        help='exclude a dependency (can be used multiple times)')
     parser.add_argument(
-        '-i', '--include', metavar='GROUP', action='append',
-        help='include an optional dependency group')
+        '-i', '--include-group', metavar='GROUP', action='append',
+        help='include an optional dependency group (can be used multiple times)')
     parser.add_argument(
-        '-o', '--only-optional', action='store_true',
-        help='only install optional dependencies')
+        '-c', '--cherry-pick', metavar='DEPENDENCY', action='append',
+        help=(
+            'only include a specific dependency from the resulting dependency list '
+            '(can be used multiple times)'))
+    parser.add_argument(
+        '-o', '--only-optional-groups', action='store_true',
+        help='omit default dependencies unless the "default" group is specified with --include-group')
     parser.add_argument(
         '-p', '--print', action='store_true',
         help='only print requirements to stdout')
@@ -39,30 +44,41 @@ def parse_args():
     return parser.parse_args()


+def uniq(arg) -> dict[str, None]:
+    return dict.fromkeys(map(str.lower, arg or ()))
+
+
 def main():
     args = parse_args()
     project_table = parse_toml(read_file(args.input))['project']
     recursive_pattern = re.compile(rf'{project_table["name"]}\[(?P<group_name>[\w-]+)\]')
     optional_groups = project_table['optional-dependencies']
-    excludes = args.exclude or []
+
+    excludes = uniq(args.exclude_dependency)
+    only_includes = uniq(args.cherry_pick)
+    include_groups = uniq(args.include_group)

     def yield_deps(group):
         for dep in group:
             if mobj := recursive_pattern.fullmatch(dep):
-                yield from optional_groups.get(mobj.group('group_name'), [])
+                yield from optional_groups.get(mobj.group('group_name'), ())
             else:
                 yield dep

-    targets = []
-    if not args.only_optional:  # `-o` should exclude 'dependencies' and the 'default' group
-        targets.extend(project_table['dependencies'])
-        if 'default' not in excludes:  # `--exclude default` should exclude entire 'default' group
-            targets.extend(yield_deps(optional_groups['default']))
+    targets = {}
+    if not args.only_optional_groups:
+        # legacy: 'dependencies' is empty now
+        targets.update(dict.fromkeys(project_table['dependencies']))
+        targets.update(dict.fromkeys(yield_deps(optional_groups['default'])))

-    for include in filter(None, map(optional_groups.get, args.include or [])):
-        targets.extend(yield_deps(include))
+    for include in filter(None, map(optional_groups.get, include_groups)):
+        targets.update(dict.fromkeys(yield_deps(include)))

-    targets = [t for t in targets if re.match(r'[\w-]+', t).group(0).lower() not in excludes]
+    def target_filter(target):
+        name = re.match(r'[\w-]+', target).group(0).lower()
+        return name not in excludes and (not only_includes or name in only_includes)
+
+    targets = list(filter(target_filter, targets))

     if args.print:
         for target in targets:
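A few illustrative invocations of the renamed flags (a sketch; the group names come from pyproject.toml, and the dependency names are only examples):

    # Default group plus the pyinstaller extras, excluding one dependency
    python -m devscripts.install_deps --include-group pyinstaller --exclude-dependency brotli
    # Only optional groups, as bundle/docker/linux/build.sh does
    python -m devscripts.install_deps --only-optional-groups --include-group build
    # Print just one dependency from the resolved list without installing
    python -m devscripts.install_deps --cherry-pick curl-cffi --print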
@@ -353,6 +353,13 @@ class CommitRange:
                 continue
             commit = Commit(override_hash, override['short'], override.get('authors') or [])
             logger.info(f'CHANGE {self._commits[commit.hash]} -> {commit}')
+            if match := self.FIXES_RE.search(commit.short):
+                fix_commitish = match.group(1)
+                if fix_commitish in self._commits:
+                    del self._commits[commit.hash]
+                    self._fixes[fix_commitish].append(commit)
+                    logger.info(f'Found fix for {fix_commitish[:HASH_LENGTH]}: {commit.hash[:HASH_LENGTH]}')
+                    continue
             self._commits[commit.hash] = commit

         self._commits = dict(reversed(self._commits.items()))
@@ -373,7 +380,7 @@
         issues = [issue.strip()[1:] for issue in issues.split(',')] if issues else []

         if prefix:
-            groups, details, sub_details = zip(*map(self.details_from_prefix, prefix.split(',')))
+            groups, details, sub_details = zip(*map(self.details_from_prefix, prefix.split(',')), strict=True)
             group = next(iter(filter(None, groups)), None)
             details = ', '.join(unique(details))
             sub_details = list(itertools.chain.from_iterable(sub_details))
@@ -8,7 +8,7 @@ def main():
     return  # This is unused in yt-dlp

     parser = optparse.OptionParser(usage='%prog INFILE OUTFILE')
-    options, args = parser.parse_args()
+    _, args = parser.parse_args()
     if len(args) != 2:
         parser.error('Expected an input and an output filename')
@@ -17,6 +17,18 @@ def parse_args():
     parser = argparse.ArgumentParser(description='Run selected yt-dlp tests')
     parser.add_argument(
         'test', help='an extractor test, test path, or one of "core" or "download"', nargs='*')
+    parser.add_argument(
+        '--flaky',
+        action='store_true',
+        default=None,
+        help='Allow running flaky tests. (default: run, unless in CI)',
+    )
+    parser.add_argument(
+        '--no-flaky',
+        action='store_false',
+        dest='flaky',
+        help=argparse.SUPPRESS,
+    )
     parser.add_argument(
         '-k', help='run a test matching EXPRESSION. Same as "pytest -k"', metavar='EXPRESSION')
     parser.add_argument(
@@ -24,10 +36,11 @@ def parse_args():
     return parser.parse_args()


-def run_tests(*tests, pattern=None, ci=False):
+def run_tests(*tests, pattern=None, ci=False, flaky: bool | None = None):
     # XXX: hatch uses `tests` if no arguments are passed
     run_core = 'core' in tests or 'tests' in tests or (not pattern and not tests)
     run_download = 'download' in tests
+    run_flaky = flaky or (flaky is None and not ci)

     pytest_args = args.pytest_args or os.getenv('HATCH_TEST_ARGS', '')
     arguments = ['pytest', '-Werror', '--tb=short', *shlex.split(pytest_args)]
@@ -44,6 +57,8 @@ def run_tests(*tests, pattern=None, ci=False):
         test if '/' in test
         else f'test/test_download.py::TestDownload::test_{fix_test_name(test)}'
         for test in tests)
+    if not run_flaky:
+        arguments.append('--disallow-flaky')

     print(f'Running {arguments}', flush=True)
     try:
@@ -72,6 +87,11 @@ if __name__ == '__main__':
         args = parse_args()

         os.chdir(Path(__file__).parent.parent)
-        sys.exit(run_tests(*args.test, pattern=args.k, ci=bool(os.getenv('CI'))))
+        sys.exit(run_tests(
+            *args.test,
+            pattern=args.k,
+            ci=bool(os.getenv('CI')),
+            flaky=args.flaky,
+        ))
     except KeyboardInterrupt:
         pass
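Hedged usage of the new flag pair (the default behavior stays: flaky tests run unless the CI environment variable is set):

    python devscripts/run_tests.py core --no-flaky   # appends --disallow-flaky to the pytest arguments
    python devscripts/run_tests.py core --flaky      # force-allow flaky tests, even when CI is set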
157  devscripts/setup_variables.py  Normal file
@@ -0,0 +1,157 @@
# Allow direct execution
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import datetime as dt
import json

from devscripts.utils import calculate_version


STABLE_REPOSITORY = 'yt-dlp/yt-dlp'


def setup_variables(environment):
    """
    `environment` must contain these keys:
        REPOSITORY, INPUTS, PROCESSED,
        PUSH_VERSION_COMMIT, PYPI_PROJECT,
        SOURCE_PYPI_PROJECT, SOURCE_PYPI_SUFFIX,
        TARGET_PYPI_PROJECT, TARGET_PYPI_SUFFIX,
        SOURCE_ARCHIVE_REPO, TARGET_ARCHIVE_REPO,
        HAS_SOURCE_ARCHIVE_REPO_TOKEN,
        HAS_TARGET_ARCHIVE_REPO_TOKEN,
        HAS_ARCHIVE_REPO_TOKEN

    `INPUTS` must contain these keys:
        prerelease

    `PROCESSED` must contain these keys:
        source_repo, source_tag,
        target_repo, target_tag
    """
    REPOSITORY = environment['REPOSITORY']
    INPUTS = json.loads(environment['INPUTS'])
    PROCESSED = json.loads(environment['PROCESSED'])

    source_channel = None
    does_not_have_needed_token = False
    target_repo_token = None
    pypi_project = None
    pypi_suffix = None

    source_repo = PROCESSED['source_repo']
    source_tag = PROCESSED['source_tag']
    if source_repo == 'stable':
        source_repo = STABLE_REPOSITORY
    if not source_repo:
        source_repo = REPOSITORY
    elif environment['SOURCE_ARCHIVE_REPO']:
        source_channel = environment['SOURCE_ARCHIVE_REPO']
    elif not source_tag and '/' not in source_repo:
        source_tag = source_repo
        source_repo = REPOSITORY

    resolved_source = source_repo
    if source_tag:
        resolved_source = f'{resolved_source}@{source_tag}'
    elif source_repo == STABLE_REPOSITORY:
        resolved_source = 'stable'

    revision = None
    if INPUTS['prerelease'] or not environment['PUSH_VERSION_COMMIT']:
        revision = dt.datetime.now(tz=dt.timezone.utc).strftime('%H%M%S')

    version = calculate_version(INPUTS.get('version') or revision)

    target_repo = PROCESSED['target_repo']
    target_tag = PROCESSED['target_tag']
    if target_repo:
        if target_repo == 'stable':
            target_repo = STABLE_REPOSITORY
        if not target_tag:
            if target_repo == STABLE_REPOSITORY:
                target_tag = version
            elif environment['TARGET_ARCHIVE_REPO']:
                target_tag = source_tag or version
            else:
                target_tag = target_repo
                target_repo = REPOSITORY
        if target_repo != REPOSITORY:
            target_repo = environment['TARGET_ARCHIVE_REPO']
            target_repo_token = f'{PROCESSED["target_repo"].upper()}_ARCHIVE_REPO_TOKEN'
            if not json.loads(environment['HAS_TARGET_ARCHIVE_REPO_TOKEN']):
                does_not_have_needed_token = True
            pypi_project = environment['TARGET_PYPI_PROJECT'] or None
            pypi_suffix = environment['TARGET_PYPI_SUFFIX'] or None
    else:
        target_tag = source_tag or version
        if source_channel:
            target_repo = source_channel
            target_repo_token = f'{PROCESSED["source_repo"].upper()}_ARCHIVE_REPO_TOKEN'
            if not json.loads(environment['HAS_SOURCE_ARCHIVE_REPO_TOKEN']):
                does_not_have_needed_token = True
            pypi_project = environment['SOURCE_PYPI_PROJECT'] or None
            pypi_suffix = environment['SOURCE_PYPI_SUFFIX'] or None
        else:
            target_repo = REPOSITORY

    if does_not_have_needed_token:
        if not json.loads(environment['HAS_ARCHIVE_REPO_TOKEN']):
            print(f'::error::Repository access secret {target_repo_token} not found')
            return None
        target_repo_token = 'ARCHIVE_REPO_TOKEN'

    if target_repo == REPOSITORY and not INPUTS['prerelease']:
        pypi_project = environment['PYPI_PROJECT'] or None

    return {
        'channel': resolved_source,
        'version': version,
        'target_repo': target_repo,
        'target_repo_token': target_repo_token,
        'target_tag': target_tag,
        'pypi_project': pypi_project,
        'pypi_suffix': pypi_suffix,
    }


def process_inputs(inputs):
    outputs = {}
    for key in ('source', 'target'):
        repo, _, tag = inputs.get(key, '').partition('@')
        outputs[f'{key}_repo'] = repo
        outputs[f'{key}_tag'] = tag
    return outputs


if __name__ == '__main__':
    if not os.getenv('GITHUB_OUTPUT'):
        print('This script is only intended for use with GitHub Actions', file=sys.stderr)
        sys.exit(1)

    if 'process_inputs' in sys.argv:
        inputs = json.loads(os.environ['INPUTS'])
        print('::group::Inputs')
        print(json.dumps(inputs, indent=2))
        print('::endgroup::')
        outputs = process_inputs(inputs)
        print('::group::Processed')
        print(json.dumps(outputs, indent=2))
        print('::endgroup::')
        with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
            f.write('\n'.join(f'{key}={value}' for key, value in outputs.items()))
        sys.exit(0)

    outputs = setup_variables(dict(os.environ))
    if not outputs:
        sys.exit(1)

    print('::group::Output variables')
    print(json.dumps(outputs, indent=2))
    print('::endgroup::')

    with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
        f.write('\n'.join(f'{key}={value or ""}' for key, value in outputs.items()))
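A sketch of running the script outside of CI (it refuses to run unless GITHUB_OUTPUT is set; the input values below are illustrative):

    export GITHUB_OUTPUT=/tmp/github_output
    INPUTS='{"source": "nightly", "target": "", "prerelease": true}' \
        python devscripts/setup_variables.py process_inputs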
324  devscripts/setup_variables_tests.py  Normal file
@@ -0,0 +1,324 @@
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import datetime as dt
import json

from devscripts.setup_variables import STABLE_REPOSITORY, process_inputs, setup_variables
from devscripts.utils import calculate_version


def _test(github_repository, note, repo_vars, repo_secrets, inputs, expected=None, ignore_revision=False):
    inp = inputs.copy()
    inp.setdefault('linux_armv7l', True)
    inp.setdefault('prerelease', False)
    processed = process_inputs(inp)
    source_repo = processed['source_repo'].upper()
    target_repo = processed['target_repo'].upper()
    variables = {k.upper(): v for k, v in repo_vars.items()}
    secrets = {k.upper(): v for k, v in repo_secrets.items()}

    env = {
        # Keep this in sync with prepare.setup_variables in release.yml
        'INPUTS': json.dumps(inp),
        'PROCESSED': json.dumps(processed),
        'REPOSITORY': github_repository,
        'PUSH_VERSION_COMMIT': variables.get('PUSH_VERSION_COMMIT') or '',
        'PYPI_PROJECT': variables.get('PYPI_PROJECT') or '',
        'SOURCE_PYPI_PROJECT': variables.get(f'{source_repo}_PYPI_PROJECT') or '',
        'SOURCE_PYPI_SUFFIX': variables.get(f'{source_repo}_PYPI_SUFFIX') or '',
        'TARGET_PYPI_PROJECT': variables.get(f'{target_repo}_PYPI_PROJECT') or '',
        'TARGET_PYPI_SUFFIX': variables.get(f'{target_repo}_PYPI_SUFFIX') or '',
        'SOURCE_ARCHIVE_REPO': variables.get(f'{source_repo}_ARCHIVE_REPO') or '',
        'TARGET_ARCHIVE_REPO': variables.get(f'{target_repo}_ARCHIVE_REPO') or '',
        'HAS_SOURCE_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get(f'{source_repo}_ARCHIVE_REPO_TOKEN'))),
        'HAS_TARGET_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get(f'{target_repo}_ARCHIVE_REPO_TOKEN'))),
        'HAS_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get('ARCHIVE_REPO_TOKEN'))),
    }

    result = setup_variables(env)
    if not expected:
        print(' {\n' + '\n'.join(f' {k!r}: {v!r},' for k, v in result.items()) + '\n }')
        return

    exp = expected.copy()
    if ignore_revision:
        assert len(result['version']) == len(exp['version']), f'revision missing: {github_repository} {note}'
        version_is_tag = result['version'] == result['target_tag']
        for dct in (result, exp):
            dct['version'] = '.'.join(dct['version'].split('.')[:3])
            if version_is_tag:
                dct['target_tag'] = dct['version']
    assert result == exp, f'unexpected result: {github_repository} {note}'


def test_setup_variables():
    DEFAULT_VERSION_WITH_REVISION = dt.datetime.now(tz=dt.timezone.utc).strftime('%Y.%m.%d.%H%M%S')
    DEFAULT_VERSION = calculate_version()
    BASE_REPO_VARS = {
        'MASTER_ARCHIVE_REPO': 'yt-dlp/yt-dlp-master-builds',
        'NIGHTLY_ARCHIVE_REPO': 'yt-dlp/yt-dlp-nightly-builds',
        'NIGHTLY_PYPI_PROJECT': 'yt-dlp',
        'NIGHTLY_PYPI_SUFFIX': 'dev',
        'PUSH_VERSION_COMMIT': '1',
        'PYPI_PROJECT': 'yt-dlp',
    }
    BASE_REPO_SECRETS = {
        'ARCHIVE_REPO_TOKEN': '1',
    }
    FORK_REPOSITORY = 'fork/yt-dlp'
    FORK_ORG = FORK_REPOSITORY.partition('/')[0]

    _test(
        STABLE_REPOSITORY, 'official vars/secrets, stable',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {}, {
            'channel': 'stable',
            'version': DEFAULT_VERSION,
            'target_repo': STABLE_REPOSITORY,
            'target_repo_token': None,
            'target_tag': DEFAULT_VERSION,
            'pypi_project': 'yt-dlp',
            'pypi_suffix': None,
        })
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, nightly (w/o target)',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'source': 'nightly',
            'prerelease': True,
        }, {
            'channel': 'nightly',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': 'yt-dlp/yt-dlp-nightly-builds',
            'target_repo_token': 'ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': 'yt-dlp',
            'pypi_suffix': 'dev',
        }, ignore_revision=True)
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, nightly',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'source': 'nightly',
            'target': 'nightly',
            'prerelease': True,
        }, {
            'channel': 'nightly',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': 'yt-dlp/yt-dlp-nightly-builds',
            'target_repo_token': 'ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': 'yt-dlp',
            'pypi_suffix': 'dev',
        }, ignore_revision=True)
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, master (w/o target)',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'source': 'master',
            'prerelease': True,
        }, {
            'channel': 'master',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': 'yt-dlp/yt-dlp-master-builds',
            'target_repo_token': 'ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, master',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'source': 'master',
            'target': 'master',
            'prerelease': True,
        }, {
            'channel': 'master',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': 'yt-dlp/yt-dlp-master-builds',
            'target_repo_token': 'ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, special tag, updates to stable',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'target': f'{STABLE_REPOSITORY}@experimental',
            'prerelease': True,
        }, {
            'channel': 'stable',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': STABLE_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'experimental',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, special tag, "stable" as target repo',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'target': 'stable@experimental',
            'prerelease': True,
        }, {
            'channel': 'stable',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': STABLE_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'experimental',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)

    _test(
        FORK_REPOSITORY, 'fork w/o vars/secrets, stable',
        {}, {}, {}, {
            'channel': FORK_REPOSITORY,
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork w/o vars/secrets, prerelease',
        {}, {}, {'prerelease': True}, {
            'channel': FORK_REPOSITORY,
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork w/o vars/secrets, nightly',
        {}, {}, {
            'prerelease': True,
            'source': 'nightly',
            'target': 'nightly',
        }, {
            'channel': f'{FORK_REPOSITORY}@nightly',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'nightly',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork w/o vars/secrets, master',
        {}, {}, {
            'prerelease': True,
            'source': 'master',
            'target': 'master',
        }, {
            'channel': f'{FORK_REPOSITORY}@master',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'master',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork w/o vars/secrets, revision',
        {}, {}, {'version': '123'}, {
            'channel': FORK_REPOSITORY,
            'version': f'{DEFAULT_VERSION[:10]}.123',
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': f'{DEFAULT_VERSION[:10]}.123',
            'pypi_project': None,
            'pypi_suffix': None,
        })

    _test(
        FORK_REPOSITORY, 'fork w/ PUSH_VERSION_COMMIT, stable',
        {'PUSH_VERSION_COMMIT': '1'}, {}, {}, {
            'channel': FORK_REPOSITORY,
            'version': DEFAULT_VERSION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': DEFAULT_VERSION,
            'pypi_project': None,
            'pypi_suffix': None,
        })
    _test(
        FORK_REPOSITORY, 'fork w/ PUSH_VERSION_COMMIT, prerelease',
        {'PUSH_VERSION_COMMIT': '1'}, {}, {'prerelease': True}, {
            'channel': FORK_REPOSITORY,
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)

    _test(
        FORK_REPOSITORY, 'fork w/NIGHTLY_ARCHIVE_REPO_TOKEN, nightly', {
            'NIGHTLY_ARCHIVE_REPO': f'{FORK_ORG}/yt-dlp-nightly-builds',
            'PYPI_PROJECT': 'yt-dlp-test',
        }, {
            'NIGHTLY_ARCHIVE_REPO_TOKEN': '1',
        }, {
            'source': f'{FORK_ORG}/yt-dlp-nightly-builds',
            'target': 'nightly',
            'prerelease': True,
        }, {
            'channel': f'{FORK_ORG}/yt-dlp-nightly-builds',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': f'{FORK_ORG}/yt-dlp-nightly-builds',
            'target_repo_token': 'NIGHTLY_ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork w/MASTER_ARCHIVE_REPO_TOKEN, master', {
            'MASTER_ARCHIVE_REPO': f'{FORK_ORG}/yt-dlp-master-builds',
            'MASTER_PYPI_PROJECT': 'yt-dlp-test',
            'MASTER_PYPI_SUFFIX': 'dev',
        }, {
            'MASTER_ARCHIVE_REPO_TOKEN': '1',
        }, {
            'source': f'{FORK_ORG}/yt-dlp-master-builds',
            'target': 'master',
            'prerelease': True,
        }, {
            'channel': f'{FORK_ORG}/yt-dlp-master-builds',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': f'{FORK_ORG}/yt-dlp-master-builds',
            'target_repo_token': 'MASTER_ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': 'yt-dlp-test',
            'pypi_suffix': 'dev',
        }, ignore_revision=True)

    _test(
        FORK_REPOSITORY, 'fork, non-numeric tag',
        {}, {}, {'source': 'experimental'}, {
            'channel': f'{FORK_REPOSITORY}@experimental',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'experimental',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork, non-numeric tag, updates to stable',
        {}, {}, {
            'prerelease': True,
            'source': 'stable',
            'target': 'experimental',
        }, {
            'channel': 'stable',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'experimental',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
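Since these checks live in a test_-prefixed function of plain asserts, a straightforward way to run them is through pytest:

    python -m pytest devscripts/setup_variables_tests.py -q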
@@ -9,24 +9,9 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

 import argparse
 import contextlib
-import datetime as dt
 import sys

-from devscripts.utils import read_version, run_process, write_file
-
-
-def get_new_version(version, revision):
-    if not version:
-        version = dt.datetime.now(dt.timezone.utc).strftime('%Y.%m.%d')
-
-    if revision:
-        assert revision.isdecimal(), 'Revision must be a number'
-    else:
-        old_version = read_version().split('.')
-        if version.split('.') == old_version[:3]:
-            revision = str(int(([*old_version, 0])[3]) + 1)
-
-    return f'{version}.{revision}' if revision else version
+from devscripts.utils import calculate_version, run_process, write_file


 def get_git_head():
@@ -72,9 +57,7 @@ if __name__ == '__main__':
     args = parser.parse_args()

     git_head = get_git_head()
-    version = (
-        args.version if args.version and '.' in args.version
-        else get_new_version(None, args.version))
+    version = calculate_version(args.version)
     write_file(args.output, VERSION_TEMPLATE.format(
         version=version, git_head=git_head, channel=args.channel, origin=args.origin,
         package_version=f'{version}{args.suffix}'))
@@ -20,7 +20,9 @@ if __name__ == '__main__':
         '--changelog-path', type=Path, default=Path(__file__).parent.parent / 'Changelog.md',
         help='path to the Changelog file')
     args = parser.parse_args()
-    new_entry = create_changelog(args)

     header, sep, changelog = read_file(args.changelog_path).partition('\n### ')
-    write_file(args.changelog_path, f'{header}{sep}{read_version()}\n{new_entry}\n{sep}{changelog}')
+    current_version = read_version()
+    if current_version != changelog.splitlines()[0]:
+        new_entry = create_changelog(args)
+        write_file(args.changelog_path, f'{header}{sep}{current_version}\n{new_entry}\n{sep}{changelog}')
166  devscripts/update_ejs.py  Executable file
@@ -0,0 +1,166 @@
#!/usr/bin/env python3
from __future__ import annotations

import contextlib
import hashlib
import io
import json
import pathlib
import urllib.request
import zipfile


TEMPLATE = '''\
# This file is generated by devscripts/update_ejs.py. DO NOT MODIFY!

VERSION = {version!r}
HASHES = {{
{hash_mapping}
}}
'''
PREFIX = '    "yt-dlp-ejs=='
BASE_PATH = pathlib.Path(__file__).parent.parent
PYPROJECT_PATH = BASE_PATH / 'pyproject.toml'
PACKAGE_PATH = BASE_PATH / 'yt_dlp/extractor/youtube/jsc/_builtin/vendor'
RELEASE_URL = 'https://api.github.com/repos/yt-dlp/ejs/releases/latest'
ASSETS = {
    'yt.solver.lib.js': False,
    'yt.solver.lib.min.js': False,
    'yt.solver.deno.lib.js': True,
    'yt.solver.bun.lib.js': True,
    'yt.solver.core.min.js': False,
    'yt.solver.core.js': True,
}
MAKEFILE_PATH = BASE_PATH / 'Makefile'


def request(url: str):
    return contextlib.closing(urllib.request.urlopen(url))


def makefile_variables(
    version: str | None = None,
    name: str | None = None,
    digest: str | None = None,
    data: bytes | None = None,
    keys_only: bool = False,
) -> dict[str, str | None]:
    assert keys_only or all(arg is not None for arg in (version, name, digest, data))

    return {
        'EJS_VERSION': None if keys_only else version,
        'EJS_WHEEL_NAME': None if keys_only else name,
        'EJS_WHEEL_HASH': None if keys_only else digest,
        'EJS_PY_FOLDERS': None if keys_only else list_wheel_contents(data, 'py', files=False),
        'EJS_PY_FILES': None if keys_only else list_wheel_contents(data, 'py', folders=False),
        'EJS_JS_FOLDERS': None if keys_only else list_wheel_contents(data, 'js', files=False),
        'EJS_JS_FILES': None if keys_only else list_wheel_contents(data, 'js', folders=False),
    }


def list_wheel_contents(
    wheel_data: bytes,
    suffix: str | None = None,
    folders: bool = True,
    files: bool = True,
) -> str:
    assert folders or files, 'at least one of "folders" or "files" must be True'

    with zipfile.ZipFile(io.BytesIO(wheel_data)) as zipf:
        path_gen = (zinfo.filename for zinfo in zipf.infolist())

        filtered = filter(lambda path: path.startswith('yt_dlp_ejs/'), path_gen)
        if suffix:
            filtered = filter(lambda path: path.endswith(f'.{suffix}'), filtered)

        files_list = list(filtered)
        if not folders:
            return ' '.join(files_list)

        folders_list = list(dict.fromkeys(path.rpartition('/')[0] for path in files_list))
        if not files:
            return ' '.join(folders_list)

        return ' '.join(folders_list + files_list)


def main():
    current_version = None
    with PYPROJECT_PATH.open() as file:
        for line in file:
            if not line.startswith(PREFIX):
                continue
            current_version, _, _ = line.removeprefix(PREFIX).partition('"')

    if not current_version:
        print('yt-dlp-ejs dependency line could not be found')
        return

    makefile_info = makefile_variables(keys_only=True)
    prefixes = tuple(f'{key} = ' for key in makefile_info)
    with MAKEFILE_PATH.open() as file:
        for line in file:
            if not line.startswith(prefixes):
                continue
            key, _, val = line.partition(' = ')
            makefile_info[key] = val.rstrip()

    with request(RELEASE_URL) as resp:
        info = json.load(resp)

    version = info['tag_name']
    if version == current_version:
        print(f'yt-dlp-ejs is up to date! ({version})')
        return

    print(f'Updating yt-dlp-ejs from {current_version} to {version}')
    hashes = []
    wheel_info = {}
    for asset in info['assets']:
        name = asset['name']
        is_wheel = name.startswith('yt_dlp_ejs-') and name.endswith('.whl')
        if not is_wheel and name not in ASSETS:
            continue
        with request(asset['browser_download_url']) as resp:
            data = resp.read()

        # verify digest from github
        digest = asset['digest']
        algo, _, expected = digest.partition(':')
        hexdigest = hashlib.new(algo, data).hexdigest()
        assert hexdigest == expected, f'downloaded asset digest mismatch ({hexdigest!r} != {expected!r})'

        if is_wheel:
            wheel_info = makefile_variables(version, name, digest, data)
            continue

        # calculate sha3-512 digest
        asset_hash = hashlib.sha3_512(data).hexdigest()
        hashes.append(f'    {name!r}: {asset_hash!r},')

        if ASSETS[name]:
            (PACKAGE_PATH / name).write_bytes(data)

    hash_mapping = '\n'.join(hashes)
    for asset_name in ASSETS:
        assert asset_name in hash_mapping, f'{asset_name} not found in release'

    assert all(wheel_info.get(key) for key in makefile_info), 'wheel info not found in release'

    (PACKAGE_PATH / '_info.py').write_text(TEMPLATE.format(
        version=version,
        hash_mapping=hash_mapping,
    ))

    content = PYPROJECT_PATH.read_text()
    updated = content.replace(PREFIX + current_version, PREFIX + version)
    PYPROJECT_PATH.write_text(updated)

    makefile = MAKEFILE_PATH.read_text()
    for key in wheel_info:
        makefile = makefile.replace(f'{key} = {makefile_info[key]}', f'{key} = {wheel_info[key]}')
    MAKEFILE_PATH.write_text(makefile)


if __name__ == '__main__':
    main()
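The updater is stdlib-only and a no-op when the pin already matches the latest release tag; a sketch of a run from the repo root:

    python devscripts/update_ejs.py
    # On a version bump this rewrites the vendored solver files, the generated
    # _info.py hash table, the pyproject.toml pin, and the EJS_* Makefile variables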
@@ -1,5 +1,7 @@
 import argparse
+import datetime as dt
 import functools
+import re
 import subprocess

@@ -20,6 +22,23 @@ def read_version(fname='yt_dlp/version.py', varname='__version__'):
     return items[varname]


+def calculate_version(version=None, fname='yt_dlp/version.py'):
+    if version and '.' in version:
+        return version
+
+    revision = version
+    version = dt.datetime.now(dt.timezone.utc).strftime('%Y.%m.%d')
+
+    if revision:
+        assert re.fullmatch(r'[0-9]+', revision), 'Revision must be numeric'
+    else:
+        old_version = read_version(fname=fname).split('.')
+        if version.split('.') == old_version[:3]:
+            revision = str(int(([*old_version, 0])[3]) + 1)
+
+    return f'{version}.{revision}' if revision else version
+
+
 def get_filename_args(has_infile=False, default_outfile=None):
     parser = argparse.ArgumentParser()
     if has_infile:
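A hedged illustration of calculate_version's three input shapes (run from the repo root; the dates in the comments are placeholders for "today"):

    python - <<'EOF'
    from devscripts.utils import calculate_version
    print(calculate_version('2025.01.01'))  # contains '.': returned unchanged
    print(calculate_version('123'))         # numeric: used as revision -> e.g. 2025.11.30.123
    print(calculate_version())              # None: today's date, auto-bumping the revision on collision
    EOF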
@@ -18,6 +18,7 @@ def build_completion(opt_parser):
         for opt in group.option_list]
     opts_file = [opt for opt in opts if opt.metavar == 'FILE']
     opts_dir = [opt for opt in opts if opt.metavar == 'DIR']
+    opts_path = [opt for opt in opts if opt.metavar == 'PATH']

     fileopts = []
     for opt in opts_file:
@@ -26,6 +27,12 @@ def build_completion(opt_parser):
         if opt._long_opts:
             fileopts.extend(opt._long_opts)

+    for opt in opts_path:
+        if opt._short_opts:
+            fileopts.extend(opt._short_opts)
+        if opt._long_opts:
+            fileopts.extend(opt._long_opts)
+
     diropts = []
     for opt in opts_dir:
         if opt._short_opts:
@@ -1,11 +1,14 @@
 [build-system]
-requires = ["hatchling"]
+requires = ["hatchling>=1.27.0"]
 build-backend = "hatchling.build"

 [project]
 name = "yt-dlp"
-maintainers = [
+authors = [
     {name = "pukkandan", email = "pukkandan.ytdlp@gmail.com"},
+]
+maintainers = [
+    {email = "maintainers@yt-dlp.org"},
     {name = "Grub4K", email = "contact@grub4k.xyz"},
     {name = "bashonly", email = "bashonly@protonmail.com"},
     {name = "coletdjnz", email = "coletdjnz@protonmail.com"},
@@ -13,7 +16,7 @@ maintainers = [
 ]
 description = "A feature-rich command-line audio/video downloader"
 readme = "README.md"
-requires-python = ">=3.9"
+requires-python = ">=3.10"
 keywords = [
     "cli",
     "downloader",
@@ -22,22 +25,22 @@ keywords = [
"sponsorblock",
|
||||
"yt-dlp",
|
||||
]
|
||||
license = {file = "LICENSE"}
|
||||
license = "Unlicense"
|
||||
license-files = ["LICENSE"]
|
||||
classifiers = [
|
||||
"Topic :: Multimedia :: Video",
|
||||
"Development Status :: 5 - Production/Stable",
|
||||
"Environment :: Console",
|
||||
"Programming Language :: Python",
|
||||
"Programming Language :: Python :: 3 :: Only",
|
||||
"Programming Language :: Python :: 3.9",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
"Programming Language :: Python :: 3.11",
|
||||
"Programming Language :: Python :: 3.12",
|
||||
"Programming Language :: Python :: 3.13",
|
||||
"Programming Language :: Python :: 3.14",
|
||||
"Programming Language :: Python :: Implementation",
|
||||
"Programming Language :: Python :: Implementation :: CPython",
|
||||
"Programming Language :: Python :: Implementation :: PyPy",
|
||||
"License :: OSI Approved :: The Unlicense (Unlicense)",
|
||||
"Operating System :: OS Independent",
|
||||
]
|
||||
dynamic = ["version"]
|
||||
|
|
@@ -53,6 +56,7 @@ default = [
"requests>=2.32.2,<3",
|
||||
"urllib3>=2.0.2,<3",
|
||||
"websockets>=13.0",
|
||||
"yt-dlp-ejs==0.3.1",
|
||||
]
|
||||
curl-cffi = [
|
||||
"curl-cffi>=0.5.10,!=0.6.*,!=0.7.*,!=0.8.*,!=0.9.*,<0.14; implementation_name=='cpython'",
|
||||
|
|
@ -63,9 +67,9 @@ secretstorage = [
|
|||
]
|
||||
build = [
|
||||
"build",
|
||||
"hatchling",
|
||||
"hatchling>=1.27.0",
|
||||
"pip",
|
||||
"setuptools>=71.0.2,<81", # See https://github.com/pyinstaller/pyinstaller/issues/9149
|
||||
"setuptools>=71.0.2",
|
||||
"wheel",
|
||||
]
|
||||
dev = [
|
||||
|
|
@ -75,21 +79,21 @@ dev = [
|
|||
]
|
||||
static-analysis = [
|
||||
"autopep8~=2.0",
|
||||
"ruff~=0.12.0",
|
||||
"ruff~=0.14.0",
|
||||
]
|
||||
test = [
|
||||
"pytest~=8.1",
|
||||
"pytest-rerunfailures~=14.0",
|
||||
]
|
||||
pyinstaller = [
|
||||
"pyinstaller>=6.13.0", # Windows temp cleanup fixed in 6.13.0
|
||||
"pyinstaller>=6.17.0", # 6.17.0+ needed for compat with setuptools 81+
|
||||
]
|
||||
|
||||
[project.urls]
|
||||
Documentation = "https://github.com/yt-dlp/yt-dlp#readme"
|
||||
Repository = "https://github.com/yt-dlp/yt-dlp"
|
||||
Tracker = "https://github.com/yt-dlp/yt-dlp/issues"
|
||||
Funding = "https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators"
|
||||
Funding = "https://github.com/yt-dlp/yt-dlp/blob/master/Maintainers.md#maintainers"
|
||||
|
||||
[project.scripts]
|
||||
yt-dlp = "yt_dlp:main"
|
||||
|
|
@ -107,7 +111,6 @@ include = [
|
|||
"/LICENSE", # included as license
|
||||
"/pyproject.toml", # included by default
|
||||
"/README.md", # included as readme
|
||||
"/setup.cfg",
|
||||
"/supportedsites.md",
|
||||
]
|
||||
artifacts = [
|
||||
|
|
@ -120,7 +123,12 @@ artifacts = [
|
|||
|
||||
[tool.hatch.build.targets.wheel]
|
||||
packages = ["yt_dlp"]
|
||||
artifacts = ["/yt_dlp/extractor/lazy_extractors.py"]
|
||||
artifacts = [
|
||||
"/yt_dlp/extractor/lazy_extractors.py",
|
||||
]
|
||||
exclude = [
|
||||
"/yt_dlp/**/*.md",
|
||||
]
|
||||
|
||||
[tool.hatch.build.targets.wheel.shared-data]
|
||||
"completions/bash/yt-dlp" = "share/bash-completion/completions/yt-dlp"
|
||||
|
|
@ -168,12 +176,12 @@ run-cov = "echo Code coverage not implemented && exit 1"
|
|||
|
||||
[[tool.hatch.envs.hatch-test.matrix]]
|
||||
python = [
|
||||
"3.9",
|
||||
"3.10",
|
||||
"3.11",
|
||||
"3.12",
|
||||
"3.13",
|
||||
"pypy3.10",
|
||||
"3.14",
|
||||
"pypy3.11",
|
||||
]
|
||||
|
||||
[tool.ruff]
|
||||
|
|
|
|||
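A quick sanity check of the metadata changes above, as a sketch run from the repository root (tomllib requires Python 3.11+):

# Parse pyproject.toml and confirm the bumped floors (illustrative only)
import tomllib

with open('pyproject.toml', 'rb') as f:
    meta = tomllib.load(f)

assert meta['project']['requires-python'] == '>=3.10'
assert meta['project']['license'] == 'Unlicense'  # PEP 639 license expression
assert any(req.startswith('hatchling>=1.27.0') for req in meta['build-system']['requires'])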
39 setup.cfg (deleted)

@@ -1,39 +0,0 @@
[flake8]
exclude = build,venv,.tox,.git,.pytest_cache
ignore = E402,E501,E731,E741,W503
max_line_length = 120
per_file_ignores =
    devscripts/lazy_load_template.py: F401


[autoflake]
ignore-init-module-imports = true
ignore-pass-after-docstring = true
remove-all-unused-imports = true
remove-duplicate-keys = true
remove-unused-variables = true


[tox:tox]
skipsdist = true
envlist = py{39,310,311,312,313},pypy311
skip_missing_interpreters = true

[testenv]  # tox
deps =
    pytest
commands = pytest {posargs:"-m not download"}
passenv = HOME  # For test_compat_expanduser
setenv =
    # PYTHONWARNINGS = error  # Catches PIP's warnings too


[isort]
py_version = 39
multi_line_output = VERTICAL_HANGING_INDENT
line_length = 80
reverse_relative = true
ensure_newline_before_comments = true
include_trailing_comma = true
known_first_party =
    test
@@ -12,6 +12,7 @@ The only reliable way to check if a site is supported is to try it.
- **17live:vod**
- **1News**: 1news.co.nz article videos
- **1tv**: Первый канал
- **1tv:live**: Первый канал (прямой эфир)
- **20min**: (**Currently broken**)
- **23video**
- **247sports**: (**Currently broken**)

@@ -20,7 +21,6 @@ The only reliable way to check if a site is supported is to try it.
- **3sat**
- **4tube**
- **56.com**
- **6play**
- **7plus**
- **8tracks**
- **9c9media**

@@ -86,7 +86,7 @@ The only reliable way to check if a site is supported is to try it.
- **aol.com**: Yahoo screen and movies (**Currently broken**)
- **APA**
- **Aparat**
- **AppleConnect**
- **apple:music:connect**: Apple Music Connect
- **AppleDaily**: 臺灣蘋果日報
- **ApplePodcasts**
- **appletrailers**

@@ -94,6 +94,8 @@ The only reliable way to check if a site is supported is to try it.
- **archive.org**: archive.org video and audio
- **ArcPublishing**
- **ARD**
- **ARDAudiothek**
- **ARDAudiothekPlaylist**
- **ARDMediathek**
- **ARDMediathekCollection**
- **Art19**

@@ -243,6 +245,7 @@ The only reliable way to check if a site is supported is to try it.
- **Canalsurmas**
- **CaracolTvPlay**: [*caracoltv-play*](## "netrc machine")
- **cbc.ca**
- **cbc.ca:listen**
- **cbc.ca:player**
- **cbc.ca:player:playlist**
- **CBS**: (**Currently broken**)

@@ -299,7 +302,6 @@ The only reliable way to check if a site is supported is to try it.
- **cpac**
- **cpac:playlist**
- **Cracked**
- **Crackle**
- **Craftsy**
- **CrooksAndLiars**
- **CrowdBunker**

@@ -314,8 +316,6 @@ The only reliable way to check if a site is supported is to try it.
- **curiositystream**: [*curiositystream*](## "netrc machine")
- **curiositystream:collections**: [*curiositystream*](## "netrc machine")
- **curiositystream:series**: [*curiositystream*](## "netrc machine")
- **cwtv**
- **cwtv:movie**
- **Cybrary**: [*cybrary*](## "netrc machine")
- **CybraryCourse**: [*cybrary*](## "netrc machine")
- **DacastPlaylist**

@@ -450,7 +450,6 @@ The only reliable way to check if a site is supported is to try it.
- **Filmweb**
- **FiveThirtyEight**
- **FiveTV**
- **FlexTV**
- **Flickr**
- **Floatplane**
- **FloatplaneChannel**

@@ -537,7 +536,6 @@ The only reliable way to check if a site is supported is to try it.
- **google:podcasts:feed**
- **GoogleDrive**
- **GoogleDrive:Folder**
- **GoPlay**: [*goplay*](## "netrc machine")
- **GoPro**
- **Goshgay**
- **GoToStage**

@@ -584,6 +582,11 @@ The only reliable way to check if a site is supported is to try it.
- **Hypem**
- **Hytale**
- **Icareus**
- **IdagioAlbum**
- **IdagioPersonalPlaylist**
- **IdagioPlaylist**
- **IdagioRecording**
- **IdagioTrack**
- **IdolPlus**
- **iflix:episode**
- **IflixSeries**

@@ -798,7 +801,6 @@ The only reliable way to check if a site is supported is to try it.
- **mirrativ**
- **mirrativ:user**
- **MirrorCoUK**
- **MiTele**: mitele.es
- **mixch**
- **mixch:archive**
- **mixch:movie**

@@ -844,6 +846,7 @@ The only reliable way to check if a site is supported is to try it.
- **MusicdexArtist**
- **MusicdexPlaylist**
- **MusicdexSong**
- **Mux**
- **Mx3**
- **Mx3Neo**
- **Mx3Volksmusik**

@@ -858,6 +861,7 @@ The only reliable way to check if a site is supported is to try it.
- **n-tv.de**
- **N1Info:article**
- **N1InfoAsset**
- **NascarClassics**
- **Nate**
- **NateProgram**
- **natgeo:video**

@@ -1009,6 +1013,7 @@ The only reliable way to check if a site is supported is to try it.
- **onet.tv:channel**
- **OnetMVP**
- **OnionStudios**
- **onsen**: [*onsen*](## "netrc machine") インターネットラジオステーション<音泉>
- **Opencast**
- **OpencastPlaylist**
- **openrec**

@@ -1033,8 +1038,6 @@ The only reliable way to check if a site is supported is to try it.
- **Panopto**
- **PanoptoList**
- **PanoptoPlaylist**
- **ParamountPlus**
- **ParamountPlusSeries**
- **ParamountPressExpress**
- **Parler**: Posts on parler.com
- **parliamentlive.tv**: UK parliament videos

@@ -1069,11 +1072,10 @@ The only reliable way to check if a site is supported is to try it.
- **PinterestCollection**
- **PiramideTV**
- **PiramideTVChannel**
- **pixiv:sketch**
- **pixiv:sketch:user**
- **PlanetMarathi**
- **Platzi**: [*platzi*](## "netrc machine")
- **PlatziCourse**: [*platzi*](## "netrc machine")
- **play.tv**: [*goplay*](## "netrc machine") PLAY (formerly goplay.be)
- **player.sky.it**
- **PlayerFm**
- **playeur**

@@ -1257,7 +1259,6 @@ The only reliable way to check if a site is supported is to try it.
- **rutube:person**: Rutube person videos
- **rutube:playlist**: Rutube playlists
- **rutube:tags**: Rutube tags
- **RUTV**: RUTV.RU
- **Ruutu**: (**Currently broken**)
- **Ruv**
- **ruv.is:spila**

@@ -1332,7 +1333,10 @@ The only reliable way to check if a site is supported is to try it.
- **Slideshare**
- **SlidesLive**
- **Slutload**
- **Smotrim**
- **smotrim**
- **smotrim:audio**
- **smotrim:live**
- **smotrim:playlist**
- **SnapchatSpotlight**
- **Snotr**
- **SoftWhiteUnderbelly**: [*softwhiteunderbelly*](## "netrc machine")

@@ -1370,8 +1374,6 @@ The only reliable way to check if a site is supported is to try it.
- **Sport5**
- **SportBox**: (**Currently broken**)
- **SportDeutschland**
- **spotify**: Spotify episodes (**Currently broken**)
- **spotify:show**: Spotify shows (**Currently broken**)
- **Spreaker**
- **SpreakerShow**
- **SpringboardPlatform**

@@ -1510,15 +1512,17 @@ The only reliable way to check if a site is supported is to try it.
- **TrueID**
- **TruNews**
- **Truth**
- **ttinglive**: 띵라이브 (formerly FlexTV)
- **Tube8**: (**Currently broken**)
- **TubeTuGraz**: [*tubetugraz*](## "netrc machine") tube.tugraz.at
- **TubeTuGrazSeries**: [*tubetugraz*](## "netrc machine")
- **tubitv**: [*tubitv*](## "netrc machine")
- **tubitv:series**
- **Tumblr**: [*tumblr*](## "netrc machine")
- **TuneInPodcast**
- **TuneInPodcastEpisode**
- **TuneInStation**
- **tunein:embed**
- **tunein:podcast**
- **tunein:podcast:program**
- **tunein:station**
- **tv.dfb.de**
- **TV2**
- **TV2Article**

@@ -1542,7 +1546,7 @@ The only reliable way to check if a site is supported is to try it.
- **tvigle**: Интернет-телевидение Tvigle.ru
- **TVIPlayer**
- **TVN24**: (**Currently broken**)
- **TVNoe**: (**Currently broken**)
- **tvnoe**: Televize Noe
- **tvopengr:embed**: tvopen.gr embedded videos
- **tvopengr:watch**: tvopen.gr (and ethnos.gr) videos
- **tvp**: Telewizja Polska

@@ -1560,12 +1564,12 @@ The only reliable way to check if a site is supported is to try it.
- **TwitCastingLive**
- **TwitCastingUser**
- **twitch:clips**: [*twitch*](## "netrc machine")
- **twitch:collection**: [*twitch*](## "netrc machine")
- **twitch:stream**: [*twitch*](## "netrc machine")
- **twitch:videos**: [*twitch*](## "netrc machine")
- **twitch:videos:clips**: [*twitch*](## "netrc machine")
- **twitch:videos:collections**: [*twitch*](## "netrc machine")
- **twitch:vod**: [*twitch*](## "netrc machine")
- **TwitchCollection**: [*twitch*](## "netrc machine")
- **TwitchVideos**: [*twitch*](## "netrc machine")
- **TwitchVideosClips**: [*twitch*](## "netrc machine")
- **TwitchVideosCollections**: [*twitch*](## "netrc machine")
- **twitter**: [*twitter*](## "netrc machine")
- **twitter:amplify**: [*twitter*](## "netrc machine")
- **twitter:broadcast**: [*twitter*](## "netrc machine")

@@ -1600,7 +1604,8 @@ The only reliable way to check if a site is supported is to try it.
- **Varzesh3**: (**Currently broken**)
- **Vbox7**
- **Veo**
- **Vesti**: Вести.Ru (**Currently broken**)
- **Vevo**
- **VevoPlaylist**
- **VGTV**: VGTV, BTTV, FTV, Aftenposten and Aftonbladet
- **vh1.com**
- **vhx:embed**: [*vimeo*](## "netrc machine")

@@ -1746,7 +1751,6 @@ The only reliable way to check if a site is supported is to try it.
- **wykop:dig:comment**
- **wykop:post**
- **wykop:post:comment**
- **Xanimu**
- **XboxClips**
- **XHamster**
- **XHamsterEmbed**
@@ -52,6 +52,33 @@ def skip_handlers_if(request, handler):
        pytest.skip(marker.args[1] if len(marker.args) > 1 else '')


@pytest.fixture(autouse=True)
def handler_flaky(request, handler):
    """Mark a certain handler as being flaky.

    This will skip the test if pytest is run with `--disallow-flaky`.

    usage:
        pytest.mark.handler_flaky('my_handler', os.name != 'nt', reason='reason')
    """
    for marker in request.node.iter_markers(handler_flaky.__name__):
        if (
            marker.args[0] == handler.RH_KEY
            and (not marker.args[1:] or any(marker.args[1:]))
            and request.config.getoption('disallow_flaky')
        ):
            reason = marker.kwargs.get('reason')
            pytest.skip(f'flaky: {reason}' if reason else 'flaky')


def pytest_addoption(parser, pluginmanager):
    parser.addoption(
        '--disallow-flaky',
        action='store_true',
        help='disallow flaky tests from running.',
    )


def pytest_configure(config):
    config.addinivalue_line(
        'markers', 'skip_handler(handler): skip test for the given handler',

@@ -62,3 +89,6 @@ def pytest_configure(config):
    config.addinivalue_line(
        'markers', 'skip_handlers_if(handler): skip test for handlers when the condition is true',
    )
    config.addinivalue_line(
        'markers', 'handler_flaky(handler): mark handler as flaky if condition is true',
    )
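How the new fixture and option work together, as a sketch mirroring the marks applied in the test modules below:

# In a test module: mark a handler as flaky, optionally only under a condition
import os
import pytest

@pytest.mark.handler_flaky('CurlCFFI', os.name == 'nt', reason='segfaults')
def test_something(handler):
    ...

# Running `pytest --disallow-flaky` skips the marked test instead of executing it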
@@ -176,7 +176,7 @@ def _iter_differences(got, expected, field):
            yield field, f'expected length of {len(expected)}, got {len(got)}'
            return

        for index, (got_val, expected_val) in enumerate(zip(got, expected)):
        for index, (got_val, expected_val) in enumerate(zip(got, expected, strict=True)):
            field_name = str(index) if field is None else f'{field}.{index}'
            yield from _iter_differences(got_val, expected_val, field_name)
        return
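The strict=True changes in this and the following test files rely on Python 3.10+ zip() semantics, matching the requires-python bump above: a silent truncation becomes a loud failure when the iterables differ in length.

# Minimal illustration of zip(..., strict=True)
try:
    list(zip([1, 2, 3], ['a', 'b'], strict=True))
except ValueError as e:
    print(e)  # zip() argument 2 is shorter than argument 1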
@@ -36,7 +36,6 @@
    "verbose": true,
    "writedescription": false,
    "writeinfojson": true,
    "writeannotations": false,
    "writelink": false,
    "writeurllink": false,
    "writewebloclink": false,
@@ -1945,7 +1945,7 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
        server_thread.daemon = True
        server_thread.start()

        (content, urlh) = self.ie._download_webpage_handle(
        content, _ = self.ie._download_webpage_handle(
            f'http://127.0.0.1:{port}/teapot', None,
            expected_status=TEAPOT_RESPONSE_STATUS)
        self.assertEqual(content, TEAPOT_RESPONSE_BODY)
@@ -17,7 +17,6 @@ import json

from test.helper import FakeYDL, assertRegexpMatches, try_rm
from yt_dlp import YoutubeDL
from yt_dlp.extractor import YoutubeIE
from yt_dlp.extractor.common import InfoExtractor
from yt_dlp.postprocessor.common import PostProcessor
from yt_dlp.utils import (

@@ -336,99 +335,6 @@ class TestFormatSelection(unittest.TestCase):
        ydl = YDL({'format': '[format_id!*=-]'})
        self.assertRaises(ExtractorError, ydl.process_ie_result, info_dict.copy())

    def test_youtube_format_selection(self):
        # FIXME: Rewrite in accordance with the new format sorting options
        return

        order = [
            '38', '37', '46', '22', '45', '35', '44', '18', '34', '43', '6', '5', '17', '36', '13',
            # Apple HTTP Live Streaming
            '96', '95', '94', '93', '92', '132', '151',
            # 3D
            '85', '84', '102', '83', '101', '82', '100',
            # Dash video
            '137', '248', '136', '247', '135', '246',
            '245', '244', '134', '243', '133', '242', '160',
            # Dash audio
            '141', '172', '140', '171', '139',
        ]

        def format_info(f_id):
            info = YoutubeIE._formats[f_id].copy()

            # XXX: In real cases InfoExtractor._parse_mpd_formats() fills up 'acodec'
            # and 'vcodec', while in tests such information is incomplete since
            # commit a6c2c24479e5f4827ceb06f64d855329c0a6f593
            # test_YoutubeDL.test_youtube_format_selection is broken without
            # this fix
            if 'acodec' in info and 'vcodec' not in info:
                info['vcodec'] = 'none'
            elif 'vcodec' in info and 'acodec' not in info:
                info['acodec'] = 'none'

            info['format_id'] = f_id
            info['url'] = 'url:' + f_id
            return info
        formats_order = [format_info(f_id) for f_id in order]

        info_dict = _make_result(list(formats_order), extractor='youtube')
        ydl = YDL({'format': 'bestvideo+bestaudio'})
        ydl.sort_formats(info_dict)
        ydl.process_ie_result(info_dict)
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['format_id'], '248+172')
        self.assertEqual(downloaded['ext'], 'mp4')

        info_dict = _make_result(list(formats_order), extractor='youtube')
        ydl = YDL({'format': 'bestvideo[height>=999999]+bestaudio/best'})
        ydl.sort_formats(info_dict)
        ydl.process_ie_result(info_dict)
        downloaded = ydl.downloaded_info_dicts[0]
        self.assertEqual(downloaded['format_id'], '38')

        info_dict = _make_result(list(formats_order), extractor='youtube')
        ydl = YDL({'format': 'bestvideo/best,bestaudio'})
        ydl.sort_formats(info_dict)
        ydl.process_ie_result(info_dict)
        downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
        self.assertEqual(downloaded_ids, ['137', '141'])

        info_dict = _make_result(list(formats_order), extractor='youtube')
        ydl = YDL({'format': '(bestvideo[ext=mp4],bestvideo[ext=webm])+bestaudio'})
        ydl.sort_formats(info_dict)
        ydl.process_ie_result(info_dict)
        downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
        self.assertEqual(downloaded_ids, ['137+141', '248+141'])

        info_dict = _make_result(list(formats_order), extractor='youtube')
        ydl = YDL({'format': '(bestvideo[ext=mp4],bestvideo[ext=webm])[height<=720]+bestaudio'})
        ydl.sort_formats(info_dict)
        ydl.process_ie_result(info_dict)
        downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
        self.assertEqual(downloaded_ids, ['136+141', '247+141'])

        info_dict = _make_result(list(formats_order), extractor='youtube')
        ydl = YDL({'format': '(bestvideo[ext=none]/bestvideo[ext=webm])+bestaudio'})
        ydl.sort_formats(info_dict)
        ydl.process_ie_result(info_dict)
        downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
        self.assertEqual(downloaded_ids, ['248+141'])

        for f1, f2 in zip(formats_order, formats_order[1:]):
            info_dict = _make_result([f1, f2], extractor='youtube')
            ydl = YDL({'format': 'best/bestvideo'})
            ydl.sort_formats(info_dict)
            ydl.process_ie_result(info_dict)
            downloaded = ydl.downloaded_info_dicts[0]
            self.assertEqual(downloaded['format_id'], f1['format_id'])

            info_dict = _make_result([f2, f1], extractor='youtube')
            ydl = YDL({'format': 'best/bestvideo'})
            ydl.sort_formats(info_dict)
            ydl.process_ie_result(info_dict)
            downloaded = ydl.downloaded_info_dicts[0]
            self.assertEqual(downloaded['format_id'], f1['format_id'])

    def test_audio_only_extractor_format_selection(self):
        # For extractors with incomplete formats (all formats are audio-only or
        # video-only) best and worst should fallback to corresponding best/worst

@@ -749,7 +655,7 @@ class TestYoutubeDL(unittest.TestCase):

        if not isinstance(expected, (list, tuple)):
            expected = (expected, expected)
        for (name, got), expect in zip((('outtmpl', out), ('filename', fname)), expected):
        for (name, got), expect in zip((('outtmpl', out), ('filename', fname)), expected, strict=True):
            if callable(expect):
                self.assertTrue(expect(got), f'Wrong {name} from {tmpl}')
            elif expect is not None:

@@ -1147,7 +1053,7 @@ class TestYoutubeDL(unittest.TestCase):
            entries = func(evaluated)
            results = [(v['playlist_autonumber'] - 1, (int(v['id']), v['playlist_index']))
                       for v in get_downloaded_info_dicts(params, entries)]
            self.assertEqual(results, list(enumerate(zip(expected_ids, expected_ids))), f'Entries of {name} for {params}')
            self.assertEqual(results, list(enumerate(zip(expected_ids, expected_ids, strict=True))), f'Entries of {name} for {params}')
            self.assertEqual(sorted(evaluated), expected_eval, f'Evaluation of {name} for {params}')

        test_selection({}, INDICES)
@@ -1,6 +1,7 @@
#!/usr/bin/env python3

# Allow direct execution
import datetime as dt
import os
import sys
import unittest

@@ -12,7 +13,7 @@ import struct

from yt_dlp import compat
from yt_dlp.compat import urllib  # isort: split
from yt_dlp.compat import compat_etree_fromstring, compat_expanduser
from yt_dlp.compat import compat_etree_fromstring, compat_expanduser, compat_datetime_from_timestamp
from yt_dlp.compat.urllib.request import getproxies

@@ -59,6 +60,45 @@ class TestCompat(unittest.TestCase):
    def test_struct_unpack(self):
        self.assertEqual(struct.unpack('!B', b'\x00'), (0,))

    def test_compat_datetime_from_timestamp(self):
        self.assertEqual(
            compat_datetime_from_timestamp(0),
            dt.datetime(1970, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc))
        self.assertEqual(
            compat_datetime_from_timestamp(1),
            dt.datetime(1970, 1, 1, 0, 0, 1, tzinfo=dt.timezone.utc))
        self.assertEqual(
            compat_datetime_from_timestamp(3600),
            dt.datetime(1970, 1, 1, 1, 0, 0, tzinfo=dt.timezone.utc))

        self.assertEqual(
            compat_datetime_from_timestamp(-1),
            dt.datetime(1969, 12, 31, 23, 59, 59, tzinfo=dt.timezone.utc))
        self.assertEqual(
            compat_datetime_from_timestamp(-86400),
            dt.datetime(1969, 12, 31, 0, 0, 0, tzinfo=dt.timezone.utc))

        self.assertEqual(
            compat_datetime_from_timestamp(0.5),
            dt.datetime(1970, 1, 1, 0, 0, 0, 500000, tzinfo=dt.timezone.utc))
        self.assertEqual(
            compat_datetime_from_timestamp(1.000001),
            dt.datetime(1970, 1, 1, 0, 0, 1, 1, tzinfo=dt.timezone.utc))
        self.assertEqual(
            compat_datetime_from_timestamp(-1.25),
            dt.datetime(1969, 12, 31, 23, 59, 58, 750000, tzinfo=dt.timezone.utc))

        self.assertEqual(
            compat_datetime_from_timestamp(-1577923200),
            dt.datetime(1920, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc))
        self.assertEqual(
            compat_datetime_from_timestamp(4102444800),
            dt.datetime(2100, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc))

        self.assertEqual(
            compat_datetime_from_timestamp(173568960000),
            dt.datetime(7470, 3, 8, 0, 0, 0, tzinfo=dt.timezone.utc))


if __name__ == '__main__':
    unittest.main()
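A minimal sketch of the semantics the new tests imply for compat_datetime_from_timestamp -- not the actual yt_dlp.compat implementation: an aware-UTC datetime built as epoch plus timedelta, which also survives timestamps outside the platform's time_t range.

import datetime as dt

def datetime_from_timestamp_sketch(timestamp):
    # timedelta handles floats, negatives and very large values uniformly
    return dt.datetime(1970, 1, 1, tzinfo=dt.timezone.utc) + dt.timedelta(seconds=timestamp)

assert datetime_from_timestamp_sketch(-1.25) == dt.datetime(1969, 12, 31, 23, 59, 58, 750000, tzinfo=dt.timezone.utc)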
@@ -247,6 +247,7 @@ def ctx(request):

@pytest.mark.parametrize(
    'handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
@pytest.mark.handler_flaky('CurlCFFI', reason='segfaults')
@pytest.mark.parametrize('ctx', ['http'], indirect=True)  # pure http proxy can only support http
class TestHTTPProxy:
    def test_http_no_auth(self, handler, ctx):

@@ -315,6 +316,7 @@ class TestHTTPProxy:
    ('Requests', 'https'),
    ('CurlCFFI', 'https'),
], indirect=True)
@pytest.mark.handler_flaky('CurlCFFI', reason='segfaults')
class TestHTTPConnectProxy:
    def test_http_connect_no_auth(self, handler, ctx):
        with ctx.http_server(HTTPConnectProxyHandler) as server_address:
60 test/test_jsc/conftest.py (new file)

@@ -0,0 +1,60 @@
import pathlib
import re

import pytest

import yt_dlp.globals
from yt_dlp import YoutubeDL
from yt_dlp.extractor.common import InfoExtractor


_TESTDATA_PATH = pathlib.Path(__file__).parent.parent / 'testdata/sigs'
_player_re = re.compile(r'^.+/player/(?P<id>[a-zA-Z0-9_/.-]+)\.js$')
_player_id_trans = str.maketrans(dict.fromkeys('/.-', '_'))


@pytest.fixture
def ie() -> InfoExtractor:
    runtime_names = yt_dlp.globals.supported_js_runtimes.value
    ydl = YoutubeDL({'js_runtimes': {key: {} for key in runtime_names}})
    ie = ydl.get_info_extractor('Youtube')

    def _load_player(video_id, player_url, fatal=True):
        match = _player_re.match(player_url)
        test_id = match.group('id').translate(_player_id_trans)
        cached_file = _TESTDATA_PATH / f'player-{test_id}.js'

        if cached_file.exists():
            return cached_file.read_text()

        if code := ie._download_webpage(player_url, video_id, fatal=fatal):
            _TESTDATA_PATH.mkdir(exist_ok=True, parents=True)
            cached_file.write_text(code)
            return code

        return None

    ie._load_player = _load_player
    return ie


class MockLogger:
    def trace(self, message: str):
        print(f'trace: {message}')

    def debug(self, message: str, *, once=False):
        print(f'debug: {message}')

    def info(self, message: str):
        print(f'info: {message}')

    def warning(self, message: str, *, once=False):
        print(f'warning: {message}')

    def error(self, message: str):
        print(f'error: {message}')


@pytest.fixture
def logger():
    return MockLogger()
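How the fixture's cache naming behaves on a real player URL (illustrative):

import re

_player_re = re.compile(r'^.+/player/(?P<id>[a-zA-Z0-9_/.-]+)\.js$')
_player_id_trans = str.maketrans(dict.fromkeys('/.-', '_'))

url = 'https://www.youtube.com/s/player/3d3ba064/player_ias_tce.vflset/en_US/base.js'
test_id = _player_re.match(url).group('id').translate(_player_id_trans)
print(test_id)  # 3d3ba064_player_ias_tce_vflset_en_US_base -> cached as player-<test_id>.js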
128 test/test_jsc/test_ejs_integration.py (new file)

@@ -0,0 +1,128 @@
from __future__ import annotations

import dataclasses
import enum
import importlib.util
import json

import pytest

from yt_dlp.extractor.youtube.jsc.provider import (
    JsChallengeRequest,
    JsChallengeType,
    JsChallengeProviderResponse,
    JsChallengeResponse,
    NChallengeInput,
    NChallengeOutput,
    SigChallengeInput,
    SigChallengeOutput,
)
from yt_dlp.extractor.youtube.jsc._builtin.bun import BunJCP
from yt_dlp.extractor.youtube.jsc._builtin.deno import DenoJCP
from yt_dlp.extractor.youtube.jsc._builtin.node import NodeJCP
from yt_dlp.extractor.youtube.jsc._builtin.quickjs import QuickJSJCP


_has_ejs = bool(importlib.util.find_spec('yt_dlp_ejs'))
pytestmark = pytest.mark.skipif(not _has_ejs, reason='yt-dlp-ejs not available')


class Variant(enum.Enum):
    main = 'player_ias.vflset/en_US/base.js'
    tcc = 'player_ias_tcc.vflset/en_US/base.js'
    tce = 'player_ias_tce.vflset/en_US/base.js'
    es5 = 'player_es5.vflset/en_US/base.js'
    es6 = 'player_es6.vflset/en_US/base.js'
    tv = 'tv-player-ias.vflset/tv-player-ias.js'
    tv_es6 = 'tv-player-es6.vflset/tv-player-es6.js'
    phone = 'player-plasma-ias-phone-en_US.vflset/base.js'
    tablet = 'player-plasma-ias-tablet-en_US.vflset/base.js'


@dataclasses.dataclass
class Challenge:
    player: str
    variant: Variant
    type: JsChallengeType
    values: dict[str, str] = dataclasses.field(default_factory=dict)

    def url(self, /):
        return f'https://www.youtube.com/s/player/{self.player}/{self.variant.value}'


CHALLENGES: list[Challenge] = [
    Challenge('3d3ba064', Variant.tce, JsChallengeType.N, {
        'ZdZIqFPQK-Ty8wId': 'qmtUsIz04xxiNW',
        '4GMrWHyKI5cEvhDO': 'N9gmEX7YhKTSmw',
    }),
    Challenge('3d3ba064', Variant.tce, JsChallengeType.SIG, {
        'gN7a-hudCuAuPH6fByOk1_GNXN0yNMHShjZXS2VOgsEItAJz0tipeavEOmNdYN-wUtcEqD3bCXjc0iyKfAyZxCBGgIARwsSdQfJ2CJtt':
            'ttJC2JfQdSswRAIgGBCxZyAfKyi0cjXCb3gqEctUw-NYdNmOEvaepit0zJAtIEsgOV2SXZjhSHMNy0NXNG_1kNyBf6HPuAuCduh-a7O',
    }),
    Challenge('5ec65609', Variant.tce, JsChallengeType.N, {
        '0eRGgQWJGfT5rFHFj': '4SvMpDQH-vBJCw',
    }),
    Challenge('5ec65609', Variant.tce, JsChallengeType.SIG, {
        'AAJAJfQdSswRQIhAMG5SN7-cAFChdrE7tLA6grH0rTMICA1mmDc0HoXgW3CAiAQQ4=CspfaF_vt82XH5yewvqcuEkvzeTsbRuHssRMyJQ=I':
            'AJfQdSswRQIhAMG5SN7-cAFChdrE7tLA6grI0rTMICA1mmDc0HoXgW3CAiAQQ4HCspfaF_vt82XH5yewvqcuEkvzeTsbRuHssRMyJQ==',
    }),
    Challenge('6742b2b9', Variant.tce, JsChallengeType.N, {
        '_HPB-7GFg1VTkn9u': 'qUAsPryAO_ByYg',
        'K1t_fcB6phzuq2SF': 'Y7PcOt3VE62mog',
    }),
    Challenge('6742b2b9', Variant.tce, JsChallengeType.SIG, {
        'MMGZJMUucirzS_SnrSPYsc85CJNnTUi6GgR5NKn-znQEICACojE8MHS6S7uYq4TGjQX_D4aPk99hNU6wbTvorvVVMgIARwsSdQfJAA':
            'AJfQdSswRAIgMVVvrovTbw6UNh99kPa4D_XQjGT4qYu7S6SHM8EjoCACIEQnz-nKN5RgG6iUTnNJC58csYPSrnS_SzricuUMJZGM',
    }),
    Challenge('2b83d2e0', Variant.main, JsChallengeType.N, {
        '0eRGgQWJGfT5rFHFj': 'euHbygrCMLksxd',
    }),
    Challenge('2b83d2e0', Variant.main, JsChallengeType.SIG, {
        'MMGZJMUucirzS_SnrSPYsc85CJNnTUi6GgR5NKn-znQEICACojE8MHS6S7uYq4TGjQX_D4aPk99hNU6wbTvorvVVMgIARwsSdQfJA':
            '-MGZJMUucirzS_SnrSPYsc85CJNnTUi6GgR5NKnMznQEICACojE8MHS6S7uYq4TGjQX_D4aPk99hNU6wbTvorvVVMgIARwsSdQfJ',
    }),
    Challenge('638ec5c6', Variant.main, JsChallengeType.N, {
        'ZdZIqFPQK-Ty8wId': '1qov8-KM-yH',
    }),
    Challenge('638ec5c6', Variant.main, JsChallengeType.SIG, {
        'gN7a-hudCuAuPH6fByOk1_GNXN0yNMHShjZXS2VOgsEItAJz0tipeavEOmNdYN-wUtcEqD3bCXjc0iyKfAyZxCBGgIARwsSdQfJ2CJtt':
            'MhudCuAuP-6fByOk1_GNXN7gNHHShjyXS2VOgsEItAJz0tipeav0OmNdYN-wUtcEqD3bCXjc0iyKfAyZxCBGgIARwsSdQfJ2CJtt',
    }),
]

requests: list[JsChallengeRequest] = []
responses: list[JsChallengeProviderResponse] = []
for test in CHALLENGES:
    input_type, output_type = {
        JsChallengeType.N: (NChallengeInput, NChallengeOutput),
        JsChallengeType.SIG: (SigChallengeInput, SigChallengeOutput),
    }[test.type]

    request = JsChallengeRequest(test.type, input_type(test.url(), list(test.values.keys())), test.player)
    requests.append(request)
    responses.append(JsChallengeProviderResponse(request, JsChallengeResponse(test.type, output_type(test.values))))


@pytest.fixture(params=[BunJCP, DenoJCP, NodeJCP, QuickJSJCP])
def jcp(request, ie, logger):
    obj = request.param(ie, logger, None)
    if not obj.is_available():
        pytest.skip(f'{obj.PROVIDER_NAME} is not available')
    obj.is_dev = True
    return obj


@pytest.mark.download
def test_bulk_requests(jcp):
    assert list(jcp.bulk_solve(requests)) == responses


@pytest.mark.download
def test_using_cached_player(jcp):
    first_player_requests = requests[:3]
    player = jcp._get_player(first_player_requests[0].video_id, first_player_requests[0].input.player_url)
    initial = json.loads(jcp._run_js_runtime(jcp._construct_stdin(player, False, first_player_requests)))
    preprocessed = initial.pop('preprocessed_player')
    result = json.loads(jcp._run_js_runtime(jcp._construct_stdin(preprocessed, True, first_player_requests)))

    assert initial == result
194 test/test_jsc/test_provider.py (new file)

@@ -0,0 +1,194 @@
import pytest

from yt_dlp.extractor.youtube.jsc.provider import (
    JsChallengeProvider,
    JsChallengeRequest,
    JsChallengeProviderResponse,
    JsChallengeProviderRejectedRequest,
    JsChallengeType,
    JsChallengeResponse,
    NChallengeOutput,
    NChallengeInput,
    JsChallengeProviderError,
    register_provider,
    register_preference,
)
from yt_dlp.extractor.youtube.pot._provider import IEContentProvider
from yt_dlp.utils import ExtractorError
from yt_dlp.extractor.youtube.jsc._registry import _jsc_preferences, _jsc_providers


class ExampleJCP(JsChallengeProvider):
    PROVIDER_NAME = 'example-provider'
    PROVIDER_VERSION = '0.0.1'
    BUG_REPORT_LOCATION = 'https://example.com/issues'

    _SUPPORTED_TYPES = [JsChallengeType.N]

    def is_available(self) -> bool:
        return True

    def _real_bulk_solve(self, requests):
        for request in requests:
            results = dict.fromkeys(request.input.challenges, 'example-solution')
            response = JsChallengeResponse(
                type=request.type,
                output=NChallengeOutput(results=results))
            yield JsChallengeProviderResponse(request=request, response=response)


PLAYER_URL = 'https://example.com/player.js'


class TestJsChallengeProvider:
    # note: some tests are covered in TestPoTokenProvider, which shares the same base class
    def test_base_type(self):
        assert issubclass(JsChallengeProvider, IEContentProvider)

    def test_create_provider_missing_bulk_solve_method(self, ie, logger):
        class MissingMethodsJCP(JsChallengeProvider):
            def is_available(self) -> bool:
                return True

        with pytest.raises(TypeError, match='bulk_solve'):
            MissingMethodsJCP(ie=ie, logger=logger, settings={})

    def test_create_provider_missing_available_method(self, ie, logger):
        class MissingMethodsJCP(JsChallengeProvider):
            def _real_bulk_solve(self, requests):
                raise JsChallengeProviderRejectedRequest('Not implemented')

        with pytest.raises(TypeError, match='is_available'):
            MissingMethodsJCP(ie=ie, logger=logger, settings={})

    def test_barebones_provider(self, ie, logger):
        class BarebonesProviderJCP(JsChallengeProvider):
            def is_available(self) -> bool:
                return True

            def _real_bulk_solve(self, requests):
                raise JsChallengeProviderRejectedRequest('Not implemented')

        provider = BarebonesProviderJCP(ie=ie, logger=logger, settings={})
        assert provider.PROVIDER_NAME == 'BarebonesProvider'
        assert provider.PROVIDER_KEY == 'BarebonesProvider'
        assert provider.PROVIDER_VERSION == '0.0.0'
        assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at (developer has not provided a bug report location) .'

    def test_example_provider_success(self, ie, logger):
        provider = ExampleJCP(ie=ie, logger=logger, settings={})

        request = JsChallengeRequest(
            type=JsChallengeType.N,
            input=NChallengeInput(player_url=PLAYER_URL, challenges=['example-challenge']))

        request_two = JsChallengeRequest(
            type=JsChallengeType.N,
            input=NChallengeInput(player_url=PLAYER_URL, challenges=['example-challenge-2']))

        responses = list(provider.bulk_solve([request, request_two]))
        assert len(responses) == 2
        assert all(isinstance(r, JsChallengeProviderResponse) for r in responses)
        assert responses == [
            JsChallengeProviderResponse(
                request=request,
                response=JsChallengeResponse(
                    type=JsChallengeType.N,
                    output=NChallengeOutput(results={'example-challenge': 'example-solution'}),
                ),
            ),
            JsChallengeProviderResponse(
                request=request_two,
                response=JsChallengeResponse(
                    type=JsChallengeType.N,
                    output=NChallengeOutput(results={'example-challenge-2': 'example-solution'}),
                ),
            ),
        ]

    def test_provider_unsupported_challenge_type(self, ie, logger):
        provider = ExampleJCP(ie=ie, logger=logger, settings={})
        request_supported = JsChallengeRequest(
            type=JsChallengeType.N,
            input=NChallengeInput(player_url=PLAYER_URL, challenges=['example-challenge']))
        request_unsupported = JsChallengeRequest(
            type=JsChallengeType.SIG,
            input=NChallengeInput(player_url=PLAYER_URL, challenges=['example-challenge']))
        responses = list(provider.bulk_solve([request_supported, request_unsupported, request_supported]))
        assert len(responses) == 3
        # Requests are validated first before continuing to _real_bulk_solve
        assert isinstance(responses[0], JsChallengeProviderResponse)
        assert isinstance(responses[0].error, JsChallengeProviderRejectedRequest)
        assert responses[0].request is request_unsupported
        assert str(responses[0].error) == 'JS Challenge type "JsChallengeType.SIG" is not supported by example-provider'

        assert responses[1:] == [
            JsChallengeProviderResponse(
                request=request_supported,
                response=JsChallengeResponse(
                    type=JsChallengeType.N,
                    output=NChallengeOutput(results={'example-challenge': 'example-solution'}),
                ),
            ),
            JsChallengeProviderResponse(
                request=request_supported,
                response=JsChallengeResponse(
                    type=JsChallengeType.N,
                    output=NChallengeOutput(results={'example-challenge': 'example-solution'}),
                ),
            ),
        ]

    def test_provider_get_player(self, ie, logger):
        ie._load_player = lambda video_id, player_url, fatal: (video_id, player_url, fatal)
        provider = ExampleJCP(ie=ie, logger=logger, settings={})
        assert provider._get_player('video123', PLAYER_URL) == ('video123', PLAYER_URL, True)

    def test_provider_get_player_error(self, ie, logger):
        def raise_error(video_id, player_url, fatal):
            raise ExtractorError('Failed to load player')

        ie._load_player = raise_error
        provider = ExampleJCP(ie=ie, logger=logger, settings={})
        with pytest.raises(JsChallengeProviderError, match='Failed to load player for JS challenge'):
            provider._get_player('video123', PLAYER_URL)

    def test_require_class_end_with_suffix(self, ie, logger):
        class InvalidSuffix(JsChallengeProvider):
            PROVIDER_NAME = 'invalid-suffix'

            def _real_bulk_solve(self, requests):
                raise JsChallengeProviderRejectedRequest('Not implemented')

            def is_available(self) -> bool:
                return True

        provider = InvalidSuffix(ie=ie, logger=logger, settings={})

        with pytest.raises(AssertionError):
            provider.PROVIDER_KEY  # noqa: B018


def test_register_provider(ie):

    @register_provider
    class UnavailableProviderJCP(JsChallengeProvider):
        def is_available(self) -> bool:
            return False

        def _real_bulk_solve(self, requests):
            raise JsChallengeProviderRejectedRequest('Not implemented')

    assert _jsc_providers.value.get('UnavailableProvider') == UnavailableProviderJCP
    _jsc_providers.value.pop('UnavailableProvider')


def test_register_preference(ie):
    before = len(_jsc_preferences.value)

    @register_preference(ExampleJCP)
    def unavailable_preference(*args, **kwargs):
        return 1

    assert len(_jsc_preferences.value) == before + 1
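Putting the registration pieces exercised above together, a third-party provider could look like this sketch (the "JCP" class-name suffix is required for PROVIDER_KEY derivation, as test_require_class_end_with_suffix checks; the preference signature mirrors the one registered in the tests):

from yt_dlp.extractor.youtube.jsc.provider import (
    JsChallengeProvider,
    JsChallengeProviderRejectedRequest,
    register_preference,
    register_provider,
)

@register_provider
class MyBackendJCP(JsChallengeProvider):  # hypothetical example provider
    PROVIDER_VERSION = '0.1.0'

    def is_available(self) -> bool:
        return True

    def _real_bulk_solve(self, requests):
        raise JsChallengeProviderRejectedRequest('example only')

@register_preference(MyBackendJCP)
def my_backend_preference(*args, **kwargs):
    return 1  # same shape as the preference registered in the tests above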
@@ -3,6 +3,7 @@
# Allow direct execution
import os
import sys
from unittest.mock import MagicMock

import pytest

@@ -311,6 +312,7 @@ class TestRequestHandlerBase:


@pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
@pytest.mark.handler_flaky('CurlCFFI', os.name == 'nt', reason='segfaults')
class TestHTTPRequestHandler(TestRequestHandlerBase):

    def test_verify_cert(self, handler):

@@ -614,8 +616,11 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
    @pytest.mark.skip_handler('CurlCFFI', 'not supported by curl-cffi')
    def test_gzip_trailing_garbage(self, handler):
        with handler() as rh:
            data = validate_and_send(rh, Request(f'http://localhost:{self.http_port}/trailing_garbage')).read().decode()
            res = validate_and_send(rh, Request(f'http://localhost:{self.http_port}/trailing_garbage'))
            data = res.read().decode()
            assert data == '<html><video src="/vid.mp4" /></html>'
            # Should auto-close and mark the response adaptor as closed
            assert res.closed

    @pytest.mark.skip_handler('CurlCFFI', 'not applicable to curl-cffi')
    @pytest.mark.skipif(not brotli, reason='brotli support is not installed')

@@ -627,6 +632,8 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
                headers={'ytdl-encoding': 'br'}))
            assert res.headers.get('Content-Encoding') == 'br'
            assert res.read() == b'<html><video src="/vid.mp4" /></html>'
            # Should auto-close and mark the response adaptor as closed
            assert res.closed

    def test_deflate(self, handler):
        with handler() as rh:

@@ -636,6 +643,8 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
                headers={'ytdl-encoding': 'deflate'}))
            assert res.headers.get('Content-Encoding') == 'deflate'
            assert res.read() == b'<html><video src="/vid.mp4" /></html>'
            # Should auto-close and mark the response adaptor as closed
            assert res.closed

    def test_gzip(self, handler):
        with handler() as rh:

@@ -645,6 +654,8 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
                headers={'ytdl-encoding': 'gzip'}))
            assert res.headers.get('Content-Encoding') == 'gzip'
            assert res.read() == b'<html><video src="/vid.mp4" /></html>'
            # Should auto-close and mark the response adaptor as closed
            assert res.closed

    def test_multiple_encodings(self, handler):
        with handler() as rh:

@@ -655,6 +666,8 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
                headers={'ytdl-encoding': pair}))
            assert res.headers.get('Content-Encoding') == pair
            assert res.read() == b'<html><video src="/vid.mp4" /></html>'
            # Should auto-close and mark the response adaptor as closed
            assert res.closed

    @pytest.mark.skip_handler('CurlCFFI', 'not supported by curl-cffi')
    def test_unsupported_encoding(self, handler):

@@ -665,6 +678,8 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
                headers={'ytdl-encoding': 'unsupported', 'Accept-Encoding': '*'}))
            assert res.headers.get('Content-Encoding') == 'unsupported'
            assert res.read() == b'raw'
            # Should auto-close and mark the response adaptor as closed
            assert res.closed

    def test_read(self, handler):
        with handler() as rh:

@@ -672,9 +687,13 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
                rh, Request(f'http://127.0.0.1:{self.http_port}/headers'))
            assert res.readable()
            assert res.read(1) == b'H'
            # Ensure we don't close the adaptor yet
            assert not res.closed
            assert res.read(3) == b'ost'
            assert res.read().decode().endswith('\n\n')
            assert res.read() == b''
            # Should auto-close and mark the response adaptor as closed
            assert res.closed

    def test_request_disable_proxy(self, handler):
        for proxy_proto in handler._SUPPORTED_PROXY_SCHEMES or ['http']:

@@ -736,8 +755,20 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
            assert res.read(0) == b''
            assert res.read() == b'<video src="/vid.mp4" /></html>'

    def test_partial_read_greater_than_response_then_full_read(self, handler):
        with handler() as rh:
            for encoding in ('', 'gzip', 'deflate'):
                res = validate_and_send(rh, Request(
                    f'http://127.0.0.1:{self.http_port}/content-encoding',
                    headers={'ytdl-encoding': encoding}))
                assert res.headers.get('Content-Encoding') == encoding
                assert res.read(512) == b'<html><video src="/vid.mp4" /></html>'
                assert res.read(0) == b''
                assert res.read() == b''


@pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
@pytest.mark.handler_flaky('CurlCFFI', reason='segfaults')
class TestClientCertificate:
    @classmethod
    def setup_class(cls):

@@ -875,11 +906,53 @@ class TestUrllibRequestHandler(TestRequestHandlerBase):

        with handler(enable_file_urls=True) as rh:
            res = validate_and_send(rh, req)
            assert res.read() == b'foobar'
            res.close()
            assert res.read(1) == b'f'
            assert not res.fp.closed
            assert res.read() == b'oobar'
            # Should automatically close the underlying file object
            assert res.fp.closed

        os.unlink(tf.name)

    def test_data_uri_auto_close(self, handler):
        with handler() as rh:
            res = validate_and_send(rh, Request('data:text/plain,hello%20world'))
            assert res.read() == b'hello world'
            # Should automatically close the underlying file object
            assert res.fp.closed
            assert res.closed

    def test_http_response_auto_close(self, handler):
        with handler() as rh:
            res = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/gen_200'))
            assert res.read() == b'<html></html>'
            # Should automatically close the underlying file object in the HTTP Response
            assert isinstance(res.fp, http.client.HTTPResponse)
            assert res.fp.fp is None
            assert res.closed

    def test_data_uri_partial_read_then_full_read(self, handler):
        with handler() as rh:
            res = validate_and_send(rh, Request('data:text/plain,hello%20world'))
            assert res.read(6) == b'hello '
            assert res.read(0) == b''
            assert res.read() == b'world'
            # Should automatically close the underlying file object
            assert res.fp.closed
            assert res.closed

    def test_data_uri_partial_read_greater_than_response_then_full_read(self, handler):
        with handler() as rh:
            res = validate_and_send(rh, Request('data:text/plain,hello%20world'))
            assert res.read(512) == b'hello world'
            # Response and its underlying file object should already be closed now
            assert res.fp.closed
            assert res.closed
            assert res.read(0) == b''
            assert res.read() == b''
            assert res.fp.closed
            assert res.closed

    def test_http_error_returns_content(self, handler):
        # urllib HTTPError will try to close the underlying response if the reference to the HTTPError object is lost
        def get_response():

@@ -1012,8 +1085,17 @@ class TestRequestsRequestHandler(TestRequestHandlerBase):
            rh.close()
        assert called

    def test_http_response_auto_close(self, handler):
        with handler() as rh:
            res = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/gen_200'))
            assert res.read() == b'<html></html>'
            # Should automatically close the underlying file object in the HTTP Response
            assert res.fp.closed
            assert res.closed


@pytest.mark.parametrize('handler', ['CurlCFFI'], indirect=True)
@pytest.mark.handler_flaky('CurlCFFI', os.name == 'nt', reason='segfaults')
class TestCurlCFFIRequestHandler(TestRequestHandlerBase):

    @pytest.mark.parametrize('params,extensions', [

@@ -1177,6 +1259,14 @@ class TestCurlCFFIRequestHandler(TestRequestHandlerBase):
        assert res4.closed
        assert res4._buffer == b''

    def test_http_response_auto_close(self, handler):
        with handler() as rh:
            res = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/gen_200'))
            assert res.read() == b'<html></html>'
            # Should automatically close the underlying file object in the HTTP Response
            assert res.fp.closed
            assert res.closed


def run_validation(handler, error, req, **handler_kwargs):
    with handler(**handler_kwargs) as rh:

@@ -2032,6 +2122,30 @@ class TestResponse:
        assert res.info() is res.headers
        assert res.getheader('test') == res.get_header('test')

    def test_auto_close(self):
        # Should mark the response as closed if the underlying file is closed
        class AutoCloseBytesIO(io.BytesIO):
            def read(self, size=-1, /):
                data = super().read(size)
                self.close()
                return data

        fp = AutoCloseBytesIO(b'test')
        res = Response(fp, url='test://', headers={}, status=200)
        assert not res.closed
        res.read()
        assert res.closed

    def test_close(self):
        # Should not call close() on the underlying file when already closed
        fp = MagicMock()
        fp.closed = False
        res = Response(fp, url='test://', headers={}, status=200)
        res.close()
        fp.closed = True
        res.close()
        assert fp.close.call_count == 1


class TestImpersonateTarget:
    @pytest.mark.parametrize('target_str,expected', [
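The auto-close assertions added throughout this file all exercise one contract: once the wrapped file object is exhausted or closed, the response adaptor must report itself closed. A minimal sketch of that contract (illustrative, not the actual yt_dlp.networking Response):

import io

class ResponseSketch:
    def __init__(self, fp):
        self.fp = fp

    @property
    def closed(self):
        # the adaptor reports closed as soon as the underlying file is closed
        return self.fp.closed

    def read(self, size=-1):
        data = self.fp.read(size)
        if size < 0 or (not data and size != 0):
            self.fp.close()  # fully drained: release the underlying file
        return data

res = ResponseSketch(io.BytesIO(b'hello'))
assert res.read() == b'hello'
assert res.closed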
@@ -29,7 +29,7 @@ class TestOverwrites(unittest.TestCase):
            '-o', 'test.webm',
            'https://www.youtube.com/watch?v=jNQXAC9IVRw',
        ], cwd=root_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        sout, serr = outp.communicate()
        sout, _ = outp.communicate()
        self.assertTrue(b'has already been downloaded' in sout)
        # if the file has no content, it has not been redownloaded
        self.assertTrue(os.path.getsize(download_file) < 1)

@@ -41,7 +41,7 @@ class TestOverwrites(unittest.TestCase):
            '-o', 'test.webm',
            'https://www.youtube.com/watch?v=jNQXAC9IVRw',
        ], cwd=root_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        sout, serr = outp.communicate()
        sout, _ = outp.communicate()
        self.assertTrue(b'has already been downloaded' not in sout)
        # if the file has content, it has been redownloaded
        self.assertTrue(os.path.getsize(download_file) > 1)
@@ -115,7 +115,7 @@ class TestModifyChaptersPP(unittest.TestCase):
        self.assertEqual(len(ends), len(titles))
        start = 0
        chapters = []
        for e, t in zip(ends, titles):
        for e, t in zip(ends, titles, strict=True):
            chapters.append(self._chapter(start, e, t))
            start = e
        return chapters
@@ -45,3 +45,8 @@ class TestGetWebPoContentBinding:
    def test_invalid_base64(self, pot_request):
        pot_request.visitor_data = 'invalid-base64'
        assert get_webpo_content_binding(pot_request, bind_to_visitor_id=True) == (pot_request.visitor_data, ContentBindingType.VISITOR_DATA)

    def test_gvs_video_id_binding_experiment(self, pot_request):
        pot_request.context = PoTokenContext.GVS
        pot_request._gvs_bind_to_video_id = True
        assert get_webpo_content_binding(pot_request) == ('example-video-id', ContentBindingType.VIDEO_ID)
@@ -1,6 +1,6 @@
import pytest

from yt_dlp.extractor.youtube.pot._provider import IEContentProvider
from yt_dlp.extractor.youtube.pot._provider import IEContentProvider, configuration_arg
from yt_dlp.cookies import YoutubeDLCookieJar
from yt_dlp.utils.networking import HTTPHeaderDict
from yt_dlp.extractor.youtube.pot.provider import (

@@ -153,7 +153,7 @@ class TestPoTokenProvider:

        with pytest.raises(
            PoTokenProviderRejectedRequest,
            match='External requests by "example" provider do not support proxy scheme "socks4". Supported proxy '
            match=r'External requests by "example" provider do not support proxy scheme "socks4"\. Supported proxy '
                  'schemes: http, socks5h',
        ):
            provider.request_pot(pot_request)

@@ -627,3 +627,13 @@ def test_logger_log_level(logger):
    assert logger.LogLevel('debuG') == logger.LogLevel.DEBUG
    assert logger.LogLevel(10) == logger.LogLevel.DEBUG
    assert logger.LogLevel('UNKNOWN') == logger.LogLevel.INFO


def test_configuration_arg():
    config = {'abc': ['123D'], 'xyz': ['456a', '789B']}

    assert configuration_arg(config, 'abc') == ['123d']
    assert configuration_arg(config, 'abc', default=['default']) == ['123d']
    assert configuration_arg(config, 'ABC', default=['default']) == ['default']
    assert configuration_arg(config, 'abc', casesense=True) == ['123D']
    assert configuration_arg(config, 'xyz', casesense=False) == ['456a', '789b']
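A sketch consistent with the assertions above (assumed semantics, not the actual yt_dlp helper): keys are matched case-sensitively, and values are lowercased unless casesense=True.

def configuration_arg_sketch(config, key, default=None, casesense=False):
    values = config.get(key)
    if values is None:
        return default
    return values if casesense else [v.lower() for v in values]

assert configuration_arg_sketch({'abc': ['123D']}, 'abc') == ['123d']
assert configuration_arg_sketch({'abc': ['123D']}, 'ABC', default=['x']) == ['x']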
@@ -295,6 +295,7 @@ def ctx(request):
    ('Websockets', 'ws'),
    ('CurlCFFI', 'http'),
], indirect=True)
@pytest.mark.handler_flaky('CurlCFFI', reason='segfaults')
class TestSocks4Proxy:
    def test_socks4_no_auth(self, handler, ctx):
        with handler() as rh:

@@ -370,6 +371,7 @@ class TestSocks4Proxy:
    ('Websockets', 'ws'),
    ('CurlCFFI', 'http'),
], indirect=True)
@pytest.mark.handler_flaky('CurlCFFI', reason='segfaults')
class TestSocks5Proxy:

    def test_socks5_no_auth(self, handler, ctx):
@@ -417,7 +417,7 @@ class TestTraversal:

    def test_traversal_morsel(self):
        morsel = http.cookies.Morsel()
        values = dict(zip(morsel, 'abcdefghijklmnop'))
        values = dict(zip(morsel, 'abcdefghijklmnop', strict=False))
        morsel.set('item_key', 'item_value', 'coded_value')
        morsel.update(values)
        values['key'] = 'item_key'
@@ -9,7 +9,7 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))


from test.helper import FakeYDL, report_warning
from yt_dlp.update import UpdateInfo, Updater
from yt_dlp.update import UpdateInfo, Updater, UPDATE_SOURCES, _make_label


# XXX: Keep in sync with yt_dlp.update.UPDATE_SOURCES

@@ -280,6 +280,26 @@ class TestUpdate(unittest.TestCase):
        test('testing', None, current_commit='9' * 40)
        test('testing', UpdateInfo('testing', commit='9' * 40))

    def test_make_label(self):
        STABLE_REPO = UPDATE_SOURCES['stable']
        NIGHTLY_REPO = UPDATE_SOURCES['nightly']
        MASTER_REPO = UPDATE_SOURCES['master']

        for inputs, expected in [
            ([STABLE_REPO, '2025.09.02', '2025.09.02'], f'stable@2025.09.02 from {STABLE_REPO}'),
            ([NIGHTLY_REPO, '2025.09.02.123456', '2025.09.02.123456'], f'nightly@2025.09.02.123456 from {NIGHTLY_REPO}'),
            ([MASTER_REPO, '2025.09.02.987654', '2025.09.02.987654'], f'master@2025.09.02.987654 from {MASTER_REPO}'),
            (['fork/yt-dlp', 'experimental', '2025.12.31.000000'], 'fork/yt-dlp@experimental build 2025.12.31.000000'),
            (['fork/yt-dlp', '2025.09.02', '2025.09.02'], 'fork/yt-dlp@2025.09.02'),
            ([STABLE_REPO, 'experimental', '2025.12.31.000000'], f'{STABLE_REPO}@experimental build 2025.12.31.000000'),
            ([STABLE_REPO, 'experimental'], f'{STABLE_REPO}@experimental'),
            (['fork/yt-dlp', 'experimental'], 'fork/yt-dlp@experimental'),
        ]:
            result = _make_label(*inputs)
            self.assertEqual(
                result, expected,
                f'{inputs!r} returned {result!r} instead of {expected!r}')


if __name__ == '__main__':
    unittest.main()
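The labelling rule those cases encode, as a sketch of assumed semantics (not the actual yt_dlp.update._make_label): official repos are shown by channel name with a "from <repo>" suffix, unknown repos verbatim, and "build <version>" is appended only when the tag is not itself the version.

UPDATE_SOURCES = {  # assumed mapping, mirroring yt_dlp.update.UPDATE_SOURCES
    'stable': 'yt-dlp/yt-dlp',
    'nightly': 'yt-dlp/yt-dlp-nightly-builds',
    'master': 'yt-dlp/yt-dlp-master-builds',
}

def make_label_sketch(repo, tag, version=None):
    channel = next((name for name, url in UPDATE_SOURCES.items() if url == repo), None)
    if channel and tag == version:
        return f'{channel}@{tag} from {repo}'
    label = f'{repo}@{tag}'
    if version and tag != version:
        label += f' build {version}'
    return label

assert make_label_sketch('yt-dlp/yt-dlp', '2025.09.02', '2025.09.02') == 'stable@2025.09.02 from yt-dlp/yt-dlp'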
@@ -12,6 +12,7 @@ import datetime as dt
import io
import itertools
import json
import ntpath
import pickle
import subprocess
import unittest

@@ -101,11 +102,13 @@ from yt_dlp.utils import (
    remove_start,
    render_table,
    replace_extension,
    datetime_round,
    rot47,
    sanitize_filename,
    sanitize_path,
    sanitize_url,
    shell_quote,
    strftime_or_none,
    smuggle_url,
    str_to_int,
    strip_jsonp,

@@ -251,12 +254,6 @@ class TestUtil(unittest.TestCase):
        self.assertEqual(sanitize_path('abc.../def...'), 'abc..#\\def..#')
        self.assertEqual(sanitize_path('C:\\abc:%(title)s.%(ext)s'), 'C:\\abc#%(title)s.%(ext)s')

        # Check with nt._path_normpath if available
        try:
            from nt import _path_normpath as nt_path_normpath
        except ImportError:
            nt_path_normpath = None

        for test, expected in [
            ('C:\\', 'C:\\'),
            ('../abc', '..\\abc'),

@@ -274,8 +271,7 @@ class TestUtil(unittest.TestCase):
            result = sanitize_path(test)
            assert result == expected, f'{test} was incorrectly resolved'
            assert result == sanitize_path(result), f'{test} changed after sanitizing again'
            if nt_path_normpath:
                assert result == nt_path_normpath(test), f'{test} does not match nt._path_normpath'
            assert result == ntpath.normpath(test), f'{test} does not match ntpath.normpath'

    def test_sanitize_url(self):
        self.assertEqual(sanitize_url('//foo.bar'), 'http://foo.bar')

@@ -409,6 +405,25 @@ class TestUtil(unittest.TestCase):
        self.assertEqual(datetime_from_str('now+1day', precision='hour'), datetime_from_str('now+24hours', precision='auto'))
        self.assertEqual(datetime_from_str('now+23hours', precision='hour'), datetime_from_str('now+23hours', precision='auto'))

    def test_datetime_round(self):
        self.assertEqual(datetime_round(dt.datetime.strptime('1820-05-12T01:23:45Z', '%Y-%m-%dT%H:%M:%SZ')),
                         dt.datetime(1820, 5, 12, tzinfo=dt.timezone.utc))
        self.assertEqual(datetime_round(dt.datetime.strptime('1969-12-31T23:34:45Z', '%Y-%m-%dT%H:%M:%SZ'), 'hour'),
                         dt.datetime(1970, 1, 1, 0, tzinfo=dt.timezone.utc))
        self.assertEqual(datetime_round(dt.datetime.strptime('2024-12-25T01:23:45Z', '%Y-%m-%dT%H:%M:%SZ'), 'minute'),
                         dt.datetime(2024, 12, 25, 1, 24, tzinfo=dt.timezone.utc))
        self.assertEqual(datetime_round(dt.datetime.strptime('2024-12-25T01:23:45.123Z', '%Y-%m-%dT%H:%M:%S.%fZ'), 'second'),
                         dt.datetime(2024, 12, 25, 1, 23, 45, tzinfo=dt.timezone.utc))
        self.assertEqual(datetime_round(dt.datetime.strptime('2024-12-25T01:23:45.678Z', '%Y-%m-%dT%H:%M:%S.%fZ'), 'second'),
                         dt.datetime(2024, 12, 25, 1, 23, 46, tzinfo=dt.timezone.utc))

    def test_strftime_or_none(self):
        self.assertEqual(strftime_or_none(-4722192000), '18200512')
        self.assertEqual(strftime_or_none(0), '19700101')
        self.assertEqual(strftime_or_none(1735084800), '20241225')
        # Throws OverflowError
        self.assertEqual(strftime_or_none(1735084800000), None)

    def test_daterange(self):
        _20century = DateRange('19000101', '20000101')
        self.assertFalse('17890714' in _20century)

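The assertions above imply round-to-nearest-unit semantics (01:23:45 rounds up to 01:24 at minute precision). A minimal sketch of that rule — assumed, not the actual yt_dlp.utils.datetime_round, and Python's round() uses round-half-to-even at exact midpoints:

    import datetime as dt

    def round_to_unit(value, unit_seconds):
        epoch = value.replace(tzinfo=dt.timezone.utc).timestamp()
        return dt.datetime.fromtimestamp(round(epoch / unit_seconds) * unit_seconds, dt.timezone.utc)

    assert round_to_unit(dt.datetime(2024, 12, 25, 1, 23, 45), 60) \
        == dt.datetime(2024, 12, 25, 1, 24, tzinfo=dt.timezone.utc)
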
@@ -1848,7 +1863,7 @@ Line 1

        self.assertEqual(
            list(get_elements_text_and_html_by_attribute('class', 'foo bar', html)),
            list(zip(['nice', 'also nice'], self.GET_ELEMENTS_BY_CLASS_RES)))
            list(zip(['nice', 'also nice'], self.GET_ELEMENTS_BY_CLASS_RES, strict=True)))
        self.assertEqual(list(get_elements_text_and_html_by_attribute('class', 'foo', html)), [])
        self.assertEqual(list(get_elements_text_and_html_by_attribute('class', 'no-such-foo', html)), [])

@@ -22,7 +22,7 @@ class TestVerboseOutput(unittest.TestCase):
            '--username', 'johnsmith@gmail.com',
            '--password', 'my_secret_password',
        ], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        sout, serr = outp.communicate()
        _, serr = outp.communicate()
        self.assertTrue(b'--username' in serr)
        self.assertTrue(b'johnsmith' not in serr)
        self.assertTrue(b'--password' in serr)

@@ -36,7 +36,7 @@ class TestVerboseOutput(unittest.TestCase):
            '-u', 'johnsmith@gmail.com',
            '-p', 'my_secret_password',
        ], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        sout, serr = outp.communicate()
        _, serr = outp.communicate()
        self.assertTrue(b'-u' in serr)
        self.assertTrue(b'johnsmith' not in serr)
        self.assertTrue(b'-p' in serr)

@@ -50,7 +50,7 @@ class TestVerboseOutput(unittest.TestCase):
            '--username=johnsmith@gmail.com',
            '--password=my_secret_password',
        ], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        sout, serr = outp.communicate()
        _, serr = outp.communicate()
        self.assertTrue(b'--username' in serr)
        self.assertTrue(b'johnsmith' not in serr)
        self.assertTrue(b'--password' in serr)

@@ -64,7 +64,7 @@ class TestVerboseOutput(unittest.TestCase):
            '-u=johnsmith@gmail.com',
            '-p=my_secret_password',
        ], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        sout, serr = outp.communicate()
        _, serr = outp.communicate()
        self.assertTrue(b'-u' in serr)
        self.assertTrue(b'johnsmith' not in serr)
        self.assertTrue(b'-p' in serr)

|
|
@ -20,7 +20,7 @@ import random
|
|||
import ssl
|
||||
import threading
|
||||
|
||||
from yt_dlp import socks, traverse_obj
|
||||
from yt_dlp import socks
|
||||
from yt_dlp.cookies import YoutubeDLCookieJar
|
||||
from yt_dlp.dependencies import websockets
|
||||
from yt_dlp.networking import Request
|
||||
|
|
@ -32,11 +32,19 @@ from yt_dlp.networking.exceptions import (
|
|||
SSLError,
|
||||
TransportError,
|
||||
)
|
||||
from yt_dlp.utils.traversal import traverse_obj
|
||||
from yt_dlp.utils.networking import HTTPHeaderDict
|
||||
|
||||
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
||||
pytestmark = pytest.mark.handler_flaky(
|
||||
'Websockets',
|
||||
os.name == 'nt' or sys.implementation.name == 'pypy',
|
||||
reason='segfaults',
|
||||
)
|
||||
|
||||
|
||||
def websocket_handler(websocket):
|
||||
for message in websocket:
|
||||
if isinstance(message, bytes):
|
||||
|
|
|
|||
|
|
@@ -1,77 +0,0 @@
#!/usr/bin/env python3

# Allow direct execution
import os
import sys
import unittest

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))


import xml.etree.ElementTree

import yt_dlp.extractor
import yt_dlp.YoutubeDL
from test.helper import get_params, is_download_test, try_rm


class YoutubeDL(yt_dlp.YoutubeDL):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.to_stderr = self.to_screen


params = get_params({
    'writeannotations': True,
    'skip_download': True,
    'writeinfojson': False,
    'format': 'flv',
})


TEST_ID = 'gr51aVj-mLg'
ANNOTATIONS_FILE = TEST_ID + '.annotations.xml'
EXPECTED_ANNOTATIONS = ['Speech bubble', 'Note', 'Title', 'Spotlight', 'Label']


@is_download_test
class TestAnnotations(unittest.TestCase):
    def setUp(self):
        # Clear old files
        self.tearDown()

    def test_info_json(self):
        expected = list(EXPECTED_ANNOTATIONS)  # Two annotations could have the same text.
        ie = yt_dlp.extractor.YoutubeIE()
        ydl = YoutubeDL(params)
        ydl.add_info_extractor(ie)
        ydl.download([TEST_ID])
        self.assertTrue(os.path.exists(ANNOTATIONS_FILE))
        annoxml = None
        with open(ANNOTATIONS_FILE, encoding='utf-8') as annof:
            annoxml = xml.etree.ElementTree.parse(annof)
        self.assertTrue(annoxml is not None, 'Failed to parse annotations XML')
        root = annoxml.getroot()
        self.assertEqual(root.tag, 'document')
        annotationsTag = root.find('annotations')
        self.assertEqual(annotationsTag.tag, 'annotations')
        annotations = annotationsTag.findall('annotation')

        # Not all the annotations have TEXT children and the annotations are returned unsorted.
        for a in annotations:
            self.assertEqual(a.tag, 'annotation')
            if a.get('type') == 'text':
                textTag = a.find('TEXT')
                text = textTag.text
                self.assertTrue(text in expected)  # assertIn only added in python 2.7
                # remove the first occurrence, there could be more than one annotation with the same text
                expected.remove(text)
        # We should have seen (and removed) all the expected annotation texts.
        self.assertEqual(len(expected), 0, 'Not all expected annotations were found.')

    def tearDown(self):
        try_rm(ANNOTATIONS_FILE)


if __name__ == '__main__':
    unittest.main()

@@ -1,504 +0,0 @@
#!/usr/bin/env python3

# Allow direct execution
import os
import sys
import unittest

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))


import contextlib
import re
import string
import urllib.request

from test.helper import FakeYDL, is_download_test
from yt_dlp.extractor import YoutubeIE
from yt_dlp.jsinterp import JSInterpreter

_SIG_TESTS = [
    (
        'https://s.ytimg.com/yts/jsbin/html5player-vflHOr_nV.js',
        86,
        '>=<;:/.-[+*)(\'&%$#"!ZYX0VUTSRQPONMLKJIHGFEDCBA\\yxwvutsrqponmlkjihgfedcba987654321',
    ),
    (
        'https://s.ytimg.com/yts/jsbin/html5player-vfldJ8xgI.js',
        85,
        '3456789a0cdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRS[UVWXYZ!"#$%&\'()*+,-./:;<=>?@',
    ),
    (
        'https://s.ytimg.com/yts/jsbin/html5player-vfle-mVwz.js',
        90,
        ']\\[@?>=<;:/.-,+*)(\'&%$#"hZYXWVUTSRQPONMLKJIHGFEDCBAzyxwvutsrqponmlkjiagfedcb39876',
    ),
    (
        'https://s.ytimg.com/yts/jsbin/html5player-en_US-vfl0Cbn9e.js',
        84,
        'O1I3456789abcde0ghijklmnopqrstuvwxyzABCDEFGHfJKLMN2PQRSTUVW@YZ!"#$%&\'()*+,-./:;<=',
    ),
    (
        'https://s.ytimg.com/yts/jsbin/html5player-en_US-vflXGBaUN.js',
        '2ACFC7A61CA478CD21425E5A57EBD73DDC78E22A.2094302436B2D377D14A3BBA23022D023B8BC25AA',
        'A52CB8B320D22032ABB3A41D773D2B6342034902.A22E87CDD37DBE75A5E52412DC874AC16A7CFCA2',
    ),
    (
        'https://s.ytimg.com/yts/jsbin/html5player-en_US-vflBb0OQx.js',
        84,
        '123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQ0STUVWXYZ!"#$%&\'()*+,@./:;<=>',
    ),
    (
        'https://s.ytimg.com/yts/jsbin/html5player-en_US-vfl9FYC6l.js',
        83,
        '123456789abcdefghijklmnopqr0tuvwxyzABCDETGHIJKLMNOPQRS>UVWXYZ!"#$%&\'()*+,-./:;<=F',
    ),
    (
        'https://s.ytimg.com/yts/jsbin/html5player-en_US-vflCGk6yw/html5player.js',
        '4646B5181C6C3020DF1D9C7FCFEA.AD80ABF70C39BD369CCCAE780AFBB98FA6B6CB42766249D9488C288',
        '82C8849D94266724DC6B6AF89BBFA087EACCD963.B93C07FBA084ACAEFCF7C9D1FD0203C6C1815B6B',
    ),
    (
        'https://s.ytimg.com/yts/jsbin/html5player-en_US-vflKjOTVq/html5player.js',
        '312AA52209E3623129A412D56A40F11CB0AF14AE.3EE09501CB14E3BCDC3B2AE808BF3F1D14E7FBF12',
        '112AA5220913623229A412D56A40F11CB0AF14AE.3EE0950FCB14EEBCDC3B2AE808BF331D14E7FBF3',
    ),
    (
        'https://www.youtube.com/s/player/6ed0d907/player_ias.vflset/en_US/base.js',
        '2aq0aqSyOoJXtK73m-uME_jv7-pT15gOFC02RFkGMqWpzEICs69VdbwQ0LDp1v7j8xx92efCJlYFYb1sUkkBSPOlPmXgIARw8JQ0qOAOAA',
        'AOq0QJ8wRAIgXmPlOPSBkkUs1bYFYlJCfe29xx8j7v1pDL2QwbdV96sCIEzpWqMGkFR20CFOg51Tp-7vj_EMu-m37KtXJoOySqa0',
    ),
    (
        'https://www.youtube.com/s/player/3bb1f723/player_ias.vflset/en_US/base.js',
        '2aq0aqSyOoJXtK73m-uME_jv7-pT15gOFC02RFkGMqWpzEICs69VdbwQ0LDp1v7j8xx92efCJlYFYb1sUkkBSPOlPmXgIARw8JQ0qOAOAA',
        'MyOSJXtKI3m-uME_jv7-pT12gOFC02RFkGoqWpzE0Cs69VdbwQ0LDp1v7j8xx92efCJlYFYb1sUkkBSPOlPmXgIARw8JQ0qOAOAA',
    ),
    (
        'https://www.youtube.com/s/player/2f1832d2/player_ias.vflset/en_US/base.js',
        '2aq0aqSyOoJXtK73m-uME_jv7-pT15gOFC02RFkGMqWpzEICs69VdbwQ0LDp1v7j8xx92efCJlYFYb1sUkkBSPOlPmXgIARw8JQ0qOAOAA',
        '0QJ8wRAIgXmPlOPSBkkUs1bYFYlJCfe29xxAj7v1pDL0QwbdV96sCIEzpWqMGkFR20CFOg51Tp-7vj_EMu-m37KtXJ2OySqa0q',
    ),
    (
        'https://www.youtube.com/s/player/643afba4/tv-player-ias.vflset/tv-player-ias.js',
        '2aq0aqSyOoJXtK73m-uME_jv7-pT15gOFC02RFkGMqWpzEICs69VdbwQ0LDp1v7j8xx92efCJlYFYb1sUkkBSPOlPmXgIARw8JQ0qOAOAA',
        'AAOAOq0QJ8wRAIgXmPlOPSBkkUs1bYFYlJCfe29xx8j7vgpDL0QwbdV06sCIEzpWqMGkFR20CFOS21Tp-7vj_EMu-m37KtXJoOy1',
    ),
    (
        'https://www.youtube.com/s/player/363db69b/player_ias.vflset/en_US/base.js',
        '2aq0aqSyOoJXtK73m-uME_jv7-pT15gOFC02RFkGMqWpzEICs69VdbwQ0LDp1v7j8xx92efCJlYFYb1sUkkBSPOlPmXgIARw8JQ0qOAOAA',
        '0aqSyOoJXtK73m-uME_jv7-pT15gOFC02RFkGMqWpz2ICs6EVdbwQ0LDp1v7j8xx92efCJlYFYb1sUkkBSPOlPmXgIARw8JQ0qOAOAA',
    ),
    (
        'https://www.youtube.com/s/player/363db69b/player_ias_tce.vflset/en_US/base.js',
        '2aq0aqSyOoJXtK73m-uME_jv7-pT15gOFC02RFkGMqWpzEICs69VdbwQ0LDp1v7j8xx92efCJlYFYb1sUkkBSPOlPmXgIARw8JQ0qOAOAA',
        '0aqSyOoJXtK73m-uME_jv7-pT15gOFC02RFkGMqWpz2ICs6EVdbwQ0LDp1v7j8xx92efCJlYFYb1sUkkBSPOlPmXgIARw8JQ0qOAOAA',
    ),
    (
        'https://www.youtube.com/s/player/4fcd6e4a/player_ias.vflset/en_US/base.js',
        '2aq0aqSyOoJXtK73m-uME_jv7-pT15gOFC02RFkGMqWpzEICs69VdbwQ0LDp1v7j8xx92efCJlYFYb1sUkkBSPOlPmXgIARw8JQ0qOAOAA',
        'wAOAOq0QJ8ARAIgXmPlOPSBkkUs1bYFYlJCfe29xx8q7v1pDL0QwbdV96sCIEzpWqMGkFR20CFOg51Tp-7vj_EMu-m37KtXJoOySqa0',
    ),
    (
        'https://www.youtube.com/s/player/4fcd6e4a/player_ias_tce.vflset/en_US/base.js',
        '2aq0aqSyOoJXtK73m-uME_jv7-pT15gOFC02RFkGMqWpzEICs69VdbwQ0LDp1v7j8xx92efCJlYFYb1sUkkBSPOlPmXgIARw8JQ0qOAOAA',
        'wAOAOq0QJ8ARAIgXmPlOPSBkkUs1bYFYlJCfe29xx8q7v1pDL0QwbdV96sCIEzpWqMGkFR20CFOg51Tp-7vj_EMu-m37KtXJoOySqa0',
    ),
    (
        'https://www.youtube.com/s/player/20830619/player_ias.vflset/en_US/base.js',
        '2aq0aqSyOoJXtK73m-uME_jv7-pT15gOFC02RFkGMqWpzEICs69VdbwQ0LDp1v7j8xx92efCJlYFYb1sUkkBSPOlPmXgIARw8JQ0qOAOAA',
        '7AOq0QJ8wRAIgXmPlOPSBkkAs1bYFYlJCfe29xx8jOv1pDL0Q2bdV96sCIEzpWqMGkFR20CFOg51Tp-7vj_EMu-m37KtXJoOySqa0qaw',
    ),
    (
        'https://www.youtube.com/s/player/20830619/player_ias_tce.vflset/en_US/base.js',
        '2aq0aqSyOoJXtK73m-uME_jv7-pT15gOFC02RFkGMqWpzEICs69VdbwQ0LDp1v7j8xx92efCJlYFYb1sUkkBSPOlPmXgIARw8JQ0qOAOAA',
        '7AOq0QJ8wRAIgXmPlOPSBkkAs1bYFYlJCfe29xx8jOv1pDL0Q2bdV96sCIEzpWqMGkFR20CFOg51Tp-7vj_EMu-m37KtXJoOySqa0qaw',
    ),
    (
        'https://www.youtube.com/s/player/20830619/player-plasma-ias-phone-en_US.vflset/base.js',
        '2aq0aqSyOoJXtK73m-uME_jv7-pT15gOFC02RFkGMqWpzEICs69VdbwQ0LDp1v7j8xx92efCJlYFYb1sUkkBSPOlPmXgIARw8JQ0qOAOAA',
        '7AOq0QJ8wRAIgXmPlOPSBkkAs1bYFYlJCfe29xx8jOv1pDL0Q2bdV96sCIEzpWqMGkFR20CFOg51Tp-7vj_EMu-m37KtXJoOySqa0qaw',
    ),
    (
        'https://www.youtube.com/s/player/20830619/player-plasma-ias-tablet-en_US.vflset/base.js',
        '2aq0aqSyOoJXtK73m-uME_jv7-pT15gOFC02RFkGMqWpzEICs69VdbwQ0LDp1v7j8xx92efCJlYFYb1sUkkBSPOlPmXgIARw8JQ0qOAOAA',
        '7AOq0QJ8wRAIgXmPlOPSBkkAs1bYFYlJCfe29xx8jOv1pDL0Q2bdV96sCIEzpWqMGkFR20CFOg51Tp-7vj_EMu-m37KtXJoOySqa0qaw',
    ),
    (
        'https://www.youtube.com/s/player/8a8ac953/player_ias_tce.vflset/en_US/base.js',
        '2aq0aqSyOoJXtK73m-uME_jv7-pT15gOFC02RFkGMqWpzEICs69VdbwQ0LDp1v7j8xx92efCJlYFYb1sUkkBSPOlPmXgIARw8JQ0qOAOAA',
        'IAOAOq0QJ8wRAAgXmPlOPSBkkUs1bYFYlJCfe29xx8j7v1pDL0QwbdV96sCIEzpWqMGkFR20CFOg51Tp-7vj_E2u-m37KtXJoOySqa0',
    ),
    (
        'https://www.youtube.com/s/player/8a8ac953/tv-player-es6.vflset/tv-player-es6.js',
        '2aq0aqSyOoJXtK73m-uME_jv7-pT15gOFC02RFkGMqWpzEICs69VdbwQ0LDp1v7j8xx92efCJlYFYb1sUkkBSPOlPmXgIARw8JQ0qOAOAA',
        'IAOAOq0QJ8wRAAgXmPlOPSBkkUs1bYFYlJCfe29xx8j7v1pDL0QwbdV96sCIEzpWqMGkFR20CFOg51Tp-7vj_E2u-m37KtXJoOySqa0',
    ),
    (
        'https://www.youtube.com/s/player/e12fbea4/player_ias.vflset/en_US/base.js',
        'gN7a-hudCuAuPH6fByOk1_GNXN0yNMHShjZXS2VOgsEItAJz0tipeavEOmNdYN-wUtcEqD3bCXjc0iyKfAyZxCBGgIARwsSdQfJ2CJtt',
        'JC2JfQdSswRAIgGBCxZyAfKyi0cjXCb3DqEctUw-NYdNmOEvaepit0zJAtIEsgOV2SXZjhSHMNy0NXNG_1kOyBf6HPuAuCduh-a',
    ),
    (
        'https://www.youtube.com/s/player/010fbc8d/player_es5.vflset/en_US/base.js',
        'gN7a-hudCuAuPH6fByOk1_GNXN0yNMHShjZXS2VOgsEItAJz0tipeavEOmNdYN-wUtcEqD3bCXjc0iyKfAyZxCBGgIARwsSdQfJ2CJtt',
        'ttJC2JfQdSswRAIgGBCxZyAfKyi0cjXCb3DqEctUw-NYdNmOEvaepit2zJAsIEggOVaSXZjhSHMNy0NXNG_1kOyBf6HPuAuCduh-',
    ),
    (
        'https://www.youtube.com/s/player/010fbc8d/player_es6.vflset/en_US/base.js',
        'gN7a-hudCuAuPH6fByOk1_GNXN0yNMHShjZXS2VOgsEItAJz0tipeavEOmNdYN-wUtcEqD3bCXjc0iyKfAyZxCBGgIARwsSdQfJ2CJtt',
        'ttJC2JfQdSswRAIgGBCxZyAfKyi0cjXCb3DqEctUw-NYdNmOEvaepit2zJAsIEggOVaSXZjhSHMNy0NXNG_1kOyBf6HPuAuCduh-',
    ),
    (
        'https://www.youtube.com/s/player/5ec65609/player_ias_tcc.vflset/en_US/base.js',
        'AAJAJfQdSswRAIgNSN0GDUcHnCIXkKcF61yLBgDHiX1sUhOJdY4_GxunRYCIDeYNYP_16mQTPm5f1OVq3oV1ijUNYPjP4iUSMAjO9bZ',
        'AJfQdSswRAIgNSN0GDUcHnCIXkKcF61ZLBgDHiX1sUhOJdY4_GxunRYCIDyYNYP_16mQTPm5f1OVq3oV1ijUNYPjP4iUSMAjO9be',
    ),
]

_NSIG_TESTS = [
    (
        'https://www.youtube.com/s/player/7862ca1f/player_ias.vflset/en_US/base.js',
        'X_LCxVDjAavgE5t', 'yxJ1dM6iz5ogUg',
    ),
    (
        'https://www.youtube.com/s/player/9216d1f7/player_ias.vflset/en_US/base.js',
        'SLp9F5bwjAdhE9F-', 'gWnb9IK2DJ8Q1w',
    ),
    (
        'https://www.youtube.com/s/player/f8cb7a3b/player_ias.vflset/en_US/base.js',
        'oBo2h5euWy6osrUt', 'ivXHpm7qJjJN',
    ),
    (
        'https://www.youtube.com/s/player/2dfe380c/player_ias.vflset/en_US/base.js',
        'oBo2h5euWy6osrUt', '3DIBbn3qdQ',
    ),
    (
        'https://www.youtube.com/s/player/f1ca6900/player_ias.vflset/en_US/base.js',
        'cu3wyu6LQn2hse', 'jvxetvmlI9AN9Q',
    ),
    (
        'https://www.youtube.com/s/player/8040e515/player_ias.vflset/en_US/base.js',
        'wvOFaY-yjgDuIEg5', 'HkfBFDHmgw4rsw',
    ),
    (
        'https://www.youtube.com/s/player/e06dea74/player_ias.vflset/en_US/base.js',
        'AiuodmaDDYw8d3y4bf', 'ankd8eza2T6Qmw',
    ),
    (
        'https://www.youtube.com/s/player/5dd88d1d/player-plasma-ias-phone-en_US.vflset/base.js',
        'kSxKFLeqzv_ZyHSAt', 'n8gS8oRlHOxPFA',
    ),
    (
        'https://www.youtube.com/s/player/324f67b9/player_ias.vflset/en_US/base.js',
        'xdftNy7dh9QGnhW', '22qLGxrmX8F1rA',
    ),
    (
        'https://www.youtube.com/s/player/4c3f79c5/player_ias.vflset/en_US/base.js',
        'TDCstCG66tEAO5pR9o', 'dbxNtZ14c-yWyw',
    ),
    (
        'https://www.youtube.com/s/player/c81bbb4a/player_ias.vflset/en_US/base.js',
        'gre3EcLurNY2vqp94', 'Z9DfGxWP115WTg',
    ),
    (
        'https://www.youtube.com/s/player/1f7d5369/player_ias.vflset/en_US/base.js',
        'batNX7sYqIJdkJ', 'IhOkL_zxbkOZBw',
    ),
    (
        'https://www.youtube.com/s/player/009f1d77/player_ias.vflset/en_US/base.js',
        '5dwFHw8aFWQUQtffRq', 'audescmLUzI3jw',
    ),
    (
        'https://www.youtube.com/s/player/dc0c6770/player_ias.vflset/en_US/base.js',
        '5EHDMgYLV6HPGk_Mu-kk', 'n9lUJLHbxUI0GQ',
    ),
    (
        'https://www.youtube.com/s/player/113ca41c/player_ias.vflset/en_US/base.js',
        'cgYl-tlYkhjT7A', 'hI7BBr2zUgcmMg',
    ),
    (
        'https://www.youtube.com/s/player/c57c113c/player_ias.vflset/en_US/base.js',
        'M92UUMHa8PdvPd3wyM', '3hPqLJsiNZx7yA',
    ),
    (
        'https://www.youtube.com/s/player/5a3b6271/player_ias.vflset/en_US/base.js',
        'B2j7f_UPT4rfje85Lu_e', 'm5DmNymaGQ5RdQ',
    ),
    (
        'https://www.youtube.com/s/player/7a062b77/player_ias.vflset/en_US/base.js',
        'NRcE3y3mVtm_cV-W', 'VbsCYUATvqlt5w',
    ),
    (
        'https://www.youtube.com/s/player/dac945fd/player_ias.vflset/en_US/base.js',
        'o8BkRxXhuYsBCWi6RplPdP', '3Lx32v_hmzTm6A',
    ),
    (
        'https://www.youtube.com/s/player/6f20102c/player_ias.vflset/en_US/base.js',
        'lE8DhoDmKqnmJJ', 'pJTTX6XyJP2BYw',
    ),
    (
        'https://www.youtube.com/s/player/cfa9e7cb/player_ias.vflset/en_US/base.js',
        'aCi3iElgd2kq0bxVbQ', 'QX1y8jGb2IbZ0w',
    ),
    (
        'https://www.youtube.com/s/player/8c7583ff/player_ias.vflset/en_US/base.js',
        '1wWCVpRR96eAmMI87L', 'KSkWAVv1ZQxC3A',
    ),
    (
        'https://www.youtube.com/s/player/b7910ca8/player_ias.vflset/en_US/base.js',
        '_hXMCwMt9qE310D', 'LoZMgkkofRMCZQ',
    ),
    (
        'https://www.youtube.com/s/player/590f65a6/player_ias.vflset/en_US/base.js',
        '1tm7-g_A9zsI8_Lay_', 'xI4Vem4Put_rOg',
    ),
    (
        'https://www.youtube.com/s/player/b22ef6e7/player_ias.vflset/en_US/base.js',
        'b6HcntHGkvBLk_FRf', 'kNPW6A7FyP2l8A',
    ),
    (
        'https://www.youtube.com/s/player/3400486c/player_ias.vflset/en_US/base.js',
        'lL46g3XifCKUZn1Xfw', 'z767lhet6V2Skl',
    ),
    (
        'https://www.youtube.com/s/player/20dfca59/player_ias.vflset/en_US/base.js',
        '-fLCxedkAk4LUTK2', 'O8kfRq1y1eyHGw',
    ),
    (
        'https://www.youtube.com/s/player/b12cc44b/player_ias.vflset/en_US/base.js',
        'keLa5R2U00sR9SQK', 'N1OGyujjEwMnLw',
    ),
    (
        'https://www.youtube.com/s/player/3bb1f723/player_ias.vflset/en_US/base.js',
        'gK15nzVyaXE9RsMP3z', 'ZFFWFLPWx9DEgQ',
    ),
    (
        'https://www.youtube.com/s/player/2f1832d2/player_ias.vflset/en_US/base.js',
        'YWt1qdbe8SAfkoPHW5d', 'RrRjWQOJmBiP',
    ),
    (
        'https://www.youtube.com/s/player/9c6dfc4a/player_ias.vflset/en_US/base.js',
        'jbu7ylIosQHyJyJV', 'uwI0ESiynAmhNg',
    ),
    (
        'https://www.youtube.com/s/player/e7567ecf/player_ias_tce.vflset/en_US/base.js',
        'Sy4aDGc0VpYRR9ew_', '5UPOT1VhoZxNLQ',
    ),
    (
        'https://www.youtube.com/s/player/d50f54ef/player_ias_tce.vflset/en_US/base.js',
        'Ha7507LzRmH3Utygtj', 'XFTb2HoeOE5MHg',
    ),
    (
        'https://www.youtube.com/s/player/074a8365/player_ias_tce.vflset/en_US/base.js',
        'Ha7507LzRmH3Utygtj', 'ufTsrE0IVYrkl8v',
    ),
    (
        'https://www.youtube.com/s/player/643afba4/player_ias.vflset/en_US/base.js',
        'N5uAlLqm0eg1GyHO', 'dCBQOejdq5s-ww',
    ),
    (
        'https://www.youtube.com/s/player/69f581a5/tv-player-ias.vflset/tv-player-ias.js',
        '-qIP447rVlTTwaZjY', 'KNcGOksBAvwqQg',
    ),
    (
        'https://www.youtube.com/s/player/643afba4/tv-player-ias.vflset/tv-player-ias.js',
        'ir9-V6cdbCiyKxhr', '2PL7ZDYAALMfmA',
    ),
    (
        'https://www.youtube.com/s/player/363db69b/player_ias.vflset/en_US/base.js',
        'eWYu5d5YeY_4LyEDc', 'XJQqf-N7Xra3gg',
    ),
    (
        'https://www.youtube.com/s/player/4fcd6e4a/player_ias.vflset/en_US/base.js',
        'o_L251jm8yhZkWtBW', 'lXoxI3XvToqn6A',
    ),
    (
        'https://www.youtube.com/s/player/4fcd6e4a/player_ias_tce.vflset/en_US/base.js',
        'o_L251jm8yhZkWtBW', 'lXoxI3XvToqn6A',
    ),
    (
        'https://www.youtube.com/s/player/20830619/tv-player-ias.vflset/tv-player-ias.js',
        'ir9-V6cdbCiyKxhr', '9YE85kNjZiS4',
    ),
    (
        'https://www.youtube.com/s/player/20830619/player-plasma-ias-phone-en_US.vflset/base.js',
        'ir9-V6cdbCiyKxhr', '9YE85kNjZiS4',
    ),
    (
        'https://www.youtube.com/s/player/20830619/player-plasma-ias-tablet-en_US.vflset/base.js',
        'ir9-V6cdbCiyKxhr', '9YE85kNjZiS4',
    ),
    (
        'https://www.youtube.com/s/player/8a8ac953/player_ias_tce.vflset/en_US/base.js',
        'MiBYeXx_vRREbiCCmh', 'RtZYMVvmkE0JE',
    ),
    (
        'https://www.youtube.com/s/player/8a8ac953/tv-player-es6.vflset/tv-player-es6.js',
        'MiBYeXx_vRREbiCCmh', 'RtZYMVvmkE0JE',
    ),
    (
        'https://www.youtube.com/s/player/59b252b9/player_ias.vflset/en_US/base.js',
        'D3XWVpYgwhLLKNK4AGX', 'aZrQ1qWJ5yv5h',
    ),
    (
        'https://www.youtube.com/s/player/fc2a56a5/player_ias.vflset/en_US/base.js',
        'qTKWg_Il804jd2kAC', 'OtUAm2W6gyzJjB9u',
    ),
    (
        'https://www.youtube.com/s/player/fc2a56a5/tv-player-ias.vflset/tv-player-ias.js',
        'qTKWg_Il804jd2kAC', 'OtUAm2W6gyzJjB9u',
    ),
    (
        'https://www.youtube.com/s/player/a74bf670/player_ias_tce.vflset/en_US/base.js',
        'kM5r52fugSZRAKHfo3', 'hQP7k1hA22OrNTnq',
    ),
    (
        'https://www.youtube.com/s/player/6275f73c/player_ias_tce.vflset/en_US/base.js',
        'kM5r52fugSZRAKHfo3', '-I03XF0iyf6I_X0A',
    ),
    (
        'https://www.youtube.com/s/player/20c72c18/player_ias_tce.vflset/en_US/base.js',
        'kM5r52fugSZRAKHfo3', '-I03XF0iyf6I_X0A',
    ),
    (
        'https://www.youtube.com/s/player/9fe2e06e/player_ias_tce.vflset/en_US/base.js',
        'kM5r52fugSZRAKHfo3', '6r5ekNIiEMPutZy',
    ),
    (
        'https://www.youtube.com/s/player/680f8c75/player_ias_tce.vflset/en_US/base.js',
        'kM5r52fugSZRAKHfo3', '0ml9caTwpa55Jf',
    ),
    (
        'https://www.youtube.com/s/player/14397202/player_ias_tce.vflset/en_US/base.js',
        'kM5r52fugSZRAKHfo3', 'ozZFAN21okDdJTa',
    ),
    (
        'https://www.youtube.com/s/player/5dcb2c1f/player_ias_tce.vflset/en_US/base.js',
        'kM5r52fugSZRAKHfo3', 'p7iTbRZDYAF',
    ),
    (
        'https://www.youtube.com/s/player/a10d7fcc/player_ias_tce.vflset/en_US/base.js',
        'kM5r52fugSZRAKHfo3', '9Zue7DDHJSD',
    ),
    (
        'https://www.youtube.com/s/player/8e20cb06/player_ias_tce.vflset/en_US/base.js',
        'kM5r52fugSZRAKHfo3', '5-4tTneTROTpMzba',
    ),
    (
        'https://www.youtube.com/s/player/e12fbea4/player_ias_tce.vflset/en_US/base.js',
        'kM5r52fugSZRAKHfo3', 'XkeRfXIPOkSwfg',
    ),
    (
        'https://www.youtube.com/s/player/ef259203/player_ias_tce.vflset/en_US/base.js',
        'rPqBC01nJpqhhi2iA2U', 'hY7dbiKFT51UIA',
    ),
    (
        'https://www.youtube.com/s/player/010fbc8d/player_es5.vflset/en_US/base.js',
        '0hlOAlqjFszVvF4Z', 'R-H23bZGAsRFTg',
    ),
    (
        'https://www.youtube.com/s/player/010fbc8d/player_es6.vflset/en_US/base.js',
        '0hlOAlqjFszVvF4Z', 'R-H23bZGAsRFTg',
    ),
    (
        'https://www.youtube.com/s/player/5ec65609/player_ias_tcc.vflset/en_US/base.js',
        '6l5CTNx4AzIqH4MXM', 'NupToduxHBew1g',
    ),
]


@is_download_test
class TestPlayerInfo(unittest.TestCase):
    def test_youtube_extract_player_info(self):
        PLAYER_URLS = (
            ('https://www.youtube.com/s/player/4c3f79c5/player_ias.vflset/en_US/base.js', '4c3f79c5'),
            ('https://www.youtube.com/s/player/64dddad9/player_ias.vflset/en_US/base.js', '64dddad9'),
            ('https://www.youtube.com/s/player/64dddad9/player_ias.vflset/fr_FR/base.js', '64dddad9'),
            ('https://www.youtube.com/s/player/64dddad9/player-plasma-ias-phone-en_US.vflset/base.js', '64dddad9'),
            ('https://www.youtube.com/s/player/64dddad9/player-plasma-ias-phone-de_DE.vflset/base.js', '64dddad9'),
            ('https://www.youtube.com/s/player/64dddad9/player-plasma-ias-tablet-en_US.vflset/base.js', '64dddad9'),
            ('https://www.youtube.com/s/player/e7567ecf/player_ias_tce.vflset/en_US/base.js', 'e7567ecf'),
            ('https://www.youtube.com/s/player/643afba4/tv-player-ias.vflset/tv-player-ias.js', '643afba4'),
            # obsolete
            ('https://www.youtube.com/yts/jsbin/player_ias-vfle4-e03/en_US/base.js', 'vfle4-e03'),
            ('https://www.youtube.com/yts/jsbin/player_ias-vfl49f_g4/en_US/base.js', 'vfl49f_g4'),
            ('https://www.youtube.com/yts/jsbin/player_ias-vflCPQUIL/en_US/base.js', 'vflCPQUIL'),
            ('https://www.youtube.com/yts/jsbin/player-vflzQZbt7/en_US/base.js', 'vflzQZbt7'),
            ('https://www.youtube.com/yts/jsbin/player-en_US-vflaxXRn1/base.js', 'vflaxXRn1'),
            ('https://s.ytimg.com/yts/jsbin/html5player-en_US-vflXGBaUN.js', 'vflXGBaUN'),
            ('https://s.ytimg.com/yts/jsbin/html5player-en_US-vflKjOTVq/html5player.js', 'vflKjOTVq'),
        )
        for player_url, expected_player_id in PLAYER_URLS:
            player_id = YoutubeIE._extract_player_info(player_url)
            self.assertEqual(player_id, expected_player_id)


@is_download_test
class TestSignature(unittest.TestCase):
    def setUp(self):
        TEST_DIR = os.path.dirname(os.path.abspath(__file__))
        self.TESTDATA_DIR = os.path.join(TEST_DIR, 'testdata/sigs')
        if not os.path.exists(self.TESTDATA_DIR):
            os.mkdir(self.TESTDATA_DIR)

    def tearDown(self):
        with contextlib.suppress(OSError):
            for f in os.listdir(self.TESTDATA_DIR):
                os.remove(f)


def t_factory(name, sig_func, url_pattern):
    def make_tfunc(url, sig_input, expected_sig):
        m = url_pattern.match(url)
        assert m, f'{url!r} should follow URL format'
        test_id = re.sub(r'[/.-]', '_', m.group('id') or m.group('compat_id'))

        def test_func(self):
            basename = f'player-{test_id}.js'
            fn = os.path.join(self.TESTDATA_DIR, basename)

            if not os.path.exists(fn):
                urllib.request.urlretrieve(url, fn)
            with open(fn, encoding='utf-8') as testf:
                jscode = testf.read()
            self.assertEqual(sig_func(jscode, sig_input, url), expected_sig)

        test_func.__name__ = f'test_{name}_js_{test_id}'
        setattr(TestSignature, test_func.__name__, test_func)
    return make_tfunc


def signature(jscode, sig_input, player_url):
    func = YoutubeIE(FakeYDL())._parse_sig_js(jscode, player_url)
    src_sig = (
        str(string.printable[:sig_input])
        if isinstance(sig_input, int) else sig_input)
    return func(src_sig)


def n_sig(jscode, sig_input, player_url):
    ie = YoutubeIE(FakeYDL())
    funcname = ie._extract_n_function_name(jscode, player_url=player_url)
    jsi = JSInterpreter(jscode)
    func = jsi.extract_function_from_code(*ie._fixup_n_function_code(*jsi.extract_function_code(funcname), jscode, player_url))
    return func([sig_input])


make_sig_test = t_factory(
    'signature', signature,
    re.compile(r'''(?x)
        .+(?:
            /player/(?P<id>[a-zA-Z0-9_/.-]+)|
            /html5player-(?:en_US-)?(?P<compat_id>[a-zA-Z0-9_-]+)(?:/watch_as3|/html5player)?
        )\.js$'''))
for test_spec in _SIG_TESTS:
    make_sig_test(*test_spec)

make_nsig_test = t_factory(
    'nsig', n_sig, re.compile(r'.+/player/(?P<id>[a-zA-Z0-9_/.-]+)\.js$'))
for test_spec in _NSIG_TESTS:
    make_nsig_test(*test_spec)


if __name__ == '__main__':
    unittest.main()

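The file removed above relied on a test-factory pattern: synthesizing one unittest method per data tuple at import time, so every (player URL, input, expected) row becomes an individually reported test. The pattern in miniature, as a simplified sketch rather than the original code:

    import unittest

    class TestGenerated(unittest.TestCase):
        pass

    def make_test(name, func, args, expected):
        def test(self):
            self.assertEqual(func(*args), expected)
        test.__name__ = f'test_{name}'
        setattr(TestGenerated, test.__name__, test)

    make_test('upper', str.upper, ('abc',), 'ABC')  # adds TestGenerated.test_upper
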
@@ -42,6 +42,8 @@ from .globals import (
    plugin_pps,
    all_plugins_loaded,
    plugin_dirs,
    supported_js_runtimes,
    supported_remote_components,
)
from .minicurses import format_text
from .networking import HEADRequest, Request, RequestDirector

@@ -304,7 +306,6 @@ class YoutubeDL:
    clean_infojson:    Remove internal metadata from the infojson
    getcomments:       Extract video comments. This will not be written to disk
                       unless writeinfojson is also given
    writeannotations:  Write the video annotations to a .annotations.xml file
    writethumbnail:    Write the thumbnail image to a file
    allow_playlist_files: Whether to write playlists' description, infojson etc
                       also to disk when using the 'write*' options

@@ -511,11 +512,11 @@ class YoutubeDL:
                       the downloader (see yt_dlp/downloader/common.py):
                       nopart, updatetime, buffersize, ratelimit, throttledratelimit, min_filesize,
                       max_filesize, test, noresizebuffer, retries, file_access_retries, fragment_retries,
                       continuedl, xattr_set_filesize, hls_use_mpegts, http_chunk_size,
                       external_downloader_args, concurrent_fragment_downloads, progress_delta.
                       continuedl, hls_use_mpegts, http_chunk_size, external_downloader_args,
                       concurrent_fragment_downloads, progress_delta.

    The following options are used by the post processors:
    ffmpeg_location:   Location of the ffmpeg/avconv binary; either the path
    ffmpeg_location:   Location of the ffmpeg binary; either the path
                       to the binary or its containing directory.
    postprocessor_args: A dictionary of postprocessor/executable keys (in lower case)
                       and a list of additional command-line arguments for the

@@ -534,6 +535,18 @@ class YoutubeDL:
                       See "EXTRACTOR ARGUMENTS" for details.
                       Argument values must always be a list of string(s).
                       E.g. {'youtube': {'skip': ['dash', 'hls']}}
    js_runtimes:       A dictionary of JavaScript runtime keys (in lower case) to enable
                       and a dictionary of additional configuration for the runtime.
                       Currently supported runtimes are 'deno', 'node', 'bun', and 'quickjs'.
                       If None, the default runtime of "deno" will be enabled.
                       The runtime configuration dictionary can have the following keys:
                       - path: Path to the executable (optional)
                       E.g. {'deno': {'path': '/path/to/deno'}}
    remote_components: A list of remote components that are allowed to be fetched when required.
                       Supported components:
                       - ejs:npm (external JavaScript components from npm)
                       - ejs:github (external JavaScript components from yt-dlp-ejs GitHub)
                       By default, no remote components are allowed to be fetched.
    mark_watched:      Mark videos watched (even with --simulate). Only for YouTube

    The following options are deprecated and may be removed in the future:

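Illustrative only, based on the option documentation above — enabling runtimes and remote components from the embedding API (the node path value is a made-up example; path is optional):

    from yt_dlp import YoutubeDL

    ydl = YoutubeDL({
        'js_runtimes': {'deno': {}, 'node': {'path': '/usr/local/bin/node'}},
        'remote_components': ['ejs:github'],  # nothing is fetched remotely unless listed here
    })
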
@@ -566,32 +579,14 @@ class YoutubeDL:
    allsubtitles:      - Use subtitleslangs = ['all']
                       Downloads all the subtitles of the video
                       (requires writesubtitles or writeautomaticsub)
    include_ads:       - Doesn't work
                       Download ads as well
    call_home:         - Not implemented
                       Boolean, true if we are allowed to contact the
                       yt-dlp servers for debugging.
    post_hooks:        - Register a custom postprocessor
                       A list of functions that get called as the final step
                       for each video file, after all postprocessors have been
                       called. The filename will be passed as the only argument.
    hls_prefer_native: - Use external_downloader = {'m3u8': 'native'} or {'m3u8': 'ffmpeg'}.
                       Use the native HLS downloader instead of ffmpeg/avconv
                       if True, otherwise use ffmpeg/avconv if False, otherwise
                       Use the native HLS downloader instead of ffmpeg
                       if True, otherwise use ffmpeg if False, otherwise
                       use downloader suggested by extractor if None.
    prefer_ffmpeg:     - avconv support is deprecated
                       If False, use avconv instead of ffmpeg if both are available,
                       otherwise prefer ffmpeg.
    youtube_include_dash_manifest: - Use extractor_args
                       If True (default), DASH manifests and related
                       data will be downloaded and processed by extractor.
                       You can reduce network I/O by disabling it if you don't
                       care about DASH. (only for youtube)
    youtube_include_hls_manifest: - Use extractor_args
                       If True (default), HLS manifests and related
                       data will be downloaded and processed by extractor.
                       You can reduce network I/O by disabling it if you don't
                       care about HLS. (only for youtube)
    no_color:          Same as `color='no_color'`
    no_overwrites:     Same as `overwrites=False`
    """

@@ -736,6 +731,13 @@ class YoutubeDL:
        else:
            raise

        # Note: this must be after plugins are loaded
        self.params['js_runtimes'] = self.params.get('js_runtimes', {'deno': {}})
        self._clean_js_runtimes(self.params['js_runtimes'])

        self.params['remote_components'] = set(self.params.get('remote_components', ()))
        self._clean_remote_components(self.params['remote_components'])

        self.params['compat_opts'] = set(self.params.get('compat_opts', ()))
        self.params['http_headers'] = HTTPHeaderDict(std_headers, self.params.get('http_headers'))
        self._load_cookies(self.params['http_headers'].get('Cookie'))  # compat

@@ -750,10 +752,6 @@ class YoutubeDL:
                return True
            return False

        if check_deprecated('cn_verification_proxy', '--cn-verification-proxy', '--geo-verification-proxy'):
            if self.params.get('geo_verification_proxy') is None:
                self.params['geo_verification_proxy'] = self.params['cn_verification_proxy']

        check_deprecated('useid', '--id', '-o "%(id)s.%(ext)s"')

        for msg in self.params.get('_warnings', []):

@@ -852,6 +850,36 @@ class YoutubeDL:

        self.archive = preload_download_archive(self.params.get('download_archive'))

    def _clean_js_runtimes(self, runtimes):
        if not (
            isinstance(runtimes, dict)
            and all(isinstance(k, str) and (v is None or isinstance(v, dict)) for k, v in runtimes.items())
        ):
            raise ValueError('Invalid js_runtimes format, expected a dict of {runtime: {config}}')

        if unsupported_runtimes := runtimes.keys() - supported_js_runtimes.value.keys():
            self.report_warning(
                f'Ignoring unsupported JavaScript runtime(s): {", ".join(unsupported_runtimes)}.'
                f' Supported runtimes: {", ".join(supported_js_runtimes.value.keys())}.')
            for rt in unsupported_runtimes:
                runtimes.pop(rt)

    def _clean_remote_components(self, remote_components: set):
        if unsupported_remote_components := set(remote_components) - set(supported_remote_components.value):
            self.report_warning(
                f'Ignoring unsupported remote component(s): {", ".join(unsupported_remote_components)}.'
                f' Supported remote components: {", ".join(supported_remote_components.value)}.')
            for rt in unsupported_remote_components:
                remote_components.remove(rt)

    @functools.cached_property
    def _js_runtimes(self):
        runtimes = {}
        for name, config in self.params.get('js_runtimes', {}).items():
            runtime_cls = supported_js_runtimes.value.get(name)
            runtimes[name] = runtime_cls(path=config.get('path')) if runtime_cls else None
        return runtimes

    def warn_if_short_id(self, argv):
        # short YouTube ID starting with dash?
        idxs = [

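The net effect of the cleaning helpers above, sketched: unsupported keys are warned about and dropped in place rather than raising, and only a malformed js_runtimes mapping raises ValueError.

    runtimes = {'deno': {}, 'imaginaryjs': {}}  # 'imaginaryjs' is a made-up name
    # After ydl._clean_js_runtimes(runtimes), one would expect roughly:
    #   WARNING: Ignoring unsupported JavaScript runtime(s): imaginaryjs. ...
    # and runtimes == {'deno': {}}
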
@@ -2030,7 +2058,7 @@ class YoutubeDL:
        else:
            entries = resolved_entries = list(entries)
            n_entries = len(resolved_entries)
        ie_result['requested_entries'], ie_result['entries'] = tuple(zip(*resolved_entries)) or ([], [])
        ie_result['requested_entries'], ie_result['entries'] = tuple(zip(*resolved_entries, strict=True)) or ([], [])
        if not ie_result.get('playlist_count'):
            # Better to do this after potentially exhausting entries
            ie_result['playlist_count'] = all_entries.get_full_count()

@@ -2717,11 +2745,7 @@ class YoutubeDL:
            ('modified_timestamp', 'modified_date'),
        ):
            if info_dict.get(date_key) is None and info_dict.get(ts_key) is not None:
                # Working around out-of-range timestamp values (e.g. negative ones on Windows,
                # see http://bugs.python.org/issue1646728)
                with contextlib.suppress(ValueError, OverflowError, OSError):
                    upload_date = dt.datetime.fromtimestamp(info_dict[ts_key], dt.timezone.utc)
                    info_dict[date_key] = upload_date.strftime('%Y%m%d')
                info_dict[date_key] = strftime_or_none(info_dict[ts_key])

        if not info_dict.get('release_year'):
            info_dict['release_year'] = traverse_obj(info_dict, ('release_date', {lambda x: int(x[:4])}))

@@ -2812,7 +2836,7 @@ class YoutubeDL:

        dummy_chapter = {'end_time': 0, 'start_time': info_dict.get('duration')}
        for idx, (prev, current, next_) in enumerate(zip(
                (dummy_chapter, *chapters), chapters, (*chapters[1:], dummy_chapter)), 1):
                (dummy_chapter, *chapters), chapters, (*chapters[1:], dummy_chapter), strict=False), 1):
            if current.get('start_time') is None:
                current['start_time'] = prev.get('end_time')
            if not current.get('end_time'):

@@ -3339,28 +3363,6 @@ class YoutubeDL:
        elif _infojson_written is None:
            return

        # Note: Annotations are deprecated
        annofn = None
        if self.params.get('writeannotations', False):
            annofn = self.prepare_filename(info_dict, 'annotation')
        if annofn:
            if not self._ensure_dir_exists(annofn):
                return
            if not self.params.get('overwrites', True) and os.path.exists(annofn):
                self.to_screen('[info] Video annotations are already present')
            elif not info_dict.get('annotations'):
                self.report_warning('There are no annotations to write.')
            else:
                try:
                    self.to_screen('[info] Writing video annotations to: ' + annofn)
                    with open(annofn, 'w', encoding='utf-8') as annofile:
                        annofile.write(info_dict['annotations'])
                except (KeyError, TypeError):
                    self.report_warning('There are no annotations to write.')
                except OSError:
                    self.report_error('Cannot write annotations file: ' + annofn)
                    return

        # Write internet shortcut files
        def _write_link_file(link_type):
            url = try_get(info_dict['webpage_url'], iri_to_uri)

@@ -3419,7 +3421,7 @@ class YoutubeDL:
        def existing_video_file(*filepaths):
            ext = info_dict.get('ext')
            converted = lambda file: replace_extension(file, self.params.get('final_ext') or ext, ext)
            file = self.existing_file(itertools.chain(*zip(map(converted, filepaths), filepaths)),
            file = self.existing_file(itertools.chain(*zip(map(converted, filepaths), filepaths, strict=True)),
                                      default_overwrite=False)
            if file:
                info_dict['ext'] = os.path.splitext(file)[1][1:]

@@ -4005,7 +4007,7 @@ class YoutubeDL:

    def render_subtitles_table(self, video_id, subtitles):
        def _row(lang, formats):
            exts, names = zip(*((f['ext'], f.get('name') or 'unknown') for f in reversed(formats)))
            exts, names = zip(*((f['ext'], f.get('name') or 'unknown') for f in reversed(formats)), strict=True)
            if len(set(names)) == 1:
                names = [] if names[0] == 'unknown' else names[:1]
            return [lang, ', '.join(names), ', '.join(exts)]

@@ -4113,6 +4115,18 @@ class YoutubeDL:
            join_nonempty(*get_package_info(m)) for m in available_dependencies.values()
        })) or 'none'))

        if not self.params.get('js_runtimes'):
            write_debug('JS runtimes: none (disabled)')
        else:
            write_debug('JS runtimes: %s' % (', '.join(sorted(
                f'{name} (unknown)' if runtime is None
                else join_nonempty(
                    runtime.info.name,
                    runtime.info.version + (' (unsupported)' if runtime.info.supported is False else ''),
                )
                for name, runtime in self._js_runtimes.items() if runtime is None or runtime.info is not None
            )) or 'none'))

        write_debug(f'Proxy map: {self.proxies}')
        write_debug(f'Request Handlers: {", ".join(rh.RH_NAME for rh in self._request_director.handlers.values())}')

@@ -4161,8 +4175,7 @@ class YoutubeDL:
                self.params.get('cookiefile'), self.params.get('cookiesfrombrowser'), self)
        except CookieLoadError as error:
            cause = error.__context__
            # compat: <=py3.9: `traceback.format_exception` has a different signature
            self.report_error(str(cause), tb=''.join(traceback.format_exception(None, cause, cause.__traceback__)))
            self.report_error(str(cause), tb=''.join(traceback.format_exception(cause)))
            raise

    @property

@@ -1,8 +1,8 @@
import sys

if sys.version_info < (3, 9):
if sys.version_info < (3, 10):
    raise ImportError(
        f'You are using an unsupported version of Python. Only Python versions 3.9 and above are supported by yt-dlp')  # noqa: F541
        f'You are using an unsupported version of Python. Only Python versions 3.10 and above are supported by yt-dlp')  # noqa: F541

__license__ = 'The Unlicense'

@@ -59,12 +59,17 @@ from .utils import (
    render_table,
    setproctitle,
    shell_quote,
    traverse_obj,
    variadic,
    write_string,
)
from .utils.networking import std_headers
from .utils._utils import _UnsafeExtensionError
from .utils._jsruntime import (
    BunJsRuntime as _BunJsRuntime,
    DenoJsRuntime as _DenoJsRuntime,
    NodeJsRuntime as _NodeJsRuntime,
    QuickJsRuntime as _QuickJsRuntime,
)
from .YoutubeDL import YoutubeDL

@@ -157,7 +162,7 @@ def set_compat_opts(opts):
    if 'format-sort' in opts.compat_opts:
        opts.format_sort.extend(FormatSorter.ytdl_default)
    elif 'prefer-vp9-sort' in opts.compat_opts:
        opts.format_sort.extend(FormatSorter._prefer_vp9_sort)
        FormatSorter.default = FormatSorter._prefer_vp9_sort

    if 'mtime-by-default' in opts.compat_opts:
        if opts.updatetime is None:

@@ -523,7 +528,6 @@ def validate_options(opts):

    if report_args_compat('post-processor', opts.postprocessor_args, 'default-compat', 'default'):
        opts.postprocessor_args['default'] = opts.postprocessor_args.pop('default-compat')
        opts.postprocessor_args.setdefault('sponskrub', [])

    def report_conflict(arg1, opt1, arg2='--allow-unplayable-formats', opt2='allow_unplayable_formats',
                        val1=NO_DEFAULT, val2=NO_DEFAULT, default=False):

@@ -548,11 +552,6 @@ def validate_options(opts):
        '"--exec before_dl:"', 'exec_cmd', val2=opts.exec_cmd.get('before_dl'))
    report_conflict('--id', 'useid', '--output', 'outtmpl', val2=opts.outtmpl.get('default'))
    report_conflict('--remux-video', 'remuxvideo', '--recode-video', 'recodevideo')
    report_conflict('--sponskrub', 'sponskrub', '--remove-chapters', 'remove_chapters')
    report_conflict('--sponskrub', 'sponskrub', '--sponsorblock-mark', 'sponsorblock_mark')
    report_conflict('--sponskrub', 'sponskrub', '--sponsorblock-remove', 'sponsorblock_remove')
    report_conflict('--sponskrub-cut', 'sponskrub_cut', '--split-chapter', 'split_chapters',
                    val1=opts.sponskrub and opts.sponskrub_cut)

    # Conflicts with --allow-unplayable-formats
    report_conflict('--embed-metadata', 'addmetadata')

@@ -565,23 +564,15 @@ def validate_options(opts):
    report_conflict('--recode-video', 'recodevideo')
    report_conflict('--remove-chapters', 'remove_chapters', default=[])
    report_conflict('--remux-video', 'remuxvideo')
    report_conflict('--sponskrub', 'sponskrub')
    report_conflict('--sponsorblock-remove', 'sponsorblock_remove', default=set())
    report_conflict('--xattrs', 'xattrs')

    # Fully deprecated options
    def report_deprecation(val, old, new=None):
        if not val:
            return
    if hasattr(opts, '_deprecated_options'):
        deprecation_warnings.append(
            f'{old} is deprecated and may be removed in a future version. Use {new} instead' if new
            else f'{old} is deprecated and may not work as expected')

    report_deprecation(opts.sponskrub, '--sponskrub', '--sponsorblock-mark or --sponsorblock-remove')
    report_deprecation(not opts.prefer_ffmpeg, '--prefer-avconv', 'ffmpeg')
    # report_deprecation(opts.include_ads, '--include-ads')  # We may re-implement this in future
    # report_deprecation(opts.call_home, '--call-home')  # We may re-implement this in future
    # report_deprecation(opts.writeannotations, '--write-annotations')  # It's just that no website has it
            f'The following options have been deprecated: {", ".join(opts._deprecated_options)}\n'
            'Please remove them from your command/configuration to avoid future errors.\n'
            'See https://github.com/yt-dlp/yt-dlp/issues/14198 for more details')
        del opts._deprecated_options

    # Dependent options
    opts.date = DateRange.day(opts.date) if opts.date else DateRange(opts.dateafter, opts.datebefore)

@@ -712,21 +703,6 @@ def get_postprocessors(opts):
        'add_metadata': opts.addmetadata,
        'add_infojson': opts.embed_infojson,
    }
    # Deprecated
    # This should be above EmbedThumbnail since sponskrub removes the thumbnail attachment
    # but must be below EmbedSubtitle and FFmpegMetadata
    # See https://github.com/yt-dlp/yt-dlp/issues/204 , https://github.com/faissaloo/SponSkrub/issues/29
    # If opts.sponskrub is None, sponskrub is used, but it silently fails if the executable can't be found
    if opts.sponskrub is not False:
        yield {
            'key': 'SponSkrub',
            'path': opts.sponskrub_path,
            'args': opts.sponskrub_args,
            'cut': opts.sponskrub_cut,
            'force': opts.sponskrub_force,
            'ignoreerror': opts.sponskrub is None,
            '_from_cli': True,
        }
    if opts.embedthumbnail:
        yield {
            'key': 'EmbedThumbnail',

@@ -804,6 +780,10 @@ def parse_options(argv=None):
        else opts.audioformat if (opts.extractaudio and opts.audioformat in FFmpegExtractAudioPP.SUPPORTED_EXTS)
        else None)

    js_runtimes = {
        runtime.lower(): {'path': path} for runtime, path in (
            [*arg.split(':', 1), None][:2] for arg in opts.js_runtimes)}

    return ParsedOptions(parser, opts, urls, {
        'usenetrc': opts.usenetrc,
        'netrc_location': opts.netrc_location,

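The comprehension above splits each runtime argument on the first ':' into a name and an optional path. Equivalent behaviour, spelled out (the helper name here is ours, for illustration):

    def parse_js_runtime(arg):
        name, _, path = arg.partition(':')
        return name.lower(), {'path': path or None}

    assert parse_js_runtime('deno') == ('deno', {'path': None})
    assert parse_js_runtime('Node:/usr/bin/node') == ('node', {'path': '/usr/bin/node'})
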
@@ -885,7 +865,6 @@ def parse_options(argv=None):
        'nopart': opts.nopart,
        'updatetime': opts.updatetime,
        'writedescription': opts.writedescription,
        'writeannotations': opts.writeannotations,
        'writeinfojson': opts.writeinfojson,
        'allow_playlist_files': opts.allow_playlist_files,
        'clean_infojson': opts.clean_infojson,

@@ -919,7 +898,6 @@ def parse_options(argv=None):
        'max_views': opts.max_views,
        'daterange': opts.date,
        'cachedir': opts.cachedir,
        'youtube_print_sig_code': opts.youtube_print_sig_code,
        'age_limit': opts.age_limit,
        'download_archive': opts.download_archive,
        'break_on_existing': opts.break_on_existing,

@@ -937,13 +915,9 @@ def parse_options(argv=None):
        'socket_timeout': opts.socket_timeout,
        'bidi_workaround': opts.bidi_workaround,
        'debug_printtraffic': opts.debug_printtraffic,
        'prefer_ffmpeg': opts.prefer_ffmpeg,
        'include_ads': opts.include_ads,
        'default_search': opts.default_search,
        'dynamic_mpd': opts.dynamic_mpd,
        'extractor_args': opts.extractor_args,
        'youtube_include_dash_manifest': opts.youtube_include_dash_manifest,
        'youtube_include_hls_manifest': opts.youtube_include_hls_manifest,
        'encoding': opts.encoding,
        'extract_flat': opts.extract_flat,
        'live_from_start': opts.live_from_start,

@@ -955,7 +929,6 @@ def parse_options(argv=None):
        'fixup': opts.fixup,
        'source_address': opts.source_address,
        'impersonate': opts.impersonate,
        'call_home': opts.call_home,
        'sleep_interval_requests': opts.sleep_interval_requests,
        'sleep_interval': opts.sleep_interval,
        'max_sleep_interval': opts.max_sleep_interval,

@@ -965,7 +938,6 @@ def parse_options(argv=None):
        'force_keyframes_at_cuts': opts.force_keyframes_at_cuts,
        'list_thumbnails': opts.list_thumbnails,
        'playlist_items': opts.playlist_items,
        'xattr_set_filesize': opts.xattr_set_filesize,
        'match_filter': opts.match_filter,
        'color': opts.color,
        'ffmpeg_location': opts.ffmpeg_location,

@@ -974,11 +946,13 @@ def parse_options(argv=None):
        'hls_split_discontinuity': opts.hls_split_discontinuity,
        'external_downloader_args': opts.external_downloader_args,
        'postprocessor_args': opts.postprocessor_args,
        'cn_verification_proxy': opts.cn_verification_proxy,
        'geo_verification_proxy': opts.geo_verification_proxy,
        'geo_bypass': opts.geo_bypass,
        'geo_bypass_country': opts.geo_bypass_country,
        'geo_bypass_ip_block': opts.geo_bypass_ip_block,
        'useid': opts.useid or None,
        'js_runtimes': js_runtimes,
        'remote_components': opts.remote_components,
        'warn_when_outdated': opts.update_self is None,
        '_warnings': warnings,
        '_deprecation_warnings': deprecation_warnings,

@@ -991,12 +965,6 @@ def _real_main(argv=None):

    parser, opts, all_urls, ydl_opts = parse_options(argv)

    # Dump user agent
    if opts.dump_user_agent:
        ua = traverse_obj(opts.headers, 'User-Agent', casesense=False, default=std_headers['User-Agent'])
        write_string(f'{ua}\n', out=sys.stdout)
        return

    if print_extractor_information(opts, all_urls):
        return

@@ -1019,13 +987,8 @@ def _real_main(argv=None):

    try:
        updater = Updater(ydl, opts.update_self)
        if opts.update_self and updater.update() and actual_use:
            if updater.cmd:
                return updater.restart()
            # This code is reachable only for zip variant in py < 3.10
            # It makes sense to exit here, but the old behavior is to continue
            ydl.report_warning('Restart yt-dlp to use the updated version')
            # return 100, 'ERROR: The program must exit for the update to complete'
        if opts.update_self and updater.update() and actual_use and updater.cmd:
            return updater.restart()
    except Exception:
        traceback.print_exc()
        ydl._download_retcode = 100

@@ -1131,6 +1094,16 @@ def main(argv=None):

from .extractor import gen_extractors, list_extractors

# Register JS runtimes and remote components
from .globals import supported_js_runtimes, supported_remote_components
supported_js_runtimes.value['deno'] = _DenoJsRuntime
supported_js_runtimes.value['node'] = _NodeJsRuntime
supported_js_runtimes.value['bun'] = _BunJsRuntime
supported_js_runtimes.value['quickjs'] = _QuickJsRuntime

supported_remote_components.value.append('ejs:github')
supported_remote_components.value.append('ejs:npm')

__all__ = [
    'YoutubeDL',
    'gen_extractors',

@@ -34,3 +34,4 @@ print(f'Adding imports: {hiddenimports}')
excludedimports = ['youtube_dl', 'youtube_dlc', 'test', 'ytdlp_plugins', 'devscripts', 'bundle']

datas = collect_data_files('curl_cffi', includes=['cacert.pem'])
datas += collect_data_files('yt_dlp_ejs', includes=['**/*.js'])

@@ -447,7 +447,7 @@ def key_schedule_core(data, rcon_iteration):


def xor(data1, data2):
    return [x ^ y for x, y in zip(data1, data2)]
    return [x ^ y for x, y in zip(data1, data2, strict=False)]


def iter_mix_columns(data, matrix):

@@ -1,3 +1,4 @@
import datetime as dt
import os
import xml.etree.ElementTree as etree

@@ -27,6 +28,13 @@ def compat_ord(c):
    return c if isinstance(c, int) else ord(c)


def compat_datetime_from_timestamp(timestamp):
    # Calling dt.datetime.fromtimestamp with negative timestamps throws error in Windows
    # Ref: https://github.com/yt-dlp/yt-dlp/issues/5185, https://github.com/python/cpython/issues/81708,
    # https://github.com/yt-dlp/yt-dlp/issues/6706#issuecomment-1496842642
    return (dt.datetime.fromtimestamp(0, dt.timezone.utc) + dt.timedelta(seconds=timestamp))


# Python 3.8+ does not honor %HOME% on windows, but this breaks compatibility with youtube-dl
# See https://github.com/yt-dlp/yt-dlp/issues/792
# https://docs.python.org/3/library/os.path.html#os.path.expanduser

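Worked example of why the epoch-plus-timedelta form matters: on Windows, dt.datetime.fromtimestamp() rejects negative timestamps, while anchoring at the epoch and adding a timedelta stays portable. The value below matches the 1820-05-12 case from the strftime_or_none tests earlier in this commit:

    import datetime as dt

    def from_timestamp(ts):
        return dt.datetime.fromtimestamp(0, dt.timezone.utc) + dt.timedelta(seconds=ts)

    assert from_timestamp(-4722192000).strftime('%Y%m%d') == '18200512'
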
@@ -1,13 +0,0 @@
# flake8: noqa: F405
from types import *  # noqa: F403

from .compat_utils import passthrough_module

passthrough_module(__name__, 'types')
del passthrough_module

try:
    # NB: pypy has builtin NoneType, so checking NameError won't work
    from types import NoneType  # >= 3.10
except ImportError:
    NoneType = type(None)

@@ -22,15 +22,11 @@ if os.name == 'nt':

     def getproxies_registry_patched():
         proxies = getproxies_registry()
-        if (
-            sys.version_info >= (3, 10, 5)  # https://docs.python.org/3.10/whatsnew/changelog.html#python-3-10-5-final
-            or (3, 9, 13) <= sys.version_info < (3, 10)  # https://docs.python.org/3.9/whatsnew/changelog.html#python-3-9-13-final
-        ):
-            return proxies
-
-        for scheme in ('https', 'ftp'):
-            if scheme in proxies and proxies[scheme].startswith(f'{scheme}://'):
-                proxies[scheme] = 'http' + proxies[scheme][len(scheme):]
+        if sys.version_info < (3, 10, 5):  # https://docs.python.org/3.10/whatsnew/changelog.html#python-3-10-5-final
+            for scheme in ('https', 'ftp'):
+                if scheme in proxies and proxies[scheme].startswith(f'{scheme}://'):
+                    proxies[scheme] = 'http' + proxies[scheme][len(scheme):]

         return proxies
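Note: the rewrite inverts the version check: instead of returning early on fixed interpreters, it applies the scheme fix-up only on Python < 3.10.5, where `getproxies_registry()` mislabels Windows registry proxies as `https://`/`ftp://` even though they speak plain HTTP. What the loop does, in isolation (addresses are made up):

    proxies = {'https': 'https://127.0.0.1:3128', 'ftp': 'ftp://127.0.0.1:3128'}
    for scheme in ('https', 'ftp'):
        if scheme in proxies and proxies[scheme].startswith(f'{scheme}://'):
            proxies[scheme] = 'http' + proxies[scheme][len(scheme):]
    print(proxies)  # both values become 'http://127.0.0.1:3128'

The dropped 3.9 branch is presumably no longer needed now that the supported Python baseline has moved past it.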
@@ -127,7 +127,7 @@ def extract_cookies_from_browser(browser_name, profile=None, logger=YDLLogger(),

 def _extract_firefox_cookies(browser_name, profile, container, logger):
-    MAX_SUPPORTED_DB_SCHEMA_VERSION = 16
+    MAX_SUPPORTED_DB_SCHEMA_VERSION = 17

     logger.info(f'Extracting cookies from {browser_name}')
     if not sqlite3:

@@ -169,6 +169,8 @@ def _extract_firefox_cookies(browser_name, profile, container, logger):
         db_schema_version = cursor.execute('PRAGMA user_version;').fetchone()[0]
         if db_schema_version > MAX_SUPPORTED_DB_SCHEMA_VERSION:
             logger.warning(f'Possibly unsupported {browser_name} cookies database version: {db_schema_version}')
+        else:
+            logger.debug(f'{browser_name} cookies database version: {db_schema_version}')
         if isinstance(container_id, int):
             logger.debug(
                 f'Only loading cookies from {browser_name} container "{container}", ID {container_id}')

@@ -680,7 +682,7 @@ class WindowsChromeCookieDecryptor(ChromeCookieDecryptor):

 def _extract_safari_cookies(profile, logger):
-    if sys.platform != 'darwin':
+    if sys.platform not in ('darwin', 'ios'):
         raise ValueError(f'unsupported platform: {sys.platform}')

     if profile:
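Note: Firefox records its cookie-database schema version in SQLite's `user_version` pragma; the bump to 17 widens the accepted range, and the new `else` branch logs the version even when it is supported. The check itself is just (profile path hypothetical):

    import sqlite3

    con = sqlite3.connect('cookies.sqlite')  # hypothetical copy of a Firefox profile DB
    db_schema_version = con.execute('PRAGMA user_version;').fetchone()[0]
    print(f'cookies database version: {db_schema_version}')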
@@ -81,6 +81,12 @@
 from . import Cryptodome

+try:
+    import yt_dlp_ejs
+except ImportError:
+    yt_dlp_ejs = None
+
+
 all_dependencies = {k: v for k, v in globals().items() if not k.startswith('_')}
 available_dependencies = {k: v for k, v in all_dependencies.items() if v}
@@ -62,7 +62,6 @@ class FileDownloader:
     test:               Download only first bytes to test the downloader.
     min_filesize:       Skip files smaller than this size
     max_filesize:       Skip files larger than this size
-    xattr_set_filesize: Set ytdl.filesize user xattribute with expected size.
     progress_delta:     The minimum time between progress output, in seconds
     external_downloader_args:  A dictionary of downloader keys (in lower case)
                         and a list of additional command-line arguments for the
@@ -462,7 +461,8 @@ class FileDownloader:
         min_sleep_interval = self.params.get('sleep_interval') or 0
         max_sleep_interval = self.params.get('max_sleep_interval') or 0

-        if available_at := info_dict.get('available_at'):
+        requested_formats = info_dict.get('requested_formats') or [info_dict]
+        if available_at := max(f.get('available_at') or 0 for f in requested_formats):
             forced_sleep_interval = available_at - int(time.time())
             if forced_sleep_interval > min_sleep_interval:
                 sleep_note = 'as required by the site'
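Note: the old code only consulted the top-level `available_at`, so a merged download (separate video and audio formats) could start before the later of the two became available; taking `max()` over `requested_formats` waits for both. A sketch with made-up timestamps:

    import time

    info_dict = {'requested_formats': [
        {'available_at': int(time.time()) + 30},  # video unlocks in ~30s
        {'available_at': None},                   # audio: no restriction, counted as 0
    ]}
    requested_formats = info_dict.get('requested_formats') or [info_dict]
    if available_at := max(f.get('available_at') or 0 for f in requested_formats):
        forced_sleep_interval = available_at - int(time.time())  # ~30s before starting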
@@ -457,6 +457,8 @@ class FFmpegFD(ExternalFD):

     @classmethod
     def available(cls, path=None):
+        # TODO: Fix path for ffmpeg
+        # Fixme: This may be wrong when --ffmpeg-location is used
         return FFmpegPostProcessor().available

     def on_process_started(self, proc, stdin):

@@ -488,20 +490,6 @@ class FFmpegFD(ExternalFD):
         if not self.params.get('verbose'):
             args += ['-hide_banner']

-        args += traverse_obj(info_dict, ('downloader_options', 'ffmpeg_args', ...))
-
-        # These exists only for compatibility. Extractors should use
-        # info_dict['downloader_options']['ffmpeg_args'] instead
-        args += info_dict.get('_ffmpeg_args') or []
-        seekable = info_dict.get('_seekable')
-        if seekable is not None:
-            # setting -seekable prevents ffmpeg from guessing if the server
-            # supports seeking(by adding the header `Range: bytes=0-`), which
-            # can cause problems in some cases
-            # https://github.com/ytdl-org/youtube-dl/issues/11800#issuecomment-275037127
-            # http://trac.ffmpeg.org/ticket/6125#comment:10
-            args += ['-seekable', '1' if seekable else '0']
-
         env = None
         proxy = self.params.get('proxy')
         if proxy:

@@ -521,39 +509,10 @@ class FFmpegFD(ExternalFD):
             env['HTTP_PROXY'] = proxy
             env['http_proxy'] = proxy

-        protocol = info_dict.get('protocol')
-
-        if protocol == 'rtmp':
-            player_url = info_dict.get('player_url')
-            page_url = info_dict.get('page_url')
-            app = info_dict.get('app')
-            play_path = info_dict.get('play_path')
-            tc_url = info_dict.get('tc_url')
-            flash_version = info_dict.get('flash_version')
-            live = info_dict.get('rtmp_live', False)
-            conn = info_dict.get('rtmp_conn')
-            if player_url is not None:
-                args += ['-rtmp_swfverify', player_url]
-            if page_url is not None:
-                args += ['-rtmp_pageurl', page_url]
-            if app is not None:
-                args += ['-rtmp_app', app]
-            if play_path is not None:
-                args += ['-rtmp_playpath', play_path]
-            if tc_url is not None:
-                args += ['-rtmp_tcurl', tc_url]
-            if flash_version is not None:
-                args += ['-rtmp_flashver', flash_version]
-            if live:
-                args += ['-rtmp_live', 'live']
-            if isinstance(conn, list):
-                for entry in conn:
-                    args += ['-rtmp_conn', entry]
-            elif isinstance(conn, str):
-                args += ['-rtmp_conn', conn]
-
         start_time, end_time = info_dict.get('section_start') or 0, info_dict.get('section_end')

+        fallback_input_args = traverse_obj(info_dict, ('downloader_options', 'ffmpeg_args', ...))
+
         selected_formats = info_dict.get('requested_formats') or [info_dict]
         for i, fmt in enumerate(selected_formats):
             is_http = re.match(r'https?://', fmt['url'])

@@ -563,7 +522,7 @@ class FFmpegFD(ExternalFD):
                     f'{cookie.name}={cookie.value}; path={cookie.path}; domain={cookie.domain};\r\n'
                     for cookie in cookies)])
             if fmt.get('http_headers') and is_http:
-                # Trailing \r\n after each HTTP header is important to prevent warning from ffmpeg/avconv:
+                # Trailing \r\n after each HTTP header is important to prevent warning from ffmpeg:
                 # [http @ 00000000003d2fa0] No trailing CRLF found in HTTP header.
                 args.extend(['-headers', ''.join(f'{key}: {val}\r\n' for key, val in fmt['http_headers'].items())])

@@ -572,6 +531,44 @@ class FFmpegFD(ExternalFD):
                 if end_time:
                     args += ['-t', str(end_time - start_time)]

+            protocol = fmt.get('protocol')
+
+            if protocol == 'rtmp':
+                player_url = fmt.get('player_url')
+                page_url = fmt.get('page_url')
+                app = fmt.get('app')
+                play_path = fmt.get('play_path')
+                tc_url = fmt.get('tc_url')
+                flash_version = fmt.get('flash_version')
+                live = fmt.get('rtmp_live', False)
+                conn = fmt.get('rtmp_conn')
+                if player_url is not None:
+                    args += ['-rtmp_swfverify', player_url]
+                if page_url is not None:
+                    args += ['-rtmp_pageurl', page_url]
+                if app is not None:
+                    args += ['-rtmp_app', app]
+                if play_path is not None:
+                    args += ['-rtmp_playpath', play_path]
+                if tc_url is not None:
+                    args += ['-rtmp_tcurl', tc_url]
+                if flash_version is not None:
+                    args += ['-rtmp_flashver', flash_version]
+                if live:
+                    args += ['-rtmp_live', 'live']
+                if isinstance(conn, list):
+                    for entry in conn:
+                        args += ['-rtmp_conn', entry]
+                elif isinstance(conn, str):
+                    args += ['-rtmp_conn', conn]
+
+            elif protocol == 'http_dash_segments' and info_dict.get('is_live'):
+                # ffmpeg may try to read past the latest available segments for
+                # live DASH streams unless we pass `-re`. In modern ffmpeg, this
+                # is an alias of `-readrate 1`, but `-readrate` was not added
+                # until ffmpeg 5.0, so we must stick to using `-re`
+                args += ['-re']
+
             url = fmt['url']
             if self.params.get('enable_file_urls') and url.startswith('file:'):
                 # The default protocol_whitelist is 'file,crypto,data' when reading local m3u8 URLs,

@@ -586,6 +583,7 @@ class FFmpegFD(ExternalFD):
                 # https://trac.ffmpeg.org/ticket/2702
                 url = re.sub(r'^file://(?:localhost)?/', 'file:' if os.name == 'nt' else 'file:/', url)

+            args += traverse_obj(fmt, ('downloader_options', 'ffmpeg_args', ...)) or fallback_input_args
             args += [*self._configuration_args((f'_i{i + 1}', '_i')), '-i', url]

             if not (start_time or end_time) or not self.params.get('force_keyframes_at_cuts'):

@@ -654,10 +652,6 @@ class FFmpegFD(ExternalFD):
         return retval


-class AVconvFD(FFmpegFD):
-    pass
-
-
 _BY_NAME = {
     klass.get_basename(): klass
     for name, klass in globals().items()
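Note: with the rtmp/DASH handling moved inside the per-format loop, extractor-supplied ffmpeg input arguments are now resolved per format too: a format's own `downloader_options['ffmpeg_args']` wins, otherwise the info_dict-level value captured earlier as `fallback_input_args` applies. Roughly (values are made up):

    from yt_dlp.utils import traverse_obj

    fallback_input_args = ['-seekable', '0']  # from the top-level info_dict
    fmt = {'downloader_options': {'ffmpeg_args': ['-re']}}
    args = traverse_obj(fmt, ('downloader_options', 'ffmpeg_args', ...)) or fallback_input_args
    print(args)  # ['-re'] -- an empty per-format list would fall back instead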
@@ -149,14 +149,14 @@ class FlvReader(io.BytesIO):
         segments_count = self.read_unsigned_char()
         segments = []
         for _ in range(segments_count):
-            box_size, box_type, box_data = self.read_box_info()
+            _box_size, box_type, box_data = self.read_box_info()
             assert box_type == b'asrt'
             segment = FlvReader(box_data).read_asrt()
             segments.append(segment)
         fragments_run_count = self.read_unsigned_char()
         fragments = []
         for _ in range(fragments_run_count):
-            box_size, box_type, box_data = self.read_box_info()
+            _box_size, box_type, box_data = self.read_box_info()
             assert box_type == b'afrt'
             fragments.append(FlvReader(box_data).read_afrt())

@@ -167,7 +167,7 @@ class FlvReader(io.BytesIO):
         }

     def read_bootstrap_info(self):
-        total_size, box_type, box_data = self.read_box_info()
+        _, box_type, box_data = self.read_box_info()
         assert box_type == b'abst'
         return FlvReader(box_data).read_abst()

@@ -324,9 +324,9 @@ class F4mFD(FragmentFD):
         if requested_bitrate is None or len(formats) == 1:
             # get the best format
             formats = sorted(formats, key=lambda f: f[0])
-            rate, media = formats[-1]
+            _, media = formats[-1]
         else:
-            rate, media = next(filter(
+            _, media = next(filter(
                 lambda f: int(f[0]) == requested_bitrate, formats))

         # Prefer baseURL for relative URLs as per 11.2 of F4M 3.0 spec.
@@ -13,12 +13,9 @@ from ..utils import (
     ContentTooShortError,
     RetryManager,
     ThrottledDownload,
-    XAttrMetadataError,
-    XAttrUnavailableError,
     int_or_none,
     parse_http_range,
     try_call,
-    write_xattr,
 )
 from ..utils.networking import HTTPHeaderDict

@@ -273,12 +270,6 @@ class HttpFD(FileDownloader):
                         self.report_error(f'unable to open for writing: {err}')
                         return False

-                if self.params.get('xattr_set_filesize', False) and data_len is not None:
-                    try:
-                        write_xattr(ctx.tmpfilename, 'user.ytdl.filesize', str(data_len).encode())
-                    except (XAttrUnavailableError, XAttrMetadataError) as err:
-                        self.report_error(f'unable to set filesize xattr: {err}')
-
                 try:
                     ctx.stream.write(data_block)
                 except OSError as err:
@@ -75,6 +75,7 @@ from .afreecatv import (
     AfreecaTVLiveIE,
     AfreecaTVUserIE,
 )
+from .agalega import AGalegaIE
 from .agora import (
     TokFMAuditionIE,
     TokFMPodcastIE,

@@ -143,6 +144,8 @@ from .archiveorg import (
 from .arcpublishing import ArcPublishingIE
 from .ard import (
     ARDIE,
+    ARDAudiothekIE,
+    ARDAudiothekPlaylistIE,
     ARDBetaMediathekIE,
     ARDMediathekCollectionIE,
 )

@@ -266,6 +269,7 @@ from .bitchute import (
     BitChuteChannelIE,
     BitChuteIE,
 )
+from .bitmovin import BitmovinIE
 from .blackboardcollaborate import (
     BlackboardCollaborateIE,
     BlackboardCollaborateLaunchIE,

@@ -337,6 +341,7 @@ from .cbc import (
     CBCGemIE,
     CBCGemLiveIE,
     CBCGemPlaylistIE,
+    CBCListenIE,
     CBCPlayerIE,
     CBCPlayerPlaylistIE,
 )

@@ -424,7 +429,6 @@ from .cpac import (
     CPACPlaylistIE,
 )
-from .cracked import CrackedIE
 from .crackle import CrackleIE
 from .craftsy import CraftsyIE
 from .crooksandliars import CrooksAndLiarsIE
 from .crowdbunker import (

@@ -444,10 +448,6 @@ from .curiositystream import (
     CuriosityStreamIE,
     CuriosityStreamSeriesIE,
 )
-from .cwtv import (
-    CWTVIE,
-    CWTVMovieIE,
-)
 from .cybrary import (
     CybraryCourseIE,
     CybraryIE,

@@ -642,7 +642,10 @@ from .filmon import (
     FilmOnIE,
 )
 from .filmweb import FilmwebIE
-from .firsttv import FirstTVIE
+from .firsttv import (
+    FirstTVIE,
+    FirstTVLiveIE,
+)
 from .fivetv import FiveTVIE
 from .flextv import FlexTVIE
 from .flickr import FlickrIE

@@ -689,6 +692,10 @@ from .frontendmasters import (
     FrontendMastersIE,
     FrontendMastersLessonIE,
 )
+from .frontro import (
+    TheChosenGroupIE,
+    TheChosenIE,
+)
 from .fujitv import FujiTVFODPlus7IE
 from .funk import FunkIE
 from .funker530 import Funker530IE

@@ -828,6 +835,13 @@ from .ichinanalive import (
     IchinanaLiveIE,
     IchinanaLiveVODIE,
 )
+from .idagio import (
+    IdagioAlbumIE,
+    IdagioPersonalPlaylistIE,
+    IdagioPlaylistIE,
+    IdagioRecordingIE,
+    IdagioTrackIE,
+)
 from .idolplus import IdolPlusIE
 from .ign import (
     IGNIE,

@@ -1085,7 +1099,10 @@ from .markiza import (
 from .massengeschmacktv import MassengeschmackTVIE
 from .masters import MastersIE
 from .matchtv import MatchTVIE
-from .mave import MaveIE
+from .mave import (
+    MaveChannelIE,
+    MaveIE,
+)
 from .mbn import MBNIE
 from .mdr import MDRIE
 from .medaltv import MedalTVIE

@@ -1141,7 +1158,6 @@ from .mit import (
     OCWMITIE,
     TechTVMITIE,
 )
-from .mitele import MiTeleIE
 from .mixch import (
     MixchArchiveIE,
     MixchIE,

@@ -1193,6 +1209,7 @@ from .musicdex import (
     MusicdexPlaylistIE,
     MusicdexSongIE,
 )
+from .mux import MuxIE
 from .mx3 import (
     Mx3IE,
     Mx3NeoIE,

@@ -1214,6 +1231,7 @@ from .n1 import (
     N1InfoAssetIE,
     N1InfoIIE,
 )
+from .nascar import NascarClassicsIE
 from .nate import (
     NateIE,
     NateProgramIE,

@@ -1267,6 +1285,10 @@ from .nest import (
     NestClipIE,
     NestIE,
 )
+from .netapp import (
+    NetAppCollectionIE,
+    NetAppVideoIE,
+)
 from .neteasemusic import (
     NetEaseMusicAlbumIE,
     NetEaseMusicDjRadioIE,

@@ -1359,6 +1381,7 @@ from .nova import (
     NovaIE,
 )
 from .novaplay import NovaPlayIE
+from .nowcanal import NowCanalIE
 from .nowness import (
     NownessIE,
     NownessPlaylistIE,

@@ -1433,6 +1456,7 @@ from .onet import (
     OnetPlIE,
 )
 from .onionstudios import OnionStudiosIE
+from .onsen import OnsenIE
 from .opencast import (
     OpencastIE,
     OpencastPlaylistIE,

@@ -1466,10 +1490,6 @@ from .panopto import (
     PanoptoListIE,
     PanoptoPlaylistIE,
 )
-from .paramountplus import (
-    ParamountPlusIE,
-    ParamountPlusSeriesIE,
-)
 from .parler import ParlerIE
 from .parlview import ParlviewIE
 from .parti import (

@@ -1523,10 +1543,6 @@ from .piramidetv import (
     PiramideTVChannelIE,
     PiramideTVIE,
 )
-from .pixivsketch import (
-    PixivSketchIE,
-    PixivSketchUserIE,
-)
 from .planetmarathi import PlanetMarathiIE
 from .platzi import (
     PlatziCourseIE,

@@ -1783,7 +1799,6 @@ from .rutube import (
     RutubePlaylistIE,
     RutubeTagsIE,
 )
-from .rutv import RUTVIE
 from .ruutu import RuutuIE
 from .ruv import (
     RuvIE,

@@ -1853,7 +1868,6 @@ from .simplecast import (
     SimplecastPodcastIE,
 )
-from .sina import SinaIE
 from .sixplay import SixPlayIE
 from .skeb import SkebIE
 from .sky import (
     SkyNewsIE,

@@ -1881,7 +1895,12 @@ from .skynewsau import SkyNewsAUIE
 from .slideshare import SlideshareIE
 from .slideslive import SlidesLiveIE
 from .slutload import SlutloadIE
-from .smotrim import SmotrimIE
+from .smotrim import (
+    SmotrimAudioIE,
+    SmotrimIE,
+    SmotrimLiveIE,
+    SmotrimPlaylistIE,
+)
 from .snapchat import SnapchatSpotlightIE
 from .snotr import SnotrIE
 from .softwhiteunderbelly import SoftWhiteUnderbellyIE

@@ -1929,10 +1948,6 @@ from .spiegel import SpiegelIE
 from .sport5 import Sport5IE
 from .sportbox import SportBoxIE
 from .sportdeutschland import SportDeutschlandIE
-from .spotify import (
-    SpotifyIE,
-    SpotifyShowIE,
-)
 from .spreaker import (
     SpreakerIE,
     SpreakerShowIE,

@@ -2153,6 +2168,7 @@ from .tubitv import (
 )
 from .tumblr import TumblrIE
 from .tunein import (
+    TuneInEmbedIE,
     TuneInPodcastEpisodeIE,
     TuneInPodcastIE,
     TuneInShortenerIE,

@@ -2287,7 +2303,10 @@ from .utreon import UtreonIE
 from .varzesh3 import Varzesh3IE
 from .vbox7 import Vbox7IE
 from .veo import VeoIE
-from .vesti import VestiIE
+from .vevo import (
+    VevoIE,
+    VevoPlaylistIE,
+)
 from .vgtv import (
     VGTVIE,
     BTArticleIE,

@@ -2472,7 +2491,6 @@ from .wykop import (
     WykopPostCommentIE,
     WykopPostIE,
 )
-from .xanimu import XanimuIE
 from .xboxclips import XboxClipsIE
 from .xhamster import (
     XHamsterEmbedIE,

@@ -2517,6 +2535,7 @@ from .yappy import (
     YappyIE,
     YappyProfileIE,
 )
+from .yfanefa import YfanefaIE
 from .yle_areena import YleAreenaIE
 from .youjizz import YouJizzIE
 from .youku import (
@@ -21,7 +21,7 @@ from ..utils import (

 class ABCIE(InfoExtractor):
     IE_NAME = 'abc.net.au'
-    _VALID_URL = r'https?://(?:www\.)?abc\.net\.au/(?:news|btn)/(?:[^/]+/){1,4}(?P<id>\d{5,})'
+    _VALID_URL = r'https?://(?:www\.)?abc\.net\.au/(?:news|btn|listen)/(?:[^/?#]+/){1,4}(?P<id>\d{5,})'

     _TESTS = [{
         'url': 'http://www.abc.net.au/news/2014-11-05/australia-to-staff-ebola-treatment-centre-in-sierra-leone/5868334',

@@ -53,8 +53,9 @@ class ABCIE(InfoExtractor):
         'info_dict': {
             'id': '6880080',
             'ext': 'mp3',
-            'title': 'NAB lifts interest rates, following Westpac and CBA',
+            'title': 'NAB lifts interest rates, following Westpac and CBA - ABC listen',
             'description': 'md5:f13d8edc81e462fce4a0437c7dc04728',
+            'thumbnail': r're:https://live-production\.wcms\.abc-cdn\.net\.au/2193d7437c84b25eafd6360c82b5fa21',
         },
     }, {
         'url': 'http://www.abc.net.au/news/2015-10-19/6866214',

@@ -64,8 +65,9 @@ class ABCIE(InfoExtractor):
         'info_dict': {
             'id': '10527914',
             'ext': 'mp4',
-            'title': 'WWI Centenary',
-            'description': 'md5:c2379ec0ca84072e86b446e536954546',
+            'title': 'WWI Centenary - Behind The News',
+            'description': 'md5:fa4405939ff750fade46ff0cd4c66a52',
+            'thumbnail': r're:https://live-production\.wcms\.abc-cdn\.net\.au/bcc3433c97bf992dff32ec5a768713c9',
         },
     }, {
         'url': 'https://www.abc.net.au/news/programs/the-world/2020-06-10/black-lives-matter-protests-spawn-support-for/12342074',

@@ -73,7 +75,8 @@ class ABCIE(InfoExtractor):
             'id': '12342074',
             'ext': 'mp4',
             'title': 'Black Lives Matter protests spawn support for Papuans in Indonesia',
-            'description': 'md5:2961a17dc53abc558589ccd0fb8edd6f',
+            'description': 'md5:625257209f2d14ce23cb4e3785da9beb',
+            'thumbnail': r're:https://live-production\.wcms\.abc-cdn\.net\.au/7ee6f190de6d7dbb04203e514bfae9ec',
         },
     }, {
         'url': 'https://www.abc.net.au/btn/newsbreak/btn-newsbreak-20200814/12560476',

@@ -93,7 +96,16 @@ class ABCIE(InfoExtractor):
             'title': 'Wagner Group retreating from Russia, leader Prigozhin to move to Belarus',
             'ext': 'mp4',
             'description': 'Wagner troops leave Rostov-on-Don and\xa0Yevgeny Prigozhin will move to Belarus under a deal brokered by Belarusian President Alexander Lukashenko to end the mutiny.',
-            'thumbnail': 'https://live-production.wcms.abc-cdn.net.au/0c170f5b57f0105c432f366c0e8e267b?impolicy=wcms_crop_resize&cropH=2813&cropW=5000&xPos=0&yPos=249&width=862&height=485',
+            'thumbnail': r're:https://live-production\.wcm\.abc-cdn\.net\.au/0c170f5b57f0105c432f366c0e8e267b',
         },
+    }, {
+        'url': 'https://www.abc.net.au/listen/programs/the-followers-madness-of-two/presents-followers-madness-of-two/105697646',
+        'info_dict': {
+            'id': '105697646',
+            'title': 'INTRODUCING — The Followers: Madness of Two - ABC listen',
+            'ext': 'mp3',
+            'description': 'md5:2310cd0d440a4e01656abea15db8d1f3',
+            'thumbnail': r're:https://live-production\.wcms\.abc-cdn\.net\.au/90d7078214e5d66553ffb7fcf0da0cda',
+        },
     }]

@@ -309,6 +321,8 @@ class ABCIViewIE(InfoExtractor):
                 entry_protocol='m3u8_native', m3u8_id='hls', fatal=False)
             if formats:
                 break
+        else:
+            formats = []

         subtitles = {}
         src_vtt = stream.get('captions', {}).get('src-vtt')
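Note: besides the new `listen` branch, `[^/]` became `[^/?#]` so path segments cannot swallow a query string or fragment. The new ABC listen test matches like so:

    import re

    _VALID_URL = r'https?://(?:www\.)?abc\.net\.au/(?:news|btn|listen)/(?:[^/?#]+/){1,4}(?P<id>\d{5,})'
    url = 'https://www.abc.net.au/listen/programs/the-followers-madness-of-two/presents-followers-madness-of-two/105697646'
    print(re.match(_VALID_URL, url).group('id'))  # 105697646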
yt_dlp/extractor/agalega.py (new file, 91 lines)

@@ -0,0 +1,91 @@
+import json
+import time
+
+from .common import InfoExtractor
+from ..utils import jwt_decode_hs256, url_or_none
+from ..utils.traversal import traverse_obj
+
+
+class AGalegaBaseIE(InfoExtractor):
+    _access_token = None
+
+    @staticmethod
+    def _jwt_is_expired(token):
+        return jwt_decode_hs256(token)['exp'] - time.time() < 120
+
+    def _refresh_access_token(self, video_id):
+        AGalegaBaseIE._access_token = self._download_json(
+            'https://www.agalega.gal/api/fetch-api/jwt/token', video_id,
+            note='Downloading access token',
+            data=json.dumps({
+                'username': None,
+                'password': None,
+                'client': 'crtvg',
+                'checkExistsCookies': False,
+            }).encode())['access']
+
+    def _call_api(self, endpoint, display_id, note, fatal=True, query=None):
+        if not AGalegaBaseIE._access_token or self._jwt_is_expired(AGalegaBaseIE._access_token):
+            self._refresh_access_token(endpoint)
+        return self._download_json(
+            f'https://api-agalega.interactvty.com/api/2.0/contents/{endpoint}', display_id,
+            note=note, fatal=fatal, query=query,
+            headers={'Authorization': f'jwtok {AGalegaBaseIE._access_token}'})
+
+
+class AGalegaIE(AGalegaBaseIE):
+    IE_NAME = 'agalega:videos'
+    _VALID_URL = r'https?://(?:www\.)?agalega\.gal/videos/(?:detail/)?(?P<id>[0-9]+)'
+    _TESTS = [{
+        'url': 'https://www.agalega.gal/videos/288664-lr-ninguencheconta',
+        'md5': '04533a66c5f863d08dd9724b11d1c223',
+        'info_dict': {
+            'id': '288664',
+            'title': 'Roberto e Ángel Martín atenden consultas dos espectadores',
+            'description': 'O cómico ademais fai un repaso dalgúns momentos da súa traxectoria profesional',
+            'thumbnail': 'https://crtvg-bucket.flumotion.cloud/content_cards/2ef32c3b9f6249d9868fd8f11d389d3d.png',
+            'ext': 'mp4',
+        },
+    }, {
+        'url': 'https://www.agalega.gal/videos/detail/296152-pulso-activo-7',
+        'md5': '26df7fdcf859f38ad92d837279d6b56d',
+        'info_dict': {
+            'id': '296152',
+            'title': 'Pulso activo | 18-11-2025',
+            'description': 'Anxo, Noemí, Silvia e Estrella comparten as sensacións da clase de Eddy.',
+            'thumbnail': 'https://crtvg-bucket.flumotion.cloud/content_cards/a6bb7da6c8994b82bf961ac6cad1707b.png',
+            'ext': 'mp4',
+        },
+    }]
+
+    def _real_extract(self, url):
+        video_id = self._match_id(url)
+        content_data = self._call_api(
+            f'content/{video_id}/', video_id, note='Downloading content data', fatal=False,
+            query={
+                'optional_fields': 'image,is_premium,short_description,has_subtitle',
+            })
+        resource_data = self._call_api(
+            f'content_resources/{video_id}/', video_id, note='Downloading resource data',
+            query={
+                'optional_fields': 'media_url',
+            })
+
+        formats = []
+        subtitles = {}
+        for m3u8_url in traverse_obj(resource_data, ('results', ..., 'media_url', {url_or_none})):
+            fmts, subs = self._extract_m3u8_formats_and_subtitles(
+                m3u8_url, video_id, ext='mp4', m3u8_id='hls')
+            formats.extend(fmts)
+            self._merge_subtitles(subs, target=subtitles)
+
+        return {
+            'id': video_id,
+            'formats': formats,
+            'subtitles': subtitles,
+            **traverse_obj(content_data, {
+                'title': ('name', {str}),
+                'description': (('description', 'short_description'), {str}, any),
+                'thumbnail': ('image', {url_or_none}),
+            }),
+        }
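Note: AGalegaBaseIE above and the reworked AppleConnectIE below share the same token-freshness test: decode the JWT payload (no signature verification is needed just to read `exp`) and treat the token as stale within 120 seconds of expiry. A self-contained sketch of what `yt_dlp.utils.jwt_decode_hs256` provides here:

    import base64
    import json
    import time

    def decode_jwt_payload(jwt):
        # middle segment of header.payload.signature, base64url with padding restored
        payload_b64 = jwt.split('.')[1]
        return json.loads(base64.urlsafe_b64decode(payload_b64 + '=' * (-len(payload_b64) % 4)))

    def jwt_is_expired(token, leeway=120):
        return decode_jwt_payload(token)['exp'] - time.time() < leeway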
@@ -1,47 +1,125 @@
+import time
+
 from .common import InfoExtractor
-from ..utils import ExtractorError, str_to_int
+from ..utils import (
+    ExtractorError,
+    extract_attributes,
+    float_or_none,
+    jwt_decode_hs256,
+    jwt_encode,
+    parse_resolution,
+    qualities,
+    unified_strdate,
+    update_url,
+    url_or_none,
+    urljoin,
+)
+from ..utils.traversal import (
+    find_element,
+    require,
+    traverse_obj,
+)


 class AppleConnectIE(InfoExtractor):
-    _VALID_URL = r'https?://itunes\.apple\.com/\w{0,2}/?post/(?:id)?sa\.(?P<id>[\w-]+)'
+    IE_NAME = 'apple:music:connect'
+    IE_DESC = 'Apple Music Connect'
+
+    _BASE_URL = 'https://music.apple.com'
+    _QUALITIES = {
+        'provisionalUploadVideo': None,
+        'sdVideo': 480,
+        'sdVideoWithPlusAudio': 480,
+        'sd480pVideo': 480,
+        '720pHdVideo': 720,
+        '1080pHdVideo': 1080,
+    }
+    _VALID_URL = r'https?://music\.apple\.com/[\w-]+/post/(?P<id>\d+)'
     _TESTS = [{
-        'url': 'https://itunes.apple.com/us/post/idsa.4ab17a39-2720-11e5-96c5-a5b38f6c42d3',
-        'md5': 'c1d41f72c8bcaf222e089434619316e4',
+        'url': 'https://music.apple.com/us/post/1018290019',
         'info_dict': {
-            'id': '4ab17a39-2720-11e5-96c5-a5b38f6c42d3',
+            'id': '1018290019',
             'ext': 'm4v',
             'title': 'Energy',
-            'uploader': 'Drake',
-            'thumbnail': r're:^https?://.*\.jpg$',
+            'duration': 177.911,
+            'thumbnail': r're:https?://.+\.png',
             'upload_date': '20150710',
-            'timestamp': 1436545535,
+            'uploader': 'Drake',
         },
     }, {
-        'url': 'https://itunes.apple.com/us/post/sa.0fe0229f-2457-11e5-9f40-1bb645f2d5d9',
-        'only_matching': True,
+        'url': 'https://music.apple.com/us/post/1016746627',
+        'info_dict': {
+            'id': '1016746627',
+            'ext': 'm4v',
+            'title': 'Body Shop (Madonna) - Chellous Lima (Acoustic Cover)',
+            'duration': 210.278,
+            'thumbnail': r're:https?://.+\.png',
+            'upload_date': '20150706',
+            'uploader': 'Chellous Lima',
+        },
     }]

+    _jwt = None
+
+    @staticmethod
+    def _jwt_is_expired(token):
+        return jwt_decode_hs256(token)['exp'] - time.time() < 120
+
+    def _get_token(self, webpage, video_id):
+        if self._jwt and not self._jwt_is_expired(self._jwt):
+            return self._jwt
+
+        js_url = traverse_obj(webpage, (
+            {find_element(tag='script', attr='crossorigin', value='', html=True)},
+            {extract_attributes}, 'src', {urljoin(self._BASE_URL)}, {require('JS URL')}))
+        js = self._download_webpage(
+            js_url, video_id, 'Downloading token JS', 'Unable to download token JS')
+
+        header = jwt_encode({}, '', headers={'alg': 'ES256', 'kid': 'WebPlayKid'}).split('.')[0]
+        self._jwt = self._search_regex(
+            fr'(["\'])(?P<jwt>{header}(?:\.[\w-]+){{2}})\1', js, 'JSON Web Token', group='jwt')
+        if self._jwt_is_expired(self._jwt):
+            raise ExtractorError('The fetched token is already expired')
+
+        return self._jwt
+
     def _real_extract(self, url):
         video_id = self._match_id(url)
         webpage = self._download_webpage(url, video_id)

-        try:
-            video_json = self._html_search_regex(
-                r'class="auc-video-data">(\{.*?\})', webpage, 'json')
-        except ExtractorError:
-            raise ExtractorError('This post doesn\'t contain a video', expected=True)
+        videos = self._download_json(
+            'https://amp-api.music.apple.com/v1/catalog/us/uploaded-videos',
+            video_id, headers={
+                'Authorization': f'Bearer {self._get_token(webpage, video_id)}',
+                'Origin': self._BASE_URL,
+            }, query={'ids': video_id, 'l': 'en-US'})
+        attributes = traverse_obj(videos, (
+            'data', ..., 'attributes', any, {require('video information')}))

-        video_data = self._parse_json(video_json, video_id)
-        timestamp = str_to_int(self._html_search_regex(r'data-timestamp="(\d+)"', webpage, 'timestamp'))
-        like_count = str_to_int(self._html_search_regex(r'(\d+) Loves', webpage, 'like count', default=None))
+        formats = []
+        quality = qualities(list(self._QUALITIES.keys()))
+        for format_id, src_url in traverse_obj(attributes, (
+            'assetTokens', {dict.items}, lambda _, v: url_or_none(v[1]),
+        )):
+            formats.append({
+                'ext': 'm4v',
+                'format_id': format_id,
+                'height': self._QUALITIES.get(format_id),
+                'quality': quality(format_id),
+                'url': src_url,
+                **parse_resolution(update_url(src_url, query=None), lenient=True),
+            })

         return {
             'id': video_id,
-            'url': video_data['sslSrc'],
-            'title': video_data['title'],
-            'description': video_data['description'],
-            'uploader': video_data['artistName'],
-            'thumbnail': video_data['artworkUrl'],
-            'timestamp': timestamp,
-            'like_count': like_count,
+            'formats': formats,
+            'thumbnail': self._html_search_meta(
+                ['og:image', 'og:image:secure_url', 'twitter:image'], webpage),
+            **traverse_obj(attributes, {
+                'title': ('name', {str}),
+                'duration': ('durationInMilliseconds', {float_or_none(scale=1000)}),
+                'upload_date': ('uploadDate', {unified_strdate}),
+                'uploader': (('artistName', 'uploadingArtistName'), {str}, any),
+                'webpage_url': ('postUrl', {url_or_none}),
+            }),
         }
@@ -1,5 +1,6 @@
 from .common import InfoExtractor
 from ..utils import (
+    clean_html,
     clean_podcast_url,
     int_or_none,
     parse_iso8601,

@@ -17,7 +18,7 @@ class ApplePodcastsIE(InfoExtractor):
             'ext': 'mp3',
             'title': 'Ferreck Dawn - To The Break of Dawn 117',
             'episode': 'Ferreck Dawn - To The Break of Dawn 117',
-            'description': 'md5:1fc571102f79dbd0a77bfd71ffda23bc',
+            'description': 'md5:8c4f5c2c30af17ed6a98b0b9daf15b76',
             'upload_date': '20240812',
             'timestamp': 1723449600,
             'duration': 3596,

@@ -58,7 +59,7 @@ class ApplePodcastsIE(InfoExtractor):
             r'<script [^>]*\bid=["\']serialized-server-data["\'][^>]*>', webpage,
             'server data', episode_id, contains_pattern=r'\[{(?s:.+)}\]')[0]['data']
         model_data = traverse_obj(server_data, (
-            'headerButtonItems', lambda _, v: v['$kind'] == 'bookmark' and v['modelType'] == 'EpisodeOffer',
+            'headerButtonItems', lambda _, v: v['$kind'] == 'share' and v['modelType'] == 'EpisodeLockup',
            'model', {dict}, any))

         return {

@@ -68,7 +69,8 @@ class ApplePodcastsIE(InfoExtractor):
                 or self._yield_json_ld(webpage, episode_id, fatal=False), episode_id, fatal=False),
             **traverse_obj(model_data, {
                 'title': ('title', {str}),
-                'url': ('streamUrl', {clean_podcast_url}),
+                'description': ('summary', {clean_html}),
+                'url': ('playAction', 'episodeOffer', 'streamUrl', {clean_podcast_url}),
                 'timestamp': ('releaseDate', {parse_iso8601}),
                 'duration': ('duration', {int_or_none}),
             }),
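Note: the fix follows a markup change on Apple's side: the episode model now hangs off the 'share' header button rather than the 'bookmark' one, and the stream URL moved under `playAction.episodeOffer`. The `traverse_obj` path walks the list with a predicate and takes the first matching model; a trimmed-down stand-in for the server data:

    from yt_dlp.utils import traverse_obj

    server_data = {'headerButtonItems': [
        {'$kind': 'bookmark', 'modelType': 'EpisodeOffer', 'model': {}},
        {'$kind': 'share', 'modelType': 'EpisodeLockup',
         'model': {'playAction': {'episodeOffer': {'streamUrl': 'https://example.com/ep.mp3'}}}},
    ]}
    model_data = traverse_obj(server_data, (
        'headerButtonItems', lambda _, v: v['$kind'] == 'share' and v['modelType'] == 'EpisodeLockup',
        'model', {dict}, any))
    print(traverse_obj(model_data, ('playAction', 'episodeOffer', 'streamUrl')))  # the mp3 URL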
@@ -5,12 +5,9 @@ import re
 import urllib.parse

 from .common import InfoExtractor
-from .youtube import YoutubeBaseInfoExtractor, YoutubeIE
-from ..networking import HEADRequest
-from ..networking.exceptions import HTTPError
+from .youtube import YoutubeBaseInfoExtractor
 from ..utils import (
     KNOWN_EXTENSIONS,
     ExtractorError,
     bug_reports_message,
     clean_html,
     dict_get,

@@ -21,18 +18,14 @@ from ..utils import (
     join_nonempty,
     js_to_json,
     merge_dicts,
-    mimetype2ext,
     orderedSet,
     parse_duration,
     parse_qs,
     str_or_none,
-    str_to_int,
     traverse_obj,
-    try_get,
     unified_strdate,
     unified_timestamp,
     url_or_none,
-    urlhandle_detect_ext,
 )
@@ -471,7 +464,7 @@ class YoutubeWebArchiveIE(InfoExtractor):
        'url': 'https://web.archive.org/web/20110712231407/http://www.youtube.com/watch?v=lTx3G6h2xyA',
        'info_dict': {
            'id': 'lTx3G6h2xyA',
-           'ext': 'flv',
+           'ext': 'mp4',
            'title': 'Madeon - Pop Culture (live mashup)',
            'upload_date': '20110711',
            'uploader': 'Madeon',

@@ -578,7 +571,7 @@ class YoutubeWebArchiveIE(InfoExtractor):
        'url': 'https://web.archive.org/web/20110126141719/http://www.youtube.com/watch?v=Q_yjX80U7Yc',
        'info_dict': {
            'id': 'Q_yjX80U7Yc',
-           'ext': 'flv',
+           'ext': 'webm',
            'title': 'Spray Paint Art by Clay Butler: Purple Fantasy Forest',
            'uploader_id': 'claybutlermusic',
            'description': 'md5:4595264559e3d0a0ceb3f011f6334543',
@@ -680,6 +673,55 @@ class YoutubeWebArchiveIE(InfoExtractor):
            'upload_date': '20120407',
            'uploader_id': 'thecomputernerd01',
        },
+    }, {
+        # Contains split audio/video formats
+        'url': 'ytarchive:o_T_S_TU12M',
+        'info_dict': {
+            'id': 'o_T_S_TU12M',
+            'ext': 'mp4',
+            'title': 'Prairie Pulse 1218; Lin Enger, Paul Olson',
+            'description': 'md5:36e7a34cdc8508e35a920ec042e799c7',
+            'uploader': 'Prairie Public',
+            'channel_id': 'UC4BOzQel6tvJm7OEDd3vZlw',
+            'channel_url': 'https://www.youtube.com/channel/UC4BOzQel6tvJm7OEDd3vZlw',
+            'duration': 1606,
+            'upload_date': '20150213',
+        },
+    }, {
+        # Video unavailable through wayback-fakeurl
+        'url': 'ytarchive:SQCom7wjGDs',
+        'info_dict': {
+            'id': 'SQCom7wjGDs',
+            'ext': 'mp4',
+            'title': 'Jamin Warren from PBS Game/Show decides that Portal is a feminist Game [Top Hats and No Brain]',
+            'description': 'md5:c0cb876dd075483ead9afcc86798efb0',
+            'uploader': 'Top Hats and Champagne',
+            'uploader_id': 'sparrowtm',
+            'uploader_url': 'https://www.youtube.com/user/sparrowtm',
+            'channel_id': 'UCW3T5nG4iEkI7HjG-Du3HQA',
+            'channel_url': 'https://www.youtube.com/channel/UCW3T5nG4iEkI7HjG-Du3HQA',
+            'duration': 1500,
+            'thumbnail': 'https://web.archive.org/web/20160108040020if_/https://i.ytimg.com/vi/SQCom7wjGDs/maxresdefault.jpg',
+            'upload_date': '20160107',
+        },
+    }, {
+        # dmuxed formats
+        'url': 'https://web.archive.org/web/20240922160632/https://www.youtube.com/watch?v=z7hzvTL3k1k',
+        'info_dict': {
+            'id': 'z7hzvTL3k1k',
+            'ext': 'webm',
+            'title': 'Praise the Lord and Pass the Ammunition (BARRXN REMIX)',
+            'description': 'md5:45dbf2c71c23b0734c8dfb82dd1e94b6',
+            'uploader': 'Barrxn',
+            'uploader_id': 'TheRockstar6086',
+            'uploader_url': 'https://www.youtube.com/user/TheRockstar6086',
+            'channel_id': 'UCjJPGUTtvR9uizmawn2ThqA',
+            'channel_url': 'https://www.youtube.com/channel/UCjJPGUTtvR9uizmawn2ThqA',
+            'duration': 125,
+            'thumbnail': r're:https?://.*\.(jpg|webp)',
+            'upload_date': '20201207',
+        },
+        'params': {'format': 'bv'},
     }, {
        'url': 'https://web.archive.org/web/http://www.youtube.com/watch?v=kH-G_aIBlFw',
        'only_matching': True,
@@ -724,6 +766,113 @@ class YoutubeWebArchiveIE(InfoExtractor):
     _OLDEST_CAPTURE_DATE = 20050214000000
     _NEWEST_CAPTURE_DATE = 20500101000000

+    _FORMATS = {
+        '5': {'ext': 'flv', 'width': 400, 'height': 240, 'acodec': 'mp3', 'vcodec': 'h263'},
+        '6': {'ext': 'flv', 'width': 450, 'height': 270, 'acodec': 'mp3', 'vcodec': 'h263'},
+        '13': {'ext': '3gp', 'acodec': 'aac', 'vcodec': 'mp4v'},
+        '17': {'ext': '3gp', 'width': 176, 'height': 144, 'acodec': 'aac', 'vcodec': 'mp4v'},
+        '18': {'ext': 'mp4', 'width': 640, 'height': 360, 'acodec': 'aac', 'vcodec': 'h264'},
+        '22': {'ext': 'mp4', 'width': 1280, 'height': 720, 'acodec': 'aac', 'vcodec': 'h264'},
+        '34': {'ext': 'flv', 'width': 640, 'height': 360, 'acodec': 'aac', 'vcodec': 'h264'},
+        '35': {'ext': 'flv', 'width': 854, 'height': 480, 'acodec': 'aac', 'vcodec': 'h264'},
+        # itag 36 videos are either 320x180 (BaW_jenozKc) or 320x240 (__2ABJjxzNo), abr varies as well
+        '36': {'ext': '3gp', 'width': 320, 'acodec': 'aac', 'vcodec': 'mp4v'},
+        '37': {'ext': 'mp4', 'width': 1920, 'height': 1080, 'acodec': 'aac', 'vcodec': 'h264'},
+        '38': {'ext': 'mp4', 'width': 4096, 'height': 3072, 'acodec': 'aac', 'vcodec': 'h264'},
+        '43': {'ext': 'webm', 'width': 640, 'height': 360, 'acodec': 'vorbis', 'vcodec': 'vp8'},
+        '44': {'ext': 'webm', 'width': 854, 'height': 480, 'acodec': 'vorbis', 'vcodec': 'vp8'},
+        '45': {'ext': 'webm', 'width': 1280, 'height': 720, 'acodec': 'vorbis', 'vcodec': 'vp8'},
+        '46': {'ext': 'webm', 'width': 1920, 'height': 1080, 'acodec': 'vorbis', 'vcodec': 'vp8'},
+        '59': {'ext': 'mp4', 'width': 854, 'height': 480, 'acodec': 'aac', 'vcodec': 'h264'},
+        '78': {'ext': 'mp4', 'width': 854, 'height': 480, 'acodec': 'aac', 'vcodec': 'h264'},
+
+        # 3D videos
+        '82': {'ext': 'mp4', 'height': 360, 'format_note': '3D', 'acodec': 'aac', 'vcodec': 'h264', 'preference': -20},
+        '83': {'ext': 'mp4', 'height': 480, 'format_note': '3D', 'acodec': 'aac', 'vcodec': 'h264', 'preference': -20},
+        '84': {'ext': 'mp4', 'height': 720, 'format_note': '3D', 'acodec': 'aac', 'vcodec': 'h264', 'preference': -20},
+        '85': {'ext': 'mp4', 'height': 1080, 'format_note': '3D', 'acodec': 'aac', 'vcodec': 'h264', 'preference': -20},
+        '100': {'ext': 'webm', 'height': 360, 'format_note': '3D', 'acodec': 'vorbis', 'vcodec': 'vp8', 'preference': -20},
+        '101': {'ext': 'webm', 'height': 480, 'format_note': '3D', 'acodec': 'vorbis', 'vcodec': 'vp8', 'preference': -20},
+        '102': {'ext': 'webm', 'height': 720, 'format_note': '3D', 'acodec': 'vorbis', 'vcodec': 'vp8', 'preference': -20},

+        # Apple HTTP Live Streaming
+        '91': {'ext': 'mp4', 'height': 144, 'format_note': 'HLS', 'acodec': 'aac', 'vcodec': 'h264'},
+        '92': {'ext': 'mp4', 'height': 240, 'format_note': 'HLS', 'acodec': 'aac', 'vcodec': 'h264'},
+        '93': {'ext': 'mp4', 'height': 360, 'format_note': 'HLS', 'acodec': 'aac', 'vcodec': 'h264'},
+        '94': {'ext': 'mp4', 'height': 480, 'format_note': 'HLS', 'acodec': 'aac', 'vcodec': 'h264'},
+        '95': {'ext': 'mp4', 'height': 720, 'format_note': 'HLS', 'acodec': 'aac', 'vcodec': 'h264'},
+        '96': {'ext': 'mp4', 'height': 1080, 'format_note': 'HLS', 'acodec': 'aac', 'vcodec': 'h264'},
+        '132': {'ext': 'mp4', 'height': 240, 'format_note': 'HLS', 'acodec': 'aac', 'vcodec': 'h264'},
+        '151': {'ext': 'mp4', 'height': 72, 'format_note': 'HLS', 'acodec': 'aac', 'vcodec': 'h264'},

+        # DASH mp4 video
+        '133': {'ext': 'mp4', 'height': 240, 'vcodec': 'h264', 'acodec': 'none'},
+        '134': {'ext': 'mp4', 'height': 360, 'vcodec': 'h264', 'acodec': 'none'},
+        '135': {'ext': 'mp4', 'height': 480, 'vcodec': 'h264', 'acodec': 'none'},
+        '136': {'ext': 'mp4', 'height': 720, 'vcodec': 'h264', 'acodec': 'none'},
+        '137': {'ext': 'mp4', 'height': 1080, 'vcodec': 'h264', 'acodec': 'none'},
+        '138': {'ext': 'mp4', 'vcodec': 'h264', 'acodec': 'none'},  # Height can vary (https://github.com/ytdl-org/youtube-dl/issues/4559)
+        '160': {'ext': 'mp4', 'height': 144, 'vcodec': 'h264', 'acodec': 'none'},
+        '212': {'ext': 'mp4', 'height': 480, 'vcodec': 'h264', 'acodec': 'none'},
+        '264': {'ext': 'mp4', 'height': 1440, 'vcodec': 'h264', 'acodec': 'none'},
+        '298': {'ext': 'mp4', 'height': 720, 'vcodec': 'h264', 'fps': 60, 'acodec': 'none'},
+        '299': {'ext': 'mp4', 'height': 1080, 'vcodec': 'h264', 'fps': 60, 'acodec': 'none'},
+        '266': {'ext': 'mp4', 'height': 2160, 'vcodec': 'h264', 'acodec': 'none'},

+        # Dash mp4 audio
+        '139': {'ext': 'm4a', 'acodec': 'aac', 'vcodec': 'none'},
+        '140': {'ext': 'm4a', 'acodec': 'aac', 'vcodec': 'none'},
+        '141': {'ext': 'm4a', 'acodec': 'aac', 'vcodec': 'none'},
+        '256': {'ext': 'm4a', 'acodec': 'aac', 'vcodec': 'none'},
+        '258': {'ext': 'm4a', 'acodec': 'aac', 'vcodec': 'none'},
+        '325': {'ext': 'm4a', 'acodec': 'dtse', 'vcodec': 'none'},
+        '328': {'ext': 'm4a', 'acodec': 'ec-3', 'vcodec': 'none'},

+        # Dash webm
+        '167': {'ext': 'webm', 'height': 360, 'width': 640, 'vcodec': 'vp8'},
+        '168': {'ext': 'webm', 'height': 480, 'width': 854, 'vcodec': 'vp8'},
+        '169': {'ext': 'webm', 'height': 720, 'width': 1280, 'vcodec': 'vp8'},
+        '170': {'ext': 'webm', 'height': 1080, 'width': 1920, 'vcodec': 'vp8'},
+        '218': {'ext': 'webm', 'height': 480, 'width': 854, 'vcodec': 'vp8'},
+        '219': {'ext': 'webm', 'height': 480, 'width': 854, 'vcodec': 'vp8'},
+        '278': {'ext': 'webm', 'height': 144, 'vcodec': 'vp9', 'acodec': 'none'},
+        '242': {'ext': 'webm', 'height': 240, 'vcodec': 'vp9', 'acodec': 'none'},
+        '243': {'ext': 'webm', 'height': 360, 'vcodec': 'vp9', 'acodec': 'none'},
+        '244': {'ext': 'webm', 'height': 480, 'vcodec': 'vp9', 'acodec': 'none'},
+        '245': {'ext': 'webm', 'height': 480, 'vcodec': 'vp9', 'acodec': 'none'},
+        '246': {'ext': 'webm', 'height': 480, 'vcodec': 'vp9', 'acodec': 'none'},
+        '247': {'ext': 'webm', 'height': 720, 'vcodec': 'vp9', 'acodec': 'none'},
+        '248': {'ext': 'webm', 'height': 1080, 'vcodec': 'vp9', 'acodec': 'none'},
+        '271': {'ext': 'webm', 'height': 1440, 'vcodec': 'vp9', 'acodec': 'none'},
+        # itag 272 videos are either 3840x2160 (e.g. RtoitU2A-3E) or 7680x4320 (sLprVF6d7Ug)
+        '272': {'ext': 'webm', 'height': 2160, 'vcodec': 'vp9', 'acodec': 'none'},
+        '302': {'ext': 'webm', 'height': 720, 'vcodec': 'vp9', 'fps': 60, 'acodec': 'none'},
+        '303': {'ext': 'webm', 'height': 1080, 'vcodec': 'vp9', 'fps': 60, 'acodec': 'none'},
+        '308': {'ext': 'webm', 'height': 1440, 'vcodec': 'vp9', 'fps': 60, 'acodec': 'none'},
+        '313': {'ext': 'webm', 'height': 2160, 'vcodec': 'vp9', 'acodec': 'none'},
+        '315': {'ext': 'webm', 'height': 2160, 'vcodec': 'vp9', 'fps': 60, 'acodec': 'none'},

+        # Dash webm audio
+        '171': {'ext': 'webm', 'acodec': 'vorbis', 'vcodec': 'none'},
+        '172': {'ext': 'webm', 'acodec': 'vorbis', 'vcodec': 'none'},

+        # Dash webm audio with opus inside
+        '249': {'ext': 'webm', 'acodec': 'opus', 'vcodec': 'none'},
+        '250': {'ext': 'webm', 'acodec': 'opus', 'vcodec': 'none'},
+        '251': {'ext': 'webm', 'acodec': 'opus', 'vcodec': 'none'},

+        # av01 video only formats sometimes served with "unknown" codecs
+        '394': {'ext': 'mp4', 'height': 144, 'vcodec': 'av01.0.00M.08', 'acodec': 'none'},
+        '395': {'ext': 'mp4', 'height': 240, 'vcodec': 'av01.0.00M.08', 'acodec': 'none'},
+        '396': {'ext': 'mp4', 'height': 360, 'vcodec': 'av01.0.01M.08', 'acodec': 'none'},
+        '397': {'ext': 'mp4', 'height': 480, 'vcodec': 'av01.0.04M.08', 'acodec': 'none'},
+        '398': {'ext': 'mp4', 'height': 720, 'vcodec': 'av01.0.05M.08', 'acodec': 'none'},
+        '399': {'ext': 'mp4', 'height': 1080, 'vcodec': 'av01.0.08M.08', 'acodec': 'none'},
+        '400': {'ext': 'mp4', 'height': 1440, 'vcodec': 'av01.0.12M.08', 'acodec': 'none'},
+        '401': {'ext': 'mp4', 'height': 2160, 'vcodec': 'av01.0.12M.08', 'acodec': 'none'},
+    }
+
     def _call_cdx_api(self, item_id, url, filters: list | None = None, collapse: list | None = None, query: dict | None = None, note=None, fatal=False):
         # CDX docs: https://github.com/internetarchive/wayback/blob/master/wayback-cdx-server/README.md
         query = {
@@ -740,7 +889,7 @@ class YoutubeWebArchiveIE(InfoExtractor):
             note or 'Downloading CDX API JSON', query=query, fatal=fatal)
         if isinstance(res, list) and len(res) >= 2:
             # format response to make it easier to use
-            return [dict(zip(res[0], v)) for v in res[1:]]
+            return [dict(zip(res[0], v)) for v in res[1:]]  # noqa: B905
         elif not isinstance(res, list) or len(res) != 0:
             self.report_warning('Error while parsing CDX API response' + bug_reports_message())
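Note: the CDX API answers with a header row followed by data rows, so zipping each row against row 0 turns the response into one dict per capture; header and rows always have equal length, hence suppressing the lint (B905) rather than adding a `strict=` argument. Illustrative response shape:

    res = [
        ['urlkey', 'timestamp', 'original'],
        ['com,youtube)/watch?v=lTx3G6h2xyA', '20110712231407', 'http://www.youtube.com/watch?v=lTx3G6h2xyA'],
    ]
    print([dict(zip(res[0], v)) for v in res[1:]])
    # [{'urlkey': ..., 'timestamp': '20110712231407', 'original': 'http://www.youtube.com/watch?v=lTx3G6h2xyA'}]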
@@ -929,27 +1078,30 @@ class YoutubeWebArchiveIE(InfoExtractor):
         capture_dates.extend([self._OLDEST_CAPTURE_DATE, self._NEWEST_CAPTURE_DATE])
         return orderedSet(filter(None, capture_dates))

+    def _parse_fmt(self, fmt, extra_info=None):
+        format_id = traverse_obj(fmt, ('url', {parse_qs}, 'itag', 0))
+        return {
+            'format_id': format_id,
+            **self._FORMATS.get(format_id, {}),
+            **traverse_obj(fmt, {
+                'url': ('url', {lambda x: f'https://web.archive.org/web/2id_/{x}'}),
+                'ext': ('ext', {str}),
+                'filesize': ('url', {parse_qs}, 'clen', 0, {int_or_none}),
+            }),
+            **(extra_info or {}),
+        }
+
     def _real_extract(self, url):
         video_id, url_date, url_date_2 = self._match_valid_url(url).group('id', 'date', 'date2')
         url_date = url_date or url_date_2

-        urlh = None
-        retry_manager = self.RetryManager(fatal=False)
-        for retry in retry_manager:
-            try:
-                urlh = self._request_webpage(
-                    HEADRequest(f'https://web.archive.org/web/2oe_/http://wayback-fakeurl.archive.org/yt/{video_id}'),
-                    video_id, note='Fetching archived video file url', expected_status=True)
-            except ExtractorError as e:
-                # HTTP Error 404 is expected if the video is not saved.
-                if isinstance(e.cause, HTTPError) and e.cause.status == 404:
-                    self.raise_no_formats(
-                        'The requested video is not archived, indexed, or there is an issue with web.archive.org (try again later)', expected=True)
-                else:
-                    retry.error = e
+        video_info = self._download_json(
+            'https://web.archive.org/__wb/videoinfo', video_id,
+            query={'vtype': 'youtube', 'vid': video_id})

-        if retry_manager.error:
-            self.raise_no_formats(retry_manager.error, expected=True, video_id=video_id)
+        if not traverse_obj(video_info, 'formats'):
+            self.raise_no_formats(
+                'The requested video is not archived or indexed', expected=True)

         capture_dates = self._get_capture_dates(video_id, int_or_none(url_date))
         self.write_debug('Captures to try: ' + join_nonempty(*capture_dates, delim=', '))
@@ -968,25 +1120,15 @@ class YoutubeWebArchiveIE(InfoExtractor):

         info['thumbnails'] = self._extract_thumbnails(video_id)

-        if urlh:
-            url = urllib.parse.unquote(urlh.url)
-            video_file_url_qs = parse_qs(url)
-            # Attempt to recover any ext & format info from playback url & response headers
-            fmt = {'url': url, 'filesize': int_or_none(urlh.headers.get('x-archive-orig-content-length'))}
-            itag = try_get(video_file_url_qs, lambda x: x['itag'][0])
-            if itag and itag in YoutubeIE._formats:
-                fmt.update(YoutubeIE._formats[itag])
-                fmt.update({'format_id': itag})
-            else:
-                mime = try_get(video_file_url_qs, lambda x: x['mime'][0])
-                ext = (mimetype2ext(mime)
-                       or urlhandle_detect_ext(urlh)
-                       or mimetype2ext(urlh.headers.get('x-archive-guessed-content-type')))
-                fmt.update({'ext': ext})
-            info['formats'] = [fmt]
-            if not info.get('duration'):
-                info['duration'] = str_to_int(try_get(video_file_url_qs, lambda x: x['dur'][0]))
+        formats = []
+        if video_info.get('dmux'):
+            for vf in traverse_obj(video_info, ('formats', 'video', lambda _, v: url_or_none(v['url']))):
+                formats.append(self._parse_fmt(vf, {'acodec': 'none'}))
+            for af in traverse_obj(video_info, ('formats', 'audio', lambda _, v: url_or_none(v['url']))):
+                formats.append(self._parse_fmt(af, {'vcodec': 'none'}))
+        else:
+            for fmt in traverse_obj(video_info, ('formats', lambda _, v: url_or_none(v['url']))):
+                formats.append(self._parse_fmt(fmt))
+        info['formats'] = formats

         if not info.get('title'):
             info['title'] = video_id
         return info
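Note: the extractor now builds formats from the `__wb/videoinfo` endpoint instead of probing wayback-fakeurl: each format URL is replayed through the raw-content timestamp wildcard `2id_`, the itag is recovered from the URL's query string, and demuxed captures (`dmux`) become separate video-only/audio-only entries for the downloader to merge. A sketch of the URL handling with a made-up capture URL:

    from yt_dlp.utils import int_or_none, parse_qs, traverse_obj

    fmt = {'url': 'https://r4---sn-example.googlevideo.com/videoplayback?itag=136&clen=1048576'}
    print(traverse_obj(fmt, ('url', {parse_qs}, 'itag', 0)))                 # '136'
    print(f'https://web.archive.org/web/2id_/{fmt["url"]}')                  # raw replay URL
    print(traverse_obj(fmt, ('url', {parse_qs}, 'clen', 0, {int_or_none})))  # 1048576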
Some files were not shown because too many files have changed in this diff.