diff --git a/.github/actionlint.yml b/.github/actionlint.yml index d6d5b9abd2..bdd3901a37 100644 --- a/.github/actionlint.yml +++ b/.github/actionlint.yml @@ -1,9 +1,3 @@ -self-hosted-runner: - labels: - # Workaround for the outdated runner list in actionlint v1.7.7 - # Ref: https://github.com/rhysd/actionlint/issues/533 - - windows-11-arm - config-variables: - KEEP_CACHE_WARM - PUSH_VERSION_COMMIT diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 29c18723c2..b7487f1c2f 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -153,10 +153,12 @@ jobs: 'os': 'musllinux', 'arch': 'x86_64', 'runner': 'ubuntu-24.04', + 'python_version': '3.14', }, { 'os': 'musllinux', 'arch': 'aarch64', 'runner': 'ubuntu-24.04-arm', + 'python_version': '3.14', }], } INPUTS = json.loads(os.environ['INPUTS']) @@ -214,7 +216,7 @@ jobs: - name: Build Unix platform-independent binary run: | - make all tar + make all-extra tar - name: Verify --update-to if: vars.UPDATE_TO_VERIFICATION @@ -341,14 +343,14 @@ jobs: brew uninstall --ignore-dependencies python3 python3 -m venv ~/yt-dlp-build-venv source ~/yt-dlp-build-venv/bin/activate - python3 devscripts/install_deps.py -o --include build - python3 devscripts/install_deps.py --print --include pyinstaller > requirements.txt + python3 devscripts/install_deps.py --only-optional-groups --include-group build + python3 devscripts/install_deps.py --print --include-group pyinstaller > requirements.txt # We need to ignore wheels otherwise we break universal2 builds python3 -m pip install -U --no-binary :all: -r requirements.txt # We need to fuse our own universal2 wheels for curl_cffi python3 -m pip install -U 'delocate==0.11.0' mkdir curl_cffi_whls curl_cffi_universal2 - python3 devscripts/install_deps.py --print -o --include curl-cffi > requirements.txt + python3 devscripts/install_deps.py --print --only-optional-groups --include-group curl-cffi > requirements.txt for platform in "macosx_11_0_arm64" 
"macosx_11_0_x86_64"; do python3 -m pip download \ --only-binary=:all: \ @@ -482,11 +484,11 @@ jobs: mkdir /pyi-wheels python -m pip download -d /pyi-wheels --no-deps --require-hashes "pyinstaller@${Env:PYI_URL}#sha256=${Env:PYI_HASH}" python -m pip install --force-reinstall -U "/pyi-wheels/${Env:PYI_WHEEL}" - python devscripts/install_deps.py -o --include build + python devscripts/install_deps.py --only-optional-groups --include-group build if ("${Env:ARCH}" -eq "x86") { python devscripts/install_deps.py } else { - python devscripts/install_deps.py --include curl-cffi + python devscripts/install_deps.py --include-group curl-cffi } - name: Prepare diff --git a/.github/workflows/challenge-tests.yml b/.github/workflows/challenge-tests.yml new file mode 100644 index 0000000000..89895eb07b --- /dev/null +++ b/.github/workflows/challenge-tests.yml @@ -0,0 +1,77 @@ +name: Challenge Tests +on: + push: + paths: + - .github/workflows/challenge-tests.yml + - test/test_jsc/*.py + - yt_dlp/extractor/youtube/jsc/**.js + - yt_dlp/extractor/youtube/jsc/**.py + - yt_dlp/extractor/youtube/pot/**.py + - yt_dlp/utils/_jsruntime.py + pull_request: + paths: + - .github/workflows/challenge-tests.yml + - test/test_jsc/*.py + - yt_dlp/extractor/youtube/jsc/**.js + - yt_dlp/extractor/youtube/jsc/**.py + - yt_dlp/extractor/youtube/pot/**.py + - yt_dlp/utils/_jsruntime.py +permissions: + contents: read + +concurrency: + group: challenge-tests-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: ${{ github.event_name == 'pull_request' }} + +jobs: + tests: + name: Challenge Tests + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, windows-latest] + python-version: ['3.10', '3.11', '3.12', '3.13', '3.14', pypy-3.11] + env: + QJS_VERSION: '2025-04-26' # Earliest version with rope strings + steps: + - uses: actions/checkout@v5 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v6 + with: + python-version: 
${{ matrix.python-version }} + - name: Install Deno + uses: denoland/setup-deno@v2 + with: + deno-version: '2.0.0' # minimum supported version + - name: Install Bun + uses: oven-sh/setup-bun@v2 + with: + # minimum supported version is 1.0.31 but earliest available Windows version is 1.1.0 + bun-version: ${{ (matrix.os == 'windows-latest' && '1.1.0') || '1.0.31' }} + - name: Install Node + uses: actions/setup-node@v6 + with: + node-version: '20.0' # minimum supported version + - name: Install QuickJS (Linux) + if: matrix.os == 'ubuntu-latest' + run: | + wget "https://bellard.org/quickjs/binary_releases/quickjs-linux-x86_64-${QJS_VERSION}.zip" -O quickjs.zip + unzip quickjs.zip qjs + sudo install qjs /usr/local/bin/qjs + - name: Install QuickJS (Windows) + if: matrix.os == 'windows-latest' + shell: pwsh + run: | + Invoke-WebRequest "https://bellard.org/quickjs/binary_releases/quickjs-win-x86_64-${Env:QJS_VERSION}.zip" -OutFile quickjs.zip + unzip quickjs.zip + - name: Install test requirements + run: | + python ./devscripts/install_deps.py --print --only-optional-groups --include-group test > requirements.txt + python ./devscripts/install_deps.py --print -c certifi -c requests -c urllib3 -c yt-dlp-ejs >> requirements.txt + python -m pip install -U -r requirements.txt + - name: Run tests + timeout-minutes: 15 + run: | + python -m yt_dlp -v --js-runtimes node --js-runtimes bun --js-runtimes quickjs || true + python ./devscripts/run_tests.py test/test_jsc -k download diff --git a/.github/workflows/core.yml b/.github/workflows/core.yml index d196f59d83..3cb17f2b7d 100644 --- a/.github/workflows/core.yml +++ b/.github/workflows/core.yml @@ -7,6 +7,7 @@ on: - test/** - yt_dlp/**.py - '!yt_dlp/extractor/**.py' + - yt_dlp/extractor/youtube/**.py - yt_dlp/extractor/__init__.py - yt_dlp/extractor/common.py - yt_dlp/extractor/extractors.py @@ -17,6 +18,7 @@ on: - test/** - yt_dlp/**.py - '!yt_dlp/extractor/**.py' + - yt_dlp/extractor/youtube/**.py - yt_dlp/extractor/__init__.py 
- yt_dlp/extractor/common.py - yt_dlp/extractor/extractors.py @@ -54,15 +56,36 @@ jobs: python-version: pypy-3.11 steps: - uses: actions/checkout@v5 + with: + fetch-depth: 0 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v6 with: python-version: ${{ matrix.python-version }} - name: Install test requirements - run: python3 ./devscripts/install_deps.py --include test --include curl-cffi + run: python ./devscripts/install_deps.py --include-group test --include-group curl-cffi - name: Run tests timeout-minutes: 15 continue-on-error: False + env: + source: ${{ (github.event_name == 'push' && github.event.before) || 'origin/master' }} + target: ${{ (github.event_name == 'push' && github.event.after) || 'HEAD' }} + shell: bash run: | + flags=() + # Check if a networking file is involved + patterns="\ + ^yt_dlp/networking/ + ^yt_dlp/utils/networking\.py$ + ^test/test_http_proxy\.py$ + ^test/test_networking\.py$ + ^test/test_networking_utils\.py$ + ^test/test_socks\.py$ + ^test/test_websockets\.py$ + ^pyproject\.toml$ + " + if git diff --name-only "${source}" "${target}" | grep -Ef <(printf '%s' "${patterns}"); then + flags+=(--flaky) + fi python3 -m yt_dlp -v || true # Print debug head - python3 ./devscripts/run_tests.py --pytest-args '--reruns 2 --reruns-delay 3.0' core + python3 -m devscripts.run_tests "${flags[@]}" --pytest-args '--reruns 2 --reruns-delay 3.0' core diff --git a/.github/workflows/download.yml b/.github/workflows/download.yml index 8163bd1a23..d075270d7b 100644 --- a/.github/workflows/download.yml +++ b/.github/workflows/download.yml @@ -15,10 +15,10 @@ jobs: with: python-version: '3.10' - name: Install test requirements - run: python3 ./devscripts/install_deps.py --include dev + run: python ./devscripts/install_deps.py --include-group dev - name: Run tests continue-on-error: true - run: python3 ./devscripts/run_tests.py download + run: python ./devscripts/run_tests.py download full: name: Full Download Tests @@ -42,7 +42,7 
@@ jobs: with: python-version: ${{ matrix.python-version }} - name: Install test requirements - run: python3 ./devscripts/install_deps.py --include dev + run: python ./devscripts/install_deps.py --include-group dev - name: Run tests continue-on-error: true - run: python3 ./devscripts/run_tests.py download + run: python ./devscripts/run_tests.py download diff --git a/.github/workflows/quick-test.yml b/.github/workflows/quick-test.yml index c26628b421..a6e84b1d80 100644 --- a/.github/workflows/quick-test.yml +++ b/.github/workflows/quick-test.yml @@ -15,7 +15,7 @@ jobs: with: python-version: '3.10' - name: Install test requirements - run: python3 ./devscripts/install_deps.py -o --include test + run: python ./devscripts/install_deps.py --only-optional-groups --include-group test - name: Run tests timeout-minutes: 15 run: | @@ -31,9 +31,9 @@ jobs: with: python-version: '3.10' - name: Install dev dependencies - run: python3 ./devscripts/install_deps.py -o --include static-analysis + run: python ./devscripts/install_deps.py --only-optional-groups --include-group static-analysis - name: Make lazy extractors - run: python3 ./devscripts/make_lazy_extractors.py + run: python ./devscripts/make_lazy_extractors.py - name: Run ruff run: ruff check --output-format github . 
- name: Run autopep8 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index b60a0650a5..afe1d384b4 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -180,7 +180,7 @@ jobs: - name: Install Requirements run: | sudo apt -y install pandoc man - python devscripts/install_deps.py -o --include build + python devscripts/install_deps.py --only-optional-groups --include-group build - name: Prepare env: @@ -269,9 +269,10 @@ jobs: "[![Master](https://img.shields.io/badge/Master%20builds-lightblue.svg?style=for-the-badge)]" \ "(https://github.com/${MASTER_REPO}/releases/latest \"Master builds\")" >> ./RELEASE_NOTES fi - printf '\n\n%s\n\n%s%s\n\n---\n' \ + printf '\n\n%s\n\n%s%s%s\n\n---\n' \ "#### A description of the various files is in the [README](https://github.com/${REPOSITORY}#release-files)" \ - "The PyInstaller-bundled executables are subject to the licenses described in " \ + "The zipimport Unix executable contains code licensed under ISC and MIT. 
" \ + "The PyInstaller-bundled executables are subject to these and other licenses, all of which are compiled in " \ "[THIRD_PARTY_LICENSES.txt](https://github.com/${BASE_REPO}/blob/${HEAD_SHA}/THIRD_PARTY_LICENSES.txt)" >> ./RELEASE_NOTES python ./devscripts/make_changelog.py -vv --collapsible >> ./RELEASE_NOTES printf '%s\n\n' '**This is a pre-release build**' >> ./PRERELEASE_NOTES diff --git a/.github/workflows/signature-tests.yml b/.github/workflows/signature-tests.yml deleted file mode 100644 index 77f5e6a4c8..0000000000 --- a/.github/workflows/signature-tests.yml +++ /dev/null @@ -1,41 +0,0 @@ -name: Signature Tests -on: - push: - paths: - - .github/workflows/signature-tests.yml - - test/test_youtube_signature.py - - yt_dlp/jsinterp.py - pull_request: - paths: - - .github/workflows/signature-tests.yml - - test/test_youtube_signature.py - - yt_dlp/jsinterp.py -permissions: - contents: read - -concurrency: - group: signature-tests-${{ github.event.pull_request.number || github.ref }} - cancel-in-progress: ${{ github.event_name == 'pull_request' }} - -jobs: - tests: - name: Signature Tests - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest, windows-latest] - python-version: ['3.10', '3.11', '3.12', '3.13', '3.14', pypy-3.11] - steps: - - uses: actions/checkout@v5 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v6 - with: - python-version: ${{ matrix.python-version }} - - name: Install test requirements - run: python3 ./devscripts/install_deps.py --only-optional --include test - - name: Run tests - timeout-minutes: 15 - run: | - python3 -m yt_dlp -v || true # Print debug head - python3 ./devscripts/run_tests.py test/test_youtube_signature.py diff --git a/.github/workflows/test-workflows.yml b/.github/workflows/test-workflows.yml index 6c993e6b34..d39ab8814b 100644 --- a/.github/workflows/test-workflows.yml +++ b/.github/workflows/test-workflows.yml @@ -17,8 +17,8 @@ on: permissions: 
contents: read env: - ACTIONLINT_VERSION: "1.7.7" - ACTIONLINT_SHA256SUM: 023070a287cd8cccd71515fedc843f1985bf96c436b7effaecce67290e7e0757 + ACTIONLINT_VERSION: "1.7.8" + ACTIONLINT_SHA256SUM: be92c2652ab7b6d08425428797ceabeb16e31a781c07bc388456b4e592f3e36a ACTIONLINT_REPO: https://github.com/rhysd/actionlint jobs: @@ -34,7 +34,7 @@ jobs: env: ACTIONLINT_TARBALL: ${{ format('actionlint_{0}_linux_amd64.tar.gz', env.ACTIONLINT_VERSION) }} run: | - python -m devscripts.install_deps -o --include test + python -m devscripts.install_deps --only-optional-groups --include-group test sudo apt -y install shellcheck python -m pip install -U pyflakes curl -LO "${ACTIONLINT_REPO}/releases/download/v${ACTIONLINT_VERSION}/${ACTIONLINT_TARBALL}" diff --git a/.gitignore b/.gitignore index 40bb34d2aa..af6da639db 100644 --- a/.gitignore +++ b/.gitignore @@ -107,6 +107,7 @@ README.txt test/testdata/sigs/player-*.js test/testdata/thumbnails/empty.webp test/testdata/thumbnails/foo\ %d\ bar/foo_%d.* +.ejs-* # Binary /youtube-dl @@ -129,3 +130,6 @@ yt-dlp.zip # Plugins ytdlp_plugins/ yt-dlp-plugins + +# Packages +yt_dlp_ejs/ diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 89327581c0..99f18b2f32 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -177,7 +177,7 @@ While it is strongly recommended to use `hatch` for yt-dlp development, if you a ```shell # To only install development dependencies: -$ python -m devscripts.install_deps --include dev +$ python -m devscripts.install_deps --include-group dev # Or, for an editable install plus dev dependencies: $ python -m pip install -e ".[default,dev]" diff --git a/CONTRIBUTORS b/CONTRIBUTORS index 888d48d561..51369b35b6 100644 --- a/CONTRIBUTORS +++ b/CONTRIBUTORS @@ -818,3 +818,19 @@ robin-mu shssoichiro thanhtaivtt uoag +CaramelConnoisseur +ctengel +einstein95 +evilpie +i3p9 +JrM2628 +krystophny +matyb08 +pha1n0q +PierceLBrooks +sepro +TheQWERTYCodr +thomasmllt +w4grfw +WeidiDeng +Zer0spectrum diff --git a/Changelog.md b/Changelog.md 
index 8737441e86..b115fd045c 100644 --- a/Changelog.md +++ b/Changelog.md @@ -4,6 +4,71 @@ # To create a release, dispatch the https://github.com/yt-dlp/yt-dlp/actions/workflows/release.yml workflow on master --> +### 2025.11.12 + +#### Important changes +- **An external JavaScript runtime is now required for full YouTube support** +yt-dlp now requires users to have an external JavaScript runtime (e.g. Deno) installed in order to solve the JavaScript challenges presented by YouTube. [Read more](https://github.com/yt-dlp/yt-dlp/issues/15012) + +#### Core changes +- **cookies** + - [Allow `--cookies-from-browser` for Safari on iOS](https://github.com/yt-dlp/yt-dlp/commit/e6414d64e73d86d65bb357e5ad59d0ca080d5812) ([#14950](https://github.com/yt-dlp/yt-dlp/issues/14950)) by [pha1n0q](https://github.com/pha1n0q) + - [Support Firefox cookies database v17](https://github.com/yt-dlp/yt-dlp/commit/bf7e04e9d8bd3c4a4614b67ce617b7ae5d17d62a) ([#15010](https://github.com/yt-dlp/yt-dlp/issues/15010)) by [Grub4K](https://github.com/Grub4K) +- **sponsorblock**: [Add `hook` category](https://github.com/yt-dlp/yt-dlp/commit/52f3c56e83bbb25eec2496b0499768753732a093) ([#14845](https://github.com/yt-dlp/yt-dlp/issues/14845)) by [seproDev](https://github.com/seproDev) +- **update**: [Fix PyInstaller onedir variant detection](https://github.com/yt-dlp/yt-dlp/commit/1c2ad94353d1c9e03615d20b6bbfc293286c7a32) ([#14800](https://github.com/yt-dlp/yt-dlp/issues/14800)) by [bashonly](https://github.com/bashonly) + +#### Extractor changes +- **1tv**: live: [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/19c5d7c53013440ec4f3f56ebbb067531b272f3f) ([#14299](https://github.com/yt-dlp/yt-dlp/issues/14299)) by [swayll](https://github.com/swayll) +- **ardaudiothek**: [Add extractors](https://github.com/yt-dlp/yt-dlp/commit/0046fbcbfceee32fa2f68a8ea00cca02765470b6) ([#14309](https://github.com/yt-dlp/yt-dlp/issues/14309)) by [evilpie](https://github.com/evilpie), 
[marieell](https://github.com/marieell) +- **bunnycdn** + - [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/228ae9f0f2b441fa1296db2ed2b7afbd4a9a62a1) ([#14954](https://github.com/yt-dlp/yt-dlp/issues/14954)) by [seproDev](https://github.com/seproDev) + - [Support player subdomain URLs](https://github.com/yt-dlp/yt-dlp/commit/3ef867451cd9604b4195dfee00db768619629b2d) ([#14979](https://github.com/yt-dlp/yt-dlp/issues/14979)) by [einstein95](https://github.com/einstein95) +- **discoverynetworksde**: [Fix extraction](https://github.com/yt-dlp/yt-dlp/commit/10dea209d2460daf924c93835ddc2f0301cf2cd4) ([#14818](https://github.com/yt-dlp/yt-dlp/issues/14818)) by [dirkf](https://github.com/dirkf), [w4grfw](https://github.com/w4grfw) (With fixes in [f3c255b](https://github.com/yt-dlp/yt-dlp/commit/f3c255b63bd26069151fc3d3ba6dc626bb62ad6e) by [bashonly](https://github.com/bashonly)) +- **floatplane**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/1ac7e6005cd3be9fff0b28be189c3a68ecd4c593) ([#14984](https://github.com/yt-dlp/yt-dlp/issues/14984)) by [i3p9](https://github.com/i3p9) +- **googledrive** + - [Fix subtitles extraction](https://github.com/yt-dlp/yt-dlp/commit/6d05cee4df30774ddce5c5c751fd2118f40c24fe) ([#14809](https://github.com/yt-dlp/yt-dlp/issues/14809)) by [seproDev](https://github.com/seproDev) + - [Rework extractor](https://github.com/yt-dlp/yt-dlp/commit/70f1098312fe53bc85358f7bd624370878b2fa28) ([#14746](https://github.com/yt-dlp/yt-dlp/issues/14746)) by [seproDev](https://github.com/seproDev) +- **kika**: [Do not extract non-existent subtitles](https://github.com/yt-dlp/yt-dlp/commit/79f9232ffbd57dde91c372b673b42801edaa9e53) ([#14813](https://github.com/yt-dlp/yt-dlp/issues/14813)) by [InvalidUsernameException](https://github.com/InvalidUsernameException) +- **mux**: [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/a0bda3b78609593ce1127215fc035c1a308a89b6) ([#14914](https://github.com/yt-dlp/yt-dlp/issues/14914)) by 
[PierceLBrooks](https://github.com/PierceLBrooks), [seproDev](https://github.com/seproDev) +- **nascarclassics**: [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/e8a6b1ca92f2a0ce2c187668165be23dc5506aab) ([#14866](https://github.com/yt-dlp/yt-dlp/issues/14866)) by [JrM2628](https://github.com/JrM2628) +- **nbc**: [Detect and discard DRM formats](https://github.com/yt-dlp/yt-dlp/commit/ee3a106f34124f0e2d28f062f5302863fd7639be) ([#14844](https://github.com/yt-dlp/yt-dlp/issues/14844)) by [bashonly](https://github.com/bashonly) +- **ntv.ru**: [Rework extractor](https://github.com/yt-dlp/yt-dlp/commit/5dde0d0c9fcef2ce57e486b2e563e0dff9b2845a) ([#14934](https://github.com/yt-dlp/yt-dlp/issues/14934)) by [anlar](https://github.com/anlar), [seproDev](https://github.com/seproDev) (With fixes in [a86eeaa](https://github.com/yt-dlp/yt-dlp/commit/a86eeaadf236ceaf6bb232eb410cf21572538aa6) by [seproDev](https://github.com/seproDev)) +- **play.tv**: [Update extractor for new domain](https://github.com/yt-dlp/yt-dlp/commit/73fd850d170e01c47c31aaa6aa8fe90856d9ad18) ([#14905](https://github.com/yt-dlp/yt-dlp/issues/14905)) by [thomasmllt](https://github.com/thomasmllt) +- **tubetugraz**: [Support alternate URL format](https://github.com/yt-dlp/yt-dlp/commit/f3597cfafcab4d7d4c6d41bff3647681301f1e6b) ([#14718](https://github.com/yt-dlp/yt-dlp/issues/14718)) by [krystophny](https://github.com/krystophny) +- **twitch** + - [Fix playlist extraction](https://github.com/yt-dlp/yt-dlp/commit/cb78440e468608fd55546280b537387d375335f2) ([#15008](https://github.com/yt-dlp/yt-dlp/issues/15008)) by [bashonly](https://github.com/bashonly), [ctengel](https://github.com/ctengel) + - stream: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/7eff676183518175ce495ae63291c89f9b39f02a) ([#14988](https://github.com/yt-dlp/yt-dlp/issues/14988)) by [seproDev](https://github.com/seproDev) + - vod: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/b46c572b26be15683584102c5fb7e7bfde0c9821) 
([#14999](https://github.com/yt-dlp/yt-dlp/issues/14999)) by [Zer0spectrum](https://github.com/Zer0spectrum) +- **urplay**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/808b1fed76fbd07840cc23a346c11334e3d34f43) ([#14785](https://github.com/yt-dlp/yt-dlp/issues/14785)) by [seproDev](https://github.com/seproDev) +- **web.archive**: youtube: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/d9e3011fd1c3a75871a50e78533afe78ad427ce3) ([#14753](https://github.com/yt-dlp/yt-dlp/issues/14753)) by [seproDev](https://github.com/seproDev) +- **xhamster**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/a1d6351c3fc82c07fa0ee70811ed84807f6bbb58) ([#14948](https://github.com/yt-dlp/yt-dlp/issues/14948)) by [CaramelConnoisseur](https://github.com/CaramelConnoisseur), [dhwz](https://github.com/dhwz) +- **youtube** + - [Add `tv_downgraded` client](https://github.com/yt-dlp/yt-dlp/commit/61cf34f5447177a73ba25ea9a47d7df516ca3b3b) ([#14887](https://github.com/yt-dlp/yt-dlp/issues/14887)) by [seproDev](https://github.com/seproDev) (With fixes in [fa35eb2](https://github.com/yt-dlp/yt-dlp/commit/fa35eb27eaf27df7b5854f527a89fc828c9e0ec0)) + - [Fix `web_embedded` client extraction](https://github.com/yt-dlp/yt-dlp/commit/d6ee67725397807bbb5edcd0b2c94f5bca62d3f4) ([#14843](https://github.com/yt-dlp/yt-dlp/issues/14843)) by [bashonly](https://github.com/bashonly), [seproDev](https://github.com/seproDev) + - [Fix auto-generated metadata extraction](https://github.com/yt-dlp/yt-dlp/commit/a56217f9f6c594f6c419ce8dce9134198a9d90d0) ([#13896](https://github.com/yt-dlp/yt-dlp/issues/13896)) by [TheQWERTYCodr](https://github.com/TheQWERTYCodr) + - [Fix original language detection](https://github.com/yt-dlp/yt-dlp/commit/afc44022d0b736b2b3e87b52490bd35c53c53632) ([#14919](https://github.com/yt-dlp/yt-dlp/issues/14919)) by [bashonly](https://github.com/bashonly) + - [Implement external n/sig 
solver](https://github.com/yt-dlp/yt-dlp/commit/6224a3898821965a7d6a2cb9cc2de40a0fd6e6bc) ([#14157](https://github.com/yt-dlp/yt-dlp/issues/14157)) by [bashonly](https://github.com/bashonly), [coletdjnz](https://github.com/coletdjnz), [Grub4K](https://github.com/Grub4K), [seproDev](https://github.com/seproDev) (With fixes in [4b4223b](https://github.com/yt-dlp/yt-dlp/commit/4b4223b436fb03a12628679daed32ae4fc15ae4b), [ee98be4](https://github.com/yt-dlp/yt-dlp/commit/ee98be4ad767b77e4d8dd9bfd3c7d10f2e8397ff), [c0c9f30](https://github.com/yt-dlp/yt-dlp/commit/c0c9f30695db314df084e8701a7c376eb54f283c), [cacd163](https://github.com/yt-dlp/yt-dlp/commit/cacd1630a1a59e92f857d0d175c8730cffbf9801), [8636a9b](https://github.com/yt-dlp/yt-dlp/commit/8636a9bac3bed99984c1e297453660468ecf504b)) + - [Support collaborators](https://github.com/yt-dlp/yt-dlp/commit/f87cfadb5c3cba8e9dc4231c9554548e9edb3882) ([#14677](https://github.com/yt-dlp/yt-dlp/issues/14677)) by [seproDev](https://github.com/seproDev) + - tab: [Fix duration extraction for feeds](https://github.com/yt-dlp/yt-dlp/commit/1d2f0edaf978a5541cfb8f7e83fec433c65c1011) ([#14668](https://github.com/yt-dlp/yt-dlp/issues/14668)) by [WeidiDeng](https://github.com/WeidiDeng) + +#### Downloader changes +- **ffmpeg** + - [Apply `ffmpeg_args` for each format](https://github.com/yt-dlp/yt-dlp/commit/ffb7b7f446b6c67a28c66598ae91f4f2263e0d75) ([#14886](https://github.com/yt-dlp/yt-dlp/issues/14886)) by [bashonly](https://github.com/bashonly) + - [Limit read rate for DASH livestreams](https://github.com/yt-dlp/yt-dlp/commit/7af6d81f35aea8832023daa30ada10e6673a0529) ([#14918](https://github.com/yt-dlp/yt-dlp/issues/14918)) by [bashonly](https://github.com/bashonly) + +#### Networking changes +- [Ensure underlying file object is closed when fully read](https://github.com/yt-dlp/yt-dlp/commit/5767fb4ab108dddb07fc839a3b0f4d323a7c4bea) ([#14935](https://github.com/yt-dlp/yt-dlp/issues/14935)) by [coletdjnz](https://github.com/coletdjnz) + 
+#### Misc. changes +- [Fix zsh path argument completion](https://github.com/yt-dlp/yt-dlp/commit/c96e9291ab7bd6e7da66d33424982c8b0b4431c7) ([#14953](https://github.com/yt-dlp/yt-dlp/issues/14953)) by [matyb08](https://github.com/matyb08) +- **build**: [Bump musllinux Python version to 3.14](https://github.com/yt-dlp/yt-dlp/commit/646904cd3a79429ec5fdc43f904b3f57ae213f34) ([#14623](https://github.com/yt-dlp/yt-dlp/issues/14623)) by [bashonly](https://github.com/bashonly) +- **cleanup** + - Miscellaneous + - [c63b4e2](https://github.com/yt-dlp/yt-dlp/commit/c63b4e2a2b81cc78397c8709ef53ffd29bada213) by [bashonly](https://github.com/bashonly), [matyb08](https://github.com/matyb08), [sepro](https://github.com/sepro) + - [335653b](https://github.com/yt-dlp/yt-dlp/commit/335653be82d5ef999cfc2879d005397402eebec1) by [bashonly](https://github.com/bashonly), [seproDev](https://github.com/seproDev) +- **devscripts**: [Improve `install_deps` script](https://github.com/yt-dlp/yt-dlp/commit/73922e66e437fb4bb618bdc119a96375081bf508) ([#14766](https://github.com/yt-dlp/yt-dlp/issues/14766)) by [bashonly](https://github.com/bashonly) +- **test**: [Skip flaky tests if source unchanged](https://github.com/yt-dlp/yt-dlp/commit/ade8c2b36ff300edef87d48fd1ba835ac35c5b63) ([#14970](https://github.com/yt-dlp/yt-dlp/issues/14970)) by [bashonly](https://github.com/bashonly), [Grub4K](https://github.com/Grub4K) + ### 2025.10.22 #### Important changes diff --git a/Maintainers.md b/Maintainers.md index 8b52daf5fa..515505d882 100644 --- a/Maintainers.md +++ b/Maintainers.md @@ -10,6 +10,8 @@ Core Maintainers are responsible for reviewing and merging contributions, publis **You can contact the core maintainers via `maintainers@yt-dlp.org`.** +This is **NOT** a support channel. [Open an issue](https://github.com/yt-dlp/yt-dlp/issues/new/choose) if you need help or want to report a bug. 
+ ### [coletdjnz](https://github.com/coletdjnz) [![gh-sponsor](https://img.shields.io/badge/_-Github-white.svg?logo=github&labelColor=555555&style=for-the-badge)](https://github.com/sponsors/coletdjnz) diff --git a/Makefile b/Makefile index 290955d209..89aef9033b 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,5 @@ all: lazy-extractors yt-dlp doc pypi-files +all-extra: lazy-extractors yt-dlp-extra doc pypi-files clean: clean-test clean-dist clean-all: clean clean-cache completions: completion-bash completion-fish completion-zsh @@ -15,7 +16,11 @@ pypi-files: AUTHORS Changelog.md LICENSE README.md README.txt supportedsites \ .PHONY: all clean clean-all clean-test clean-dist clean-cache \ completions completion-bash completion-fish completion-zsh \ doc issuetemplates supportedsites ot offlinetest codetest test \ - tar pypi-files lazy-extractors install uninstall + tar pypi-files lazy-extractors install uninstall \ + all-extra yt-dlp-extra current-ejs-version + +.IGNORE: current-ejs-version +.SILENT: current-ejs-version clean-test: rm -rf tmp/ *.annotations.xml *.aria2 *.description *.dump *.frag \ @@ -25,7 +30,8 @@ clean-test: test/testdata/sigs/player-*.js test/testdata/thumbnails/empty.webp "test/testdata/thumbnails/foo %d bar/foo_%d."* clean-dist: rm -rf yt-dlp.1.temp.md yt-dlp.1 README.txt MANIFEST build/ dist/ .coverage cover/ yt-dlp.tar.gz completions/ \ - yt_dlp/extractor/lazy_extractors.py *.spec CONTRIBUTING.md.tmp yt-dlp yt-dlp.exe yt_dlp.egg-info/ AUTHORS + yt_dlp/extractor/lazy_extractors.py *.spec CONTRIBUTING.md.tmp yt-dlp yt-dlp.exe yt_dlp.egg-info/ AUTHORS \ + yt-dlp.zip .ejs-* yt_dlp_ejs/ clean-cache: find . 
\( \ -type d -name ".*_cache" -o -type d -name __pycache__ -o -name "*.pyc" -o -name "*.class" \ @@ -81,28 +87,49 @@ test: offlinetest: codetest $(PYTHON) -m pytest -Werror -m "not download" -CODE_FOLDERS_CMD = find yt_dlp -type f -name '__init__.py' | sed 's,/__init__.py,,' | grep -v '/__' | sort -CODE_FOLDERS != $(CODE_FOLDERS_CMD) -CODE_FOLDERS ?= $(shell $(CODE_FOLDERS_CMD)) -CODE_FILES_CMD = for f in $(CODE_FOLDERS) ; do echo "$$f" | sed 's,$$,/*.py,' ; done -CODE_FILES != $(CODE_FILES_CMD) -CODE_FILES ?= $(shell $(CODE_FILES_CMD)) -yt-dlp: $(CODE_FILES) +PY_CODE_FOLDERS_CMD = find yt_dlp -type f -name '__init__.py' | sed 's|/__init__\.py||' | grep -v '/__' | sort +PY_CODE_FOLDERS != $(PY_CODE_FOLDERS_CMD) +PY_CODE_FOLDERS ?= $(shell $(PY_CODE_FOLDERS_CMD)) + +PY_CODE_FILES_CMD = for f in $(PY_CODE_FOLDERS) ; do echo "$$f" | sed 's|$$|/*.py|' ; done +PY_CODE_FILES != $(PY_CODE_FILES_CMD) +PY_CODE_FILES ?= $(shell $(PY_CODE_FILES_CMD)) + +JS_CODE_FOLDERS_CMD = find yt_dlp -type f -name '*.js' | sed 's|/[^/]\{1,\}\.js$$||' | uniq +JS_CODE_FOLDERS != $(JS_CODE_FOLDERS_CMD) +JS_CODE_FOLDERS ?= $(shell $(JS_CODE_FOLDERS_CMD)) + +JS_CODE_FILES_CMD = for f in $(JS_CODE_FOLDERS) ; do echo "$$f" | sed 's|$$|/*.js|' ; done +JS_CODE_FILES != $(JS_CODE_FILES_CMD) +JS_CODE_FILES ?= $(shell $(JS_CODE_FILES_CMD)) + +yt-dlp.zip: $(PY_CODE_FILES) $(JS_CODE_FILES) mkdir -p zip - for d in $(CODE_FOLDERS) ; do \ + for d in $(PY_CODE_FOLDERS) ; do \ mkdir -p zip/$$d ;\ cp -pPR $$d/*.py zip/$$d/ ;\ done - (cd zip && touch -t 200001010101 $(CODE_FILES)) - mv zip/yt_dlp/__main__.py zip/ - (cd zip && zip -q ../yt-dlp $(CODE_FILES) __main__.py) + for d in $(JS_CODE_FOLDERS) ; do \ + mkdir -p zip/$$d ;\ + cp -pPR $$d/*.js zip/$$d/ ;\ + done + (cd zip && touch -t 200001010101 $(PY_CODE_FILES) $(JS_CODE_FILES)) + rm -f zip/yt_dlp/__main__.py + (cd zip && zip -q ../yt-dlp.zip $(PY_CODE_FILES) $(JS_CODE_FILES)) rm -rf zip + +yt-dlp: yt-dlp.zip + mkdir -p zip + cp -pP yt_dlp/__main__.py zip/ 
+ touch -t 200001010101 zip/__main__.py + (cd zip && zip -q ../yt-dlp.zip __main__.py) echo '#!$(PYTHON)' > yt-dlp cat yt-dlp.zip >> yt-dlp rm yt-dlp.zip chmod a+x yt-dlp + rm -rf zip -README.md: $(CODE_FILES) devscripts/make_readme.py +README.md: $(PY_CODE_FILES) devscripts/make_readme.py COLUMNS=80 $(PYTHON) yt_dlp/__main__.py --ignore-config --help | $(PYTHON) devscripts/make_readme.py CONTRIBUTING.md: README.md devscripts/make_contributing.py @@ -127,15 +154,15 @@ yt-dlp.1: README.md devscripts/prepare_manpage.py pandoc -s -f $(MARKDOWN) -t man yt-dlp.1.temp.md -o yt-dlp.1 rm -f yt-dlp.1.temp.md -completions/bash/yt-dlp: $(CODE_FILES) devscripts/bash-completion.in +completions/bash/yt-dlp: $(PY_CODE_FILES) devscripts/bash-completion.in mkdir -p completions/bash $(PYTHON) devscripts/bash-completion.py -completions/zsh/_yt-dlp: $(CODE_FILES) devscripts/zsh-completion.in +completions/zsh/_yt-dlp: $(PY_CODE_FILES) devscripts/zsh-completion.in mkdir -p completions/zsh $(PYTHON) devscripts/zsh-completion.py -completions/fish/yt-dlp.fish: $(CODE_FILES) devscripts/fish-completion.in +completions/fish/yt-dlp.fish: $(PY_CODE_FILES) devscripts/fish-completion.in mkdir -p completions/fish $(PYTHON) devscripts/fish-completion.py @@ -172,3 +199,45 @@ CONTRIBUTORS: Changelog.md echo 'Updating $@ from git commit history' ; \ $(PYTHON) devscripts/make_changelog.py -v -c > /dev/null ; \ fi + +# The following EJS_-prefixed variables are auto-generated by devscripts/update_ejs.py +# DO NOT EDIT! 
+EJS_VERSION = 0.3.1 +EJS_WHEEL_NAME = yt_dlp_ejs-0.3.1-py3-none-any.whl +EJS_WHEEL_HASH = sha256:a6e3548874db7c774388931752bb46c7f4642c044b2a189e56968f3d5ecab622 +EJS_PY_FOLDERS = yt_dlp_ejs yt_dlp_ejs/yt yt_dlp_ejs/yt/solver +EJS_PY_FILES = yt_dlp_ejs/__init__.py yt_dlp_ejs/_version.py yt_dlp_ejs/yt/__init__.py yt_dlp_ejs/yt/solver/__init__.py +EJS_JS_FOLDERS = yt_dlp_ejs/yt/solver +EJS_JS_FILES = yt_dlp_ejs/yt/solver/core.min.js yt_dlp_ejs/yt/solver/lib.min.js + +yt-dlp-extra: current-ejs-version .ejs-$(EJS_VERSION) $(EJS_PY_FILES) $(EJS_JS_FILES) yt-dlp.zip + mkdir -p zip + for d in $(EJS_PY_FOLDERS) ; do \ + mkdir -p zip/$$d ;\ + cp -pPR $$d/*.py zip/$$d/ ;\ + done + for d in $(EJS_JS_FOLDERS) ; do \ + mkdir -p zip/$$d ;\ + cp -pPR $$d/*.js zip/$$d/ ;\ + done + (cd zip && touch -t 200001010101 $(EJS_PY_FILES) $(EJS_JS_FILES)) + (cd zip && zip -q ../yt-dlp.zip $(EJS_PY_FILES) $(EJS_JS_FILES)) + cp -pP yt_dlp/__main__.py zip/ + touch -t 200001010101 zip/__main__.py + (cd zip && zip -q ../yt-dlp.zip __main__.py) + echo '#!$(PYTHON)' > yt-dlp + cat yt-dlp.zip >> yt-dlp + rm yt-dlp.zip + chmod a+x yt-dlp + rm -rf zip + +.ejs-$(EJS_VERSION): + @echo Downloading yt-dlp-ejs + @echo "yt-dlp-ejs==$(EJS_VERSION) --hash $(EJS_WHEEL_HASH)" > .ejs-requirements.txt + $(PYTHON) -m pip download -d ./build --no-deps --require-hashes -r .ejs-requirements.txt + unzip -o build/$(EJS_WHEEL_NAME) "yt_dlp_ejs/*" + @touch .ejs-$(EJS_VERSION) + +current-ejs-version: + rm -rf .ejs-* + touch .ejs-$$($(PYTHON) -c 'import sys; sys.path = [""]; from yt_dlp_ejs import version; print(version)' 2>/dev/null) diff --git a/README.md b/README.md index 7b3cd0970d..8189015c72 100644 --- a/README.md +++ b/README.md @@ -145,9 +145,11 @@ While yt-dlp is licensed under the [Unlicense](LICENSE), many of the release fil Most notably, the PyInstaller-bundled executables include GPLv3+ licensed code, and as such the combined work is licensed under [GPLv3+](https://www.gnu.org/licenses/gpl-3.0.html). 
-See [THIRD_PARTY_LICENSES.txt](THIRD_PARTY_LICENSES.txt) for details. +The zipimport Unix executable (`yt-dlp`) contains [ISC](https://github.com/meriyah/meriyah/blob/main/LICENSE.md) licensed code from [`meriyah`](https://github.com/meriyah/meriyah) and [MIT](https://github.com/davidbonnet/astring/blob/main/LICENSE) licensed code from [`astring`](https://github.com/davidbonnet/astring). -The zipimport binary (`yt-dlp`), the source tarball (`yt-dlp.tar.gz`), and the PyPI source distribution & wheel only contain code licensed under the [Unlicense](LICENSE). +See [THIRD_PARTY_LICENSES.txt](THIRD_PARTY_LICENSES.txt) for more details. + +The git repository, the source tarball (`yt-dlp.tar.gz`), the PyPI source distribution and the PyPI built distribution (wheel) only contain code licensed under the [Unlicense](LICENSE). @@ -187,7 +189,7 @@ Example usage: yt-dlp --update-to nightly # To install nightly with pip: -python3 -m pip install -U --pre "yt-dlp[default]" +python -m pip install -U --pre "yt-dlp[default]" ``` When running a yt-dlp version that is older than 90 days, you will see a warning message suggesting to update to the latest version. @@ -201,7 +203,7 @@ Python versions 3.10+ (CPython) and 3.11+ (PyPy) are supported. Other versions a On Windows, [Microsoft Visual C++ 2010 SP1 Redistributable Package (x86)](https://download.microsoft.com/download/1/6/5/165255E7-1014-4D0A-B094-B6A430A6BFFC/vcredist_x86.exe) is also necessary to run yt-dlp. You probably already have this, but if the executable throws an error due to missing `MSVCR100.dll` you need to install it manually. 
--> -While all the other dependencies are optional, `ffmpeg` and `ffprobe` are highly recommended +While all the other dependencies are optional, `ffmpeg`, `ffprobe`, `yt-dlp-ejs` and a JavaScript runtime are highly recommended ### Strongly recommended @@ -211,6 +213,10 @@ While all the other dependencies are optional, `ffmpeg` and `ffprobe` are highly **Important**: What you need is ffmpeg *binary*, **NOT** [the Python package of the same name](https://pypi.org/project/ffmpeg) +* [**yt-dlp-ejs**](https://github.com/yt-dlp/ejs) - Required for deciphering YouTube n/sig values. Licensed under [Unlicense](https://github.com/yt-dlp/ejs/blob/main/LICENSE), bundles [MIT](https://github.com/davidbonnet/astring/blob/main/LICENSE) and [ISC](https://github.com/meriyah/meriyah/blob/main/LICENSE.md) components. + + A JavaScript runtime like [**deno**](https://deno.land) (recommended), [**node.js**](https://nodejs.org), [**bun**](https://bun.sh), or [**QuickJS**](https://bellard.org/quickjs/) is also required to run yt-dlp-ejs. See [the wiki](https://github.com/yt-dlp/yt-dlp/wiki/EJS). + ### Networking * [**certifi**](https://github.com/certifi/python-certifi)\* - Provides Mozilla's root certificate bundle. Licensed under [MPLv2](https://github.com/certifi/python-certifi/blob/master/LICENSE) * [**brotli**](https://github.com/google/brotli)\* or [**brotlicffi**](https://github.com/python-hyper/brotlicffi) - [Brotli](https://en.wikipedia.org/wiki/Brotli) content encoding support. Both licensed under MIT [1](https://github.com/google/brotli/blob/master/LICENSE) [2](https://github.com/python-hyper/brotlicffi/blob/master/LICENSE) @@ -235,7 +241,7 @@ The following provide support for impersonating browser requests. This may be re ### Misc * [**pycryptodomex**](https://github.com/Legrandin/pycryptodome)\* - For decrypting AES-128 HLS streams and various other data. 
Licensed under [BSD-2-Clause](https://github.com/Legrandin/pycryptodome/blob/master/LICENSE.rst) -* [**phantomjs**](https://github.com/ariya/phantomjs) - Used in extractors where javascript needs to be run. Licensed under [BSD-3-Clause](https://github.com/ariya/phantomjs/blob/master/LICENSE.BSD) +* [**phantomjs**](https://github.com/ariya/phantomjs) - Used in some extractors where JavaScript needs to be run. No longer used for YouTube. To be deprecated in the near future. Licensed under [BSD-3-Clause](https://github.com/ariya/phantomjs/blob/master/LICENSE.BSD) * [**secretstorage**](https://github.com/mitya57/secretstorage)\* - For `--cookies-from-browser` to access the **Gnome** keyring while decrypting cookies of **Chromium**-based browsers on **Linux**. Licensed under [BSD-3-Clause](https://github.com/mitya57/secretstorage/blob/master/LICENSE) * Any external downloader that you want to use with `--downloader` @@ -259,12 +265,12 @@ To build the standalone executable, you must have Python and `pyinstaller` (plus You can run the following commands: ``` -python3 devscripts/install_deps.py --include pyinstaller -python3 devscripts/make_lazy_extractors.py -python3 -m bundle.pyinstaller +python devscripts/install_deps.py --include-group pyinstaller +python devscripts/make_lazy_extractors.py +python -m bundle.pyinstaller ``` -On some systems, you may need to use `py` or `python` instead of `python3`. +On some systems, you may need to use `py` or `python3` instead of `python`. `python -m bundle.pyinstaller` accepts any arguments that can be passed to `pyinstaller`, such as `--onefile/-F` or `--onedir/-D`, which is further [documented here](https://pyinstaller.org/en/stable/usage.html#what-to-generate). @@ -354,7 +360,7 @@ Tip: Use `CTRL`+`F` (or `Command`+`F`) to search by keywords containing directory ("-" for stdin). 
Can be used multiple times and inside other configuration files - --plugin-dirs PATH Path to an additional directory to search + --plugin-dirs DIR Path to an additional directory to search for plugins. This option can be used multiple times to add multiple directories. Use "default" to search the default plugin @@ -362,6 +368,37 @@ Tip: Use `CTRL`+`F` (or `Command`+`F`) to search by keywords --no-plugin-dirs Clear plugin directories to search, including defaults and those provided by previous --plugin-dirs + --js-runtimes RUNTIME[:PATH] Additional JavaScript runtime to enable, + with an optional location for the runtime + (either the path to the binary or its + containing directory). This option can be + used multiple times to enable multiple + runtimes. Supported runtimes are (in order + of priority, from highest to lowest): deno, + node, quickjs, bun. Only "deno" is enabled + by default. The highest priority runtime + that is both enabled and available will be + used. In order to use a lower priority + runtime when "deno" is available, --no-js- + runtimes needs to be passed before enabling + other runtimes + --no-js-runtimes Clear JavaScript runtimes to enable, + including defaults and those provided by + previous --js-runtimes + --remote-components COMPONENT Remote components to allow yt-dlp to fetch + when required. This option is currently not + needed if you are using an official + executable or have the requisite version of + the yt-dlp-ejs package installed. You can + use this option multiple times to allow + multiple components. Supported values: + ejs:npm (external JavaScript components from + npm), ejs:github (external JavaScript + components from yt-dlp-ejs GitHub). By + default, no remote components are allowed + --no-remote-components Disallow fetching of all remote components, + including any previously allowed by + --remote-components or defaults. 
--flat-playlist Do not extract a playlist's URL result entries; some entry metadata may be missing and downloading may be bypassed @@ -1079,11 +1116,12 @@ Make chapter entries for, or remove various segments (sponsor, for, separated by commas. Available categories are sponsor, intro, outro, selfpromo, preview, filler, interaction, - music_offtopic, poi_highlight, chapter, all - and default (=all). You can prefix the - category with a "-" to exclude it. See [1] - for descriptions of the categories. E.g. - --sponsorblock-mark all,-preview + music_offtopic, hook, poi_highlight, + chapter, all and default (=all). You can + prefix the category with a "-" to exclude + it. See [1] for descriptions of the + categories. E.g. --sponsorblock-mark + all,-preview [1] https://wiki.sponsor.ajay.app/w/Segment_Categories --sponsorblock-remove CATS SponsorBlock categories to be removed from the video file, separated by commas. If a @@ -1148,7 +1186,7 @@ Predefined aliases for convenience and ease of use. Note that future You can configure yt-dlp by placing any supported command line option in a configuration file. The configuration is loaded from the following locations: 1. **Main Configuration**: - * The file given to `--config-location` + * The file given to `--config-locations` 1. **Portable Configuration**: (Recommended for portable installations) * If using a binary, `yt-dlp.conf` in the same directory as the binary * If running from source-code, `yt-dlp.conf` in the parent directory of `yt_dlp` @@ -1230,7 +1268,7 @@ yt-dlp --netrc-cmd 'gpg --decrypt ~/.authinfo.gpg' 'https://www.youtube.com/watc ### Notes about environment variables * Environment variables are normally specified as `${VARIABLE}`/`$VARIABLE` on UNIX and `%VARIABLE%` on Windows; but is always shown as `${VARIABLE}` in this documentation -* yt-dlp also allows using UNIX-style variables on Windows for path-like options; e.g. 
`--output`, `--config-location` +* yt-dlp also allows using UNIX-style variables on Windows for path-like options; e.g. `--output`, `--config-locations` * If unset, `${XDG_CONFIG_HOME}` defaults to `~/.config` and `${XDG_CACHE_HOME}` to `~/.cache` * On Windows, `~` points to `${HOME}` if present; or, `${USERPROFILE}` or `${HOMEDRIVE}${HOMEPATH}` otherwise * On Windows, `${USERPROFILE}` generally points to `C:\Users\` and `${APPDATA}` to `${USERPROFILE}\AppData\Roaming` @@ -1814,7 +1852,7 @@ The following extractors use this feature: #### youtube * `lang`: Prefer translated metadata (`title`, `description` etc) of this language code (case-sensitive). By default, the video primary language metadata is preferred, with a fallback to `en` translated. See [youtube/_base.py](https://github.com/yt-dlp/yt-dlp/blob/415b4c9f955b1a0391204bd24a7132590e7b3bdb/yt_dlp/extractor/youtube/_base.py#L402-L409) for the list of supported content language codes * `skip`: One or more of `hls`, `dash` or `translated_subs` to skip extraction of the m3u8 manifests, dash manifests and [auto-translated subtitles](https://github.com/yt-dlp/yt-dlp/issues/4090#issuecomment-1158102032) respectively -* `player_client`: Clients to extract video data from. The currently available clients are `web`, `web_safari`, `web_embedded`, `web_music`, `web_creator`, `mweb`, `ios`, `android`, `android_sdkless`, `android_vr`, `tv`, `tv_simply` and `tv_embedded`. By default, `android_sdkless,tv,web_safari,web` is used. `android_sdkless` is omitted if cookies are passed. If premium cookies are passed, `tv,web_creator,web_safari,web` is used instead. The `web_music` client is added for `music.youtube.com` URLs when logged-in cookies are used. The `web_embedded` client is added for age-restricted videos but only works if the video is embeddable. The `tv_embedded` and `web_creator` clients are added for age-restricted videos if account age-verification is required. 
Some clients, such as `web` and `web_music`, require a `po_token` for their formats to be downloadable. Some clients, such as `web_creator`, will only work with authentication. Not all clients support authentication via cookies. You can use `default` for the default clients, or you can use `all` for all clients (not recommended). You can prefix a client with `-` to exclude it, e.g. `youtube:player_client=default,-ios` +* `player_client`: Clients to extract video data from. The currently available clients are `web`, `web_safari`, `web_embedded`, `web_music`, `web_creator`, `mweb`, `ios`, `android`, `android_sdkless`, `android_vr`, `tv`, `tv_simply`, `tv_downgraded`, and `tv_embedded`. By default, `tv,android_sdkless,web` is used. If no JavaScript runtime is available, then `android_sdkless,web_safari,web` is used. If logged-in cookies are passed to yt-dlp, then `tv_downgraded,web_safari,web` is used for free accounts and `tv_downgraded,web_creator,web` is used for premium accounts. The `web_music` client is added for `music.youtube.com` URLs when logged-in cookies are used. The `web_embedded` client is added for age-restricted videos but only works if the video is embeddable. The `tv_embedded` and `web_creator` clients are added for age-restricted videos if account age-verification is required. Some clients, such as `web` and `web_music`, require a `po_token` for their formats to be downloadable. Some clients, such as `web_creator`, will only work with authentication. Not all clients support authentication via cookies. You can use `default` for the default clients, or you can use `all` for all clients (not recommended). You can prefix a client with `-` to exclude it, e.g. `youtube:player_client=default,-ios` * `player_skip`: Skip some network requests that are generally needed for robust extraction. One or more of `configs` (skip client configs), `webpage` (skip initial webpage), `js` (skip js player), `initial_data` (skip initial data/next ep request). 
While these options can help reduce the number of requests needed or avoid some rate-limiting, they could cause issues such as missing formats or metadata. See [#860](https://github.com/yt-dlp/yt-dlp/pull/860) and [#12826](https://github.com/yt-dlp/yt-dlp/issues/12826) for more details * `webpage_skip`: Skip extraction of embedded webpage data. One or both of `player_response`, `initial_data`. These options are for testing purposes and don't skip any network requests * `player_params`: YouTube player parameters to use for player requests. Will overwrite any default ones set by yt-dlp. @@ -1833,6 +1871,10 @@ The following extractors use this feature: * `pot_trace`: Enable debug logging for PO Token fetching. Either `true` or `false` (default) * `fetch_pot`: Policy to use for fetching a PO Token from providers. One of `always` (always try fetch a PO Token regardless if the client requires one for the given context), `never` (never fetch a PO Token), or `auto` (default; only fetch a PO Token if the client requires one for the given context) * `playback_wait`: Duration (in seconds) to wait inbetween the extraction and download stages in order to ensure the formats are available. The default is `6` seconds +* `jsc_trace`: Enable debug logging for JS Challenge fetching. Either `true` or `false` (default) + +#### youtube-ejs +* `jitless`: Run supported JavaScript engines in JIT-less mode. Supported runtimes are `deno`, `node` and `bun`. Provides better security at the cost of performance/speed. Do note that `node` and `bun` are still considered insecure. Either `true` or `false` (default) #### youtubepot-webpo * `bind_to_visitor_id`: Whether to use the Visitor ID instead of Visitor Data for caching WebPO tokens.
Either `true` (default) or `false` diff --git a/THIRD_PARTY_LICENSES.txt b/THIRD_PARTY_LICENSES.txt index 1040046541..f7977064a0 100644 --- a/THIRD_PARTY_LICENSES.txt +++ b/THIRD_PARTY_LICENSES.txt @@ -4431,3 +4431,43 @@ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + + +-------------------------------------------------------------------------------- +Meriyah | ISC +URL: https://github.com/meriyah/meriyah +-------------------------------------------------------------------------------- +ISC License + +Copyright (c) 2019 and later, KFlash and others. + +Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ + + +-------------------------------------------------------------------------------- +Astring | MIT +URL: https://github.com/davidbonnet/astring/ +-------------------------------------------------------------------------------- +Copyright (c) 2015, David Bonnet + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/bundle/docker/linux/build.sh b/bundle/docker/linux/build.sh index 71adaad058..b30d40980e 100755 --- a/bundle/docker/linux/build.sh +++ b/bundle/docker/linux/build.sh @@ -15,12 +15,12 @@ function venvpy { } INCLUDES=( - --include pyinstaller - --include secretstorage + --include-group pyinstaller + --include-group secretstorage ) if [[ -z "${EXCLUDE_CURL_CFFI:-}" ]]; then - INCLUDES+=(--include curl-cffi) + INCLUDES+=(--include-group curl-cffi) fi runpy -m venv /yt-dlp-build-venv @@ -28,7 +28,7 @@ runpy -m venv /yt-dlp-build-venv source /yt-dlp-build-venv/bin/activate # Inside the venv we use venvpy instead of runpy venvpy -m ensurepip --upgrade --default-pip -venvpy -m devscripts.install_deps -o --include build +venvpy -m devscripts.install_deps --only-optional-groups --include-group build venvpy -m devscripts.install_deps "${INCLUDES[@]}" venvpy -m devscripts.make_lazy_extractors venvpy devscripts/update-version.py -c "${CHANNEL}" -r "${ORIGIN}" "${VERSION}" diff --git a/devscripts/changelog_override.json b/devscripts/changelog_override.json index e906838175..ba3f9518fb 100644 --- a/devscripts/changelog_override.json +++ b/devscripts/changelog_override.json @@ -308,5 +308,16 @@ "action": "add", "when": "2c9091e355a7ba5d1edb69796ecdca48199b77fb", "short": "[priority] **A stopgap release with a *TEMPORARY partial* fix for YouTube support**\nSome formats may still be unavailable, especially if cookies are passed to yt-dlp. The ***NEXT*** release, expected very soon, **will require an external JS runtime (e.g. Deno)** in order for YouTube downloads to work properly. 
[Read more](https://github.com/yt-dlp/yt-dlp/issues/14404)" + }, + { + "action": "change", + "when": "8636a9bac3bed99984c1e297453660468ecf504b", + "short": "Fix 6224a3898821965a7d6a2cb9cc2de40a0fd6e6bc", + "authors": ["Grub4K"] + }, + { + "action": "add", + "when": "6224a3898821965a7d6a2cb9cc2de40a0fd6e6bc", + "short": "[priority] **An external JavaScript runtime is now required for full YouTube support**\nyt-dlp now requires users to have an external JavaScript runtime (e.g. Deno) installed in order to solve the JavaScript challenges presented by YouTube. [Read more](https://github.com/yt-dlp/yt-dlp/issues/15012)" } ] diff --git a/devscripts/generate_third_party_licenses.py b/devscripts/generate_third_party_licenses.py index db615d2e35..322d56f633 100644 --- a/devscripts/generate_third_party_licenses.py +++ b/devscripts/generate_third_party_licenses.py @@ -271,6 +271,19 @@ DEPENDENCIES: list[Dependency] = [ license_url='https://raw.githubusercontent.com/python-websockets/websockets/refs/heads/main/LICENSE', project_url='https://websockets.readthedocs.io/', ), + # Dependencies of yt-dlp-ejs + Dependency( + name='Meriyah', + license='ISC', + license_url='https://raw.githubusercontent.com/meriyah/meriyah/refs/heads/main/LICENSE.md', + project_url='https://github.com/meriyah/meriyah', + ), + Dependency( + name='Astring', + license='MIT', + license_url='https://raw.githubusercontent.com/davidbonnet/astring/refs/heads/main/LICENSE', + project_url='https://github.com/davidbonnet/astring/', + ), ] diff --git a/devscripts/install_deps.py b/devscripts/install_deps.py index d292505458..07c646a4c0 100755 --- a/devscripts/install_deps.py +++ b/devscripts/install_deps.py @@ -22,14 +22,19 @@ def parse_args(): 'input', nargs='?', metavar='TOMLFILE', default=Path(__file__).parent.parent / 'pyproject.toml', help='input file (default: %(default)s)') parser.add_argument( - '-e', '--exclude', metavar='DEPENDENCY', action='append', - help='exclude a dependency') + '-e', 
'--exclude-dependency', metavar='DEPENDENCY', action='append', + help='exclude a dependency (can be used multiple times)') parser.add_argument( - '-i', '--include', metavar='GROUP', action='append', - help='include an optional dependency group') + '-i', '--include-group', metavar='GROUP', action='append', + help='include an optional dependency group (can be used multiple times)') parser.add_argument( - '-o', '--only-optional', action='store_true', - help='only install optional dependencies') + '-c', '--cherry-pick', metavar='DEPENDENCY', action='append', + help=( + 'only include a specific dependency from the resulting dependency list ' + '(can be used multiple times)')) + parser.add_argument( + '-o', '--only-optional-groups', action='store_true', + help='omit default dependencies unless the "default" group is specified with --include-group') parser.add_argument( '-p', '--print', action='store_true', help='only print requirements to stdout') @@ -39,30 +44,41 @@ def parse_args(): return parser.parse_args() +def uniq(arg) -> dict[str, None]: + return dict.fromkeys(map(str.lower, arg or ())) + + def main(): args = parse_args() project_table = parse_toml(read_file(args.input))['project'] recursive_pattern = re.compile(rf'{project_table["name"]}\[(?P[\w-]+)\]') optional_groups = project_table['optional-dependencies'] - excludes = args.exclude or [] + + excludes = uniq(args.exclude_dependency) + only_includes = uniq(args.cherry_pick) + include_groups = uniq(args.include_group) def yield_deps(group): for dep in group: if mobj := recursive_pattern.fullmatch(dep): - yield from optional_groups.get(mobj.group('group_name'), []) + yield from optional_groups.get(mobj.group('group_name'), ()) else: yield dep - targets = [] - if not args.only_optional: # `-o` should exclude 'dependencies' and the 'default' group - targets.extend(project_table['dependencies']) - if 'default' not in excludes: # `--exclude default` should exclude entire 'default' group - 
targets.extend(yield_deps(optional_groups['default'])) + targets = {} + if not args.only_optional_groups: + # legacy: 'dependencies' is empty now + targets.update(dict.fromkeys(project_table['dependencies'])) + targets.update(dict.fromkeys(yield_deps(optional_groups['default']))) - for include in filter(None, map(optional_groups.get, args.include or [])): - targets.extend(yield_deps(include)) + for include in filter(None, map(optional_groups.get, include_groups)): + targets.update(dict.fromkeys(yield_deps(include))) - targets = [t for t in targets if re.match(r'[\w-]+', t).group(0).lower() not in excludes] + def target_filter(target): + name = re.match(r'[\w-]+', target).group(0).lower() + return name not in excludes and (not only_includes or name in only_includes) + + targets = list(filter(target_filter, targets)) if args.print: for target in targets: diff --git a/devscripts/make_changelog.py b/devscripts/make_changelog.py index 0b2eb93b4e..88dbf74e4f 100644 --- a/devscripts/make_changelog.py +++ b/devscripts/make_changelog.py @@ -353,6 +353,13 @@ class CommitRange: continue commit = Commit(override_hash, override['short'], override.get('authors') or []) logger.info(f'CHANGE {self._commits[commit.hash]} -> {commit}') + if match := self.FIXES_RE.search(commit.short): + fix_commitish = match.group(1) + if fix_commitish in self._commits: + del self._commits[commit.hash] + self._fixes[fix_commitish].append(commit) + logger.info(f'Found fix for {fix_commitish[:HASH_LENGTH]}: {commit.hash[:HASH_LENGTH]}') + continue self._commits[commit.hash] = commit self._commits = dict(reversed(self._commits.items())) diff --git a/devscripts/run_tests.py b/devscripts/run_tests.py index ebb3500b6c..3274abc39f 100755 --- a/devscripts/run_tests.py +++ b/devscripts/run_tests.py @@ -17,6 +17,18 @@ def parse_args(): parser = argparse.ArgumentParser(description='Run selected yt-dlp tests') parser.add_argument( 'test', help='an extractor test, test path, or one of "core" or "download"', 
nargs='*') + parser.add_argument( + '--flaky', + action='store_true', + default=None, + help='Allow running flaky tests. (default: run, unless in CI)', + ) + parser.add_argument( + '--no-flaky', + action='store_false', + dest='flaky', + help=argparse.SUPPRESS, + ) parser.add_argument( '-k', help='run a test matching EXPRESSION. Same as "pytest -k"', metavar='EXPRESSION') parser.add_argument( @@ -24,10 +36,11 @@ def parse_args(): return parser.parse_args() -def run_tests(*tests, pattern=None, ci=False): +def run_tests(*tests, pattern=None, ci=False, flaky: bool | None = None): # XXX: hatch uses `tests` if no arguments are passed run_core = 'core' in tests or 'tests' in tests or (not pattern and not tests) run_download = 'download' in tests + run_flaky = flaky or (flaky is None and not ci) pytest_args = args.pytest_args or os.getenv('HATCH_TEST_ARGS', '') arguments = ['pytest', '-Werror', '--tb=short', *shlex.split(pytest_args)] @@ -44,6 +57,8 @@ def run_tests(*tests, pattern=None, ci=False): test if '/' in test else f'test/test_download.py::TestDownload::test_{fix_test_name(test)}' for test in tests) + if not run_flaky: + arguments.append('--disallow-flaky') print(f'Running {arguments}', flush=True) try: @@ -72,6 +87,11 @@ if __name__ == '__main__': args = parse_args() os.chdir(Path(__file__).parent.parent) - sys.exit(run_tests(*args.test, pattern=args.k, ci=bool(os.getenv('CI')))) + sys.exit(run_tests( + *args.test, + pattern=args.k, + ci=bool(os.getenv('CI')), + flaky=args.flaky, + )) except KeyboardInterrupt: pass diff --git a/devscripts/update_ejs.py b/devscripts/update_ejs.py new file mode 100755 index 0000000000..3aa76bd0ce --- /dev/null +++ b/devscripts/update_ejs.py @@ -0,0 +1,166 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import contextlib +import io +import json +import hashlib +import pathlib +import urllib.request +import zipfile + + +TEMPLATE = '''\ +# This file is generated by devscripts/update_ejs.py. DO NOT MODIFY! 
+ +VERSION = {version!r} +HASHES = {{ +{hash_mapping} +}} +''' +PREFIX = ' "yt-dlp-ejs==' +BASE_PATH = pathlib.Path(__file__).parent.parent +PYPROJECT_PATH = BASE_PATH / 'pyproject.toml' +PACKAGE_PATH = BASE_PATH / 'yt_dlp/extractor/youtube/jsc/_builtin/vendor' +RELEASE_URL = 'https://api.github.com/repos/yt-dlp/ejs/releases/latest' +ASSETS = { + 'yt.solver.lib.js': False, + 'yt.solver.lib.min.js': False, + 'yt.solver.deno.lib.js': True, + 'yt.solver.bun.lib.js': True, + 'yt.solver.core.min.js': False, + 'yt.solver.core.js': True, +} +MAKEFILE_PATH = BASE_PATH / 'Makefile' + + +def request(url: str): + return contextlib.closing(urllib.request.urlopen(url)) + + +def makefile_variables( + version: str | None = None, + name: str | None = None, + digest: str | None = None, + data: bytes | None = None, + keys_only: bool = False, +) -> dict[str, str | None]: + assert keys_only or all(arg is not None for arg in (version, name, digest, data)) + + return { + 'EJS_VERSION': None if keys_only else version, + 'EJS_WHEEL_NAME': None if keys_only else name, + 'EJS_WHEEL_HASH': None if keys_only else digest, + 'EJS_PY_FOLDERS': None if keys_only else list_wheel_contents(data, 'py', files=False), + 'EJS_PY_FILES': None if keys_only else list_wheel_contents(data, 'py', folders=False), + 'EJS_JS_FOLDERS': None if keys_only else list_wheel_contents(data, 'js', files=False), + 'EJS_JS_FILES': None if keys_only else list_wheel_contents(data, 'js', folders=False), + } + + +def list_wheel_contents( + wheel_data: bytes, + suffix: str | None = None, + folders: bool = True, + files: bool = True, +) -> str: + assert folders or files, 'at least one of "folders" or "files" must be True' + + with zipfile.ZipFile(io.BytesIO(wheel_data)) as zipf: + path_gen = (zinfo.filename for zinfo in zipf.infolist()) + + filtered = filter(lambda path: path.startswith('yt_dlp_ejs/'), path_gen) + if suffix: + filtered = filter(lambda path: path.endswith(f'.{suffix}'), filtered) + + files_list = list(filtered) + 
if not folders: + return ' '.join(files_list) + + folders_list = list(dict.fromkeys(path.rpartition('/')[0] for path in files_list)) + if not files: + return ' '.join(folders_list) + + return ' '.join(folders_list + files_list) + + +def main(): + current_version = None + with PYPROJECT_PATH.open() as file: + for line in file: + if not line.startswith(PREFIX): + continue + current_version, _, _ = line.removeprefix(PREFIX).partition('"') + + if not current_version: + print('yt-dlp-ejs dependency line could not be found') + return + + makefile_info = makefile_variables(keys_only=True) + prefixes = tuple(f'{key} = ' for key in makefile_info) + with MAKEFILE_PATH.open() as file: + for line in file: + if not line.startswith(prefixes): + continue + key, _, val = line.partition(' = ') + makefile_info[key] = val.rstrip() + + with request(RELEASE_URL) as resp: + info = json.load(resp) + + version = info['tag_name'] + if version == current_version: + print(f'yt-dlp-ejs is up to date! ({version})') + return + + print(f'Updating yt-dlp-ejs from {current_version} to {version}') + hashes = [] + wheel_info = {} + for asset in info['assets']: + name = asset['name'] + is_wheel = name.startswith('yt_dlp_ejs-') and name.endswith('.whl') + if not is_wheel and name not in ASSETS: + continue + with request(asset['browser_download_url']) as resp: + data = resp.read() + + # verify digest from github + digest = asset['digest'] + algo, _, expected = digest.partition(':') + hexdigest = hashlib.new(algo, data).hexdigest() + assert hexdigest == expected, f'downloaded attest mismatch ({hexdigest!r} != {expected!r})' + + if is_wheel: + wheel_info = makefile_variables(version, name, digest, data) + continue + + # calculate sha3-512 digest + asset_hash = hashlib.sha3_512(data).hexdigest() + hashes.append(f' {name!r}: {asset_hash!r},') + + if ASSETS[name]: + (PACKAGE_PATH / name).write_bytes(data) + + hash_mapping = '\n'.join(hashes) + for asset_name in ASSETS: + assert asset_name in hash_mapping, 
f'{asset_name} not found in release' + + assert all(wheel_info.get(key) for key in makefile_info), 'wheel info not found in release' + + (PACKAGE_PATH / '_info.py').write_text(TEMPLATE.format( + version=version, + hash_mapping=hash_mapping, + )) + + content = PYPROJECT_PATH.read_text() + updated = content.replace(PREFIX + current_version, PREFIX + version) + PYPROJECT_PATH.write_text(updated) + + makefile = MAKEFILE_PATH.read_text() + for key in wheel_info: + makefile = makefile.replace(f'{key} = {makefile_info[key]}', f'{key} = {wheel_info[key]}') + MAKEFILE_PATH.write_text(makefile) + + +if __name__ == '__main__': + main() diff --git a/devscripts/zsh-completion.py b/devscripts/zsh-completion.py index 8e190c00cb..046e9231f1 100755 --- a/devscripts/zsh-completion.py +++ b/devscripts/zsh-completion.py @@ -18,6 +18,7 @@ def build_completion(opt_parser): for opt in group.option_list] opts_file = [opt for opt in opts if opt.metavar == 'FILE'] opts_dir = [opt for opt in opts if opt.metavar == 'DIR'] + opts_path = [opt for opt in opts if opt.metavar == 'PATH'] fileopts = [] for opt in opts_file: @@ -26,6 +27,12 @@ def build_completion(opt_parser): if opt._long_opts: fileopts.extend(opt._long_opts) + for opt in opts_path: + if opt._short_opts: + fileopts.extend(opt._short_opts) + if opt._long_opts: + fileopts.extend(opt._long_opts) + diropts = [] for opt in opts_dir: if opt._short_opts: diff --git a/pyproject.toml b/pyproject.toml index 1d6e573791..d2c5745b95 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -56,6 +56,7 @@ default = [ "requests>=2.32.2,<3", "urllib3>=2.0.2,<3", "websockets>=13.0", + "yt-dlp-ejs==0.3.1", ] curl-cffi = [ "curl-cffi>=0.5.10,!=0.6.*,!=0.7.*,!=0.8.*,!=0.9.*,<0.14; implementation_name=='cpython'", @@ -122,7 +123,12 @@ artifacts = [ [tool.hatch.build.targets.wheel] packages = ["yt_dlp"] -artifacts = ["/yt_dlp/extractor/lazy_extractors.py"] +artifacts = [ + "/yt_dlp/extractor/lazy_extractors.py", +] +exclude = [ + "/yt_dlp/**/*.md", +] 
[tool.hatch.build.targets.wheel.shared-data] "completions/bash/yt-dlp" = "share/bash-completion/completions/yt-dlp" diff --git a/supportedsites.md b/supportedsites.md index a546819286..9ab6d26335 100644 --- a/supportedsites.md +++ b/supportedsites.md @@ -12,6 +12,7 @@ The only reliable way to check if a site is supported is to try it. - **17live:vod** - **1News**: 1news.co.nz article videos - **1tv**: Первый канал + - **1tv:live**: Первый канал (прямой эфир) - **20min**: (**Currently broken**) - **23video** - **247sports**: (**Currently broken**) @@ -93,6 +94,8 @@ The only reliable way to check if a site is supported is to try it. - **archive.org**: archive.org video and audio - **ArcPublishing** - **ARD** + - **ARDAudiothek** + - **ARDAudiothekPlaylist** - **ARDMediathek** - **ARDMediathekCollection** - **Art19** @@ -533,7 +536,6 @@ The only reliable way to check if a site is supported is to try it. - **google:​podcasts:feed** - **GoogleDrive** - **GoogleDrive:Folder** - - **GoPlay**: [*goplay*](## "netrc machine") - **GoPro** - **Goshgay** - **GoToStage** @@ -844,6 +846,7 @@ The only reliable way to check if a site is supported is to try it. - **MusicdexArtist** - **MusicdexPlaylist** - **MusicdexSong** + - **Mux** - **Mx3** - **Mx3Neo** - **Mx3Volksmusik** @@ -858,6 +861,7 @@ The only reliable way to check if a site is supported is to try it. - **n-tv.de** - **N1Info:article** - **N1InfoAsset** + - **NascarClassics** - **Nate** - **NateProgram** - **natgeo:video** @@ -1071,6 +1075,7 @@ The only reliable way to check if a site is supported is to try it. - **PlanetMarathi** - **Platzi**: [*platzi*](## "netrc machine") - **PlatziCourse**: [*platzi*](## "netrc machine") + - **play.tv**: [*goplay*](## "netrc machine") PLAY (formerly goplay.be) - **player.sky.it** - **PlayerFm** - **playeur** @@ -1559,12 +1564,12 @@ The only reliable way to check if a site is supported is to try it. 
- **TwitCastingLive** - **TwitCastingUser** - **twitch:clips**: [*twitch*](## "netrc machine") + - **twitch:collection**: [*twitch*](## "netrc machine") - **twitch:stream**: [*twitch*](## "netrc machine") + - **twitch:videos**: [*twitch*](## "netrc machine") + - **twitch:​videos:clips**: [*twitch*](## "netrc machine") + - **twitch:​videos:collections**: [*twitch*](## "netrc machine") - **twitch:vod**: [*twitch*](## "netrc machine") - - **TwitchCollection**: [*twitch*](## "netrc machine") - - **TwitchVideos**: [*twitch*](## "netrc machine") - - **TwitchVideosClips**: [*twitch*](## "netrc machine") - - **TwitchVideosCollections**: [*twitch*](## "netrc machine") - **twitter**: [*twitter*](## "netrc machine") - **twitter:amplify**: [*twitter*](## "netrc machine") - **twitter:broadcast**: [*twitter*](## "netrc machine") diff --git a/test/conftest.py b/test/conftest.py index a8b92f811e..9d31986196 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -52,6 +52,33 @@ def skip_handlers_if(request, handler): pytest.skip(marker.args[1] if len(marker.args) > 1 else '') +@pytest.fixture(autouse=True) +def handler_flaky(request, handler): + """Mark a certain handler as being flaky. 
+ + This will skip the test if pytest does not get run using `--allow-flaky` + + usage: + pytest.mark.handler_flaky('my_handler', os.name != 'nt', reason='reason') + """ + for marker in request.node.iter_markers(handler_flaky.__name__): + if ( + marker.args[0] == handler.RH_KEY + and (not marker.args[1:] or any(marker.args[1:])) + and request.config.getoption('disallow_flaky') + ): + reason = marker.kwargs.get('reason') + pytest.skip(f'flaky: {reason}' if reason else 'flaky') + + +def pytest_addoption(parser, pluginmanager): + parser.addoption( + '--disallow-flaky', + action='store_true', + help='disallow flaky tests from running.', + ) + + def pytest_configure(config): config.addinivalue_line( 'markers', 'skip_handler(handler): skip test for the given handler', @@ -62,3 +89,6 @@ def pytest_configure(config): config.addinivalue_line( 'markers', 'skip_handlers_if(handler): skip test for handlers when the condition is true', ) + config.addinivalue_line( + 'markers', 'handler_flaky(handler): mark handler as flaky if condition is true', + ) diff --git a/test/test_YoutubeDL.py b/test/test_YoutubeDL.py index 72dfc20288..2705accb76 100644 --- a/test/test_YoutubeDL.py +++ b/test/test_YoutubeDL.py @@ -13,12 +13,10 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) import contextlib import copy -import itertools import json from test.helper import FakeYDL, assertRegexpMatches, try_rm from yt_dlp import YoutubeDL -from yt_dlp.extractor import YoutubeIE from yt_dlp.extractor.common import InfoExtractor from yt_dlp.postprocessor.common import PostProcessor from yt_dlp.utils import ( @@ -337,99 +335,6 @@ class TestFormatSelection(unittest.TestCase): ydl = YDL({'format': '[format_id!*=-]'}) self.assertRaises(ExtractorError, ydl.process_ie_result, info_dict.copy()) - def test_youtube_format_selection(self): - # FIXME: Rewrite in accordance with the new format sorting options - return - - order = [ - '38', '37', '46', '22', '45', '35', '44', '18', 
'34', '43', '6', '5', '17', '36', '13', - # Apple HTTP Live Streaming - '96', '95', '94', '93', '92', '132', '151', - # 3D - '85', '84', '102', '83', '101', '82', '100', - # Dash video - '137', '248', '136', '247', '135', '246', - '245', '244', '134', '243', '133', '242', '160', - # Dash audio - '141', '172', '140', '171', '139', - ] - - def format_info(f_id): - info = YoutubeIE._formats[f_id].copy() - - # XXX: In real cases InfoExtractor._parse_mpd_formats() fills up 'acodec' - # and 'vcodec', while in tests such information is incomplete since - # commit a6c2c24479e5f4827ceb06f64d855329c0a6f593 - # test_YoutubeDL.test_youtube_format_selection is broken without - # this fix - if 'acodec' in info and 'vcodec' not in info: - info['vcodec'] = 'none' - elif 'vcodec' in info and 'acodec' not in info: - info['acodec'] = 'none' - - info['format_id'] = f_id - info['url'] = 'url:' + f_id - return info - formats_order = [format_info(f_id) for f_id in order] - - info_dict = _make_result(list(formats_order), extractor='youtube') - ydl = YDL({'format': 'bestvideo+bestaudio'}) - ydl.sort_formats(info_dict) - ydl.process_ie_result(info_dict) - downloaded = ydl.downloaded_info_dicts[0] - self.assertEqual(downloaded['format_id'], '248+172') - self.assertEqual(downloaded['ext'], 'mp4') - - info_dict = _make_result(list(formats_order), extractor='youtube') - ydl = YDL({'format': 'bestvideo[height>=999999]+bestaudio/best'}) - ydl.sort_formats(info_dict) - ydl.process_ie_result(info_dict) - downloaded = ydl.downloaded_info_dicts[0] - self.assertEqual(downloaded['format_id'], '38') - - info_dict = _make_result(list(formats_order), extractor='youtube') - ydl = YDL({'format': 'bestvideo/best,bestaudio'}) - ydl.sort_formats(info_dict) - ydl.process_ie_result(info_dict) - downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts] - self.assertEqual(downloaded_ids, ['137', '141']) - - info_dict = _make_result(list(formats_order), extractor='youtube') - ydl = YDL({'format': 
'(bestvideo[ext=mp4],bestvideo[ext=webm])+bestaudio'}) - ydl.sort_formats(info_dict) - ydl.process_ie_result(info_dict) - downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts] - self.assertEqual(downloaded_ids, ['137+141', '248+141']) - - info_dict = _make_result(list(formats_order), extractor='youtube') - ydl = YDL({'format': '(bestvideo[ext=mp4],bestvideo[ext=webm])[height<=720]+bestaudio'}) - ydl.sort_formats(info_dict) - ydl.process_ie_result(info_dict) - downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts] - self.assertEqual(downloaded_ids, ['136+141', '247+141']) - - info_dict = _make_result(list(formats_order), extractor='youtube') - ydl = YDL({'format': '(bestvideo[ext=none]/bestvideo[ext=webm])+bestaudio'}) - ydl.sort_formats(info_dict) - ydl.process_ie_result(info_dict) - downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts] - self.assertEqual(downloaded_ids, ['248+141']) - - for f1, f2 in itertools.pairwise(formats_order): - info_dict = _make_result([f1, f2], extractor='youtube') - ydl = YDL({'format': 'best/bestvideo'}) - ydl.sort_formats(info_dict) - ydl.process_ie_result(info_dict) - downloaded = ydl.downloaded_info_dicts[0] - self.assertEqual(downloaded['format_id'], f1['format_id']) - - info_dict = _make_result([f2, f1], extractor='youtube') - ydl = YDL({'format': 'best/bestvideo'}) - ydl.sort_formats(info_dict) - ydl.process_ie_result(info_dict) - downloaded = ydl.downloaded_info_dicts[0] - self.assertEqual(downloaded['format_id'], f1['format_id']) - def test_audio_only_extractor_format_selection(self): # For extractors with incomplete formats (all formats are audio-only or # video-only) best and worst should fallback to corresponding best/worst diff --git a/test/test_http_proxy.py b/test/test_http_proxy.py index e903ff8beb..22ce3ca5d7 100644 --- a/test/test_http_proxy.py +++ b/test/test_http_proxy.py @@ -247,6 +247,7 @@ def ctx(request): @pytest.mark.parametrize( 'handler', ['Urllib', 
'Requests', 'CurlCFFI'], indirect=True) +@pytest.mark.handler_flaky('CurlCFFI', reason='segfaults') @pytest.mark.parametrize('ctx', ['http'], indirect=True) # pure http proxy can only support http class TestHTTPProxy: def test_http_no_auth(self, handler, ctx): @@ -315,6 +316,7 @@ class TestHTTPProxy: ('Requests', 'https'), ('CurlCFFI', 'https'), ], indirect=True) +@pytest.mark.handler_flaky('CurlCFFI', reason='segfaults') class TestHTTPConnectProxy: def test_http_connect_no_auth(self, handler, ctx): with ctx.http_server(HTTPConnectProxyHandler) as server_address: diff --git a/test/test_jsc/conftest.py b/test/test_jsc/conftest.py new file mode 100644 index 0000000000..28d6734122 --- /dev/null +++ b/test/test_jsc/conftest.py @@ -0,0 +1,60 @@ +import re +import pathlib + +import pytest + +import yt_dlp.globals +from yt_dlp import YoutubeDL +from yt_dlp.extractor.common import InfoExtractor + + +_TESTDATA_PATH = pathlib.Path(__file__).parent.parent / 'testdata/sigs' +_player_re = re.compile(r'^.+/player/(?P[a-zA-Z0-9_/.-]+)\.js$') +_player_id_trans = str.maketrans(dict.fromkeys('/.-', '_')) + + +@pytest.fixture +def ie() -> InfoExtractor: + runtime_names = yt_dlp.globals.supported_js_runtimes.value + ydl = YoutubeDL({'js_runtimes': {key: {} for key in runtime_names}}) + ie = ydl.get_info_extractor('Youtube') + + def _load_player(video_id, player_url, fatal=True): + match = _player_re.match(player_url) + test_id = match.group('id').translate(_player_id_trans) + cached_file = _TESTDATA_PATH / f'player-{test_id}.js' + + if cached_file.exists(): + return cached_file.read_text() + + if code := ie._download_webpage(player_url, video_id, fatal=fatal): + _TESTDATA_PATH.mkdir(exist_ok=True, parents=True) + cached_file.write_text(code) + return code + + return None + + ie._load_player = _load_player + return ie + + +class MockLogger: + def trace(self, message: str): + print(f'trace: {message}') + + def debug(self, message: str, *, once=False): + print(f'debug: {message}') + + 
def info(self, message: str): + print(f'info: {message}') + + def warning(self, message: str, *, once=False): + print(f'warning: {message}') + + def error(self, message: str): + print(f'error: {message}') + + +@pytest.fixture +def logger(): + return MockLogger() diff --git a/test/test_jsc/test_ejs_integration.py b/test/test_jsc/test_ejs_integration.py new file mode 100644 index 0000000000..7984810794 --- /dev/null +++ b/test/test_jsc/test_ejs_integration.py @@ -0,0 +1,128 @@ +from __future__ import annotations + +import dataclasses +import enum +import importlib.util +import json + +import pytest + +from yt_dlp.extractor.youtube.jsc.provider import ( + JsChallengeRequest, + JsChallengeType, + JsChallengeProviderResponse, + JsChallengeResponse, + NChallengeInput, + NChallengeOutput, + SigChallengeInput, + SigChallengeOutput, +) +from yt_dlp.extractor.youtube.jsc._builtin.bun import BunJCP +from yt_dlp.extractor.youtube.jsc._builtin.deno import DenoJCP +from yt_dlp.extractor.youtube.jsc._builtin.node import NodeJCP +from yt_dlp.extractor.youtube.jsc._builtin.quickjs import QuickJSJCP + + +_has_ejs = bool(importlib.util.find_spec('yt_dlp_ejs')) +pytestmark = pytest.mark.skipif(not _has_ejs, reason='yt-dlp-ejs not available') + + +class Variant(enum.Enum): + main = 'player_ias.vflset/en_US/base.js' + tcc = 'player_ias_tcc.vflset/en_US/base.js' + tce = 'player_ias_tce.vflset/en_US/base.js' + es5 = 'player_es5.vflset/en_US/base.js' + es6 = 'player_es6.vflset/en_US/base.js' + tv = 'tv-player-ias.vflset/tv-player-ias.js' + tv_es6 = 'tv-player-es6.vflset/tv-player-es6.js' + phone = 'player-plasma-ias-phone-en_US.vflset/base.js' + tablet = 'player-plasma-ias-tablet-en_US.vflset/base.js' + + +@dataclasses.dataclass +class Challenge: + player: str + variant: Variant + type: JsChallengeType + values: dict[str, str] = dataclasses.field(default_factory=dict) + + def url(self, /): + return f'https://www.youtube.com/s/player/{self.player}/{self.variant.value}' + + +CHALLENGES: 
list[Challenge] = [ + Challenge('3d3ba064', Variant.tce, JsChallengeType.N, { + 'ZdZIqFPQK-Ty8wId': 'qmtUsIz04xxiNW', + '4GMrWHyKI5cEvhDO': 'N9gmEX7YhKTSmw', + }), + Challenge('3d3ba064', Variant.tce, JsChallengeType.SIG, { + 'gN7a-hudCuAuPH6fByOk1_GNXN0yNMHShjZXS2VOgsEItAJz0tipeavEOmNdYN-wUtcEqD3bCXjc0iyKfAyZxCBGgIARwsSdQfJ2CJtt': + 'ttJC2JfQdSswRAIgGBCxZyAfKyi0cjXCb3gqEctUw-NYdNmOEvaepit0zJAtIEsgOV2SXZjhSHMNy0NXNG_1kNyBf6HPuAuCduh-a7O', + }), + Challenge('5ec65609', Variant.tce, JsChallengeType.N, { + '0eRGgQWJGfT5rFHFj': '4SvMpDQH-vBJCw', + }), + Challenge('5ec65609', Variant.tce, JsChallengeType.SIG, { + 'AAJAJfQdSswRQIhAMG5SN7-cAFChdrE7tLA6grH0rTMICA1mmDc0HoXgW3CAiAQQ4=CspfaF_vt82XH5yewvqcuEkvzeTsbRuHssRMyJQ=I': + 'AJfQdSswRQIhAMG5SN7-cAFChdrE7tLA6grI0rTMICA1mmDc0HoXgW3CAiAQQ4HCspfaF_vt82XH5yewvqcuEkvzeTsbRuHssRMyJQ==', + }), + Challenge('6742b2b9', Variant.tce, JsChallengeType.N, { + '_HPB-7GFg1VTkn9u': 'qUAsPryAO_ByYg', + 'K1t_fcB6phzuq2SF': 'Y7PcOt3VE62mog', + }), + Challenge('6742b2b9', Variant.tce, JsChallengeType.SIG, { + 'MMGZJMUucirzS_SnrSPYsc85CJNnTUi6GgR5NKn-znQEICACojE8MHS6S7uYq4TGjQX_D4aPk99hNU6wbTvorvVVMgIARwsSdQfJAA': + 'AJfQdSswRAIgMVVvrovTbw6UNh99kPa4D_XQjGT4qYu7S6SHM8EjoCACIEQnz-nKN5RgG6iUTnNJC58csYPSrnS_SzricuUMJZGM', + }), + Challenge('2b83d2e0', Variant.main, JsChallengeType.N, { + '0eRGgQWJGfT5rFHFj': 'euHbygrCMLksxd', + }), + Challenge('2b83d2e0', Variant.main, JsChallengeType.SIG, { + 'MMGZJMUucirzS_SnrSPYsc85CJNnTUi6GgR5NKn-znQEICACojE8MHS6S7uYq4TGjQX_D4aPk99hNU6wbTvorvVVMgIARwsSdQfJA': + '-MGZJMUucirzS_SnrSPYsc85CJNnTUi6GgR5NKnMznQEICACojE8MHS6S7uYq4TGjQX_D4aPk99hNU6wbTvorvVVMgIARwsSdQfJ', + }), + Challenge('638ec5c6', Variant.main, JsChallengeType.N, { + 'ZdZIqFPQK-Ty8wId': '1qov8-KM-yH', + }), + Challenge('638ec5c6', Variant.main, JsChallengeType.SIG, { + 'gN7a-hudCuAuPH6fByOk1_GNXN0yNMHShjZXS2VOgsEItAJz0tipeavEOmNdYN-wUtcEqD3bCXjc0iyKfAyZxCBGgIARwsSdQfJ2CJtt': + 
'MhudCuAuP-6fByOk1_GNXN7gNHHShjyXS2VOgsEItAJz0tipeav0OmNdYN-wUtcEqD3bCXjc0iyKfAyZxCBGgIARwsSdQfJ2CJtt', + }), +] + +requests: list[JsChallengeRequest] = [] +responses: list[JsChallengeProviderResponse] = [] +for test in CHALLENGES: + input_type, output_type = { + JsChallengeType.N: (NChallengeInput, NChallengeOutput), + JsChallengeType.SIG: (SigChallengeInput, SigChallengeOutput), + }[test.type] + + request = JsChallengeRequest(test.type, input_type(test.url(), list(test.values.keys())), test.player) + requests.append(request) + responses.append(JsChallengeProviderResponse(request, JsChallengeResponse(test.type, output_type(test.values)))) + + +@pytest.fixture(params=[BunJCP, DenoJCP, NodeJCP, QuickJSJCP]) +def jcp(request, ie, logger): + obj = request.param(ie, logger, None) + if not obj.is_available(): + pytest.skip(f'{obj.PROVIDER_NAME} is not available') + obj.is_dev = True + return obj + + +@pytest.mark.download +def test_bulk_requests(jcp): + assert list(jcp.bulk_solve(requests)) == responses + + +@pytest.mark.download +def test_using_cached_player(jcp): + first_player_requests = requests[:3] + player = jcp._get_player(first_player_requests[0].video_id, first_player_requests[0].input.player_url) + initial = json.loads(jcp._run_js_runtime(jcp._construct_stdin(player, False, first_player_requests))) + preprocessed = initial.pop('preprocessed_player') + result = json.loads(jcp._run_js_runtime(jcp._construct_stdin(preprocessed, True, first_player_requests))) + + assert initial == result diff --git a/test/test_jsc/test_provider.py b/test/test_jsc/test_provider.py new file mode 100644 index 0000000000..3342f77546 --- /dev/null +++ b/test/test_jsc/test_provider.py @@ -0,0 +1,194 @@ + +import pytest + +from yt_dlp.extractor.youtube.jsc.provider import ( + JsChallengeProvider, + JsChallengeRequest, + JsChallengeProviderResponse, + JsChallengeProviderRejectedRequest, + JsChallengeType, + JsChallengeResponse, + NChallengeOutput, + NChallengeInput, + 
JsChallengeProviderError, + register_provider, + register_preference, +) +from yt_dlp.extractor.youtube.pot._provider import IEContentProvider +from yt_dlp.utils import ExtractorError +from yt_dlp.extractor.youtube.jsc._registry import _jsc_preferences, _jsc_providers + + +class ExampleJCP(JsChallengeProvider): + PROVIDER_NAME = 'example-provider' + PROVIDER_VERSION = '0.0.1' + BUG_REPORT_LOCATION = 'https://example.com/issues' + + _SUPPORTED_TYPES = [JsChallengeType.N] + + def is_available(self) -> bool: + return True + + def _real_bulk_solve(self, requests): + for request in requests: + results = dict.fromkeys(request.input.challenges, 'example-solution') + response = JsChallengeResponse( + type=request.type, + output=NChallengeOutput(results=results)) + yield JsChallengeProviderResponse(request=request, response=response) + + +PLAYER_URL = 'https://example.com/player.js' + + +class TestJsChallengeProvider: + # note: some test covered in TestPoTokenProvider which shares the same base class + def test_base_type(self): + assert issubclass(JsChallengeProvider, IEContentProvider) + + def test_create_provider_missing_bulk_solve_method(self, ie, logger): + class MissingMethodsJCP(JsChallengeProvider): + def is_available(self) -> bool: + return True + + with pytest.raises(TypeError, match='bulk_solve'): + MissingMethodsJCP(ie=ie, logger=logger, settings={}) + + def test_create_provider_missing_available_method(self, ie, logger): + class MissingMethodsJCP(JsChallengeProvider): + def _real_bulk_solve(self, requests): + raise JsChallengeProviderRejectedRequest('Not implemented') + + with pytest.raises(TypeError, match='is_available'): + MissingMethodsJCP(ie=ie, logger=logger, settings={}) + + def test_barebones_provider(self, ie, logger): + class BarebonesProviderJCP(JsChallengeProvider): + def is_available(self) -> bool: + return True + + def _real_bulk_solve(self, requests): + raise JsChallengeProviderRejectedRequest('Not implemented') + + provider = 
BarebonesProviderJCP(ie=ie, logger=logger, settings={}) + assert provider.PROVIDER_NAME == 'BarebonesProvider' + assert provider.PROVIDER_KEY == 'BarebonesProvider' + assert provider.PROVIDER_VERSION == '0.0.0' + assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at (developer has not provided a bug report location) .' + + def test_example_provider_success(self, ie, logger): + provider = ExampleJCP(ie=ie, logger=logger, settings={}) + + request = JsChallengeRequest( + type=JsChallengeType.N, + input=NChallengeInput(player_url=PLAYER_URL, challenges=['example-challenge'])) + + request_two = JsChallengeRequest( + type=JsChallengeType.N, + input=NChallengeInput(player_url=PLAYER_URL, challenges=['example-challenge-2'])) + + responses = list(provider.bulk_solve([request, request_two])) + assert len(responses) == 2 + assert all(isinstance(r, JsChallengeProviderResponse) for r in responses) + assert responses == [ + JsChallengeProviderResponse( + request=request, + response=JsChallengeResponse( + type=JsChallengeType.N, + output=NChallengeOutput(results={'example-challenge': 'example-solution'}), + ), + ), + JsChallengeProviderResponse( + request=request_two, + response=JsChallengeResponse( + type=JsChallengeType.N, + output=NChallengeOutput(results={'example-challenge-2': 'example-solution'}), + ), + ), + ] + + def test_provider_unsupported_challenge_type(self, ie, logger): + provider = ExampleJCP(ie=ie, logger=logger, settings={}) + request_supported = JsChallengeRequest( + type=JsChallengeType.N, + input=NChallengeInput(player_url=PLAYER_URL, challenges=['example-challenge'])) + request_unsupported = JsChallengeRequest( + type=JsChallengeType.SIG, + input=NChallengeInput(player_url=PLAYER_URL, challenges=['example-challenge'])) + responses = list(provider.bulk_solve([request_supported, request_unsupported, request_supported])) + assert len(responses) == 3 + # Requests are validated first before continuing to _real_bulk_solve + 
assert isinstance(responses[0], JsChallengeProviderResponse) + assert isinstance(responses[0].error, JsChallengeProviderRejectedRequest) + assert responses[0].request is request_unsupported + assert str(responses[0].error) == 'JS Challenge type "JsChallengeType.SIG" is not supported by example-provider' + + assert responses[1:] == [ + JsChallengeProviderResponse( + request=request_supported, + response=JsChallengeResponse( + type=JsChallengeType.N, + output=NChallengeOutput(results={'example-challenge': 'example-solution'}), + ), + ), + JsChallengeProviderResponse( + request=request_supported, + response=JsChallengeResponse( + type=JsChallengeType.N, + output=NChallengeOutput(results={'example-challenge': 'example-solution'}), + ), + ), + ] + + def test_provider_get_player(self, ie, logger): + ie._load_player = lambda video_id, player_url, fatal: (video_id, player_url, fatal) + provider = ExampleJCP(ie=ie, logger=logger, settings={}) + assert provider._get_player('video123', PLAYER_URL) == ('video123', PLAYER_URL, True) + + def test_provider_get_player_error(self, ie, logger): + def raise_error(video_id, player_url, fatal): + raise ExtractorError('Failed to load player') + + ie._load_player = raise_error + provider = ExampleJCP(ie=ie, logger=logger, settings={}) + with pytest.raises(JsChallengeProviderError, match='Failed to load player for JS challenge'): + provider._get_player('video123', PLAYER_URL) + + def test_require_class_end_with_suffix(self, ie, logger): + class InvalidSuffix(JsChallengeProvider): + PROVIDER_NAME = 'invalid-suffix' + + def _real_bulk_solve(self, requests): + raise JsChallengeProviderRejectedRequest('Not implemented') + + def is_available(self) -> bool: + return True + + provider = InvalidSuffix(ie=ie, logger=logger, settings={}) + + with pytest.raises(AssertionError): + provider.PROVIDER_KEY # noqa: B018 + + +def test_register_provider(ie): + + @register_provider + class UnavailableProviderJCP(JsChallengeProvider): + def is_available(self) 
-> bool: + return False + + def _real_bulk_solve(self, requests): + raise JsChallengeProviderRejectedRequest('Not implemented') + + assert _jsc_providers.value.get('UnavailableProvider') == UnavailableProviderJCP + _jsc_providers.value.pop('UnavailableProvider') + + +def test_register_preference(ie): + before = len(_jsc_preferences.value) + + @register_preference(ExampleJCP) + def unavailable_preference(*args, **kwargs): + return 1 + + assert len(_jsc_preferences.value) == before + 1 diff --git a/test/test_networking.py b/test/test_networking.py index afdd0c7aa7..631e7458e6 100644 --- a/test/test_networking.py +++ b/test/test_networking.py @@ -3,6 +3,7 @@ # Allow direct execution import os import sys +from unittest.mock import MagicMock import pytest @@ -311,6 +312,7 @@ class TestRequestHandlerBase: @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True) +@pytest.mark.handler_flaky('CurlCFFI', os.name == 'nt', reason='segfaults') class TestHTTPRequestHandler(TestRequestHandlerBase): def test_verify_cert(self, handler): @@ -614,8 +616,11 @@ class TestHTTPRequestHandler(TestRequestHandlerBase): @pytest.mark.skip_handler('CurlCFFI', 'not supported by curl-cffi') def test_gzip_trailing_garbage(self, handler): with handler() as rh: - data = validate_and_send(rh, Request(f'http://localhost:{self.http_port}/trailing_garbage')).read().decode() + res = validate_and_send(rh, Request(f'http://localhost:{self.http_port}/trailing_garbage')) + data = res.read().decode() assert data == '