Mirror of https://github.com/yt-dlp/yt-dlp.git (synced 2025-12-06 06:45:00 +01:00)

Commit eba34968a7: Merge branch 'master' into fix/foxsports
645 changed files with 58298 additions and 32307 deletions
.github/FUNDING.yml (2 changes)

@@ -10,4 +10,4 @@ liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
custom: ['https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators']
custom: ['https://github.com/yt-dlp/yt-dlp/blob/master/Maintainers.md#maintainers']
.github/ISSUE_TEMPLATE/1_broken_site.yml (31 changes)

@@ -2,13 +2,11 @@ name: Broken site support
description: Report issue with yt-dlp on a supported site
labels: [triage, site-bug]
body:
- type: checkboxes
- type: markdown
attributes:
label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
description: Fill all fields even if you think it is irrelevant for the issue
options:
- label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
required: true
value: |
> [!IMPORTANT]
> Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
- type: checkboxes
id: checklist
attributes:

@@ -24,9 +22,9 @@ body:
required: true
- label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
required: true
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar issues **including closed ones**. DO NOT post duplicates
required: true
- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
required: true
- label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
- type: input
@@ -47,6 +45,8 @@ body:
id: verbose
attributes:
label: Provide verbose output that clearly demonstrates the problem
description: |
This is mandatory unless absolutely impossible to provide. If you are unable to provide the output, please explain why.
options:
- label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
required: true

@@ -63,14 +63,15 @@ body:
placeholder: |
[debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
[debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
[debug] yt-dlp version nightly@... from yt-dlp/yt-dlp [b634ba742] (win_exe)
[debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
[debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
[debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
[debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
[debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
[debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
[debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
[debug] Proxy map: {}
[debug] Request Handlers: urllib, requests
[debug] Loaded 1893 extractors
[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
[debug] Request Handlers: urllib, requests, websockets, curl_cffi
[debug] Loaded 1838 extractors
[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
[youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
<more lines>
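The templates above ask for the output of `yt-dlp -vU <your command line>`; the feature-request and question templates later in this diff add the note that API users should set `'verbose': True` in the `YoutubeDL` params instead. A minimal sketch of that API-side equivalent (the URL is just the example from the placeholder, not part of the diff):

```python
# Minimal sketch: API-side equivalent of running `yt-dlp -v <URL>`,
# per the templates' note to add 'verbose': True to the YoutubeDL params.
from yt_dlp import YoutubeDL

opts = {'verbose': True}  # emits the same [debug] lines the template asks for
with YoutubeDL(opts) as ydl:
    ydl.download(['https://www.youtube.com/watch?v=BaW_jenozKc'])  # example URL from the placeholder
```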
@ -2,13 +2,11 @@ name: Site support request
|
|||
description: Request support for a new site
|
||||
labels: [triage, site-request]
|
||||
body:
|
||||
- type: checkboxes
|
||||
- type: markdown
|
||||
attributes:
|
||||
label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
|
||||
description: Fill all fields even if you think it is irrelevant for the issue
|
||||
options:
|
||||
- label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
|
||||
required: true
|
||||
value: |
|
||||
> [!IMPORTANT]
|
||||
> Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
|
||||
- type: checkboxes
|
||||
id: checklist
|
||||
attributes:
|
||||
|
|
@ -24,9 +22,9 @@ body:
|
|||
required: true
|
||||
- label: I've checked that none of provided URLs [violate any copyrights](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-website-primarily-used-for-piracy) or contain any [DRM](https://en.wikipedia.org/wiki/Digital_rights_management) to the best of my knowledge
|
||||
required: true
|
||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
|
||||
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar requests **including closed ones**. DO NOT post duplicates
|
||||
required: true
|
||||
- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
|
||||
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||
required: true
|
||||
- label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and am willing to share it if required
|
||||
- type: input
|
||||
|
|
@ -59,6 +57,8 @@ body:
|
|||
id: verbose
|
||||
attributes:
|
||||
label: Provide verbose output that clearly demonstrates the problem
|
||||
description: |
|
||||
This is mandatory unless absolutely impossible to provide. If you are unable to provide the output, please explain why.
|
||||
options:
|
||||
- label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
|
||||
required: true
|
||||
|
|
@ -75,14 +75,15 @@ body:
|
|||
placeholder: |
|
||||
[debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
|
||||
[debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
|
||||
[debug] yt-dlp version nightly@... from yt-dlp/yt-dlp [b634ba742] (win_exe)
|
||||
[debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
|
||||
[debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
|
||||
[debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
|
||||
[debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
|
||||
[debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
|
||||
[debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
|
||||
[debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
|
||||
[debug] Proxy map: {}
|
||||
[debug] Request Handlers: urllib, requests
|
||||
[debug] Loaded 1893 extractors
|
||||
[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
|
||||
[debug] Request Handlers: urllib, requests, websockets, curl_cffi
|
||||
[debug] Loaded 1838 extractors
|
||||
[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
|
||||
Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
|
||||
yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
|
||||
[youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
|
||||
<more lines>
|
||||
|
|
|
|||
|
|
@ -1,14 +1,12 @@
|
|||
name: Site feature request
|
||||
description: Request a new functionality for a supported site
|
||||
description: Request new functionality for a site supported by yt-dlp
|
||||
labels: [triage, site-enhancement]
|
||||
body:
|
||||
- type: checkboxes
|
||||
- type: markdown
|
||||
attributes:
|
||||
label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
|
||||
description: Fill all fields even if you think it is irrelevant for the issue
|
||||
options:
|
||||
- label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
|
||||
required: true
|
||||
value: |
|
||||
> [!IMPORTANT]
|
||||
> Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
|
||||
- type: checkboxes
|
||||
id: checklist
|
||||
attributes:
|
||||
|
|
@ -22,9 +20,9 @@ body:
|
|||
required: true
|
||||
- label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
|
||||
required: true
|
||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
|
||||
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar requests **including closed ones**. DO NOT post duplicates
|
||||
required: true
|
||||
- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
|
||||
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||
required: true
|
||||
- label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
|
||||
- type: input
|
||||
|
|
@ -55,6 +53,8 @@ body:
|
|||
id: verbose
|
||||
attributes:
|
||||
label: Provide verbose output that clearly demonstrates the problem
|
||||
description: |
|
||||
This is mandatory unless absolutely impossible to provide. If you are unable to provide the output, please explain why.
|
||||
options:
|
||||
- label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
|
||||
required: true
|
||||
|
|
@ -71,14 +71,15 @@ body:
|
|||
placeholder: |
|
||||
[debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
|
||||
[debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
|
||||
[debug] yt-dlp version nightly@... from yt-dlp/yt-dlp [b634ba742] (win_exe)
|
||||
[debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
|
||||
[debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
|
||||
[debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
|
||||
[debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
|
||||
[debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
|
||||
[debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
|
||||
[debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
|
||||
[debug] Proxy map: {}
|
||||
[debug] Request Handlers: urllib, requests
|
||||
[debug] Loaded 1893 extractors
|
||||
[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
|
||||
[debug] Request Handlers: urllib, requests, websockets, curl_cffi
|
||||
[debug] Loaded 1838 extractors
|
||||
[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
|
||||
Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
|
||||
yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
|
||||
[youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
|
||||
<more lines>
|
||||
|
|
|
|||
35
.github/ISSUE_TEMPLATE/4_bug_report.yml
vendored
35
.github/ISSUE_TEMPLATE/4_bug_report.yml
vendored
|
|
@ -2,13 +2,11 @@ name: Core bug report
|
|||
description: Report a bug unrelated to any particular site or extractor
|
||||
labels: [triage, bug]
|
||||
body:
|
||||
- type: checkboxes
|
||||
- type: markdown
|
||||
attributes:
|
||||
label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
|
||||
description: Fill all fields even if you think it is irrelevant for the issue
|
||||
options:
|
||||
- label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
|
||||
required: true
|
||||
value: |
|
||||
> [!IMPORTANT]
|
||||
> Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
|
||||
- type: checkboxes
|
||||
id: checklist
|
||||
attributes:
|
||||
|
|
@ -20,13 +18,9 @@ body:
|
|||
required: true
|
||||
- label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
|
||||
required: true
|
||||
- label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
|
||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar issues **including closed ones**. DO NOT post duplicates
|
||||
required: true
|
||||
- label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
|
||||
required: true
|
||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
|
||||
required: true
|
||||
- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
|
||||
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||
required: true
|
||||
- type: textarea
|
||||
id: description
|
||||
|
|
@ -40,6 +34,8 @@ body:
|
|||
id: verbose
|
||||
attributes:
|
||||
label: Provide verbose output that clearly demonstrates the problem
|
||||
description: |
|
||||
This is mandatory unless absolutely impossible to provide. If you are unable to provide the output, please explain why.
|
||||
options:
|
||||
- label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
|
||||
required: true
|
||||
|
|
@ -56,14 +52,15 @@ body:
|
|||
placeholder: |
|
||||
[debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
|
||||
[debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
|
||||
[debug] yt-dlp version nightly@... from yt-dlp/yt-dlp [b634ba742] (win_exe)
|
||||
[debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
|
||||
[debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
|
||||
[debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
|
||||
[debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
|
||||
[debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
|
||||
[debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
|
||||
[debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
|
||||
[debug] Proxy map: {}
|
||||
[debug] Request Handlers: urllib, requests
|
||||
[debug] Loaded 1893 extractors
|
||||
[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
|
||||
[debug] Request Handlers: urllib, requests, websockets, curl_cffi
|
||||
[debug] Loaded 1838 extractors
|
||||
[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
|
||||
Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
|
||||
yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
|
||||
[youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
|
||||
<more lines>
|
||||
|
|
|
|||
33
.github/ISSUE_TEMPLATE/5_feature_request.yml
vendored
33
.github/ISSUE_TEMPLATE/5_feature_request.yml
vendored
|
|
@ -1,14 +1,12 @@
|
|||
name: Feature request
|
||||
description: Request a new functionality unrelated to any particular site or extractor
|
||||
description: Request a new feature unrelated to any particular site or extractor
|
||||
labels: [triage, enhancement]
|
||||
body:
|
||||
- type: checkboxes
|
||||
- type: markdown
|
||||
attributes:
|
||||
label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
|
||||
description: Fill all fields even if you think it is irrelevant for the issue
|
||||
options:
|
||||
- label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
|
||||
required: true
|
||||
value: |
|
||||
> [!IMPORTANT]
|
||||
> Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
|
||||
- type: checkboxes
|
||||
id: checklist
|
||||
attributes:
|
||||
|
|
@ -22,9 +20,9 @@ body:
|
|||
required: true
|
||||
- label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
|
||||
required: true
|
||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
|
||||
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar requests **including closed ones**. DO NOT post duplicates
|
||||
required: true
|
||||
- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
|
||||
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||
required: true
|
||||
- type: textarea
|
||||
id: description
|
||||
|
|
@ -38,6 +36,8 @@ body:
|
|||
id: verbose
|
||||
attributes:
|
||||
label: Provide verbose output that clearly demonstrates the problem
|
||||
description: |
|
||||
This is mandatory unless absolutely impossible to provide. If you are unable to provide the output, please explain why.
|
||||
options:
|
||||
- label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
|
||||
- label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
|
||||
|
|
@ -52,14 +52,15 @@ body:
|
|||
placeholder: |
|
||||
[debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
|
||||
[debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
|
||||
[debug] yt-dlp version nightly@... from yt-dlp/yt-dlp [b634ba742] (win_exe)
|
||||
[debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
|
||||
[debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
|
||||
[debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
|
||||
[debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
|
||||
[debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
|
||||
[debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
|
||||
[debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
|
||||
[debug] Proxy map: {}
|
||||
[debug] Request Handlers: urllib, requests
|
||||
[debug] Loaded 1893 extractors
|
||||
[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
|
||||
[debug] Request Handlers: urllib, requests, websockets, curl_cffi
|
||||
[debug] Loaded 1838 extractors
|
||||
[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
|
||||
Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
|
||||
yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
|
||||
[youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
|
||||
<more lines>
|
||||
|
|
|
|||
33
.github/ISSUE_TEMPLATE/6_question.yml
vendored
33
.github/ISSUE_TEMPLATE/6_question.yml
vendored
|
|
@ -1,14 +1,12 @@
|
|||
name: Ask question
|
||||
description: Ask yt-dlp related question
|
||||
description: Ask a question about using yt-dlp
|
||||
labels: [question]
|
||||
body:
|
||||
- type: checkboxes
|
||||
- type: markdown
|
||||
attributes:
|
||||
label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
|
||||
description: Fill all fields even if you think it is irrelevant for the issue
|
||||
options:
|
||||
- label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
|
||||
required: true
|
||||
value: |
|
||||
> [!IMPORTANT]
|
||||
> Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
|
|
@ -28,9 +26,9 @@ body:
|
|||
required: true
|
||||
- label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
|
||||
required: true
|
||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar questions **including closed ones**. DO NOT post duplicates
|
||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar questions **including closed ones**. DO NOT post duplicates
|
||||
required: true
|
||||
- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
|
||||
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||
required: true
|
||||
- type: textarea
|
||||
id: question
|
||||
|
|
@ -44,6 +42,8 @@ body:
|
|||
id: verbose
|
||||
attributes:
|
||||
label: Provide verbose output that clearly demonstrates the problem
|
||||
description: |
|
||||
This is mandatory unless absolutely impossible to provide. If you are unable to provide the output, please explain why.
|
||||
options:
|
||||
- label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
|
||||
- label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
|
||||
|
|
@ -58,14 +58,15 @@ body:
|
|||
placeholder: |
|
||||
[debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
|
||||
[debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
|
||||
[debug] yt-dlp version nightly@... from yt-dlp/yt-dlp [b634ba742] (win_exe)
|
||||
[debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
|
||||
[debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
|
||||
[debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
|
||||
[debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
|
||||
[debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
|
||||
[debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
|
||||
[debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
|
||||
[debug] Proxy map: {}
|
||||
[debug] Request Handlers: urllib, requests
|
||||
[debug] Loaded 1893 extractors
|
||||
[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
|
||||
[debug] Request Handlers: urllib, requests, websockets, curl_cffi
|
||||
[debug] Loaded 1838 extractors
|
||||
[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
|
||||
Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
|
||||
yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
|
||||
[youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
|
||||
<more lines>
|
||||
|
|
|
|||
.github/ISSUE_TEMPLATE/config.yml (7 changes)

@@ -1,8 +1,5 @@
blank_issues_enabled: false
contact_links:
- name: Get help from the community on Discord
- name: Get help on Discord
url: https://discord.gg/H5MNcFW63r
about: Join the yt-dlp Discord for community-powered support!
- name: Matrix Bridge to the Discord server
url: https://matrix.to/#/#yt-dlp:matrix.org
about: For those who do not want to use Discord
about: Join the yt-dlp Discord server for support and discussion
|
|
|
|||
|
|
@ -18,9 +18,9 @@ body:
|
|||
required: true
|
||||
- label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
|
||||
required: true
|
||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
|
||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar issues **including closed ones**. DO NOT post duplicates
|
||||
required: true
|
||||
- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
|
||||
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||
required: true
|
||||
- label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
|
||||
- type: input
|
||||
|
|
|
|||
|
|
@ -18,9 +18,9 @@ body:
|
|||
required: true
|
||||
- label: I've checked that none of provided URLs [violate any copyrights](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-website-primarily-used-for-piracy) or contain any [DRM](https://en.wikipedia.org/wiki/Digital_rights_management) to the best of my knowledge
|
||||
required: true
|
||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
|
||||
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar requests **including closed ones**. DO NOT post duplicates
|
||||
required: true
|
||||
- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
|
||||
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||
required: true
|
||||
- label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and am willing to share it if required
|
||||
- type: input
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
name: Site feature request
|
||||
description: Request a new functionality for a supported site
|
||||
description: Request new functionality for a site supported by yt-dlp
|
||||
labels: [triage, site-enhancement]
|
||||
body:
|
||||
%(no_skip)s
|
||||
|
|
@ -16,9 +16,9 @@ body:
|
|||
required: true
|
||||
- label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
|
||||
required: true
|
||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
|
||||
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar requests **including closed ones**. DO NOT post duplicates
|
||||
required: true
|
||||
- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
|
||||
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||
required: true
|
||||
- label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
|
||||
- type: input
|
||||
|
|
|
|||
8
.github/ISSUE_TEMPLATE_tmpl/4_bug_report.yml
vendored
8
.github/ISSUE_TEMPLATE_tmpl/4_bug_report.yml
vendored
|
|
@ -14,13 +14,9 @@ body:
|
|||
required: true
|
||||
- label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
|
||||
required: true
|
||||
- label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
|
||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar issues **including closed ones**. DO NOT post duplicates
|
||||
required: true
|
||||
- label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
|
||||
required: true
|
||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
|
||||
required: true
|
||||
- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
|
||||
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||
required: true
|
||||
- type: textarea
|
||||
id: description
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
name: Feature request
|
||||
description: Request a new functionality unrelated to any particular site or extractor
|
||||
description: Request a new feature unrelated to any particular site or extractor
|
||||
labels: [triage, enhancement]
|
||||
body:
|
||||
%(no_skip)s
|
||||
|
|
@ -16,9 +16,9 @@ body:
|
|||
required: true
|
||||
- label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
|
||||
required: true
|
||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
|
||||
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar requests **including closed ones**. DO NOT post duplicates
|
||||
required: true
|
||||
- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
|
||||
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||
required: true
|
||||
- type: textarea
|
||||
id: description
|
||||
|
|
|
|||
6
.github/ISSUE_TEMPLATE_tmpl/6_question.yml
vendored
6
.github/ISSUE_TEMPLATE_tmpl/6_question.yml
vendored
|
|
@ -1,5 +1,5 @@
|
|||
name: Ask question
|
||||
description: Ask yt-dlp related question
|
||||
description: Ask a question about using yt-dlp
|
||||
labels: [question]
|
||||
body:
|
||||
%(no_skip)s
|
||||
|
|
@ -22,9 +22,9 @@ body:
|
|||
required: true
|
||||
- label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
|
||||
required: true
|
||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar questions **including closed ones**. DO NOT post duplicates
|
||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar questions **including closed ones**. DO NOT post duplicates
|
||||
required: true
|
||||
- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
|
||||
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||
required: true
|
||||
- type: textarea
|
||||
id: question
|
||||
|
|
|
|||
.github/PULL_REQUEST_TEMPLATE.md (38 changes)

@@ -1,14 +1,17 @@
**IMPORTANT**: PRs without the template will be CLOSED
<!--
**IMPORTANT**: PRs without the template will be CLOSED

Due to the high volume of pull requests, it may be a while before your PR is reviewed.
Please try to keep your pull request focused on a single bugfix or new feature.
Pull requests with a vast scope and/or very large diff will take much longer to review.
It is recommended for new contributors to stick to smaller pull requests, so you can receive much more immediate feedback as you familiarize yourself with the codebase.

PLEASE AVOID FORCE-PUSHING after opening a PR, as it makes reviewing more difficult.
-->

### Description of your *pull request* and other information

<!--

Explanation of your *pull request* in arbitrary form goes here. Please **make sure the description explains the purpose and effect** of your *pull request* and is worded well enough to be understood. Provide as much **context and examples** as possible

-->

ADD DESCRIPTION HERE
ADD DETAILED DESCRIPTION HERE

Fixes #

@@ -16,24 +19,23 @@ Fixes #
<details open><summary>Template</summary> <!-- OPEN is intentional -->

<!--
# PLEASE FOLLOW THE GUIDE BELOW

# PLEASE FOLLOW THE GUIDE BELOW

- You will be asked some questions, please read them **carefully** and answer honestly
- Put an `x` into all the boxes `[ ]` relevant to your *pull request* (like [x])
- Use *Preview* tab to see how your *pull request* will actually look like

- You will be asked some questions, please read them **carefully** and answer honestly
- Put an `x` into all the boxes `[ ]` relevant to your *pull request* (like [x])
- Use *Preview* tab to see what your *pull request* will actually look like
-->

### Before submitting a *pull request* make sure you have:
- [ ] At least skimmed through [contributing guidelines](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#developer-instructions) including [yt-dlp coding conventions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#yt-dlp-coding-conventions)
- [ ] [Searched](https://github.com/yt-dlp/yt-dlp/search?q=is%3Apr&type=Issues) the bugtracker for similar pull requests

### In order to be accepted and merged into yt-dlp each piece of code must be in public domain or released under [Unlicense](http://unlicense.org/). Check all of the following options that apply:
- [ ] I am the original author of this code and I am willing to release it under [Unlicense](http://unlicense.org/)
- [ ] I am not the original author of this code but it is in public domain or released under [Unlicense](http://unlicense.org/) (provide reliable evidence)
### In order to be accepted and merged into yt-dlp each piece of code must be in public domain or released under [Unlicense](http://unlicense.org/). Check those that apply and remove the others:
- [ ] I am the original author of the code in this PR, and I am willing to release it under [Unlicense](http://unlicense.org/)
- [ ] I am not the original author of the code in this PR, but it is in the public domain or released under [Unlicense](http://unlicense.org/) (provide reliable evidence)
- [ ] I have read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated

### What is the purpose of your *pull request*?
### What is the purpose of your *pull request*? Check those that apply and remove the others:
- [ ] Fix or improvement to an extractor (Make sure to add/update tests)
- [ ] New extractor ([Piracy websites will not be accepted](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-website-primarily-used-for-piracy))
- [ ] Core bug fix/improvement
.github/actionlint.yml (new file, 22 lines)

@@ -0,0 +1,22 @@
config-variables:
  - KEEP_CACHE_WARM
  - PUSH_VERSION_COMMIT
  - UPDATE_TO_VERIFICATION
  - PYPI_PROJECT
  - PYPI_SUFFIX
  - NIGHTLY_PYPI_PROJECT
  - NIGHTLY_PYPI_SUFFIX
  - NIGHTLY_ARCHIVE_REPO
  - BUILD_NIGHTLY
  - MASTER_PYPI_PROJECT
  - MASTER_PYPI_SUFFIX
  - MASTER_ARCHIVE_REPO
  - BUILD_MASTER
  - ISSUE_LOCKDOWN
  - SANITIZE_COMMENT

paths:
  .github/workflows/build.yml:
    ignore:
      # SC1090 "Can't follow non-constant source": ignore when using `source` to activate venv
      - '.+SC1090.+'
.github/workflows/build.yml (639 changes)

@@ -9,31 +9,27 @@ on:
required: false
default: stable
type: string
origin:
required: true
type: string
unix:
default: true
type: boolean
linux_static:
linux:
default: true
type: boolean
linux_arm:
linux_armv7l:
default: true
type: boolean
musllinux:
default: true
type: boolean
macos:
default: true
type: boolean
macos_legacy:
default: true
type: boolean
windows:
default: true
type: boolean
windows32:
default: true
type: boolean
origin:
required: false
default: ''
type: string
secrets:
GPG_SIGNING_KEY:
required: false
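The `version` input for manual dispatch (described below as `yyyy.mm.dd[.rev] or rev`, default auto-generated) is normalized by the new "Process inputs" step further down in this file's diff. A condensed sketch of that derivation, mirroring the embedded script; the function name and sample values are illustrative only:

```python
# Condensed from the "Process inputs" step in this diff: a full yyyy.mm.dd[.rev]
# version is used as-is, a bare revision is appended to today's UTC date, and an
# empty value falls back to a date plus an HHMMSS revision.
import datetime as dt
import re

def resolve_version(version: str) -> str:  # illustrative helper, not part of the workflow
    timestamp = dt.datetime.now(tz=dt.timezone.utc).strftime('%Y.%m.%d.%H%M%S.%f')
    if version and '.' not in version:
        # dispatched with only a revision, e.g. '3' -> '2025.12.06.3'
        version_parts = [*timestamp.split('.')[:3], version]
    elif not version:
        # no version given: date plus an HHMMSS revision, e.g. '2025.12.06.064500'
        version_parts = timestamp.split('.')[:4]
    else:
        # complete version passed through unchanged
        version_parts = version.split('.')
    assert all(re.fullmatch(r'[0-9]+', part) for part in version_parts), 'Version must be numeric'
    return '.'.join(version_parts)
```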
|
|
@ -43,7 +39,9 @@ on:
|
|||
version:
|
||||
description: |
|
||||
VERSION: yyyy.mm.dd[.rev] or rev
|
||||
required: true
|
||||
(default: auto-generated)
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
channel:
|
||||
description: |
|
||||
|
|
@ -55,37 +53,26 @@ on:
|
|||
description: yt-dlp, yt-dlp.tar.gz
|
||||
default: true
|
||||
type: boolean
|
||||
linux_static:
|
||||
description: yt-dlp_linux
|
||||
linux:
|
||||
description: yt-dlp_linux, yt-dlp_linux.zip, yt-dlp_linux_aarch64, yt-dlp_linux_aarch64.zip
|
||||
default: true
|
||||
type: boolean
|
||||
linux_arm:
|
||||
description: yt-dlp_linux_aarch64, yt-dlp_linux_armv7l
|
||||
linux_armv7l:
|
||||
description: yt-dlp_linux_armv7l.zip
|
||||
default: true
|
||||
type: boolean
|
||||
musllinux:
|
||||
description: yt-dlp_musllinux, yt-dlp_musllinux.zip, yt-dlp_musllinux_aarch64, yt-dlp_musllinux_aarch64.zip
|
||||
default: true
|
||||
type: boolean
|
||||
macos:
|
||||
description: yt-dlp_macos, yt-dlp_macos.zip
|
||||
default: true
|
||||
type: boolean
|
||||
macos_legacy:
|
||||
description: yt-dlp_macos_legacy
|
||||
default: true
|
||||
type: boolean
|
||||
windows:
|
||||
description: yt-dlp.exe, yt-dlp_min.exe, yt-dlp_win.zip
|
||||
description: yt-dlp.exe, yt-dlp_win.zip, yt-dlp_x86.exe, yt-dlp_win_x86.zip, yt-dlp_arm64.exe, yt-dlp_win_arm64.zip
|
||||
default: true
|
||||
type: boolean
|
||||
windows32:
|
||||
description: yt-dlp_x86.exe
|
||||
default: true
|
||||
type: boolean
|
||||
origin:
|
||||
description: Origin
|
||||
required: false
|
||||
default: 'current repo'
|
||||
type: choice
|
||||
options:
|
||||
- 'current repo'
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
|
@ -94,44 +81,153 @@ jobs:
|
|||
process:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
origin: ${{ steps.process_origin.outputs.origin }}
|
||||
origin: ${{ steps.process_inputs.outputs.origin }}
|
||||
timestamp: ${{ steps.process_inputs.outputs.timestamp }}
|
||||
version: ${{ steps.process_inputs.outputs.version }}
|
||||
linux_matrix: ${{ steps.linux_matrix.outputs.matrix }}
|
||||
|
||||
steps:
|
||||
- name: Process origin
|
||||
id: process_origin
|
||||
- name: Process inputs
|
||||
id: process_inputs
|
||||
env:
|
||||
INPUTS: ${{ toJSON(inputs) }}
|
||||
REPOSITORY: ${{ github.repository }}
|
||||
shell: python
|
||||
run: |
|
||||
echo "origin=${{ inputs.origin == 'current repo' && github.repository || inputs.origin }}" | tee "$GITHUB_OUTPUT"
|
||||
import datetime as dt
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
INPUTS = json.loads(os.environ['INPUTS'])
|
||||
timestamp = dt.datetime.now(tz=dt.timezone.utc).strftime('%Y.%m.%d.%H%M%S.%f')
|
||||
version = INPUTS.get('version')
|
||||
if version and '.' not in version:
|
||||
# build.yml was dispatched with only a revision as the version input value
|
||||
version_parts = [*timestamp.split('.')[:3], version]
|
||||
elif not version:
|
||||
# build.yml was dispatched without any version input value, so include .HHMMSS revision
|
||||
version_parts = timestamp.split('.')[:4]
|
||||
else:
|
||||
# build.yml was called or dispatched with a complete version input value
|
||||
version_parts = version.split('.')
|
||||
assert all(re.fullmatch(r'[0-9]+', part) for part in version_parts), 'Version must be numeric'
|
||||
outputs = {
|
||||
'origin': INPUTS.get('origin') or os.environ['REPOSITORY'],
|
||||
'timestamp': timestamp,
|
||||
'version': '.'.join(version_parts),
|
||||
}
|
||||
print(json.dumps(outputs, indent=2))
|
||||
with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
|
||||
f.write('\n'.join(f'{key}={value}' for key, value in outputs.items()))
|
||||
|
||||
- name: Build Linux matrix
|
||||
id: linux_matrix
|
||||
env:
|
||||
INPUTS: ${{ toJSON(inputs) }}
|
||||
PYTHON_VERSION: '3.13'
|
||||
UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
|
||||
shell: python
|
||||
run: |
|
||||
import json
|
||||
import os
|
||||
EXE_MAP = {
|
||||
'linux': [{
|
||||
'os': 'linux',
|
||||
'arch': 'x86_64',
|
||||
'runner': 'ubuntu-24.04',
|
||||
}, {
|
||||
'os': 'linux',
|
||||
'arch': 'aarch64',
|
||||
'runner': 'ubuntu-24.04-arm',
|
||||
}],
|
||||
'linux_armv7l': [{
|
||||
'os': 'linux',
|
||||
'arch': 'armv7l',
|
||||
'runner': 'ubuntu-24.04-arm',
|
||||
'qemu_platform': 'linux/arm/v7',
|
||||
'onefile': False,
|
||||
'cache_requirements': True,
|
||||
'update_to': 'yt-dlp/yt-dlp@2023.03.04',
|
||||
}],
|
||||
'musllinux': [{
|
||||
'os': 'musllinux',
|
||||
'arch': 'x86_64',
|
||||
'runner': 'ubuntu-24.04',
|
||||
'python_version': '3.14',
|
||||
}, {
|
||||
'os': 'musllinux',
|
||||
'arch': 'aarch64',
|
||||
'runner': 'ubuntu-24.04-arm',
|
||||
'python_version': '3.14',
|
||||
}],
|
||||
}
|
||||
INPUTS = json.loads(os.environ['INPUTS'])
|
||||
matrix = [exe for key, group in EXE_MAP.items() for exe in group if INPUTS.get(key)]
|
||||
if not matrix:
|
||||
# If we send an empty matrix when no linux inputs are given, the entire workflow fails
|
||||
matrix = [EXE_MAP['linux'][0]]
|
||||
for exe in matrix:
|
||||
exe['exe'] = '_'.join(filter(None, (
|
||||
'yt-dlp',
|
||||
exe['os'],
|
||||
exe['arch'] != 'x86_64' and exe['arch'],
|
||||
)))
|
||||
exe.setdefault('qemu_platform', None)
|
||||
exe.setdefault('onefile', True)
|
||||
exe.setdefault('onedir', True)
|
||||
exe.setdefault('cache_requirements', False)
|
||||
exe.setdefault('python_version', os.environ['PYTHON_VERSION'])
|
||||
exe.setdefault('update_to', os.environ['UPDATE_TO'])
|
||||
if not any(INPUTS.get(key) for key in EXE_MAP):
|
||||
print('skipping linux job')
|
||||
else:
|
||||
print(json.dumps(matrix, indent=2))
|
||||
with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
|
||||
f.write(f'matrix={json.dumps(matrix)}')
|
||||
|
||||
unix:
|
||||
needs: process
|
||||
if: inputs.unix
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CHANNEL: ${{ inputs.channel }}
|
||||
ORIGIN: ${{ needs.process.outputs.origin }}
|
||||
VERSION: ${{ needs.process.outputs.version }}
|
||||
UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0 # Needed for changelog
|
||||
- uses: actions/setup-python@v5
|
||||
|
||||
- uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: "3.10"
|
||||
|
||||
- name: Install Requirements
|
||||
run: |
|
||||
sudo apt -y install zip pandoc man sed
|
||||
|
||||
- name: Prepare
|
||||
run: |
|
||||
python devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
|
||||
python devscripts/update-version.py -c "${CHANNEL}" -r "${ORIGIN}" "${VERSION}"
|
||||
python devscripts/update_changelog.py -vv
|
||||
python devscripts/make_lazy_extractors.py
|
||||
|
||||
- name: Build Unix platform-independent binary
|
||||
run: |
|
||||
make all tar
|
||||
make all-extra tar
|
||||
|
||||
- name: Verify --update-to
|
||||
if: vars.UPDATE_TO_VERIFICATION
|
||||
run: |
|
||||
chmod +x ./yt-dlp
|
||||
cp ./yt-dlp ./yt-dlp_downgraded
|
||||
version="$(./yt-dlp --version)"
|
||||
./yt-dlp_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
|
||||
./yt-dlp_downgraded -v --update-to "${UPDATE_TO}"
|
||||
downgraded_version="$(./yt-dlp_downgraded --version)"
|
||||
[[ "$version" != "$downgraded_version" ]]
|
||||
[[ "${version}" != "${downgraded_version}" ]]
|
||||
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
|
|
@ -141,97 +237,74 @@ jobs:
|
|||
yt-dlp.tar.gz
|
||||
compression-level: 0
|
||||
|
||||
linux_static:
|
||||
linux:
|
||||
name: ${{ matrix.os }} (${{ matrix.arch }})
|
||||
if: inputs.linux || inputs.linux_armv7l || inputs.musllinux
|
||||
needs: process
|
||||
if: inputs.linux_static
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Build static executable
|
||||
env:
|
||||
channel: ${{ inputs.channel }}
|
||||
origin: ${{ needs.process.outputs.origin }}
|
||||
version: ${{ inputs.version }}
|
||||
run: |
|
||||
mkdir ~/build
|
||||
cd bundle/docker
|
||||
docker compose up --build static
|
||||
sudo chown "${USER}:docker" ~/build/yt-dlp_linux
|
||||
- name: Verify --update-to
|
||||
if: vars.UPDATE_TO_VERIFICATION
|
||||
run: |
|
||||
chmod +x ~/build/yt-dlp_linux
|
||||
cp ~/build/yt-dlp_linux ~/build/yt-dlp_linux_downgraded
|
||||
version="$(~/build/yt-dlp_linux --version)"
|
||||
~/build/yt-dlp_linux_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
|
||||
downgraded_version="$(~/build/yt-dlp_linux_downgraded --version)"
|
||||
[[ "$version" != "$downgraded_version" ]]
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: build-bin-${{ github.job }}
|
||||
path: |
|
||||
~/build/yt-dlp_linux
|
||||
compression-level: 0
|
||||
|
||||
linux_arm:
|
||||
needs: process
|
||||
if: inputs.linux_arm
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write # for creating cache
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ${{ matrix.runner }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
architecture:
|
||||
- armv7
|
||||
- aarch64
|
||||
include: ${{ fromJSON(needs.process.outputs.linux_matrix) }}
|
||||
env:
|
||||
CHANNEL: ${{ inputs.channel }}
|
||||
ORIGIN: ${{ needs.process.outputs.origin }}
|
||||
VERSION: ${{ needs.process.outputs.version }}
|
||||
EXE_NAME: ${{ matrix.exe }}
|
||||
PYTHON_VERSION: ${{ matrix.python_version }}
|
||||
UPDATE_TO: ${{ (vars.UPDATE_TO_VERIFICATION && matrix.update_to) || '' }}
|
||||
SKIP_ONEDIR_BUILD: ${{ (!matrix.onedir && '1') || '' }}
|
||||
SKIP_ONEFILE_BUILD: ${{ (!matrix.onefile && '1') || '' }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
path: ./repo
|
||||
- name: Virtualized Install, Prepare & Build
|
||||
uses: yt-dlp/run-on-arch-action@v2
|
||||
with:
|
||||
# Ref: https://github.com/uraimo/run-on-arch-action/issues/55
|
||||
env: |
|
||||
GITHUB_WORKFLOW: build
|
||||
githubToken: ${{ github.token }} # To cache image
|
||||
arch: ${{ matrix.architecture }}
|
||||
distro: ubuntu18.04 # Standalone executable should be built on minimum supported OS
|
||||
dockerRunArgs: --volume "${PWD}/repo:/repo"
|
||||
install: | # Installing Python 3.10 from the Deadsnakes repo raises errors
|
||||
apt update
|
||||
apt -y install zlib1g-dev libffi-dev python3.8 python3.8-dev python3.8-distutils python3-pip
|
||||
python3.8 -m pip install -U pip setuptools wheel
|
||||
# Cannot access any files from the repo directory at this stage
|
||||
python3.8 -m pip install -U Pyinstaller mutagen pycryptodomex websockets brotli certifi secretstorage cffi
|
||||
- uses: actions/checkout@v5
|
||||
|
||||
run: |
|
||||
cd repo
|
||||
python3.8 devscripts/install_deps.py -o --include build
|
||||
python3.8 devscripts/install_deps.py --include pyinstaller --include secretstorage # Cached version may be out of date
|
||||
python3.8 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
|
||||
python3.8 devscripts/make_lazy_extractors.py
|
||||
python3.8 -m bundle.pyinstaller
|
||||
- name: Cache requirements
|
||||
if: matrix.cache_requirements
|
||||
id: cache-venv
|
||||
uses: actions/cache@v4
|
||||
env:
|
||||
SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
|
||||
with:
|
||||
path: |
|
||||
venv
|
||||
key: cache-reqs-${{ matrix.os }}_${{ matrix.arch }}-${{ github.ref }}-${{ needs.process.outputs.timestamp }}
|
||||
restore-keys: |
|
||||
cache-reqs-${{ matrix.os }}_${{ matrix.arch }}-${{ github.ref }}-
|
||||
cache-reqs-${{ matrix.os }}_${{ matrix.arch }}-
|
||||
|
||||
if ${{ vars.UPDATE_TO_VERIFICATION && 'true' || 'false' }}; then
|
||||
arch="${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}"
|
||||
chmod +x ./dist/yt-dlp_linux_${arch}
|
||||
cp ./dist/yt-dlp_linux_${arch} ./dist/yt-dlp_linux_${arch}_downgraded
|
||||
version="$(./dist/yt-dlp_linux_${arch} --version)"
|
||||
./dist/yt-dlp_linux_${arch}_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
|
||||
downgraded_version="$(./dist/yt-dlp_linux_${arch}_downgraded --version)"
|
||||
[[ "$version" != "$downgraded_version" ]]
|
||||
fi
|
||||
- name: Set up QEMU
|
||||
if: matrix.qemu_platform
|
||||
uses: docker/setup-qemu-action@v3
|
||||
with:
|
||||
platforms: ${{ matrix.qemu_platform }}
|
||||
|
||||
- name: Build executable
|
||||
env:
|
||||
SERVICE: ${{ matrix.os }}_${{ matrix.arch }}
|
||||
run: |
|
||||
mkdir -p ./venv
|
||||
mkdir -p ./dist
|
||||
pushd bundle/docker
|
||||
docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"
|
||||
popd
|
||||
if [[ -z "${SKIP_ONEFILE_BUILD}" ]]; then
|
||||
sudo chown "${USER}:docker" "./dist/${EXE_NAME}"
|
||||
fi
|
||||
|
||||
- name: Verify executable in container
|
||||
env:
|
||||
SERVICE: ${{ matrix.os }}_${{ matrix.arch }}_verify
|
||||
run: |
|
||||
cd bundle/docker
|
||||
docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"
|
||||
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: build-bin-linux_${{ matrix.architecture }}
|
||||
path: | # run-on-arch-action designates armv7l as armv7
|
||||
repo/dist/yt-dlp_linux_${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}
|
||||
name: build-bin-${{ matrix.os }}_${{ matrix.arch }}
|
||||
path: |
|
||||
dist/${{ matrix.exe }}*
|
||||
compression-level: 0
|
||||
|
||||
macos:
|
||||
|
|
@ -239,36 +312,45 @@ jobs:
|
|||
if: inputs.macos
|
||||
permissions:
|
||||
contents: read
|
||||
actions: write # For cleaning up cache
|
||||
runs-on: macos-12
|
||||
runs-on: macos-14
|
||||
env:
|
||||
CHANNEL: ${{ inputs.channel }}
|
||||
ORIGIN: ${{ needs.process.outputs.origin }}
|
||||
VERSION: ${{ needs.process.outputs.version }}
|
||||
UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
# NB: Building universal2 does not work with python from actions/setup-python
|
||||
|
||||
- name: Restore cached requirements
|
||||
id: restore-cache
|
||||
uses: actions/cache/restore@v4
|
||||
- name: Cache requirements
|
||||
id: cache-venv
|
||||
uses: actions/cache@v4
|
||||
env:
|
||||
SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
|
||||
with:
|
||||
path: |
|
||||
~/yt-dlp-build-venv
|
||||
key: cache-reqs-${{ github.job }}
|
||||
key: cache-reqs-${{ github.job }}-${{ github.ref }}-${{ needs.process.outputs.timestamp }}
|
||||
restore-keys: |
|
||||
cache-reqs-${{ github.job }}-${{ github.ref }}-
|
||||
cache-reqs-${{ github.job }}-
|
||||
|
||||
- name: Install Requirements
|
||||
run: |
|
||||
brew install coreutils
|
||||
# We need to use system Python in order to roll our own universal2 curl_cffi wheel
|
||||
brew uninstall --ignore-dependencies python3
|
||||
python3 -m venv ~/yt-dlp-build-venv
|
||||
source ~/yt-dlp-build-venv/bin/activate
|
||||
python3 devscripts/install_deps.py -o --include build
|
||||
python3 devscripts/install_deps.py --print --include pyinstaller > requirements.txt
|
||||
python3 devscripts/install_deps.py --only-optional-groups --include-group build
|
||||
python3 devscripts/install_deps.py --print --include-group pyinstaller > requirements.txt
|
||||
# We need to ignore wheels otherwise we break universal2 builds
|
||||
python3 -m pip install -U --no-binary :all: -r requirements.txt
|
||||
# We need to fuse our own universal2 wheels for curl_cffi
|
||||
python3 -m pip install -U delocate
|
||||
python3 -m pip install -U 'delocate==0.11.0'
|
||||
mkdir curl_cffi_whls curl_cffi_universal2
|
||||
python3 devscripts/install_deps.py --print -o --include curl-cffi > requirements.txt
|
||||
python3 devscripts/install_deps.py --print --only-optional-groups --include-group curl-cffi > requirements.txt
|
||||
for platform in "macosx_11_0_arm64" "macosx_11_0_x86_64"; do
|
||||
python3 -m pip download \
|
||||
--only-binary=:all: \
|
||||
|
|
@ -297,7 +379,7 @@ jobs:
|
|||
|
||||
- name: Prepare
|
||||
run: |
|
||||
python3 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
|
||||
python3 devscripts/update-version.py -c "${CHANNEL}" -r "${ORIGIN}" "${VERSION}"
|
||||
python3 devscripts/make_lazy_extractors.py
|
||||
- name: Build
|
||||
run: |
|
||||
|
|
@ -312,7 +394,7 @@ jobs:
|
|||
chmod +x ./dist/yt-dlp_macos
|
||||
cp ./dist/yt-dlp_macos ./dist/yt-dlp_macos_downgraded
|
||||
version="$(./dist/yt-dlp_macos --version)"
|
||||
./dist/yt-dlp_macos_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
|
||||
./dist/yt-dlp_macos_downgraded -v --update-to "${UPDATE_TO}"
|
||||
downgraded_version="$(./dist/yt-dlp_macos_downgraded --version)"
|
||||
[[ "$version" != "$downgraded_version" ]]
|
||||
|
||||
|
|
@ -325,179 +407,124 @@ jobs:
|
|||
dist/yt-dlp_macos.zip
|
||||
compression-level: 0
|
||||
|
||||
- name: Cleanup cache
|
||||
if: steps.restore-cache.outputs.cache-hit == 'true'
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
cache_key: cache-reqs-${{ github.job }}
|
||||
repository: ${{ github.repository }}
|
||||
branch: ${{ github.ref }}
|
||||
run: |
|
||||
gh extension install actions/gh-actions-cache
|
||||
gh actions-cache delete "${cache_key}" -R "${repository}" -B "${branch}" --confirm
|
||||
|
||||
- name: Cache requirements
|
||||
uses: actions/cache/save@v4
|
||||
with:
|
||||
path: |
|
||||
~/yt-dlp-build-venv
|
||||
key: cache-reqs-${{ github.job }}
|
||||
|
||||
macos_legacy:
|
||||
needs: process
|
||||
if: inputs.macos_legacy
|
||||
runs-on: macos-12
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Install Python
|
||||
# We need the official Python, because the GA ones only support newer macOS versions
|
||||
env:
|
||||
PYTHON_VERSION: 3.10.5
|
||||
MACOSX_DEPLOYMENT_TARGET: 10.9 # Used by the Python build tools
|
||||
run: |
|
||||
# Hack to get the latest patch version. Uncomment if needed
|
||||
#brew install python@3.10
|
||||
#export PYTHON_VERSION=$( $(brew --prefix)/opt/python@3.10/bin/python3 --version | cut -d ' ' -f 2 )
|
||||
curl "https://www.python.org/ftp/python/${PYTHON_VERSION}/python-${PYTHON_VERSION}-macos11.pkg" -o "python.pkg"
|
||||
sudo installer -pkg python.pkg -target /
|
||||
python3 --version
|
||||
- name: Install Requirements
|
||||
run: |
|
||||
brew install coreutils
|
||||
python3 devscripts/install_deps.py --user -o --include build
|
||||
python3 devscripts/install_deps.py --user --include pyinstaller
|
||||
|
||||
- name: Prepare
|
||||
run: |
|
||||
python3 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
|
||||
python3 devscripts/make_lazy_extractors.py
|
||||
- name: Build
|
||||
run: |
|
||||
python3 -m bundle.pyinstaller
|
||||
mv dist/yt-dlp_macos dist/yt-dlp_macos_legacy
|
||||
|
||||
- name: Verify --update-to
|
||||
if: vars.UPDATE_TO_VERIFICATION
|
||||
run: |
|
||||
chmod +x ./dist/yt-dlp_macos_legacy
|
||||
cp ./dist/yt-dlp_macos_legacy ./dist/yt-dlp_macos_legacy_downgraded
|
||||
version="$(./dist/yt-dlp_macos_legacy --version)"
|
||||
./dist/yt-dlp_macos_legacy_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
|
||||
downgraded_version="$(./dist/yt-dlp_macos_legacy_downgraded --version)"
|
||||
[[ "$version" != "$downgraded_version" ]]
|
||||
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: build-bin-${{ github.job }}
|
||||
path: |
|
||||
dist/yt-dlp_macos_legacy
|
||||
compression-level: 0
|
||||
|
||||
windows:
|
||||
name: windows (${{ matrix.arch }})
|
||||
needs: process
|
||||
if: inputs.windows
|
||||
runs-on: windows-latest
|
||||
permissions:
|
||||
contents: read
|
||||
runs-on: ${{ matrix.runner }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- arch: 'x64'
|
||||
runner: windows-2025
|
||||
python_version: '3.10'
|
||||
platform_tag: win_amd64
|
||||
pyi_version: '6.16.0'
|
||||
pyi_tag: '2025.09.13.221251'
|
||||
pyi_hash: b6496c7630c3afe66900cfa824e8234a8c2e2c81704bd7facd79586abc76c0e5
|
||||
- arch: 'x86'
|
||||
runner: windows-2025
|
||||
python_version: '3.10'
|
||||
platform_tag: win32
|
||||
pyi_version: '6.16.0'
|
||||
pyi_tag: '2025.09.13.221251'
|
||||
pyi_hash: 2d881843580efdc54f3523507fc6d9c5b6051ee49c743a6d9b7003ac5758c226
|
||||
- arch: 'arm64'
|
||||
runner: windows-11-arm
|
||||
python_version: '3.13' # arm64 only has Python >= 3.11 available
|
||||
platform_tag: win_arm64
|
||||
pyi_version: '6.16.0'
|
||||
pyi_tag: '2025.09.13.221251'
|
||||
pyi_hash: 4250c9085e34a95c898f3ee2f764914fc36ec59f0d97c28e6a75fcf21f7b144f
|
||||
env:
|
||||
CHANNEL: ${{ inputs.channel }}
|
||||
ORIGIN: ${{ needs.process.outputs.origin }}
|
||||
VERSION: ${{ needs.process.outputs.version }}
|
||||
SUFFIX: ${{ (matrix.arch != 'x64' && format('_{0}', matrix.arch)) || '' }}
|
||||
UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
|
||||
BASE_CACHE_KEY: cache-reqs-${{ github.job }}_${{ matrix.arch }}-${{ matrix.python_version }}
|
||||
PYI_REPO: https://github.com/yt-dlp/Pyinstaller-Builds
|
||||
PYI_WHEEL: pyinstaller-${{ matrix.pyi_version }}-py3-none-${{ matrix.platform_tag }}.whl
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with: # 3.8 is used for Win7 support
|
||||
python-version: "3.8"
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: ${{ matrix.python_version }}
|
||||
architecture: ${{ matrix.arch }}
|
||||
|
||||
- name: Cache requirements
|
||||
id: cache-venv
|
||||
if: matrix.arch == 'arm64'
|
||||
uses: actions/cache@v4
|
||||
env:
|
||||
SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
|
||||
with:
|
||||
path: |
|
||||
/yt-dlp-build-venv
|
||||
key: ${{ env.BASE_CACHE_KEY }}-${{ github.ref }}-${{ needs.process.outputs.timestamp }}
|
||||
restore-keys: |
|
||||
${{ env.BASE_CACHE_KEY }}-${{ github.ref }}-
|
||||
${{ env.BASE_CACHE_KEY }}-
|
||||
|
||||
- name: Install Requirements
|
||||
run: | # Custom pyinstaller built with https://github.com/yt-dlp/pyinstaller-builds
|
||||
python devscripts/install_deps.py -o --include build
|
||||
python devscripts/install_deps.py --include curl-cffi
|
||||
python -m pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/x86_64/pyinstaller-6.7.0-py3-none-any.whl"
|
||||
env:
|
||||
ARCH: ${{ matrix.arch }}
|
||||
PYI_URL: ${{ env.PYI_REPO }}/releases/download/${{ matrix.pyi_tag }}/${{ env.PYI_WHEEL }}
|
||||
PYI_HASH: ${{ matrix.pyi_hash }}
|
||||
shell: pwsh
|
||||
run: |
|
||||
python -m venv /yt-dlp-build-venv
|
||||
/yt-dlp-build-venv/Scripts/Activate.ps1
|
||||
python -m pip install -U pip
|
||||
# Install custom PyInstaller build and verify hash
|
||||
mkdir /pyi-wheels
|
||||
python -m pip download -d /pyi-wheels --no-deps --require-hashes "pyinstaller@${Env:PYI_URL}#sha256=${Env:PYI_HASH}"
|
||||
python -m pip install --force-reinstall -U "/pyi-wheels/${Env:PYI_WHEEL}"
|
||||
python devscripts/install_deps.py --only-optional-groups --include-group build
|
||||
if ("${Env:ARCH}" -eq "x86") {
|
||||
python devscripts/install_deps.py
|
||||
} else {
|
||||
python devscripts/install_deps.py --include-group curl-cffi
|
||||
}
|
||||
|
||||
- name: Prepare
|
||||
shell: pwsh
|
||||
run: |
|
||||
python devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
|
||||
python devscripts/update-version.py -c "${Env:CHANNEL}" -r "${Env:ORIGIN}" "${Env:VERSION}"
|
||||
python devscripts/make_lazy_extractors.py
|
||||
|
||||
- name: Build
|
||||
shell: pwsh
|
||||
run: |
|
||||
/yt-dlp-build-venv/Scripts/Activate.ps1
|
||||
python -m bundle.pyinstaller
|
||||
python -m bundle.pyinstaller --onedir
|
||||
Move-Item ./dist/yt-dlp.exe ./dist/yt-dlp_real.exe
|
||||
Compress-Archive -Path ./dist/yt-dlp/* -DestinationPath ./dist/yt-dlp_win.zip
|
||||
|
||||
- name: Install Requirements (py2exe)
|
||||
run: |
|
||||
python devscripts/install_deps.py --include py2exe
|
||||
- name: Build (py2exe)
|
||||
run: |
|
||||
python -m bundle.py2exe
|
||||
Move-Item ./dist/yt-dlp.exe ./dist/yt-dlp_min.exe
|
||||
Move-Item ./dist/yt-dlp_real.exe ./dist/yt-dlp.exe
|
||||
Compress-Archive -Path ./dist/yt-dlp${Env:SUFFIX}/* -DestinationPath ./dist/yt-dlp_win${Env:SUFFIX}.zip
|
||||
|
||||
- name: Verify --update-to
|
||||
if: vars.UPDATE_TO_VERIFICATION
|
||||
shell: pwsh
|
||||
run: |
|
||||
foreach ($name in @("yt-dlp","yt-dlp_min")) {
|
||||
Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe"
|
||||
$version = & "./dist/${name}.exe" --version
|
||||
& "./dist/${name}_downgraded.exe" -v --update-to yt-dlp/yt-dlp@2023.03.04
|
||||
$downgraded_version = & "./dist/${name}_downgraded.exe" --version
|
||||
if ($version -eq $downgraded_version) {
|
||||
exit 1
|
||||
}
|
||||
$name = "yt-dlp${Env:SUFFIX}"
|
||||
Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe"
|
||||
$version = & "./dist/${name}.exe" --version
|
||||
& "./dist/${name}_downgraded.exe" -v --update-to "${Env:UPDATE_TO}"
|
||||
$downgraded_version = & "./dist/${name}_downgraded.exe" --version
|
||||
if ($version -eq $downgraded_version) {
|
||||
exit 1
|
||||
}
|
||||
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: build-bin-${{ github.job }}
|
||||
name: build-bin-${{ github.job }}-${{ matrix.arch }}
|
||||
path: |
|
||||
dist/yt-dlp.exe
|
||||
dist/yt-dlp_min.exe
|
||||
dist/yt-dlp_win.zip
|
||||
compression-level: 0
|
||||
|
||||
windows32:
|
||||
needs: process
|
||||
if: inputs.windows32
|
||||
runs-on: windows-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.8"
|
||||
architecture: "x86"
|
||||
- name: Install Requirements
|
||||
run: |
|
||||
python devscripts/install_deps.py -o --include build
|
||||
python devscripts/install_deps.py
|
||||
python -m pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/i686/pyinstaller-6.7.0-py3-none-any.whl"
|
||||
|
||||
- name: Prepare
|
||||
run: |
|
||||
python devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
|
||||
python devscripts/make_lazy_extractors.py
|
||||
- name: Build
|
||||
run: |
|
||||
python -m bundle.pyinstaller
|
||||
|
||||
- name: Verify --update-to
|
||||
if: vars.UPDATE_TO_VERIFICATION
|
||||
run: |
|
||||
foreach ($name in @("yt-dlp_x86")) {
|
||||
Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe"
|
||||
$version = & "./dist/${name}.exe" --version
|
||||
& "./dist/${name}_downgraded.exe" -v --update-to yt-dlp/yt-dlp@2023.03.04
|
||||
$downgraded_version = & "./dist/${name}_downgraded.exe" --version
|
||||
if ($version -eq $downgraded_version) {
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: build-bin-${{ github.job }}
|
||||
path: |
|
||||
dist/yt-dlp_x86.exe
|
||||
dist/yt-dlp${{ env.SUFFIX }}.exe
|
||||
dist/yt-dlp_win${{ env.SUFFIX }}.zip
|
||||
compression-level: 0
|
||||
|
||||
meta_files:
|
||||
|
|
@ -505,15 +532,13 @@ jobs:
|
|||
needs:
|
||||
- process
|
||||
- unix
|
||||
- linux_static
|
||||
- linux_arm
|
||||
- linux
|
||||
- macos
|
||||
- macos_legacy
|
||||
- windows
|
||||
- windows32
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/download-artifact@v4
|
||||
- name: Download artifacts
|
||||
uses: actions/download-artifact@v5
|
||||
with:
|
||||
path: artifact
|
||||
pattern: build-bin-*
|
||||
|
|
@ -535,21 +560,45 @@ jobs:
|
|||
cat >> _update_spec << EOF
|
||||
# This file is used for regulating self-update
|
||||
lock 2022.08.18.36 .+ Python 3\.6
|
||||
lock 2023.11.16 (?!win_x86_exe).+ Python 3\.7
|
||||
lock 2023.11.16 zip Python 3\.7
|
||||
lock 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
||||
lock 2024.10.22 py2exe .+
|
||||
lock 2024.10.22 zip Python 3\.8
|
||||
lock 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
|
||||
lock 2025.08.11 darwin_legacy_exe .+
|
||||
lock 2025.08.27 linux_armv7l_exe .+
|
||||
lock 2025.10.14 zip Python 3\.9
|
||||
lockV2 yt-dlp/yt-dlp 2022.08.18.36 .+ Python 3\.6
|
||||
lockV2 yt-dlp/yt-dlp 2023.11.16 (?!win_x86_exe).+ Python 3\.7
|
||||
lockV2 yt-dlp/yt-dlp 2023.11.16 zip Python 3\.7
|
||||
lockV2 yt-dlp/yt-dlp 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 (?!win_x86_exe).+ Python 3\.7
|
||||
lockV2 yt-dlp/yt-dlp 2024.10.22 py2exe .+
|
||||
lockV2 yt-dlp/yt-dlp 2024.10.22 zip Python 3\.8
|
||||
lockV2 yt-dlp/yt-dlp 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
|
||||
lockV2 yt-dlp/yt-dlp 2025.08.11 darwin_legacy_exe .+
|
||||
lockV2 yt-dlp/yt-dlp 2025.08.27 linux_armv7l_exe .+
|
||||
lockV2 yt-dlp/yt-dlp 2025.10.14 zip Python 3\.9
|
||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 zip Python 3\.7
|
||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
||||
lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 (?!win_x86_exe).+ Python 3\.7
|
||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 py2exe .+
|
||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 zip Python 3\.8
|
||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
|
||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2025.08.12.233030 darwin_legacy_exe .+
|
||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2025.08.30.232839 linux_armv7l_exe .+
|
||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2025.10.14.232845 zip Python 3\.9
|
||||
lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 zip Python 3\.7
|
||||
lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
||||
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.045052 py2exe .+
|
||||
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 zip Python 3\.8
|
||||
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
|
||||
lockV2 yt-dlp/yt-dlp-master-builds 2025.08.12.232447 darwin_legacy_exe .+
|
||||
lockV2 yt-dlp/yt-dlp-master-builds 2025.09.05.212910 linux_armv7l_exe .+
|
||||
lockV2 yt-dlp/yt-dlp-master-builds 2025.10.14.232330 zip Python 3\.9
|
||||
EOF
|
||||
|
||||
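For reference, here is a minimal, hypothetical sketch of how the lock lines written above could be evaluated against a build's variant/system description. The `max_allowed_version` helper is illustrative only; the actual parsing and precedence rules live in yt-dlp's updater (`yt_dlp/update.py`), this merely shows the `lock`/`lockV2` line format:

```
import re

# Hypothetical matcher for _update_spec lines:
#   "lock <version> <pattern>"         caps self-updates at <version> for any
#                                      build whose description matches <pattern>
#   "lockV2 <repo> <version> <pattern>" scopes the same rule to one release repo
def max_allowed_version(spec_text, repo, build_description):
    locked = None
    for line in spec_text.splitlines():
        if line.startswith('lockV2 '):
            _, line_repo, version, pattern = line.split(maxsplit=3)
            if line_repo != repo:
                continue
        elif line.startswith('lock '):
            _, version, pattern = line.split(maxsplit=2)
        else:
            continue  # comments and blank lines
        if re.match(pattern, build_description):
            locked = version
    return locked


spec = r'lockV2 yt-dlp/yt-dlp 2024.10.22 zip Python 3\.8'
print(max_allowed_version(spec, 'yt-dlp/yt-dlp', 'zip Python 3.8.10'))  # 2024.10.22
print(max_allowed_version(spec, 'yt-dlp/yt-dlp', 'zip Python 3.9.18'))  # None
```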
- name: Sign checksum files
|
||||
env:
|
||||
GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
|
||||
if: env.GPG_SIGNING_KEY != ''
|
||||
if: env.GPG_SIGNING_KEY
|
||||
run: |
|
||||
gpg --batch --import <<< "${{ secrets.GPG_SIGNING_KEY }}"
|
||||
for signfile in ./SHA*SUMS; do
|
||||
|
|
|
|||
23
.github/workflows/cache-warmer.yml
vendored
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
name: Keep cache warm
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: '0 22 1,6,11,16,21,27 * *'
|
||||
|
||||
jobs:
|
||||
build:
|
||||
if: |
|
||||
vars.KEEP_CACHE_WARM || github.event_name == 'workflow_dispatch'
|
||||
uses: ./.github/workflows/build.yml
|
||||
with:
|
||||
version: '999999'
|
||||
channel: stable
|
||||
origin: ${{ github.repository }}
|
||||
unix: false
|
||||
linux: false
|
||||
linux_armv7l: true
|
||||
musllinux: false
|
||||
macos: true
|
||||
windows: true
|
||||
permissions:
|
||||
contents: read
|
||||
77
.github/workflows/challenge-tests.yml
vendored
Normal file
|
|
@ -0,0 +1,77 @@
|
|||
name: Challenge Tests
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- .github/workflows/challenge-tests.yml
|
||||
- test/test_jsc/*.py
|
||||
- yt_dlp/extractor/youtube/jsc/**.js
|
||||
- yt_dlp/extractor/youtube/jsc/**.py
|
||||
- yt_dlp/extractor/youtube/pot/**.py
|
||||
- yt_dlp/utils/_jsruntime.py
|
||||
pull_request:
|
||||
paths:
|
||||
- .github/workflows/challenge-tests.yml
|
||||
- test/test_jsc/*.py
|
||||
- yt_dlp/extractor/youtube/jsc/**.js
|
||||
- yt_dlp/extractor/youtube/jsc/**.py
|
||||
- yt_dlp/extractor/youtube/pot/**.py
|
||||
- yt_dlp/utils/_jsruntime.py
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
concurrency:
|
||||
group: challenge-tests-${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: ${{ github.event_name == 'pull_request' }}
|
||||
|
||||
jobs:
|
||||
tests:
|
||||
name: Challenge Tests
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [ubuntu-latest, windows-latest]
|
||||
python-version: ['3.10', '3.11', '3.12', '3.13', '3.14', pypy-3.11]
|
||||
env:
|
||||
QJS_VERSION: '2025-04-26' # Earliest version with rope strings
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install Deno
|
||||
uses: denoland/setup-deno@v2
|
||||
with:
|
||||
deno-version: '2.0.0' # minimum supported version
|
||||
- name: Install Bun
|
||||
uses: oven-sh/setup-bun@v2
|
||||
with:
|
||||
# minimum supported version is 1.0.31 but earliest available Windows version is 1.1.0
|
||||
bun-version: ${{ (matrix.os == 'windows-latest' && '1.1.0') || '1.0.31' }}
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: '20.0' # minimum supported version
|
||||
- name: Install QuickJS (Linux)
|
||||
if: matrix.os == 'ubuntu-latest'
|
||||
run: |
|
||||
wget "https://bellard.org/quickjs/binary_releases/quickjs-linux-x86_64-${QJS_VERSION}.zip" -O quickjs.zip
|
||||
unzip quickjs.zip qjs
|
||||
sudo install qjs /usr/local/bin/qjs
|
||||
- name: Install QuickJS (Windows)
|
||||
if: matrix.os == 'windows-latest'
|
||||
shell: pwsh
|
||||
run: |
|
||||
Invoke-WebRequest "https://bellard.org/quickjs/binary_releases/quickjs-win-x86_64-${Env:QJS_VERSION}.zip" -OutFile quickjs.zip
|
||||
unzip quickjs.zip
|
||||
- name: Install test requirements
|
||||
run: |
|
||||
python ./devscripts/install_deps.py --print --only-optional-groups --include-group test > requirements.txt
|
||||
python ./devscripts/install_deps.py --print -c certifi -c requests -c urllib3 -c yt-dlp-ejs >> requirements.txt
|
||||
python -m pip install -U -r requirements.txt
|
||||
- name: Run tests
|
||||
timeout-minutes: 15
|
||||
run: |
|
||||
python -m yt_dlp -v --js-runtimes node --js-runtimes bun --js-runtimes quickjs || true
|
||||
python ./devscripts/run_tests.py test/test_jsc -k download
|
||||
8
.github/workflows/codeql.yml
vendored
|
|
@ -29,11 +29,11 @@ jobs:
|
|||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v5
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v2
|
||||
uses: github/codeql-action/init@v3
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
|
|
@ -47,7 +47,7 @@ jobs:
|
|||
# Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v2
|
||||
uses: github/codeql-action/autobuild@v3
|
||||
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
|
||||
|
|
@ -60,6 +60,6 @@ jobs:
|
|||
# ./location_of_script_within_repo/buildscript.sh
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v2
|
||||
uses: github/codeql-action/analyze@v3
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
||||
|
|
|
|||
50
.github/workflows/core.yml
vendored
|
|
@ -6,7 +6,8 @@ on:
|
|||
- devscripts/**
|
||||
- test/**
|
||||
- yt_dlp/**.py
|
||||
- '!yt_dlp/extractor/*.py'
|
||||
- '!yt_dlp/extractor/**.py'
|
||||
- yt_dlp/extractor/youtube/**.py
|
||||
- yt_dlp/extractor/__init__.py
|
||||
- yt_dlp/extractor/common.py
|
||||
- yt_dlp/extractor/extractors.py
|
||||
|
|
@ -16,7 +17,8 @@ on:
|
|||
- devscripts/**
|
||||
- test/**
|
||||
- yt_dlp/**.py
|
||||
- '!yt_dlp/extractor/*.py'
|
||||
- '!yt_dlp/extractor/**.py'
|
||||
- yt_dlp/extractor/youtube/**.py
|
||||
- yt_dlp/extractor/__init__.py
|
||||
- yt_dlp/extractor/common.py
|
||||
- yt_dlp/extractor/extractors.py
|
||||
|
|
@ -36,26 +38,54 @@ jobs:
|
|||
fail-fast: false
|
||||
matrix:
|
||||
os: [ubuntu-latest]
|
||||
# CPython 3.8 is in quick-test
|
||||
python-version: ['3.9', '3.10', '3.11', '3.12', pypy-3.8, pypy-3.10]
|
||||
# CPython 3.10 is in quick-test
|
||||
python-version: ['3.11', '3.12', '3.13', '3.14', pypy-3.11]
|
||||
include:
|
||||
# at least one of each CPython/PyPy test must be on Windows
|
||||
- os: windows-latest
|
||||
python-version: '3.8'
|
||||
python-version: '3.10'
|
||||
- os: windows-latest
|
||||
python-version: '3.11'
|
||||
- os: windows-latest
|
||||
python-version: '3.12'
|
||||
- os: windows-latest
|
||||
python-version: pypy-3.9
|
||||
python-version: '3.13'
|
||||
- os: windows-latest
|
||||
python-version: '3.14'
|
||||
- os: windows-latest
|
||||
python-version: pypy-3.11
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v5
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install test requirements
|
||||
run: python3 ./devscripts/install_deps.py --include test --include curl-cffi
|
||||
run: python ./devscripts/install_deps.py --include-group test --include-group curl-cffi
|
||||
- name: Run tests
|
||||
timeout-minutes: 15
|
||||
continue-on-error: False
|
||||
env:
|
||||
source: ${{ (github.event_name == 'push' && github.event.before) || 'origin/master' }}
|
||||
target: ${{ (github.event_name == 'push' && github.event.after) || 'HEAD' }}
|
||||
shell: bash
|
||||
run: |
|
||||
flags=()
|
||||
# Check if a networking file is involved
|
||||
patterns="\
|
||||
^yt_dlp/networking/
|
||||
^yt_dlp/utils/networking\.py$
|
||||
^test/test_http_proxy\.py$
|
||||
^test/test_networking\.py$
|
||||
^test/test_networking_utils\.py$
|
||||
^test/test_socks\.py$
|
||||
^test/test_websockets\.py$
|
||||
^pyproject\.toml$
|
||||
"
|
||||
if git diff --name-only "${source}" "${target}" | grep -Ef <(printf '%s' "${patterns}"); then
|
||||
flags+=(--flaky)
|
||||
fi
|
||||
python3 -m yt_dlp -v || true # Print debug head
|
||||
python3 ./devscripts/run_tests.py core
|
||||
python3 -m devscripts.run_tests "${flags[@]}" --pytest-args '--reruns 2 --reruns-delay 3.0' core
|
||||
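The `grep -Ef` check above gates the rerun flags on whether any changed path touches networking code. A rough Python equivalent of that decision, reusing the same pattern list (the `needs_flaky_reruns` helper is hypothetical and shown only to illustrate the matching):

```
import re

# Patterns mirrored from the workflow step above: changes to these paths make
# network-dependent tests more likely to flake, so reruns are enabled.
NETWORKING_PATTERNS = [
    r'^yt_dlp/networking/',
    r'^yt_dlp/utils/networking\.py$',
    r'^test/test_http_proxy\.py$',
    r'^test/test_networking\.py$',
    r'^test/test_networking_utils\.py$',
    r'^test/test_socks\.py$',
    r'^test/test_websockets\.py$',
    r'^pyproject\.toml$',
]


def needs_flaky_reruns(changed_files):
    """Return True if any changed path matches a networking pattern."""
    return any(
        re.search(pattern, path)
        for path in changed_files
        for pattern in NETWORKING_PATTERNS
    )


print(needs_flaky_reruns(['yt_dlp/networking/_requests.py']))   # True
print(needs_flaky_reruns(['yt_dlp/extractor/_extractors.py']))  # False
```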
|
|
|
|||
24
.github/workflows/download.yml
vendored
|
|
@ -9,16 +9,16 @@ jobs:
|
|||
if: "contains(github.event.head_commit.message, 'ci run dl')"
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v5
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: 3.9
|
||||
python-version: '3.10'
|
||||
- name: Install test requirements
|
||||
run: python3 ./devscripts/install_deps.py --include dev
|
||||
run: python ./devscripts/install_deps.py --include-group dev
|
||||
- name: Run tests
|
||||
continue-on-error: true
|
||||
run: python3 ./devscripts/run_tests.py download
|
||||
run: python ./devscripts/run_tests.py download
|
||||
|
||||
full:
|
||||
name: Full Download Tests
|
||||
|
|
@ -28,21 +28,21 @@ jobs:
|
|||
fail-fast: true
|
||||
matrix:
|
||||
os: [ubuntu-latest]
|
||||
python-version: ['3.10', '3.11', '3.12', pypy-3.8, pypy-3.10]
|
||||
python-version: ['3.11', '3.12', '3.13', '3.14', pypy-3.11]
|
||||
include:
|
||||
# at least one of each CPython/PyPy test must be on Windows
|
||||
- os: windows-latest
|
||||
python-version: '3.8'
|
||||
python-version: '3.10'
|
||||
- os: windows-latest
|
||||
python-version: pypy-3.9
|
||||
python-version: pypy-3.11
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v5
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install test requirements
|
||||
run: python3 ./devscripts/install_deps.py --include dev
|
||||
run: python ./devscripts/install_deps.py --include-group dev
|
||||
- name: Run tests
|
||||
continue-on-error: true
|
||||
run: python3 ./devscripts/run_tests.py download
|
||||
run: python ./devscripts/run_tests.py download
|
||||
|
|
|
|||
21
.github/workflows/issue-lockdown.yml
vendored
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
name: Issue Lockdown
|
||||
on:
|
||||
issues:
|
||||
types: [opened]
|
||||
|
||||
permissions:
|
||||
issues: write
|
||||
|
||||
jobs:
|
||||
lockdown:
|
||||
name: Issue Lockdown
|
||||
if: vars.ISSUE_LOCKDOWN
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: "Lock new issue"
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
ISSUE_NUMBER: ${{ github.event.issue.number }}
|
||||
REPOSITORY: ${{ github.repository }}
|
||||
run: |
|
||||
gh issue lock "${ISSUE_NUMBER}" -R "${REPOSITORY}"
|
||||
25
.github/workflows/quick-test.yml
vendored
|
|
@ -9,31 +9,34 @@ jobs:
|
|||
if: "!contains(github.event.head_commit.message, 'ci skip all')"
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Set up Python 3.8
|
||||
uses: actions/setup-python@v5
|
||||
- uses: actions/checkout@v5
|
||||
- name: Set up Python 3.10
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: '3.8'
|
||||
python-version: '3.10'
|
||||
- name: Install test requirements
|
||||
run: python3 ./devscripts/install_deps.py --include test
|
||||
run: python ./devscripts/install_deps.py --only-optional-groups --include-group test
|
||||
- name: Run tests
|
||||
timeout-minutes: 15
|
||||
run: |
|
||||
python3 -m yt_dlp -v || true
|
||||
python3 ./devscripts/run_tests.py core
|
||||
python3 ./devscripts/run_tests.py --pytest-args '--reruns 2 --reruns-delay 3.0' core
|
||||
check:
|
||||
name: Code check
|
||||
if: "!contains(github.event.head_commit.message, 'ci skip all')"
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: '3.8'
|
||||
python-version: '3.10'
|
||||
- name: Install dev dependencies
|
||||
run: python3 ./devscripts/install_deps.py -o --include static-analysis
|
||||
run: python ./devscripts/install_deps.py --only-optional-groups --include-group static-analysis
|
||||
- name: Make lazy extractors
|
||||
run: python3 ./devscripts/make_lazy_extractors.py
|
||||
run: python ./devscripts/make_lazy_extractors.py
|
||||
- name: Run ruff
|
||||
run: ruff check --output-format github .
|
||||
- name: Run autopep8
|
||||
run: autopep8 --diff .
|
||||
- name: Check file mode
|
||||
run: git ls-files --format="%(objectmode) %(path)" yt_dlp/ | ( ! grep -v "^100644" )
|
||||
|
|
|
|||
28
.github/workflows/release-master.yml
vendored
|
|
@ -6,10 +6,12 @@ on:
|
|||
paths:
|
||||
- "yt_dlp/**.py"
|
||||
- "!yt_dlp/version.py"
|
||||
- "bundle/*.py"
|
||||
- "bundle/**"
|
||||
- "pyproject.toml"
|
||||
- "Makefile"
|
||||
- ".github/workflows/build.yml"
|
||||
- ".github/workflows/release.yml"
|
||||
- ".github/workflows/release-master.yml"
|
||||
concurrency:
|
||||
group: release-master
|
||||
permissions:
|
||||
|
|
@ -17,14 +19,30 @@ permissions:
|
|||
|
||||
jobs:
|
||||
release:
|
||||
if: vars.BUILD_MASTER != ''
|
||||
if: vars.BUILD_MASTER
|
||||
uses: ./.github/workflows/release.yml
|
||||
with:
|
||||
prerelease: true
|
||||
source: master
|
||||
source: ${{ (github.repository != 'yt-dlp/yt-dlp' && vars.MASTER_ARCHIVE_REPO) || 'master' }}
|
||||
target: 'master'
|
||||
permissions:
|
||||
contents: write
|
||||
packages: write # For package cache
|
||||
actions: write # For cleaning up cache
|
||||
id-token: write # mandatory for trusted publishing
|
||||
secrets: inherit
|
||||
|
||||
publish_pypi:
|
||||
needs: [release]
|
||||
if: vars.MASTER_PYPI_PROJECT
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
id-token: write # mandatory for trusted publishing
|
||||
steps:
|
||||
- name: Download artifacts
|
||||
uses: actions/download-artifact@v5
|
||||
with:
|
||||
path: dist
|
||||
name: build-pypi
|
||||
- name: Publish to PyPI
|
||||
uses: pypa/gh-action-pypi-publish@release/v1
|
||||
with:
|
||||
verbose: true
|
||||
|
|
|
|||
30
.github/workflows/release-nightly.yml
vendored
|
|
@ -7,12 +7,12 @@ permissions:
|
|||
|
||||
jobs:
|
||||
check_nightly:
|
||||
if: vars.BUILD_NIGHTLY != ''
|
||||
if: vars.BUILD_NIGHTLY
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
commit: ${{ steps.check_for_new_commits.outputs.commit }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Check for new commits
|
||||
|
|
@ -22,9 +22,13 @@ jobs:
|
|||
"yt_dlp/*.py"
|
||||
':!yt_dlp/version.py'
|
||||
"bundle/*.py"
|
||||
"bundle/docker/compose.yml"
|
||||
"bundle/docker/linux/*"
|
||||
"pyproject.toml"
|
||||
"Makefile"
|
||||
".github/workflows/build.yml"
|
||||
".github/workflows/release.yml"
|
||||
".github/workflows/release-nightly.yml"
|
||||
)
|
||||
echo "commit=$(git log --format=%H -1 --since="24 hours ago" -- "${relevant_files[@]}")" | tee "$GITHUB_OUTPUT"
|
||||
|
||||
|
|
@ -34,10 +38,26 @@ jobs:
|
|||
uses: ./.github/workflows/release.yml
|
||||
with:
|
||||
prerelease: true
|
||||
source: nightly
|
||||
source: ${{ (github.repository != 'yt-dlp/yt-dlp' && vars.NIGHTLY_ARCHIVE_REPO) || 'nightly' }}
|
||||
target: 'nightly'
|
||||
permissions:
|
||||
contents: write
|
||||
packages: write # For package cache
|
||||
actions: write # For cleaning up cache
|
||||
id-token: write # mandatory for trusted publishing
|
||||
secrets: inherit
|
||||
|
||||
publish_pypi:
|
||||
needs: [release]
|
||||
if: vars.NIGHTLY_PYPI_PROJECT
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
id-token: write # mandatory for trusted publishing
|
||||
steps:
|
||||
- name: Download artifacts
|
||||
uses: actions/download-artifact@v5
|
||||
with:
|
||||
path: dist
|
||||
name: build-pypi
|
||||
- name: Publish to PyPI
|
||||
uses: pypa/gh-action-pypi-publish@release/v1
|
||||
with:
|
||||
verbose: true
|
||||
|
|
|
|||
308
.github/workflows/release.yml
vendored
|
|
@ -2,10 +2,6 @@ name: Release
|
|||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
prerelease:
|
||||
required: false
|
||||
default: true
|
||||
type: boolean
|
||||
source:
|
||||
required: false
|
||||
default: ''
|
||||
|
|
@ -18,6 +14,14 @@ on:
|
|||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
linux_armv7l:
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
prerelease:
|
||||
required: false
|
||||
default: true
|
||||
type: boolean
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
source:
|
||||
|
|
@ -43,6 +47,10 @@ on:
|
|||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
linux_armv7l:
|
||||
description: Include linux_armv7l
|
||||
default: true
|
||||
type: boolean
|
||||
prerelease:
|
||||
description: Pre-release
|
||||
default: false
|
||||
|
|
@ -67,145 +75,67 @@ jobs:
|
|||
head_sha: ${{ steps.get_target.outputs.head_sha }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- uses: actions/setup-python@v5
|
||||
- uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: "3.10"
|
||||
python-version: "3.10" # Keep this in sync with test-workflows.yml
|
||||
|
||||
- name: Process inputs
|
||||
id: process_inputs
|
||||
env:
|
||||
INPUTS: ${{ toJSON(inputs) }}
|
||||
run: |
|
||||
cat << EOF
|
||||
::group::Inputs
|
||||
prerelease=${{ inputs.prerelease }}
|
||||
source=${{ inputs.source }}
|
||||
target=${{ inputs.target }}
|
||||
version=${{ inputs.version }}
|
||||
::endgroup::
|
||||
EOF
|
||||
IFS='@' read -r source_repo source_tag <<<"${{ inputs.source }}"
|
||||
IFS='@' read -r target_repo target_tag <<<"${{ inputs.target }}"
|
||||
cat << EOF >> "$GITHUB_OUTPUT"
|
||||
source_repo=${source_repo}
|
||||
source_tag=${source_tag}
|
||||
target_repo=${target_repo}
|
||||
target_tag=${target_tag}
|
||||
EOF
|
||||
python -m devscripts.setup_variables process_inputs
|
||||
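As a point of reference, the `source` and `target` inputs parsed above follow a `repo@tag` convention (e.g. `yt-dlp/yt-dlp@2023.03.04`, or a bare channel name like `nightly`). A minimal sketch of that split, assuming this is roughly what `devscripts/setup_variables.py` does with the raw inputs (`split_input` is a hypothetical helper):

```
def split_input(value):
    """Split a 'repo@tag' input; the tag part is optional."""
    repo, sep, tag = value.partition('@')
    return repo, (tag if sep else None)


print(split_input('yt-dlp/yt-dlp@2023.03.04'))  # ('yt-dlp/yt-dlp', '2023.03.04')
print(split_input('nightly'))                   # ('nightly', None)
```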
|
||||
- name: Setup variables
|
||||
id: setup_variables
|
||||
env:
|
||||
source_repo: ${{ steps.process_inputs.outputs.source_repo }}
|
||||
source_tag: ${{ steps.process_inputs.outputs.source_tag }}
|
||||
target_repo: ${{ steps.process_inputs.outputs.target_repo }}
|
||||
target_tag: ${{ steps.process_inputs.outputs.target_tag }}
|
||||
INPUTS: ${{ toJSON(inputs) }}
|
||||
PROCESSED: ${{ toJSON(steps.process_inputs.outputs) }}
|
||||
REPOSITORY: ${{ github.repository }}
|
||||
PUSH_VERSION_COMMIT: ${{ vars.PUSH_VERSION_COMMIT }}
|
||||
PYPI_PROJECT: ${{ vars.PYPI_PROJECT }}
|
||||
SOURCE_PYPI_PROJECT: ${{ vars[format('{0}_pypi_project', steps.process_inputs.outputs.source_repo)] }}
|
||||
SOURCE_PYPI_SUFFIX: ${{ vars[format('{0}_pypi_suffix', steps.process_inputs.outputs.source_repo)] }}
|
||||
TARGET_PYPI_PROJECT: ${{ vars[format('{0}_pypi_project', steps.process_inputs.outputs.target_repo)] }}
|
||||
TARGET_PYPI_SUFFIX: ${{ vars[format('{0}_pypi_suffix', steps.process_inputs.outputs.target_repo)] }}
|
||||
SOURCE_ARCHIVE_REPO: ${{ vars[format('{0}_archive_repo', steps.process_inputs.outputs.source_repo)] }}
|
||||
TARGET_ARCHIVE_REPO: ${{ vars[format('{0}_archive_repo', steps.process_inputs.outputs.target_repo)] }}
|
||||
HAS_SOURCE_ARCHIVE_REPO_TOKEN: ${{ !!secrets[format('{0}_archive_repo_token', steps.process_inputs.outputs.source_repo)] }}
|
||||
HAS_TARGET_ARCHIVE_REPO_TOKEN: ${{ !!secrets[format('{0}_archive_repo_token', steps.process_inputs.outputs.target_repo)] }}
|
||||
HAS_ARCHIVE_REPO_TOKEN: ${{ !!secrets.ARCHIVE_REPO_TOKEN }}
|
||||
run: |
|
||||
# unholy bash monstrosity (sincere apologies)
|
||||
fallback_token () {
|
||||
if ${{ !secrets.ARCHIVE_REPO_TOKEN }}; then
|
||||
echo "::error::Repository access secret ${target_repo_token^^} not found"
|
||||
exit 1
|
||||
fi
|
||||
target_repo_token=ARCHIVE_REPO_TOKEN
|
||||
return 0
|
||||
}
|
||||
python -m devscripts.setup_variables
|
||||
|
||||
source_is_channel=0
|
||||
[[ "${source_repo}" == 'stable' ]] && source_repo='yt-dlp/yt-dlp'
|
||||
if [[ -z "${source_repo}" ]]; then
|
||||
source_repo='${{ github.repository }}'
|
||||
elif [[ '${{ vars[format('{0}_archive_repo', env.source_repo)] }}' ]]; then
|
||||
source_is_channel=1
|
||||
source_channel='${{ vars[format('{0}_archive_repo', env.source_repo)] }}'
|
||||
elif [[ -z "${source_tag}" && "${source_repo}" != */* ]]; then
|
||||
source_tag="${source_repo}"
|
||||
source_repo='${{ github.repository }}'
|
||||
fi
|
||||
resolved_source="${source_repo}"
|
||||
if [[ "${source_tag}" ]]; then
|
||||
resolved_source="${resolved_source}@${source_tag}"
|
||||
elif [[ "${source_repo}" == 'yt-dlp/yt-dlp' ]]; then
|
||||
resolved_source='stable'
|
||||
fi
|
||||
|
||||
revision="${{ (inputs.prerelease || !vars.PUSH_VERSION_COMMIT) && '$(date -u +"%H%M%S")' || '' }}"
|
||||
version="$(
|
||||
python devscripts/update-version.py \
|
||||
-c "${resolved_source}" -r "${{ github.repository }}" ${{ inputs.version || '$revision' }} | \
|
||||
grep -Po "version=\K\d+\.\d+\.\d+(\.\d+)?")"
|
||||
|
||||
if [[ "${target_repo}" ]]; then
|
||||
if [[ -z "${target_tag}" ]]; then
|
||||
if [[ '${{ vars[format('{0}_archive_repo', env.target_repo)] }}' ]]; then
|
||||
target_tag="${source_tag:-${version}}"
|
||||
else
|
||||
target_tag="${target_repo}"
|
||||
target_repo='${{ github.repository }}'
|
||||
fi
|
||||
fi
|
||||
if [[ "${target_repo}" != '${{ github.repository}}' ]]; then
|
||||
target_repo='${{ vars[format('{0}_archive_repo', env.target_repo)] }}'
|
||||
target_repo_token='${{ env.target_repo }}_archive_repo_token'
|
||||
${{ !!secrets[format('{0}_archive_repo_token', env.target_repo)] }} || fallback_token
|
||||
pypi_project='${{ vars[format('{0}_pypi_project', env.target_repo)] }}'
|
||||
pypi_suffix='${{ vars[format('{0}_pypi_suffix', env.target_repo)] }}'
|
||||
fi
|
||||
else
|
||||
target_tag="${source_tag:-${version}}"
|
||||
if ((source_is_channel)); then
|
||||
target_repo="${source_channel}"
|
||||
target_repo_token='${{ env.source_repo }}_archive_repo_token'
|
||||
${{ !!secrets[format('{0}_archive_repo_token', env.source_repo)] }} || fallback_token
|
||||
pypi_project='${{ vars[format('{0}_pypi_project', env.source_repo)] }}'
|
||||
pypi_suffix='${{ vars[format('{0}_pypi_suffix', env.source_repo)] }}'
|
||||
else
|
||||
target_repo='${{ github.repository }}'
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ "${target_repo}" == '${{ github.repository }}' ]] && ${{ !inputs.prerelease }}; then
|
||||
pypi_project='${{ vars.PYPI_PROJECT }}'
|
||||
fi
|
||||
|
||||
echo "::group::Output variables"
|
||||
cat << EOF | tee -a "$GITHUB_OUTPUT"
|
||||
channel=${resolved_source}
|
||||
version=${version}
|
||||
target_repo=${target_repo}
|
||||
target_repo_token=${target_repo_token}
|
||||
target_tag=${target_tag}
|
||||
pypi_project=${pypi_project}
|
||||
pypi_suffix=${pypi_suffix}
|
||||
EOF
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Update documentation
|
||||
- name: Update version & documentation
|
||||
env:
|
||||
version: ${{ steps.setup_variables.outputs.version }}
|
||||
target_repo: ${{ steps.setup_variables.outputs.target_repo }}
|
||||
if: |
|
||||
!inputs.prerelease && env.target_repo == github.repository
|
||||
CHANNEL: ${{ steps.setup_variables.outputs.channel }}
|
||||
# Use base repo since this could be committed; build jobs will call this again with true origin
|
||||
REPOSITORY: ${{ github.repository }}
|
||||
VERSION: ${{ steps.setup_variables.outputs.version }}
|
||||
run: |
|
||||
python devscripts/update-version.py -c "${CHANNEL}" -r "${REPOSITORY}" "${VERSION}"
|
||||
python devscripts/update_changelog.py -vv
|
||||
make doc
|
||||
|
||||
- name: Push to release
|
||||
id: push_release
|
||||
env:
|
||||
version: ${{ steps.setup_variables.outputs.version }}
|
||||
target_repo: ${{ steps.setup_variables.outputs.target_repo }}
|
||||
VERSION: ${{ steps.setup_variables.outputs.version }}
|
||||
GITHUB_EVENT_SENDER_LOGIN: ${{ github.event.sender.login }}
|
||||
GITHUB_EVENT_REF: ${{ github.event.ref }}
|
||||
if: |
|
||||
!inputs.prerelease && env.target_repo == github.repository
|
||||
!inputs.prerelease && steps.setup_variables.outputs.target_repo == github.repository
|
||||
run: |
|
||||
git config --global user.name "github-actions[bot]"
|
||||
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
|
||||
git add -u
|
||||
git commit -m "Release ${{ env.version }}" \
|
||||
-m "Created by: ${{ github.event.sender.login }}" -m ":ci skip all :ci run dl"
|
||||
git push origin --force ${{ github.event.ref }}:release
|
||||
git commit -m "Release ${VERSION}" \
|
||||
-m "Created by: ${GITHUB_EVENT_SENDER_LOGIN}" -m ":ci skip all"
|
||||
git push origin --force "${GITHUB_EVENT_REF}:release"
|
||||
|
||||
- name: Get target commitish
|
||||
id: get_target
|
||||
|
|
@ -214,10 +144,10 @@ jobs:
|
|||
|
||||
- name: Update master
|
||||
env:
|
||||
target_repo: ${{ steps.setup_variables.outputs.target_repo }}
|
||||
GITHUB_EVENT_REF: ${{ github.event.ref }}
|
||||
if: |
|
||||
vars.PUSH_VERSION_COMMIT != '' && !inputs.prerelease && env.target_repo == github.repository
|
||||
run: git push origin ${{ github.event.ref }}
|
||||
vars.PUSH_VERSION_COMMIT && !inputs.prerelease && steps.setup_variables.outputs.target_repo == github.repository
|
||||
run: git push origin "${GITHUB_EVENT_REF}"
|
||||
|
||||
build:
|
||||
needs: prepare
|
||||
|
|
@ -226,10 +156,9 @@ jobs:
|
|||
version: ${{ needs.prepare.outputs.version }}
|
||||
channel: ${{ needs.prepare.outputs.channel }}
|
||||
origin: ${{ needs.prepare.outputs.target_repo }}
|
||||
linux_armv7l: ${{ inputs.linux_armv7l }}
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write # For package cache
|
||||
actions: write # For cleaning up cache
|
||||
secrets:
|
||||
GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
|
||||
|
||||
|
|
@ -241,30 +170,30 @@ jobs:
|
|||
id-token: write # mandatory for trusted publishing
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@v5
|
||||
- uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: "3.10"
|
||||
|
||||
- name: Install Requirements
|
||||
run: |
|
||||
sudo apt -y install pandoc man
|
||||
python devscripts/install_deps.py -o --include build
|
||||
python devscripts/install_deps.py --only-optional-groups --include-group build
|
||||
|
||||
- name: Prepare
|
||||
env:
|
||||
version: ${{ needs.prepare.outputs.version }}
|
||||
suffix: ${{ needs.prepare.outputs.pypi_suffix }}
|
||||
channel: ${{ needs.prepare.outputs.channel }}
|
||||
target_repo: ${{ needs.prepare.outputs.target_repo }}
|
||||
pypi_project: ${{ needs.prepare.outputs.pypi_project }}
|
||||
VERSION: ${{ needs.prepare.outputs.version }}
|
||||
SUFFIX: ${{ needs.prepare.outputs.pypi_suffix }}
|
||||
CHANNEL: ${{ needs.prepare.outputs.channel }}
|
||||
TARGET_REPO: ${{ needs.prepare.outputs.target_repo }}
|
||||
PYPI_PROJECT: ${{ needs.prepare.outputs.pypi_project }}
|
||||
run: |
|
||||
python devscripts/update-version.py -c "${{ env.channel }}" -r "${{ env.target_repo }}" -s "${{ env.suffix }}" "${{ env.version }}"
|
||||
python devscripts/update-version.py -c "${CHANNEL}" -r "${TARGET_REPO}" -s "${SUFFIX}" "${VERSION}"
|
||||
python devscripts/update_changelog.py -vv
|
||||
python devscripts/make_lazy_extractors.py
|
||||
sed -i -E '0,/(name = ")[^"]+(")/s//\1${{ env.pypi_project }}\2/' pyproject.toml
|
||||
sed -i -E '0,/(name = ")[^"]+(")/s//\1'"${PYPI_PROJECT}"'\2/' pyproject.toml
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
|
|
@ -278,7 +207,17 @@ jobs:
|
|||
make clean-cache
|
||||
python -m build --no-isolation .
|
||||
|
||||
- name: Upload artifacts
|
||||
if: github.event_name != 'workflow_dispatch'
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: build-pypi
|
||||
path: |
|
||||
dist/*
|
||||
compression-level: 0
|
||||
|
||||
- name: Publish to PyPI
|
||||
if: github.event_name == 'workflow_dispatch'
|
||||
uses: pypa/gh-action-pypi-publish@release/v1
|
||||
with:
|
||||
verbose: true
|
||||
|
|
@ -288,97 +227,100 @@ jobs:
|
|||
permissions:
|
||||
contents: write
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
env:
|
||||
TARGET_REPO: ${{ needs.prepare.outputs.target_repo }}
|
||||
TARGET_TAG: ${{ needs.prepare.outputs.target_tag }}
|
||||
VERSION: ${{ needs.prepare.outputs.version }}
|
||||
HEAD_SHA: ${{ needs.prepare.outputs.head_sha }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/download-artifact@v4
|
||||
- uses: actions/download-artifact@v5
|
||||
with:
|
||||
path: artifact
|
||||
pattern: build-*
|
||||
merge-multiple: true
|
||||
- uses: actions/setup-python@v5
|
||||
- uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: "3.10"
|
||||
|
||||
- name: Generate release notes
|
||||
env:
|
||||
head_sha: ${{ needs.prepare.outputs.head_sha }}
|
||||
target_repo: ${{ needs.prepare.outputs.target_repo }}
|
||||
target_tag: ${{ needs.prepare.outputs.target_tag }}
|
||||
REPOSITORY: ${{ github.repository }}
|
||||
BASE_REPO: yt-dlp/yt-dlp
|
||||
NIGHTLY_REPO: yt-dlp/yt-dlp-nightly-builds
|
||||
MASTER_REPO: yt-dlp/yt-dlp-master-builds
|
||||
DOCS_PATH: ${{ env.TARGET_REPO == github.repository && format('/tree/{0}', env.TARGET_TAG) || '' }}
|
||||
run: |
|
||||
printf '%s' \
|
||||
'[]' \
|
||||
'(https://github.com/${{ github.repository }}#installation "Installation instructions") ' \
|
||||
'[]' \
|
||||
'(https://discord.gg/H5MNcFW63r "Discord") ' \
|
||||
'[]' \
|
||||
'(https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators "Donate") ' \
|
||||
'[]' \
|
||||
'(https://github.com/${{ github.repository }}' \
|
||||
'${{ env.target_repo == github.repository && format('/tree/{0}', env.target_tag) || '' }}#readme "Documentation") ' \
|
||||
${{ env.target_repo == 'yt-dlp/yt-dlp' && '\
|
||||
"[]" \
|
||||
"(https://github.com/${REPOSITORY}#installation \"Installation instructions\") " \
|
||||
"[]" \
|
||||
"(https://discord.gg/H5MNcFW63r \"Discord\") " \
|
||||
"[]" \
|
||||
"(https://github.com/${BASE_REPO}/blob/master/Maintainers.md#maintainers \"Donate\") " \
|
||||
"[]" \
|
||||
"(https://github.com/${REPOSITORY}${DOCS_PATH}#readme \"Documentation\") " > ./RELEASE_NOTES
|
||||
if [[ "${TARGET_REPO}" == "${BASE_REPO}" ]]; then
|
||||
printf '%s' \
|
||||
"[]" \
|
||||
"(https://github.com/yt-dlp/yt-dlp-nightly-builds/releases/latest \"Nightly builds\") " \
|
||||
"(https://github.com/${NIGHTLY_REPO}/releases/latest \"Nightly builds\") " \
|
||||
"[]" \
|
||||
"(https://github.com/yt-dlp/yt-dlp-master-builds/releases/latest \"Master builds\")"' || '' }} > ./RELEASE_NOTES
|
||||
printf '\n\n' >> ./RELEASE_NOTES
|
||||
cat >> ./RELEASE_NOTES << EOF
|
||||
#### A description of the various files are in the [README](https://github.com/${{ github.repository }}#release-files)
|
||||
---
|
||||
$(python ./devscripts/make_changelog.py -vv --collapsible)
|
||||
EOF
|
||||
"(https://github.com/${MASTER_REPO}/releases/latest \"Master builds\")" >> ./RELEASE_NOTES
|
||||
fi
|
||||
printf '\n\n%s\n\n%s%s%s\n\n---\n' \
|
||||
"#### A description of the various files is in the [README](https://github.com/${REPOSITORY}#release-files)" \
|
||||
"The zipimport Unix executable contains code licensed under ISC and MIT. " \
|
||||
"The PyInstaller-bundled executables are subject to these and other licenses, all of which are compiled in " \
|
||||
"[THIRD_PARTY_LICENSES.txt](https://github.com/${BASE_REPO}/blob/${HEAD_SHA}/THIRD_PARTY_LICENSES.txt)" >> ./RELEASE_NOTES
|
||||
python ./devscripts/make_changelog.py -vv --collapsible >> ./RELEASE_NOTES
|
||||
printf '%s\n\n' '**This is a pre-release build**' >> ./PRERELEASE_NOTES
|
||||
cat ./RELEASE_NOTES >> ./PRERELEASE_NOTES
|
||||
printf '%s\n\n' 'Generated from: https://github.com/${{ github.repository }}/commit/${{ env.head_sha }}' >> ./ARCHIVE_NOTES
|
||||
printf '%s\n\n' "Generated from: https://github.com/${REPOSITORY}/commit/${HEAD_SHA}" >> ./ARCHIVE_NOTES
|
||||
cat ./RELEASE_NOTES >> ./ARCHIVE_NOTES
|
||||
|
||||
- name: Publish to archive repo
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets[needs.prepare.outputs.target_repo_token] }}
|
||||
GH_REPO: ${{ needs.prepare.outputs.target_repo }}
|
||||
version: ${{ needs.prepare.outputs.version }}
|
||||
channel: ${{ needs.prepare.outputs.channel }}
|
||||
TITLE_PREFIX: ${{ startswith(env.TARGET_REPO, 'yt-dlp/') && 'yt-dlp ' || '' }}
|
||||
TITLE: ${{ inputs.target != env.TARGET_REPO && inputs.target || needs.prepare.outputs.channel }}
|
||||
if: |
|
||||
inputs.prerelease && env.GH_TOKEN != '' && env.GH_REPO != '' && env.GH_REPO != github.repository
|
||||
inputs.prerelease && env.GH_TOKEN && env.GH_REPO && env.GH_REPO != github.repository
|
||||
run: |
|
||||
title="${{ startswith(env.GH_REPO, 'yt-dlp/') && 'yt-dlp ' || '' }}${{ env.channel }}"
|
||||
gh release create \
|
||||
--notes-file ARCHIVE_NOTES \
|
||||
--title "${title} ${{ env.version }}" \
|
||||
${{ env.version }} \
|
||||
--title "${TITLE_PREFIX}${TITLE} ${VERSION}" \
|
||||
"${VERSION}" \
|
||||
artifact/*
|
||||
|
||||
- name: Prune old release
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
version: ${{ needs.prepare.outputs.version }}
|
||||
target_repo: ${{ needs.prepare.outputs.target_repo }}
|
||||
target_tag: ${{ needs.prepare.outputs.target_tag }}
|
||||
if: |
|
||||
env.target_repo == github.repository && env.target_tag != env.version
|
||||
env.TARGET_REPO == github.repository && env.TARGET_TAG != env.VERSION
|
||||
run: |
|
||||
gh release delete --yes --cleanup-tag "${{ env.target_tag }}" || true
|
||||
git tag --delete "${{ env.target_tag }}" || true
|
||||
gh release delete --yes --cleanup-tag "${TARGET_TAG}" || true
|
||||
git tag --delete "${TARGET_TAG}" || true
|
||||
sleep 5 # Enough time to cover deletion race condition
|
||||
|
||||
- name: Publish release
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
version: ${{ needs.prepare.outputs.version }}
|
||||
target_repo: ${{ needs.prepare.outputs.target_repo }}
|
||||
target_tag: ${{ needs.prepare.outputs.target_tag }}
|
||||
head_sha: ${{ needs.prepare.outputs.head_sha }}
|
||||
NOTES_FILE: ${{ inputs.prerelease && 'PRERELEASE_NOTES' || 'RELEASE_NOTES' }}
|
||||
TITLE_PREFIX: ${{ github.repository == 'yt-dlp/yt-dlp' && 'yt-dlp ' || '' }}
|
||||
TITLE: ${{ env.TARGET_TAG != env.VERSION && format('{0} ', env.TARGET_TAG) || '' }}
|
||||
PRERELEASE: ${{ inputs.prerelease && '1' || '0' }}
|
||||
if: |
|
||||
env.target_repo == github.repository
|
||||
env.TARGET_REPO == github.repository
|
||||
run: |
|
||||
title="${{ github.repository == 'yt-dlp/yt-dlp' && 'yt-dlp ' || '' }}"
|
||||
title+="${{ env.target_tag != env.version && format('{0} ', env.target_tag) || '' }}"
|
||||
gh release create \
|
||||
--notes-file ${{ inputs.prerelease && 'PRERELEASE_NOTES' || 'RELEASE_NOTES' }} \
|
||||
--target ${{ env.head_sha }} \
|
||||
--title "${title}${{ env.version }}" \
|
||||
${{ inputs.prerelease && '--prerelease' || '' }} \
|
||||
${{ env.target_tag }} \
|
||||
artifact/*
|
||||
gh_options=(
|
||||
--notes-file "${NOTES_FILE}"
|
||||
--target "${HEAD_SHA}"
|
||||
--title "${TITLE_PREFIX}${TITLE}${VERSION}"
|
||||
)
|
||||
if ((PRERELEASE)); then
|
||||
gh_options+=(--prerelease)
|
||||
fi
|
||||
gh release create "${gh_options[@]}" "${TARGET_TAG}" artifact/*
|
||||
|
|
|
|||
17
.github/workflows/sanitize-comment.yml
vendored
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
name: Sanitize comment
|
||||
|
||||
on:
|
||||
issue_comment:
|
||||
types: [created, edited]
|
||||
|
||||
permissions:
|
||||
issues: write
|
||||
|
||||
jobs:
|
||||
sanitize-comment:
|
||||
name: Sanitize comment
|
||||
if: vars.SANITIZE_COMMENT && !github.event.issue.pull_request
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Sanitize comment
|
||||
uses: yt-dlp/sanitize-comment@v1
|
||||
52
.github/workflows/test-workflows.yml
vendored
Normal file
|
|
@ -0,0 +1,52 @@
|
|||
name: Test and lint workflows
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- .github/workflows/*
|
||||
- bundle/docker/linux/*.sh
|
||||
- devscripts/setup_variables.py
|
||||
- devscripts/setup_variables_tests.py
|
||||
- devscripts/utils.py
|
||||
pull_request:
|
||||
paths:
|
||||
- .github/workflows/*
|
||||
- bundle/docker/linux/*.sh
|
||||
- devscripts/setup_variables.py
|
||||
- devscripts/setup_variables_tests.py
|
||||
- devscripts/utils.py
|
||||
permissions:
|
||||
contents: read
|
||||
env:
|
||||
ACTIONLINT_VERSION: "1.7.8"
|
||||
ACTIONLINT_SHA256SUM: be92c2652ab7b6d08425428797ceabeb16e31a781c07bc388456b4e592f3e36a
|
||||
ACTIONLINT_REPO: https://github.com/rhysd/actionlint
|
||||
|
||||
jobs:
|
||||
check:
|
||||
name: Check workflows
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: "3.10" # Keep this in sync with release.yml's prepare job
|
||||
- name: Install requirements
|
||||
env:
|
||||
ACTIONLINT_TARBALL: ${{ format('actionlint_{0}_linux_amd64.tar.gz', env.ACTIONLINT_VERSION) }}
|
||||
run: |
|
||||
python -m devscripts.install_deps --only-optional-groups --include-group test
|
||||
sudo apt -y install shellcheck
|
||||
python -m pip install -U pyflakes
|
||||
curl -LO "${ACTIONLINT_REPO}/releases/download/v${ACTIONLINT_VERSION}/${ACTIONLINT_TARBALL}"
|
||||
printf '%s %s' "${ACTIONLINT_SHA256SUM}" "${ACTIONLINT_TARBALL}" | sha256sum -c -
|
||||
tar xvzf "${ACTIONLINT_TARBALL}" actionlint
|
||||
chmod +x actionlint
|
||||
- name: Run actionlint
|
||||
run: |
|
||||
./actionlint -color
|
||||
- name: Check Docker shell scripts
|
||||
run: |
|
||||
shellcheck bundle/docker/linux/*.sh
|
||||
- name: Test GHA devscripts
|
||||
run: |
|
||||
pytest -Werror --tb=short --color=yes devscripts/setup_variables_tests.py
|
||||
9
.gitignore
vendored
|
|
@ -51,7 +51,6 @@ cookies
|
*.srt
*.ssa
*.swf
*.swp
*.tt
*.ttml
*.url

@ -93,6 +92,7 @@ updates_key.pem
*.class
*.isorted
*.stackdump
uv.lock

# Generated
AUTHORS

@ -105,6 +105,9 @@ README.txt
*.zsh
*.spec
test/testdata/sigs/player-*.js
test/testdata/thumbnails/empty.webp
test/testdata/thumbnails/foo\ %d\ bar/foo_%d.*
.ejs-*

# Binary
/youtube-dl

@ -119,6 +122,7 @@ yt-dlp.zip
.vscode
*.sublime-*
*.code-workspace
*.swp

# Lazy extractors
*/extractor/lazy_extractors.py

@ -126,3 +130,6 @@ yt-dlp.zip
# Plugins
ytdlp_plugins/
yt-dlp-plugins

# Packages
yt_dlp_ejs/
CONTRIBUTING.md

@ -12,6 +12,7 @@
- [Is your question about yt-dlp?](#is-your-question-about-yt-dlp)
- [Are you willing to share account details if needed?](#are-you-willing-to-share-account-details-if-needed)
- [Is the website primarily used for piracy](#is-the-website-primarily-used-for-piracy)
- [AUTOMATED CONTRIBUTIONS (AI / LLM) POLICY](#automated-contributions-ai--llm-policy)
- [DEVELOPER INSTRUCTIONS](#developer-instructions)
- [Adding new feature or making overarching changes](#adding-new-feature-or-making-overarching-changes)
- [Adding support for a new site](#adding-support-for-a-new-site)
@ -37,14 +38,18 @@ Bugs and suggestions should be reported at: [yt-dlp/yt-dlp/issues](https://githu
**Please include the full output of yt-dlp when run with `-vU`**, i.e. **add** `-vU` flag to **your command line**, copy the **whole** output and post it in the issue body wrapped in \`\`\` for better formatting. It should look similar to this:
```
$ yt-dlp -vU <your command line>
[debug] Command-line config: ['-v', 'demo.com']
[debug] Encodings: locale UTF-8, fs utf-8, out utf-8, pref UTF-8
[debug] yt-dlp version 2021.09.25 (zip)
[debug] Python version 3.8.10 (CPython 64bit) - Linux-5.4.0-74-generic-x86_64-with-glibc2.29
[debug] exe versions: ffmpeg 4.2.4, ffprobe 4.2.4
[debug] Command-line config: ['-vU', 'https://www.example.com/']
[debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
[debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
[debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
[debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
[debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
[debug] Proxy map: {}
Current Build Hash 25cc412d1d3c0725a1f2f5b7e4682f6fb40e6d15f7024e96f7afd572e9919535
yt-dlp is up to date (2021.09.25)
[debug] Request Handlers: urllib, requests, websockets, curl_cffi
[debug] Loaded 1838 extractors
[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
...
```
**Do not post screenshots of verbose logs; only plain text is acceptable.**
@ -122,7 +127,7 @@ By sharing an account with anyone, you agree to bear all risks associated with i
While these steps won't necessarily ensure that no misuse of the account takes place, these are still some good practices to follow.

- Look for people with `Member` (maintainers of the project) or `Contributor` (people who have previously contributed code) tag on their messages.
- Change the password before sharing the account to something random (use [this](https://passwordsgenerator.net/) if you don't have a random password generator).
- Change the password before sharing the account to something random.
- Change the password after receiving the account back.

### Is the website primarily used for piracy?
@ -130,6 +135,17 @@ While these steps won't necessarily ensure that no misuse of the account takes p
We follow [youtube-dl's policy](https://github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free) to not support services that is primarily used for infringing copyright. Additionally, it has been decided to not to support porn sites that specialize in fakes. We also cannot support any service that serves only [DRM protected content](https://en.wikipedia.org/wiki/Digital_rights_management).


# AUTOMATED CONTRIBUTIONS (AI / LLM) POLICY

Please refrain from submitting issues or pull requests that have been generated by an LLM or other fully-automated tools. Any submission that is in violation of this policy will be closed, and the submitter may be blocked from this repository without warning.

If you submit an issue, you need to understand what your issue description is saying. You need to be able to answer questions about your bug report or feature request. Using an AI tool to *proofread* your issue/comment text is acceptable. Using an AI tool to *write* your issue/comment text is unacceptable.

If you submit a pull request, you need to understand what every line of code you've changed does. If you can't explain why your PR is doing something, then do not submit it. Using an AI tool to generate entire lines of code is unacceptable.

The rationale behind this policy is that automated contributions are a waste of the maintainers' time. Humans spend their time and brainpower reviewing every submission. Issues or pull requests generated by automation tools create an imbalance of effort between the submitter and the reviewer. Nobody learns anything when a maintainer reviews code written by an LLM.

Additionally, AI-generated code conflicts with this project's license (Unlicense), since you cannot truly release code into the public domain if you didn't author it yourself.


# DEVELOPER INSTRUCTIONS
@ -161,7 +177,7 @@ While it is strongly recommended to use `hatch` for yt-dlp development, if you a

```shell
# To only install development dependencies:
$ python -m devscripts.install_deps --include dev
$ python -m devscripts.install_deps --include-group dev

# Or, for an editable install plus dev dependencies:
$ python -m pip install -e ".[default,dev]"
@ -233,7 +249,7 @@ After you have ensured this site is distributing its content legally, you can fo
# * MD5 checksum; start the string with 'md5:', e.g.
# 'description': 'md5:098f6bcd4621d373cade4e832627b4f6',
# * A regular expression; start the string with 're:', e.g.
# 'thumbnail': r're:^https?://.*\.jpg$',
# 'thumbnail': r're:https?://.*\.jpg$',
# * A count of elements in a list; start the string with 'count:', e.g.
# 'tags': 'count:10',
# * Any Python type, e.g.
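To make these prefixes concrete, here is a minimal, hypothetical test-metadata sketch (the site, URL and values are placeholders and not taken from any real extractor):

```python
# Hypothetical example only; the field values illustrate the 'md5:', 're:'
# and 'count:' prefixes described above.
_TESTS = [{
    'url': 'https://www.example.com/watch/42',
    'info_dict': {
        'id': '42',
        'ext': 'mp4',
        'title': 'Example video',
        # MD5 checksum of the (usually long) description text
        'description': 'md5:098f6bcd4621d373cade4e832627b4f6',
        # the extracted value must match this regular expression
        'thumbnail': r're:https?://.*\.jpg$',
        # the extracted list must contain exactly this many elements
        'tags': 'count:10',
        # a bare Python type means "any value of this type"
        'view_count': int,
    },
}]
```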
@ -268,7 +284,7 @@ After you have ensured this site is distributing its content legally, you can fo

You can use `hatch fmt` to automatically fix problems. Rules that the linter/formatter enforces should not be disabled with `# noqa` unless a maintainer requests it. The only exception allowed is for old/printf-style string formatting in GraphQL query templates (use `# noqa: UP031`).
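As a hedged illustration of that one exception, a query helper might look like the following sketch (the query template and function are made up for this example):

```python
# Hypothetical GraphQL query template; printf-style formatting is the one
# case where the UP031 rule may be suppressed, per the guideline above
_VIDEO_QUERY = '''query {
  video(id: "%s") {
    title
    duration
  }
}'''


def _build_video_query(video_id):
    return _VIDEO_QUERY % video_id  # noqa: UP031
```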
1. Make sure your code works under all [Python](https://www.python.org/) versions supported by yt-dlp, namely CPython and PyPy for Python 3.8 and above. Backward compatibility is not required for even older versions of Python.
1. Make sure your code works under all [Python](https://www.python.org/) versions supported by yt-dlp, namely CPython >=3.10 and PyPy >=3.11. Backward compatibility is not required for even older versions of Python.
1. When the tests pass, [add](https://git-scm.com/docs/git-add) the new files, [commit](https://git-scm.com/docs/git-commit) them and [push](https://git-scm.com/docs/git-push) the result, like this:

```shell
@ -302,10 +318,9 @@ Extractors are very fragile by nature since they depend on the layout of the sou

For extraction to work yt-dlp relies on metadata your extractor extracts and provides to yt-dlp expressed by an [information dictionary](yt_dlp/extractor/common.py#L119-L440) or simply *info dict*. Only the following meta fields in the *info dict* are considered mandatory for a successful extraction process by yt-dlp:

- `id` (media identifier)
- `title` (media title)
- `url` (media download URL) or `formats`

The aforementioned metafields are the critical data that the extraction does not make any sense without and if any of them fail to be extracted then the extractor is considered completely broken. While all extractors must return a `title`, they must also allow it's extraction to be non-fatal.
The aforementioned metadata fields are the critical data without which extraction does not make any sense. If any of them fail to be extracted, then the extractor is considered broken. All other metadata extraction should be completely non-fatal.

For pornographic sites, appropriate `age_limit` must also be returned.
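As a rough sketch of those mandatory fields (assuming a hypothetical site; the URL pattern and regexes are placeholders), a minimal `_real_extract` might return:

```python
from yt_dlp.extractor.common import InfoExtractor


class ExampleIE(InfoExtractor):
    # Hypothetical site; only meant to show the mandatory info-dict fields
    _VALID_URL = r'https?://(?:www\.)?example\.com/watch/(?P<id>\d+)'

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)

        return {
            'id': video_id,  # mandatory: media identifier
            # mandatory: media title, but its extraction itself stays non-fatal
            'title': self._html_extract_title(webpage) or video_id,
            # mandatory: a direct media URL (or provide 'formats' instead)
            'url': self._html_search_regex(
                r'data-video-url="([^"]+)"', webpage, 'video url'),
            # all other metadata is optional and must not abort extraction
            'description': self._og_search_description(webpage, default=None),
        }
```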
@ -765,12 +780,10 @@ view_count = int_or_none(video.get('views'))
```


# My pull request is labeled pending-fixes
## My pull request is labeled pending-fixes

The `pending-fixes` label is added when there are changes requested to a PR. When the necessary changes are made, the label should be removed. However, despite our best efforts, it may sometimes happen that the maintainer did not see the changes or forgot to remove the label. If your PR is still marked as `pending-fixes` a few days after all requested changes have been made, feel free to ping the maintainer who labeled your issue and ask them to re-review and remove the label.

# EMBEDDING YT-DLP
See [README.md#embedding-yt-dlp](README.md#embedding-yt-dlp) for instructions on how to embed yt-dlp in another Python program
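For quick reference, the basic embedding pattern is a minimal sketch like this (the URL is a placeholder; the linked README section documents the full API):

```python
import yt_dlp

# Placeholder URL; any supported URL works here
URLS = ['https://www.example.com/watch/42']

with yt_dlp.YoutubeDL({'quiet': True}) as ydl:
    ydl.download(URLS)
```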
202 CONTRIBUTORS
@ -1,9 +1,10 @@
pukkandan (owner)
shirt-dev (collaborator)
coletdjnz/colethedj (collaborator)
Ashish0804 (collaborator)
bashonly (collaborator)
Grub4K (collaborator)
shirt-dev (maintainer)
coletdjnz (maintainer)
Ashish0804 (maintainer)
bashonly (maintainer)
Grub4K (maintainer)
seproDev (maintainer)
h-h-h-h
pauldubois98
nixxo

@ -403,7 +404,6 @@ rebane2001
road-master
rohieb
sdht0
seproDev
Hill-98
LXYan2333
mushbite
|
|
@ -644,3 +644,193 @@ peisenwang
|
|||
TheZ3ro
|
||||
tippfehlr
|
||||
varunchopra
|
||||
DrakoCpp
|
||||
PatrykMis
|
||||
DinhHuy2010
|
||||
exterrestris
|
||||
harbhim
|
||||
LeSuisse
|
||||
DunnesH
|
||||
iancmy
|
||||
mokrueger
|
||||
luvyana
|
||||
szantnerb
|
||||
hugepower
|
||||
scribblemaniac
|
||||
Codenade
|
||||
Demon000
|
||||
Deukhoofd
|
||||
grqz
|
||||
hibes
|
||||
Khaoklong51
|
||||
kieraneglin
|
||||
lengzuo
|
||||
naglis
|
||||
ndyanx
|
||||
otovalek
|
||||
quad
|
||||
rakslice
|
||||
sahilsinghss73
|
||||
tony-hn
|
||||
xingchensong
|
||||
BallzCrasher
|
||||
coreywright
|
||||
eric321
|
||||
poyhen
|
||||
tetra-fox
|
||||
444995
|
||||
63427083
|
||||
allendema
|
||||
DarkZeros
|
||||
DTrombett
|
||||
imranh2
|
||||
KarboniteKream
|
||||
mikkovedru
|
||||
pktiuk
|
||||
rubyevadestaxes
|
||||
avagordon01
|
||||
CounterPillow
|
||||
JoseAngelB
|
||||
KBelmin
|
||||
kesor
|
||||
MellowKyler
|
||||
Wesley107772
|
||||
a13ssandr0
|
||||
ChocoLZS
|
||||
doe1080
|
||||
hugovdev
|
||||
jshumphrey
|
||||
julionc
|
||||
manavchaudhary1
|
||||
powergold1
|
||||
Sakura286
|
||||
SamDecrock
|
||||
stratus-ss
|
||||
subrat-lima
|
||||
gitninja1234
|
||||
jkruse
|
||||
xiaomac
|
||||
wesson09
|
||||
Crypto90
|
||||
MutantPiggieGolem1
|
||||
Sanceilaks
|
||||
Strkmn
|
||||
0x9fff00
|
||||
4ft35t
|
||||
7x11x13
|
||||
b5i
|
||||
cotko
|
||||
d3d9
|
||||
Dioarya
|
||||
finch71
|
||||
hexahigh
|
||||
InvalidUsernameException
|
||||
jixunmoe
|
||||
knackku
|
||||
krandor
|
||||
kvk-2015
|
||||
lonble
|
||||
msm595
|
||||
n10dollar
|
||||
NecroRomnt
|
||||
pjrobertson
|
||||
subsense
|
||||
test20140
|
||||
arantius
|
||||
entourage8
|
||||
lfavole
|
||||
mp3butcher
|
||||
slipinthedove
|
||||
YoshiTabletopGamer
|
||||
Arc8ne
|
||||
benfaerber
|
||||
chrisellsworth
|
||||
fries1234
|
||||
Kenshin9977
|
||||
MichaelDeBoey
|
||||
msikma
|
||||
pedro
|
||||
pferreir
|
||||
red-acid
|
||||
refack
|
||||
rysson
|
||||
somini
|
||||
thedenv
|
||||
vallovic
|
||||
arabcoders
|
||||
mireq
|
||||
mlabeeb03
|
||||
1271
|
||||
CasperMcFadden95
|
||||
Kicer86
|
||||
Kiritomo
|
||||
leeblackc
|
||||
meGAmeS1
|
||||
NeonMan
|
||||
pj47x
|
||||
troex
|
||||
WouterGordts
|
||||
baierjan
|
||||
GeoffreyFrogeye
|
||||
Pawka
|
||||
v3DJG6GL
|
||||
yozel
|
||||
brian6932
|
||||
iednod55
|
||||
maxbin123
|
||||
nullpos
|
||||
anlar
|
||||
eason1478
|
||||
ceandreasen
|
||||
chauhantirth
|
||||
helpimnotdrowning
|
||||
adamralph
|
||||
averageFOSSenjoyer
|
||||
bubo
|
||||
flanter21
|
||||
Georift
|
||||
moonshinerd
|
||||
R0hanW
|
||||
ShockedPlot7560
|
||||
swayll
|
||||
atsushi2965
|
||||
barryvan
|
||||
injust
|
||||
iribeirocampos
|
||||
rolandcrosby
|
||||
Sojiroh
|
||||
tchebb
|
||||
AzartX47
|
||||
e2dk4r
|
||||
junyilou
|
||||
PierreMesure
|
||||
Randalix
|
||||
runarmod
|
||||
gitchasing
|
||||
zakaryan2004
|
||||
cdce8p
|
||||
nicolaasjan
|
||||
willsmillie
|
||||
CasualYT31
|
||||
cecilia-sanare
|
||||
dhwz
|
||||
robin-mu
|
||||
shssoichiro
|
||||
thanhtaivtt
|
||||
uoag
|
||||
CaramelConnoisseur
|
||||
ctengel
|
||||
einstein95
|
||||
evilpie
|
||||
i3p9
|
||||
JrM2628
|
||||
krystophny
|
||||
matyb08
|
||||
pha1n0q
|
||||
PierceLBrooks
|
||||
sepro
|
||||
TheQWERTYCodr
|
||||
thomasmllt
|
||||
w4grfw
|
||||
WeidiDeng
|
||||
Zer0spectrum
|
||||
|
|
|
|||
1427 Changelog.md
File diff suppressed because it is too large

@ -1,59 +1,36 @@
# Collaborators
# Maintainers

This is a list of the collaborators of the project and their major contributions. See the [Changelog](Changelog.md) for more details.
This file lists the maintainers of yt-dlp and their major contributions. See the [Changelog](Changelog.md) for more details.

You can also find lists of all [contributors of yt-dlp](CONTRIBUTORS) and [authors of youtube-dl](https://github.com/ytdl-org/youtube-dl/blob/master/AUTHORS)

## Core Maintainers

## [pukkandan](https://github.com/pukkandan)
Core Maintainers are responsible for reviewing and merging contributions, publishing releases, and steering the overall direction of the project.

[](https://ko-fi.com/pukkandan)
[](https://github.com/sponsors/pukkandan)
**You can contact the core maintainers via `maintainers@yt-dlp.org`.**

* Owner of the fork
This is **NOT** a support channel. [Open an issue](https://github.com/yt-dlp/yt-dlp/issues/new/choose) if you need help or want to report a bug.

## [shirt](https://github.com/shirt-dev)

[](https://ko-fi.com/shirt)

* Multithreading (`-N`) and aria2c support for fragment downloads
* Support for media initialization and discontinuity in HLS
* The self-updater (`-U`)

## [coletdjnz](https://github.com/coletdjnz)
### [coletdjnz](https://github.com/coletdjnz)

[](https://github.com/sponsors/coletdjnz)

* Improved plugin architecture
* Rewrote the networking infrastructure, implemented support for `requests`
* YouTube improvements including: age-gate bypass, private playlists, multiple-clients (to avoid throttling) and a lot of under-the-hood improvements
* Added support for new websites YoutubeWebArchive, MainStreaming, PRX, nzherald, Mediaklikk, StarTV etc
* Improved/fixed support for Patreon, panopto, gfycat, itv, pbs, SouthParkDE etc
* Overhauled the networking stack and implemented support for `requests` and `curl_cffi` (`--impersonate`) HTTP clients
* Reworked the plugin architecture to support installing plugins across all yt-dlp distributions (exe, pip, etc.)
* Maintains support for YouTube
* Added and fixed support for various other sites

### [bashonly](https://github.com/bashonly)

* Rewrote and maintains the build/release workflows and the self-updater: executables, automated/nightly/master releases, `--update-to`
* Overhauled external downloader cookie handling
* Added `--cookies-from-browser` support for Firefox containers
* Overhauled and maintains support for sites like Youtube, Vimeo, Twitter, TikTok, etc
* Added support for sites like Dacast, Kick, Loom, SproutVideo, Triller, Weverse, etc

## [Ashish0804](https://github.com/Ashish0804) <sub><sup>[Inactive]</sup></sub>

[](https://ko-fi.com/ashish0804)

* Added support for new websites BiliIntl, DiscoveryPlusIndia, OlympicsReplay, PlanetMarathi, ShemarooMe, Utreon, Zee5 etc
* Added playlist/series downloads for Hotstar, ParamountPlus, Rumble, SonyLIV, Trovo, TubiTv, Voot etc
* Improved/fixed support for HiDive, HotStar, Hungama, LBRY, LinkedInLearning, Mxplayer, SonyLiv, TV2, Vimeo, VLive etc

## [bashonly](https://github.com/bashonly)

* `--update-to`, self-updater rewrite, automated/nightly/master releases
* `--cookies-from-browser` support for Firefox containers, external downloader cookie handling overhaul
* Added support for new websites like Dacast, Kick, NBCStations, Triller, VideoKen, Weverse, WrestleUniverse etc
* Improved/fixed support for Anvato, Brightcove, Reddit, SlidesLive, TikTok, Twitter, Vimeo etc

## [Grub4K](https://github.com/Grub4K)
### [Grub4K](https://github.com/Grub4K)

[](https://github.com/sponsors/Grub4K) [](https://ko-fi.com/Grub4K)

@ -63,8 +40,48 @@ You can also find lists of all [contributors of yt-dlp](CONTRIBUTORS) and [autho
* Improved/fixed/added Bundestag, crunchyroll, pr0gramm, Twitter, WrestleUniverse etc

## [sepro](https://github.com/seproDev)
### [sepro](https://github.com/seproDev)

* UX improvements: Warn when ffmpeg is missing, warn when double-clicking exe
* Code cleanup: Remove dead extractors, mark extractors as broken, enable/apply ruff rules
* Improved/fixed/added ArdMediathek, DRTV, Floatplane, MagentaMusik, Naver, Nebula, OnDemandKorea, Vbox7 etc

## Inactive Core Maintainers

### [pukkandan](https://github.com/pukkandan)

[](https://ko-fi.com/pukkandan)
[](https://github.com/sponsors/pukkandan)

* Founder of the fork
* Lead Maintainer from 2021-2024

### [shirt](https://github.com/shirt-dev)

[](https://ko-fi.com/shirt)

* Multithreading (`-N`) and aria2c support for fragment downloads
* Support for media initialization and discontinuity in HLS
* The self-updater (`-U`)

### [Ashish0804](https://github.com/Ashish0804)

[](https://ko-fi.com/ashish0804)

* Added support for new websites BiliIntl, DiscoveryPlusIndia, OlympicsReplay, PlanetMarathi, ShemarooMe, Utreon, Zee5 etc
* Added playlist/series downloads for Hotstar, ParamountPlus, Rumble, SonyLIV, Trovo, TubiTv, Voot etc
* Improved/fixed support for HiDive, HotStar, Hungama, LBRY, LinkedInLearning, Mxplayer, SonyLiv, TV2, Vimeo, VLive etc

## Triage Maintainers

Triage Maintainers are frequent contributors who can manage issues and pull requests.

- [gamer191](https://github.com/gamer191)
- [garret1317](https://github.com/garret1317)
- [pzhlkj6612](https://github.com/pzhlkj6612)
- [DTrombett](https://github.com/dtrombett)
- [doe1080](https://github.com/doe1080)
- [grqz](https://github.com/grqz)

114 Makefile
@ -1,4 +1,5 @@
|
|||
all: lazy-extractors yt-dlp doc pypi-files
|
||||
all-extra: lazy-extractors yt-dlp-extra doc pypi-files
|
||||
clean: clean-test clean-dist
|
||||
clean-all: clean clean-cache
|
||||
completions: completion-bash completion-fish completion-zsh
|
||||
|
|
@ -10,21 +11,27 @@ tar: yt-dlp.tar.gz
|
|||
# intended use: when building a source distribution,
|
||||
# make pypi-files && python3 -m build -sn .
|
||||
pypi-files: AUTHORS Changelog.md LICENSE README.md README.txt supportedsites \
|
||||
completions yt-dlp.1 pyproject.toml setup.cfg devscripts/* test/*
|
||||
completions yt-dlp.1 pyproject.toml devscripts/* test/*
|
||||
|
||||
.PHONY: all clean clean-all clean-test clean-dist clean-cache \
|
||||
completions completion-bash completion-fish completion-zsh \
|
||||
doc issuetemplates supportedsites ot offlinetest codetest test \
|
||||
tar pypi-files lazy-extractors install uninstall
|
||||
tar pypi-files lazy-extractors install uninstall \
|
||||
all-extra yt-dlp-extra current-ejs-version
|
||||
|
||||
.IGNORE: current-ejs-version
|
||||
.SILENT: current-ejs-version
|
||||
|
||||
clean-test:
|
||||
rm -rf test/testdata/sigs/player-*.js tmp/ *.annotations.xml *.aria2 *.description *.dump *.frag \
|
||||
rm -rf tmp/ *.annotations.xml *.aria2 *.description *.dump *.frag \
|
||||
*.frag.aria2 *.frag.urls *.info.json *.live_chat.json *.meta *.part* *.tmp *.temp *.unknown_video *.ytdl \
|
||||
*.3gp *.ape *.ass *.avi *.desktop *.f4v *.flac *.flv *.gif *.jpeg *.jpg *.lrc *.m4a *.m4v *.mhtml *.mkv *.mov *.mp3 *.mp4 \
|
||||
*.mpg *.mpga *.oga *.ogg *.opus *.png *.sbv *.srt *.ssa *.swf *.swp *.tt *.ttml *.url *.vtt *.wav *.webloc *.webm *.webp
|
||||
*.mpg *.mpga *.oga *.ogg *.opus *.png *.sbv *.srt *.ssa *.swf *.tt *.ttml *.url *.vtt *.wav *.webloc *.webm *.webp \
|
||||
test/testdata/sigs/player-*.js test/testdata/thumbnails/empty.webp "test/testdata/thumbnails/foo %d bar/foo_%d."*
|
||||
clean-dist:
|
||||
rm -rf yt-dlp.1.temp.md yt-dlp.1 README.txt MANIFEST build/ dist/ .coverage cover/ yt-dlp.tar.gz completions/ \
|
||||
yt_dlp/extractor/lazy_extractors.py *.spec CONTRIBUTING.md.tmp yt-dlp yt-dlp.exe yt_dlp.egg-info/ AUTHORS
|
||||
yt_dlp/extractor/lazy_extractors.py *.spec CONTRIBUTING.md.tmp yt-dlp yt-dlp.exe yt_dlp.egg-info/ AUTHORS \
|
||||
yt-dlp.zip .ejs-* yt_dlp_ejs/
|
||||
clean-cache:
|
||||
find . \( \
|
||||
-type d -name ".*_cache" -o -type d -name __pycache__ -o -name "*.pyc" -o -name "*.class" \
|
||||
|
|
@ -80,28 +87,49 @@ test:
|
|||
offlinetest: codetest
|
||||
$(PYTHON) -m pytest -Werror -m "not download"
|
||||
|
||||
CODE_FOLDERS_CMD = find yt_dlp -type f -name '__init__.py' | sed 's,/__init__.py,,' | grep -v '/__' | sort
|
||||
CODE_FOLDERS != $(CODE_FOLDERS_CMD)
|
||||
CODE_FOLDERS ?= $(shell $(CODE_FOLDERS_CMD))
|
||||
CODE_FILES_CMD = for f in $(CODE_FOLDERS) ; do echo "$$f" | sed 's,$$,/*.py,' ; done
|
||||
CODE_FILES != $(CODE_FILES_CMD)
|
||||
CODE_FILES ?= $(shell $(CODE_FILES_CMD))
|
||||
yt-dlp: $(CODE_FILES)
|
||||
PY_CODE_FOLDERS_CMD = find yt_dlp -type f -name '__init__.py' | sed 's|/__init__\.py||' | grep -v '/__' | sort
|
||||
PY_CODE_FOLDERS != $(PY_CODE_FOLDERS_CMD)
|
||||
PY_CODE_FOLDERS ?= $(shell $(PY_CODE_FOLDERS_CMD))
|
||||
|
||||
PY_CODE_FILES_CMD = for f in $(PY_CODE_FOLDERS) ; do echo "$$f" | sed 's|$$|/*.py|' ; done
|
||||
PY_CODE_FILES != $(PY_CODE_FILES_CMD)
|
||||
PY_CODE_FILES ?= $(shell $(PY_CODE_FILES_CMD))
|
||||
|
||||
JS_CODE_FOLDERS_CMD = find yt_dlp -type f -name '*.js' | sed 's|/[^/]\{1,\}\.js$$||' | uniq
|
||||
JS_CODE_FOLDERS != $(JS_CODE_FOLDERS_CMD)
|
||||
JS_CODE_FOLDERS ?= $(shell $(JS_CODE_FOLDERS_CMD))
|
||||
|
||||
JS_CODE_FILES_CMD = for f in $(JS_CODE_FOLDERS) ; do echo "$$f" | sed 's|$$|/*.js|' ; done
|
||||
JS_CODE_FILES != $(JS_CODE_FILES_CMD)
|
||||
JS_CODE_FILES ?= $(shell $(JS_CODE_FILES_CMD))
|
||||
|
||||
yt-dlp.zip: $(PY_CODE_FILES) $(JS_CODE_FILES)
|
||||
mkdir -p zip
|
||||
for d in $(CODE_FOLDERS) ; do \
|
||||
for d in $(PY_CODE_FOLDERS) ; do \
|
||||
mkdir -p zip/$$d ;\
|
||||
cp -pPR $$d/*.py zip/$$d/ ;\
|
||||
done
|
||||
(cd zip && touch -t 200001010101 $(CODE_FILES))
|
||||
mv zip/yt_dlp/__main__.py zip/
|
||||
(cd zip && zip -q ../yt-dlp $(CODE_FILES) __main__.py)
|
||||
for d in $(JS_CODE_FOLDERS) ; do \
|
||||
mkdir -p zip/$$d ;\
|
||||
cp -pPR $$d/*.js zip/$$d/ ;\
|
||||
done
|
||||
(cd zip && touch -t 200001010101 $(PY_CODE_FILES) $(JS_CODE_FILES))
|
||||
rm -f zip/yt_dlp/__main__.py
|
||||
(cd zip && zip -q ../yt-dlp.zip $(PY_CODE_FILES) $(JS_CODE_FILES))
|
||||
rm -rf zip
|
||||
|
||||
yt-dlp: yt-dlp.zip
|
||||
mkdir -p zip
|
||||
cp -pP yt_dlp/__main__.py zip/
|
||||
touch -t 200001010101 zip/__main__.py
|
||||
(cd zip && zip -q ../yt-dlp.zip __main__.py)
|
||||
echo '#!$(PYTHON)' > yt-dlp
|
||||
cat yt-dlp.zip >> yt-dlp
|
||||
rm yt-dlp.zip
|
||||
chmod a+x yt-dlp
|
||||
rm -rf zip
|
||||
|
||||
README.md: $(CODE_FILES) devscripts/make_readme.py
|
||||
README.md: $(PY_CODE_FILES) devscripts/make_readme.py
|
||||
COLUMNS=80 $(PYTHON) yt_dlp/__main__.py --ignore-config --help | $(PYTHON) devscripts/make_readme.py
|
||||
|
||||
CONTRIBUTING.md: README.md devscripts/make_contributing.py
|
||||
|
|
@ -126,15 +154,15 @@ yt-dlp.1: README.md devscripts/prepare_manpage.py
|
|||
pandoc -s -f $(MARKDOWN) -t man yt-dlp.1.temp.md -o yt-dlp.1
|
||||
rm -f yt-dlp.1.temp.md
|
||||
|
||||
completions/bash/yt-dlp: $(CODE_FILES) devscripts/bash-completion.in
|
||||
completions/bash/yt-dlp: $(PY_CODE_FILES) devscripts/bash-completion.in
|
||||
mkdir -p completions/bash
|
||||
$(PYTHON) devscripts/bash-completion.py
|
||||
|
||||
completions/zsh/_yt-dlp: $(CODE_FILES) devscripts/zsh-completion.in
|
||||
completions/zsh/_yt-dlp: $(PY_CODE_FILES) devscripts/zsh-completion.in
|
||||
mkdir -p completions/zsh
|
||||
$(PYTHON) devscripts/zsh-completion.py
|
||||
|
||||
completions/fish/yt-dlp.fish: $(CODE_FILES) devscripts/fish-completion.in
|
||||
completions/fish/yt-dlp.fish: $(PY_CODE_FILES) devscripts/fish-completion.in
|
||||
mkdir -p completions/fish
|
||||
$(PYTHON) devscripts/fish-completion.py
|
||||
|
||||
|
|
@ -156,9 +184,9 @@ yt-dlp.tar.gz: all
|
|||
--exclude '.git' \
|
||||
-- \
|
||||
README.md supportedsites.md Changelog.md LICENSE \
|
||||
CONTRIBUTING.md Collaborators.md CONTRIBUTORS AUTHORS \
|
||||
CONTRIBUTING.md Maintainers.md CONTRIBUTORS AUTHORS \
|
||||
Makefile yt-dlp.1 README.txt completions .gitignore \
|
||||
setup.cfg yt-dlp yt_dlp pyproject.toml devscripts test
|
||||
yt-dlp yt_dlp pyproject.toml devscripts test
|
||||
|
||||
AUTHORS: Changelog.md
|
||||
@if [ -d '.git' ] && command -v git > /dev/null ; then \
|
||||
|
|
@ -171,3 +199,45 @@ CONTRIBUTORS: Changelog.md
|
|||
echo 'Updating $@ from git commit history' ; \
|
||||
$(PYTHON) devscripts/make_changelog.py -v -c > /dev/null ; \
|
||||
fi
|
||||
|
||||
# The following EJS_-prefixed variables are auto-generated by devscripts/update_ejs.py
|
||||
# DO NOT EDIT!
|
||||
EJS_VERSION = 0.3.1
|
||||
EJS_WHEEL_NAME = yt_dlp_ejs-0.3.1-py3-none-any.whl
|
||||
EJS_WHEEL_HASH = sha256:a6e3548874db7c774388931752bb46c7f4642c044b2a189e56968f3d5ecab622
|
||||
EJS_PY_FOLDERS = yt_dlp_ejs yt_dlp_ejs/yt yt_dlp_ejs/yt/solver
|
||||
EJS_PY_FILES = yt_dlp_ejs/__init__.py yt_dlp_ejs/_version.py yt_dlp_ejs/yt/__init__.py yt_dlp_ejs/yt/solver/__init__.py
|
||||
EJS_JS_FOLDERS = yt_dlp_ejs/yt/solver
|
||||
EJS_JS_FILES = yt_dlp_ejs/yt/solver/core.min.js yt_dlp_ejs/yt/solver/lib.min.js
|
||||
|
||||
yt-dlp-extra: current-ejs-version .ejs-$(EJS_VERSION) $(EJS_PY_FILES) $(EJS_JS_FILES) yt-dlp.zip
|
||||
mkdir -p zip
|
||||
for d in $(EJS_PY_FOLDERS) ; do \
|
||||
mkdir -p zip/$$d ;\
|
||||
cp -pPR $$d/*.py zip/$$d/ ;\
|
||||
done
|
||||
for d in $(EJS_JS_FOLDERS) ; do \
|
||||
mkdir -p zip/$$d ;\
|
||||
cp -pPR $$d/*.js zip/$$d/ ;\
|
||||
done
|
||||
(cd zip && touch -t 200001010101 $(EJS_PY_FILES) $(EJS_JS_FILES))
|
||||
(cd zip && zip -q ../yt-dlp.zip $(EJS_PY_FILES) $(EJS_JS_FILES))
|
||||
cp -pP yt_dlp/__main__.py zip/
|
||||
touch -t 200001010101 zip/__main__.py
|
||||
(cd zip && zip -q ../yt-dlp.zip __main__.py)
|
||||
echo '#!$(PYTHON)' > yt-dlp
|
||||
cat yt-dlp.zip >> yt-dlp
|
||||
rm yt-dlp.zip
|
||||
chmod a+x yt-dlp
|
||||
rm -rf zip
|
||||
|
||||
.ejs-$(EJS_VERSION):
|
||||
@echo Downloading yt-dlp-ejs
|
||||
@echo "yt-dlp-ejs==$(EJS_VERSION) --hash $(EJS_WHEEL_HASH)" > .ejs-requirements.txt
|
||||
$(PYTHON) -m pip download -d ./build --no-deps --require-hashes -r .ejs-requirements.txt
|
||||
unzip -o build/$(EJS_WHEEL_NAME) "yt_dlp_ejs/*"
|
||||
@touch .ejs-$(EJS_VERSION)
|
||||
|
||||
current-ejs-version:
|
||||
rm -rf .ejs-*
|
||||
touch .ejs-$$($(PYTHON) -c 'import sys; sys.path = [""]; from yt_dlp_ejs import version; print(version)' 2>/dev/null)
|
||||
|
|
|
|||
463 README.md

@ -4,9 +4,8 @@
|
|||
[](#readme)
|
||||
|
||||
[](#installation "Installation")
|
||||
[](https://pypi.org/project/yt-dlp "PyPi")
|
||||
[](Collaborators.md#collaborators "Donate")
|
||||
[](https://matrix.to/#/#yt-dlp:matrix.org "Matrix")
|
||||
[](https://pypi.org/project/yt-dlp "PyPI")
|
||||
[](Maintainers.md#maintainers "Donate")
|
||||
[](https://discord.gg/H5MNcFW63r "Discord")
|
||||
[](supportedsites.md "Supported Sites")
|
||||
[](LICENSE "License")
|
||||
|
|
@ -45,6 +44,7 @@ yt-dlp is a feature-rich command-line audio/video downloader with support for [t
|
|||
* [Post-processing Options](#post-processing-options)
|
||||
* [SponsorBlock Options](#sponsorblock-options)
|
||||
* [Extractor Options](#extractor-options)
|
||||
* [Preset Aliases](#preset-aliases)
|
||||
* [CONFIGURATION](#configuration)
|
||||
* [Configuration file encoding](#configuration-file-encoding)
|
||||
* [Authentication with netrc](#authentication-with-netrc)
|
||||
|
|
@ -81,7 +81,7 @@ yt-dlp is a feature-rich command-line audio/video downloader with support for [t
|
|||
[](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp.exe)
|
||||
[](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp)
|
||||
[](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_macos)
|
||||
[](https://pypi.org/project/yt-dlp)
|
||||
[](https://pypi.org/project/yt-dlp)
|
||||
[](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp.tar.gz)
|
||||
[](#release-files)
|
||||
[](https://github.com/yt-dlp/yt-dlp/releases)
|
||||
|
|
@ -98,21 +98,28 @@ You can install yt-dlp using [the binaries](#release-files), [pip](https://pypi.
|
|||
File|Description
|
||||
:---|:---
|
||||
[yt-dlp](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp)|Platform-independent [zipimport](https://docs.python.org/3/library/zipimport.html) binary. Needs Python (recommended for **Linux/BSD**)
|
||||
[yt-dlp.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp.exe)|Windows (Win7 SP1+) standalone x64 binary (recommended for **Windows**)
|
||||
[yt-dlp.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp.exe)|Windows (Win8+) standalone x64 binary (recommended for **Windows**)
|
||||
[yt-dlp_macos](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_macos)|Universal MacOS (10.15+) standalone executable (recommended for **MacOS**)
|
||||
|
||||
#### Alternatives
|
||||
|
||||
File|Description
|
||||
:---|:---
|
||||
[yt-dlp_x86.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_x86.exe)|Windows (Win7 SP1+) standalone x86 (32-bit) binary
|
||||
[yt-dlp_min.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_min.exe)|Windows (Win7 SP1+) standalone x64 binary built with `py2exe`<br/> ([Not recommended](#standalone-py2exe-builds-windows))
|
||||
[yt-dlp_linux](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux)|Linux standalone x64 binary
|
||||
[yt-dlp_linux_armv7l](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_armv7l)|Linux standalone armv7l (32-bit) binary
|
||||
[yt-dlp_linux_aarch64](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_aarch64)|Linux standalone aarch64 (64-bit) binary
|
||||
[yt-dlp_win.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win.zip)|Unpackaged Windows executable (no auto-update)
|
||||
[yt-dlp_linux](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux)|Linux (glibc 2.17+) standalone x86_64 binary
|
||||
[yt-dlp_linux.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux.zip)|Unpackaged Linux (glibc 2.17+) x86_64 executable (no auto-update)
|
||||
[yt-dlp_linux_aarch64](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_aarch64)|Linux (glibc 2.17+) standalone aarch64 binary
|
||||
[yt-dlp_linux_aarch64.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_aarch64.zip)|Unpackaged Linux (glibc 2.17+) aarch64 executable (no auto-update)
|
||||
[yt-dlp_linux_armv7l.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_armv7l.zip)|Unpackaged Linux (glibc 2.31+) armv7l executable (no auto-update)
|
||||
[yt-dlp_musllinux](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_musllinux)|Linux (musl 1.2+) standalone x86_64 binary
|
||||
[yt-dlp_musllinux.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_musllinux.zip)|Unpackaged Linux (musl 1.2+) x86_64 executable (no auto-update)
|
||||
[yt-dlp_musllinux_aarch64](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_musllinux_aarch64)|Linux (musl 1.2+) standalone aarch64 binary
|
||||
[yt-dlp_musllinux_aarch64.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_musllinux_aarch64.zip)|Unpackaged Linux (musl 1.2+) aarch64 executable (no auto-update)
|
||||
[yt-dlp_x86.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_x86.exe)|Windows (Win8+) standalone x86 (32-bit) binary
|
||||
[yt-dlp_win_x86.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win_x86.zip)|Unpackaged Windows (Win8+) x86 (32-bit) executable (no auto-update)
|
||||
[yt-dlp_arm64.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_arm64.exe)|Windows (Win10+) standalone ARM64 binary
|
||||
[yt-dlp_win_arm64.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win_arm64.zip)|Unpackaged Windows (Win10+) ARM64 executable (no auto-update)
|
||||
[yt-dlp_win.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win.zip)|Unpackaged Windows (Win8+) x64 executable (no auto-update)
|
||||
[yt-dlp_macos.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_macos.zip)|Unpackaged MacOS (10.15+) executable (no auto-update)
|
||||
[yt-dlp_macos_legacy](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_macos_legacy)|MacOS (10.9+) standalone x64 executable
|
||||
|
||||
#### Misc
|
||||
|
||||
|
|
@ -131,6 +138,19 @@ curl -L https://github.com/yt-dlp/yt-dlp/raw/master/public.key | gpg --import
gpg --verify SHA2-256SUMS.sig SHA2-256SUMS
gpg --verify SHA2-512SUMS.sig SHA2-512SUMS
```

#### Licensing

While yt-dlp is licensed under the [Unlicense](LICENSE), many of the release files contain code from other projects with different licenses.

Most notably, the PyInstaller-bundled executables include GPLv3+ licensed code, and as such the combined work is licensed under [GPLv3+](https://www.gnu.org/licenses/gpl-3.0.html).

The zipimport Unix executable (`yt-dlp`) contains [ISC](https://github.com/meriyah/meriyah/blob/main/LICENSE.md) licensed code from [`meriyah`](https://github.com/meriyah/meriyah) and [MIT](https://github.com/davidbonnet/astring/blob/main/LICENSE) licensed code from [`astring`](https://github.com/davidbonnet/astring).

See [THIRD_PARTY_LICENSES.txt](THIRD_PARTY_LICENSES.txt) for more details.

The git repository, the source tarball (`yt-dlp.tar.gz`), the PyPI source distribution and the PyPI built distribution (wheel) only contain code licensed under the [Unlicense](LICENSE).

<!-- MANPAGE: END EXCLUDED SECTION -->

**Note**: The manpages, shell completion (autocomplete) files etc. are available inside the [source tarball](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp.tar.gz)
@ -169,18 +189,21 @@ Example usage:
yt-dlp --update-to nightly

# To install nightly with pip:
python3 -m pip install -U --pre "yt-dlp[default]"
python -m pip install -U --pre "yt-dlp[default]"
```

When running a yt-dlp version that is older than 90 days, you will see a warning message suggesting to update to the latest version.
You can suppress this warning by adding `--no-update` to your command or configuration file.

## DEPENDENCIES
Python versions 3.8+ (CPython and PyPy) are supported. Other versions and implementations may or may not work correctly.
Python versions 3.10+ (CPython) and 3.11+ (PyPy) are supported. Other versions and implementations may or may not work correctly.

<!-- Python 3.5+ uses VC++14 and it is already embedded in the binary created
<!x-- https://www.microsoft.com/en-us/download/details.aspx?id=26999 --x>
On windows, [Microsoft Visual C++ 2010 SP1 Redistributable Package (x86)](https://download.microsoft.com/download/1/6/5/165255E7-1014-4D0A-B094-B6A430A6BFFC/vcredist_x86.exe) is also necessary to run yt-dlp. You probably already have this, but if the executable throws an error due to missing `MSVCR100.dll` you need to install it manually.
On Windows, [Microsoft Visual C++ 2010 SP1 Redistributable Package (x86)](https://download.microsoft.com/download/1/6/5/165255E7-1014-4D0A-B094-B6A430A6BFFC/vcredist_x86.exe) is also necessary to run yt-dlp. You probably already have this, but if the executable throws an error due to missing `MSVCR100.dll` you need to install it manually.
-->

While all the other dependencies are optional, `ffmpeg` and `ffprobe` are highly recommended
While all the other dependencies are optional, `ffmpeg`, `ffprobe`, `yt-dlp-ejs` and a JavaScript runtime are highly recommended

### Strongly recommended

@ -190,6 +213,10 @@ While all the other dependencies are optional, `ffmpeg` and `ffprobe` are highly

**Important**: What you need is ffmpeg *binary*, **NOT** [the Python package of the same name](https://pypi.org/project/ffmpeg)

* [**yt-dlp-ejs**](https://github.com/yt-dlp/ejs) - Required for deciphering YouTube n/sig values. Licensed under [Unlicense](https://github.com/yt-dlp/ejs/blob/main/LICENSE), bundles [MIT](https://github.com/davidbonnet/astring/blob/main/LICENSE) and [ISC](https://github.com/meriyah/meriyah/blob/main/LICENSE.md) components.

    A JavaScript runtime like [**deno**](https://deno.land) (recommended), [**node.js**](https://nodejs.org), [**bun**](https://bun.sh), or [**QuickJS**](https://bellard.org/quickjs/) is also required to run yt-dlp-ejs. See [the wiki](https://github.com/yt-dlp/yt-dlp/wiki/EJS).

### Networking
|
||||
* [**certifi**](https://github.com/certifi/python-certifi)\* - Provides Mozilla's root certificate bundle. Licensed under [MPLv2](https://github.com/certifi/python-certifi/blob/master/LICENSE)
|
||||
* [**brotli**](https://github.com/google/brotli)\* or [**brotlicffi**](https://github.com/python-hyper/brotlicffi) - [Brotli](https://en.wikipedia.org/wiki/Brotli) content encoding support. Both licensed under MIT <sup>[1](https://github.com/google/brotli/blob/master/LICENSE) [2](https://github.com/python-hyper/brotlicffi/blob/master/LICENSE) </sup>
|
||||
|
|
@ -200,28 +227,26 @@ While all the other dependencies are optional, `ffmpeg` and `ffprobe` are highly
|
|||
|
||||
The following provide support for impersonating browser requests. This may be required for some sites that employ TLS fingerprinting.
|
||||
|
||||
* [**curl_cffi**](https://github.com/yifeikong/curl_cffi) (recommended) - Python binding for [curl-impersonate](https://github.com/lwthiker/curl-impersonate). Provides impersonation targets for Chrome, Edge and Safari. Licensed under [MIT](https://github.com/yifeikong/curl_cffi/blob/main/LICENSE)
|
||||
* [**curl_cffi**](https://github.com/lexiforest/curl_cffi) (recommended) - Python binding for [curl-impersonate](https://github.com/lexiforest/curl-impersonate). Provides impersonation targets for Chrome, Edge and Safari. Licensed under [MIT](https://github.com/lexiforest/curl_cffi/blob/main/LICENSE)
|
||||
* Can be installed with the `curl-cffi` group, e.g. `pip install "yt-dlp[default,curl-cffi]"`
|
||||
* Currently only included in `yt-dlp.exe` and `yt-dlp_macos` builds
|
||||
* Currently included in most builds *except* `yt-dlp` (Unix zipimport binary), `yt-dlp_x86` (Windows 32-bit) and `yt-dlp_musllinux_aarch64`
|
||||
|
||||
|
||||
### Metadata
|
||||
|
||||
* [**mutagen**](https://github.com/quodlibet/mutagen)\* - For `--embed-thumbnail` in certain formats. Licensed under [GPLv2+](https://github.com/quodlibet/mutagen/blob/master/COPYING)
|
||||
* [**AtomicParsley**](https://github.com/wez/atomicparsley) - For `--embed-thumbnail` in `mp4`/`m4a` files when `mutagen`/`ffmpeg` cannot. Licensed under [GPLv2+](https://github.com/wez/atomicparsley/blob/master/COPYING)
|
||||
* [**xattr**](https://github.com/xattr/xattr), [**pyxattr**](https://github.com/iustin/pyxattr) or [**setfattr**](http://savannah.nongnu.org/projects/attr) - For writing xattr metadata (`--xattr`) on **Mac** and **BSD**. Licensed under [MIT](https://github.com/xattr/xattr/blob/master/LICENSE.txt), [LGPL2.1](https://github.com/iustin/pyxattr/blob/master/COPYING) and [GPLv2+](http://git.savannah.nongnu.org/cgit/attr.git/tree/doc/COPYING) respectively
|
||||
* [**xattr**](https://github.com/xattr/xattr), [**pyxattr**](https://github.com/iustin/pyxattr) or [**setfattr**](http://savannah.nongnu.org/projects/attr) - For writing xattr metadata (`--xattrs`) on **Mac** and **BSD**. Licensed under [MIT](https://github.com/xattr/xattr/blob/master/LICENSE.txt), [LGPL2.1](https://github.com/iustin/pyxattr/blob/master/COPYING) and [GPLv2+](http://git.savannah.nongnu.org/cgit/attr.git/tree/doc/COPYING) respectively
|
||||
|
||||
### Misc
|
||||
|
||||
* [**pycryptodomex**](https://github.com/Legrandin/pycryptodome)\* - For decrypting AES-128 HLS streams and various other data. Licensed under [BSD-2-Clause](https://github.com/Legrandin/pycryptodome/blob/master/LICENSE.rst)
|
||||
* [**phantomjs**](https://github.com/ariya/phantomjs) - Used in extractors where javascript needs to be run. Licensed under [BSD-3-Clause](https://github.com/ariya/phantomjs/blob/master/LICENSE.BSD)
|
||||
* [**phantomjs**](https://github.com/ariya/phantomjs) - Used in some extractors where JavaScript needs to be run. No longer used for YouTube. To be deprecated in the near future. Licensed under [BSD-3-Clause](https://github.com/ariya/phantomjs/blob/master/LICENSE.BSD)
|
||||
* [**secretstorage**](https://github.com/mitya57/secretstorage)\* - For `--cookies-from-browser` to access the **Gnome** keyring while decrypting cookies of **Chromium**-based browsers on **Linux**. Licensed under [BSD-3-Clause](https://github.com/mitya57/secretstorage/blob/master/LICENSE)
|
||||
* Any external downloader that you want to use with `--downloader`
|
||||
|
||||
### Deprecated
|
||||
|
||||
* [**avconv** and **avprobe**](https://www.libav.org) - Now **deprecated** alternative to ffmpeg. License [depends on the build](https://libav.org/legal)
|
||||
* [**sponskrub**](https://github.com/faissaloo/SponSkrub) - For using the now **deprecated** [sponskrub options](#sponskrub-options). Licensed under [GPLv3+](https://github.com/faissaloo/SponSkrub/blob/master/LICENCE.md)
|
||||
* [**rtmpdump**](http://rtmpdump.mplayerhq.hu) - For downloading `rtmp` streams. ffmpeg can be used instead with `--downloader ffmpeg`. Licensed under [GPLv2+](http://rtmpdump.mplayerhq.hu)
|
||||
* [**mplayer**](http://mplayerhq.hu/design7/info.html) or [**mpv**](https://mpv.io) - For downloading `rstp`/`mms` streams. ffmpeg can be used instead with `--downloader ffmpeg`. Licensed under [GPLv2+](https://github.com/mpv-player/mpv/blob/master/Copyright)
|
||||
|
||||
|
|
@ -240,12 +265,12 @@ To build the standalone executable, you must have Python and `pyinstaller` (plus
|
|||
You can run the following commands:
|
||||
|
||||
```
|
||||
python3 devscripts/install_deps.py --include pyinstaller
|
||||
python3 devscripts/make_lazy_extractors.py
|
||||
python3 -m bundle.pyinstaller
|
||||
python devscripts/install_deps.py --include-group pyinstaller
|
||||
python devscripts/make_lazy_extractors.py
|
||||
python -m bundle.pyinstaller
|
||||
```
|
||||
|
||||
On some systems, you may need to use `py` or `python` instead of `python3`.
|
||||
On some systems, you may need to use `py` or `python3` instead of `python`.
|
||||
|
||||
`python -m bundle.pyinstaller` accepts any arguments that can be passed to `pyinstaller`, such as `--onefile/-F` or `--onedir/-D`, which is further [documented here](https://pyinstaller.org/en/stable/usage.html#what-to-generate).
|
||||
|
||||
|
|
@ -254,31 +279,19 @@ On some systems, you may need to use `py` or `python` instead of `python3`.
|
|||
**Important**: Running `pyinstaller` directly **instead of** using `python -m bundle.pyinstaller` is **not** officially supported. This may or may not work correctly.
|
||||
|
||||
### Platform-independent Binary (UNIX)
|
||||
You will need the build tools `python` (3.8+), `zip`, `make` (GNU), `pandoc`\* and `pytest`\*.
|
||||
You will need the build tools `python` (3.10+), `zip`, `make` (GNU), `pandoc`\* and `pytest`\*.
|
||||
|
||||
After installing these, simply run `make`.
|
||||
|
||||
You can also run `make yt-dlp` instead to compile only the binary without updating any of the additional files. (The build tools marked with **\*** are not needed for this)
|
||||
|
||||
### Standalone Py2Exe Builds (Windows)
|
||||
|
||||
While we provide the option to build with [py2exe](https://www.py2exe.org), it is recommended to build [using PyInstaller](#standalone-pyinstaller-builds) instead since the py2exe builds **cannot contain `pycryptodomex`/`certifi`/`requests` and need VC++14** on the target computer to run.
|
||||
|
||||
If you wish to build it anyway, install Python (if it is not already installed) and you can run the following commands:
|
||||
|
||||
```
|
||||
py devscripts/install_deps.py --include py2exe
|
||||
py devscripts/make_lazy_extractors.py
|
||||
py -m bundle.py2exe
|
||||
```
|
||||
|
||||
### Related scripts
|
||||
|
||||
* **`devscripts/install_deps.py`** - Install dependencies for yt-dlp.
|
||||
* **`devscripts/update-version.py`** - Update the version number based on the current date.
|
||||
* **`devscripts/set-variant.py`** - Set the build variant of the executable.
|
||||
* **`devscripts/make_changelog.py`** - Create a markdown changelog using short commit messages and update `CONTRIBUTORS` file.
|
||||
* **`devscripts/make_lazy_extractors.py`** - Create lazy extractors. Running this before building the binaries (any variant) will improve their startup performance. Set the environment variable `YTDLP_NO_LAZY_EXTRACTORS=1` if you wish to forcefully disable lazy extractor loading.
|
||||
* **`devscripts/make_lazy_extractors.py`** - Create lazy extractors. Running this before building the binaries (any variant) will improve their startup performance. Set the environment variable `YTDLP_NO_LAZY_EXTRACTORS` to something nonempty to forcefully disable lazy extractor loading.
|
||||
|
||||
Note: See their `--help` for more info.
|
||||
|
||||
|
|
@ -290,7 +303,7 @@ If you fork the project on GitHub, you can run your fork's [build workflow](.git
|
|||
<!-- MANPAGE: BEGIN EXCLUDED SECTION -->
|
||||
yt-dlp [OPTIONS] [--] URL [URL...]
|
||||
|
||||
`Ctrl+F` is your friend :D
|
||||
Tip: Use `CTRL`+`F` (or `Command`+`F`) to search by keywords
|
||||
<!-- MANPAGE: END EXCLUDED SECTION -->
|
||||
|
||||
<!-- Auto generated -->
|
||||
|
|
@ -313,7 +326,6 @@ If you fork the project on GitHub, you can run your fork's [build workflow](.git
|
|||
playlist (default)
|
||||
--abort-on-error Abort downloading of further videos if an
|
||||
error occurs (Alias: --no-ignore-errors)
|
||||
--dump-user-agent Display the current user-agent and exit
|
||||
--list-extractors List all supported extractors and exit
|
||||
--extractor-descriptions Output descriptions of all supported
|
||||
extractors and exit
|
||||
|
|
@ -348,13 +360,53 @@ If you fork the project on GitHub, you can run your fork's [build workflow](.git
|
|||
containing directory ("-" for stdin). Can be
|
||||
used multiple times and inside other
|
||||
configuration files
|
||||
--flat-playlist Do not extract the videos of a playlist,
|
||||
only list them
|
||||
--plugin-dirs DIR Path to an additional directory to search
|
||||
for plugins. This option can be used
|
||||
multiple times to add multiple directories.
|
||||
Use "default" to search the default plugin
|
||||
directories (default)
|
||||
--no-plugin-dirs Clear plugin directories to search,
|
||||
including defaults and those provided by
|
||||
previous --plugin-dirs
|
||||
--js-runtimes RUNTIME[:PATH] Additional JavaScript runtime to enable,
|
||||
with an optional location for the runtime
|
||||
(either the path to the binary or its
|
||||
containing directory). This option can be
|
||||
used multiple times to enable multiple
|
||||
runtimes. Supported runtimes are (in order
|
||||
of priority, from highest to lowest): deno,
|
||||
node, quickjs, bun. Only "deno" is enabled
|
||||
by default. The highest priority runtime
|
||||
that is both enabled and available will be
|
||||
used. In order to use a lower priority
|
||||
runtime when "deno" is available, --no-js-
|
||||
runtimes needs to be passed before enabling
|
||||
other runtimes
|
||||
--no-js-runtimes Clear JavaScript runtimes to enable,
|
||||
including defaults and those provided by
|
||||
previous --js-runtimes
|
||||
--remote-components COMPONENT Remote components to allow yt-dlp to fetch
|
||||
when required. This option is currently not
|
||||
needed if you are using an official
|
||||
executable or have the requisite version of
|
||||
the yt-dlp-ejs package installed. You can
|
||||
use this option multiple times to allow
|
||||
multiple components. Supported values:
|
||||
ejs:npm (external JavaScript components from
|
||||
npm), ejs:github (external JavaScript
|
||||
components from yt-dlp-ejs GitHub). By
|
||||
default, no remote components are allowed
|
||||
--no-remote-components Disallow fetching of all remote components,
|
||||
including any previously allowed by
|
||||
--remote-components or defaults.
|
||||
--flat-playlist Do not extract a playlist's URL result
|
||||
entries; some entry metadata may be missing
|
||||
and downloading may be bypassed
|
||||
--no-flat-playlist Fully extract the videos of a playlist
|
||||
(default)
|
||||
--live-from-start Download livestreams from the start.
|
||||
Currently only supported for YouTube
|
||||
(Experimental)
|
||||
Currently experimental and only supported
|
||||
for YouTube and Twitch
|
||||
--no-live-from-start Download livestreams from the current time
|
||||
(default)
|
||||
--wait-for-video MIN[-MAX] Wait for scheduled streams to become
|
||||
|
|
@ -368,7 +420,9 @@ If you fork the project on GitHub, you can run your fork's [build workflow](.git
|
|||
stderr) to apply the setting to. Can be one
|
||||
of "always", "auto" (default), "never", or
|
||||
"no_color" (use non color terminal
|
||||
sequences). Can be used multiple times
|
||||
sequences). Use "auto-tty" or "no_color-tty"
|
||||
to decide based on terminal support only.
|
||||
Can be used multiple times
|
||||
--compat-options OPTS Options that can help keep compatibility
|
||||
with youtube-dl or youtube-dlc
|
||||
configurations by reverting some of the
|
||||
|
|
@ -378,17 +432,23 @@ If you fork the project on GitHub, you can run your fork's [build workflow](.git
|
|||
an alias starts with a dash "-", it is
|
||||
prefixed with "--". Arguments are parsed
|
||||
according to the Python string formatting
|
||||
mini-language. E.g. --alias get-audio,-X
|
||||
"-S=aext:{0},abr -x --audio-format {0}"
|
||||
creates options "--get-audio" and "-X" that
|
||||
takes an argument (ARG0) and expands to
|
||||
"-S=aext:ARG0,abr -x --audio-format ARG0".
|
||||
All defined aliases are listed in the --help
|
||||
mini-language. E.g. --alias get-audio,-X "-S
|
||||
aext:{0},abr -x --audio-format {0}" creates
|
||||
options "--get-audio" and "-X" that takes an
|
||||
argument (ARG0) and expands to "-S
|
||||
aext:ARG0,abr -x --audio-format ARG0". All
|
||||
defined aliases are listed in the --help
|
||||
output. Alias options can trigger more
|
||||
aliases; so be careful to avoid defining
|
||||
recursive options. As a safety measure, each
|
||||
alias may be triggered a maximum of 100
|
||||
times. This option can be used multiple times
|
||||
-t, --preset-alias PRESET Applies a predefined set of options. e.g.
|
||||
--preset-alias mp3. The following presets
|
||||
are available: mp3, aac, mp4, mkv, sleep.
|
||||
See the "Preset Aliases" section at the end
|
||||
for more info. This option can be used
|
||||
multiple times
|
||||
|
||||
## Network Options:
|
||||
--proxy URL Use the specified HTTP/HTTPS/SOCKS proxy. To
|
||||
|
|
@ -442,10 +502,10 @@ If you fork the project on GitHub, you can run your fork's [build workflow](.git
|
|||
E.g. "--date today-2weeks" downloads only
|
||||
videos uploaded on the same day two weeks ago
|
||||
--datebefore DATE Download only videos uploaded on or before
|
||||
this date. The date formats accepted is the
|
||||
this date. The date formats accepted are the
|
||||
same as --date
|
||||
--dateafter DATE Download only videos uploaded on or after
|
||||
this date. The date formats accepted is the
|
||||
this date. The date formats accepted are the
|
||||
same as --date
|
||||
--match-filters FILTER Generic video filter. Any "OUTPUT TEMPLATE"
|
||||
field can be compared with a number or a
|
||||
|
|
@ -457,17 +517,17 @@ If you fork the project on GitHub, you can run your fork's [build workflow](.git
|
|||
conditions. Use a "\" to escape "&" or
|
||||
quotes if needed. If used multiple times,
|
||||
the filter matches if at least one of the
|
||||
conditions is met. E.g. --match-filter
|
||||
!is_live --match-filter "like_count>?100 &
|
||||
conditions is met. E.g. --match-filters
|
||||
!is_live --match-filters "like_count>?100 &
|
||||
description~='(?i)\bcats \& dogs\b'" matches
|
||||
only videos that are not live OR those that
|
||||
have a like count more than 100 (or the like
|
||||
field is not available) and also has a
|
||||
description that contains the phrase "cats &
|
||||
dogs" (caseless). Use "--match-filter -" to
|
||||
dogs" (caseless). Use "--match-filters -" to
|
||||
interactively ask whether to download each
|
||||
video
|
||||
--no-match-filters Do not use any --match-filter (default)
|
||||
--no-match-filters Do not use any --match-filters (default)
|
||||
--break-match-filters FILTER Same as "--match-filters" but stops the
|
||||
download process when a video is rejected
|
||||
--no-break-match-filters Do not use any --break-match-filters (default)
|
||||
|
|
@ -483,12 +543,13 @@ If you fork the project on GitHub, you can run your fork's [build workflow](.git
|
|||
--no-download-archive Do not use archive file (default)
|
||||
--max-downloads NUMBER Abort after downloading NUMBER files
|
||||
--break-on-existing Stop the download process when encountering
|
||||
a file that is in the archive
|
||||
a file that is in the archive supplied with
|
||||
the --download-archive option
|
||||
--no-break-on-existing Do not stop the download process when
|
||||
encountering a file that is in the archive
|
||||
(default)
|
||||
--break-per-input Alters --max-downloads, --break-on-existing,
|
||||
--break-match-filter, and autonumber to
|
||||
--break-match-filters, and autonumber to
|
||||
reset per input URL
|
||||
--no-break-per-input --break-on-existing and similar options
|
||||
terminates the entire download queue
|
||||
|
|
@ -546,8 +607,6 @@ If you fork the project on GitHub, you can run your fork's [build workflow](.git
|
|||
--playlist-random and --playlist-reverse
|
||||
--no-lazy-playlist Process videos in the playlist only after
|
||||
the entire playlist is parsed (default)
|
||||
--xattr-set-filesize Set file xattribute ytdl.filesize with
|
||||
expected file size
|
||||
--hls-use-mpegts Use the mpegts container for HLS videos;
|
||||
allowing some players to play the video
|
||||
while downloading, and reducing the chance
|
||||
|
|
@ -571,9 +630,9 @@ If you fork the project on GitHub, you can run your fork's [build workflow](.git
|
|||
use (optionally) prefixed by the protocols
|
||||
(http, ftp, m3u8, dash, rstp, rtmp, mms) to
|
||||
use it for. Currently supports native,
|
||||
aria2c, avconv, axel, curl, ffmpeg, httpie,
|
||||
wget. You can use this option multiple times
|
||||
to set different downloaders for different
|
||||
aria2c, axel, curl, ffmpeg, httpie, wget.
|
||||
You can use this option multiple times to
|
||||
set different downloaders for different
|
||||
protocols. E.g. --downloader aria2c
|
||||
--downloader "dash,m3u8:native" will use
|
||||
aria2c for http/ftp downloads, and the
|
||||
|
|
@ -615,8 +674,7 @@ If you fork the project on GitHub, you can run your fork's [build workflow](.git
|
|||
--no-restrict-filenames Allow Unicode characters, "&" and spaces in
|
||||
filenames (default)
|
||||
--windows-filenames Force filenames to be Windows-compatible
|
||||
--no-windows-filenames Make filenames Windows-compatible only if
|
||||
using Windows (default)
|
||||
--no-windows-filenames Sanitize filenames only minimally
|
||||
--trim-filenames LENGTH Limit the filename length (excluding
|
||||
extension) to the specified number of
|
||||
characters
|
||||
|
|
@ -635,9 +693,9 @@ If you fork the project on GitHub, you can run your fork's [build workflow](.git
|
|||
--no-part Do not use .part files - write directly into
|
||||
output file
|
||||
--mtime Use the Last-modified header to set the file
|
||||
modification time (default)
|
||||
modification time
|
||||
--no-mtime Do not use the Last-modified header to set
|
||||
the file modification time
|
||||
the file modification time (default)
|
||||
--write-description Write video description to a .description file
|
||||
--no-write-description Do not write video description (default)
|
||||
--write-info-json Write video metadata to a .info.json file
|
||||
|
|
@ -730,16 +788,16 @@ If you fork the project on GitHub, you can run your fork's [build workflow](.git
|
|||
used. This option can be used multiple times
|
||||
--print-to-file [WHEN:]TEMPLATE FILE
|
||||
Append given template to the file. The
|
||||
values of WHEN and TEMPLATE are same as that
|
||||
of --print. FILE uses the same syntax as the
|
||||
output template. This option can be used
|
||||
multiple times
|
||||
values of WHEN and TEMPLATE are the same as
|
||||
that of --print. FILE uses the same syntax
|
||||
as the output template. This option can be
|
||||
used multiple times
|
||||
-j, --dump-json Quiet, but print JSON information for each
|
||||
video. Simulate unless --no-simulate is
|
||||
used. See "OUTPUT TEMPLATE" for a
|
||||
description of available keys
|
||||
-J, --dump-single-json Quiet, but print JSON information for each
|
||||
url or infojson passed. Simulate unless
|
||||
URL or infojson passed. Simulate unless
|
||||
--no-simulate is used. If the URL refers to
|
||||
a playlist, the whole playlist information
|
||||
is dumped in a single line
|
||||
|
|
@ -814,9 +872,9 @@ If you fork the project on GitHub, you can run your fork's [build workflow](.git
|
|||
--no-audio-multistreams Only one audio stream is downloaded for each
|
||||
output file (default)
|
||||
--prefer-free-formats Prefer video formats with free containers
|
||||
over non-free ones of same quality. Use with
|
||||
"-S ext" to strictly prefer free containers
|
||||
irrespective of quality
|
||||
over non-free ones of the same quality. Use
|
||||
with "-S ext" to strictly prefer free
|
||||
containers irrespective of quality
|
||||
--no-prefer-free-formats Don't give any special preference to free
|
||||
containers (default)
|
||||
--check-formats Make sure formats are selected only from
|
||||
|
|
@ -841,15 +899,17 @@ If you fork the project on GitHub, you can run your fork's [build workflow](.git
|
|||
(default) (Alias: --no-write-automatic-subs)
|
||||
--list-subs List available subtitles of each video.
|
||||
Simulate unless --no-simulate is used
|
||||
--sub-format FORMAT Subtitle format; accepts formats preference,
|
||||
e.g. "srt" or "ass/srt/best"
|
||||
--sub-format FORMAT Subtitle format; accepts formats preference
|
||||
separated by "/", e.g. "srt" or "ass/srt/best"
|
||||
--sub-langs LANGS Languages of the subtitles to download (can
|
||||
be regex) or "all" separated by commas, e.g.
|
||||
--sub-langs "en.*,ja". You can prefix the
|
||||
language code with a "-" to exclude it from
|
||||
the requested languages, e.g. --sub-langs
|
||||
all,-live_chat. Use --list-subs for a list
|
||||
of available language tags
|
||||
--sub-langs "en.*,ja" (where "en.*" is a
|
||||
regex pattern that matches "en" followed by
|
||||
0 or more of any character). You can prefix
|
||||
the language code with a "-" to exclude it
|
||||
from the requested languages, e.g. --sub-
|
||||
langs all,-live_chat. Use --list-subs for a
|
||||
list of available language tags
|
||||
|
||||
## Authentication Options:
|
||||
-u, --username USERNAME Login with this account ID
|
||||
|
|
@ -897,9 +957,9 @@ If you fork the project on GitHub, you can run your fork's [build workflow](.git
|
|||
necessary (currently supported: avi, flv,
|
||||
gif, mkv, mov, mp4, webm, aac, aiff, alac,
|
||||
flac, m4a, mka, mp3, ogg, opus, vorbis,
|
||||
wav). If target container does not support
|
||||
the video/audio codec, remuxing will fail.
|
||||
You can specify multiple rules; e.g.
|
||||
wav). If the target container does not
|
||||
support the video/audio codec, remuxing will
|
||||
fail. You can specify multiple rules; e.g.
|
||||
"aac>m4a/mov>mp4/mkv" will remux aac to m4a,
|
||||
mov to mp4 and anything else to mkv
|
||||
--recode-video FORMAT Re-encode the video into another format if
|
||||
|
|
@ -967,29 +1027,29 @@ If you fork the project on GitHub, you can run your fork's [build workflow](.git
|
|||
are the same as that of --use-postprocessor
|
||||
(default: pre_process)
|
||||
--xattrs Write metadata to the video file's xattrs
|
||||
(using dublin core and xdg standards)
|
||||
(using Dublin Core and XDG standards)
|
||||
--concat-playlist POLICY Concatenate videos in a playlist. One of
|
||||
"never", "always", or "multi_video"
|
||||
(default; only when the videos form a single
|
||||
show). All the video files must have same
|
||||
codecs and number of streams to be
|
||||
concatable. The "pl_video:" prefix can be
|
||||
show). All the video files must have the
|
||||
same codecs and number of streams to be
|
||||
concatenable. The "pl_video:" prefix can be
|
||||
used with "--paths" and "--output" to set
|
||||
the output filename for the concatenated
|
||||
files. See "OUTPUT TEMPLATE" for details
|
||||
--fixup POLICY Automatically correct known faults of the
|
||||
file. One of never (do nothing), warn (only
|
||||
emit a warning), detect_or_warn (the
|
||||
default; fix file if we can, warn
|
||||
otherwise), force (try fixing even if file
|
||||
already exists)
|
||||
default; fix the file if we can, warn
|
||||
otherwise), force (try fixing even if the
|
||||
file already exists)
|
||||
--ffmpeg-location PATH Location of the ffmpeg binary; either the
|
||||
path to the binary or its containing directory
|
||||
--exec [WHEN:]CMD Execute a command, optionally prefixed with
|
||||
when to execute it, separated by a ":".
|
||||
Supported values of "WHEN" are the same as
|
||||
that of --use-postprocessor (default:
|
||||
after_move). Same syntax as the output
|
||||
after_move). The same syntax as the output
|
||||
template can be used to pass any field as
|
||||
arguments to the command. If no fields are
|
||||
passed, %(filepath,_filename|)q is appended
|
||||
|
|
@ -997,12 +1057,16 @@ If you fork the project on GitHub, you can run your fork's [build workflow](.git
|
|||
be used multiple times
|
||||
--no-exec Remove any previously defined --exec
|
||||
--convert-subs FORMAT Convert the subtitles to another format
|
||||
(currently supported: ass, lrc, srt, vtt)
|
||||
(Alias: --convert-subtitles)
|
||||
(currently supported: ass, lrc, srt, vtt).
|
||||
Use "--convert-subs none" to disable
|
||||
conversion (default) (Alias: --convert-
|
||||
subtitles)
|
||||
--convert-thumbnails FORMAT Convert the thumbnails to another format
|
||||
(currently supported: jpg, png, webp). You
|
||||
can specify multiple rules using similar
|
||||
syntax as --remux-video
|
||||
syntax as "--remux-video". Use "--convert-
|
||||
thumbnails none" to disable conversion
|
||||
(default)
|
||||
--split-chapters Split video into multiple files based on
|
||||
internal chapters. The "chapter:" prefix can
|
||||
be used with "--paths" and "--output" to set
|
||||
|
|
@ -1023,7 +1087,7 @@ If you fork the project on GitHub, you can run your fork's [build workflow](.git
|
|||
--no-force-keyframes-at-cuts Do not force keyframes around the chapters
|
||||
when cutting/splitting (default)
|
||||
--use-postprocessor NAME[:ARGS]
|
||||
The (case sensitive) name of plugin
|
||||
The (case-sensitive) name of plugin
|
||||
postprocessors to be enabled, and
|
||||
(optionally) arguments to be passed to it,
|
||||
separated by a colon ":". ARGS are a
|
||||
|
|
@ -1036,8 +1100,8 @@ If you fork the project on GitHub, you can run your fork's [build workflow](.git
|
|||
--print/--output), "before_dl" (before each
|
||||
video download), "post_process" (after each
|
||||
video download; default), "after_move"
|
||||
(after moving video file to its final
|
||||
locations), "after_video" (after downloading
|
||||
(after moving the video file to its final
|
||||
location), "after_video" (after downloading
|
||||
and processing all formats of a video), or
|
||||
"playlist" (at end of playlist). This option
|
||||
can be used multiple times to add different
|
||||
|
|
@ -1052,11 +1116,12 @@ Make chapter entries for, or remove various segments (sponsor,
|
|||
for, separated by commas. Available
|
||||
categories are sponsor, intro, outro,
|
||||
selfpromo, preview, filler, interaction,
|
||||
music_offtopic, poi_highlight, chapter, all
|
||||
and default (=all). You can prefix the
|
||||
category with a "-" to exclude it. See [1]
|
||||
for description of the categories. E.g.
|
||||
--sponsorblock-mark all,-preview
|
||||
music_offtopic, hook, poi_highlight,
|
||||
chapter, all and default (=all). You can
|
||||
prefix the category with a "-" to exclude
|
||||
it. See [1] for descriptions of the
|
||||
categories. E.g. --sponsorblock-mark
|
||||
all,-preview
|
||||
[1] https://wiki.sponsor.ajay.app/w/Segment_Categories
|
||||
--sponsorblock-remove CATS SponsorBlock categories to be removed from
|
||||
the video file, separated by commas. If a
|
||||
|
|
@ -1087,7 +1152,7 @@ Make chapter entries for, or remove various segments (sponsor,
|
|||
(Alias: --no-allow-dynamic-mpd)
|
||||
--hls-split-discontinuity Split HLS playlists to different formats at
|
||||
discontinuities such as ad breaks
|
||||
--no-hls-split-discontinuity Do not split HLS playlists to different
|
||||
--no-hls-split-discontinuity Do not split HLS playlists into different
|
||||
formats at discontinuities such as ad breaks
|
||||
(default)
|
||||
--extractor-args IE_KEY:ARGS Pass ARGS arguments to the IE_KEY extractor.
|
||||
|
|
@ -1095,12 +1160,33 @@ Make chapter entries for, or remove various segments (sponsor,
|
|||
can use this option multiple times to give
|
||||
arguments for different extractors
|
||||
|
||||
## Preset Aliases:
|
||||
Predefined aliases for convenience and ease of use. Note that future
|
||||
versions of yt-dlp may add or adjust presets, but the existing preset
|
||||
names will not be changed or removed
|
||||
|
||||
-t mp3 -f 'ba[acodec^=mp3]/ba/b' -x --audio-format
|
||||
mp3
|
||||
|
||||
-t aac -f
|
||||
'ba[acodec^=aac]/ba[acodec^=mp4a.40.]/ba/b'
|
||||
-x --audio-format aac
|
||||
|
||||
-t mp4 --merge-output-format mp4 --remux-video mp4
|
||||
-S vcodec:h264,lang,quality,res,fps,hdr:12,a
|
||||
codec:aac
|
||||
|
||||
-t mkv --merge-output-format mkv --remux-video mkv
|
||||
|
||||
-t sleep --sleep-subtitles 5 --sleep-requests 0.75
|
||||
--sleep-interval 10 --max-sleep-interval 20
|
||||
|
||||
# CONFIGURATION
|
||||
|
||||
You can configure yt-dlp by placing any supported command line option to a configuration file. The configuration is loaded from the following locations:
|
||||
You can configure yt-dlp by placing any supported command line option in a configuration file. The configuration is loaded from the following locations:
|
||||
|
||||
1. **Main Configuration**:
|
||||
* The file given to `--config-location`
|
||||
* The file given to `--config-locations`
|
||||
1. **Portable Configuration**: (Recommended for portable installations)
|
||||
* If using a binary, `yt-dlp.conf` in the same directory as the binary
|
||||
* If running from source-code, `yt-dlp.conf` in the parent directory of `yt_dlp`
|
||||
|
|
@ -1125,15 +1211,15 @@ You can configure yt-dlp by placing any supported command line option to a confi
|
|||
* `/etc/yt-dlp/config`
|
||||
* `/etc/yt-dlp/config.txt`
|
||||
|
||||
E.g. with the following configuration file, yt-dlp will always extract the audio, not copy the mtime, use a proxy and save all videos under `YouTube` directory in your home directory:
|
||||
E.g. with the following configuration file, yt-dlp will always extract the audio, copy the mtime, use a proxy and save all videos under `YouTube` directory in your home directory:
|
||||
```
|
||||
# Lines starting with # are comments
|
||||
|
||||
# Always extract audio
|
||||
-x
|
||||
|
||||
# Do not copy the mtime
|
||||
--no-mtime
|
||||
# Copy the mtime
|
||||
--mtime
|
||||
|
||||
# Use this proxy
|
||||
--proxy 127.0.0.1:3128
|
||||
|
|
@ -1142,7 +1228,7 @@ E.g. with the following configuration file, yt-dlp will always extract the audio
|
|||
-o ~/YouTube/%(title)s.%(ext)s
|
||||
```
|
||||
|
||||
**Note**: Options in configuration file are just the same options aka switches used in regular command line calls; thus there **must be no whitespace** after `-` or `--`, e.g. `-o` or `--proxy` but not `- o` or `-- proxy`. They must also be quoted when necessary, as if it were a UNIX shell.
|
||||
**Note**: Options in a configuration file are just the same options aka switches used in regular command line calls; thus there **must be no whitespace** after `-` or `--`, e.g. `-o` or `--proxy` but not `- o` or `-- proxy`. They must also be quoted when necessary, as if it were a UNIX shell.
|
||||
|
||||
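As a quick illustration of the quoting rule above (the template and path here are arbitrary), a value containing spaces is quoted in the configuration file exactly as it would be in a shell:

```
# Quote values with spaces or special characters, as in a UNIX shell
-o "~/Videos/%(title)s - %(id)s.%(ext)s"
```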
You can use `--ignore-config` if you want to disable all configuration files for a particular yt-dlp run. If `--ignore-config` is found inside any configuration file, no further configuration will be loaded. For example, having the option in the portable configuration file prevents loading of home, user, and system configurations. Additionally, (for backward compatibility) if `--ignore-config` is found inside the system configuration file, the user configuration is not loaded.
|
||||
|
||||
|
|
@ -1176,13 +1262,13 @@ As an alternative to using the `.netrc` file, which has the disadvantage of keep
|
|||
|
||||
E.g. To use an encrypted `.netrc` file stored as `.authinfo.gpg`
|
||||
```
|
||||
yt-dlp --netrc-cmd 'gpg --decrypt ~/.authinfo.gpg' https://www.youtube.com/watch?v=BaW_jenozKc
|
||||
yt-dlp --netrc-cmd 'gpg --decrypt ~/.authinfo.gpg' 'https://www.youtube.com/watch?v=BaW_jenozKc'
|
||||
```
|
||||
|
||||
|
||||
### Notes about environment variables
|
||||
* Environment variables are normally specified as `${VARIABLE}`/`$VARIABLE` on UNIX and `%VARIABLE%` on Windows; but are always shown as `${VARIABLE}` in this documentation
|
||||
* yt-dlp also allow using UNIX-style variables on Windows for path-like options; e.g. `--output`, `--config-location`
|
||||
* yt-dlp also allows using UNIX-style variables on Windows for path-like options; e.g. `--output`, `--config-locations` (see the example below)
|
||||
* If unset, `${XDG_CONFIG_HOME}` defaults to `~/.config` and `${XDG_CACHE_HOME}` to `~/.cache`
|
||||
* On Windows, `~` points to `${HOME}` if present; or, `${USERPROFILE}` or `${HOMEDRIVE}${HOMEPATH}` otherwise
|
||||
* On Windows, `${USERPROFILE}` generally points to `C:\Users\<user name>` and `${APPDATA}` to `${USERPROFILE}\AppData\Roaming`
|
||||
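A minimal sketch of the UNIX-style variable support noted above (the output path is arbitrary); on Windows, `${USERPROFILE}` expands here the same way it would on UNIX:

```
yt-dlp -o "${USERPROFILE}/Videos/%(title)s.%(ext)s" "https://www.youtube.com/watch?v=BaW_jenozKc"
```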
|
|
@ -1263,7 +1349,7 @@ The available fields are:
|
|||
- `like_count` (numeric): Number of positive ratings of the video
|
||||
- `dislike_count` (numeric): Number of negative ratings of the video
|
||||
- `repost_count` (numeric): Number of reposts of the video
|
||||
- `average_rating` (numeric): Average rating give by users, the scale used depends on the webpage
|
||||
- `average_rating` (numeric): Average rating given by users, the scale used depends on the webpage
|
||||
- `comment_count` (numeric): Number of comments on the video (For some extractors, comments are only downloaded at the end, and so this field cannot be used)
|
||||
- `age_limit` (numeric): Age restriction for the video (years)
|
||||
- `live_status` (string): One of "not_live", "is_live", "is_upcoming", "was_live", "post_live" (was live, but VOD is not yet processed)
|
||||
|
|
@ -1290,10 +1376,11 @@ The available fields are:
|
|||
- `playlist_uploader_id` (string): Nickname or id of the playlist uploader
|
||||
- `playlist_channel` (string): Display name of the channel that uploaded the playlist
|
||||
- `playlist_channel_id` (string): Identifier of the channel that uploaded the playlist
|
||||
- `playlist_webpage_url` (string): URL of the playlist webpage
|
||||
- `webpage_url` (string): A URL to the video webpage which, if given to yt-dlp, should yield the same result again
|
||||
- `webpage_url_basename` (string): The basename of the webpage URL
|
||||
- `webpage_url_domain` (string): The domain of the webpage URL
|
||||
- `original_url` (string): The URL given by the user (or same as `webpage_url` for playlist entries)
|
||||
- `original_url` (string): The URL given by the user (or the same as `webpage_url` for playlist entries)
|
||||
- `categories` (list): List of categories the video belongs to
|
||||
- `tags` (list): List of tags assigned to the video
|
||||
- `cast` (list): List of cast members
|
||||
|
|
@ -1370,7 +1457,7 @@ Each aforementioned sequence when referenced in an output template will be repla
|
|||
|
||||
**Tip**: Look at the `-j` output to identify which fields are available for the particular URL
|
||||
|
||||
For numeric sequences you can use [numeric related formatting](https://docs.python.org/3/library/stdtypes.html#printf-style-string-formatting); e.g. `%(view_count)05d` will result in a string with view count padded with zeros up to 5 characters, like in `00042`.
|
||||
For numeric sequences, you can use [numeric related formatting](https://docs.python.org/3/library/stdtypes.html#printf-style-string-formatting); e.g. `%(view_count)05d` will result in a string with view count padded with zeros up to 5 characters, like in `00042`.
|
||||
|
||||
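A short illustration of the zero-padding described above; the resulting filename starts with the view count padded to 5 digits (the rest of the template is arbitrary):

```
$ yt-dlp -o "%(view_count)05d - %(title)s.%(ext)s" "https://www.youtube.com/watch?v=BaW_jenozKc"
```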
Output templates can also contain arbitrary hierarchical path, e.g. `-o "%(playlist)s/%(playlist_index)s - %(title)s.%(ext)s"` which will result in downloading each video in a directory corresponding to this path template. Any missing directory will be automatically created for you.
|
||||
|
||||
|
|
@ -1412,7 +1499,7 @@ $ yt-dlp -P "C:/MyVideos" -o "%(series)s/%(season_number)s - %(season)s/%(episod
|
|||
|
||||
# Download video as "C:\MyVideos\uploader\title.ext", subtitles as "C:\MyVideos\subs\uploader\title.ext"
|
||||
# and put all temporary files in "C:\MyVideos\tmp"
|
||||
$ yt-dlp -P "C:/MyVideos" -P "temp:tmp" -P "subtitle:subs" -o "%(uploader)s/%(title)s.%(ext)s" BaW_jenoz --write-subs
|
||||
$ yt-dlp -P "C:/MyVideos" -P "temp:tmp" -P "subtitle:subs" -o "%(uploader)s/%(title)s.%(ext)s" BaW_jenozKc --write-subs
|
||||
|
||||
# Download video as "C:\MyVideos\uploader\title.ext" and subtitles as "C:\MyVideos\uploader\subs\title.ext"
|
||||
$ yt-dlp -P "C:/MyVideos" -o "%(uploader)s/%(title)s.%(ext)s" -o "subtitle:%(uploader)s/subs/%(title)s.%(ext)s" BaW_jenozKc --write-subs
|
||||
|
|
@ -1522,7 +1609,7 @@ The available fields are:
|
|||
- `hasvid`: Gives priority to formats that have a video stream
|
||||
- `hasaud`: Gives priority to formats that have an audio stream
|
||||
- `ie_pref`: The format preference
|
||||
- `lang`: The language preference
|
||||
- `lang`: The language preference as determined by the extractor (e.g. original language preferred over audio description)
|
||||
- `quality`: The quality of the format
|
||||
- `source`: The preference of the source
|
||||
- `proto`: Protocol used for download (`https`/`ftps` > `http`/`ftp` > `m3u8_native`/`m3u8` > `http_dash_segments`> `websocket_frag` > `mms`/`rtsp` > `f4f`/`f4m`)
|
||||
|
|
@ -1551,9 +1638,9 @@ The available fields are:
|
|||
|
||||
All fields, unless specified otherwise, are sorted in descending order. To reverse this, prefix the field with a `+`. E.g. `+res` prefers format with the smallest resolution. Additionally, you can suffix a preferred value for the fields, separated by a `:`. E.g. `res:720` prefers larger videos, but no larger than 720p and the smallest video if there are no videos less than 720p. For `codec` and `ext`, you can provide two preferred values, the first for video and the second for audio. E.g. `+codec:avc:m4a` (equivalent to `+vcodec:avc,+acodec:m4a`) sets the video codec preference to `h264` > `h265` > `vp9` > `vp9.2` > `av01` > `vp8` > `h263` > `theora` and audio codec preference to `mp4a` > `aac` > `vorbis` > `opus` > `mp3` > `ac3` > `dts`. You can also make the sorting prefer the nearest values to the provided by using `~` as the delimiter. E.g. `filesize~1G` prefers the format with filesize closest to 1 GiB.
|
||||
|
||||
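For example, the two commands below are equivalent ways of writing the codec preference described above (any URL works; this is only a sketch):

```
$ yt-dlp -S "+codec:avc:m4a" "https://www.youtube.com/watch?v=BaW_jenozKc"
$ yt-dlp -S "+vcodec:avc,+acodec:m4a" "https://www.youtube.com/watch?v=BaW_jenozKc"
```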
The fields `hasvid` and `ie_pref` are always given highest priority in sorting, irrespective of the user-defined order. This behavior can be changed by using `--format-sort-force`. Apart from these, the default order used is: `lang,quality,res,fps,hdr:12,vcodec:vp9.2,channels,acodec,size,br,asr,proto,ext,hasaud,source,id`. The extractors may override this default order, but they cannot override the user-provided order.
|
||||
The fields `hasvid` and `ie_pref` are always given highest priority in sorting, irrespective of the user-defined order. This behavior can be changed by using `--format-sort-force`. Apart from these, the default order used is: `lang,quality,res,fps,hdr:12,vcodec,channels,acodec,size,br,asr,proto,ext,hasaud,source,id`. The extractors may override this default order, but they cannot override the user-provided order.
|
||||
|
||||
Note that the default has `vcodec:vp9.2`; i.e. `av1` is not preferred. Similarly, the default for hdr is `hdr:12`; i.e. Dolby Vision is not preferred. These choices are made since DV and AV1 formats are not yet fully compatible with most devices. This may be changed in the future as more devices become capable of smoothly playing back these formats.
|
||||
Note that the default for hdr is `hdr:12`; i.e. Dolby Vision is not preferred. This choice was made since DV formats are not yet fully compatible with most devices. This may be changed in the future.
|
||||
|
||||
If your format selector is `worst`, the last item is selected after sorting. This means it will select the format that is worst in all respects. Most of the time, what you actually want is the video with the smallest filesize instead. So it is generally better to use `-f best -S +size,+br,+res,+fps`.
|
||||
|
||||
|
|
@ -1630,11 +1717,11 @@ $ yt-dlp -S "res:480"
|
|||
# or the worst video (that also has audio) if there is no video under 50 MB
|
||||
$ yt-dlp -f "b[filesize<50M] / w"
|
||||
|
||||
# Download largest video (that also has audio) but no bigger than 50 MB,
|
||||
# Download the largest video (that also has audio) but no bigger than 50 MB,
|
||||
# or the smallest video (that also has audio) if there is no video under 50 MB
|
||||
$ yt-dlp -f "b" -S "filesize:50M"
|
||||
|
||||
# Download best video (that also has audio) that is closest in size to 50 MB
|
||||
# Download the best video (that also has audio) that is closest in size to 50 MB
|
||||
$ yt-dlp -f "b" -S "filesize~50M"
|
||||
|
||||
|
||||
|
|
@ -1690,7 +1777,7 @@ The metadata obtained by the extractors can be modified by using `--parse-metada
|
|||
|
||||
The general syntax of `--parse-metadata FROM:TO` is to give the name of a field or an [output template](#output-template) to extract data from, and the format to interpret it as, separated by a colon `:`. Either a [Python regular expression](https://docs.python.org/3/library/re.html#regular-expression-syntax) with named capture groups, a single field name, or a similar syntax to the [output template](#output-template) (only `%(field)s` formatting is supported) can be used for `TO`. The option can be used multiple times to parse and modify various fields.
|
||||
|
||||
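For instance, a title of the form "Artist - Title" can be split into separate fields by using the output-template style for `TO` (a sketch; the field names are only illustrative):

```
# Interpret the title as "Artist - Title", storing the first part in the "artist" field
$ yt-dlp --parse-metadata "title:%(artist)s - %(title)s" "https://www.youtube.com/watch?v=BaW_jenozKc"
```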
Note that these options preserve their relative order, allowing replacements to be made in parsed fields and viceversa. Also, any field thus created can be used in the [output template](#output-template) and will also affect the media file's metadata added when using `--embed-metadata`.
|
||||
Note that these options preserve their relative order, allowing replacements to be made in parsed fields and vice versa. Also, any field thus created can be used in the [output template](#output-template) and will also affect the media file's metadata added when using `--embed-metadata`.
|
||||
|
||||
This option also has a few special uses:
|
||||
|
||||
|
|
@ -1756,25 +1843,41 @@ $ yt-dlp --replace-in-metadata "title,uploader" "[ _]" "-"
|
|||
|
||||
# EXTRACTOR ARGUMENTS
|
||||
|
||||
Some extractors accept additional arguments which can be passed using `--extractor-args KEY:ARGS`. `ARGS` is a `;` (semicolon) separated string of `ARG=VAL1,VAL2`. E.g. `--extractor-args "youtube:player-client=android_embedded,web;formats=incomplete" --extractor-args "funimation:version=uncut"`
|
||||
Some extractors accept additional arguments which can be passed using `--extractor-args KEY:ARGS`. `ARGS` is a `;` (semicolon) separated string of `ARG=VAL1,VAL2`. E.g. `--extractor-args "youtube:player-client=tv,mweb;formats=incomplete" --extractor-args "twitter:api=syndication"`
|
||||
|
||||
Note: In CLI, `ARG` can use `-` instead of `_`; e.g. `youtube:player-client` becomes `youtube:player_client`
|
||||
|
||||
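Putting this together, a single invocation can pass several arguments to one extractor and repeat the option for another (a sketch reusing the values above; `URL` is a placeholder):

```
$ yt-dlp --extractor-args "youtube:player-client=tv,mweb;formats=incomplete" \
         --extractor-args "twitter:api=syndication" URL
```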
The following extractors use this feature:
|
||||
|
||||
#### youtube
|
||||
* `lang`: Prefer translated metadata (`title`, `description` etc) of this language code (case-sensitive). By default, the video primary language metadata is preferred, with a fallback to `en` translated. See [youtube.py](https://github.com/yt-dlp/yt-dlp/blob/c26f9b991a0681fd3ea548d535919cec1fbbd430/yt_dlp/extractor/youtube.py#L381-L390) for list of supported content language codes
|
||||
* `lang`: Prefer translated metadata (`title`, `description` etc) of this language code (case-sensitive). By default, the video primary language metadata is preferred, with a fallback to `en` translated. See [youtube/_base.py](https://github.com/yt-dlp/yt-dlp/blob/415b4c9f955b1a0391204bd24a7132590e7b3bdb/yt_dlp/extractor/youtube/_base.py#L402-L409) for the list of supported content language codes
|
||||
* `skip`: One or more of `hls`, `dash` or `translated_subs` to skip extraction of the m3u8 manifests, dash manifests and [auto-translated subtitles](https://github.com/yt-dlp/yt-dlp/issues/4090#issuecomment-1158102032) respectively
|
||||
* `player_client`: Clients to extract video data from. The main clients are `web`, `ios` and `android`, with variants `_music`, `_embedded`, `_embedscreen`, `_creator` (e.g. `web_embedded`); and `mediaconnect`, `mweb`, `mweb_embedscreen` and `tv_embedded` (agegate bypass) with no variants. By default, `ios,web` is used, but `tv_embedded` and `creator` variants are added as required for age-gated videos. Similarly, the music variants are added for `music.youtube.com` urls. The `android` clients will always be given lowest priority since their formats are broken. You can use `all` to use all the clients, and `default` for the default clients.
|
||||
* `player_skip`: Skip some network requests that are generally needed for robust extraction. One or more of `configs` (skip client configs), `webpage` (skip initial webpage), `js` (skip js player). While these options can help reduce the number of requests needed or avoid some rate-limiting, they could cause some issues. See [#860](https://github.com/yt-dlp/yt-dlp/pull/860) for more details
|
||||
* `player_client`: Clients to extract video data from. The currently available clients are `web`, `web_safari`, `web_embedded`, `web_music`, `web_creator`, `mweb`, `ios`, `android`, `android_sdkless`, `android_vr`, `tv`, `tv_simply`, `tv_downgraded`, and `tv_embedded`. By default, `tv,android_sdkless,web` is used. If no JavaScript runtime is available, then `android_sdkless,web_safari,web` is used. If logged-in cookies are passed to yt-dlp, then `tv_downgraded,web_safari,web` is used for free accounts and `tv_downgraded,web_creator,web` is used for premium accounts. The `web_music` client is added for `music.youtube.com` URLs when logged-in cookies are used. The `web_embedded` client is added for age-restricted videos but only works if the video is embeddable. The `tv_embedded` and `web_creator` clients are added for age-restricted videos if account age-verification is required. Some clients, such as `web` and `web_music`, require a `po_token` for their formats to be downloadable. Some clients, such as `web_creator`, will only work with authentication. Not all clients support authentication via cookies. You can use `default` for the default clients, or you can use `all` for all clients (not recommended). You can prefix a client with `-` to exclude it, e.g. `youtube:player_client=default,-ios`
|
||||
* `player_skip`: Skip some network requests that are generally needed for robust extraction. One or more of `configs` (skip client configs), `webpage` (skip initial webpage), `js` (skip js player), `initial_data` (skip initial data/next ep request). While these options can help reduce the number of requests needed or avoid some rate-limiting, they could cause issues such as missing formats or metadata. See [#860](https://github.com/yt-dlp/yt-dlp/pull/860) and [#12826](https://github.com/yt-dlp/yt-dlp/issues/12826) for more details
|
||||
* `webpage_skip`: Skip extraction of embedded webpage data. One or both of `player_response`, `initial_data`. These options are for testing purposes and don't skip any network requests
|
||||
* `player_params`: YouTube player parameters to use for player requests. Will overwrite any default ones set by yt-dlp.
|
||||
* `player_js_variant`: The player javascript variant to use for n/sig deciphering. The known variants are: `main`, `tcc`, `tce`, `es5`, `es6`, `tv`, `tv_es6`, `phone`, `tablet`. The default is `main`, and the others are for debugging purposes. You can use `actual` to go with what is prescribed by the site
|
||||
* `player_js_version`: The player javascript version to use for n/sig deciphering, in the format of `signature_timestamp@hash` (e.g. `20348@0004de42`). The default is to use what is prescribed by the site, and can be selected with `actual`
|
||||
* `comment_sort`: `top` or `new` (default) - choose comment sorting mode (on YouTube's side)
|
||||
* `max_comments`: Limit the amount of comments to gather. Comma-separated list of integers representing `max-comments,max-parents,max-replies,max-replies-per-thread`. Default is `all,all,all,all`
|
||||
* E.g. `all,all,1000,10` will get a maximum of 1000 replies total, with up to 10 replies per thread. `1000,all,100` will get a maximum of 1000 comments, with a maximum of 100 replies total
|
||||
* `formats`: Change the types of formats to return. `dashy` (convert HTTP to DASH), `duplicate` (identical content but different URLs or protocol; includes `dashy`), `incomplete` (cannot be downloaded completely - live dash and post-live m3u8)
|
||||
* `formats`: Change the types of formats to return. `dashy` (convert HTTP to DASH), `duplicate` (identical content but different URLs or protocol; includes `dashy`), `incomplete` (cannot be downloaded completely - live dash and post-live m3u8), `missing_pot` (include formats that require a PO Token but are missing one)
|
||||
* `innertube_host`: Innertube API host to use for all API requests; e.g. `studio.youtube.com`, `youtubei.googleapis.com`. Note that cookies exported from one subdomain will not work on others
|
||||
* `innertube_key`: Innertube API key to use for all API requests
|
||||
* `innertube_key`: Innertube API key to use for all API requests. By default, no API key is used
|
||||
* `raise_incomplete_data`: `Incomplete Data Received` raises an error instead of reporting a warning
|
||||
* `data_sync_id`: Overrides the account Data Sync ID used in Innertube API requests. This may be needed if you are using an account with `youtube:player_skip=webpage,configs` or `youtubetab:skip=webpage`
|
||||
* `visitor_data`: Overrides the Visitor Data used in Innertube API requests. This should be used with `player_skip=webpage,configs` and without cookies. Note: this may have adverse effects if used improperly. If a session from a browser is wanted, you should pass cookies instead (which contain the Visitor ID)
|
||||
* `po_token`: Proof of Origin (PO) Token(s) to use. Comma-separated list of PO Tokens in the format `CLIENT.CONTEXT+PO_TOKEN`, e.g. `youtube:po_token=web.gvs+XXX,web.player+XXX,web_safari.gvs+YYY`. Context can be any of `gvs` (Google Video Server URLs), `player` (Innertube player request) or `subs` (Subtitles)
|
||||
* `pot_trace`: Enable debug logging for PO Token fetching. Either `true` or `false` (default)
|
||||
* `fetch_pot`: Policy to use for fetching a PO Token from providers. One of `always` (always try to fetch a PO Token regardless of whether the client requires one for the given context), `never` (never fetch a PO Token), or `auto` (default; only fetch a PO Token if the client requires one for the given context)
|
||||
* `playback_wait`: Duration (in seconds) to wait between the extraction and download stages in order to ensure the formats are available. The default is `6` seconds
|
||||
* `jsc_trace`: Enable debug logging for JS Challenge fetching. Either `true` or `false` (default)
|
||||
|
||||
#### youtube-ejs
|
||||
* `jitless`: Run supported JavaScript engines in JIT-less mode. Supported runtimes are `deno`, `node` and `bun`. Provides better security at the cost of performance/speed. Do note that `node` and `bun` are still considered insecure. Either `true` or `false` (default)
|
||||
|
||||
#### youtubepot-webpo
|
||||
* `bind_to_visitor_id`: Whether to use the Visitor ID instead of Visitor Data for caching WebPO tokens. Either `true` (default) or `false`
|
||||
|
||||
#### youtubetab (YouTube playlists, channels, feeds, etc.)
|
||||
* `skip`: One or more of `webpage` (skip initial webpage download), `authcheck` (allow the download of playlists requiring authentication when no initial webpage is downloaded. This may cause unwanted behavior, see [#1122](https://github.com/yt-dlp/yt-dlp/pull/1122) for more details)
|
||||
|
|
@ -1786,20 +1889,11 @@ The following extractors use this feature:
|
|||
* `key_query`: Passthrough the master m3u8 URL query to its HLS AES-128 decryption key URI if no value is provided, or else apply the query string given as `key_query=VALUE`. Note that this will have no effect if the key URI is provided via the `hls_key` extractor-arg. Does not apply to ffmpeg
|
||||
* `hls_key`: An HLS AES-128 key URI *or* key (as hex), and optionally the IV (as hex), in the form of `(URI|KEY)[,IV]`; e.g. `generic:hls_key=ABCDEF1234567980,0xFEDCBA0987654321`. Passing any of these values will force usage of the native HLS downloader and override the corresponding values found in the m3u8 playlist
|
||||
* `is_live`: Bypass live HLS detection and manually set `live_status` - a value of `false` will set `not_live`, any other value (or no value) will set `is_live`
|
||||
|
||||
#### funimation
|
||||
* `language`: Audio languages to extract, e.g. `funimation:language=english,japanese`
|
||||
* `version`: The video version to extract - `uncut` or `simulcast`
|
||||
|
||||
#### crunchyrollbeta (Crunchyroll)
|
||||
* `hardsub`: One or more hardsub versions to extract (in order of preference), or `all` (default: `None` = no hardsubs will be extracted), e.g. `crunchyrollbeta:hardsub=en-US,de-DE`
|
||||
* `impersonate`: Target(s) to try and impersonate with the initial webpage request; e.g. `generic:impersonate=safari,chrome-110`. Use `generic:impersonate` to impersonate any available target, and use `generic:impersonate=false` to disable impersonation (default)
|
||||
|
||||
#### vikichannel
|
||||
* `video_types`: Types of videos to download - one or more of `episodes`, `movies`, `clips`, `trailers`
|
||||
|
||||
#### niconico
|
||||
* `segment_duration`: Segment duration in milliseconds for HLS-DMC formats. Use it at your own risk since this feature **may result in your account termination.**
|
||||
|
||||
#### youtubewebarchive
|
||||
* `check_all`: Try to check more at the cost of more requests. One or more of `thumbnails`, `captures`
|
||||
|
||||
|
|
@ -1811,6 +1905,9 @@ The following extractors use this feature:
|
|||
* `vcodec`: vcodec to ignore - one or more of `h264`, `h265`, `dvh265`
|
||||
* `dr`: dynamic range to ignore - one or more of `sdr`, `hdr10`, `dv`
|
||||
|
||||
#### instagram
|
||||
* `app_id`: The value of the `X-IG-App-ID` header used for API requests. Default is the web app ID, `936619743392459`
|
||||
|
||||
#### niconicochannelplus
|
||||
* `max_comments`: Maximum number of comments to extract - default is `120`
|
||||
|
||||
|
|
@ -1851,7 +1948,7 @@ The following extractors use this feature:
|
|||
* `cdn`: One or more CDN IDs to use with the API call for stream URLs, e.g. `gcp_cdn`, `gs_cdn_pc_app`, `gs_cdn_mobile_web`, `gs_cdn_pc_web`
|
||||
|
||||
#### soundcloud
|
||||
* `formats`: Formats to request from the API. Requested values should be in the format of `{protocol}_{extension}` (omitting the bitrate), e.g. `hls_opus,http_aac`. The `*` character functions as a wildcard, e.g. `*_mp3`, and can be passed by itself to request all formats. Known protocols include `http`, `hls` and `hls-aes`; known extensions include `aac`, `opus` and `mp3`. Original `download` formats are always extracted. Default is `http_aac,hls_aac,http_opus,hls_opus,http_mp3,hls_mp3`
|
||||
* `formats`: Formats to request from the API. Requested values should be in the format of `{protocol}_{codec}`, e.g. `hls_opus,http_aac`. The `*` character functions as a wildcard, e.g. `*_mp3`, and can be passed by itself to request all formats. Known protocols include `http`, `hls` and `hls-aes`; known codecs include `aac`, `opus` and `mp3`. Original `download` formats are always extracted. Default is `http_aac,hls_aac,http_opus,hls_opus,http_mp3,hls_mp3`
|
||||
|
||||
#### orfon (orf:on)
|
||||
* `prefer_segments_playlist`: Prefer a playlist of program segments instead of a single complete video when available. If individual segments are desired, use `--concat-playlist never --extractor-args "orfon:prefer_segments_playlist"`
|
||||
|
|
@ -1859,6 +1956,16 @@ The following extractors use this feature:
|
|||
#### bilibili
|
||||
* `prefer_multi_flv`: Prefer extracting flv formats over mp4 for older videos that still provide legacy formats
|
||||
|
||||
#### sonylivseries
|
||||
* `sort_order`: Episode sort order for series extraction - one of `asc` (ascending, oldest first) or `desc` (descending, newest first). Default is `asc`
|
||||
|
||||
#### tver
|
||||
* `backend`: Backend API to use for extraction - one of `streaks` (default) or `brightcove` (deprecated)
|
||||
|
||||
#### vimeo
|
||||
* `client`: Client to extract video data from. The currently available clients are `android`, `ios`, and `web`. Only one client can be used. The `web` client is used by default. The `web` client only works with account cookies or login credentials. The `android` and `ios` clients only work with previously cached OAuth tokens
|
||||
* `original_format_policy`: Policy for when to try extracting original formats. One of `always`, `never`, or `auto`. The default `auto` policy tries to avoid exceeding the web client's API rate-limit by only making an extra request when Vimeo publicizes the video's downloadability
|
||||
|
||||
**Note**: These options may be changed/removed in the future without concern for backward compatibility
|
||||
|
||||
<!-- MANPAGE: MOVE "INSTALLATION" SECTION HERE -->
|
||||
|
|
@ -1885,6 +1992,7 @@ In other words, the file structure on the disk looks something like:
|
|||
myplugin.py
|
||||
|
||||
yt-dlp looks for these `yt_dlp_plugins` namespace folders in many locations (see below) and loads in plugins from **all** of them.
|
||||
Set the environment variable `YTDLP_NO_PLUGINS` to something nonempty to disable loading plugins entirely.
|
||||
|
||||
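For example, to disable plugin loading for a single run (any non-empty value works):

```
$ YTDLP_NO_PLUGINS=1 yt-dlp -v "https://www.youtube.com/watch?v=BaW_jenozKc"
```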
See the [wiki for some known plugins](https://github.com/yt-dlp/yt-dlp/wiki/Plugins)
|
||||
|
||||
|
|
@ -1912,7 +2020,7 @@ Plugins can be installed using various methods and locations.
|
|||
* Plugin packages can be installed and managed using `pip`. See [yt-dlp-sample-plugins](https://github.com/yt-dlp/yt-dlp-sample-plugins) for an example.
|
||||
* Note: plugin files between plugin packages installed with pip must have unique filenames.
|
||||
* Any path in `PYTHONPATH` is searched for the `yt_dlp_plugins` namespace folder (see the sketch below).
|
||||
* Note: This does not apply for Pyinstaller/py2exe builds.
|
||||
* Note: This does not apply for Pyinstaller builds.
|
||||
|
||||
|
||||
`.zip`, `.egg` and `.whl` archives containing a `yt_dlp_plugins` namespace folder in their root are also supported as plugin packages.
|
||||
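A sketch of the `PYTHONPATH` method mentioned in the list above; the directory layout is hypothetical and mirrors the plugin file structure shown earlier:

```
# Hypothetical layout: /opt/my-plugins/yt_dlp_plugins/extractor/myplugin.py
# Adding the parent directory to PYTHONPATH lets yt-dlp discover the plugin
$ PYTHONPATH=/opt/my-plugins yt-dlp -v "https://www.youtube.com/watch?v=BaW_jenozKc"
```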
|
|
@ -2141,12 +2249,11 @@ with yt_dlp.YoutubeDL(ydl_opts) as ydl:
|
|||
|
||||
* **[Format Sorting](#sorting-formats)**: The default format sorting options have been changed so that higher resolution and better codecs will be now preferred instead of simply using larger bitrate. Furthermore, you can now specify the sort order using `-S`. This allows for much easier format selection than what is possible by simply using `--format` ([examples](#format-selection-examples))
|
||||
|
||||
* **Merged with animelover1984/youtube-dl**: You get most of the features and improvements from [animelover1984/youtube-dl](https://github.com/animelover1984/youtube-dl) including `--write-comments`, `BiliBiliSearch`, `BilibiliChannel`, Embedding thumbnail in mp4/ogg/opus, playlist infojson etc. Note that NicoNico livestreams are not available. See [#31](https://github.com/yt-dlp/yt-dlp/pull/31) for details.
|
||||
* **Merged with animelover1984/youtube-dl**: You get most of the features and improvements from [animelover1984/youtube-dl](https://github.com/animelover1984/youtube-dl) including `--write-comments`, `BiliBiliSearch`, `BilibiliChannel`, Embedding thumbnail in mp4/ogg/opus, playlist infojson etc. See [#31](https://github.com/yt-dlp/yt-dlp/pull/31) for details.
|
||||
|
||||
* **YouTube improvements**:
|
||||
* Supports Clips, Stories (`ytstories:<channel UCID>`), Search (including filters)**\***, YouTube Music Search, Channel-specific search, Search prefixes (`ytsearch:`, `ytsearchdate:`)**\***, Mixes, and Feeds (`:ytfav`, `:ytwatchlater`, `:ytsubs`, `:ythistory`, `:ytrec`, `:ytnotif`)
|
||||
* Fix for [n-sig based throttling](https://github.com/ytdl-org/youtube-dl/issues/29326) **\***
|
||||
* Supports some (but not all) age-gated content without cookies
|
||||
* Download livestreams from the start using `--live-from-start` (*experimental*)
|
||||
* Channel URLs download all uploads of the channel, including shorts and live
|
||||
|
||||
|
|
@ -2172,9 +2279,9 @@ with yt_dlp.YoutubeDL(ydl_opts) as ydl:
|
|||
|
||||
* **Output template improvements**: Output templates can now have date-time formatting, numeric offsets, object traversal etc. See [output template](#output-template) for details. Even more advanced operations can also be done with the help of `--parse-metadata` and `--replace-in-metadata`
|
||||
|
||||
* **Other new options**: Many new options have been added such as `--alias`, `--print`, `--concat-playlist`, `--wait-for-video`, `--retry-sleep`, `--sleep-requests`, `--convert-thumbnails`, `--force-download-archive`, `--force-overwrites`, `--break-match-filter` etc
|
||||
* **Other new options**: Many new options have been added such as `--alias`, `--print`, `--concat-playlist`, `--wait-for-video`, `--retry-sleep`, `--sleep-requests`, `--convert-thumbnails`, `--force-download-archive`, `--force-overwrites`, `--break-match-filters` etc
|
||||
|
||||
* **Improvements**: Regex and other operators in `--format`/`--match-filter`, multiple `--postprocessor-args` and `--downloader-args`, faster archive checking, more [format selection options](#format-selection), merge multi-video/audio, multiple `--config-locations`, `--exec` at different stages, etc
|
||||
* **Improvements**: Regex and other operators in `--format`/`--match-filters`, multiple `--postprocessor-args` and `--downloader-args`, faster archive checking, more [format selection options](#format-selection), merge multi-video/audio, multiple `--config-locations`, `--exec` at different stages, etc
|
||||
|
||||
* **Plugins**: Extractors and PostProcessors can be loaded from an external file. See [plugins](#plugins) for details
|
||||
|
||||
|
|
@ -2190,12 +2297,12 @@ Features marked with a **\*** have been back-ported to youtube-dl
|
|||
|
||||
Some of yt-dlp's default options are different from that of youtube-dl and youtube-dlc:
|
||||
|
||||
* yt-dlp supports only [Python 3.8+](## "Windows 7"), and *may* remove support for more versions as they [become EOL](https://devguide.python.org/versions/#python-release-cycle); while [youtube-dl still supports Python 2.6+ and 3.2+](https://github.com/ytdl-org/youtube-dl/issues/30568#issue-1118238743)
|
||||
* yt-dlp supports only [Python 3.10+](## "Windows 8"), and will remove support for more versions as they [become EOL](https://devguide.python.org/versions/#python-release-cycle); while [youtube-dl still supports Python 2.6+ and 3.2+](https://github.com/ytdl-org/youtube-dl/issues/30568#issue-1118238743)
|
||||
* The options `--auto-number` (`-A`), `--title` (`-t`) and `--literal` (`-l`), no longer work. See [removed options](#Removed) for details
|
||||
* `avconv` is not supported as an alternative to `ffmpeg`
|
||||
* yt-dlp stores config files in slightly different locations to youtube-dl. See [CONFIGURATION](#configuration) for a list of correct locations
|
||||
* The default [output template](#output-template) is `%(title)s [%(id)s].%(ext)s`. There is no real reason for this change. This was changed before yt-dlp was ever made public and now there are no plans to change it back to `%(title)s-%(id)s.%(ext)s`. Instead, you may use `--compat-options filename`
|
||||
* The default [format sorting](#sorting-formats) is different from youtube-dl and prefers higher resolution and better codecs rather than higher bitrates. You can use the `--format-sort` option to change this to any order you prefer, or use `--compat-options format-sort` to use youtube-dl's sorting order
|
||||
* The default [format sorting](#sorting-formats) is different from youtube-dl and prefers higher resolution and better codecs rather than higher bitrates. You can use the `--format-sort` option to change this to any order you prefer, or use `--compat-options format-sort` to use youtube-dl's sorting order. Older versions of yt-dlp preferred VP9 due to its broader compatibility; you can use `--compat-options prefer-vp9-sort` to revert to that format sorting preference. These two compat options cannot be used together
|
||||
* The default format selector is `bv*+ba/b`. This means that if a combined video + audio format that is better than the best video-only format is found, the former will be preferred. Use `-f bv+ba/b` or `--compat-options format-spec` to revert this
|
||||
* Unlike youtube-dlc, yt-dlp does not allow merging multiple audio/video streams into one file by default (since this conflicts with the use of `-f bv*+ba`). If needed, this feature must be enabled using `--audio-multistreams` and `--video-multistreams`. You can also use `--compat-options multistreams` to enable both
|
||||
* `--no-abort-on-error` is enabled by default. Use `--abort-on-error` or `--compat-options abort-on-error` to abort on errors instead
|
||||
|
|
@ -2207,7 +2314,7 @@ Some of yt-dlp's default options are different from that of youtube-dl and youtu
|
|||
* Live chats (if available) are considered as subtitles. Use `--sub-langs all,-live_chat` to download all subtitles except live chat. You can also use `--compat-options no-live-chat` to prevent any live chat/danmaku from downloading
|
||||
* YouTube channel URLs download all uploads of the channel. To download only the videos in a specific tab, pass the tab's URL. If the channel does not show the requested tab, an error will be raised. Also, `/live` URLs raise an error if there are no live videos instead of silently downloading the entire channel. You may use `--compat-options no-youtube-channel-redirect` to revert all these redirections
|
||||
* Unavailable videos are also listed for YouTube playlists. Use `--compat-options no-youtube-unavailable-videos` to remove this
|
||||
* The upload dates extracted from YouTube are in UTC [when available](https://github.com/yt-dlp/yt-dlp/blob/89e4d86171c7b7c997c77d4714542e0383bf0db0/yt_dlp/extractor/youtube.py#L3898-L3900). Use `--compat-options no-youtube-prefer-utc-upload-date` to prefer the non-UTC upload date.
|
||||
* The upload dates extracted from YouTube are in UTC.
|
||||
* If `ffmpeg` is used as the downloader, the downloading and merging of formats happen in a single step when possible. Use `--compat-options no-direct-merge` to revert this
|
||||
* Thumbnail embedding in `mp4` is done with mutagen if possible. Use `--compat-options embed-thumbnail-atomicparsley` to force the use of AtomicParsley instead
|
||||
* Some internal metadata such as filenames are removed by default from the infojson. Use `--no-clean-infojson` or `--compat-options no-clean-infojson` to revert this
|
||||
|
|
@ -2215,19 +2322,22 @@ Some of yt-dlp's default options are different from that of youtube-dl and youtu
|
|||
* `certifi` will be used for SSL root certificates, if installed. If you want to use system certificates (e.g. self-signed), use `--compat-options no-certifi`
|
||||
* yt-dlp's sanitization of invalid characters in filenames is different/smarter than in youtube-dl. You can use `--compat-options filename-sanitization` to revert to youtube-dl's behavior
|
||||
* ~~yt-dlp tries to parse the external downloader outputs into the standard progress output if possible (Currently implemented: [aria2c](https://github.com/yt-dlp/yt-dlp/issues/5931)). You can use `--compat-options no-external-downloader-progress` to get the downloader output as-is~~
|
||||
* yt-dlp versions between 2021.09.01 and 2023.01.02 applies `--match-filter` to nested playlists. This was an unintentional side-effect of [8f18ac](https://github.com/yt-dlp/yt-dlp/commit/8f18aca8717bb0dd49054555af8d386e5eda3a88) and is fixed in [d7b460](https://github.com/yt-dlp/yt-dlp/commit/d7b460d0e5fc710950582baed2e3fc616ed98a80). Use `--compat-options playlist-match-filter` to revert this
|
||||
* yt-dlp versions between 2021.09.01 and 2023.01.02 applied `--match-filters` to nested playlists. This was an unintentional side-effect of [8f18ac](https://github.com/yt-dlp/yt-dlp/commit/8f18aca8717bb0dd49054555af8d386e5eda3a88) and is fixed in [d7b460](https://github.com/yt-dlp/yt-dlp/commit/d7b460d0e5fc710950582baed2e3fc616ed98a80). Use `--compat-options playlist-match-filter` to revert this
|
||||
* yt-dlp versions between 2021.11.10 and 2023.06.21 estimated `filesize_approx` values for fragmented/manifest formats. This was added for convenience in [f2fe69](https://github.com/yt-dlp/yt-dlp/commit/f2fe69c7b0d208bdb1f6292b4ae92bc1e1a7444a), but was reverted in [0dff8e](https://github.com/yt-dlp/yt-dlp/commit/0dff8e4d1e6e9fb938f4256ea9af7d81f42fd54f) due to the potentially extreme inaccuracy of the estimated values. Use `--compat-options manifest-filesize-approx` to keep extracting the estimated values
|
||||
* yt-dlp uses modern http client backends such as `requests`. Use `--compat-options prefer-legacy-http-handler` to use the legacy http handler (`urllib`) for standard http requests.
|
||||
* The sub-modules `swfinterp` and `casefold` are removed.
|
||||
* Passing `--simulate` (or calling `extract_info` with `download=False`) no longer alters the default format selection. See [#9843](https://github.com/yt-dlp/yt-dlp/issues/9843) for details.
|
||||
* yt-dlp no longer applies the server modified time to downloaded files by default. Use `--mtime` or `--compat-options mtime-by-default` to revert this.
|
||||
|
||||
For ease of use, a few more compat options are available:
|
||||
|
||||
* `--compat-options all`: Use all compat options (Do NOT use)
|
||||
* `--compat-options youtube-dl`: Same as `--compat-options all,-multistreams,-playlist-match-filter,-manifest-filesize-approx`
|
||||
* `--compat-options youtube-dlc`: Same as `--compat-options all,-no-live-chat,-no-youtube-channel-redirect,-playlist-match-filter,-manifest-filesize-approx`
|
||||
* `--compat-options 2021`: Same as `--compat-options 2022,no-certifi,filename-sanitization,no-youtube-prefer-utc-upload-date`
|
||||
* `--compat-options all`: Use all compat options (**Do NOT use this!**)
|
||||
* `--compat-options youtube-dl`: Same as `--compat-options all,-multistreams,-playlist-match-filter,-manifest-filesize-approx,-allow-unsafe-ext,-prefer-vp9-sort`
|
||||
* `--compat-options youtube-dlc`: Same as `--compat-options all,-no-live-chat,-no-youtube-channel-redirect,-playlist-match-filter,-manifest-filesize-approx,-allow-unsafe-ext,-prefer-vp9-sort`
|
||||
* `--compat-options 2021`: Same as `--compat-options 2022,no-certifi,filename-sanitization`
|
||||
* `--compat-options 2022`: Same as `--compat-options 2023,playlist-match-filter,no-external-downloader-progress,prefer-legacy-http-handler,manifest-filesize-approx`
|
||||
* `--compat-options 2023`: Currently does nothing. Use this to enable all future compat options
|
||||
* `--compat-options 2023`: Same as `--compat-options 2024,prefer-vp9-sort`
|
||||
* `--compat-options 2024`: Same as `--compat-options mtime-by-default`. Use this to enable all future compat options
|
||||
|
||||
The following compat options restore vulnerable behavior from before security patches:
|
||||
|
||||
|
|
@ -2260,13 +2370,13 @@ While these options are redundant, they are still expected to be used due to the
|
|||
--get-thumbnail --print thumbnail
|
||||
-e, --get-title --print title
|
||||
-g, --get-url --print urls
|
||||
--match-title REGEX --match-filter "title ~= (?i)REGEX"
|
||||
--reject-title REGEX --match-filter "title !~= (?i)REGEX"
|
||||
--min-views COUNT --match-filter "view_count >=? COUNT"
|
||||
--max-views COUNT --match-filter "view_count <=? COUNT"
|
||||
--break-on-reject Use --break-match-filter
|
||||
--user-agent UA --add-header "User-Agent:UA"
|
||||
--referer URL --add-header "Referer:URL"
|
||||
--match-title REGEX --match-filters "title ~= (?i)REGEX"
|
||||
--reject-title REGEX --match-filters "title !~= (?i)REGEX"
|
||||
--min-views COUNT --match-filters "view_count >=? COUNT"
|
||||
--max-views COUNT --match-filters "view_count <=? COUNT"
|
||||
--break-on-reject Use --break-match-filters
|
||||
--user-agent UA --add-headers "User-Agent:UA"
|
||||
--referer URL --add-headers "Referer:URL"
|
||||
--playlist-start NUMBER -I NUMBER:
|
||||
--playlist-end NUMBER -I :NUMBER
|
||||
--playlist-reverse -I ::-1
|
||||
|
|
@ -2289,11 +2399,7 @@ While these options still work, their use is not recommended since there are oth
|
|||
--hls-prefer-native --downloader "m3u8:native"
|
||||
--hls-prefer-ffmpeg --downloader "m3u8:ffmpeg"
|
||||
--list-formats-old --compat-options list-formats (Alias: --no-list-formats-as-table)
|
||||
--list-formats-as-table --compat-options -list-formats [Default] (Alias: --no-list-formats-old)
|
||||
--youtube-skip-dash-manifest --extractor-args "youtube:skip=dash" (Alias: --no-youtube-include-dash-manifest)
|
||||
--youtube-skip-hls-manifest --extractor-args "youtube:skip=hls" (Alias: --no-youtube-include-hls-manifest)
|
||||
--youtube-include-dash-manifest Default (Alias: --no-youtube-skip-dash-manifest)
|
||||
--youtube-include-hls-manifest Default (Alias: --no-youtube-skip-hls-manifest)
|
||||
--list-formats-as-table --compat-options -list-formats [Default]
|
||||
--geo-bypass --xff "default"
|
||||
--no-geo-bypass --xff "never"
|
||||
--geo-bypass-country CODE --xff CODE
|
||||
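Similarly, for these not-recommended options the replacements on the right are used directly; a sketch with placeholder values:

```
# --geo-bypass-country DE         -->
yt-dlp --xff DE "https://example.com/video"

# --hls-prefer-ffmpeg             -->
yt-dlp --downloader "m3u8:ffmpeg" "https://example.com/video"
```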
|
|
@ -2304,20 +2410,14 @@ These options are not intended to be used by the end-user
|
|||
|
||||
--test Download only part of video for testing extractors
|
||||
--load-pages Load pages dumped by --write-pages
|
||||
--youtube-print-sig-code For testing youtube signatures
|
||||
--allow-unplayable-formats List unplayable formats also
|
||||
--no-allow-unplayable-formats Default
|
||||
|
||||
#### Old aliases
|
||||
These are aliases that are no longer documented for various reasons
|
||||
|
||||
--avconv-location --ffmpeg-location
|
||||
--clean-infojson --clean-info-json
|
||||
--cn-verification-proxy URL --geo-verification-proxy URL
|
||||
--dump-headers --print-traffic
|
||||
--dump-intermediate-pages --dump-pages
|
||||
--force-write-download-archive --force-write-archive
|
||||
--load-info --load-info-json
|
||||
--no-clean-infojson --no-clean-info-json
|
||||
--no-split-tracks --no-split-chapters
|
||||
--no-write-srt --no-write-subs
|
||||
|
|
@ -2330,7 +2430,7 @@ These are aliases that are no longer documented for various reasons
|
|||
--yes-overwrites --force-overwrites
|
||||
|
||||
#### Sponskrub Options
|
||||
Support for [SponSkrub](https://github.com/faissaloo/SponSkrub) has been deprecated in favor of the `--sponsorblock` options
|
||||
Support for [SponSkrub](https://github.com/faissaloo/SponSkrub) has been removed in favor of the `--sponsorblock` options
|
||||
|
||||
--sponskrub --sponsorblock-mark all
|
||||
--no-sponskrub --no-sponsorblock
|
||||
|
|
@ -2352,6 +2452,17 @@ These options may no longer work as intended
|
|||
--no-include-ads Default
|
||||
--write-annotations No supported site has annotations now
|
||||
--no-write-annotations Default
|
||||
--avconv-location Removed alias for --ffmpeg-location
|
||||
--cn-verification-proxy URL Removed alias for --geo-verification-proxy URL
|
||||
--dump-headers Removed alias for --print-traffic
|
||||
--dump-intermediate-pages Removed alias for --dump-pages
|
||||
--youtube-skip-dash-manifest Removed alias for --extractor-args "youtube:skip=dash" (Alias: --no-youtube-include-dash-manifest)
|
||||
--youtube-skip-hls-manifest Removed alias for --extractor-args "youtube:skip=hls" (Alias: --no-youtube-include-hls-manifest)
|
||||
--youtube-include-dash-manifest Default (Alias: --no-youtube-skip-dash-manifest)
|
||||
--youtube-include-hls-manifest Default (Alias: --no-youtube-skip-hls-manifest)
|
||||
--youtube-print-sig-code Removed testing functionality
|
||||
--dump-user-agent No longer supported
|
||||
--xattr-set-filesize No longer supported
|
||||
--compat-options seperate-video-versions No longer needed
|
||||
--compat-options no-youtube-prefer-utc-upload-date No longer supported
|
||||
|
||||
|
|
|
|||
4473
THIRD_PARTY_LICENSES.txt
Normal file
File diff suppressed because it is too large
|
|
@ -1,10 +1,178 @@
|
|||
services:
|
||||
static:
|
||||
build: static
|
||||
|
||||
linux_x86_64:
|
||||
build:
|
||||
context: linux
|
||||
target: build
|
||||
platforms:
|
||||
- "linux/amd64"
|
||||
args:
|
||||
BUILDIMAGE: ghcr.io/yt-dlp/manylinux2014_x86_64-shared:latest
|
||||
environment:
|
||||
channel: ${channel}
|
||||
origin: ${origin}
|
||||
version: ${version}
|
||||
EXE_NAME: ${EXE_NAME:?}
|
||||
CHANNEL: ${CHANNEL:?}
|
||||
ORIGIN: ${ORIGIN:?}
|
||||
VERSION:
|
||||
PYTHON_VERSION:
|
||||
SKIP_ONEDIR_BUILD:
|
||||
SKIP_ONEFILE_BUILD:
|
||||
volumes:
|
||||
- ~/build:/build
|
||||
- ../..:/yt-dlp
|
||||
|
||||
linux_x86_64_verify:
|
||||
build:
|
||||
context: linux
|
||||
target: verify
|
||||
platforms:
|
||||
- "linux/amd64"
|
||||
args:
|
||||
VERIFYIMAGE: quay.io/pypa/manylinux2014_x86_64:latest
|
||||
environment:
|
||||
EXE_NAME: ${EXE_NAME:?}
|
||||
UPDATE_TO:
|
||||
SKIP_ONEDIR_BUILD:
|
||||
SKIP_ONEFILE_BUILD:
|
||||
volumes:
|
||||
- ../../dist:/build
|
||||
|
||||
linux_aarch64:
|
||||
build:
|
||||
context: linux
|
||||
target: build
|
||||
platforms:
|
||||
- "linux/arm64"
|
||||
args:
|
||||
BUILDIMAGE: ghcr.io/yt-dlp/manylinux2014_aarch64-shared:latest
|
||||
environment:
|
||||
EXE_NAME: ${EXE_NAME:?}
|
||||
CHANNEL: ${CHANNEL:?}
|
||||
ORIGIN: ${ORIGIN:?}
|
||||
VERSION:
|
||||
PYTHON_VERSION:
|
||||
SKIP_ONEDIR_BUILD:
|
||||
SKIP_ONEFILE_BUILD:
|
||||
volumes:
|
||||
- ../..:/yt-dlp
|
||||
|
||||
linux_aarch64_verify:
|
||||
build:
|
||||
context: linux
|
||||
target: verify
|
||||
platforms:
|
||||
- "linux/arm64"
|
||||
args:
|
||||
VERIFYIMAGE: quay.io/pypa/manylinux2014_aarch64:latest
|
||||
environment:
|
||||
EXE_NAME: ${EXE_NAME:?}
|
||||
UPDATE_TO:
|
||||
SKIP_ONEDIR_BUILD:
|
||||
SKIP_ONEFILE_BUILD:
|
||||
volumes:
|
||||
- ../../dist:/build
|
||||
|
||||
linux_armv7l:
|
||||
build:
|
||||
context: linux
|
||||
target: build
|
||||
platforms:
|
||||
- "linux/arm/v7"
|
||||
args:
|
||||
BUILDIMAGE: ghcr.io/yt-dlp/manylinux_2_31_armv7l-shared:latest
|
||||
environment:
|
||||
EXE_NAME: ${EXE_NAME:?}
|
||||
CHANNEL: ${CHANNEL:?}
|
||||
ORIGIN: ${ORIGIN:?}
|
||||
VERSION:
|
||||
PYTHON_VERSION:
|
||||
SKIP_ONEDIR_BUILD:
|
||||
SKIP_ONEFILE_BUILD:
|
||||
volumes:
|
||||
- ../..:/yt-dlp
|
||||
- ../../venv:/yt-dlp-build-venv
|
||||
|
||||
linux_armv7l_verify:
|
||||
build:
|
||||
context: linux
|
||||
target: verify
|
||||
platforms:
|
||||
- "linux/arm/v7"
|
||||
args:
|
||||
VERIFYIMAGE: arm32v7/debian:bullseye
|
||||
environment:
|
||||
EXE_NAME: ${EXE_NAME:?}
|
||||
UPDATE_TO:
|
||||
SKIP_ONEDIR_BUILD:
|
||||
SKIP_ONEFILE_BUILD:
|
||||
volumes:
|
||||
- ../../dist:/build
|
||||
|
||||
musllinux_x86_64:
|
||||
build:
|
||||
context: linux
|
||||
target: build
|
||||
platforms:
|
||||
- "linux/amd64"
|
||||
args:
|
||||
BUILDIMAGE: ghcr.io/yt-dlp/musllinux_1_2_x86_64-shared:latest
|
||||
environment:
|
||||
EXE_NAME: ${EXE_NAME:?}
|
||||
CHANNEL: ${CHANNEL:?}
|
||||
ORIGIN: ${ORIGIN:?}
|
||||
VERSION:
|
||||
PYTHON_VERSION:
|
||||
SKIP_ONEDIR_BUILD:
|
||||
SKIP_ONEFILE_BUILD:
|
||||
volumes:
|
||||
- ../..:/yt-dlp
|
||||
|
||||
musllinux_x86_64_verify:
|
||||
build:
|
||||
context: linux
|
||||
target: verify
|
||||
platforms:
|
||||
- "linux/amd64"
|
||||
args:
|
||||
VERIFYIMAGE: alpine:3.22
|
||||
environment:
|
||||
EXE_NAME: ${EXE_NAME:?}
|
||||
UPDATE_TO:
|
||||
SKIP_ONEDIR_BUILD:
|
||||
SKIP_ONEFILE_BUILD:
|
||||
volumes:
|
||||
- ../../dist:/build
|
||||
|
||||
musllinux_aarch64:
|
||||
build:
|
||||
context: linux
|
||||
target: build
|
||||
platforms:
|
||||
- "linux/arm64"
|
||||
args:
|
||||
BUILDIMAGE: ghcr.io/yt-dlp/musllinux_1_2_aarch64-shared:latest
|
||||
environment:
|
||||
EXE_NAME: ${EXE_NAME:?}
|
||||
CHANNEL: ${CHANNEL:?}
|
||||
ORIGIN: ${ORIGIN:?}
|
||||
VERSION:
|
||||
PYTHON_VERSION:
|
||||
SKIP_ONEDIR_BUILD:
|
||||
SKIP_ONEFILE_BUILD:
|
||||
EXCLUDE_CURL_CFFI: "1"
|
||||
volumes:
|
||||
- ../..:/yt-dlp
|
||||
|
||||
musllinux_aarch64_verify:
|
||||
build:
|
||||
context: linux
|
||||
target: verify
|
||||
platforms:
|
||||
- "linux/arm64"
|
||||
args:
|
||||
VERIFYIMAGE: alpine:3.22
|
||||
environment:
|
||||
EXE_NAME: ${EXE_NAME:?}
|
||||
UPDATE_TO:
|
||||
SKIP_ONEDIR_BUILD:
|
||||
SKIP_ONEFILE_BUILD:
|
||||
volumes:
|
||||
- ../../dist:/build
|
||||
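The exact compose invocation used by the release workflow is not part of this diff; as a rough local sketch (environment values are illustrative assumptions, run from the directory containing this compose file):

```
# EXE_NAME, CHANNEL and ORIGIN are required by the build services (":?")
export EXE_NAME=yt-dlp_linux CHANNEL=stable ORIGIN=yt-dlp/yt-dlp VERSION=
docker compose build linux_x86_64
docker compose run --rm linux_x86_64          # build.sh writes artifacts into the repo's dist/
docker compose run --rm linux_x86_64_verify   # verify.sh checks the binaries mounted from ../../dist
```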
|
|
|
|||
16
bundle/docker/linux/Dockerfile
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
ARG BUILDIMAGE=ghcr.io/yt-dlp/manylinux2014_x86_64-shared:latest
|
||||
ARG VERIFYIMAGE=alpine:3.22
|
||||
|
||||
|
||||
FROM $BUILDIMAGE AS build
|
||||
|
||||
WORKDIR /yt-dlp
|
||||
COPY build.sh /build.sh
|
||||
ENTRYPOINT ["/build.sh"]
|
||||
|
||||
|
||||
FROM $VERIFYIMAGE AS verify
|
||||
|
||||
WORKDIR /testing
|
||||
COPY verify.sh /verify.sh
|
||||
ENTRYPOINT ["/verify.sh"]
|
||||
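Since the Dockerfile defines separate `build` and `verify` stages, either can also be built directly; a minimal sketch (image tags are arbitrary):

```
docker build --target build  -t yt-dlp-linux-build  bundle/docker/linux
docker build --target verify --build-arg VERIFYIMAGE=alpine:3.22 -t yt-dlp-linux-verify bundle/docker/linux
```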
48
bundle/docker/linux/build.sh
Executable file
|
|
@ -0,0 +1,48 @@
|
|||
#!/bin/bash
|
||||
set -exuo pipefail
|
||||
|
||||
if [[ -z "${PYTHON_VERSION:-}" ]]; then
|
||||
PYTHON_VERSION="3.13"
|
||||
echo "Defaulting to using Python ${PYTHON_VERSION}"
|
||||
fi
|
||||
|
||||
function runpy {
|
||||
"/opt/shared-cpython-${PYTHON_VERSION}/bin/python${PYTHON_VERSION}" "$@"
|
||||
}
|
||||
|
||||
function venvpy {
|
||||
"python${PYTHON_VERSION}" "$@"
|
||||
}
|
||||
|
||||
INCLUDES=(
|
||||
--include-group pyinstaller
|
||||
--include-group secretstorage
|
||||
)
|
||||
|
||||
if [[ -z "${EXCLUDE_CURL_CFFI:-}" ]]; then
|
||||
INCLUDES+=(--include-group curl-cffi)
|
||||
fi
|
||||
|
||||
runpy -m venv /yt-dlp-build-venv
|
||||
# shellcheck disable=SC1091
|
||||
source /yt-dlp-build-venv/bin/activate
|
||||
# Inside the venv we use venvpy instead of runpy
|
||||
venvpy -m ensurepip --upgrade --default-pip
|
||||
venvpy -m devscripts.install_deps --only-optional-groups --include-group build
|
||||
venvpy -m devscripts.install_deps "${INCLUDES[@]}"
|
||||
venvpy -m devscripts.make_lazy_extractors
|
||||
venvpy devscripts/update-version.py -c "${CHANNEL}" -r "${ORIGIN}" "${VERSION}"
|
||||
|
||||
if [[ -z "${SKIP_ONEDIR_BUILD:-}" ]]; then
|
||||
mkdir -p /build
|
||||
venvpy -m bundle.pyinstaller --onedir --distpath=/build
|
||||
pushd "/build/${EXE_NAME}"
|
||||
chmod +x "${EXE_NAME}"
|
||||
venvpy -m zipfile -c "/yt-dlp/dist/${EXE_NAME}.zip" ./
|
||||
popd
|
||||
fi
|
||||
|
||||
if [[ -z "${SKIP_ONEFILE_BUILD:-}" ]]; then
|
||||
venvpy -m bundle.pyinstaller
|
||||
chmod +x "./dist/${EXE_NAME}"
|
||||
fi
|
||||
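build.sh is configured entirely through environment variables (the same ones declared in the compose services above); a hedged sketch of a standalone run inside the build container, with example values:

```
# From /yt-dlp inside the manylinux build image; VERSION may be left empty
EXE_NAME=yt-dlp_linux CHANNEL=stable ORIGIN=yt-dlp/yt-dlp VERSION= PYTHON_VERSION=3.13 /build.sh

# Skip the onefile build and only produce the onedir zip
SKIP_ONEFILE_BUILD=1 EXE_NAME=yt-dlp_linux CHANNEL=stable ORIGIN=yt-dlp/yt-dlp VERSION= /build.sh
```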
51
bundle/docker/linux/verify.sh
Executable file
|
|
@ -0,0 +1,51 @@
|
|||
#!/bin/sh
|
||||
set -eu
|
||||
|
||||
if [ -n "${SKIP_ONEFILE_BUILD:-}" ]; then
|
||||
if [ -n "${SKIP_ONEDIR_BUILD:-}" ]; then
|
||||
echo "All executable builds were skipped"
|
||||
exit 1
|
||||
fi
|
||||
echo "Extracting zip to verify onedir build"
|
||||
if command -v python3 >/dev/null 2>&1; then
|
||||
python3 -m zipfile -e "/build/${EXE_NAME}.zip" ./
|
||||
else
|
||||
echo "Attempting to install unzip"
|
||||
if command -v dnf >/dev/null 2>&1; then
|
||||
dnf -y install --allowerasing unzip
|
||||
elif command -v yum >/dev/null 2>&1; then
|
||||
yum -y install unzip
|
||||
elif command -v apt-get >/dev/null 2>&1; then
|
||||
DEBIAN_FRONTEND=noninteractive apt-get update -qq
|
||||
DEBIAN_FRONTEND=noninteractive apt-get install -qq -y --no-install-recommends unzip
|
||||
elif command -v apk >/dev/null 2>&1; then
|
||||
apk add --no-cache unzip
|
||||
else
|
||||
echo "Unsupported image"
|
||||
exit 1
|
||||
fi
|
||||
unzip "/build/${EXE_NAME}.zip" -d ./
|
||||
fi
|
||||
chmod +x "./${EXE_NAME}"
|
||||
"./${EXE_NAME}" -v || true
|
||||
"./${EXE_NAME}" --version
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "Verifying onefile build"
|
||||
cp "/build/${EXE_NAME}" ./
|
||||
chmod +x "./${EXE_NAME}"
|
||||
|
||||
if [ -z "${UPDATE_TO:-}" ]; then
|
||||
"./${EXE_NAME}" -v || true
|
||||
"./${EXE_NAME}" --version
|
||||
exit 0
|
||||
fi
|
||||
|
||||
cp "./${EXE_NAME}" "./${EXE_NAME}_downgraded"
|
||||
version="$("./${EXE_NAME}" --version)"
|
||||
"./${EXE_NAME}_downgraded" -v --update-to "${UPDATE_TO}"
|
||||
downgraded_version="$("./${EXE_NAME}_downgraded" --version)"
|
||||
if [ "${version}" = "${downgraded_version}" ]; then
|
||||
exit 1
|
||||
fi
|
||||
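verify.sh is likewise driven by environment variables; an illustrative sketch (the UPDATE_TO target is a placeholder):

```
# Print the version of the onefile binary mounted at /build
EXE_NAME=yt-dlp_linux /verify.sh

# Additionally exercise the self-update path and require the version to change
EXE_NAME=yt-dlp_linux UPDATE_TO="stable@2025.01.01" /verify.sh
```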
|
|
@ -1,21 +0,0 @@
|
|||
FROM alpine:3.19 as base
|
||||
|
||||
RUN apk --update add --no-cache \
|
||||
build-base \
|
||||
python3 \
|
||||
pipx \
|
||||
;
|
||||
|
||||
RUN pipx install pyinstaller
|
||||
# Requires above step to prepare the shared venv
|
||||
RUN ~/.local/share/pipx/shared/bin/python -m pip install -U wheel
|
||||
RUN apk --update add --no-cache \
|
||||
scons \
|
||||
patchelf \
|
||||
binutils \
|
||||
;
|
||||
RUN pipx install staticx
|
||||
|
||||
WORKDIR /yt-dlp
|
||||
COPY entrypoint.sh /entrypoint.sh
|
||||
ENTRYPOINT /entrypoint.sh
|
||||
|
|
@ -1,13 +0,0 @@
|
|||
#!/bin/ash
|
||||
set -e
|
||||
|
||||
source ~/.local/share/pipx/venvs/pyinstaller/bin/activate
|
||||
python -m devscripts.install_deps --include secretstorage
|
||||
python -m devscripts.make_lazy_extractors
|
||||
python devscripts/update-version.py -c "${channel}" -r "${origin}" "${version}"
|
||||
python -m bundle.pyinstaller
|
||||
deactivate
|
||||
|
||||
source ~/.local/share/pipx/venvs/staticx/bin/activate
|
||||
staticx /yt-dlp/dist/yt-dlp_linux /build/yt-dlp_linux
|
||||
deactivate
|
||||
|
|
@ -1,59 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
# Allow execution from anywhere
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
import warnings
|
||||
|
||||
from py2exe import freeze
|
||||
|
||||
from devscripts.utils import read_version
|
||||
|
||||
VERSION = read_version()
|
||||
|
||||
|
||||
def main():
|
||||
warnings.warn(
|
||||
'py2exe builds do not support pycryptodomex and needs VC++14 to run. '
|
||||
'It is recommended to run "pyinst.py" to build using pyinstaller instead')
|
||||
|
||||
freeze(
|
||||
console=[{
|
||||
'script': './yt_dlp/__main__.py',
|
||||
'dest_base': 'yt-dlp',
|
||||
'icon_resources': [(1, 'devscripts/logo.ico')],
|
||||
}],
|
||||
version_info={
|
||||
'version': VERSION,
|
||||
'description': 'A feature-rich command-line audio/video downloader',
|
||||
'comments': 'Official repository: <https://github.com/yt-dlp/yt-dlp>',
|
||||
'product_name': 'yt-dlp',
|
||||
'product_version': VERSION,
|
||||
},
|
||||
options={
|
||||
'bundle_files': 0,
|
||||
'compressed': 1,
|
||||
'optimize': 2,
|
||||
'dist_dir': './dist',
|
||||
'excludes': [
|
||||
# py2exe cannot import Crypto
|
||||
'Crypto',
|
||||
'Cryptodome',
|
||||
# requests >=2.32.0 breaks py2exe builds due to certifi dependency
|
||||
'requests',
|
||||
'urllib3',
|
||||
],
|
||||
'dll_excludes': ['w9xpopen.exe', 'crypt32.dll'],
|
||||
# Modules that are only imported dynamically must be added here
|
||||
'includes': ['yt_dlp.compat._legacy', 'yt_dlp.compat._deprecated',
|
||||
'yt_dlp.utils._legacy', 'yt_dlp.utils._deprecated'],
|
||||
},
|
||||
zipfile=None,
|
||||
)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
|
@ -13,6 +13,8 @@ from PyInstaller.__main__ import run as run_pyinstaller
|
|||
from devscripts.utils import read_version
|
||||
|
||||
OS_NAME, MACHINE, ARCH = sys.platform, platform.machine().lower(), platform.architecture()[0][:2]
|
||||
if OS_NAME == 'linux' and platform.libc_ver()[0] != 'glibc':
|
||||
OS_NAME = 'musllinux'
|
||||
if MACHINE in ('x86', 'x86_64', 'amd64', 'i386', 'i686'):
|
||||
MACHINE = 'x86' if ARCH == '32' else ''
|
||||
|
||||
|
|
@ -36,6 +38,9 @@ def main():
|
|||
f'--name={name}',
|
||||
'--icon=devscripts/logo.ico',
|
||||
'--upx-exclude=vcruntime140.dll',
|
||||
# Ref: https://github.com/yt-dlp/yt-dlp/issues/13311
|
||||
# https://github.com/pyinstaller/pyinstaller/issues/9149
|
||||
'--exclude-module=pkg_resources',
|
||||
'--noconfirm',
|
||||
'--additional-hooks-dir=yt_dlp/__pyinstaller',
|
||||
*opts,
|
||||
|
|
@ -59,16 +64,22 @@ def parse_options():
|
|||
|
||||
def exe(onedir):
|
||||
"""@returns (name, path)"""
|
||||
platform_name, machine, extension = {
|
||||
'win32': (None, MACHINE, '.exe'),
|
||||
'darwin': ('macos', None, None),
|
||||
}.get(OS_NAME, (OS_NAME, MACHINE, None))
|
||||
|
||||
name = '_'.join(filter(None, (
|
||||
'yt-dlp',
|
||||
{'win32': '', 'darwin': 'macos'}.get(OS_NAME, OS_NAME),
|
||||
MACHINE,
|
||||
platform_name,
|
||||
machine,
|
||||
)))
|
||||
|
||||
return name, ''.join(filter(None, (
|
||||
'dist/',
|
||||
onedir and f'{name}/',
|
||||
name,
|
||||
OS_NAME == 'win32' and '.exe',
|
||||
extension,
|
||||
)))
|
||||
|
||||
|
||||
|
|
@ -118,7 +129,6 @@ def windows_set_version(exe, version):
|
|||
StringStruct('FileDescription', 'yt-dlp%s' % (MACHINE and f' ({MACHINE})')),
|
||||
StringStruct('FileVersion', version),
|
||||
StringStruct('InternalName', f'yt-dlp{suffix}'),
|
||||
StringStruct('LegalCopyright', 'pukkandan.ytdlp@gmail.com | UNLICENSE'),
|
||||
StringStruct('OriginalFilename', f'yt-dlp{suffix}.exe'),
|
||||
StringStruct('ProductName', f'yt-dlp{suffix}'),
|
||||
StringStruct(
|
||||
|
|
|
|||
|
|
@ -6,13 +6,17 @@ __yt_dlp()
|
|||
prev="${COMP_WORDS[COMP_CWORD-1]}"
|
||||
opts="{{flags}}"
|
||||
keywords=":ytfavorites :ytrecommended :ytsubscriptions :ytwatchlater :ythistory"
|
||||
fileopts="-a|--batch-file|--download-archive|--cookies|--load-info"
|
||||
fileopts="-a|--batch-file|--download-archive|--cookies|--load-info-json"
|
||||
diropts="--cache-dir"
|
||||
|
||||
if [[ ${prev} =~ ${fileopts} ]]; then
|
||||
local IFS=$'\n'
|
||||
type compopt &>/dev/null && compopt -o filenames
|
||||
COMPREPLY=( $(compgen -f -- ${cur}) )
|
||||
return 0
|
||||
elif [[ ${prev} =~ ${diropts} ]]; then
|
||||
local IFS=$'\n'
|
||||
type compopt &>/dev/null && compopt -o dirnames
|
||||
COMPREPLY=( $(compgen -d -- ${cur}) )
|
||||
return 0
|
||||
fi
|
||||
|
|
|
|||
|
|
@ -179,6 +179,145 @@
|
|||
{
|
||||
"action": "add",
|
||||
"when": "6aaf96a3d6e7d0d426e97e11a2fcf52fda00e733",
|
||||
"short": "[priority] Security: [[CVE-2024-10123](https://nvd.nist.gov/vuln/detail/CVE-2024-10123)] [Properly sanitize file-extension to prevent file system modification and RCE](https://github.com/yt-dlp/yt-dlp/security/advisories/GHSA-79w7-vh3h-8g4j)\n - Unsafe extensions are now blocked from being downloaded"
|
||||
"short": "[priority] Security: [[CVE-2024-38519](https://nvd.nist.gov/vuln/detail/CVE-2024-38519)] [Properly sanitize file-extension to prevent file system modification and RCE](https://github.com/yt-dlp/yt-dlp/security/advisories/GHSA-79w7-vh3h-8g4j)\n - Unsafe extensions are now blocked from being downloaded"
|
||||
},
|
||||
{
|
||||
"action": "add",
|
||||
"when": "6075a029dba70a89675ae1250e7cdfd91f0eba41",
|
||||
"short": "[priority] Security: [[ie/douyutv] Do not use dangerous javascript source/URL](https://github.com/yt-dlp/yt-dlp/security/advisories/GHSA-3v33-3wmw-3785)\n - A dependency on potentially malicious third-party JavaScript code has been removed from the Douyu extractors"
|
||||
},
|
||||
{
|
||||
"action": "add",
|
||||
"when": "fb8b7f226d251e521a89b23c415e249e5b788e5c",
|
||||
"short": "[priority] **The minimum *recommended* Python version has been raised to 3.9**\nSince Python 3.8 will reach end-of-life in October 2024, support for it will be dropped soon. [Read more](https://github.com/yt-dlp/yt-dlp/issues/10086)"
|
||||
},
|
||||
{
|
||||
"action": "change",
|
||||
"when": "b31b81d85f00601710d4fac590c3e4efb4133283",
|
||||
"short": "[ci] Rerun failed tests (#11143)",
|
||||
"authors": ["Grub4K"]
|
||||
},
|
||||
{
|
||||
"action": "add",
|
||||
"when": "a886cf3e900f4a2ec00af705f883539269545609",
|
||||
"short": "[priority] **py2exe is no longer supported**\nThis release's `yt-dlp_min.exe` will be the last, and it's actually a PyInstaller-bundled executable so that yt-dlp users updating their py2exe build with `-U` will be automatically migrated. [Read more](https://github.com/yt-dlp/yt-dlp/issues/10087)"
|
||||
},
|
||||
{
|
||||
"action": "add",
|
||||
"when": "a886cf3e900f4a2ec00af705f883539269545609",
|
||||
"short": "[priority] **Following this release, yt-dlp's Python dependencies *must* be installed using the `default` group**\nIf you're installing yt-dlp with pip/pipx or requiring yt-dlp in your own Python project, you'll need to specify `yt-dlp[default]` if you want to also install yt-dlp's optional dependencies (which were previously included by default). [Read more](https://github.com/yt-dlp/yt-dlp/pull/11255)"
|
||||
},
|
||||
{
|
||||
"action": "add",
|
||||
"when": "87884f15580910e4e0fe0e1db73508debc657471",
|
||||
"short": "[priority] **Beginning with this release, yt-dlp's Python dependencies *must* be installed using the `default` group**\nIf you're installing yt-dlp with pip/pipx or requiring yt-dlp in your own Python project, you'll need to specify `yt-dlp[default]` if you want to also install yt-dlp's optional dependencies (which were previously included by default). [Read more](https://github.com/yt-dlp/yt-dlp/pull/11255)"
|
||||
},
|
||||
{
|
||||
"action": "add",
|
||||
"when": "d784464399b600ba9516bbcec6286f11d68974dd",
|
||||
"short": "[priority] **The minimum *required* Python version has been raised to 3.9**\nPython 3.8 reached its end-of-life on 2024.10.07, and yt-dlp has now removed support for it. As an unfortunate side effect, the official `yt-dlp.exe` and `yt-dlp_x86.exe` binaries are no longer supported on Windows 7. [Read more](https://github.com/yt-dlp/yt-dlp/issues/10086)"
|
||||
},
|
||||
{
|
||||
"action": "change",
|
||||
"when": "914af9a0cf51c9a3f74aa88d952bee8334c67511",
|
||||
"short": "Expand paths in `--plugin-dirs` (#11334)",
|
||||
"authors": ["bashonly"]
|
||||
},
|
||||
{
|
||||
"action": "change",
|
||||
"when": "c29f5a7fae93a08f3cfbb6127b2faa75145b06a0",
|
||||
"short": "[ie/generic] Do not impersonate by default (#11336)",
|
||||
"authors": ["bashonly"]
|
||||
},
|
||||
{
|
||||
"action": "change",
|
||||
"when": "57212a5f97ce367590aaa5c3e9a135eead8f81f7",
|
||||
"short": "[ie/vimeo] Fix API retries (#11351)",
|
||||
"authors": ["bashonly"]
|
||||
},
|
||||
{
|
||||
"action": "add",
|
||||
"when": "52c0ffe40ad6e8404d93296f575007b05b04c686",
|
||||
"short": "[priority] **Login with OAuth is no longer supported for YouTube**\nDue to a change made by the site, yt-dlp is no longer able to support OAuth login for YouTube. [Read more](https://github.com/yt-dlp/yt-dlp/issues/11462#issuecomment-2471703090)"
|
||||
},
|
||||
{
|
||||
"action": "change",
|
||||
"when": "76ac023ff02f06e8c003d104f02a03deeddebdcd",
|
||||
"short": "[ie/youtube:tab] Improve shorts title extraction (#11997)",
|
||||
"authors": ["bashonly", "d3d9"]
|
||||
},
|
||||
{
|
||||
"action": "add",
|
||||
"when": "88eb1e7a9a2720ac89d653c0d0e40292388823bb",
|
||||
"short": "[priority] **New option `--preset-alias`/`-t` has been added**\nThis provides convenient predefined aliases for common use cases. Available presets include `mp4`, `mp3`, `mkv`, `aac`, and `sleep`. See [the README](https://github.com/yt-dlp/yt-dlp/blob/master/README.md#preset-aliases) for more details."
|
||||
},
|
||||
{
|
||||
"action": "remove",
|
||||
"when": "d596824c2f8428362c072518856065070616e348"
|
||||
},
|
||||
{
|
||||
"action": "remove",
|
||||
"when": "7b81634fb1d15999757e7a9883daa6ef09ea785b"
|
||||
},
|
||||
{
|
||||
"action": "remove",
|
||||
"when": "500761e41acb96953a5064e951d41d190c287e46"
|
||||
},
|
||||
{
|
||||
"action": "add",
|
||||
"when": "f3008bc5f89d2691f2f8dfc51b406ef4e25281c3",
|
||||
"short": "[priority] **Default behaviour changed from `--mtime` to `--no-mtime`**\nyt-dlp no longer applies the server modified time to downloaded files by default. [Read more](https://github.com/yt-dlp/yt-dlp/issues/12780)"
|
||||
},
|
||||
{
|
||||
"action": "add",
|
||||
"when": "959ac99e98c3215437e573c22d64be42d361e863",
|
||||
"short": "[priority] Security: [[CVE-2025-54072](https://nvd.nist.gov/vuln/detail/CVE-2025-54072)] [Fix `--exec` placeholder expansion on Windows](https://github.com/yt-dlp/yt-dlp/security/advisories/GHSA-45hg-7f49-5h56)\n - When `--exec` is used on Windows, the filepath expanded from `{}` (or the default placeholder) is now properly escaped"
|
||||
},
|
||||
{
|
||||
"action": "change",
|
||||
"when": "b831406a1d3be34c159835079d12bae624c43610",
|
||||
"short": "[ie/rtve.es:program] Add extractor (#12955)",
|
||||
"authors": ["meGAmeS1", "seproDev"]
|
||||
},
|
||||
{
|
||||
"action": "add",
|
||||
"when": "23c658b9cbe34a151f8f921ab1320bb5d4e40a4d",
|
||||
"short": "[priority] **The minimum *recommended* Python version has been raised to 3.10**\nSince Python 3.9 will reach end-of-life in October 2025, support for it will be dropped soon. [Read more](https://github.com/yt-dlp/yt-dlp/issues/13858)"
|
||||
},
|
||||
{
|
||||
"action": "add",
|
||||
"when": "cc5a5caac5fbc0d605b52bde0778d6fd5f97b5ab",
|
||||
"short": "[priority] **darwin_legacy_exe builds are being discontinued**\nThis release's `yt-dlp_macos_legacy` binary will likely be the last one. [Read more](https://github.com/yt-dlp/yt-dlp/issues/13856)"
|
||||
},
|
||||
{
|
||||
"action": "add",
|
||||
"when": "c76ce28e06c816eb5b261dfb6aff6e69dd9b7382",
|
||||
"short": "[priority] **linux_armv7l_exe builds are being discontinued**\nThis release's `yt-dlp_linux_armv7l` binary could be the last one. [Read more](https://github.com/yt-dlp/yt-dlp/issues/13976)"
|
||||
},
|
||||
{
|
||||
"action": "add",
|
||||
"when": "08d78996831bd8e1e3c2592d740c3def00bbf548",
|
||||
"short": "[priority] **Several options have been deprecated**\nIn order to simplify the codebase and reduce maintenance burden, various options have been deprecated. Please remove them from your commands/configurations. [Read more](https://github.com/yt-dlp/yt-dlp/issues/14198)"
|
||||
},
|
||||
{
|
||||
"action": "add",
|
||||
"when": "4e6a693057cfaf1ce1f07b019ed3bfce2bf936f6",
|
||||
"short": "[priority] **The minimum *required* Python version has been raised to 3.10**\nPython 3.9 has reached its end-of-life as of October 2025, and yt-dlp has now removed support for it. [Read more](https://github.com/yt-dlp/yt-dlp/issues/13858)"
|
||||
},
|
||||
{
|
||||
"action": "add",
|
||||
"when": "2c9091e355a7ba5d1edb69796ecdca48199b77fb",
|
||||
"short": "[priority] **A stopgap release with a *TEMPORARY partial* fix for YouTube support**\nSome formats may still be unavailable, especially if cookies are passed to yt-dlp. The ***NEXT*** release, expected very soon, **will require an external JS runtime (e.g. Deno)** in order for YouTube downloads to work properly. [Read more](https://github.com/yt-dlp/yt-dlp/issues/14404)"
|
||||
},
|
||||
{
|
||||
"action": "change",
|
||||
"when": "8636a9bac3bed99984c1e297453660468ecf504b",
|
||||
"short": "Fix 6224a3898821965a7d6a2cb9cc2de40a0fd6e6bc",
|
||||
"authors": ["Grub4K"]
|
||||
},
|
||||
{
|
||||
"action": "add",
|
||||
"when": "6224a3898821965a7d6a2cb9cc2de40a0fd6e6bc",
|
||||
"short": "[priority] **An external JavaScript runtime is now required for full YouTube support**\nyt-dlp now requires users to have an external JavaScript runtime (e.g. Deno) installed in order to solve the JavaScript challenges presented by YouTube. [Read more](https://github.com/yt-dlp/yt-dlp/issues/15012)"
|
||||
}
|
||||
]
|
||||
|
|
|
|||
|
|
@ -20,6 +20,7 @@ def parse_patched_options(opts):
|
|||
'fragment_retries': 0,
|
||||
'extract_flat': False,
|
||||
'concat_playlist': 'never',
|
||||
'update_self': False,
|
||||
})
|
||||
yt_dlp.options.create_parser = lambda: patched_parser
|
||||
try:
|
||||
|
|
|
|||
|
|
@ -11,13 +11,12 @@ import codecs
|
|||
import subprocess
|
||||
|
||||
from yt_dlp.aes import aes_encrypt, key_expansion
|
||||
from yt_dlp.utils import intlist_to_bytes
|
||||
|
||||
secret_msg = b'Secret message goes here'
|
||||
|
||||
|
||||
def hex_str(int_list):
|
||||
return codecs.encode(intlist_to_bytes(int_list), 'hex')
|
||||
return codecs.encode(bytes(int_list), 'hex')
|
||||
|
||||
|
||||
def openssl_encode(algo, key, iv):
|
||||
|
|
|
|||
329
devscripts/generate_third_party_licenses.py
Normal file
|
|
@ -0,0 +1,329 @@
|
|||
import requests
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
import hashlib
|
||||
|
||||
DEFAULT_OUTPUT = 'THIRD_PARTY_LICENSES.txt'
|
||||
CACHE_LOCATION = '.license_cache'
|
||||
HEADER = '''THIRD-PARTY LICENSES
|
||||
|
||||
This file aggregates license texts of third-party components included with the yt-dlp PyInstaller-bundled executables.
|
||||
yt-dlp itself is licensed under the Unlicense (see LICENSE file).
|
||||
Source code for bundled third-party components is available from the original projects.
|
||||
If you cannot obtain it, the maintainers will provide it as per license obligation; maintainer emails are listed in pyproject.toml.'''
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class Dependency:
|
||||
name: str
|
||||
license_url: str
|
||||
project_url: str = ''
|
||||
license: str = ''
|
||||
comment: str = ''
|
||||
|
||||
|
||||
DEPENDENCIES: list[Dependency] = [
|
||||
# Core runtime environment components
|
||||
Dependency(
|
||||
name='Python',
|
||||
license='PSF-2.0',
|
||||
license_url='https://raw.githubusercontent.com/python/cpython/refs/heads/main/LICENSE',
|
||||
project_url='https://www.python.org/',
|
||||
),
|
||||
Dependency(
|
||||
name='Microsoft Distributable Code',
|
||||
license_url='https://raw.githubusercontent.com/python/cpython/refs/heads/main/PC/crtlicense.txt',
|
||||
comment='Only included in Windows builds',
|
||||
),
|
||||
Dependency(
|
||||
name='bzip2',
|
||||
license='bzip2-1.0.6',
|
||||
license_url='https://gitlab.com/federicomenaquintero/bzip2/-/raw/master/COPYING',
|
||||
project_url='https://sourceware.org/bzip2/',
|
||||
),
|
||||
Dependency(
|
||||
name='libffi',
|
||||
license='MIT',
|
||||
license_url='https://raw.githubusercontent.com/libffi/libffi/refs/heads/master/LICENSE',
|
||||
project_url='https://sourceware.org/libffi/',
|
||||
),
|
||||
Dependency(
|
||||
name='OpenSSL 3.0+',
|
||||
license='Apache-2.0',
|
||||
license_url='https://raw.githubusercontent.com/openssl/openssl/refs/heads/master/LICENSE.txt',
|
||||
project_url='https://www.openssl.org/',
|
||||
),
|
||||
Dependency(
|
||||
name='SQLite',
|
||||
license='Public Domain', # Technically does not need to be included
|
||||
license_url='https://sqlite.org/src/raw/e108e1e69ae8e8a59e93c455654b8ac9356a11720d3345df2a4743e9590fb20d?at=LICENSE.md',
|
||||
project_url='https://www.sqlite.org/',
|
||||
),
|
||||
Dependency(
|
||||
name='liblzma',
|
||||
license='0BSD', # Technically does not need to be included
|
||||
license_url='https://raw.githubusercontent.com/tukaani-project/xz/refs/heads/master/COPYING',
|
||||
project_url='https://tukaani.org/xz/',
|
||||
),
|
||||
Dependency(
|
||||
name='mpdecimal',
|
||||
license='BSD-2-Clause',
|
||||
# No official repo URL
|
||||
license_url='https://gist.githubusercontent.com/seproDev/9e5dbfc08af35c3f2463e64eb9b27161/raw/61f5a98bc1a4ad7d48b1c793fc3314d4d43c2ab1/mpdecimal_COPYRIGHT.txt',
|
||||
project_url='https://www.bytereef.org/mpdecimal/',
|
||||
),
|
||||
Dependency(
|
||||
name='zlib',
|
||||
license='zlib',
|
||||
license_url='https://raw.githubusercontent.com/madler/zlib/refs/heads/develop/LICENSE',
|
||||
project_url='https://zlib.net/',
|
||||
),
|
||||
Dependency(
|
||||
name='Expat',
|
||||
license='MIT',
|
||||
license_url='https://raw.githubusercontent.com/libexpat/libexpat/refs/heads/master/COPYING',
|
||||
project_url='https://libexpat.github.io/',
|
||||
),
|
||||
Dependency(
|
||||
name='ncurses',
|
||||
license='X11-distribute-modifications-variant',
|
||||
license_url='https://raw.githubusercontent.com/mirror/ncurses/refs/heads/master/COPYING',
|
||||
comment='Only included in Linux/macOS builds',
|
||||
project_url='https://invisible-island.net/ncurses/',
|
||||
),
|
||||
Dependency(
|
||||
name='GNU Readline',
|
||||
license='GPL-3.0-or-later',
|
||||
license_url='https://tiswww.case.edu/php/chet/readline/COPYING',
|
||||
comment='Only included in Linux builds',
|
||||
project_url='https://www.gnu.org/software/readline/',
|
||||
),
|
||||
Dependency(
|
||||
name='libstdc++',
|
||||
license='GPL-3.0-with-GCC-exception',
|
||||
license_url='https://raw.githubusercontent.com/gcc-mirror/gcc/refs/heads/master/COPYING.RUNTIME',
|
||||
comment='Only included in Linux builds',
|
||||
project_url='https://gcc.gnu.org/onlinedocs/libstdc++/',
|
||||
),
|
||||
Dependency(
|
||||
name='libgcc',
|
||||
license='GPL-3.0-with-GCC-exception',
|
||||
license_url='https://raw.githubusercontent.com/gcc-mirror/gcc/refs/heads/master/COPYING.RUNTIME',
|
||||
comment='Only included in Linux builds',
|
||||
project_url='https://gcc.gnu.org/',
|
||||
),
|
||||
Dependency(
|
||||
name='libuuid',
|
||||
license='BSD-3-Clause',
|
||||
license_url='https://git.kernel.org/pub/scm/fs/ext2/e2fsprogs.git/plain/lib/uuid/COPYING',
|
||||
comment='Only included in Linux builds',
|
||||
project_url='https://git.kernel.org/pub/scm/fs/ext2/e2fsprogs.git/tree/lib/uuid',
|
||||
),
|
||||
Dependency(
|
||||
name='libintl',
|
||||
license='LGPL-2.1-or-later',
|
||||
license_url='https://raw.githubusercontent.com/autotools-mirror/gettext/refs/heads/master/gettext-runtime/intl/COPYING.LIB',
|
||||
comment='Only included in macOS builds',
|
||||
project_url='https://www.gnu.org/software/gettext/',
|
||||
),
|
||||
Dependency(
|
||||
name='libidn2',
|
||||
license='LGPL-3.0-or-later',
|
||||
license_url='https://gitlab.com/libidn/libidn2/-/raw/master/COPYING.LESSERv3',
|
||||
comment='Only included in macOS builds',
|
||||
project_url='https://www.gnu.org/software/libidn/',
|
||||
),
|
||||
Dependency(
|
||||
name='libidn2 (Unicode character data files)',
|
||||
license='Unicode-TOU AND Unicode-DFS-2016',
|
||||
license_url='https://gitlab.com/libidn/libidn2/-/raw/master/COPYING.unicode',
|
||||
comment='Only included in macOS builds',
|
||||
project_url='https://www.gnu.org/software/libidn/',
|
||||
),
|
||||
Dependency(
|
||||
name='libunistring',
|
||||
license='LGPL-3.0-or-later',
|
||||
license_url='https://gitweb.git.savannah.gnu.org/gitweb/?p=libunistring.git;a=blob_plain;f=COPYING.LIB;hb=HEAD',
|
||||
comment='Only included in macOS builds',
|
||||
project_url='https://www.gnu.org/software/libunistring/',
|
||||
),
|
||||
Dependency(
|
||||
name='librtmp',
|
||||
license='LGPL-2.1-or-later',
|
||||
# No official repo URL
|
||||
license_url='https://gist.githubusercontent.com/seproDev/31d8c691ccddebe37b8b379307cb232d/raw/053408e98547ea8c7d9ba3a80c965f33e163b881/librtmp_COPYING.txt',
|
||||
comment='Only included in macOS builds',
|
||||
project_url='https://rtmpdump.mplayerhq.hu/',
|
||||
),
|
||||
Dependency(
|
||||
name='zstd',
|
||||
license='BSD-3-Clause',
|
||||
license_url='https://raw.githubusercontent.com/facebook/zstd/refs/heads/dev/LICENSE',
|
||||
comment='Only included in macOS builds',
|
||||
project_url='https://facebook.github.io/zstd/',
|
||||
),
|
||||
|
||||
# Python packages
|
||||
Dependency(
|
||||
name='brotli',
|
||||
license='MIT',
|
||||
license_url='https://raw.githubusercontent.com/google/brotli/refs/heads/master/LICENSE',
|
||||
project_url='https://brotli.org/',
|
||||
),
|
||||
Dependency(
|
||||
name='curl_cffi',
|
||||
license='MIT',
|
||||
license_url='https://raw.githubusercontent.com/lexiforest/curl_cffi/refs/heads/main/LICENSE',
|
||||
comment='Not included in `yt-dlp_x86` and `yt-dlp_musllinux_aarch64` builds',
|
||||
project_url='https://curl-cffi.readthedocs.io/',
|
||||
),
|
||||
# Dependency of curl_cffi
|
||||
Dependency(
|
||||
name='curl-impersonate',
|
||||
license='MIT',
|
||||
license_url='https://raw.githubusercontent.com/lexiforest/curl-impersonate/refs/heads/main/LICENSE',
|
||||
comment='Not included in `yt-dlp_x86` and `yt-dlp_musllinux_aarch64` builds',
|
||||
project_url='https://github.com/lexiforest/curl-impersonate',
|
||||
),
|
||||
Dependency(
|
||||
name='cffi',
|
||||
license='MIT-0', # Technically does not need to be included
|
||||
license_url='https://raw.githubusercontent.com/python-cffi/cffi/refs/heads/main/LICENSE',
|
||||
project_url='https://cffi.readthedocs.io/',
|
||||
),
|
||||
# Dependency of cffi
|
||||
Dependency(
|
||||
name='pycparser',
|
||||
license='BSD-3-Clause',
|
||||
license_url='https://raw.githubusercontent.com/eliben/pycparser/refs/heads/main/LICENSE',
|
||||
project_url='https://github.com/eliben/pycparser',
|
||||
),
|
||||
Dependency(
|
||||
name='mutagen',
|
||||
license='GPL-2.0-or-later',
|
||||
license_url='https://raw.githubusercontent.com/quodlibet/mutagen/refs/heads/main/COPYING',
|
||||
project_url='https://mutagen.readthedocs.io/',
|
||||
),
|
||||
Dependency(
|
||||
name='PyCryptodome',
|
||||
license='Public Domain and BSD-2-Clause',
|
||||
license_url='https://raw.githubusercontent.com/Legrandin/pycryptodome/refs/heads/master/LICENSE.rst',
|
||||
project_url='https://www.pycryptodome.org/',
|
||||
),
|
||||
Dependency(
|
||||
name='certifi',
|
||||
license='MPL-2.0',
|
||||
license_url='https://raw.githubusercontent.com/certifi/python-certifi/refs/heads/master/LICENSE',
|
||||
project_url='https://github.com/certifi/python-certifi',
|
||||
),
|
||||
Dependency(
|
||||
name='requests',
|
||||
license='Apache-2.0',
|
||||
license_url='https://raw.githubusercontent.com/psf/requests/refs/heads/main/LICENSE',
|
||||
project_url='https://requests.readthedocs.io/',
|
||||
),
|
||||
# Dependency of requests
|
||||
Dependency(
|
||||
name='charset-normalizer',
|
||||
license='MIT',
|
||||
license_url='https://raw.githubusercontent.com/jawah/charset_normalizer/refs/heads/master/LICENSE',
|
||||
project_url='https://charset-normalizer.readthedocs.io/',
|
||||
),
|
||||
# Dependency of requests
|
||||
Dependency(
|
||||
name='idna',
|
||||
license='BSD-3-Clause',
|
||||
license_url='https://raw.githubusercontent.com/kjd/idna/refs/heads/master/LICENSE.md',
|
||||
project_url='https://github.com/kjd/idna',
|
||||
),
|
||||
Dependency(
|
||||
name='urllib3',
|
||||
license='MIT',
|
||||
license_url='https://raw.githubusercontent.com/urllib3/urllib3/refs/heads/main/LICENSE.txt',
|
||||
project_url='https://urllib3.readthedocs.io/',
|
||||
),
|
||||
Dependency(
|
||||
name='SecretStorage',
|
||||
license='BSD-3-Clause',
|
||||
license_url='https://raw.githubusercontent.com/mitya57/secretstorage/refs/heads/master/LICENSE',
|
||||
comment='Only included in Linux builds',
|
||||
project_url='https://secretstorage.readthedocs.io/',
|
||||
),
|
||||
# Dependency of SecretStorage
|
||||
Dependency(
|
||||
name='cryptography',
|
||||
license='Apache-2.0', # Also available as BSD-3-Clause
|
||||
license_url='https://raw.githubusercontent.com/pyca/cryptography/refs/heads/main/LICENSE.APACHE',
|
||||
comment='Only included in Linux builds',
|
||||
project_url='https://cryptography.io/',
|
||||
),
|
||||
# Dependency of SecretStorage
|
||||
Dependency(
|
||||
name='Jeepney',
|
||||
license='MIT',
|
||||
license_url='https://gitlab.com/takluyver/jeepney/-/raw/master/LICENSE',
|
||||
comment='Only included in Linux builds',
|
||||
project_url='https://jeepney.readthedocs.io/',
|
||||
),
|
||||
Dependency(
|
||||
name='websockets',
|
||||
license='BSD-3-Clause',
|
||||
license_url='https://raw.githubusercontent.com/python-websockets/websockets/refs/heads/main/LICENSE',
|
||||
project_url='https://websockets.readthedocs.io/',
|
||||
),
|
||||
# Dependencies of yt-dlp-ejs
|
||||
Dependency(
|
||||
name='Meriyah',
|
||||
license='ISC',
|
||||
license_url='https://raw.githubusercontent.com/meriyah/meriyah/refs/heads/main/LICENSE.md',
|
||||
project_url='https://github.com/meriyah/meriyah',
|
||||
),
|
||||
Dependency(
|
||||
name='Astring',
|
||||
license='MIT',
|
||||
license_url='https://raw.githubusercontent.com/davidbonnet/astring/refs/heads/main/LICENSE',
|
||||
project_url='https://github.com/davidbonnet/astring/',
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
def fetch_text(dep: Dependency) -> str:
|
||||
cache_dir = Path(CACHE_LOCATION)
|
||||
cache_dir.mkdir(exist_ok=True)
|
||||
url_hash = hashlib.sha256(dep.license_url.encode('utf-8')).hexdigest()
|
||||
cache_file = cache_dir / f'{url_hash}.txt'
|
||||
|
||||
if cache_file.exists():
|
||||
return cache_file.read_text()
|
||||
|
||||
# Custom UA needed since some domains block the default requests UA
|
||||
req = requests.get(dep.license_url, headers={'User-Agent': 'yt-dlp license fetcher'})
|
||||
req.raise_for_status()
|
||||
text = req.text
|
||||
cache_file.write_text(text)
|
||||
return text
|
||||
|
||||
|
||||
def build_output() -> str:
|
||||
lines = [HEADER]
|
||||
for d in DEPENDENCIES:
|
||||
lines.append('\n')
|
||||
lines.append('-' * 80)
|
||||
header = f'{d.name}'
|
||||
if d.license:
|
||||
header += f' | {d.license}'
|
||||
if d.comment:
|
||||
header += f'\nNote: {d.comment}'
|
||||
if d.project_url:
|
||||
header += f'\nURL: {d.project_url}'
|
||||
lines.append(header)
|
||||
lines.append('-' * 80)
|
||||
|
||||
text = fetch_text(d)
|
||||
lines.append(text.strip('\n') + '\n')
|
||||
return '\n'.join(lines)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
content = build_output()
|
||||
Path(DEFAULT_OUTPUT).write_text(content)
|
||||
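This script takes no command-line arguments in this version; a sketch of running it from the repository root (the third-party `requests` package must be importable):

```
# Writes THIRD_PARTY_LICENSES.txt and caches fetched license texts in .license_cache/
python3 devscripts/generate_third_party_licenses.py
```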
|
|
@ -22,14 +22,19 @@ def parse_args():
|
|||
'input', nargs='?', metavar='TOMLFILE', default=Path(__file__).parent.parent / 'pyproject.toml',
|
||||
help='input file (default: %(default)s)')
|
||||
parser.add_argument(
|
||||
'-e', '--exclude', metavar='DEPENDENCY', action='append',
|
||||
help='exclude a dependency')
|
||||
'-e', '--exclude-dependency', metavar='DEPENDENCY', action='append',
|
||||
help='exclude a dependency (can be used multiple times)')
|
||||
parser.add_argument(
|
||||
'-i', '--include', metavar='GROUP', action='append',
|
||||
help='include an optional dependency group')
|
||||
'-i', '--include-group', metavar='GROUP', action='append',
|
||||
help='include an optional dependency group (can be used multiple times)')
|
||||
parser.add_argument(
|
||||
'-o', '--only-optional', action='store_true',
|
||||
help='only install optional dependencies')
|
||||
'-c', '--cherry-pick', metavar='DEPENDENCY', action='append',
|
||||
help=(
|
||||
'only include a specific dependency from the resulting dependency list '
|
||||
'(can be used multiple times)'))
|
||||
parser.add_argument(
|
||||
'-o', '--only-optional-groups', action='store_true',
|
||||
help='omit default dependencies unless the "default" group is specified with --include-group')
|
||||
parser.add_argument(
|
||||
'-p', '--print', action='store_true',
|
||||
help='only print requirements to stdout')
|
||||
|
|
@ -39,30 +44,41 @@ def parse_args():
|
|||
return parser.parse_args()
|
||||
|
||||
|
||||
def uniq(arg) -> dict[str, None]:
|
||||
return dict.fromkeys(map(str.lower, arg or ()))
|
||||
|
||||
|
||||
def main():
|
||||
args = parse_args()
|
||||
project_table = parse_toml(read_file(args.input))['project']
|
||||
recursive_pattern = re.compile(rf'{project_table["name"]}\[(?P<group_name>[\w-]+)\]')
|
||||
optional_groups = project_table['optional-dependencies']
|
||||
excludes = args.exclude or []
|
||||
|
||||
excludes = uniq(args.exclude_dependency)
|
||||
only_includes = uniq(args.cherry_pick)
|
||||
include_groups = uniq(args.include_group)
|
||||
|
||||
def yield_deps(group):
|
||||
for dep in group:
|
||||
if mobj := recursive_pattern.fullmatch(dep):
|
||||
yield from optional_groups.get(mobj.group('group_name'), [])
|
||||
yield from optional_groups.get(mobj.group('group_name'), ())
|
||||
else:
|
||||
yield dep
|
||||
|
||||
targets = []
|
||||
if not args.only_optional: # `-o` should exclude 'dependencies' and the 'default' group
|
||||
targets.extend(project_table['dependencies'])
|
||||
if 'default' not in excludes: # `--exclude default` should exclude entire 'default' group
|
||||
targets.extend(yield_deps(optional_groups['default']))
|
||||
targets = {}
|
||||
if not args.only_optional_groups:
|
||||
# legacy: 'dependencies' is empty now
|
||||
targets.update(dict.fromkeys(project_table['dependencies']))
|
||||
targets.update(dict.fromkeys(yield_deps(optional_groups['default'])))
|
||||
|
||||
for include in filter(None, map(optional_groups.get, args.include or [])):
|
||||
targets.extend(yield_deps(include))
|
||||
for include in filter(None, map(optional_groups.get, include_groups)):
|
||||
targets.update(dict.fromkeys(yield_deps(include)))
|
||||
|
||||
targets = [t for t in targets if re.match(r'[\w-]+', t).group(0).lower() not in excludes]
|
||||
def target_filter(target):
|
||||
name = re.match(r'[\w-]+', target).group(0).lower()
|
||||
return name not in excludes and (not only_includes or name in only_includes)
|
||||
|
||||
targets = list(filter(target_filter, targets))
|
||||
|
||||
if args.print:
|
||||
for target in targets:
|
||||
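The renamed flags are the ones already used by the Docker build script above; for reference, a couple of hedged examples of the new interface (dependency and group names are illustrative):

```
# Print (rather than install) the default deps plus the pyinstaller group
python3 -m devscripts.install_deps --include-group pyinstaller --print

# Skip the defaults, install only the build group, excluding one dependency by name
python3 -m devscripts.install_deps --only-optional-groups --include-group build --exclude-dependency cffi
```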
|
|
|
|||
|
|
@ -71,14 +71,13 @@ class CommitGroup(enum.Enum):
|
|||
def get(cls, value: str) -> tuple[CommitGroup | None, str | None]:
|
||||
group, _, subgroup = (group.strip().lower() for group in value.partition('/'))
|
||||
|
||||
result = cls.group_lookup().get(group)
|
||||
if not result:
|
||||
if subgroup:
|
||||
return None, value
|
||||
subgroup = group
|
||||
result = cls.subgroup_lookup().get(subgroup)
|
||||
if result := cls.group_lookup().get(group):
|
||||
return result, subgroup or None
|
||||
|
||||
return result, subgroup or None
|
||||
if subgroup:
|
||||
return None, value
|
||||
|
||||
return cls.subgroup_lookup().get(group), group or None
|
||||
|
||||
|
||||
@dataclass
|
||||
|
|
@ -136,8 +135,7 @@ class Changelog:
|
|||
first = False
|
||||
yield '\n<details><summary><h3>Changelog</h3></summary>\n'
|
||||
|
||||
group = groups[item]
|
||||
if group:
|
||||
if group := groups[item]:
|
||||
yield self.format_module(item.value, group)
|
||||
|
||||
if self._collapsible:
|
||||
|
|
@ -253,7 +251,7 @@ class CommitRange:
|
|||
''', re.VERBOSE | re.DOTALL)
|
||||
EXTRACTOR_INDICATOR_RE = re.compile(r'(?:Fix|Add)\s+Extractors?', re.IGNORECASE)
|
||||
REVERT_RE = re.compile(r'(?:\[[^\]]+\]\s+)?(?i:Revert)\s+([\da-f]{40})')
|
||||
FIXES_RE = re.compile(r'(?i:Fix(?:es)?(?:\s+bugs?)?(?:\s+in|\s+for)?|Revert|Improve)\s+([\da-f]{40})')
|
||||
FIXES_RE = re.compile(r'(?i:(?:bug\s*)?fix(?:es)?(?:\s+bugs?)?(?:\s+in|\s+for)?|Improve)\s+([\da-f]{40})')
|
||||
UPSTREAM_MERGE_RE = re.compile(r'Update to ytdl-commit-([\da-f]+)')
|
||||
|
||||
def __init__(self, start, end, default_author=None):
|
||||
|
|
@ -287,11 +285,16 @@ class CommitRange:
|
|||
short = next(lines)
|
||||
skip = short.startswith('Release ') or short == '[version] update'
|
||||
|
||||
fix_commitish = None
|
||||
if match := self.FIXES_RE.search(short):
|
||||
fix_commitish = match.group(1)
|
||||
|
||||
authors = [default_author] if default_author else []
|
||||
for line in iter(lambda: next(lines), self.COMMIT_SEPARATOR):
|
||||
match = self.AUTHOR_INDICATOR_RE.match(line)
|
||||
if match:
|
||||
if match := self.AUTHOR_INDICATOR_RE.match(line):
|
||||
authors = sorted(map(str.strip, line[match.end():].split(',')), key=str.casefold)
|
||||
if not fix_commitish and (match := self.FIXES_RE.fullmatch(line)):
|
||||
fix_commitish = match.group(1)
|
||||
|
||||
commit = Commit(commit_hash, short, authors)
|
||||
if skip and (self._start or not i):
|
||||
|
|
@ -301,21 +304,17 @@ class CommitRange:
|
|||
logger.debug(f'Reached Release commit, breaking: {commit}')
|
||||
break
|
||||
|
||||
revert_match = self.REVERT_RE.fullmatch(commit.short)
|
||||
if revert_match:
|
||||
reverts[revert_match.group(1)] = commit
|
||||
if match := self.REVERT_RE.fullmatch(commit.short):
|
||||
reverts[match.group(1)] = commit
|
||||
continue
|
||||
|
||||
fix_match = self.FIXES_RE.search(commit.short)
|
||||
if fix_match:
|
||||
commitish = fix_match.group(1)
|
||||
fixes[commitish].append(commit)
|
||||
if fix_commitish:
|
||||
fixes[fix_commitish].append(commit)
|
||||
|
||||
commits[commit.hash] = commit
|
||||
|
||||
for commitish, revert_commit in reverts.items():
|
||||
reverted = commits.pop(commitish, None)
|
||||
if reverted:
|
||||
if reverted := commits.pop(commitish, None):
|
||||
logger.debug(f'{commitish} fully reverted {reverted}')
|
||||
else:
|
||||
commits[revert_commit.hash] = revert_commit
|
||||
|
|
@ -354,6 +353,13 @@ class CommitRange:
|
|||
continue
|
||||
commit = Commit(override_hash, override['short'], override.get('authors') or [])
|
||||
logger.info(f'CHANGE {self._commits[commit.hash]} -> {commit}')
|
||||
if match := self.FIXES_RE.search(commit.short):
|
||||
fix_commitish = match.group(1)
|
||||
if fix_commitish in self._commits:
|
||||
del self._commits[commit.hash]
|
||||
self._fixes[fix_commitish].append(commit)
|
||||
logger.info(f'Found fix for {fix_commitish[:HASH_LENGTH]}: {commit.hash[:HASH_LENGTH]}')
|
||||
continue
|
||||
self._commits[commit.hash] = commit
|
||||
|
||||
self._commits = dict(reversed(self._commits.items()))
|
||||
|
|
@ -374,7 +380,7 @@ class CommitRange:
|
|||
issues = [issue.strip()[1:] for issue in issues.split(',')] if issues else []
|
||||
|
||||
if prefix:
|
||||
groups, details, sub_details = zip(*map(self.details_from_prefix, prefix.split(',')))
|
||||
groups, details, sub_details = zip(*map(self.details_from_prefix, prefix.split(',')), strict=True)
|
||||
group = next(iter(filter(None, groups)), None)
|
||||
details = ', '.join(unique(details))
|
||||
sub_details = list(itertools.chain.from_iterable(sub_details))
|
||||
|
|
@ -461,8 +467,7 @@ def create_changelog(args):
|
|||
|
||||
logger.info(f'Loaded {len(commits)} commits')
|
||||
|
||||
new_contributors = get_new_contributors(args.contributors_path, commits)
|
||||
if new_contributors:
|
||||
if new_contributors := get_new_contributors(args.contributors_path, commits):
|
||||
if args.contributors:
|
||||
write_file(args.contributors_path, '\n'.join(new_contributors) + '\n', mode='a')
|
||||
logger.info(f'New contributors: {", ".join(new_contributors)}')
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ def main():
|
|||
return # This is unused in yt-dlp
|
||||
|
||||
parser = optparse.OptionParser(usage='%prog INFILE OUTFILE')
|
||||
options, args = parser.parse_args()
|
||||
_, args = parser.parse_args()
|
||||
if len(args) != 2:
|
||||
parser.error('Expected an input and an output filename')
|
||||
|
||||
|
|
|
|||
|
|
@ -11,11 +11,13 @@ import re
|
|||
|
||||
from devscripts.utils import get_filename_args, read_file, write_file
|
||||
|
||||
VERBOSE_TMPL = '''
|
||||
VERBOSE = '''
|
||||
- type: checkboxes
|
||||
id: verbose
|
||||
attributes:
|
||||
label: Provide verbose output that clearly demonstrates the problem
|
||||
description: |
|
||||
This is mandatory unless absolutely impossible to provide. If you are unable to provide the output, please explain why.
|
||||
options:
|
||||
- label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
|
||||
required: true
|
||||
|
|
@ -32,14 +34,15 @@ VERBOSE_TMPL = '''
|
|||
placeholder: |
|
||||
[debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
|
||||
[debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
|
||||
[debug] yt-dlp version nightly@... from yt-dlp/yt-dlp [b634ba742] (win_exe)
|
||||
[debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
|
||||
[debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
|
||||
[debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
|
||||
[debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
|
||||
[debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
|
||||
[debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
|
||||
[debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
|
||||
[debug] Proxy map: {}
|
||||
[debug] Request Handlers: urllib, requests
|
||||
[debug] Loaded 1893 extractors
|
||||
[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
|
||||
[debug] Request Handlers: urllib, requests, websockets, curl_cffi
|
||||
[debug] Loaded 1838 extractors
|
||||
[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
|
||||
Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
|
||||
yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
|
||||
[youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
|
||||
<more lines>
|
||||
|
|
@ -49,20 +52,20 @@ VERBOSE_TMPL = '''
|
|||
'''.strip()
|
||||
|
||||
NO_SKIP = '''
|
||||
- type: checkboxes
|
||||
- type: markdown
|
||||
attributes:
|
||||
label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
|
||||
description: Fill all fields even if you think it is irrelevant for the issue
|
||||
options:
|
||||
- label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\\* field
|
||||
required: true
|
||||
value: |
|
||||
> [!IMPORTANT]
|
||||
> Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
|
||||
'''.strip()
|
||||
|
||||
|
||||
def main():
|
||||
fields = {'no_skip': NO_SKIP}
|
||||
fields['verbose'] = VERBOSE_TMPL % fields
|
||||
fields['verbose_optional'] = re.sub(r'(\n\s+validations:)?\n\s+required: true', '', fields['verbose'])
|
||||
fields = {
|
||||
'no_skip': NO_SKIP,
|
||||
'verbose': VERBOSE,
|
||||
'verbose_optional': re.sub(r'(\n\s+validations:)?\n\s+required: true', '', VERBOSE),
|
||||
}
|
||||
|
||||
infile, outfile = get_filename_args(has_infile=True)
|
||||
write_file(outfile, read_file(infile) % fields)
|
||||
|
|
|
|||
|
|
@ -2,7 +2,6 @@
|
|||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
|
@ -11,6 +10,9 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|||
from inspect import getsource
|
||||
|
||||
from devscripts.utils import get_filename_args, read_file, write_file
|
||||
from yt_dlp.extractor import import_extractors
|
||||
from yt_dlp.extractor.common import InfoExtractor, SearchInfoExtractor
|
||||
from yt_dlp.globals import extractors
|
||||
|
||||
NO_ATTR = object()
|
||||
STATIC_CLASS_PROPERTIES = [
|
||||
|
|
@ -34,17 +36,12 @@ MODULE_TEMPLATE = read_file('devscripts/lazy_load_template.py')
|
|||
|
||||
|
||||
def main():
|
||||
os.environ['YTDLP_NO_PLUGINS'] = 'true'
|
||||
os.environ['YTDLP_NO_LAZY_EXTRACTORS'] = 'true'
|
||||
|
||||
lazy_extractors_filename = get_filename_args(default_outfile='yt_dlp/extractor/lazy_extractors.py')
|
||||
if os.path.exists(lazy_extractors_filename):
|
||||
os.remove(lazy_extractors_filename)
|
||||
|
||||
_ALL_CLASSES = get_all_ies() # Must be before import
|
||||
|
||||
import yt_dlp.plugins
|
||||
from yt_dlp.extractor.common import InfoExtractor, SearchInfoExtractor
|
||||
|
||||
# Filter out plugins
|
||||
_ALL_CLASSES = [cls for cls in _ALL_CLASSES if not cls.__module__.startswith(f'{yt_dlp.plugins.PACKAGE_NAME}.')]
|
||||
import_extractors()
|
||||
|
||||
DummyInfoExtractor = type('InfoExtractor', (InfoExtractor,), {'IE_NAME': NO_ATTR})
|
||||
module_src = '\n'.join((
|
||||
|
|
@ -52,26 +49,12 @@ def main():
|
|||
' _module = None',
|
||||
*extra_ie_code(DummyInfoExtractor),
|
||||
'\nclass LazyLoadSearchExtractor(LazyLoadExtractor):\n pass\n',
|
||||
*build_ies(_ALL_CLASSES, (InfoExtractor, SearchInfoExtractor), DummyInfoExtractor),
|
||||
*build_ies(list(extractors.value.values()), (InfoExtractor, SearchInfoExtractor), DummyInfoExtractor),
|
||||
))
|
||||
|
||||
write_file(lazy_extractors_filename, f'{module_src}\n')
|
||||
|
||||
|
||||
def get_all_ies():
|
||||
PLUGINS_DIRNAME = 'ytdlp_plugins'
|
||||
BLOCKED_DIRNAME = f'{PLUGINS_DIRNAME}_blocked'
|
||||
if os.path.exists(PLUGINS_DIRNAME):
|
||||
# os.rename cannot be used, e.g. in Docker. See https://github.com/yt-dlp/yt-dlp/pull/4958
|
||||
shutil.move(PLUGINS_DIRNAME, BLOCKED_DIRNAME)
|
||||
try:
|
||||
from yt_dlp.extractor.extractors import _ALL_CLASSES
|
||||
finally:
|
||||
if os.path.exists(BLOCKED_DIRNAME):
|
||||
shutil.move(BLOCKED_DIRNAME, PLUGINS_DIRNAME)
|
||||
return _ALL_CLASSES
|
||||
|
||||
|
||||
def extra_ie_code(ie, base=None):
|
||||
for var in STATIC_CLASS_PROPERTIES:
|
||||
val = getattr(ie, var)
|
||||
|
|
@ -92,7 +75,7 @@ def build_ies(ies, bases, attr_base):
|
|||
if ie in ies:
|
||||
names.append(ie.__name__)
|
||||
|
||||
yield f'\n_ALL_CLASSES = [{", ".join(names)}]'
|
||||
yield '\n_CLASS_LOOKUP = {%s}' % ', '.join(f'{name!r}: {name}' for name in names)
|
||||
|
||||
|
||||
def sort_ies(ies, ignored_bases):
|
||||
|
|
|
|||
|
|
@ -10,10 +10,21 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|||
from devscripts.utils import get_filename_args, write_file
|
||||
from yt_dlp.extractor import list_extractor_classes
|
||||
|
||||
TEMPLATE = '''\
|
||||
# Supported sites
|
||||
|
||||
Below is a list of all extractors that are currently included with yt-dlp.
|
||||
If a site is not listed here, it might still be supported by yt-dlp's embed extraction or generic extractor.
|
||||
Not all sites listed here are guaranteed to work; websites are constantly changing and sometimes this breaks yt-dlp's support for them.
|
||||
The only reliable way to check if a site is supported is to try it.
|
||||
|
||||
{ie_list}
|
||||
'''
|
||||
|
||||
|
||||
def main():
|
||||
out = '\n'.join(ie.description() for ie in list_extractor_classes() if ie.IE_DESC is not False)
|
||||
write_file(get_filename_args(), f'# Supported sites\n{out}\n')
|
||||
write_file(get_filename_args(), TEMPLATE.format(ie_list=out))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
|
|
|||
|
|
@ -16,7 +16,19 @@ fix_test_name = functools.partial(re.compile(r'IE(_all|_\d+)?$').sub, r'\1')
|
|||
def parse_args():
|
||||
parser = argparse.ArgumentParser(description='Run selected yt-dlp tests')
|
||||
parser.add_argument(
|
||||
'test', help='a extractor tests, or one of "core" or "download"', nargs='*')
|
||||
'test', help='an extractor test, test path, or one of "core" or "download"', nargs='*')
|
||||
parser.add_argument(
|
||||
'--flaky',
|
||||
action='store_true',
|
||||
default=None,
|
||||
help='Allow running flaky tests. (default: run, unless in CI)',
|
||||
)
|
||||
parser.add_argument(
|
||||
'--no-flaky',
|
||||
action='store_false',
|
||||
dest='flaky',
|
||||
help=argparse.SUPPRESS,
|
||||
)
|
||||
parser.add_argument(
|
||||
'-k', help='run a test matching EXPRESSION. Same as "pytest -k"', metavar='EXPRESSION')
|
||||
parser.add_argument(
|
||||
|
|
@ -24,10 +36,11 @@ def parse_args():
|
|||
return parser.parse_args()
|
||||
|
||||
|
||||
def run_tests(*tests, pattern=None, ci=False):
|
||||
run_core = 'core' in tests or (not pattern and not tests)
|
||||
def run_tests(*tests, pattern=None, ci=False, flaky: bool | None = None):
|
||||
# XXX: hatch uses `tests` if no arguments are passed
|
||||
run_core = 'core' in tests or 'tests' in tests or (not pattern and not tests)
|
||||
run_download = 'download' in tests
|
||||
tests = list(map(fix_test_name, tests))
|
||||
run_flaky = flaky or (flaky is None and not ci)
|
||||
|
||||
pytest_args = args.pytest_args or os.getenv('HATCH_TEST_ARGS', '')
|
||||
arguments = ['pytest', '-Werror', '--tb=short', *shlex.split(pytest_args)]
|
||||
|
|
@ -41,7 +54,11 @@ def run_tests(*tests, pattern=None, ci=False):
|
|||
arguments.extend(['-m', 'download'])
|
||||
else:
|
||||
arguments.extend(
|
||||
f'test/test_download.py::TestDownload::test_{test}' for test in tests)
|
||||
test if '/' in test
|
||||
else f'test/test_download.py::TestDownload::test_{fix_test_name(test)}'
|
||||
for test in tests)
|
||||
if not run_flaky:
|
||||
arguments.append('--disallow-flaky')
|
||||
|
||||
print(f'Running {arguments}', flush=True)
|
||||
try:
|
||||
|
|
@ -70,6 +87,11 @@ if __name__ == '__main__':
|
|||
args = parse_args()
|
||||
|
||||
os.chdir(Path(__file__).parent.parent)
|
||||
sys.exit(run_tests(*args.test, pattern=args.k, ci=bool(os.getenv('CI'))))
|
||||
sys.exit(run_tests(
|
||||
*args.test,
|
||||
pattern=args.k,
|
||||
ci=bool(os.getenv('CI')),
|
||||
flaky=args.flaky,
|
||||
))
|
||||
except KeyboardInterrupt:
|
||||
pass
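For reference, the reworked runner can presumably be invoked like this (illustrative command lines, run from the repository root):

# python devscripts/run_tests.py core                  -> core tests; flaky tests still run outside CI
# python devscripts/run_tests.py --no-flaky download   -> download tests, with --disallow-flaky passed to pytest
# python devscripts/run_tests.py test/test_utils.py    -> an argument containing '/' is forwarded to pytest as a path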
|
||||
|
|
|
|||
157
devscripts/setup_variables.py
Normal file
|
|
@ -0,0 +1,157 @@
|
|||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
import datetime as dt
|
||||
import json
|
||||
|
||||
from devscripts.utils import calculate_version
|
||||
|
||||
|
||||
STABLE_REPOSITORY = 'yt-dlp/yt-dlp'
|
||||
|
||||
|
||||
def setup_variables(environment):
|
||||
"""
|
||||
`environment` must contain these keys:
|
||||
REPOSITORY, INPUTS, PROCESSED,
|
||||
PUSH_VERSION_COMMIT, PYPI_PROJECT,
|
||||
SOURCE_PYPI_PROJECT, SOURCE_PYPI_SUFFIX,
|
||||
TARGET_PYPI_PROJECT, TARGET_PYPI_SUFFIX,
|
||||
SOURCE_ARCHIVE_REPO, TARGET_ARCHIVE_REPO,
|
||||
HAS_SOURCE_ARCHIVE_REPO_TOKEN,
|
||||
HAS_TARGET_ARCHIVE_REPO_TOKEN,
|
||||
HAS_ARCHIVE_REPO_TOKEN
|
||||
|
||||
`INPUTS` must contain these keys:
|
||||
prerelease
|
||||
|
||||
`PROCESSED` must contain these keys:
|
||||
source_repo, source_tag,
|
||||
target_repo, target_tag
|
||||
"""
|
||||
REPOSITORY = environment['REPOSITORY']
|
||||
INPUTS = json.loads(environment['INPUTS'])
|
||||
PROCESSED = json.loads(environment['PROCESSED'])
|
||||
|
||||
source_channel = None
|
||||
does_not_have_needed_token = False
|
||||
target_repo_token = None
|
||||
pypi_project = None
|
||||
pypi_suffix = None
|
||||
|
||||
source_repo = PROCESSED['source_repo']
|
||||
source_tag = PROCESSED['source_tag']
|
||||
if source_repo == 'stable':
|
||||
source_repo = STABLE_REPOSITORY
|
||||
if not source_repo:
|
||||
source_repo = REPOSITORY
|
||||
elif environment['SOURCE_ARCHIVE_REPO']:
|
||||
source_channel = environment['SOURCE_ARCHIVE_REPO']
|
||||
elif not source_tag and '/' not in source_repo:
|
||||
source_tag = source_repo
|
||||
source_repo = REPOSITORY
|
||||
|
||||
resolved_source = source_repo
|
||||
if source_tag:
|
||||
resolved_source = f'{resolved_source}@{source_tag}'
|
||||
elif source_repo == STABLE_REPOSITORY:
|
||||
resolved_source = 'stable'
|
||||
|
||||
revision = None
|
||||
if INPUTS['prerelease'] or not environment['PUSH_VERSION_COMMIT']:
|
||||
revision = dt.datetime.now(tz=dt.timezone.utc).strftime('%H%M%S')
|
||||
|
||||
version = calculate_version(INPUTS.get('version') or revision)
|
||||
|
||||
target_repo = PROCESSED['target_repo']
|
||||
target_tag = PROCESSED['target_tag']
|
||||
if target_repo:
|
||||
if target_repo == 'stable':
|
||||
target_repo = STABLE_REPOSITORY
|
||||
if not target_tag:
|
||||
if target_repo == STABLE_REPOSITORY:
|
||||
target_tag = version
|
||||
elif environment['TARGET_ARCHIVE_REPO']:
|
||||
target_tag = source_tag or version
|
||||
else:
|
||||
target_tag = target_repo
|
||||
target_repo = REPOSITORY
|
||||
if target_repo != REPOSITORY:
|
||||
target_repo = environment['TARGET_ARCHIVE_REPO']
|
||||
target_repo_token = f'{PROCESSED["target_repo"].upper()}_ARCHIVE_REPO_TOKEN'
|
||||
if not json.loads(environment['HAS_TARGET_ARCHIVE_REPO_TOKEN']):
|
||||
does_not_have_needed_token = True
|
||||
pypi_project = environment['TARGET_PYPI_PROJECT'] or None
|
||||
pypi_suffix = environment['TARGET_PYPI_SUFFIX'] or None
|
||||
else:
|
||||
target_tag = source_tag or version
|
||||
if source_channel:
|
||||
target_repo = source_channel
|
||||
target_repo_token = f'{PROCESSED["source_repo"].upper()}_ARCHIVE_REPO_TOKEN'
|
||||
if not json.loads(environment['HAS_SOURCE_ARCHIVE_REPO_TOKEN']):
|
||||
does_not_have_needed_token = True
|
||||
pypi_project = environment['SOURCE_PYPI_PROJECT'] or None
|
||||
pypi_suffix = environment['SOURCE_PYPI_SUFFIX'] or None
|
||||
else:
|
||||
target_repo = REPOSITORY
|
||||
|
||||
if does_not_have_needed_token:
|
||||
if not json.loads(environment['HAS_ARCHIVE_REPO_TOKEN']):
|
||||
print(f'::error::Repository access secret {target_repo_token} not found')
|
||||
return None
|
||||
target_repo_token = 'ARCHIVE_REPO_TOKEN'
|
||||
|
||||
if target_repo == REPOSITORY and not INPUTS['prerelease']:
|
||||
pypi_project = environment['PYPI_PROJECT'] or None
|
||||
|
||||
return {
|
||||
'channel': resolved_source,
|
||||
'version': version,
|
||||
'target_repo': target_repo,
|
||||
'target_repo_token': target_repo_token,
|
||||
'target_tag': target_tag,
|
||||
'pypi_project': pypi_project,
|
||||
'pypi_suffix': pypi_suffix,
|
||||
}
|
||||
|
||||
|
||||
def process_inputs(inputs):
|
||||
outputs = {}
|
||||
for key in ('source', 'target'):
|
||||
repo, _, tag = inputs.get(key, '').partition('@')
|
||||
outputs[f'{key}_repo'] = repo
|
||||
outputs[f'{key}_tag'] = tag
|
||||
return outputs
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if not os.getenv('GITHUB_OUTPUT'):
|
||||
print('This script is only intended for use with GitHub Actions', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
if 'process_inputs' in sys.argv:
|
||||
inputs = json.loads(os.environ['INPUTS'])
|
||||
print('::group::Inputs')
|
||||
print(json.dumps(inputs, indent=2))
|
||||
print('::endgroup::')
|
||||
outputs = process_inputs(inputs)
|
||||
print('::group::Processed')
|
||||
print(json.dumps(outputs, indent=2))
|
||||
print('::endgroup::')
|
||||
with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
|
||||
f.write('\n'.join(f'{key}={value}' for key, value in outputs.items()))
|
||||
sys.exit(0)
|
||||
|
||||
outputs = setup_variables(dict(os.environ))
|
||||
if not outputs:
|
||||
sys.exit(1)
|
||||
|
||||
print('::group::Output variables')
|
||||
print(json.dumps(outputs, indent=2))
|
||||
print('::endgroup::')
|
||||
|
||||
with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
|
||||
f.write('\n'.join(f'{key}={value or ""}' for key, value in outputs.items()))
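A minimal sketch of how process_inputs splits the workflow inputs, assuming the script is importable from the repository root (values are illustrative):

from devscripts.setup_variables import process_inputs

assert process_inputs({'source': 'nightly', 'target': 'fork/yt-dlp@2025.01.01'}) == {
    'source_repo': 'nightly', 'source_tag': '',
    'target_repo': 'fork/yt-dlp', 'target_tag': '2025.01.01',
}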
|
||||
324
devscripts/setup_variables_tests.py
Normal file
|
|
@ -0,0 +1,324 @@
|
|||
import os
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
import datetime as dt
|
||||
import json
|
||||
|
||||
from devscripts.setup_variables import STABLE_REPOSITORY, process_inputs, setup_variables
|
||||
from devscripts.utils import calculate_version
|
||||
|
||||
|
||||
def _test(github_repository, note, repo_vars, repo_secrets, inputs, expected=None, ignore_revision=False):
|
||||
inp = inputs.copy()
|
||||
inp.setdefault('linux_armv7l', True)
|
||||
inp.setdefault('prerelease', False)
|
||||
processed = process_inputs(inp)
|
||||
source_repo = processed['source_repo'].upper()
|
||||
target_repo = processed['target_repo'].upper()
|
||||
variables = {k.upper(): v for k, v in repo_vars.items()}
|
||||
secrets = {k.upper(): v for k, v in repo_secrets.items()}
|
||||
|
||||
env = {
|
||||
# Keep this in sync with prepare.setup_variables in release.yml
|
||||
'INPUTS': json.dumps(inp),
|
||||
'PROCESSED': json.dumps(processed),
|
||||
'REPOSITORY': github_repository,
|
||||
'PUSH_VERSION_COMMIT': variables.get('PUSH_VERSION_COMMIT') or '',
|
||||
'PYPI_PROJECT': variables.get('PYPI_PROJECT') or '',
|
||||
'SOURCE_PYPI_PROJECT': variables.get(f'{source_repo}_PYPI_PROJECT') or '',
|
||||
'SOURCE_PYPI_SUFFIX': variables.get(f'{source_repo}_PYPI_SUFFIX') or '',
|
||||
'TARGET_PYPI_PROJECT': variables.get(f'{target_repo}_PYPI_PROJECT') or '',
|
||||
'TARGET_PYPI_SUFFIX': variables.get(f'{target_repo}_PYPI_SUFFIX') or '',
|
||||
'SOURCE_ARCHIVE_REPO': variables.get(f'{source_repo}_ARCHIVE_REPO') or '',
|
||||
'TARGET_ARCHIVE_REPO': variables.get(f'{target_repo}_ARCHIVE_REPO') or '',
|
||||
'HAS_SOURCE_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get(f'{source_repo}_ARCHIVE_REPO_TOKEN'))),
|
||||
'HAS_TARGET_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get(f'{target_repo}_ARCHIVE_REPO_TOKEN'))),
|
||||
'HAS_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get('ARCHIVE_REPO_TOKEN'))),
|
||||
}
|
||||
|
||||
result = setup_variables(env)
|
||||
if not expected:
|
||||
print(' {\n' + '\n'.join(f' {k!r}: {v!r},' for k, v in result.items()) + '\n }')
|
||||
return
|
||||
|
||||
exp = expected.copy()
|
||||
if ignore_revision:
|
||||
assert len(result['version']) == len(exp['version']), f'revision missing: {github_repository} {note}'
|
||||
version_is_tag = result['version'] == result['target_tag']
|
||||
for dct in (result, exp):
|
||||
dct['version'] = '.'.join(dct['version'].split('.')[:3])
|
||||
if version_is_tag:
|
||||
dct['target_tag'] = dct['version']
|
||||
assert result == exp, f'unexpected result: {github_repository} {note}'
|
||||
|
||||
|
||||
def test_setup_variables():
|
||||
DEFAULT_VERSION_WITH_REVISION = dt.datetime.now(tz=dt.timezone.utc).strftime('%Y.%m.%d.%H%M%S')
|
||||
DEFAULT_VERSION = calculate_version()
|
||||
BASE_REPO_VARS = {
|
||||
'MASTER_ARCHIVE_REPO': 'yt-dlp/yt-dlp-master-builds',
|
||||
'NIGHTLY_ARCHIVE_REPO': 'yt-dlp/yt-dlp-nightly-builds',
|
||||
'NIGHTLY_PYPI_PROJECT': 'yt-dlp',
|
||||
'NIGHTLY_PYPI_SUFFIX': 'dev',
|
||||
'PUSH_VERSION_COMMIT': '1',
|
||||
'PYPI_PROJECT': 'yt-dlp',
|
||||
}
|
||||
BASE_REPO_SECRETS = {
|
||||
'ARCHIVE_REPO_TOKEN': '1',
|
||||
}
|
||||
FORK_REPOSITORY = 'fork/yt-dlp'
|
||||
FORK_ORG = FORK_REPOSITORY.partition('/')[0]
|
||||
|
||||
_test(
|
||||
STABLE_REPOSITORY, 'official vars/secrets, stable',
|
||||
BASE_REPO_VARS, BASE_REPO_SECRETS, {}, {
|
||||
'channel': 'stable',
|
||||
'version': DEFAULT_VERSION,
|
||||
'target_repo': STABLE_REPOSITORY,
|
||||
'target_repo_token': None,
|
||||
'target_tag': DEFAULT_VERSION,
|
||||
'pypi_project': 'yt-dlp',
|
||||
'pypi_suffix': None,
|
||||
})
|
||||
_test(
|
||||
STABLE_REPOSITORY, 'official vars/secrets, nightly (w/o target)',
|
||||
BASE_REPO_VARS, BASE_REPO_SECRETS, {
|
||||
'source': 'nightly',
|
||||
'prerelease': True,
|
||||
}, {
|
||||
'channel': 'nightly',
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': 'yt-dlp/yt-dlp-nightly-builds',
|
||||
'target_repo_token': 'ARCHIVE_REPO_TOKEN',
|
||||
'target_tag': DEFAULT_VERSION_WITH_REVISION,
|
||||
'pypi_project': 'yt-dlp',
|
||||
'pypi_suffix': 'dev',
|
||||
}, ignore_revision=True)
|
||||
_test(
|
||||
STABLE_REPOSITORY, 'official vars/secrets, nightly',
|
||||
BASE_REPO_VARS, BASE_REPO_SECRETS, {
|
||||
'source': 'nightly',
|
||||
'target': 'nightly',
|
||||
'prerelease': True,
|
||||
}, {
|
||||
'channel': 'nightly',
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': 'yt-dlp/yt-dlp-nightly-builds',
|
||||
'target_repo_token': 'ARCHIVE_REPO_TOKEN',
|
||||
'target_tag': DEFAULT_VERSION_WITH_REVISION,
|
||||
'pypi_project': 'yt-dlp',
|
||||
'pypi_suffix': 'dev',
|
||||
}, ignore_revision=True)
|
||||
_test(
|
||||
STABLE_REPOSITORY, 'official vars/secrets, master (w/o target)',
|
||||
BASE_REPO_VARS, BASE_REPO_SECRETS, {
|
||||
'source': 'master',
|
||||
'prerelease': True,
|
||||
}, {
|
||||
'channel': 'master',
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': 'yt-dlp/yt-dlp-master-builds',
|
||||
'target_repo_token': 'ARCHIVE_REPO_TOKEN',
|
||||
'target_tag': DEFAULT_VERSION_WITH_REVISION,
|
||||
'pypi_project': None,
|
||||
'pypi_suffix': None,
|
||||
}, ignore_revision=True)
|
||||
_test(
|
||||
STABLE_REPOSITORY, 'official vars/secrets, master',
|
||||
BASE_REPO_VARS, BASE_REPO_SECRETS, {
|
||||
'source': 'master',
|
||||
'target': 'master',
|
||||
'prerelease': True,
|
||||
}, {
|
||||
'channel': 'master',
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': 'yt-dlp/yt-dlp-master-builds',
|
||||
'target_repo_token': 'ARCHIVE_REPO_TOKEN',
|
||||
'target_tag': DEFAULT_VERSION_WITH_REVISION,
|
||||
'pypi_project': None,
|
||||
'pypi_suffix': None,
|
||||
}, ignore_revision=True)
|
||||
_test(
|
||||
STABLE_REPOSITORY, 'official vars/secrets, special tag, updates to stable',
|
||||
BASE_REPO_VARS, BASE_REPO_SECRETS, {
|
||||
'target': f'{STABLE_REPOSITORY}@experimental',
|
||||
'prerelease': True,
|
||||
}, {
|
||||
'channel': 'stable',
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': STABLE_REPOSITORY,
|
||||
'target_repo_token': None,
|
||||
'target_tag': 'experimental',
|
||||
'pypi_project': None,
|
||||
'pypi_suffix': None,
|
||||
}, ignore_revision=True)
|
||||
_test(
|
||||
STABLE_REPOSITORY, 'official vars/secrets, special tag, "stable" as target repo',
|
||||
BASE_REPO_VARS, BASE_REPO_SECRETS, {
|
||||
'target': 'stable@experimental',
|
||||
'prerelease': True,
|
||||
}, {
|
||||
'channel': 'stable',
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': STABLE_REPOSITORY,
|
||||
'target_repo_token': None,
|
||||
'target_tag': 'experimental',
|
||||
'pypi_project': None,
|
||||
'pypi_suffix': None,
|
||||
}, ignore_revision=True)
|
||||
|
||||
_test(
|
||||
FORK_REPOSITORY, 'fork w/o vars/secrets, stable',
|
||||
{}, {}, {}, {
|
||||
'channel': FORK_REPOSITORY,
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': FORK_REPOSITORY,
|
||||
'target_repo_token': None,
|
||||
'target_tag': DEFAULT_VERSION_WITH_REVISION,
|
||||
'pypi_project': None,
|
||||
'pypi_suffix': None,
|
||||
}, ignore_revision=True)
|
||||
_test(
|
||||
FORK_REPOSITORY, 'fork w/o vars/secrets, prerelease',
|
||||
{}, {}, {'prerelease': True}, {
|
||||
'channel': FORK_REPOSITORY,
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': FORK_REPOSITORY,
|
||||
'target_repo_token': None,
|
||||
'target_tag': DEFAULT_VERSION_WITH_REVISION,
|
||||
'pypi_project': None,
|
||||
'pypi_suffix': None,
|
||||
}, ignore_revision=True)
|
||||
_test(
|
||||
FORK_REPOSITORY, 'fork w/o vars/secrets, nightly',
|
||||
{}, {}, {
|
||||
'prerelease': True,
|
||||
'source': 'nightly',
|
||||
'target': 'nightly',
|
||||
}, {
|
||||
'channel': f'{FORK_REPOSITORY}@nightly',
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': FORK_REPOSITORY,
|
||||
'target_repo_token': None,
|
||||
'target_tag': 'nightly',
|
||||
'pypi_project': None,
|
||||
'pypi_suffix': None,
|
||||
}, ignore_revision=True)
|
||||
_test(
|
||||
FORK_REPOSITORY, 'fork w/o vars/secrets, master',
|
||||
{}, {}, {
|
||||
'prerelease': True,
|
||||
'source': 'master',
|
||||
'target': 'master',
|
||||
}, {
|
||||
'channel': f'{FORK_REPOSITORY}@master',
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': FORK_REPOSITORY,
|
||||
'target_repo_token': None,
|
||||
'target_tag': 'master',
|
||||
'pypi_project': None,
|
||||
'pypi_suffix': None,
|
||||
}, ignore_revision=True)
|
||||
_test(
|
||||
FORK_REPOSITORY, 'fork w/o vars/secrets, revision',
|
||||
{}, {}, {'version': '123'}, {
|
||||
'channel': FORK_REPOSITORY,
|
||||
'version': f'{DEFAULT_VERSION[:10]}.123',
|
||||
'target_repo': FORK_REPOSITORY,
|
||||
'target_repo_token': None,
|
||||
'target_tag': f'{DEFAULT_VERSION[:10]}.123',
|
||||
'pypi_project': None,
|
||||
'pypi_suffix': None,
|
||||
})
|
||||
|
||||
_test(
|
||||
FORK_REPOSITORY, 'fork w/ PUSH_VERSION_COMMIT, stable',
|
||||
{'PUSH_VERSION_COMMIT': '1'}, {}, {}, {
|
||||
'channel': FORK_REPOSITORY,
|
||||
'version': DEFAULT_VERSION,
|
||||
'target_repo': FORK_REPOSITORY,
|
||||
'target_repo_token': None,
|
||||
'target_tag': DEFAULT_VERSION,
|
||||
'pypi_project': None,
|
||||
'pypi_suffix': None,
|
||||
})
|
||||
_test(
|
||||
FORK_REPOSITORY, 'fork w/ PUSH_VERSION_COMMIT, prerelease',
|
||||
{'PUSH_VERSION_COMMIT': '1'}, {}, {'prerelease': True}, {
|
||||
'channel': FORK_REPOSITORY,
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': FORK_REPOSITORY,
|
||||
'target_repo_token': None,
|
||||
'target_tag': DEFAULT_VERSION_WITH_REVISION,
|
||||
'pypi_project': None,
|
||||
'pypi_suffix': None,
|
||||
}, ignore_revision=True)
|
||||
|
||||
_test(
|
||||
FORK_REPOSITORY, 'fork w/NIGHTLY_ARCHIVE_REPO_TOKEN, nightly', {
|
||||
'NIGHTLY_ARCHIVE_REPO': f'{FORK_ORG}/yt-dlp-nightly-builds',
|
||||
'PYPI_PROJECT': 'yt-dlp-test',
|
||||
}, {
|
||||
'NIGHTLY_ARCHIVE_REPO_TOKEN': '1',
|
||||
}, {
|
||||
'source': f'{FORK_ORG}/yt-dlp-nightly-builds',
|
||||
'target': 'nightly',
|
||||
'prerelease': True,
|
||||
}, {
|
||||
'channel': f'{FORK_ORG}/yt-dlp-nightly-builds',
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': f'{FORK_ORG}/yt-dlp-nightly-builds',
|
||||
'target_repo_token': 'NIGHTLY_ARCHIVE_REPO_TOKEN',
|
||||
'target_tag': DEFAULT_VERSION_WITH_REVISION,
|
||||
'pypi_project': None,
|
||||
'pypi_suffix': None,
|
||||
}, ignore_revision=True)
|
||||
_test(
|
||||
FORK_REPOSITORY, 'fork w/MASTER_ARCHIVE_REPO_TOKEN, master', {
|
||||
'MASTER_ARCHIVE_REPO': f'{FORK_ORG}/yt-dlp-master-builds',
|
||||
'MASTER_PYPI_PROJECT': 'yt-dlp-test',
|
||||
'MASTER_PYPI_SUFFIX': 'dev',
|
||||
}, {
|
||||
'MASTER_ARCHIVE_REPO_TOKEN': '1',
|
||||
}, {
|
||||
'source': f'{FORK_ORG}/yt-dlp-master-builds',
|
||||
'target': 'master',
|
||||
'prerelease': True,
|
||||
}, {
|
||||
'channel': f'{FORK_ORG}/yt-dlp-master-builds',
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': f'{FORK_ORG}/yt-dlp-master-builds',
|
||||
'target_repo_token': 'MASTER_ARCHIVE_REPO_TOKEN',
|
||||
'target_tag': DEFAULT_VERSION_WITH_REVISION,
|
||||
'pypi_project': 'yt-dlp-test',
|
||||
'pypi_suffix': 'dev',
|
||||
}, ignore_revision=True)
|
||||
|
||||
_test(
|
||||
FORK_REPOSITORY, 'fork, non-numeric tag',
|
||||
{}, {}, {'source': 'experimental'}, {
|
||||
'channel': f'{FORK_REPOSITORY}@experimental',
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': FORK_REPOSITORY,
|
||||
'target_repo_token': None,
|
||||
'target_tag': 'experimental',
|
||||
'pypi_project': None,
|
||||
'pypi_suffix': None,
|
||||
}, ignore_revision=True)
|
||||
_test(
|
||||
FORK_REPOSITORY, 'fork, non-numeric tag, updates to stable',
|
||||
{}, {}, {
|
||||
'prerelease': True,
|
||||
'source': 'stable',
|
||||
'target': 'experimental',
|
||||
}, {
|
||||
'channel': 'stable',
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': FORK_REPOSITORY,
|
||||
'target_repo_token': None,
|
||||
'target_tag': 'experimental',
|
||||
'pypi_project': None,
|
||||
'pypi_suffix': None,
|
||||
}, ignore_revision=True)
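A hedged note: these release-workflow checks can presumably be exercised locally with pytest, e.g.

# python -m pytest devscripts/setup_variables_tests.py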
|
||||
|
|
@ -9,24 +9,9 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|||
|
||||
import argparse
|
||||
import contextlib
|
||||
import datetime as dt
|
||||
import sys
|
||||
|
||||
from devscripts.utils import read_version, run_process, write_file
|
||||
|
||||
|
||||
def get_new_version(version, revision):
|
||||
if not version:
|
||||
version = dt.datetime.now(dt.timezone.utc).strftime('%Y.%m.%d')
|
||||
|
||||
if revision:
|
||||
assert revision.isdecimal(), 'Revision must be a number'
|
||||
else:
|
||||
old_version = read_version().split('.')
|
||||
if version.split('.') == old_version[:3]:
|
||||
revision = str(int(([*old_version, 0])[3]) + 1)
|
||||
|
||||
return f'{version}.{revision}' if revision else version
|
||||
from devscripts.utils import calculate_version, run_process, write_file
|
||||
|
||||
|
||||
def get_git_head():
|
||||
|
|
@ -72,9 +57,7 @@ if __name__ == '__main__':
|
|||
args = parser.parse_args()
|
||||
|
||||
git_head = get_git_head()
|
||||
version = (
|
||||
args.version if args.version and '.' in args.version
|
||||
else get_new_version(None, args.version))
|
||||
version = calculate_version(args.version)
|
||||
write_file(args.output, VERSION_TEMPLATE.format(
|
||||
version=version, git_head=git_head, channel=args.channel, origin=args.origin,
|
||||
package_version=f'{version}{args.suffix}'))
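The version logic now lives in a single helper; a sketch of its behaviour, assuming it is called from the repository root (dates are illustrative):

from devscripts.utils import calculate_version

calculate_version('2025.11.03')  # a value containing '.' is treated as a full version and returned unchanged
calculate_version('123')         # a purely numeric value becomes the revision: '<today>.123'
calculate_version()              # today's date, with an auto-incremented revision if yt_dlp/version.py already has it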
|
||||
|
|
|
|||
|
|
@ -20,7 +20,9 @@ if __name__ == '__main__':
|
|||
'--changelog-path', type=Path, default=Path(__file__).parent.parent / 'Changelog.md',
|
||||
help='path to the Changelog file')
|
||||
args = parser.parse_args()
|
||||
new_entry = create_changelog(args)
|
||||
|
||||
header, sep, changelog = read_file(args.changelog_path).partition('\n### ')
|
||||
write_file(args.changelog_path, f'{header}{sep}{read_version()}\n{new_entry}\n{sep}{changelog}')
|
||||
current_version = read_version()
|
||||
if current_version != changelog.splitlines()[0]:
|
||||
new_entry = create_changelog(args)
|
||||
write_file(args.changelog_path, f'{header}{sep}{current_version}\n{new_entry}\n{sep}{changelog}')
|
||||
|
|
|
|||
166
devscripts/update_ejs.py
Executable file
|
|
@ -0,0 +1,166 @@
|
|||
#!/usr/bin/env python3
|
||||
from __future__ import annotations
|
||||
|
||||
import contextlib
|
||||
import io
|
||||
import json
|
||||
import hashlib
|
||||
import pathlib
|
||||
import urllib.request
|
||||
import zipfile
|
||||
|
||||
|
||||
TEMPLATE = '''\
|
||||
# This file is generated by devscripts/update_ejs.py. DO NOT MODIFY!
|
||||
|
||||
VERSION = {version!r}
|
||||
HASHES = {{
|
||||
{hash_mapping}
|
||||
}}
|
||||
'''
|
||||
PREFIX = ' "yt-dlp-ejs=='
|
||||
BASE_PATH = pathlib.Path(__file__).parent.parent
|
||||
PYPROJECT_PATH = BASE_PATH / 'pyproject.toml'
|
||||
PACKAGE_PATH = BASE_PATH / 'yt_dlp/extractor/youtube/jsc/_builtin/vendor'
|
||||
RELEASE_URL = 'https://api.github.com/repos/yt-dlp/ejs/releases/latest'
|
||||
ASSETS = {
|
||||
'yt.solver.lib.js': False,
|
||||
'yt.solver.lib.min.js': False,
|
||||
'yt.solver.deno.lib.js': True,
|
||||
'yt.solver.bun.lib.js': True,
|
||||
'yt.solver.core.min.js': False,
|
||||
'yt.solver.core.js': True,
|
||||
}
|
||||
MAKEFILE_PATH = BASE_PATH / 'Makefile'
|
||||
|
||||
|
||||
def request(url: str):
|
||||
return contextlib.closing(urllib.request.urlopen(url))
|
||||
|
||||
|
||||
def makefile_variables(
|
||||
version: str | None = None,
|
||||
name: str | None = None,
|
||||
digest: str | None = None,
|
||||
data: bytes | None = None,
|
||||
keys_only: bool = False,
|
||||
) -> dict[str, str | None]:
|
||||
assert keys_only or all(arg is not None for arg in (version, name, digest, data))
|
||||
|
||||
return {
|
||||
'EJS_VERSION': None if keys_only else version,
|
||||
'EJS_WHEEL_NAME': None if keys_only else name,
|
||||
'EJS_WHEEL_HASH': None if keys_only else digest,
|
||||
'EJS_PY_FOLDERS': None if keys_only else list_wheel_contents(data, 'py', files=False),
|
||||
'EJS_PY_FILES': None if keys_only else list_wheel_contents(data, 'py', folders=False),
|
||||
'EJS_JS_FOLDERS': None if keys_only else list_wheel_contents(data, 'js', files=False),
|
||||
'EJS_JS_FILES': None if keys_only else list_wheel_contents(data, 'js', folders=False),
|
||||
}
|
||||
|
||||
|
||||
def list_wheel_contents(
|
||||
wheel_data: bytes,
|
||||
suffix: str | None = None,
|
||||
folders: bool = True,
|
||||
files: bool = True,
|
||||
) -> str:
|
||||
assert folders or files, 'at least one of "folders" or "files" must be True'
|
||||
|
||||
with zipfile.ZipFile(io.BytesIO(wheel_data)) as zipf:
|
||||
path_gen = (zinfo.filename for zinfo in zipf.infolist())
|
||||
|
||||
filtered = filter(lambda path: path.startswith('yt_dlp_ejs/'), path_gen)
|
||||
if suffix:
|
||||
filtered = filter(lambda path: path.endswith(f'.{suffix}'), filtered)
|
||||
|
||||
files_list = list(filtered)
|
||||
if not folders:
|
||||
return ' '.join(files_list)
|
||||
|
||||
folders_list = list(dict.fromkeys(path.rpartition('/')[0] for path in files_list))
|
||||
if not files:
|
||||
return ' '.join(folders_list)
|
||||
|
||||
return ' '.join(folders_list + files_list)
|
||||
|
||||
|
||||
def main():
|
||||
current_version = None
|
||||
with PYPROJECT_PATH.open() as file:
|
||||
for line in file:
|
||||
if not line.startswith(PREFIX):
|
||||
continue
|
||||
current_version, _, _ = line.removeprefix(PREFIX).partition('"')
|
||||
|
||||
if not current_version:
|
||||
print('yt-dlp-ejs dependency line could not be found')
|
||||
return
|
||||
|
||||
makefile_info = makefile_variables(keys_only=True)
|
||||
prefixes = tuple(f'{key} = ' for key in makefile_info)
|
||||
with MAKEFILE_PATH.open() as file:
|
||||
for line in file:
|
||||
if not line.startswith(prefixes):
|
||||
continue
|
||||
key, _, val = line.partition(' = ')
|
||||
makefile_info[key] = val.rstrip()
|
||||
|
||||
with request(RELEASE_URL) as resp:
|
||||
info = json.load(resp)
|
||||
|
||||
version = info['tag_name']
|
||||
if version == current_version:
|
||||
print(f'yt-dlp-ejs is up to date! ({version})')
|
||||
return
|
||||
|
||||
print(f'Updating yt-dlp-ejs from {current_version} to {version}')
|
||||
hashes = []
|
||||
wheel_info = {}
|
||||
for asset in info['assets']:
|
||||
name = asset['name']
|
||||
is_wheel = name.startswith('yt_dlp_ejs-') and name.endswith('.whl')
|
||||
if not is_wheel and name not in ASSETS:
|
||||
continue
|
||||
with request(asset['browser_download_url']) as resp:
|
||||
data = resp.read()
|
||||
|
||||
# verify digest from github
|
||||
digest = asset['digest']
|
||||
algo, _, expected = digest.partition(':')
|
||||
hexdigest = hashlib.new(algo, data).hexdigest()
|
||||
assert hexdigest == expected, f'downloaded asset digest mismatch ({hexdigest!r} != {expected!r})'
|
||||
|
||||
if is_wheel:
|
||||
wheel_info = makefile_variables(version, name, digest, data)
|
||||
continue
|
||||
|
||||
# calculate sha3-512 digest
|
||||
asset_hash = hashlib.sha3_512(data).hexdigest()
|
||||
hashes.append(f' {name!r}: {asset_hash!r},')
|
||||
|
||||
if ASSETS[name]:
|
||||
(PACKAGE_PATH / name).write_bytes(data)
|
||||
|
||||
hash_mapping = '\n'.join(hashes)
|
||||
for asset_name in ASSETS:
|
||||
assert asset_name in hash_mapping, f'{asset_name} not found in release'
|
||||
|
||||
assert all(wheel_info.get(key) for key in makefile_info), 'wheel info not found in release'
|
||||
|
||||
(PACKAGE_PATH / '_info.py').write_text(TEMPLATE.format(
|
||||
version=version,
|
||||
hash_mapping=hash_mapping,
|
||||
))
|
||||
|
||||
content = PYPROJECT_PATH.read_text()
|
||||
updated = content.replace(PREFIX + current_version, PREFIX + version)
|
||||
PYPROJECT_PATH.write_text(updated)
|
||||
|
||||
makefile = MAKEFILE_PATH.read_text()
|
||||
for key in wheel_info:
|
||||
makefile = makefile.replace(f'{key} = {makefile_info[key]}', f'{key} = {wheel_info[key]}')
|
||||
MAKEFILE_PATH.write_text(makefile)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
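A small, hedged illustration of the wheel-listing helper; the archive contents below are made up, and devscripts is assumed to be importable from the repository root:

import io
import zipfile

from devscripts.update_ejs import list_wheel_contents

buf = io.BytesIO()
with zipfile.ZipFile(buf, 'w') as zf:
    zf.writestr('yt_dlp_ejs/__init__.py', '')
    zf.writestr('yt_dlp_ejs/vendor/solver.core.min.js', '')

# Only paths under yt_dlp_ejs/ with the requested suffix are kept
print(list_wheel_contents(buf.getvalue(), 'js', folders=False))  # -> 'yt_dlp_ejs/vendor/solver.core.min.js'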
|
||||
|
|
@ -1,5 +1,7 @@
|
|||
import argparse
|
||||
import datetime as dt
|
||||
import functools
|
||||
import re
|
||||
import subprocess
|
||||
|
||||
|
||||
|
|
@ -20,6 +22,23 @@ def read_version(fname='yt_dlp/version.py', varname='__version__'):
|
|||
return items[varname]
|
||||
|
||||
|
||||
def calculate_version(version=None, fname='yt_dlp/version.py'):
|
||||
if version and '.' in version:
|
||||
return version
|
||||
|
||||
revision = version
|
||||
version = dt.datetime.now(dt.timezone.utc).strftime('%Y.%m.%d')
|
||||
|
||||
if revision:
|
||||
assert re.fullmatch(r'[0-9]+', revision), 'Revision must be numeric'
|
||||
else:
|
||||
old_version = read_version(fname=fname).split('.')
|
||||
if version.split('.') == old_version[:3]:
|
||||
revision = str(int(([*old_version, 0])[3]) + 1)
|
||||
|
||||
return f'{version}.{revision}' if revision else version
|
||||
|
||||
|
||||
def get_filename_args(has_infile=False, default_outfile=None):
|
||||
parser = argparse.ArgumentParser()
|
||||
if has_infile:
|
||||
|
|
|
|||
|
|
@ -18,6 +18,7 @@ def build_completion(opt_parser):
|
|||
for opt in group.option_list]
|
||||
opts_file = [opt for opt in opts if opt.metavar == 'FILE']
|
||||
opts_dir = [opt for opt in opts if opt.metavar == 'DIR']
|
||||
opts_path = [opt for opt in opts if opt.metavar == 'PATH']
|
||||
|
||||
fileopts = []
|
||||
for opt in opts_file:
|
||||
|
|
@ -26,6 +27,12 @@ def build_completion(opt_parser):
|
|||
if opt._long_opts:
|
||||
fileopts.extend(opt._long_opts)
|
||||
|
||||
for opt in opts_path:
|
||||
if opt._short_opts:
|
||||
fileopts.extend(opt._short_opts)
|
||||
if opt._long_opts:
|
||||
fileopts.extend(opt._long_opts)
|
||||
|
||||
diropts = []
|
||||
for opt in opts_dir:
|
||||
if opt._short_opts:
|
||||
|
|
|
|||
|
|
@ -1,68 +1,75 @@
|
|||
[build-system]
|
||||
requires = ["hatchling"]
|
||||
requires = ["hatchling>=1.27.0"]
|
||||
build-backend = "hatchling.build"
|
||||
|
||||
[project]
|
||||
name = "yt-dlp"
|
||||
maintainers = [
|
||||
authors = [
|
||||
{name = "pukkandan", email = "pukkandan.ytdlp@gmail.com"},
|
||||
]
|
||||
maintainers = [
|
||||
{email = "maintainers@yt-dlp.org"},
|
||||
{name = "Grub4K", email = "contact@grub4k.xyz"},
|
||||
{name = "bashonly", email = "bashonly@protonmail.com"},
|
||||
{name = "coletdjnz", email = "coletdjnz@protonmail.com"},
|
||||
{name = "sepro", email = "sepro@sepr0.com"},
|
||||
]
|
||||
description = "A feature-rich command-line audio/video downloader"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.8"
|
||||
requires-python = ">=3.10"
|
||||
keywords = [
|
||||
"cli",
|
||||
"downloader",
|
||||
"youtube-dl",
|
||||
"video-downloader",
|
||||
"youtube-downloader",
|
||||
"sponsorblock",
|
||||
"youtube-dlc",
|
||||
"yt-dlp",
|
||||
]
|
||||
license = {file = "LICENSE"}
|
||||
license = "Unlicense"
|
||||
license-files = ["LICENSE"]
|
||||
classifiers = [
|
||||
"Topic :: Multimedia :: Video",
|
||||
"Development Status :: 5 - Production/Stable",
|
||||
"Environment :: Console",
|
||||
"Programming Language :: Python",
|
||||
"Programming Language :: Python :: 3 :: Only",
|
||||
"Programming Language :: Python :: 3.8",
|
||||
"Programming Language :: Python :: 3.9",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
"Programming Language :: Python :: 3.11",
|
||||
"Programming Language :: Python :: 3.12",
|
||||
"Programming Language :: Python :: 3.13",
|
||||
"Programming Language :: Python :: 3.14",
|
||||
"Programming Language :: Python :: Implementation",
|
||||
"Programming Language :: Python :: Implementation :: CPython",
|
||||
"Programming Language :: Python :: Implementation :: PyPy",
|
||||
"License :: OSI Approved :: The Unlicense (Unlicense)",
|
||||
"Operating System :: OS Independent",
|
||||
]
|
||||
dynamic = ["version"]
|
||||
dependencies = [
|
||||
dependencies = []
|
||||
|
||||
[project.optional-dependencies]
|
||||
default = [
|
||||
"brotli; implementation_name=='cpython'",
|
||||
"brotlicffi; implementation_name!='cpython'",
|
||||
"certifi",
|
||||
"mutagen",
|
||||
"pycryptodomex",
|
||||
"requests>=2.32.2,<3",
|
||||
"urllib3>=1.26.17,<3",
|
||||
"websockets>=12.0",
|
||||
"urllib3>=2.0.2,<3",
|
||||
"websockets>=13.0",
|
||||
"yt-dlp-ejs==0.3.1",
|
||||
]
|
||||
curl-cffi = [
|
||||
"curl-cffi>=0.5.10,!=0.6.*,!=0.7.*,!=0.8.*,!=0.9.*,<0.14; implementation_name=='cpython'",
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
default = []
|
||||
curl-cffi = ["curl-cffi==0.5.10; implementation_name=='cpython'"]
|
||||
secretstorage = [
|
||||
"cffi",
|
||||
"secretstorage",
|
||||
]
|
||||
build = [
|
||||
"build",
|
||||
"hatchling",
|
||||
"hatchling>=1.27.0",
|
||||
"pip",
|
||||
"setuptools",
|
||||
"setuptools>=71.0.2,<81", # See https://github.com/pyinstaller/pyinstaller/issues/9149
|
||||
"wheel",
|
||||
]
|
||||
dev = [
|
||||
|
|
@ -72,23 +79,21 @@ dev = [
|
|||
]
|
||||
static-analysis = [
|
||||
"autopep8~=2.0",
|
||||
"ruff~=0.5.0",
|
||||
"ruff~=0.14.0",
|
||||
]
|
||||
test = [
|
||||
"pytest~=8.1",
|
||||
"pytest-rerunfailures~=14.0",
|
||||
]
|
||||
pyinstaller = [
|
||||
"pyinstaller>=6.7.0", # for compat with setuptools>=70
|
||||
]
|
||||
py2exe = [
|
||||
"py2exe>=0.12",
|
||||
"pyinstaller>=6.13.0", # Windows temp cleanup fixed in 6.13.0
|
||||
]
|
||||
|
||||
[project.urls]
|
||||
Documentation = "https://github.com/yt-dlp/yt-dlp#readme"
|
||||
Repository = "https://github.com/yt-dlp/yt-dlp"
|
||||
Tracker = "https://github.com/yt-dlp/yt-dlp/issues"
|
||||
Funding = "https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators"
|
||||
Funding = "https://github.com/yt-dlp/yt-dlp/blob/master/Maintainers.md#maintainers"
|
||||
|
||||
[project.scripts]
|
||||
yt-dlp = "yt_dlp:main"
|
||||
|
|
@ -106,7 +111,6 @@ include = [
|
|||
"/LICENSE", # included as license
|
||||
"/pyproject.toml", # included by default
|
||||
"/README.md", # included as readme
|
||||
"/setup.cfg",
|
||||
"/supportedsites.md",
|
||||
]
|
||||
artifacts = [
|
||||
|
|
@ -119,7 +123,12 @@ artifacts = [
|
|||
|
||||
[tool.hatch.build.targets.wheel]
|
||||
packages = ["yt_dlp"]
|
||||
artifacts = ["/yt_dlp/extractor/lazy_extractors.py"]
|
||||
artifacts = [
|
||||
"/yt_dlp/extractor/lazy_extractors.py",
|
||||
]
|
||||
exclude = [
|
||||
"/yt_dlp/**/*.md",
|
||||
]
|
||||
|
||||
[tool.hatch.build.targets.wheel.shared-data]
|
||||
"completions/bash/yt-dlp" = "share/bash-completion/completions/yt-dlp"
|
||||
|
|
@ -158,7 +167,6 @@ lint-fix = "ruff check --fix {args:.}"
|
|||
features = ["test"]
|
||||
dependencies = [
|
||||
"pytest-randomly~=3.15",
|
||||
"pytest-rerunfailures~=14.0",
|
||||
"pytest-xdist[psutil]~=3.5",
|
||||
]
|
||||
|
||||
|
|
@ -168,14 +176,12 @@ run-cov = "echo Code coverage not implemented && exit 1"
|
|||
|
||||
[[tool.hatch.envs.hatch-test.matrix]]
|
||||
python = [
|
||||
"3.8",
|
||||
"3.9",
|
||||
"3.10",
|
||||
"3.11",
|
||||
"3.12",
|
||||
"pypy3.8",
|
||||
"pypy3.9",
|
||||
"pypy3.10",
|
||||
"3.13",
|
||||
"3.14",
|
||||
"pypy3.11",
|
||||
]
|
||||
|
||||
[tool.ruff]
|
||||
|
|
@ -187,6 +193,7 @@ ignore = [
|
|||
"E501", # line-too-long
|
||||
"E731", # lambda-assignment
|
||||
"E741", # ambiguous-variable-name
|
||||
"UP031", # printf-string-formatting
|
||||
"UP036", # outdated-version-block
|
||||
"B006", # mutable-argument-default
|
||||
"B008", # function-call-in-default-argument
|
||||
|
|
@ -195,6 +202,7 @@ ignore = [
|
|||
"B023", # function-uses-loop-variable (false positives)
|
||||
"B028", # no-explicit-stacklevel
|
||||
"B904", # raise-without-from-inside-except
|
||||
"A005", # stdlib-module-shadowing
|
||||
"C401", # unnecessary-generator-set
|
||||
"C402", # unnecessary-generator-dict
|
||||
"PIE790", # unnecessary-placeholder
|
||||
|
|
@ -210,10 +218,12 @@ ignore = [
|
|||
"TD001", # invalid-todo-tag
|
||||
"TD002", # missing-todo-author
|
||||
"TD003", # missing-todo-link
|
||||
"PLC0415", # import-outside-top-level
|
||||
"PLE0604", # invalid-all-object (false positives)
|
||||
"PLE0643", # potential-index-error (false positives)
|
||||
"PLW0603", # global-statement
|
||||
"PLW1510", # subprocess-run-without-check
|
||||
"PLW1641", # eq-without-hash
|
||||
"PLW2901", # redefined-loop-name
|
||||
"RUF001", # ambiguous-unicode-character-string
|
||||
"RUF012", # mutable-class-default
|
||||
|
|
@ -259,9 +269,6 @@ select = [
|
|||
"A002", # builtin-argument-shadowing
|
||||
"C408", # unnecessary-collection-call
|
||||
]
|
||||
"yt_dlp/jsinterp.py" = [
|
||||
"UP031", # printf-string-formatting
|
||||
]
|
||||
|
||||
[tool.ruff.lint.isort]
|
||||
known-first-party = [
|
||||
|
|
@ -314,6 +321,17 @@ banned-from = [
|
|||
"yt_dlp.compat.compat_urllib_parse_urlparse".msg = "Use `urllib.parse.urlparse` instead."
|
||||
"yt_dlp.compat.compat_shlex_quote".msg = "Use `yt_dlp.utils.shell_quote` instead."
|
||||
"yt_dlp.utils.error_to_compat_str".msg = "Use `str` instead."
|
||||
"yt_dlp.utils.bytes_to_intlist".msg = "Use `list` instead."
|
||||
"yt_dlp.utils.intlist_to_bytes".msg = "Use `bytes` instead."
|
||||
"yt_dlp.utils.jwt_encode_hs256".msg = "Use `yt_dlp.utils.jwt_encode` instead."
|
||||
"yt_dlp.utils.decodeArgument".msg = "Do not use"
|
||||
"yt_dlp.utils.decodeFilename".msg = "Do not use"
|
||||
"yt_dlp.utils.encodeFilename".msg = "Do not use"
|
||||
"yt_dlp.compat.compat_os_name".msg = "Use `os.name` instead."
|
||||
"yt_dlp.compat.compat_realpath".msg = "Use `os.path.realpath` instead."
|
||||
"yt_dlp.compat.functools".msg = "Use `functools` instead."
|
||||
"yt_dlp.utils.decodeOption".msg = "Do not use"
|
||||
"yt_dlp.utils.compiled_regex_type".msg = "Use `re.Pattern` instead."
|
||||
|
||||
[tool.autopep8]
|
||||
max_line_length = 120
|
||||
|
|
@ -376,9 +394,14 @@ select = [
|
|||
"W391",
|
||||
"W504",
|
||||
]
|
||||
exclude = "*/extractor/lazy_extractors.py,*venv*,*/test/testdata/sigs/player-*.js,.idea,.vscode"
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
addopts = "-ra -v --strict-markers"
|
||||
addopts = [
|
||||
"-ra", # summary: all except passed
|
||||
"--verbose",
|
||||
"--strict-markers",
|
||||
]
|
||||
markers = [
|
||||
"download",
|
||||
]
|
||||
|
|
|
|||
39
setup.cfg
|
|
@ -1,39 +0,0 @@
|
|||
[flake8]
|
||||
exclude = build,venv,.tox,.git,.pytest_cache
|
||||
ignore = E402,E501,E731,E741,W503
|
||||
max_line_length = 120
|
||||
per_file_ignores =
|
||||
devscripts/lazy_load_template.py: F401
|
||||
|
||||
|
||||
[autoflake]
|
||||
ignore-init-module-imports = true
|
||||
ignore-pass-after-docstring = true
|
||||
remove-all-unused-imports = true
|
||||
remove-duplicate-keys = true
|
||||
remove-unused-variables = true
|
||||
|
||||
|
||||
[tox:tox]
|
||||
skipsdist = true
|
||||
envlist = py{38,39,310,311,312},pypy{38,39,310}
|
||||
skip_missing_interpreters = true
|
||||
|
||||
[testenv] # tox
|
||||
deps =
|
||||
pytest
|
||||
commands = pytest {posargs:"-m not download"}
|
||||
passenv = HOME # For test_compat_expanduser
|
||||
setenv =
|
||||
# PYTHONWARNINGS = error # Catches PIP's warnings too
|
||||
|
||||
|
||||
[isort]
|
||||
py_version = 38
|
||||
multi_line_output = VERTICAL_HANGING_INDENT
|
||||
line_length = 80
|
||||
reverse_relative = true
|
||||
ensure_newline_before_comments = true
|
||||
include_trailing_comma = true
|
||||
known_first_party =
|
||||
test
|
||||
File diff suppressed because it is too large
|
|
@ -52,6 +52,33 @@ def skip_handlers_if(request, handler):
|
|||
pytest.skip(marker.args[1] if len(marker.args) > 1 else '')
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def handler_flaky(request, handler):
|
||||
"""Mark a certain handler as being flaky.
|
||||
|
||||
This will skip the test if pytest is run with `--disallow-flaky`
|
||||
|
||||
usage:
|
||||
pytest.mark.handler_flaky('my_handler', os.name != 'nt', reason='reason')
|
||||
"""
|
||||
for marker in request.node.iter_markers(handler_flaky.__name__):
|
||||
if (
|
||||
marker.args[0] == handler.RH_KEY
|
||||
and (not marker.args[1:] or any(marker.args[1:]))
|
||||
and request.config.getoption('disallow_flaky')
|
||||
):
|
||||
reason = marker.kwargs.get('reason')
|
||||
pytest.skip(f'flaky: {reason}' if reason else 'flaky')
|
||||
|
||||
|
||||
def pytest_addoption(parser, pluginmanager):
|
||||
parser.addoption(
|
||||
'--disallow-flaky',
|
||||
action='store_true',
|
||||
help='do not allow flaky tests to run.',
|
||||
)
|
||||
|
||||
|
||||
def pytest_configure(config):
|
||||
config.addinivalue_line(
|
||||
'markers', 'skip_handler(handler): skip test for the given handler',
|
||||
|
|
@ -62,3 +89,6 @@ def pytest_configure(config):
|
|||
config.addinivalue_line(
|
||||
'markers', 'skip_handlers_if(handler): skip test for handlers when the condition is true',
|
||||
)
|
||||
config.addinivalue_line(
|
||||
'markers', 'handler_flaky(handler): mark handler as flaky if condition is true',
|
||||
)
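A hedged usage sketch of the new marker and option; the handler key and condition below are assumptions, not taken from the diff:

import os

import pytest


@pytest.mark.handler_flaky('Requests', os.name == 'nt', reason='intermittent on Windows runners')
def test_something(handler):
    ...  # skipped when pytest is invoked with --disallow-flaky and the condition is true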
|
||||
|
|
|
|||
197
test/helper.py
|
|
@ -9,7 +9,6 @@ import types
|
|||
|
||||
import yt_dlp.extractor
|
||||
from yt_dlp import YoutubeDL
|
||||
from yt_dlp.compat import compat_os_name
|
||||
from yt_dlp.utils import preferredencoding, try_call, write_string, find_available_port
|
||||
|
||||
if 'pytest' in sys.modules:
|
||||
|
|
@ -49,7 +48,7 @@ def report_warning(message, *args, **kwargs):
|
|||
Print the message to stderr, it will be prefixed with 'WARNING:'
|
||||
If stderr is a tty file the 'WARNING:' will be colored
|
||||
"""
|
||||
if sys.stderr.isatty() and compat_os_name != 'nt':
|
||||
if sys.stderr.isatty() and os.name != 'nt':
|
||||
_msg_header = '\033[0;33mWARNING:\033[0m'
|
||||
else:
|
||||
_msg_header = 'WARNING:'
|
||||
|
|
@ -102,87 +101,109 @@ def getwebpagetestcases():
|
|||
md5 = lambda s: hashlib.md5(s.encode()).hexdigest()
|
||||
|
||||
|
||||
def expect_value(self, got, expected, field):
|
||||
if isinstance(expected, str) and expected.startswith('re:'):
|
||||
match_str = expected[len('re:'):]
|
||||
match_rex = re.compile(match_str)
|
||||
def _iter_differences(got, expected, field):
|
||||
if isinstance(expected, str):
|
||||
op, _, val = expected.partition(':')
|
||||
if op in ('mincount', 'maxcount', 'count'):
|
||||
if not isinstance(got, (list, dict)):
|
||||
yield field, f'expected either {list.__name__} or {dict.__name__}, got {type(got).__name__}'
|
||||
return
|
||||
|
||||
self.assertTrue(
|
||||
isinstance(got, str),
|
||||
f'Expected a {str.__name__} object, but got {type(got).__name__} for field {field}')
|
||||
self.assertTrue(
|
||||
match_rex.match(got),
|
||||
f'field {field} (value: {got!r}) should match {match_str!r}')
|
||||
elif isinstance(expected, str) and expected.startswith('startswith:'):
|
||||
start_str = expected[len('startswith:'):]
|
||||
self.assertTrue(
|
||||
isinstance(got, str),
|
||||
f'Expected a {str.__name__} object, but got {type(got).__name__} for field {field}')
|
||||
self.assertTrue(
|
||||
got.startswith(start_str),
|
||||
f'field {field} (value: {got!r}) should start with {start_str!r}')
|
||||
elif isinstance(expected, str) and expected.startswith('contains:'):
|
||||
contains_str = expected[len('contains:'):]
|
||||
self.assertTrue(
|
||||
isinstance(got, str),
|
||||
f'Expected a {str.__name__} object, but got {type(got).__name__} for field {field}')
|
||||
self.assertTrue(
|
||||
contains_str in got,
|
||||
f'field {field} (value: {got!r}) should contain {contains_str!r}')
|
||||
elif isinstance(expected, type):
|
||||
self.assertTrue(
|
||||
isinstance(got, expected),
|
||||
f'Expected type {expected!r} for field {field}, but got value {got!r} of type {type(got)!r}')
|
||||
elif isinstance(expected, dict) and isinstance(got, dict):
|
||||
expect_dict(self, got, expected)
|
||||
elif isinstance(expected, list) and isinstance(got, list):
|
||||
self.assertEqual(
|
||||
len(expected), len(got),
|
||||
f'Expect a list of length {len(expected)}, but got a list of length {len(got)} for field {field}')
|
||||
for index, (item_got, item_expected) in enumerate(zip(got, expected)):
|
||||
type_got = type(item_got)
|
||||
type_expected = type(item_expected)
|
||||
self.assertEqual(
|
||||
type_expected, type_got,
|
||||
f'Type mismatch for list item at index {index} for field {field}, '
|
||||
f'expected {type_expected!r}, got {type_got!r}')
|
||||
expect_value(self, item_got, item_expected, field)
|
||||
else:
|
||||
if isinstance(expected, str) and expected.startswith('md5:'):
|
||||
self.assertTrue(
|
||||
isinstance(got, str),
|
||||
f'Expected field {field} to be a unicode object, but got value {got!r} of type {type(got)!r}')
|
||||
got = 'md5:' + md5(got)
|
||||
elif isinstance(expected, str) and re.match(r'^(?:min|max)?count:\d+', expected):
|
||||
self.assertTrue(
|
||||
isinstance(got, (list, dict)),
|
||||
f'Expected field {field} to be a list or a dict, but it is of type {type(got).__name__}')
|
||||
op, _, expected_num = expected.partition(':')
|
||||
expected_num = int(expected_num)
|
||||
expected_num = int(val)
|
||||
got_num = len(got)
|
||||
if op == 'mincount':
|
||||
assert_func = assertGreaterEqual
|
||||
msg_tmpl = 'Expected %d items in field %s, but only got %d'
|
||||
elif op == 'maxcount':
|
||||
assert_func = assertLessEqual
|
||||
msg_tmpl = 'Expected maximum %d items in field %s, but got %d'
|
||||
elif op == 'count':
|
||||
assert_func = assertEqual
|
||||
msg_tmpl = 'Expected exactly %d items in field %s, but got %d'
|
||||
else:
|
||||
assert False
|
||||
assert_func(
|
||||
self, len(got), expected_num,
|
||||
msg_tmpl % (expected_num, field, len(got)))
|
||||
if got_num < expected_num:
|
||||
yield field, f'expected at least {val} items, got {got_num}'
|
||||
return
|
||||
|
||||
if op == 'maxcount':
|
||||
if got_num > expected_num:
|
||||
yield field, f'expected at most {val} items, got {got_num}'
|
||||
return
|
||||
|
||||
assert op == 'count'
|
||||
if got_num != expected_num:
|
||||
yield field, f'expected exactly {val} items, got {got_num}'
|
||||
return
|
||||
self.assertEqual(
|
||||
expected, got,
|
||||
f'Invalid value for field {field}, expected {expected!r}, got {got!r}')
|
||||
|
||||
if not isinstance(got, str):
|
||||
yield field, f'expected {str.__name__}, got {type(got).__name__}'
|
||||
return
|
||||
|
||||
if op == 're':
|
||||
if not re.match(val, got):
|
||||
yield field, f'should match {val!r}, got {got!r}'
|
||||
return
|
||||
|
||||
if op == 'startswith':
|
||||
if not got.startswith(val):
|
||||
yield field, f'should start with {val!r}, got {got!r}'
|
||||
return
|
||||
|
||||
if op == 'contains':
|
||||
if val not in got:
|
||||
yield field, f'should contain {val!r}, got {got!r}'
|
||||
return
|
||||
|
||||
if op == 'md5':
|
||||
hash_val = md5(got)
|
||||
if hash_val != val:
|
||||
yield field, f'expected hash {val}, got {hash_val}'
|
||||
return
|
||||
|
||||
if got != expected:
|
||||
yield field, f'expected {expected!r}, got {got!r}'
|
||||
return
|
||||
|
||||
if isinstance(expected, dict) and isinstance(got, dict):
|
||||
for key, expected_val in expected.items():
|
||||
if key not in got:
|
||||
yield field, f'missing key: {key!r}'
|
||||
continue
|
||||
|
||||
field_name = key if field is None else f'{field}.{key}'
|
||||
yield from _iter_differences(got[key], expected_val, field_name)
|
||||
return
|
||||
|
||||
if isinstance(expected, type):
|
||||
if not isinstance(got, expected):
|
||||
yield field, f'expected {expected.__name__}, got {type(got).__name__}'
|
||||
return
|
||||
|
||||
if isinstance(expected, list) and isinstance(got, list):
|
||||
# TODO: clever diffing algorithm lmao
|
||||
if len(expected) != len(got):
|
||||
yield field, f'expected length of {len(expected)}, got {len(got)}'
|
||||
return
|
||||
|
||||
for index, (got_val, expected_val) in enumerate(zip(got, expected, strict=True)):
|
||||
field_name = str(index) if field is None else f'{field}.{index}'
|
||||
yield from _iter_differences(got_val, expected_val, field_name)
|
||||
return
|
||||
|
||||
if got != expected:
|
||||
yield field, f'expected {expected!r}, got {got!r}'
|
||||
|
||||
|
||||
def _expect_value(message, got, expected, field):
|
||||
mismatches = list(_iter_differences(got, expected, field))
|
||||
if not mismatches:
|
||||
return
|
||||
|
||||
fields = [field for field, _ in mismatches if field is not None]
|
||||
return ''.join((
|
||||
message, f' ({", ".join(fields)})' if fields else '',
|
||||
*(f'\n\t{field}: {message}' for field, message in mismatches)))
|
||||
|
||||
|
||||
def expect_value(self, got, expected, field):
|
||||
if message := _expect_value('values differ', got, expected, field):
|
||||
self.fail(message)
|
||||
|
||||
|
||||
def expect_dict(self, got_dict, expected_dict):
|
||||
for info_field, expected in expected_dict.items():
|
||||
got = got_dict.get(info_field)
|
||||
expect_value(self, got, expected, info_field)
|
||||
if message := _expect_value('dictionaries differ', got_dict, expected_dict, None):
|
||||
self.fail(message)
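# Illustrative expectation strings understood by _iter_differences (field names are assumptions):
#   'title': 're:^Example'         -> the value must match the regular expression
#   'title': 'startswith:Example'  -> the value must start with the given prefix
#   'description': 'contains:foo'  -> the value must contain the given substring
#   'description': 'md5:<hash>'    -> the md5 of the value must equal the given hash
#   'tags': 'mincount:3'           -> a list/dict must have at least 3 items ('maxcount:'/'count:' work analogously)
# Any other expected value is compared for plain equality (recursively for dicts and lists).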
|
||||
|
||||
|
||||
def sanitize_got_info_dict(got_dict):
|
||||
|
|
@ -238,6 +259,20 @@ def sanitize_got_info_dict(got_dict):
|
|||
|
||||
|
||||
def expect_info_dict(self, got_dict, expected_dict):
|
||||
ALLOWED_KEYS_SORT_ORDER = (
|
||||
# NB: Keep in sync with the docstring of extractor/common.py
|
||||
'id', 'ext', 'direct', 'display_id', 'title', 'alt_title', 'description', 'media_type',
|
||||
'uploader', 'uploader_id', 'uploader_url', 'channel', 'channel_id', 'channel_url', 'channel_is_verified',
|
||||
'channel_follower_count', 'comment_count', 'view_count', 'concurrent_view_count',
|
||||
'like_count', 'dislike_count', 'repost_count', 'average_rating', 'age_limit', 'duration', 'thumbnail', 'heatmap',
|
||||
'chapters', 'chapter', 'chapter_number', 'chapter_id', 'start_time', 'end_time', 'section_start', 'section_end',
|
||||
'categories', 'tags', 'cast', 'composers', 'artists', 'album_artists', 'creators', 'genres',
|
||||
'track', 'track_number', 'track_id', 'album', 'album_type', 'disc_number',
|
||||
'series', 'series_id', 'season', 'season_number', 'season_id', 'episode', 'episode_number', 'episode_id',
|
||||
'timestamp', 'upload_date', 'release_timestamp', 'release_date', 'release_year', 'modified_timestamp', 'modified_date',
|
||||
'playable_in_embed', 'availability', 'live_status', 'location', 'license', '_old_archive_ids',
|
||||
)
|
||||
|
||||
expect_dict(self, got_dict, expected_dict)
|
||||
# Check for the presence of mandatory fields
|
||||
if got_dict.get('_type') not in ('playlist', 'multi_video'):
|
||||
|
|
@ -253,7 +288,13 @@ def expect_info_dict(self, got_dict, expected_dict):
|
|||
|
||||
test_info_dict = sanitize_got_info_dict(got_dict)
|
||||
|
||||
missing_keys = set(test_info_dict.keys()) - set(expected_dict.keys())
|
||||
# Check for invalid/misspelled field names being returned by the extractor
|
||||
invalid_keys = sorted(test_info_dict.keys() - ALLOWED_KEYS_SORT_ORDER)
|
||||
self.assertFalse(invalid_keys, f'Invalid fields returned by the extractor: {", ".join(invalid_keys)}')
|
||||
|
||||
missing_keys = sorted(
|
||||
test_info_dict.keys() - expected_dict.keys(),
|
||||
key=lambda x: ALLOWED_KEYS_SORT_ORDER.index(x))
|
||||
if missing_keys:
|
||||
def _repr(v):
|
||||
if isinstance(v, str):
|
||||
|
|
|
|||
|
|
@ -36,7 +36,6 @@
|
|||
"verbose": true,
|
||||
"writedescription": false,
|
||||
"writeinfojson": true,
|
||||
"writeannotations": false,
|
||||
"writelink": false,
|
||||
"writeurllink": false,
|
||||
"writewebloclink": false,
|
||||
|
|
|
|||
|
|
@ -36,6 +36,18 @@ class InfoExtractorTestRequestHandler(http.server.BaseHTTPRequestHandler):
|
|||
self.send_header('Content-Type', 'text/html; charset=utf-8')
|
||||
self.end_headers()
|
||||
self.wfile.write(TEAPOT_RESPONSE_BODY.encode())
|
||||
elif self.path == '/fake.m3u8':
|
||||
self.send_response(200)
|
||||
self.send_header('Content-Length', '1024')
|
||||
self.end_headers()
|
||||
self.wfile.write(1024 * b'\x00')
|
||||
elif self.path == '/bipbop.m3u8':
|
||||
with open('test/testdata/m3u8/bipbop_16x9.m3u8', 'rb') as f:
|
||||
data = f.read()
|
||||
self.send_response(200)
|
||||
self.send_header('Content-Length', str(len(data)))
|
||||
self.end_headers()
|
||||
self.wfile.write(data)
|
||||
else:
|
||||
assert False
|
||||
|
||||
|
|
@ -53,6 +65,18 @@ class TestInfoExtractor(unittest.TestCase):
|
|||
def test_ie_key(self):
|
||||
self.assertEqual(get_info_extractor(YoutubeIE.ie_key()), YoutubeIE)
|
||||
|
||||
def test_get_netrc_login_info(self):
|
||||
for params in [
|
||||
{'usenetrc': True, 'netrc_location': './test/testdata/netrc/netrc'},
|
||||
{'netrc_cmd': f'{sys.executable} ./test/testdata/netrc/print_netrc.py'},
|
||||
]:
|
||||
ie = DummyIE(FakeYDL(params))
|
||||
self.assertEqual(ie._get_netrc_login_info(netrc_machine='normal_use'), ('user', 'pass'))
|
||||
self.assertEqual(ie._get_netrc_login_info(netrc_machine='empty_user'), ('', 'pass'))
|
||||
self.assertEqual(ie._get_netrc_login_info(netrc_machine='empty_pass'), ('user', ''))
|
||||
self.assertEqual(ie._get_netrc_login_info(netrc_machine='both_empty'), ('', ''))
|
||||
self.assertEqual(ie._get_netrc_login_info(netrc_machine='nonexistent'), (None, None))
|
||||
|
||||
def test_html_search_regex(self):
|
||||
html = '<p id="foo">Watch this <a href="http://www.youtube.com/watch?v=BaW_jenozKc">video</a></p>'
|
||||
search = lambda re, *args: self.ie._html_search_regex(re, html, *args)
|
||||
|
|
@ -302,6 +326,20 @@ class TestInfoExtractor(unittest.TestCase):
|
|||
},
|
||||
{},
|
||||
),
|
||||
(
|
||||
# test thumbnail_url key without URL scheme
|
||||
r'''
|
||||
<script type="application/ld+json">
|
||||
{
|
||||
"@context": "https://schema.org",
|
||||
"@type": "VideoObject",
|
||||
"thumbnail_url": "//www.nobelprize.org/images/12693-landscape-medium-gallery.jpg"
|
||||
}</script>''',
|
||||
{
|
||||
'thumbnails': [{'url': 'https://www.nobelprize.org/images/12693-landscape-medium-gallery.jpg'}],
|
||||
},
|
||||
{},
|
||||
),
|
||||
]
|
||||
for html, expected_dict, search_json_ld_kwargs in _TESTS:
|
||||
expect_dict(
|
||||
|
|
@ -626,6 +664,7 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
|
|||
'img_bipbop_adv_example_fmp4',
|
||||
'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
|
||||
[{
|
||||
# 60kbps (bitrate not provided in m3u8); sorted as worst because it's grouped with lowest bitrate video track
|
||||
'format_id': 'aud1-English',
|
||||
'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/a1/prog_index.m3u8',
|
||||
'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
|
||||
|
|
@ -633,15 +672,9 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
|
|||
'ext': 'mp4',
|
||||
'protocol': 'm3u8_native',
|
||||
'audio_ext': 'mp4',
|
||||
'source_preference': 0,
|
||||
}, {
|
||||
'format_id': 'aud2-English',
|
||||
'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/a2/prog_index.m3u8',
|
||||
'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
|
||||
'language': 'en',
|
||||
'ext': 'mp4',
|
||||
'protocol': 'm3u8_native',
|
||||
'audio_ext': 'mp4',
|
||||
}, {
|
||||
# 192kbps (bitrate not provided in m3u8)
|
||||
'format_id': 'aud3-English',
|
||||
'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/a3/prog_index.m3u8',
|
||||
'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
|
||||
|
|
@ -649,6 +682,17 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
|
|||
'ext': 'mp4',
|
||||
'protocol': 'm3u8_native',
|
||||
'audio_ext': 'mp4',
|
||||
'source_preference': 1,
|
||||
}, {
|
||||
# 384kbps (bitrate not provided in m3u8); sorted as best because it's grouped with the highest bitrate video track
|
||||
'format_id': 'aud2-English',
|
||||
'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/a2/prog_index.m3u8',
|
||||
'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
|
||||
'language': 'en',
|
||||
'ext': 'mp4',
|
||||
'protocol': 'm3u8_native',
|
||||
'audio_ext': 'mp4',
|
||||
'source_preference': 2,
|
||||
}, {
|
||||
'format_id': '530',
|
||||
'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/v2/prog_index.m3u8',
|
||||
|
|
@@ -1901,7 +1945,7 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
|
|||
server_thread.daemon = True
|
||||
server_thread.start()
|
||||
|
||||
(content, urlh) = self.ie._download_webpage_handle(
|
||||
content, _ = self.ie._download_webpage_handle(
|
||||
f'http://127.0.0.1:{port}/teapot', None,
|
||||
expected_status=TEAPOT_RESPONSE_STATUS)
|
||||
self.assertEqual(content, TEAPOT_RESPONSE_BODY)
|
||||
|
|
@@ -1915,6 +1959,208 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
|
|||
with self.assertWarns(DeprecationWarning):
|
||||
self.assertEqual(self.ie._search_nextjs_data('', None, default='{}'), {})
|
||||
|
||||
def test_search_nextjs_v13_data(self):
|
||||
HTML = R'''
|
||||
<script>(self.__next_f=self.__next_f||[]).push([0])</script>
|
||||
<script>self.__next_f.push([2,"0:[\"$\",\"$L0\",null,{\"do_not_add_this\":\"fail\"}]\n"])</script>
|
||||
<script>self.__next_f.push([1,"1:I[46975,[],\"HTTPAccessFallbackBoundary\"]\n2:I[32630,[\"8183\",\"static/chunks/8183-768193f6a9e33cdd.js\"]]\n"])</script>
|
||||
<script nonce="abc123">self.__next_f.push([1,"e:[false,[\"$\",\"div\",null,{\"children\":[\"$\",\"$L18\",null,{\"foo\":\"bar\"}]}],false]\n "])</script>
|
||||
<script>self.__next_f.push([1,"2a:[[\"$\",\"div\",null,{\"className\":\"flex flex-col\",\"children\":[]}],[\"$\",\"$L16\",null,{\"meta\":{\"dateCreated\":1730489700,\"uuid\":\"40cac41d-8d29-4ef5-aa11-75047b9f0907\"}}]]\n"])</script>
|
||||
<script>self.__next_f.push([1,"df:[\"$undefined\",[\"$\",\"div\",null,{\"children\":[\"$\",\"$L17\",null,{}],\"do_not_include_this_field\":\"fail\"}],[\"$\",\"div\",null,{\"children\":[[\"$\",\"$L19\",null,{\"duplicated_field_name\":{\"x\":1}}],[\"$\",\"$L20\",null,{\"duplicated_field_name\":{\"y\":2}}]]}],\"$undefined\"]\n"])</script>
|
||||
<script>self.__next_f.push([3,"MzM6WyIkIiwiJEwzMiIsbnVsbCx7ImRlY29kZWQiOiJzdWNjZXNzIn1d"])</script>
|
||||
'''
|
||||
EXPECTED = {
|
||||
'18': {
|
||||
'foo': 'bar',
|
||||
},
|
||||
'16': {
|
||||
'meta': {
|
||||
'dateCreated': 1730489700,
|
||||
'uuid': '40cac41d-8d29-4ef5-aa11-75047b9f0907',
|
||||
},
|
||||
},
|
||||
'19': {
|
||||
'duplicated_field_name': {'x': 1},
|
||||
},
|
||||
'20': {
|
||||
'duplicated_field_name': {'y': 2},
|
||||
},
|
||||
}
|
||||
self.assertEqual(self.ie._search_nextjs_v13_data(HTML, None), EXPECTED)
|
||||
self.assertEqual(self.ie._search_nextjs_v13_data('', None, fatal=False), {})
|
||||
self.assertEqual(self.ie._search_nextjs_v13_data(None, None, fatal=False), {})
|
||||
|
||||
def test_search_nuxt_json(self):
|
||||
HTML_TMPL = '<script data-ssr="true" id="__NUXT_DATA__" type="application/json">[{}]</script>'
|
||||
VALID_DATA = '''
|
||||
["ShallowReactive",1],
|
||||
{"data":2,"state":21,"once":25,"_errors":28,"_server_errors":30},
|
||||
["ShallowReactive",3],
|
||||
{"$abcdef123456":4},
|
||||
{"podcast":5,"activeEpisodeData":7},
|
||||
{"podcast":6,"seasons":14},
|
||||
{"title":10,"id":11},
|
||||
["Reactive",8],
|
||||
{"episode":9,"creators":18,"empty_list":20},
|
||||
{"title":12,"id":13,"refs":34,"empty_refs":35},
|
||||
"Series Title",
|
||||
"podcast-id-01",
|
||||
"Episode Title",
|
||||
"episode-id-99",
|
||||
[15,16,17],
|
||||
1,
|
||||
2,
|
||||
3,
|
||||
[19],
|
||||
"Podcast Creator",
|
||||
[],
|
||||
{"$ssite-config":22},
|
||||
{"env":23,"name":24,"map":26,"numbers":14},
|
||||
"production",
|
||||
"podcast-website",
|
||||
["Set"],
|
||||
["Reactive",27],
|
||||
["Map"],
|
||||
["ShallowReactive",29],
|
||||
{},
|
||||
["NuxtError",31],
|
||||
{"status":32,"message":33},
|
||||
503,
|
||||
"Service Unavailable",
|
||||
[36,37],
|
||||
[38,39],
|
||||
["Ref",40],
|
||||
["ShallowRef",41],
|
||||
["EmptyRef",42],
|
||||
["EmptyShallowRef",43],
|
||||
"ref",
|
||||
"shallow_ref",
|
||||
"{\\"ref\\":1}",
|
||||
"{\\"shallow_ref\\":2}"
|
||||
'''
|
||||
PAYLOAD = {
|
||||
'data': {
|
||||
'$abcdef123456': {
|
||||
'podcast': {
|
||||
'podcast': {
|
||||
'title': 'Series Title',
|
||||
'id': 'podcast-id-01',
|
||||
},
|
||||
'seasons': [1, 2, 3],
|
||||
},
|
||||
'activeEpisodeData': {
|
||||
'episode': {
|
||||
'title': 'Episode Title',
|
||||
'id': 'episode-id-99',
|
||||
'refs': ['ref', 'shallow_ref'],
|
||||
'empty_refs': [{'ref': 1}, {'shallow_ref': 2}],
|
||||
},
|
||||
'creators': ['Podcast Creator'],
|
||||
'empty_list': [],
|
||||
},
|
||||
},
|
||||
},
|
||||
'state': {
|
||||
'$ssite-config': {
|
||||
'env': 'production',
|
||||
'name': 'podcast-website',
|
||||
'map': [],
|
||||
'numbers': [1, 2, 3],
|
||||
},
|
||||
},
|
||||
'once': [],
|
||||
'_errors': {},
|
||||
'_server_errors': {
|
||||
'status': 503,
|
||||
'message': 'Service Unavailable',
|
||||
},
|
||||
}
|
||||
PARTIALLY_INVALID = [(
|
||||
'''
|
||||
{"data":1},
|
||||
{"invalid_raw_list":2},
|
||||
[15,16,17]
|
||||
''',
|
||||
{'data': {'invalid_raw_list': [None, None, None]}},
|
||||
), (
|
||||
'''
|
||||
{"data":1},
|
||||
["EmptyRef",2],
|
||||
"not valid JSON"
|
||||
''',
|
||||
{'data': None},
|
||||
), (
|
||||
'''
|
||||
{"data":1},
|
||||
["EmptyShallowRef",2],
|
||||
"not valid JSON"
|
||||
''',
|
||||
{'data': None},
|
||||
)]
|
||||
INVALID = [
|
||||
'''
|
||||
[]
|
||||
''',
|
||||
'''
|
||||
["unsupported",1],
|
||||
{"data":2},
|
||||
{}
|
||||
''',
|
||||
]
|
||||
DEFAULT = object()
|
||||
|
||||
self.assertEqual(self.ie._search_nuxt_json(HTML_TMPL.format(VALID_DATA), None), PAYLOAD)
|
||||
self.assertEqual(self.ie._search_nuxt_json('', None, fatal=False), {})
|
||||
self.assertIs(self.ie._search_nuxt_json('', None, default=DEFAULT), DEFAULT)
|
||||
|
||||
for data, expected in PARTIALLY_INVALID:
|
||||
self.assertEqual(
|
||||
self.ie._search_nuxt_json(HTML_TMPL.format(data), None, fatal=False), expected)
|
||||
|
||||
for data in INVALID:
|
||||
self.assertIs(
|
||||
self.ie._search_nuxt_json(HTML_TMPL.format(data), None, default=DEFAULT), DEFAULT)
|
||||
|
||||
|
||||
class TestInfoExtractorNetwork(unittest.TestCase):
|
||||
def setUp(self, /):
|
||||
self.httpd = http.server.HTTPServer(
|
||||
('127.0.0.1', 0), InfoExtractorTestRequestHandler)
|
||||
self.port = http_server_port(self.httpd)
|
||||
|
||||
self.server_thread = threading.Thread(target=self.httpd.serve_forever)
|
||||
self.server_thread.daemon = True
|
||||
self.server_thread.start()
|
||||
|
||||
self.called = False
|
||||
|
||||
def require_warning(*args, **kwargs):
|
||||
self.called = True
|
||||
|
||||
self.ydl = FakeYDL()
|
||||
self.ydl.report_warning = require_warning
|
||||
self.ie = DummyIE(self.ydl)
|
||||
|
||||
def tearDown(self, /):
|
||||
self.ydl.close()
|
||||
self.httpd.shutdown()
|
||||
self.httpd.server_close()
|
||||
self.server_thread.join(1)
|
||||
|
||||
def test_extract_m3u8_formats(self):
|
||||
formats, subtitles = self.ie._extract_m3u8_formats_and_subtitles(
|
||||
f'http://127.0.0.1:{self.port}/bipbop.m3u8', None, fatal=False)
|
||||
self.assertFalse(self.called)
|
||||
self.assertTrue(formats)
|
||||
self.assertTrue(subtitles)
|
||||
|
||||
def test_extract_m3u8_formats_warning(self):
|
||||
formats, subtitles = self.ie._extract_m3u8_formats_and_subtitles(
|
||||
f'http://127.0.0.1:{self.port}/fake.m3u8', None, fatal=False)
|
||||
self.assertTrue(self.called, 'Warning was not issued for binary m3u8 file')
|
||||
self.assertFalse(formats)
|
||||
self.assertFalse(subtitles)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
|
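For context, a minimal sketch of the __NUXT_DATA__ format that test_search_nuxt_json above exercises. It reuses the DummyIE and FakeYDL helpers already present in this test file, the inline payload follows the index-based layout of VALID_DATA, and the video ID is a placeholder:

# Illustrative sketch only, not part of the commit
# Every value in the payload is an index into the same top-level JSON array
html = ('<script data-ssr="true" id="__NUXT_DATA__" type="application/json">'
        '[{"data":1},{"title":2},"Episode Title"]</script>')
ie = DummyIE(FakeYDL())
assert ie._search_nuxt_json(html, 'placeholder-id') == {'data': {'title': 'Episode Title'}}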
|
|||
|
|
@@ -4,6 +4,9 @@
|
|||
import os
|
||||
import sys
|
||||
import unittest
|
||||
from unittest.mock import patch
|
||||
|
||||
from yt_dlp.globals import all_plugins_loaded
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
|
|
@@ -14,8 +17,6 @@ import json
|
|||
|
||||
from test.helper import FakeYDL, assertRegexpMatches, try_rm
|
||||
from yt_dlp import YoutubeDL
|
||||
from yt_dlp.compat import compat_os_name
|
||||
from yt_dlp.extractor import YoutubeIE
|
||||
from yt_dlp.extractor.common import InfoExtractor
|
||||
from yt_dlp.postprocessor.common import PostProcessor
|
||||
from yt_dlp.utils import (
|
||||
|
|
@@ -235,6 +236,35 @@ class TestFormatSelection(unittest.TestCase):
|
|||
downloaded = ydl.downloaded_info_dicts[0]
|
||||
self.assertEqual(downloaded['format_id'], 'vid-vcodec-dot')
|
||||
|
||||
def test_format_selection_by_vcodec_sort(self):
|
||||
formats = [
|
||||
{'format_id': 'av1-format', 'ext': 'mp4', 'vcodec': 'av1', 'acodec': 'none', 'url': TEST_URL},
|
||||
{'format_id': 'vp9-hdr-format', 'ext': 'mp4', 'vcodec': 'vp09.02.50.10.01.09.18.09.00', 'acodec': 'none', 'url': TEST_URL},
|
||||
{'format_id': 'vp9-sdr-format', 'ext': 'mp4', 'vcodec': 'vp09.00.50.08', 'acodec': 'none', 'url': TEST_URL},
|
||||
{'format_id': 'h265-format', 'ext': 'mp4', 'vcodec': 'h265', 'acodec': 'none', 'url': TEST_URL},
|
||||
]
|
||||
info_dict = _make_result(formats)
|
||||
|
||||
ydl = YDL({'format': 'bestvideo', 'format_sort': ['vcodec:vp9.2']})
|
||||
ydl.process_ie_result(info_dict.copy())
|
||||
downloaded = ydl.downloaded_info_dicts[0]
|
||||
self.assertEqual(downloaded['format_id'], 'vp9-hdr-format')
|
||||
|
||||
ydl = YDL({'format': 'bestvideo', 'format_sort': ['vcodec:vp9']})
|
||||
ydl.process_ie_result(info_dict.copy())
|
||||
downloaded = ydl.downloaded_info_dicts[0]
|
||||
self.assertEqual(downloaded['format_id'], 'vp9-sdr-format')
|
||||
|
||||
ydl = YDL({'format': 'bestvideo', 'format_sort': ['+vcodec:vp9.2']})
|
||||
ydl.process_ie_result(info_dict.copy())
|
||||
downloaded = ydl.downloaded_info_dicts[0]
|
||||
self.assertEqual(downloaded['format_id'], 'vp9-hdr-format')
|
||||
|
||||
ydl = YDL({'format': 'bestvideo', 'format_sort': ['+vcodec:vp9']})
|
||||
ydl.process_ie_result(info_dict.copy())
|
||||
downloaded = ydl.downloaded_info_dicts[0]
|
||||
self.assertEqual(downloaded['format_id'], 'vp9-sdr-format')
|
||||
|
||||
def test_format_selection_string_ops(self):
|
||||
formats = [
|
||||
{'format_id': 'abc-cba', 'ext': 'mp4', 'url': TEST_URL},
|
||||
|
|
@@ -305,99 +335,6 @@ class TestFormatSelection(unittest.TestCase):
|
|||
ydl = YDL({'format': '[format_id!*=-]'})
|
||||
self.assertRaises(ExtractorError, ydl.process_ie_result, info_dict.copy())
|
||||
|
||||
def test_youtube_format_selection(self):
|
||||
# FIXME: Rewrite in accordance with the new format sorting options
|
||||
return
|
||||
|
||||
order = [
|
||||
'38', '37', '46', '22', '45', '35', '44', '18', '34', '43', '6', '5', '17', '36', '13',
|
||||
# Apple HTTP Live Streaming
|
||||
'96', '95', '94', '93', '92', '132', '151',
|
||||
# 3D
|
||||
'85', '84', '102', '83', '101', '82', '100',
|
||||
# Dash video
|
||||
'137', '248', '136', '247', '135', '246',
|
||||
'245', '244', '134', '243', '133', '242', '160',
|
||||
# Dash audio
|
||||
'141', '172', '140', '171', '139',
|
||||
]
|
||||
|
||||
def format_info(f_id):
|
||||
info = YoutubeIE._formats[f_id].copy()
|
||||
|
||||
# XXX: In real cases InfoExtractor._parse_mpd_formats() fills up 'acodec'
|
||||
# and 'vcodec', while in tests such information is incomplete since
|
||||
# commit a6c2c24479e5f4827ceb06f64d855329c0a6f593
|
||||
# test_YoutubeDL.test_youtube_format_selection is broken without
|
||||
# this fix
|
||||
if 'acodec' in info and 'vcodec' not in info:
|
||||
info['vcodec'] = 'none'
|
||||
elif 'vcodec' in info and 'acodec' not in info:
|
||||
info['acodec'] = 'none'
|
||||
|
||||
info['format_id'] = f_id
|
||||
info['url'] = 'url:' + f_id
|
||||
return info
|
||||
formats_order = [format_info(f_id) for f_id in order]
|
||||
|
||||
info_dict = _make_result(list(formats_order), extractor='youtube')
|
||||
ydl = YDL({'format': 'bestvideo+bestaudio'})
|
||||
ydl.sort_formats(info_dict)
|
||||
ydl.process_ie_result(info_dict)
|
||||
downloaded = ydl.downloaded_info_dicts[0]
|
||||
self.assertEqual(downloaded['format_id'], '248+172')
|
||||
self.assertEqual(downloaded['ext'], 'mp4')
|
||||
|
||||
info_dict = _make_result(list(formats_order), extractor='youtube')
|
||||
ydl = YDL({'format': 'bestvideo[height>=999999]+bestaudio/best'})
|
||||
ydl.sort_formats(info_dict)
|
||||
ydl.process_ie_result(info_dict)
|
||||
downloaded = ydl.downloaded_info_dicts[0]
|
||||
self.assertEqual(downloaded['format_id'], '38')
|
||||
|
||||
info_dict = _make_result(list(formats_order), extractor='youtube')
|
||||
ydl = YDL({'format': 'bestvideo/best,bestaudio'})
|
||||
ydl.sort_formats(info_dict)
|
||||
ydl.process_ie_result(info_dict)
|
||||
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
|
||||
self.assertEqual(downloaded_ids, ['137', '141'])
|
||||
|
||||
info_dict = _make_result(list(formats_order), extractor='youtube')
|
||||
ydl = YDL({'format': '(bestvideo[ext=mp4],bestvideo[ext=webm])+bestaudio'})
|
||||
ydl.sort_formats(info_dict)
|
||||
ydl.process_ie_result(info_dict)
|
||||
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
|
||||
self.assertEqual(downloaded_ids, ['137+141', '248+141'])
|
||||
|
||||
info_dict = _make_result(list(formats_order), extractor='youtube')
|
||||
ydl = YDL({'format': '(bestvideo[ext=mp4],bestvideo[ext=webm])[height<=720]+bestaudio'})
|
||||
ydl.sort_formats(info_dict)
|
||||
ydl.process_ie_result(info_dict)
|
||||
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
|
||||
self.assertEqual(downloaded_ids, ['136+141', '247+141'])
|
||||
|
||||
info_dict = _make_result(list(formats_order), extractor='youtube')
|
||||
ydl = YDL({'format': '(bestvideo[ext=none]/bestvideo[ext=webm])+bestaudio'})
|
||||
ydl.sort_formats(info_dict)
|
||||
ydl.process_ie_result(info_dict)
|
||||
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
|
||||
self.assertEqual(downloaded_ids, ['248+141'])
|
||||
|
||||
for f1, f2 in zip(formats_order, formats_order[1:]):
|
||||
info_dict = _make_result([f1, f2], extractor='youtube')
|
||||
ydl = YDL({'format': 'best/bestvideo'})
|
||||
ydl.sort_formats(info_dict)
|
||||
ydl.process_ie_result(info_dict)
|
||||
downloaded = ydl.downloaded_info_dicts[0]
|
||||
self.assertEqual(downloaded['format_id'], f1['format_id'])
|
||||
|
||||
info_dict = _make_result([f2, f1], extractor='youtube')
|
||||
ydl = YDL({'format': 'best/bestvideo'})
|
||||
ydl.sort_formats(info_dict)
|
||||
ydl.process_ie_result(info_dict)
|
||||
downloaded = ydl.downloaded_info_dicts[0]
|
||||
self.assertEqual(downloaded['format_id'], f1['format_id'])
|
||||
|
||||
def test_audio_only_extractor_format_selection(self):
|
||||
# For extractors with incomplete formats (all formats are audio-only or
|
||||
# video-only) best and worst should fallback to corresponding best/worst
|
||||
|
|
@@ -457,11 +394,11 @@ class TestFormatSelection(unittest.TestCase):
|
|||
|
||||
def test_format_filtering(self):
|
||||
formats = [
|
||||
{'format_id': 'A', 'filesize': 500, 'width': 1000},
|
||||
{'format_id': 'B', 'filesize': 1000, 'width': 500},
|
||||
{'format_id': 'C', 'filesize': 1000, 'width': 400},
|
||||
{'format_id': 'D', 'filesize': 2000, 'width': 600},
|
||||
{'format_id': 'E', 'filesize': 3000},
|
||||
{'format_id': 'A', 'filesize': 500, 'width': 1000, 'aspect_ratio': 1.0},
|
||||
{'format_id': 'B', 'filesize': 1000, 'width': 500, 'aspect_ratio': 1.33},
|
||||
{'format_id': 'C', 'filesize': 1000, 'width': 400, 'aspect_ratio': 1.5},
|
||||
{'format_id': 'D', 'filesize': 2000, 'width': 600, 'aspect_ratio': 1.78},
|
||||
{'format_id': 'E', 'filesize': 3000, 'aspect_ratio': 0.56},
|
||||
{'format_id': 'F'},
|
||||
{'format_id': 'G', 'filesize': 1000000},
|
||||
]
|
||||
|
|
@@ -520,7 +457,58 @@ class TestFormatSelection(unittest.TestCase):
|
|||
ydl.process_ie_result(info_dict)
|
||||
self.assertEqual(ydl.downloaded_info_dicts, [])
|
||||
|
||||
def test_default_format_spec(self):
|
||||
ydl = YDL({'format': 'best[aspect_ratio=1]'})
|
||||
ydl.process_ie_result(info_dict)
|
||||
downloaded = ydl.downloaded_info_dicts[0]
|
||||
self.assertEqual(downloaded['format_id'], 'A')
|
||||
|
||||
ydl = YDL({'format': 'all[aspect_ratio > 1.00]'})
|
||||
ydl.process_ie_result(info_dict)
|
||||
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
|
||||
self.assertEqual(downloaded_ids, ['D', 'C', 'B'])
|
||||
|
||||
ydl = YDL({'format': 'all[aspect_ratio < 1.00]'})
|
||||
ydl.process_ie_result(info_dict)
|
||||
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
|
||||
self.assertEqual(downloaded_ids, ['E'])
|
||||
|
||||
ydl = YDL({'format': 'best[aspect_ratio=1.5]'})
|
||||
ydl.process_ie_result(info_dict)
|
||||
downloaded = ydl.downloaded_info_dicts[0]
|
||||
self.assertEqual(downloaded['format_id'], 'C')
|
||||
|
||||
ydl = YDL({'format': 'all[aspect_ratio!=1]'})
|
||||
ydl.process_ie_result(info_dict)
|
||||
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
|
||||
self.assertEqual(downloaded_ids, ['E', 'D', 'C', 'B'])
|
||||
|
||||
@patch('yt_dlp.postprocessor.ffmpeg.FFmpegMergerPP.available', False)
|
||||
def test_default_format_spec_without_ffmpeg(self):
|
||||
ydl = YDL({})
|
||||
self.assertEqual(ydl._default_format_spec({}), 'best/bestvideo+bestaudio')
|
||||
|
||||
ydl = YDL({'simulate': True})
|
||||
self.assertEqual(ydl._default_format_spec({}), 'best/bestvideo+bestaudio')
|
||||
|
||||
ydl = YDL({})
|
||||
self.assertEqual(ydl._default_format_spec({'is_live': True}), 'best/bestvideo+bestaudio')
|
||||
|
||||
ydl = YDL({'simulate': True})
|
||||
self.assertEqual(ydl._default_format_spec({'is_live': True}), 'best/bestvideo+bestaudio')
|
||||
|
||||
ydl = YDL({'outtmpl': '-'})
|
||||
self.assertEqual(ydl._default_format_spec({}), 'best/bestvideo+bestaudio')
|
||||
|
||||
ydl = YDL({})
|
||||
self.assertEqual(ydl._default_format_spec({}), 'best/bestvideo+bestaudio')
|
||||
self.assertEqual(ydl._default_format_spec({'is_live': True}), 'best/bestvideo+bestaudio')
|
||||
|
||||
@patch('yt_dlp.postprocessor.ffmpeg.FFmpegMergerPP.available', True)
|
||||
@patch('yt_dlp.postprocessor.ffmpeg.FFmpegMergerPP.can_merge', lambda _: True)
|
||||
def test_default_format_spec_with_ffmpeg(self):
|
||||
ydl = YDL({})
|
||||
self.assertEqual(ydl._default_format_spec({}), 'bestvideo*+bestaudio/best')
|
||||
|
||||
ydl = YDL({'simulate': True})
|
||||
self.assertEqual(ydl._default_format_spec({}), 'bestvideo*+bestaudio/best')
|
||||
|
||||
|
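As a side note, a minimal sketch of the same aspect_ratio filter used outside the test harness; the URL is a placeholder and the format string simply mirrors one of the cases tested above:

# Illustrative sketch only, not part of the commit; the URL is a placeholder
from yt_dlp import YoutubeDL

with YoutubeDL({'format': 'all[aspect_ratio > 1.00]'}) as ydl:
    ydl.download(['https://example.com/video'])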
|
@@ -528,13 +516,13 @@ class TestFormatSelection(unittest.TestCase):
|
|||
self.assertEqual(ydl._default_format_spec({'is_live': True}), 'best/bestvideo+bestaudio')
|
||||
|
||||
ydl = YDL({'simulate': True})
|
||||
self.assertEqual(ydl._default_format_spec({'is_live': True}), 'bestvideo*+bestaudio/best')
|
||||
self.assertEqual(ydl._default_format_spec({'is_live': True}), 'best/bestvideo+bestaudio')
|
||||
|
||||
ydl = YDL({'outtmpl': '-'})
|
||||
self.assertEqual(ydl._default_format_spec({}), 'best/bestvideo+bestaudio')
|
||||
|
||||
ydl = YDL({})
|
||||
self.assertEqual(ydl._default_format_spec({}, download=False), 'bestvideo*+bestaudio/best')
|
||||
self.assertEqual(ydl._default_format_spec({}), 'bestvideo*+bestaudio/best')
|
||||
self.assertEqual(ydl._default_format_spec({'is_live': True}), 'best/bestvideo+bestaudio')
|
||||
|
||||
|
||||
|
|
@@ -667,7 +655,7 @@ class TestYoutubeDL(unittest.TestCase):
|
|||
|
||||
if not isinstance(expected, (list, tuple)):
|
||||
expected = (expected, expected)
|
||||
for (name, got), expect in zip((('outtmpl', out), ('filename', fname)), expected):
|
||||
for (name, got), expect in zip((('outtmpl', out), ('filename', fname)), expected, strict=True):
|
||||
if callable(expect):
|
||||
self.assertTrue(expect(got), f'Wrong {name} from {tmpl}')
|
||||
elif expect is not None:
|
||||
|
|
@@ -706,6 +694,13 @@ class TestYoutubeDL(unittest.TestCase):
|
|||
test('%(width)06d.%%(ext)s', 'NA.%(ext)s')
|
||||
test('%%(width)06d.%(ext)s', '%(width)06d.mp4')
|
||||
|
||||
# Sanitization options
|
||||
test('%(title3)s', (None, 'foo⧸bar⧹test'))
|
||||
test('%(title5)s', (None, 'aei_A'), restrictfilenames=True)
|
||||
test('%(title3)s', (None, 'foo_bar_test'), windowsfilenames=False, restrictfilenames=True)
|
||||
if sys.platform != 'win32':
|
||||
test('%(title3)s', (None, 'foo⧸bar\\test'), windowsfilenames=False)
|
||||
|
||||
# ID sanitization
|
||||
test('%(id)s', '_abcd', info={'id': '_abcd'})
|
||||
test('%(some_id)s', '_abcd', info={'some_id': '_abcd'})
|
||||
|
|
@@ -783,8 +778,8 @@ class TestYoutubeDL(unittest.TestCase):
|
|||
test('%(filesize)#D', '1Ki')
|
||||
test('%(height)5.2D', ' 1.08k')
|
||||
test('%(title4)#S', 'foo_bar_test')
|
||||
test('%(title4).10S', ('foo "bar" ', 'foo "bar"' + ('#' if compat_os_name == 'nt' else ' ')))
|
||||
if compat_os_name == 'nt':
|
||||
test('%(title4).10S', ('foo "bar" ', 'foo "bar"' + ('#' if os.name == 'nt' else ' ')))
|
||||
if os.name == 'nt':
|
||||
test('%(title4)q', ('"foo ""bar"" test"', None))
|
||||
test('%(formats.:.id)#q', ('"id 1" "id 2" "id 3"', None))
|
||||
test('%(formats.0.id)#q', ('"id 1"', None))
|
||||
|
|
@@ -847,9 +842,9 @@ class TestYoutubeDL(unittest.TestCase):
|
|||
|
||||
# Environment variable expansion for prepare_filename
|
||||
os.environ['__yt_dlp_var'] = 'expanded'
|
||||
envvar = '%__yt_dlp_var%' if compat_os_name == 'nt' else '$__yt_dlp_var'
|
||||
envvar = '%__yt_dlp_var%' if os.name == 'nt' else '$__yt_dlp_var'
|
||||
test(envvar, (envvar, 'expanded'))
|
||||
if compat_os_name == 'nt':
|
||||
if os.name == 'nt':
|
||||
test('%s%', ('%s%', '%s%'))
|
||||
os.environ['s'] = 'expanded'
|
||||
test('%s%', ('%s%', 'expanded')) # %s% should be expanded before escaping %s
|
||||
|
|
@@ -1058,7 +1053,7 @@ class TestYoutubeDL(unittest.TestCase):
|
|||
entries = func(evaluated)
|
||||
results = [(v['playlist_autonumber'] - 1, (int(v['id']), v['playlist_index']))
|
||||
for v in get_downloaded_info_dicts(params, entries)]
|
||||
self.assertEqual(results, list(enumerate(zip(expected_ids, expected_ids))), f'Entries of {name} for {params}')
|
||||
self.assertEqual(results, list(enumerate(zip(expected_ids, expected_ids, strict=True))), f'Entries of {name} for {params}')
|
||||
self.assertEqual(sorted(evaluated), expected_eval, f'Evaluation of {name} for {params}')
|
||||
|
||||
test_selection({}, INDICES)
|
||||
|
|
@@ -1340,6 +1335,33 @@ class TestYoutubeDL(unittest.TestCase):
|
|||
self.assertFalse(result.get('cookies'), msg='Cookies set in cookies field for wrong domain')
|
||||
self.assertFalse(ydl.cookiejar.get_cookie_header(fmt['url']), msg='Cookies set in cookiejar for wrong domain')
|
||||
|
||||
def test_load_plugins_compat(self):
|
||||
# Should try to reload plugins if they haven't already been loaded
|
||||
all_plugins_loaded.value = False
|
||||
FakeYDL().close()
|
||||
assert all_plugins_loaded.value
|
||||
|
||||
def test_close_hooks(self):
|
||||
# Should call all registered close hooks on close
|
||||
close_hook_called = False
|
||||
close_hook_two_called = False
|
||||
|
||||
def close_hook():
|
||||
nonlocal close_hook_called
|
||||
close_hook_called = True
|
||||
|
||||
def close_hook_two():
|
||||
nonlocal close_hook_two_called
|
||||
close_hook_two_called = True
|
||||
|
||||
ydl = FakeYDL()
|
||||
ydl.add_close_hook(close_hook)
|
||||
ydl.add_close_hook(close_hook_two)
|
||||
|
||||
ydl.close()
|
||||
self.assertTrue(close_hook_called, 'Close hook was not called')
|
||||
self.assertTrue(close_hook_two_called, 'Close hook two was not called')
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
|
|
|||
|
|
@@ -27,7 +27,6 @@ from yt_dlp.aes import (
|
|||
pad_block,
|
||||
)
|
||||
from yt_dlp.dependencies import Cryptodome
|
||||
from yt_dlp.utils import bytes_to_intlist, intlist_to_bytes
|
||||
|
||||
# the encrypted data can be generated with 'devscripts/generate_aes_testdata.py'
|
||||
|
||||
|
|
@@ -40,33 +39,33 @@ class TestAES(unittest.TestCase):
|
|||
def test_encrypt(self):
|
||||
msg = b'message'
|
||||
key = list(range(16))
|
||||
encrypted = aes_encrypt(bytes_to_intlist(msg), key)
|
||||
decrypted = intlist_to_bytes(aes_decrypt(encrypted, key))
|
||||
encrypted = aes_encrypt(list(msg), key)
|
||||
decrypted = bytes(aes_decrypt(encrypted, key))
|
||||
self.assertEqual(decrypted, msg)
|
||||
|
||||
def test_cbc_decrypt(self):
|
||||
data = b'\x97\x92+\xe5\x0b\xc3\x18\x91ky9m&\xb3\xb5@\xe6\x27\xc2\x96.\xc8u\x88\xab9-[\x9e|\xf1\xcd'
|
||||
decrypted = intlist_to_bytes(aes_cbc_decrypt(bytes_to_intlist(data), self.key, self.iv))
|
||||
decrypted = bytes(aes_cbc_decrypt(list(data), self.key, self.iv))
|
||||
self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
|
||||
if Cryptodome.AES:
|
||||
decrypted = aes_cbc_decrypt_bytes(data, intlist_to_bytes(self.key), intlist_to_bytes(self.iv))
|
||||
decrypted = aes_cbc_decrypt_bytes(data, bytes(self.key), bytes(self.iv))
|
||||
self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
|
||||
|
||||
def test_cbc_encrypt(self):
|
||||
data = bytes_to_intlist(self.secret_msg)
|
||||
encrypted = intlist_to_bytes(aes_cbc_encrypt(data, self.key, self.iv))
|
||||
data = list(self.secret_msg)
|
||||
encrypted = bytes(aes_cbc_encrypt(data, self.key, self.iv))
|
||||
self.assertEqual(
|
||||
encrypted,
|
||||
b'\x97\x92+\xe5\x0b\xc3\x18\x91ky9m&\xb3\xb5@\xe6\'\xc2\x96.\xc8u\x88\xab9-[\x9e|\xf1\xcd')
|
||||
|
||||
def test_ctr_decrypt(self):
|
||||
data = bytes_to_intlist(b'\x03\xc7\xdd\xd4\x8e\xb3\xbc\x1a*O\xdc1\x12+8Aio\xd1z\xb5#\xaf\x08')
|
||||
decrypted = intlist_to_bytes(aes_ctr_decrypt(data, self.key, self.iv))
|
||||
data = list(b'\x03\xc7\xdd\xd4\x8e\xb3\xbc\x1a*O\xdc1\x12+8Aio\xd1z\xb5#\xaf\x08')
|
||||
decrypted = bytes(aes_ctr_decrypt(data, self.key, self.iv))
|
||||
self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
|
||||
|
||||
def test_ctr_encrypt(self):
|
||||
data = bytes_to_intlist(self.secret_msg)
|
||||
encrypted = intlist_to_bytes(aes_ctr_encrypt(data, self.key, self.iv))
|
||||
data = list(self.secret_msg)
|
||||
encrypted = bytes(aes_ctr_encrypt(data, self.key, self.iv))
|
||||
self.assertEqual(
|
||||
encrypted,
|
||||
b'\x03\xc7\xdd\xd4\x8e\xb3\xbc\x1a*O\xdc1\x12+8Aio\xd1z\xb5#\xaf\x08')
|
||||
|
|
@@ -75,47 +74,59 @@ class TestAES(unittest.TestCase):
|
|||
data = b'\x159Y\xcf5eud\x90\x9c\x85&]\x14\x1d\x0f.\x08\xb4T\xe4/\x17\xbd'
|
||||
authentication_tag = b'\xe8&I\x80rI\x07\x9d}YWuU@:e'
|
||||
|
||||
decrypted = intlist_to_bytes(aes_gcm_decrypt_and_verify(
|
||||
bytes_to_intlist(data), self.key, bytes_to_intlist(authentication_tag), self.iv[:12]))
|
||||
decrypted = bytes(aes_gcm_decrypt_and_verify(
|
||||
list(data), self.key, list(authentication_tag), self.iv[:12]))
|
||||
self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
|
||||
if Cryptodome.AES:
|
||||
decrypted = aes_gcm_decrypt_and_verify_bytes(
|
||||
data, intlist_to_bytes(self.key), authentication_tag, intlist_to_bytes(self.iv[:12]))
|
||||
data, bytes(self.key), authentication_tag, bytes(self.iv[:12]))
|
||||
self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
|
||||
|
||||
def test_gcm_aligned_decrypt(self):
|
||||
data = b'\x159Y\xcf5eud\x90\x9c\x85&]\x14\x1d\x0f'
|
||||
authentication_tag = b'\x08\xb1\x9d!&\x98\xd0\xeaRq\x90\xe6;\xb5]\xd8'
|
||||
|
||||
decrypted = bytes(aes_gcm_decrypt_and_verify(
|
||||
list(data), self.key, list(authentication_tag), self.iv[:12]))
|
||||
self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg[:16])
|
||||
if Cryptodome.AES:
|
||||
decrypted = aes_gcm_decrypt_and_verify_bytes(
|
||||
data, bytes(self.key), authentication_tag, bytes(self.iv[:12]))
|
||||
self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg[:16])
|
||||
|
||||
def test_decrypt_text(self):
|
||||
password = intlist_to_bytes(self.key).decode()
|
||||
password = bytes(self.key).decode()
|
||||
encrypted = base64.b64encode(
|
||||
intlist_to_bytes(self.iv[:8])
|
||||
bytes(self.iv[:8])
|
||||
+ b'\x17\x15\x93\xab\x8d\x80V\xcdV\xe0\t\xcdo\xc2\xa5\xd8ksM\r\xe27N\xae',
|
||||
).decode()
|
||||
decrypted = (aes_decrypt_text(encrypted, password, 16))
|
||||
self.assertEqual(decrypted, self.secret_msg)
|
||||
|
||||
password = intlist_to_bytes(self.key).decode()
|
||||
password = bytes(self.key).decode()
|
||||
encrypted = base64.b64encode(
|
||||
intlist_to_bytes(self.iv[:8])
|
||||
bytes(self.iv[:8])
|
||||
+ b'\x0b\xe6\xa4\xd9z\x0e\xb8\xb9\xd0\xd4i_\x85\x1d\x99\x98_\xe5\x80\xe7.\xbf\xa5\x83',
|
||||
).decode()
|
||||
decrypted = (aes_decrypt_text(encrypted, password, 32))
|
||||
self.assertEqual(decrypted, self.secret_msg)
|
||||
|
||||
def test_ecb_encrypt(self):
|
||||
data = bytes_to_intlist(self.secret_msg)
|
||||
encrypted = intlist_to_bytes(aes_ecb_encrypt(data, self.key))
|
||||
data = list(self.secret_msg)
|
||||
encrypted = bytes(aes_ecb_encrypt(data, self.key))
|
||||
self.assertEqual(
|
||||
encrypted,
|
||||
b'\xaa\x86]\x81\x97>\x02\x92\x9d\x1bR[[L/u\xd3&\xd1(h\xde{\x81\x94\xba\x02\xae\xbd\xa6\xd0:')
|
||||
|
||||
def test_ecb_decrypt(self):
|
||||
data = bytes_to_intlist(b'\xaa\x86]\x81\x97>\x02\x92\x9d\x1bR[[L/u\xd3&\xd1(h\xde{\x81\x94\xba\x02\xae\xbd\xa6\xd0:')
|
||||
decrypted = intlist_to_bytes(aes_ecb_decrypt(data, self.key, self.iv))
|
||||
data = list(b'\xaa\x86]\x81\x97>\x02\x92\x9d\x1bR[[L/u\xd3&\xd1(h\xde{\x81\x94\xba\x02\xae\xbd\xa6\xd0:')
|
||||
decrypted = bytes(aes_ecb_decrypt(data, self.key, self.iv))
|
||||
self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
|
||||
|
||||
def test_key_expansion(self):
|
||||
key = '4f6bdaa39e2f8cb07f5e722d9edef314'
|
||||
|
||||
self.assertEqual(key_expansion(bytes_to_intlist(bytearray.fromhex(key))), [
|
||||
self.assertEqual(key_expansion(list(bytearray.fromhex(key))), [
|
||||
0x4F, 0x6B, 0xDA, 0xA3, 0x9E, 0x2F, 0x8C, 0xB0, 0x7F, 0x5E, 0x72, 0x2D, 0x9E, 0xDE, 0xF3, 0x14,
|
||||
0x53, 0x66, 0x20, 0xA8, 0xCD, 0x49, 0xAC, 0x18, 0xB2, 0x17, 0xDE, 0x35, 0x2C, 0xC9, 0x2D, 0x21,
|
||||
0x8C, 0xBE, 0xDD, 0xD9, 0x41, 0xF7, 0x71, 0xC1, 0xF3, 0xE0, 0xAF, 0xF4, 0xDF, 0x29, 0x82, 0xD5,
|
||||
|
|
|
|||
|
|
@@ -1,6 +1,7 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
# Allow direct execution
|
||||
import datetime as dt
|
||||
import os
|
||||
import sys
|
||||
import unittest
|
||||
|
|
@@ -12,12 +13,7 @@ import struct
|
|||
|
||||
from yt_dlp import compat
|
||||
from yt_dlp.compat import urllib # isort: split
|
||||
from yt_dlp.compat import (
|
||||
compat_etree_fromstring,
|
||||
compat_expanduser,
|
||||
compat_urllib_parse_unquote, # noqa: TID251
|
||||
compat_urllib_parse_urlencode, # noqa: TID251
|
||||
)
|
||||
from yt_dlp.compat import compat_etree_fromstring, compat_expanduser, compat_datetime_from_timestamp
|
||||
from yt_dlp.compat.urllib.request import getproxies
|
||||
|
||||
|
||||
|
|
@@ -26,9 +22,6 @@ class TestCompat(unittest.TestCase):
|
|||
with self.assertWarns(DeprecationWarning):
|
||||
_ = compat.compat_basestring
|
||||
|
||||
with self.assertWarns(DeprecationWarning):
|
||||
_ = compat.WINDOWS_VT_MODE
|
||||
|
||||
self.assertEqual(urllib.request.getproxies, getproxies)
|
||||
|
||||
with self.assertWarns(DeprecationWarning):
|
||||
|
|
@@ -43,39 +36,6 @@ class TestCompat(unittest.TestCase):
|
|||
finally:
|
||||
os.environ['HOME'] = old_home or ''
|
||||
|
||||
def test_compat_urllib_parse_unquote(self):
|
||||
self.assertEqual(compat_urllib_parse_unquote('abc%20def'), 'abc def')
|
||||
self.assertEqual(compat_urllib_parse_unquote('%7e/abc+def'), '~/abc+def')
|
||||
self.assertEqual(compat_urllib_parse_unquote(''), '')
|
||||
self.assertEqual(compat_urllib_parse_unquote('%'), '%')
|
||||
self.assertEqual(compat_urllib_parse_unquote('%%'), '%%')
|
||||
self.assertEqual(compat_urllib_parse_unquote('%%%'), '%%%')
|
||||
self.assertEqual(compat_urllib_parse_unquote('%2F'), '/')
|
||||
self.assertEqual(compat_urllib_parse_unquote('%2f'), '/')
|
||||
self.assertEqual(compat_urllib_parse_unquote('%E6%B4%A5%E6%B3%A2'), '津波')
|
||||
self.assertEqual(
|
||||
compat_urllib_parse_unquote('''<meta property="og:description" content="%E2%96%81%E2%96%82%E2%96%83%E2%96%84%25%E2%96%85%E2%96%86%E2%96%87%E2%96%88" />
|
||||
%<a href="https://ar.wikipedia.org/wiki/%D8%AA%D8%B3%D9%88%D9%86%D8%A7%D9%85%D9%8A">%a'''),
|
||||
'''<meta property="og:description" content="▁▂▃▄%▅▆▇█" />
|
||||
%<a href="https://ar.wikipedia.org/wiki/تسونامي">%a''')
|
||||
self.assertEqual(
|
||||
compat_urllib_parse_unquote('''%28%5E%E2%97%A3_%E2%97%A2%5E%29%E3%81%A3%EF%B8%BB%E3%83%87%E2%95%90%E4%B8%80 %E2%87%80 %E2%87%80 %E2%87%80 %E2%87%80 %E2%87%80 %E2%86%B6%I%Break%25Things%'''),
|
||||
'''(^◣_◢^)っ︻デ═一 ⇀ ⇀ ⇀ ⇀ ⇀ ↶%I%Break%Things%''')
|
||||
|
||||
def test_compat_urllib_parse_unquote_plus(self):
|
||||
self.assertEqual(urllib.parse.unquote_plus('abc%20def'), 'abc def')
|
||||
self.assertEqual(urllib.parse.unquote_plus('%7e/abc+def'), '~/abc def')
|
||||
|
||||
def test_compat_urllib_parse_urlencode(self):
|
||||
self.assertEqual(compat_urllib_parse_urlencode({'abc': 'def'}), 'abc=def')
|
||||
self.assertEqual(compat_urllib_parse_urlencode({'abc': b'def'}), 'abc=def')
|
||||
self.assertEqual(compat_urllib_parse_urlencode({b'abc': 'def'}), 'abc=def')
|
||||
self.assertEqual(compat_urllib_parse_urlencode({b'abc': b'def'}), 'abc=def')
|
||||
self.assertEqual(compat_urllib_parse_urlencode([('abc', 'def')]), 'abc=def')
|
||||
self.assertEqual(compat_urllib_parse_urlencode([('abc', b'def')]), 'abc=def')
|
||||
self.assertEqual(compat_urllib_parse_urlencode([(b'abc', 'def')]), 'abc=def')
|
||||
self.assertEqual(compat_urllib_parse_urlencode([(b'abc', b'def')]), 'abc=def')
|
||||
|
||||
def test_compat_etree_fromstring(self):
|
||||
xml = '''
|
||||
<root foo="bar" spam="中文">
|
||||
|
|
@@ -100,6 +60,45 @@ class TestCompat(unittest.TestCase):
|
|||
def test_struct_unpack(self):
|
||||
self.assertEqual(struct.unpack('!B', b'\x00'), (0,))
|
||||
|
||||
def test_compat_datetime_from_timestamp(self):
|
||||
self.assertEqual(
|
||||
compat_datetime_from_timestamp(0),
|
||||
dt.datetime(1970, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc))
|
||||
self.assertEqual(
|
||||
compat_datetime_from_timestamp(1),
|
||||
dt.datetime(1970, 1, 1, 0, 0, 1, tzinfo=dt.timezone.utc))
|
||||
self.assertEqual(
|
||||
compat_datetime_from_timestamp(3600),
|
||||
dt.datetime(1970, 1, 1, 1, 0, 0, tzinfo=dt.timezone.utc))
|
||||
|
||||
self.assertEqual(
|
||||
compat_datetime_from_timestamp(-1),
|
||||
dt.datetime(1969, 12, 31, 23, 59, 59, tzinfo=dt.timezone.utc))
|
||||
self.assertEqual(
|
||||
compat_datetime_from_timestamp(-86400),
|
||||
dt.datetime(1969, 12, 31, 0, 0, 0, tzinfo=dt.timezone.utc))
|
||||
|
||||
self.assertEqual(
|
||||
compat_datetime_from_timestamp(0.5),
|
||||
dt.datetime(1970, 1, 1, 0, 0, 0, 500000, tzinfo=dt.timezone.utc))
|
||||
self.assertEqual(
|
||||
compat_datetime_from_timestamp(1.000001),
|
||||
dt.datetime(1970, 1, 1, 0, 0, 1, 1, tzinfo=dt.timezone.utc))
|
||||
self.assertEqual(
|
||||
compat_datetime_from_timestamp(-1.25),
|
||||
dt.datetime(1969, 12, 31, 23, 59, 58, 750000, tzinfo=dt.timezone.utc))
|
||||
|
||||
self.assertEqual(
|
||||
compat_datetime_from_timestamp(-1577923200),
|
||||
dt.datetime(1920, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc))
|
||||
self.assertEqual(
|
||||
compat_datetime_from_timestamp(4102444800),
|
||||
dt.datetime(2100, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc))
|
||||
|
||||
self.assertEqual(
|
||||
compat_datetime_from_timestamp(173568960000),
|
||||
dt.datetime(7470, 3, 8, 0, 0, 0, tzinfo=dt.timezone.utc))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
|
|
|||
|
|
@@ -58,6 +58,14 @@ class TestCookies(unittest.TestCase):
|
|||
({'DESKTOP_SESSION': 'kde'}, _LinuxDesktopEnvironment.KDE3),
|
||||
({'DESKTOP_SESSION': 'xfce'}, _LinuxDesktopEnvironment.XFCE),
|
||||
|
||||
({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'gnome'}, _LinuxDesktopEnvironment.GNOME),
|
||||
({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'mate'}, _LinuxDesktopEnvironment.GNOME),
|
||||
({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'kde4'}, _LinuxDesktopEnvironment.KDE4),
|
||||
({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'kde'}, _LinuxDesktopEnvironment.KDE3),
|
||||
({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'xfce'}, _LinuxDesktopEnvironment.XFCE),
|
||||
|
||||
({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'my_custom_de', 'GNOME_DESKTOP_SESSION_ID': 1}, _LinuxDesktopEnvironment.GNOME),
|
||||
|
||||
({'GNOME_DESKTOP_SESSION_ID': 1}, _LinuxDesktopEnvironment.GNOME),
|
||||
({'KDE_FULL_SESSION': 1}, _LinuxDesktopEnvironment.KDE3),
|
||||
({'KDE_FULL_SESSION': 1, 'DESKTOP_SESSION': 'kde4'}, _LinuxDesktopEnvironment.KDE4),
|
||||
|
|
@@ -105,6 +113,13 @@ class TestCookies(unittest.TestCase):
|
|||
decryptor = LinuxChromeCookieDecryptor('Chrome', Logger())
|
||||
self.assertEqual(decryptor.decrypt(encrypted_value), value)
|
||||
|
||||
def test_chrome_cookie_decryptor_linux_v10_meta24(self):
|
||||
with MonkeyPatch(cookies, {'_get_linux_keyring_password': lambda *args, **kwargs: b''}):
|
||||
encrypted_value = b'v10\x1f\xe4\x0e[\x83\x0c\xcc*kPi \xce\x8d\x1d\xbb\x80\r\x11\t\xbb\x9e^Hy\x94\xf4\x963\x9f\x82\xba\xfe\xa1\xed\xb9\xf1)\x00710\x92\xc8/<\x96B'
|
||||
value = 'DE'
|
||||
decryptor = LinuxChromeCookieDecryptor('Chrome', Logger(), meta_version=24)
|
||||
self.assertEqual(decryptor.decrypt(encrypted_value), value)
|
||||
|
||||
def test_chrome_cookie_decryptor_windows_v10(self):
|
||||
with MonkeyPatch(cookies, {
|
||||
'_get_windows_v10_key': lambda *args, **kwargs: b'Y\xef\xad\xad\xeerp\xf0Y\xe6\x9b\x12\xc2<z\x16]\n\xbb\xb8\xcb\xd7\x9bA\xc3\x14e\x99{\xd6\xf4&',
|
||||
|
|
@@ -114,6 +129,15 @@ class TestCookies(unittest.TestCase):
|
|||
decryptor = WindowsChromeCookieDecryptor('', Logger())
|
||||
self.assertEqual(decryptor.decrypt(encrypted_value), value)
|
||||
|
||||
def test_chrome_cookie_decryptor_windows_v10_meta24(self):
|
||||
with MonkeyPatch(cookies, {
|
||||
'_get_windows_v10_key': lambda *args, **kwargs: b'\xea\x8b\x02\xc3\xc6\xc5\x99\xc3\xa3[ j\xfa\xf6\xfcU\xac\x13u\xdc\x0c\x0e\xf1\x03\x90\xb6\xdf\xbb\x8fL\xb1\xb2',
|
||||
}):
|
||||
encrypted_value = b'v10dN\xe1\xacy\x84^\xe1I\xact\x03r\xfb\xe2\xce{^\x0e<(\xb0y\xeb\x01\xfb@"\x9e\x8c\xa53~\xdb*\x8f\xac\x8b\xe3\xfd3\x06\xe5\x93\x19OyOG\xb2\xfb\x1d$\xc0\xda\x13j\x9e\xfe\xc5\xa3\xa8\xfe\xd9'
|
||||
value = '1234'
|
||||
decryptor = WindowsChromeCookieDecryptor('', Logger(), meta_version=24)
|
||||
self.assertEqual(decryptor.decrypt(encrypted_value), value)
|
||||
|
||||
def test_chrome_cookie_decryptor_mac_v10(self):
|
||||
with MonkeyPatch(cookies, {'_get_mac_keyring_password': lambda *args, **kwargs: b'6eIDUdtKAacvlHwBVwvg/Q=='}):
|
||||
encrypted_value = b'v10\xb3\xbe\xad\xa1[\x9fC\xa1\x98\xe0\x9a\x01\xd9\xcf\xbfc'
|
||||
|
|
|
|||
235
test/test_devalue.py
Normal file
|
|
@@ -0,0 +1,235 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
|
||||
import datetime as dt
|
||||
import json
|
||||
import math
|
||||
import re
|
||||
import unittest
|
||||
|
||||
from yt_dlp.utils.jslib import devalue
|
||||
|
||||
|
||||
TEST_CASES_EQUALS = [{
|
||||
'name': 'int',
|
||||
'unparsed': [-42],
|
||||
'parsed': -42,
|
||||
}, {
|
||||
'name': 'str',
|
||||
'unparsed': ['woo!!!'],
|
||||
'parsed': 'woo!!!',
|
||||
}, {
|
||||
'name': 'Number',
|
||||
'unparsed': [['Object', 42]],
|
||||
'parsed': 42,
|
||||
}, {
|
||||
'name': 'String',
|
||||
'unparsed': [['Object', 'yar']],
|
||||
'parsed': 'yar',
|
||||
}, {
|
||||
'name': 'Infinity',
|
||||
'unparsed': -4,
|
||||
'parsed': math.inf,
|
||||
}, {
|
||||
'name': 'negative Infinity',
|
||||
'unparsed': -5,
|
||||
'parsed': -math.inf,
|
||||
}, {
|
||||
'name': 'negative zero',
|
||||
'unparsed': -6,
|
||||
'parsed': -0.0,
|
||||
}, {
|
||||
'name': 'RegExp',
|
||||
'unparsed': [['RegExp', 'regexp', 'gim']], # XXX: flags are ignored
|
||||
'parsed': re.compile('regexp'),
|
||||
}, {
|
||||
'name': 'Date',
|
||||
'unparsed': [['Date', '2001-09-09T01:46:40.000Z']],
|
||||
'parsed': dt.datetime.fromtimestamp(1e9, tz=dt.timezone.utc),
|
||||
}, {
|
||||
'name': 'Array',
|
||||
'unparsed': [[1, 2, 3], 'a', 'b', 'c'],
|
||||
'parsed': ['a', 'b', 'c'],
|
||||
}, {
|
||||
'name': 'Array (empty)',
|
||||
'unparsed': [[]],
|
||||
'parsed': [],
|
||||
}, {
|
||||
'name': 'Array (sparse)',
|
||||
'unparsed': [[-2, 1, -2], 'b'],
|
||||
'parsed': [None, 'b', None],
|
||||
}, {
|
||||
'name': 'Object',
|
||||
'unparsed': [{'foo': 1, 'x-y': 2}, 'bar', 'z'],
|
||||
'parsed': {'foo': 'bar', 'x-y': 'z'},
|
||||
}, {
|
||||
'name': 'Set',
|
||||
'unparsed': [['Set', 1, 2, 3], 1, 2, 3],
|
||||
'parsed': [1, 2, 3],
|
||||
}, {
|
||||
'name': 'Map',
|
||||
'unparsed': [['Map', 1, 2], 'a', 'b'],
|
||||
'parsed': [['a', 'b']],
|
||||
}, {
|
||||
'name': 'BigInt',
|
||||
'unparsed': [['BigInt', '1']],
|
||||
'parsed': 1,
|
||||
}, {
|
||||
'name': 'Uint8Array',
|
||||
'unparsed': [['Uint8Array', 'AQID']],
|
||||
'parsed': [1, 2, 3],
|
||||
}, {
|
||||
'name': 'ArrayBuffer',
|
||||
'unparsed': [['ArrayBuffer', 'AQID']],
|
||||
'parsed': [1, 2, 3],
|
||||
}, {
|
||||
'name': 'str (repetition)',
|
||||
'unparsed': [[1, 1], 'a string'],
|
||||
'parsed': ['a string', 'a string'],
|
||||
}, {
|
||||
'name': 'None (repetition)',
|
||||
'unparsed': [[1, 1], None],
|
||||
'parsed': [None, None],
|
||||
}, {
|
||||
'name': 'dict (repetition)',
|
||||
'unparsed': [[1, 1], {}],
|
||||
'parsed': [{}, {}],
|
||||
}, {
|
||||
'name': 'Object without prototype',
|
||||
'unparsed': [['null']],
|
||||
'parsed': {},
|
||||
}, {
|
||||
'name': 'cross-realm POJO',
|
||||
'unparsed': [{}],
|
||||
'parsed': {},
|
||||
}]
|
||||
|
||||
TEST_CASES_IS = [{
|
||||
'name': 'bool',
|
||||
'unparsed': [True],
|
||||
'parsed': True,
|
||||
}, {
|
||||
'name': 'Boolean',
|
||||
'unparsed': [['Object', False]],
|
||||
'parsed': False,
|
||||
}, {
|
||||
'name': 'undefined',
|
||||
'unparsed': -1,
|
||||
'parsed': None,
|
||||
}, {
|
||||
'name': 'null',
|
||||
'unparsed': [None],
|
||||
'parsed': None,
|
||||
}, {
|
||||
'name': 'NaN',
|
||||
'unparsed': -3,
|
||||
'parsed': math.nan,
|
||||
}]
|
||||
|
||||
TEST_CASES_INVALID = [{
|
||||
'name': 'empty string',
|
||||
'unparsed': '',
|
||||
'error': ValueError,
|
||||
'pattern': r'expected int or list as input',
|
||||
}, {
|
||||
'name': 'hole',
|
||||
'unparsed': -2,
|
||||
'error': ValueError,
|
||||
'pattern': r'invalid integer input',
|
||||
}, {
|
||||
'name': 'string',
|
||||
'unparsed': 'hello',
|
||||
'error': ValueError,
|
||||
'pattern': r'expected int or list as input',
|
||||
}, {
|
||||
'name': 'number',
|
||||
'unparsed': 42,
|
||||
'error': ValueError,
|
||||
'pattern': r'invalid integer input',
|
||||
}, {
|
||||
'name': 'boolean',
|
||||
'unparsed': True,
|
||||
'error': ValueError,
|
||||
'pattern': r'expected int or list as input',
|
||||
}, {
|
||||
'name': 'null',
|
||||
'unparsed': None,
|
||||
'error': ValueError,
|
||||
'pattern': r'expected int or list as input',
|
||||
}, {
|
||||
'name': 'object',
|
||||
'unparsed': {},
|
||||
'error': ValueError,
|
||||
'pattern': r'expected int or list as input',
|
||||
}, {
|
||||
'name': 'empty array',
|
||||
'unparsed': [],
|
||||
'error': ValueError,
|
||||
'pattern': r'expected a non-empty list as input',
|
||||
}, {
|
||||
'name': 'Python negative indexing',
|
||||
'unparsed': [[1, 2, 3, 4, 5, 6, 7, -7], 1, 2, 3, 4, 5, 6, 7],
|
||||
'error': IndexError,
|
||||
'pattern': r'invalid index: -7',
|
||||
}]
|
||||
|
||||
|
||||
class TestDevalue(unittest.TestCase):
|
||||
def test_devalue_parse_equals(self):
|
||||
for tc in TEST_CASES_EQUALS:
|
||||
self.assertEqual(devalue.parse(tc['unparsed']), tc['parsed'], tc['name'])
|
||||
|
||||
def test_devalue_parse_is(self):
|
||||
for tc in TEST_CASES_IS:
|
||||
self.assertIs(devalue.parse(tc['unparsed']), tc['parsed'], tc['name'])
|
||||
|
||||
def test_devalue_parse_invalid(self):
|
||||
for tc in TEST_CASES_INVALID:
|
||||
with self.assertRaisesRegex(tc['error'], tc['pattern'], msg=tc['name']):
|
||||
devalue.parse(tc['unparsed'])
|
||||
|
||||
def test_devalue_parse_cyclical(self):
|
||||
name = 'Map (cyclical)'
|
||||
result = devalue.parse([['Map', 1, 0], 'self'])
|
||||
self.assertEqual(result[0][0], 'self', name)
|
||||
self.assertIs(result, result[0][1], name)
|
||||
|
||||
name = 'Set (cyclical)'
|
||||
result = devalue.parse([['Set', 0, 1], 42])
|
||||
self.assertEqual(result[1], 42, name)
|
||||
self.assertIs(result, result[0], name)
|
||||
|
||||
result = devalue.parse([[0]])
|
||||
self.assertIs(result, result[0], 'Array (cyclical)')
|
||||
|
||||
name = 'Object (cyclical)'
|
||||
result = devalue.parse([{'self': 0}])
|
||||
self.assertIs(result, result['self'], name)
|
||||
|
||||
name = 'Object with null prototype (cyclical)'
|
||||
result = devalue.parse([['null', 'self', 0]])
|
||||
self.assertIs(result, result['self'], name)
|
||||
|
||||
name = 'Objects (cyclical)'
|
||||
result = devalue.parse([[1, 2], {'second': 2}, {'first': 1}])
|
||||
self.assertIs(result[0], result[1]['first'], name)
|
||||
self.assertIs(result[1], result[0]['second'], name)
|
||||
|
||||
def test_devalue_parse_revivers(self):
|
||||
self.assertEqual(
|
||||
devalue.parse([['indirect', 1], {'a': 2}, 'b'], revivers={'indirect': lambda x: x}),
|
||||
{'a': 'b'}, 'revivers (indirect)')
|
||||
|
||||
self.assertEqual(
|
||||
devalue.parse([['parse', 1], '{"a":0}'], revivers={'parse': lambda x: json.loads(x)}),
|
||||
{'a': 0}, 'revivers (parse)')
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
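For context, a minimal sketch of the devalue wire format the new tests above exercise, using the same import path as the test file; the sample inputs and expected results are taken directly from TEST_CASES_EQUALS:

# Illustrative sketch only, not part of the commit
from yt_dlp.utils.jslib import devalue

# Values are referenced by index into the top-level list; element 0 is the root
assert devalue.parse([{'foo': 1, 'x-y': 2}, 'bar', 'z']) == {'foo': 'bar', 'x-y': 'z'}
assert devalue.parse([['Map', 1, 2], 'a', 'b']) == [['a', 'b']]
assert devalue.parse([['Set', 1, 2, 3], 1, 2, 3]) == [1, 2, 3]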
|
@@ -14,6 +14,7 @@ import json
|
|||
|
||||
from test.helper import (
|
||||
assertGreaterEqual,
|
||||
assertLessEqual,
|
||||
expect_info_dict,
|
||||
expect_warnings,
|
||||
get_params,
|
||||
|
|
@@ -65,10 +66,6 @@ tests_counter = collections.defaultdict(collections.Counter)
|
|||
|
||||
@is_download_test
|
||||
class TestDownload(unittest.TestCase):
|
||||
# Parallel testing in nosetests. See
|
||||
# http://nose.readthedocs.org/en/latest/doc_tests/test_multiprocess/multiprocess.html
|
||||
_multiprocess_shared_ = True
|
||||
|
||||
maxDiff = None
|
||||
|
||||
COMPLETED_TESTS = {}
|
||||
|
|
@@ -121,10 +118,13 @@ def generator(test_case, tname):
|
|||
params = get_params(test_case.get('params', {}))
|
||||
params['outtmpl'] = tname + '_' + params['outtmpl']
|
||||
if is_playlist and 'playlist' not in test_case:
|
||||
params.setdefault('extract_flat', 'in_playlist')
|
||||
params.setdefault('playlistend', test_case.get(
|
||||
'playlist_mincount', test_case.get('playlist_count', -2) + 1))
|
||||
params.setdefault('playlistend', max(
|
||||
test_case.get('playlist_mincount', -1),
|
||||
test_case.get('playlist_count', -2) + 1,
|
||||
test_case.get('playlist_maxcount', -2) + 1))
|
||||
params.setdefault('skip_download', True)
|
||||
if 'playlist_duration_sum' not in test_case:
|
||||
params.setdefault('extract_flat', 'in_playlist')
|
||||
|
||||
ydl = YoutubeDL(params, auto_init=False)
|
||||
ydl.add_default_info_extractors()
|
||||
|
|
@@ -159,6 +159,7 @@ def generator(test_case, tname):
|
|||
try_rm(os.path.splitext(tc_filename)[0] + '.info.json')
|
||||
try_rm_tcs_files()
|
||||
try:
|
||||
test_url = test_case['url']
|
||||
try_num = 1
|
||||
while True:
|
||||
try:
|
||||
|
|
@@ -166,7 +167,7 @@ def generator(test_case, tname):
|
|||
# for outside error handling, and returns the exit code
|
||||
# instead of the result dict.
|
||||
res_dict = ydl.extract_info(
|
||||
test_case['url'],
|
||||
test_url,
|
||||
force_generic_extractor=params.get('force_generic_extractor', False))
|
||||
except (DownloadError, ExtractorError) as err:
|
||||
# Check if the exception is not a network related one
|
||||
|
|
@@ -194,23 +195,23 @@ def generator(test_case, tname):
|
|||
self.assertTrue('entries' in res_dict)
|
||||
expect_info_dict(self, res_dict, test_case.get('info_dict', {}))
|
||||
|
||||
num_entries = len(res_dict.get('entries', []))
|
||||
if 'playlist_mincount' in test_case:
|
||||
mincount = test_case['playlist_mincount']
|
||||
assertGreaterEqual(
|
||||
self,
|
||||
len(res_dict['entries']),
|
||||
test_case['playlist_mincount'],
|
||||
'Expected at least %d in playlist %s, but got only %d' % (
|
||||
test_case['playlist_mincount'], test_case['url'],
|
||||
len(res_dict['entries'])))
|
||||
self, num_entries, mincount,
|
||||
f'Expected at least {mincount} entries in playlist {test_url}, but got only {num_entries}')
|
||||
if 'playlist_count' in test_case:
|
||||
count = test_case['playlist_count']
|
||||
got = num_entries if num_entries <= count else 'more'
|
||||
self.assertEqual(
|
||||
len(res_dict['entries']),
|
||||
test_case['playlist_count'],
|
||||
'Expected %d entries in playlist %s, but got %d.' % (
|
||||
test_case['playlist_count'],
|
||||
test_case['url'],
|
||||
len(res_dict['entries']),
|
||||
))
|
||||
num_entries, count,
|
||||
f'Expected exactly {count} entries in playlist {test_url}, but got {got}')
|
||||
if 'playlist_maxcount' in test_case:
|
||||
maxcount = test_case['playlist_maxcount']
|
||||
assertLessEqual(
|
||||
self, num_entries, maxcount,
|
||||
f'Expected at most {maxcount} entries in playlist {test_url}, but got more')
|
||||
if 'playlist_duration_sum' in test_case:
|
||||
got_duration = sum(e['duration'] for e in res_dict['entries'])
|
||||
self.assertEqual(
|
||||
|
|
|
|||
|
|
@@ -15,7 +15,6 @@ import threading
|
|||
from test.helper import http_server_port, try_rm
|
||||
from yt_dlp import YoutubeDL
|
||||
from yt_dlp.downloader.http import HttpFD
|
||||
from yt_dlp.utils import encodeFilename
|
||||
from yt_dlp.utils._utils import _YDLLogger as FakeLogger
|
||||
|
||||
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
|
@@ -82,12 +81,12 @@ class TestHttpFD(unittest.TestCase):
|
|||
ydl = YoutubeDL(params)
|
||||
downloader = HttpFD(ydl, params)
|
||||
filename = 'testfile.mp4'
|
||||
try_rm(encodeFilename(filename))
|
||||
try_rm(filename)
|
||||
self.assertTrue(downloader.real_download(filename, {
|
||||
'url': f'http://127.0.0.1:{self.port}/{ep}',
|
||||
}), ep)
|
||||
self.assertEqual(os.path.getsize(encodeFilename(filename)), TEST_SIZE, ep)
|
||||
try_rm(encodeFilename(filename))
|
||||
self.assertEqual(os.path.getsize(filename), TEST_SIZE, ep)
|
||||
try_rm(filename)
|
||||
|
||||
def download_all(self, params):
|
||||
for ep in ('regular', 'no-content-length', 'no-range', 'no-range-no-content-length'):
|
||||
|
|
|
|||
|
|
@@ -247,6 +247,7 @@ def ctx(request):
|
|||
|
||||
@pytest.mark.parametrize(
|
||||
'handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
|
||||
@pytest.mark.handler_flaky('CurlCFFI', reason='segfaults')
|
||||
@pytest.mark.parametrize('ctx', ['http'], indirect=True) # pure http proxy can only support http
|
||||
class TestHTTPProxy:
|
||||
def test_http_no_auth(self, handler, ctx):
|
||||
|
|
@@ -315,6 +316,7 @@ class TestHTTPProxy:
|
|||
('Requests', 'https'),
|
||||
('CurlCFFI', 'https'),
|
||||
], indirect=True)
|
||||
@pytest.mark.handler_flaky('CurlCFFI', reason='segfaults')
|
||||
class TestHTTPConnectProxy:
|
||||
def test_http_connect_no_auth(self, handler, ctx):
|
||||
with ctx.http_server(HTTPConnectProxyHandler) as server_address:
|
||||
|
|
@@ -331,10 +333,6 @@ class TestHTTPConnectProxy:
|
|||
assert proxy_info['proxy'] == server_address
|
||||
assert 'Proxy-Authorization' in proxy_info['headers']
|
||||
|
||||
@pytest.mark.skip_handler(
|
||||
'Requests',
|
||||
'bug in urllib3 causes unclosed socket: https://github.com/urllib3/urllib3/issues/3374',
|
||||
)
|
||||
def test_http_connect_bad_auth(self, handler, ctx):
|
||||
with ctx.http_server(HTTPConnectProxyHandler, username='test', password='test') as server_address:
|
||||
with handler(verify=False, proxies={ctx.REQUEST_PROTO: f'http://test:bad@{server_address}'}) as rh:
|
||||
|
|
|
|||
60
test/test_jsc/conftest.py
Normal file
|
|
@@ -0,0 +1,60 @@
|
|||
import re
|
||||
import pathlib
|
||||
|
||||
import pytest
|
||||
|
||||
import yt_dlp.globals
|
||||
from yt_dlp import YoutubeDL
|
||||
from yt_dlp.extractor.common import InfoExtractor
|
||||
|
||||
|
||||
_TESTDATA_PATH = pathlib.Path(__file__).parent.parent / 'testdata/sigs'
|
||||
_player_re = re.compile(r'^.+/player/(?P<id>[a-zA-Z0-9_/.-]+)\.js$')
|
||||
_player_id_trans = str.maketrans(dict.fromkeys('/.-', '_'))
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def ie() -> InfoExtractor:
|
||||
runtime_names = yt_dlp.globals.supported_js_runtimes.value
|
||||
ydl = YoutubeDL({'js_runtimes': {key: {} for key in runtime_names}})
|
||||
ie = ydl.get_info_extractor('Youtube')
|
||||
|
||||
def _load_player(video_id, player_url, fatal=True):
|
||||
match = _player_re.match(player_url)
|
||||
test_id = match.group('id').translate(_player_id_trans)
|
||||
cached_file = _TESTDATA_PATH / f'player-{test_id}.js'
|
||||
|
||||
if cached_file.exists():
|
||||
return cached_file.read_text()
|
||||
|
||||
if code := ie._download_webpage(player_url, video_id, fatal=fatal):
|
||||
_TESTDATA_PATH.mkdir(exist_ok=True, parents=True)
|
||||
cached_file.write_text(code)
|
||||
return code
|
||||
|
||||
return None
|
||||
|
||||
ie._load_player = _load_player
|
||||
return ie
|
||||
|
||||
|
||||
class MockLogger:
|
||||
def trace(self, message: str):
|
||||
print(f'trace: {message}')
|
||||
|
||||
def debug(self, message: str, *, once=False):
|
||||
print(f'debug: {message}')
|
||||
|
||||
def info(self, message: str):
|
||||
print(f'info: {message}')
|
||||
|
||||
def warning(self, message: str, *, once=False):
|
||||
print(f'warning: {message}')
|
||||
|
||||
def error(self, message: str):
|
||||
print(f'error: {message}')
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def logger():
|
||||
return MockLogger()
|
||||
128
test/test_jsc/test_ejs_integration.py
Normal file
|
|
@@ -0,0 +1,128 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import dataclasses
|
||||
import enum
|
||||
import importlib.util
|
||||
import json
|
||||
|
||||
import pytest
|
||||
|
||||
from yt_dlp.extractor.youtube.jsc.provider import (
|
||||
JsChallengeRequest,
|
||||
JsChallengeType,
|
||||
JsChallengeProviderResponse,
|
||||
JsChallengeResponse,
|
||||
NChallengeInput,
|
||||
NChallengeOutput,
|
||||
SigChallengeInput,
|
||||
SigChallengeOutput,
|
||||
)
|
||||
from yt_dlp.extractor.youtube.jsc._builtin.bun import BunJCP
|
||||
from yt_dlp.extractor.youtube.jsc._builtin.deno import DenoJCP
|
||||
from yt_dlp.extractor.youtube.jsc._builtin.node import NodeJCP
|
||||
from yt_dlp.extractor.youtube.jsc._builtin.quickjs import QuickJSJCP
|
||||
|
||||
|
||||
_has_ejs = bool(importlib.util.find_spec('yt_dlp_ejs'))
|
||||
pytestmark = pytest.mark.skipif(not _has_ejs, reason='yt-dlp-ejs not available')
|
||||
|
||||
|
||||
class Variant(enum.Enum):
|
||||
main = 'player_ias.vflset/en_US/base.js'
|
||||
tcc = 'player_ias_tcc.vflset/en_US/base.js'
|
||||
tce = 'player_ias_tce.vflset/en_US/base.js'
|
||||
es5 = 'player_es5.vflset/en_US/base.js'
|
||||
es6 = 'player_es6.vflset/en_US/base.js'
|
||||
tv = 'tv-player-ias.vflset/tv-player-ias.js'
|
||||
tv_es6 = 'tv-player-es6.vflset/tv-player-es6.js'
|
||||
phone = 'player-plasma-ias-phone-en_US.vflset/base.js'
|
||||
tablet = 'player-plasma-ias-tablet-en_US.vflset/base.js'
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class Challenge:
|
||||
player: str
|
||||
variant: Variant
|
||||
type: JsChallengeType
|
||||
values: dict[str, str] = dataclasses.field(default_factory=dict)
|
||||
|
||||
def url(self, /):
|
||||
return f'https://www.youtube.com/s/player/{self.player}/{self.variant.value}'
|
||||
|
||||
|
||||
CHALLENGES: list[Challenge] = [
|
||||
Challenge('3d3ba064', Variant.tce, JsChallengeType.N, {
|
||||
'ZdZIqFPQK-Ty8wId': 'qmtUsIz04xxiNW',
|
||||
'4GMrWHyKI5cEvhDO': 'N9gmEX7YhKTSmw',
|
||||
}),
|
||||
Challenge('3d3ba064', Variant.tce, JsChallengeType.SIG, {
|
||||
'gN7a-hudCuAuPH6fByOk1_GNXN0yNMHShjZXS2VOgsEItAJz0tipeavEOmNdYN-wUtcEqD3bCXjc0iyKfAyZxCBGgIARwsSdQfJ2CJtt':
|
||||
'ttJC2JfQdSswRAIgGBCxZyAfKyi0cjXCb3gqEctUw-NYdNmOEvaepit0zJAtIEsgOV2SXZjhSHMNy0NXNG_1kNyBf6HPuAuCduh-a7O',
|
||||
}),
|
||||
Challenge('5ec65609', Variant.tce, JsChallengeType.N, {
|
||||
'0eRGgQWJGfT5rFHFj': '4SvMpDQH-vBJCw',
|
||||
}),
|
||||
Challenge('5ec65609', Variant.tce, JsChallengeType.SIG, {
|
||||
'AAJAJfQdSswRQIhAMG5SN7-cAFChdrE7tLA6grH0rTMICA1mmDc0HoXgW3CAiAQQ4=CspfaF_vt82XH5yewvqcuEkvzeTsbRuHssRMyJQ=I':
|
||||
'AJfQdSswRQIhAMG5SN7-cAFChdrE7tLA6grI0rTMICA1mmDc0HoXgW3CAiAQQ4HCspfaF_vt82XH5yewvqcuEkvzeTsbRuHssRMyJQ==',
|
||||
}),
|
||||
Challenge('6742b2b9', Variant.tce, JsChallengeType.N, {
|
||||
'_HPB-7GFg1VTkn9u': 'qUAsPryAO_ByYg',
|
||||
'K1t_fcB6phzuq2SF': 'Y7PcOt3VE62mog',
|
||||
}),
|
||||
Challenge('6742b2b9', Variant.tce, JsChallengeType.SIG, {
|
||||
'MMGZJMUucirzS_SnrSPYsc85CJNnTUi6GgR5NKn-znQEICACojE8MHS6S7uYq4TGjQX_D4aPk99hNU6wbTvorvVVMgIARwsSdQfJAA':
|
||||
'AJfQdSswRAIgMVVvrovTbw6UNh99kPa4D_XQjGT4qYu7S6SHM8EjoCACIEQnz-nKN5RgG6iUTnNJC58csYPSrnS_SzricuUMJZGM',
|
||||
}),
|
||||
Challenge('2b83d2e0', Variant.main, JsChallengeType.N, {
|
||||
'0eRGgQWJGfT5rFHFj': 'euHbygrCMLksxd',
|
||||
}),
|
||||
Challenge('2b83d2e0', Variant.main, JsChallengeType.SIG, {
|
||||
'MMGZJMUucirzS_SnrSPYsc85CJNnTUi6GgR5NKn-znQEICACojE8MHS6S7uYq4TGjQX_D4aPk99hNU6wbTvorvVVMgIARwsSdQfJA':
|
||||
'-MGZJMUucirzS_SnrSPYsc85CJNnTUi6GgR5NKnMznQEICACojE8MHS6S7uYq4TGjQX_D4aPk99hNU6wbTvorvVVMgIARwsSdQfJ',
|
||||
}),
|
||||
Challenge('638ec5c6', Variant.main, JsChallengeType.N, {
|
||||
'ZdZIqFPQK-Ty8wId': '1qov8-KM-yH',
|
||||
}),
|
||||
Challenge('638ec5c6', Variant.main, JsChallengeType.SIG, {
|
||||
'gN7a-hudCuAuPH6fByOk1_GNXN0yNMHShjZXS2VOgsEItAJz0tipeavEOmNdYN-wUtcEqD3bCXjc0iyKfAyZxCBGgIARwsSdQfJ2CJtt':
|
||||
'MhudCuAuP-6fByOk1_GNXN7gNHHShjyXS2VOgsEItAJz0tipeav0OmNdYN-wUtcEqD3bCXjc0iyKfAyZxCBGgIARwsSdQfJ2CJtt',
|
||||
}),
|
||||
]
|
||||
|
||||
requests: list[JsChallengeRequest] = []
|
||||
responses: list[JsChallengeProviderResponse] = []
|
||||
for test in CHALLENGES:
|
||||
input_type, output_type = {
|
||||
JsChallengeType.N: (NChallengeInput, NChallengeOutput),
|
||||
JsChallengeType.SIG: (SigChallengeInput, SigChallengeOutput),
|
||||
}[test.type]
|
||||
|
||||
request = JsChallengeRequest(test.type, input_type(test.url(), list(test.values.keys())), test.player)
|
||||
requests.append(request)
|
||||
responses.append(JsChallengeProviderResponse(request, JsChallengeResponse(test.type, output_type(test.values))))
|
||||
|
||||
|
||||
@pytest.fixture(params=[BunJCP, DenoJCP, NodeJCP, QuickJSJCP])
|
||||
def jcp(request, ie, logger):
|
||||
obj = request.param(ie, logger, None)
|
||||
if not obj.is_available():
|
||||
pytest.skip(f'{obj.PROVIDER_NAME} is not available')
|
||||
obj.is_dev = True
|
||||
return obj
|
||||
|
||||
|
||||
@pytest.mark.download
|
||||
def test_bulk_requests(jcp):
|
||||
assert list(jcp.bulk_solve(requests)) == responses
|
||||
|
||||
|
||||
@pytest.mark.download
|
||||
def test_using_cached_player(jcp):
|
||||
first_player_requests = requests[:3]
|
||||
player = jcp._get_player(first_player_requests[0].video_id, first_player_requests[0].input.player_url)
|
||||
initial = json.loads(jcp._run_js_runtime(jcp._construct_stdin(player, False, first_player_requests)))
|
||||
preprocessed = initial.pop('preprocessed_player')
|
||||
result = json.loads(jcp._run_js_runtime(jcp._construct_stdin(preprocessed, True, first_player_requests)))
|
||||
|
||||
assert initial == result
|
||||
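Each entry in CHALLENGES is expanded into a request/response pair by the module-level loop above. A minimal sketch of building a single n-challenge request by hand, using values from the first CHALLENGES entry (the player id doubles as the video_id argument here, the same way the loop passes test.player):

    # Sketch only, mirroring the CHALLENGES loop above; not part of the commit.
    from yt_dlp.extractor.youtube.jsc.provider import (
        JsChallengeRequest,
        JsChallengeType,
        NChallengeInput,
    )

    player_url = 'https://www.youtube.com/s/player/3d3ba064/player_ias_tce.vflset/en_US/base.js'
    request = JsChallengeRequest(
        JsChallengeType.N,
        NChallengeInput(player_url, ['ZdZIqFPQK-Ty8wId']),
        '3d3ba064',  # passed as the video_id, as the loop does with test.player
    )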
194
test/test_jsc/test_provider.py
Normal file
|
|
@ -0,0 +1,194 @@
|
|||
|
||||
import pytest
|
||||
|
||||
from yt_dlp.extractor.youtube.jsc.provider import (
|
||||
JsChallengeProvider,
|
||||
JsChallengeRequest,
|
||||
JsChallengeProviderResponse,
|
||||
JsChallengeProviderRejectedRequest,
|
||||
JsChallengeType,
|
||||
JsChallengeResponse,
|
||||
NChallengeOutput,
|
||||
NChallengeInput,
|
||||
JsChallengeProviderError,
|
||||
register_provider,
|
||||
register_preference,
|
||||
)
|
||||
from yt_dlp.extractor.youtube.pot._provider import IEContentProvider
|
||||
from yt_dlp.utils import ExtractorError
|
||||
from yt_dlp.extractor.youtube.jsc._registry import _jsc_preferences, _jsc_providers
|
||||
|
||||
|
||||
class ExampleJCP(JsChallengeProvider):
|
||||
PROVIDER_NAME = 'example-provider'
|
||||
PROVIDER_VERSION = '0.0.1'
|
||||
BUG_REPORT_LOCATION = 'https://example.com/issues'
|
||||
|
||||
_SUPPORTED_TYPES = [JsChallengeType.N]
|
||||
|
||||
def is_available(self) -> bool:
|
||||
return True
|
||||
|
||||
def _real_bulk_solve(self, requests):
|
||||
for request in requests:
|
||||
results = dict.fromkeys(request.input.challenges, 'example-solution')
|
||||
response = JsChallengeResponse(
|
||||
type=request.type,
|
||||
output=NChallengeOutput(results=results))
|
||||
yield JsChallengeProviderResponse(request=request, response=response)
|
||||
|
||||
|
||||
PLAYER_URL = 'https://example.com/player.js'
|
||||
|
||||
|
||||
class TestJsChallengeProvider:
|
||||
# note: some tests are covered in TestPoTokenProvider, which shares the same base class
|
||||
def test_base_type(self):
|
||||
assert issubclass(JsChallengeProvider, IEContentProvider)
|
||||
|
||||
def test_create_provider_missing_bulk_solve_method(self, ie, logger):
|
||||
class MissingMethodsJCP(JsChallengeProvider):
|
||||
def is_available(self) -> bool:
|
||||
return True
|
||||
|
||||
with pytest.raises(TypeError, match='bulk_solve'):
|
||||
MissingMethodsJCP(ie=ie, logger=logger, settings={})
|
||||
|
||||
def test_create_provider_missing_available_method(self, ie, logger):
|
||||
class MissingMethodsJCP(JsChallengeProvider):
|
||||
def _real_bulk_solve(self, requests):
|
||||
raise JsChallengeProviderRejectedRequest('Not implemented')
|
||||
|
||||
with pytest.raises(TypeError, match='is_available'):
|
||||
MissingMethodsJCP(ie=ie, logger=logger, settings={})
|
||||
|
||||
def test_barebones_provider(self, ie, logger):
|
||||
class BarebonesProviderJCP(JsChallengeProvider):
|
||||
def is_available(self) -> bool:
|
||||
return True
|
||||
|
||||
def _real_bulk_solve(self, requests):
|
||||
raise JsChallengeProviderRejectedRequest('Not implemented')
|
||||
|
||||
provider = BarebonesProviderJCP(ie=ie, logger=logger, settings={})
|
||||
assert provider.PROVIDER_NAME == 'BarebonesProvider'
|
||||
assert provider.PROVIDER_KEY == 'BarebonesProvider'
|
||||
assert provider.PROVIDER_VERSION == '0.0.0'
|
||||
assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at (developer has not provided a bug report location) .'
|
||||
|
||||
def test_example_provider_success(self, ie, logger):
|
||||
provider = ExampleJCP(ie=ie, logger=logger, settings={})
|
||||
|
||||
request = JsChallengeRequest(
|
||||
type=JsChallengeType.N,
|
||||
input=NChallengeInput(player_url=PLAYER_URL, challenges=['example-challenge']))
|
||||
|
||||
request_two = JsChallengeRequest(
|
||||
type=JsChallengeType.N,
|
||||
input=NChallengeInput(player_url=PLAYER_URL, challenges=['example-challenge-2']))
|
||||
|
||||
responses = list(provider.bulk_solve([request, request_two]))
|
||||
assert len(responses) == 2
|
||||
assert all(isinstance(r, JsChallengeProviderResponse) for r in responses)
|
||||
assert responses == [
|
||||
JsChallengeProviderResponse(
|
||||
request=request,
|
||||
response=JsChallengeResponse(
|
||||
type=JsChallengeType.N,
|
||||
output=NChallengeOutput(results={'example-challenge': 'example-solution'}),
|
||||
),
|
||||
),
|
||||
JsChallengeProviderResponse(
|
||||
request=request_two,
|
||||
response=JsChallengeResponse(
|
||||
type=JsChallengeType.N,
|
||||
output=NChallengeOutput(results={'example-challenge-2': 'example-solution'}),
|
||||
),
|
||||
),
|
||||
]
|
||||
|
||||
def test_provider_unsupported_challenge_type(self, ie, logger):
|
||||
provider = ExampleJCP(ie=ie, logger=logger, settings={})
|
||||
request_supported = JsChallengeRequest(
|
||||
type=JsChallengeType.N,
|
||||
input=NChallengeInput(player_url=PLAYER_URL, challenges=['example-challenge']))
|
||||
request_unsupported = JsChallengeRequest(
|
||||
type=JsChallengeType.SIG,
|
||||
input=NChallengeInput(player_url=PLAYER_URL, challenges=['example-challenge']))
|
||||
responses = list(provider.bulk_solve([request_supported, request_unsupported, request_supported]))
|
||||
assert len(responses) == 3
|
||||
# Requests are validated up front, before continuing to _real_bulk_solve
|
||||
assert isinstance(responses[0], JsChallengeProviderResponse)
|
||||
assert isinstance(responses[0].error, JsChallengeProviderRejectedRequest)
|
||||
assert responses[0].request is request_unsupported
|
||||
assert str(responses[0].error) == 'JS Challenge type "JsChallengeType.SIG" is not supported by example-provider'
|
||||
|
||||
assert responses[1:] == [
|
||||
JsChallengeProviderResponse(
|
||||
request=request_supported,
|
||||
response=JsChallengeResponse(
|
||||
type=JsChallengeType.N,
|
||||
output=NChallengeOutput(results={'example-challenge': 'example-solution'}),
|
||||
),
|
||||
),
|
||||
JsChallengeProviderResponse(
|
||||
request=request_supported,
|
||||
response=JsChallengeResponse(
|
||||
type=JsChallengeType.N,
|
||||
output=NChallengeOutput(results={'example-challenge': 'example-solution'}),
|
||||
),
|
||||
),
|
||||
]
|
||||
|
||||
def test_provider_get_player(self, ie, logger):
|
||||
ie._load_player = lambda video_id, player_url, fatal: (video_id, player_url, fatal)
|
||||
provider = ExampleJCP(ie=ie, logger=logger, settings={})
|
||||
assert provider._get_player('video123', PLAYER_URL) == ('video123', PLAYER_URL, True)
|
||||
|
||||
def test_provider_get_player_error(self, ie, logger):
|
||||
def raise_error(video_id, player_url, fatal):
|
||||
raise ExtractorError('Failed to load player')
|
||||
|
||||
ie._load_player = raise_error
|
||||
provider = ExampleJCP(ie=ie, logger=logger, settings={})
|
||||
with pytest.raises(JsChallengeProviderError, match='Failed to load player for JS challenge'):
|
||||
provider._get_player('video123', PLAYER_URL)
|
||||
|
||||
def test_require_class_end_with_suffix(self, ie, logger):
|
||||
class InvalidSuffix(JsChallengeProvider):
|
||||
PROVIDER_NAME = 'invalid-suffix'
|
||||
|
||||
def _real_bulk_solve(self, requests):
|
||||
raise JsChallengeProviderRejectedRequest('Not implemented')
|
||||
|
||||
def is_available(self) -> bool:
|
||||
return True
|
||||
|
||||
provider = InvalidSuffix(ie=ie, logger=logger, settings={})
|
||||
|
||||
with pytest.raises(AssertionError):
|
||||
provider.PROVIDER_KEY # noqa: B018
|
||||
|
||||
|
||||
def test_register_provider(ie):
|
||||
|
||||
@register_provider
|
||||
class UnavailableProviderJCP(JsChallengeProvider):
|
||||
def is_available(self) -> bool:
|
||||
return False
|
||||
|
||||
def _real_bulk_solve(self, requests):
|
||||
raise JsChallengeProviderRejectedRequest('Not implemented')
|
||||
|
||||
assert _jsc_providers.value.get('UnavailableProvider') == UnavailableProviderJCP
|
||||
_jsc_providers.value.pop('UnavailableProvider')
|
||||
|
||||
|
||||
def test_register_preference(ie):
|
||||
before = len(_jsc_preferences.value)
|
||||
|
||||
@register_preference(ExampleJCP)
|
||||
def unavailable_preference(*args, **kwargs):
|
||||
return 1
|
||||
|
||||
assert len(_jsc_preferences.value) == before + 1
|
||||
|
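test_register_provider and test_register_preference above exercise the public registration hooks. A minimal sketch of how an external provider might use them, assuming the same import path (note test_require_class_end_with_suffix: the class name must end in JCP for PROVIDER_KEY to resolve):

    # Sketch only; not part of the commit.
    from yt_dlp.extractor.youtube.jsc.provider import (
        JsChallengeProvider,
        JsChallengeProviderRejectedRequest,
        register_preference,
        register_provider,
    )

    @register_provider
    class MySketchJCP(JsChallengeProvider):  # hypothetical name; must end in 'JCP'
        def is_available(self):
            return True

        def _real_bulk_solve(self, requests):
            raise JsChallengeProviderRejectedRequest('sketch only')

    @register_preference(MySketchJCP)
    def my_sketch_preference(*args, **kwargs):  # same loose signature as the test's preference
        return 1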
|
@ -9,7 +9,7 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|||
|
||||
import math
|
||||
|
||||
from yt_dlp.jsinterp import JS_Undefined, JSInterpreter
|
||||
from yt_dlp.jsinterp import JS_Undefined, JSInterpreter, js_number_to_string
|
||||
|
||||
|
||||
class NaN:
|
||||
|
|
@ -93,6 +93,16 @@ class TestJSInterpreter(unittest.TestCase):
|
|||
self._test('function f(){return 0 ?? 42;}', 0)
|
||||
self._test('function f(){return "life, the universe and everything" < 42;}', False)
|
||||
self._test('function f(){return 0 - 7 * - 6;}', 42)
|
||||
self._test('function f(){return true << "5";}', 32)
|
||||
self._test('function f(){return true << true;}', 2)
|
||||
self._test('function f(){return "19" & "21.9";}', 17)
|
||||
self._test('function f(){return "19" & false;}', 0)
|
||||
self._test('function f(){return "11.0" >> "2.1";}', 2)
|
||||
self._test('function f(){return 5 ^ 9;}', 12)
|
||||
self._test('function f(){return 0.0 << NaN}', 0)
|
||||
self._test('function f(){return null << undefined}', 0)
|
||||
# TODO: Does not work because the number is too large
|
||||
# self._test('function f(){return 21 << 4294967297}', 42)
|
||||
|
||||
def test_array_access(self):
|
||||
self._test('function f(){var x = [1,2,3]; x[0] = 4; x[0] = 5; x[2.0] = 7; return x;}', [5, 2, 7])
|
||||
|
|
@ -108,6 +118,7 @@ class TestJSInterpreter(unittest.TestCase):
|
|||
self._test('function f(){var x = 20; x = 30 + 1; return x;}', 31)
|
||||
self._test('function f(){var x = 20; x += 30 + 1; return x;}', 51)
|
||||
self._test('function f(){var x = 20; x -= 30 + 1; return x;}', -11)
|
||||
self._test('function f(){var x = 2; var y = ["a", "b"]; y[x%y["length"]]="z"; return y}', ['z', 'b'])
|
||||
|
||||
@unittest.skip('Not implemented')
|
||||
def test_comments(self):
|
||||
|
|
@ -374,7 +385,161 @@ class TestJSInterpreter(unittest.TestCase):
|
|||
@unittest.skip('Not implemented')
|
||||
def test_packed(self):
|
||||
jsi = JSInterpreter('''function f(p,a,c,k,e,d){while(c--)if(k[c])p=p.replace(new RegExp('\\b'+c.toString(a)+'\\b','g'),k[c]);return p}''')
|
||||
self.assertEqual(jsi.call_function('f', '''h 7=g("1j");7.7h({7g:[{33:"w://7f-7e-7d-7c.v.7b/7a/79/78/77/76.74?t=73&s=2s&e=72&f=2t&71=70.0.0.1&6z=6y&6x=6w"}],6v:"w://32.v.u/6u.31",16:"r%",15:"r%",6t:"6s",6r:"",6q:"l",6p:"l",6o:"6n",6m:\'6l\',6k:"6j",9:[{33:"/2u?b=6i&n=50&6h=w://32.v.u/6g.31",6f:"6e"}],1y:{6d:1,6c:\'#6b\',6a:\'#69\',68:"67",66:30,65:r,},"64":{63:"%62 2m%m%61%5z%5y%5x.u%5w%5v%5u.2y%22 2k%m%1o%22 5t%m%1o%22 5s%m%1o%22 2j%m%5r%22 16%m%5q%22 15%m%5p%22 5o%2z%5n%5m%2z",5l:"w://v.u/d/1k/5k.2y",5j:[]},\'5i\':{"5h":"5g"},5f:"5e",5d:"w://v.u",5c:{},5b:l,1x:[0.25,0.50,0.75,1,1.25,1.5,2]});h 1m,1n,5a;h 59=0,58=0;h 7=g("1j");h 2x=0,57=0,56=0;$.55({54:{\'53-52\':\'2i-51\'}});7.j(\'4z\',6(x){c(5>0&&x.1l>=5&&1n!=1){1n=1;$(\'q.4y\').4x(\'4w\')}});7.j(\'13\',6(x){2x=x.1l});7.j(\'2g\',6(x){2w(x)});7.j(\'4v\',6(){$(\'q.2v\').4u()});6 2w(x){$(\'q.2v\').4t();c(1m)19;1m=1;17=0;c(4s.4r===l){17=1}$.4q(\'/2u?b=4p&2l=1k&4o=2t-4n-4m-2s-4l&4k=&4j=&4i=&17=\'+17,6(2r){$(\'#4h\').4g(2r)});$(\'.3-8-4f-4e:4d("4c")\').2h(6(e){2q();g().4b(0);g().4a(l)});6 2q(){h $14=$("<q />").2p({1l:"49",16:"r%",15:"r%",48:0,2n:0,2o:47,46:"45(10%, 10%, 10%, 0.4)","44-43":"42"});$("<41 />").2p({16:"60%",15:"60%",2o:40,"3z-2n":"3y"}).3x({\'2m\':\'/?b=3w&2l=1k\',\'2k\':\'0\',\'2j\':\'2i\'}).2f($14);$14.2h(6(){$(3v).3u();g().2g()});$14.2f($(\'#1j\'))}g().13(0);}6 3t(){h 9=7.1b(2e);2d.2c(9);c(9.n>1){1r(i=0;i<9.n;i++){c(9[i].1a==2e){2d.2c(\'!!=\'+i);7.1p(i)}}}}7.j(\'3s\',6(){g().1h("/2a/3r.29","3q 10 28",6(){g().13(g().27()+10)},"2b");$("q[26=2b]").23().21(\'.3-20-1z\');g().1h("/2a/3p.29","3o 10 28",6(){h 12=g().27()-10;c(12<0)12=0;g().13(12)},"24");$("q[26=24]").23().21(\'.3-20-1z\');});6 1i(){}7.j(\'3n\',6(){1i()});7.j(\'3m\',6(){1i()});7.j("k",6(y){h 9=7.1b();c(9.n<2)19;$(\'.3-8-3l-3k\').3j(6(){$(\'#3-8-a-k\').1e(\'3-8-a-z\');$(\'.3-a-k\').p(\'o-1f\',\'11\')});7.1h("/3i/3h.3g","3f 3e",6(){$(\'.3-1w\').3d(\'3-8-1v\');$(\'.3-8-1y, .3-8-1x\').p(\'o-1g\',\'11\');c($(\'.3-1w\').3c(\'3-8-1v\')){$(\'.3-a-k\').p(\'o-1g\',\'l\');$(\'.3-a-k\').p(\'o-1f\',\'l\');$(\'.3-8-a\').1e(\'3-8-a-z\');$(\'.3-8-a:1u\').3b(\'3-8-a-z\')}3a{$(\'.3-a-k\').p(\'o-1g\',\'11\');$(\'.3-a-k\').p(\'o-1f\',\'11\');$(\'.3-8-a:1u\').1e(\'3-8-a-z\')}},"39");7.j("38",6(y){1d.37(\'1c\',y.9[y.36].1a)});c(1d.1t(\'1c\')){35("1s(1d.1t(\'1c\'));",34)}});h 18;6 1s(1q){h 
9=7.1b();c(9.n>1){1r(i=0;i<9.n;i++){c(9[i].1a==1q){c(i==18){19}18=i;7.1p(i)}}}}',36,270,'|||jw|||function|player|settings|tracks|submenu||if||||jwplayer|var||on|audioTracks|true|3D|length|aria|attr|div|100|||sx|filemoon|https||event|active||false|tt|seek|dd|height|width|adb|current_audio|return|name|getAudioTracks|default_audio|localStorage|removeClass|expanded|checked|addButton|callMeMaybe|vplayer|0fxcyc2ajhp1|position|vvplay|vvad|220|setCurrentAudioTrack|audio_name|for|audio_set|getItem|last|open|controls|playbackRates|captions|rewind|icon|insertAfter||detach|ff00||button|getPosition|sec|png|player8|ff11|log|console|track_name|appendTo|play|click|no|scrolling|frameborder|file_code|src|top|zIndex|css|showCCform|data|1662367683|383371|dl|video_ad|doPlay|prevt|mp4|3E||jpg|thumbs|file|300|setTimeout|currentTrack|setItem|audioTrackChanged|dualSound|else|addClass|hasClass|toggleClass|Track|Audio|svg|dualy|images|mousedown|buttons|topbar|playAttemptFailed|beforePlay|Rewind|fr|Forward|ff|ready|set_audio_track|remove|this|upload_srt|prop|50px|margin|1000001|iframe|center|align|text|rgba|background|1000000|left|absolute|pause|setCurrentCaptions|Upload|contains|item|content|html|fviews|referer|prem|embed|3e57249ef633e0d03bf76ceb8d8a4b65|216|83|hash|view|get|TokenZir|window|hide|show|complete|slow|fadeIn|video_ad_fadein|time||cache|Cache|Content|headers|ajaxSetup|v2done|tott|vastdone2|vastdone1|vvbefore|playbackRateControls|cast|aboutlink|FileMoon|abouttext|UHD|1870|qualityLabels|sites|GNOME_POWER|link|2Fiframe|3C|allowfullscreen|22360|22640|22no|marginheight|marginwidth|2FGNOME_POWER|2F0fxcyc2ajhp1|2Fe|2Ffilemoon|2F|3A||22https|3Ciframe|code|sharing|fontOpacity|backgroundOpacity|Tahoma|fontFamily|303030|backgroundColor|FFFFFF|color|userFontScale|thumbnails|kind|0fxcyc2ajhp10000|url|get_slides|start|startparam|none|preload|html5|primary|hlshtml|androidhls|duration|uniform|stretching|0fxcyc2ajhp1_xt|image|2048|sp|6871|asn|127|srv|43200|_g3XlBcu2lmD9oDexD2NLWSmah2Nu3XcDrl93m9PwXY|m3u8||master|0fxcyc2ajhp1_x|00076|01|hls2|to|s01|delivery|storage|moon|sources|setup'''.split('|')))
|
||||
self.assertEqual(jsi.call_function('f', '''h 7=g("1j");7.7h({7g:[{33:"w://7f-7e-7d-7c.v.7b/7a/79/78/77/76.74?t=73&s=2s&e=72&f=2t&71=70.0.0.1&6z=6y&6x=6w"}],6v:"w://32.v.u/6u.31",16:"r%",15:"r%",6t:"6s",6r:"",6q:"l",6p:"l",6o:"6n",6m:\'6l\',6k:"6j",9:[{33:"/2u?b=6i&n=50&6h=w://32.v.u/6g.31",6f:"6e"}],1y:{6d:1,6c:\'#6b\',6a:\'#69\',68:"67",66:30,65:r,},"64":{63:"%62 2m%m%61%5z%5y%5x.u%5w%5v%5u.2y%22 2k%m%1o%22 5t%m%1o%22 5s%m%1o%22 2j%m%5r%22 16%m%5q%22 15%m%5p%22 5o%2z%5n%5m%2z",5l:"w://v.u/d/1k/5k.2y",5j:[]},\'5i\':{"5h":"5g"},5f:"5e",5d:"w://v.u",5c:{},5b:l,1x:[0.25,0.50,0.75,1,1.25,1.5,2]});h 1m,1n,5a;h 59=0,58=0;h 7=g("1j");h 2x=0,57=0,56=0;$.55({54:{\'53-52\':\'2i-51\'}});7.j(\'4z\',6(x){c(5>0&&x.1l>=5&&1n!=1){1n=1;$(\'q.4y\').4x(\'4w\')}});7.j(\'13\',6(x){2x=x.1l});7.j(\'2g\',6(x){2w(x)});7.j(\'4v\',6(){$(\'q.2v\').4u()});6 2w(x){$(\'q.2v\').4t();c(1m)19;1m=1;17=0;c(4s.4r===l){17=1}$.4q(\'/2u?b=4p&2l=1k&4o=2t-4n-4m-2s-4l&4k=&4j=&4i=&17=\'+17,6(2r){$(\'#4h\').4g(2r)});$(\'.3-8-4f-4e:4d("4c")\').2h(6(e){2q();g().4b(0);g().4a(l)});6 2q(){h $14=$("<q />").2p({1l:"49",16:"r%",15:"r%",48:0,2n:0,2o:47,46:"45(10%, 10%, 10%, 0.4)","44-43":"42"});$("<41 />").2p({16:"60%",15:"60%",2o:40,"3z-2n":"3y"}).3x({\'2m\':\'/?b=3w&2l=1k\',\'2k\':\'0\',\'2j\':\'2i\'}).2f($14);$14.2h(6(){$(3v).3u();g().2g()});$14.2f($(\'#1j\'))}g().13(0);}6 3t(){h 9=7.1b(2e);2d.2c(9);c(9.n>1){1r(i=0;i<9.n;i++){c(9[i].1a==2e){2d.2c(\'!!=\'+i);7.1p(i)}}}}7.j(\'3s\',6(){g().1h("/2a/3r.29","3q 10 28",6(){g().13(g().27()+10)},"2b");$("q[26=2b]").23().21(\'.3-20-1z\');g().1h("/2a/3p.29","3o 10 28",6(){h 12=g().27()-10;c(12<0)12=0;g().13(12)},"24");$("q[26=24]").23().21(\'.3-20-1z\');});6 1i(){}7.j(\'3n\',6(){1i()});7.j(\'3m\',6(){1i()});7.j("k",6(y){h 9=7.1b();c(9.n<2)19;$(\'.3-8-3l-3k\').3j(6(){$(\'#3-8-a-k\').1e(\'3-8-a-z\');$(\'.3-a-k\').p(\'o-1f\',\'11\')});7.1h("/3i/3h.3g","3f 3e",6(){$(\'.3-1w\').3d(\'3-8-1v\');$(\'.3-8-1y, .3-8-1x\').p(\'o-1g\',\'11\');c($(\'.3-1w\').3c(\'3-8-1v\')){$(\'.3-a-k\').p(\'o-1g\',\'l\');$(\'.3-a-k\').p(\'o-1f\',\'l\');$(\'.3-8-a\').1e(\'3-8-a-z\');$(\'.3-8-a:1u\').3b(\'3-8-a-z\')}3a{$(\'.3-a-k\').p(\'o-1g\',\'11\');$(\'.3-a-k\').p(\'o-1f\',\'11\');$(\'.3-8-a:1u\').1e(\'3-8-a-z\')}},"39");7.j("38",6(y){1d.37(\'1c\',y.9[y.36].1a)});c(1d.1t(\'1c\')){35("1s(1d.1t(\'1c\'));",34)}});h 18;6 1s(1q){h 
9=7.1b();c(9.n>1){1r(i=0;i<9.n;i++){c(9[i].1a==1q){c(i==18){19}18=i;7.1p(i)}}}}',36,270,'|||jw|||function|player|settings|tracks|submenu||if||||jwplayer|var||on|audioTracks|true|3D|length|aria|attr|div|100|||sx|filemoon|https||event|active||false|tt|seek|dd|height|width|adb|current_audio|return|name|getAudioTracks|default_audio|localStorage|removeClass|expanded|checked|addButton|callMeMaybe|vplayer|0fxcyc2ajhp1|position|vvplay|vvad|220|setCurrentAudioTrack|audio_name|for|audio_set|getItem|last|open|controls|playbackRates|captions|rewind|icon|insertAfter||detach|ff00||button|getPosition|sec|png|player8|ff11|log|console|track_name|appendTo|play|click|no|scrolling|frameborder|file_code|src|top|zIndex|css|showCCform|data|1662367683|383371|dl|video_ad|doPlay|prevt|mp4|3E||jpg|thumbs|file|300|setTimeout|currentTrack|setItem|audioTrackChanged|dualSound|else|addClass|hasClass|toggleClass|Track|Audio|svg|dualy|images|mousedown|buttons|topbar|playAttemptFailed|beforePlay|Rewind|fr|Forward|ff|ready|set_audio_track|remove|this|upload_srt|prop|50px|margin|1000001|iframe|center|align|text|rgba|background|1000000|left|absolute|pause|setCurrentCaptions|Upload|contains|item|content|html|fviews|referer|prem|embed|3e57249ef633e0d03bf76ceb8d8a4b65|216|83|hash|view|get|TokenZir|window|hide|show|complete|slow|fadeIn|video_ad_fadein|time||cache|Cache|Content|headers|ajaxSetup|v2done|tott|vastdone2|vastdone1|vvbefore|playbackRateControls|cast|aboutlink|FileMoon|abouttext|UHD|1870|qualityLabels|sites|GNOME_POWER|link|2Fiframe|3C|allowfullscreen|22360|22640|22no|marginheight|marginwidth|2FGNOME_POWER|2F0fxcyc2ajhp1|2Fe|2Ffilemoon|2F|3A||22https|3Ciframe|code|sharing|fontOpacity|backgroundOpacity|Tahoma|fontFamily|303030|backgroundColor|FFFFFF|color|userFontScale|thumbnails|kind|0fxcyc2ajhp10000|url|get_slides|start|startparam|none|preload|html5|primary|hlshtml|androidhls|duration|uniform|stretching|0fxcyc2ajhp1_xt|image|2048|sp|6871|asn|127|srv|43200|_g3XlBcu2lmD9oDexD2NLWSmah2Nu3XcDrl93m9PwXY|m3u8||master|0fxcyc2ajhp1_x|00076|01|hls2|to|s01|delivery|storage|moon|sources|setup'''.split('|'))) # noqa: SIM905
|
||||
|
||||
def test_join(self):
|
||||
test_input = list('test')
|
||||
tests = [
|
||||
'function f(a, b){return a.join(b)}',
|
||||
'function f(a, b){return Array.prototype.join.call(a, b)}',
|
||||
'function f(a, b){return Array.prototype.join.apply(a, [b])}',
|
||||
]
|
||||
for test in tests:
|
||||
jsi = JSInterpreter(test)
|
||||
self._test(jsi, 'test', args=[test_input, ''])
|
||||
self._test(jsi, 't-e-s-t', args=[test_input, '-'])
|
||||
self._test(jsi, '', args=[[], '-'])
|
||||
|
||||
def test_split(self):
|
||||
test_result = list('test')
|
||||
tests = [
|
||||
'function f(a, b){return a.split(b)}',
|
||||
'function f(a, b){return a["split"](b)}',
|
||||
'function f(a, b){let x = ["split"]; return a[x[0]](b)}',
|
||||
'function f(a, b){return String.prototype.split.call(a, b)}',
|
||||
'function f(a, b){return String.prototype.split.apply(a, [b])}',
|
||||
]
|
||||
for test in tests:
|
||||
jsi = JSInterpreter(test)
|
||||
self._test(jsi, test_result, args=['test', ''])
|
||||
self._test(jsi, test_result, args=['t-e-s-t', '-'])
|
||||
self._test(jsi, [''], args=['', '-'])
|
||||
self._test(jsi, [], args=['', ''])
|
||||
|
||||
def test_slice(self):
|
||||
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice()}', [0, 1, 2, 3, 4, 5, 6, 7, 8])
|
||||
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(0)}', [0, 1, 2, 3, 4, 5, 6, 7, 8])
|
||||
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(5)}', [5, 6, 7, 8])
|
||||
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(99)}', [])
|
||||
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(-2)}', [7, 8])
|
||||
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(-99)}', [0, 1, 2, 3, 4, 5, 6, 7, 8])
|
||||
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(0, 0)}', [])
|
||||
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(1, 0)}', [])
|
||||
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(0, 1)}', [0])
|
||||
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(3, 6)}', [3, 4, 5])
|
||||
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(1, -1)}', [1, 2, 3, 4, 5, 6, 7])
|
||||
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(-1, 1)}', [])
|
||||
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(-3, -1)}', [6, 7])
|
||||
self._test('function f(){return "012345678".slice()}', '012345678')
|
||||
self._test('function f(){return "012345678".slice(0)}', '012345678')
|
||||
self._test('function f(){return "012345678".slice(5)}', '5678')
|
||||
self._test('function f(){return "012345678".slice(99)}', '')
|
||||
self._test('function f(){return "012345678".slice(-2)}', '78')
|
||||
self._test('function f(){return "012345678".slice(-99)}', '012345678')
|
||||
self._test('function f(){return "012345678".slice(0, 0)}', '')
|
||||
self._test('function f(){return "012345678".slice(1, 0)}', '')
|
||||
self._test('function f(){return "012345678".slice(0, 1)}', '0')
|
||||
self._test('function f(){return "012345678".slice(3, 6)}', '345')
|
||||
self._test('function f(){return "012345678".slice(1, -1)}', '1234567')
|
||||
self._test('function f(){return "012345678".slice(-1, 1)}', '')
|
||||
self._test('function f(){return "012345678".slice(-3, -1)}', '67')
|
||||
|
||||
def test_splice(self):
|
||||
self._test('function f(){var T = ["0", "1", "2"]; T["splice"](2, 1, "0")[0]; return T }', ['0', '1', '0'])
|
||||
|
||||
def test_js_number_to_string(self):
|
||||
for test, radix, expected in [
|
||||
(0, None, '0'),
|
||||
(-0, None, '0'),
|
||||
(0.0, None, '0'),
|
||||
(-0.0, None, '0'),
|
||||
(math.nan, None, 'NaN'),
|
||||
(-math.nan, None, 'NaN'),
|
||||
(math.inf, None, 'Infinity'),
|
||||
(-math.inf, None, '-Infinity'),
|
||||
(10 ** 21.5, 8, '526665530627250154000000'),
|
||||
(6, 2, '110'),
|
||||
(254, 16, 'fe'),
|
||||
(-10, 2, '-1010'),
|
||||
(-0xff, 2, '-11111111'),
|
||||
(0.1 + 0.2, 16, '0.4cccccccccccd'),
|
||||
(1234.1234, 10, '1234.1234'),
|
||||
# (1000000000000000128, 10, '1000000000000000100')
|
||||
]:
|
||||
assert js_number_to_string(test, radix) == expected
|
||||
|
||||
def test_extract_function(self):
|
||||
jsi = JSInterpreter('function a(b) { return b + 1; }')
|
||||
func = jsi.extract_function('a')
|
||||
self.assertEqual(func([2]), 3)
|
||||
|
||||
def test_extract_function_with_global_stack(self):
|
||||
jsi = JSInterpreter('function c(d) { return d + e + f + g; }')
|
||||
func = jsi.extract_function('c', {'e': 10}, {'f': 100, 'g': 1000})
|
||||
self.assertEqual(func([1]), 1111)
|
||||
|
||||
def test_extract_object(self):
|
||||
jsi = JSInterpreter('var a={};a.xy={};var xy;var zxy={};xy={z:function(){return "abc"}};')
|
||||
self.assertTrue('z' in jsi.extract_object('xy', None))
|
||||
|
||||
def test_increment_decrement(self):
|
||||
self._test('function f() { var x = 1; return ++x; }', 2)
|
||||
self._test('function f() { var x = 1; return x++; }', 1)
|
||||
self._test('function f() { var x = 1; x--; return x }', 0)
|
||||
self._test('function f() { var y; var x = 1; x++, --x, x--, x--, y="z", "abc", x++; return --x }', -1)
|
||||
self._test('function f() { var a = "test--"; return a; }', 'test--')
|
||||
self._test('function f() { var b = 1; var a = "b--"; return a; }', 'b--')
|
||||
|
||||
def test_nested_function_scoping(self):
|
||||
self._test(R'''
|
||||
function f() {
|
||||
var g = function() {
|
||||
var P = 2;
|
||||
return P;
|
||||
};
|
||||
var P = 1;
|
||||
g();
|
||||
return P;
|
||||
}
|
||||
''', 1)
|
||||
self._test(R'''
|
||||
function f() {
|
||||
var x = function() {
|
||||
for (var w = 1, M = []; w < 2; w++) switch (w) {
|
||||
case 1:
|
||||
M.push("a");
|
||||
case 2:
|
||||
M.push("b");
|
||||
}
|
||||
return M
|
||||
};
|
||||
var w = "c";
|
||||
var M = "d";
|
||||
var y = x();
|
||||
y.push(w);
|
||||
y.push(M);
|
||||
return y;
|
||||
}
|
||||
''', ['a', 'b', 'c', 'd'])
|
||||
self._test(R'''
|
||||
function f() {
|
||||
var P, Q;
|
||||
var z = 100;
|
||||
var g = function() {
|
||||
var P, Q; P = 2; Q = 15;
|
||||
z = 0;
|
||||
return P+Q;
|
||||
};
|
||||
P = 1; Q = 10;
|
||||
var x = g(), y = 3;
|
||||
return P+Q+x+y+z;
|
||||
}
|
||||
''', 31)
|
||||
|
||||
def test_undefined_varnames(self):
|
||||
jsi = JSInterpreter('function f(){ var a; return [a, b]; }')
|
||||
self._test(jsi, [JS_Undefined, JS_Undefined])
|
||||
self.assertEqual(jsi._undefined_varnames, {'b'})
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
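The js_number_to_string table above mirrors JavaScript's Number.prototype.toString() behaviour. A quick sketch of calling the helper directly, reusing values from that table:

    # Sketch only; values taken from the test table above.
    from yt_dlp.jsinterp import js_number_to_string

    assert js_number_to_string(254, 16) == 'fe'
    assert js_number_to_string(-0.0, None) == '0'
    assert js_number_to_string(0.1 + 0.2, 16) == '0.4cccccccccccd'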
|
|
|||
|
|
@ -3,6 +3,7 @@
|
|||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
|
||||
|
|
@ -22,7 +23,6 @@ import ssl
|
|||
import tempfile
|
||||
import threading
|
||||
import time
|
||||
import urllib.error
|
||||
import urllib.request
|
||||
import warnings
|
||||
import zlib
|
||||
|
|
@ -39,6 +39,7 @@ from yt_dlp.cookies import YoutubeDLCookieJar
|
|||
from yt_dlp.dependencies import brotli, curl_cffi, requests, urllib3
|
||||
from yt_dlp.networking import (
|
||||
HEADRequest,
|
||||
PATCHRequest,
|
||||
PUTRequest,
|
||||
Request,
|
||||
RequestDirector,
|
||||
|
|
@ -222,10 +223,7 @@ class HTTPTestRequestHandler(http.server.BaseHTTPRequestHandler):
|
|||
if encoding == 'br' and brotli:
|
||||
payload = brotli.compress(payload)
|
||||
elif encoding == 'gzip':
|
||||
buf = io.BytesIO()
|
||||
with gzip.GzipFile(fileobj=buf, mode='wb') as f:
|
||||
f.write(payload)
|
||||
payload = buf.getvalue()
|
||||
payload = gzip.compress(payload, mtime=0)
|
||||
elif encoding == 'deflate':
|
||||
payload = zlib.compress(payload)
|
||||
elif encoding == 'unsupported':
|
||||
|
|
@ -265,6 +263,11 @@ class HTTPTestRequestHandler(http.server.BaseHTTPRequestHandler):
|
|||
self.end_headers()
|
||||
self.wfile.write(payload)
|
||||
self.finish()
|
||||
elif self.path == '/get_cookie':
|
||||
self.send_response(200)
|
||||
self.send_header('Set-Cookie', 'test=ytdlp; path=/')
|
||||
self.end_headers()
|
||||
self.finish()
|
||||
else:
|
||||
self._status(404)
|
||||
|
||||
|
|
@ -309,6 +312,7 @@ class TestRequestHandlerBase:
|
|||
|
||||
|
||||
@pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
|
||||
@pytest.mark.handler_flaky('CurlCFFI', os.name == 'nt', reason='segfaults')
|
||||
class TestHTTPRequestHandler(TestRequestHandlerBase):
|
||||
|
||||
def test_verify_cert(self, handler):
|
||||
|
|
@ -338,6 +342,52 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
|
|||
validate_and_send(rh, Request(f'https://127.0.0.1:{https_port}/headers'))
|
||||
assert not issubclass(exc_info.type, CertificateVerifyError)
|
||||
|
||||
@pytest.mark.skip_handler('CurlCFFI', 'legacy_ssl ignored by CurlCFFI')
|
||||
def test_legacy_ssl_extension(self, handler):
|
||||
# HTTPS server with old ciphers
|
||||
# XXX: is there a better way to test this than to create a new server?
|
||||
https_httpd = http.server.ThreadingHTTPServer(
|
||||
('127.0.0.1', 0), HTTPTestRequestHandler)
|
||||
sslctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
|
||||
sslctx.maximum_version = ssl.TLSVersion.TLSv1_2
|
||||
sslctx.set_ciphers('SHA1:AESCCM:aDSS:eNULL:aNULL')
|
||||
sslctx.load_cert_chain(os.path.join(TEST_DIR, 'testcert.pem'), None)
|
||||
https_httpd.socket = sslctx.wrap_socket(https_httpd.socket, server_side=True)
|
||||
https_port = http_server_port(https_httpd)
|
||||
https_server_thread = threading.Thread(target=https_httpd.serve_forever)
|
||||
https_server_thread.daemon = True
|
||||
https_server_thread.start()
|
||||
|
||||
with handler(verify=False) as rh:
|
||||
res = validate_and_send(rh, Request(f'https://127.0.0.1:{https_port}/headers', extensions={'legacy_ssl': True}))
|
||||
assert res.status == 200
|
||||
res.close()
|
||||
|
||||
# Ensure only applies to request extension
|
||||
with pytest.raises(SSLError):
|
||||
validate_and_send(rh, Request(f'https://127.0.0.1:{https_port}/headers'))
|
||||
|
||||
@pytest.mark.skip_handler('CurlCFFI', 'legacy_ssl ignored by CurlCFFI')
|
||||
def test_legacy_ssl_support(self, handler):
|
||||
# HTTPS server with old ciphers
|
||||
# XXX: is there a better way to test this than to create a new server?
|
||||
https_httpd = http.server.ThreadingHTTPServer(
|
||||
('127.0.0.1', 0), HTTPTestRequestHandler)
|
||||
sslctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
|
||||
sslctx.maximum_version = ssl.TLSVersion.TLSv1_2
|
||||
sslctx.set_ciphers('SHA1:AESCCM:aDSS:eNULL:aNULL')
|
||||
sslctx.load_cert_chain(os.path.join(TEST_DIR, 'testcert.pem'), None)
|
||||
https_httpd.socket = sslctx.wrap_socket(https_httpd.socket, server_side=True)
|
||||
https_port = http_server_port(https_httpd)
|
||||
https_server_thread = threading.Thread(target=https_httpd.serve_forever)
|
||||
https_server_thread.daemon = True
|
||||
https_server_thread.start()
|
||||
|
||||
with handler(verify=False, legacy_ssl_support=True) as rh:
|
||||
res = validate_and_send(rh, Request(f'https://127.0.0.1:{https_port}/headers'))
|
||||
assert res.status == 200
|
||||
res.close()
|
||||
|
||||
def test_percent_encode(self, handler):
|
||||
with handler() as rh:
|
||||
# Unicode characters should be encoded with uppercase percent-encoding
|
||||
|
|
@ -490,6 +540,24 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
|
|||
rh, Request(f'http://127.0.0.1:{self.http_port}/headers', extensions={'cookiejar': cookiejar})).read()
|
||||
assert b'cookie: test=ytdlp' in data.lower()
|
||||
|
||||
def test_cookie_sync_only_cookiejar(self, handler):
|
||||
# Ensure that cookies are ONLY being handled by the cookiejar
|
||||
with handler() as rh:
|
||||
validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/get_cookie', extensions={'cookiejar': YoutubeDLCookieJar()}))
|
||||
data = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/headers', extensions={'cookiejar': YoutubeDLCookieJar()})).read()
|
||||
assert b'cookie: test=ytdlp' not in data.lower()
|
||||
|
||||
def test_cookie_sync_delete_cookie(self, handler):
|
||||
# Ensure that deleting cookies from the cookiejar also removes them from subsequent requests
|
||||
cookiejar = YoutubeDLCookieJar()
|
||||
with handler(cookiejar=cookiejar) as rh:
|
||||
validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/get_cookie'))
|
||||
data = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/headers')).read()
|
||||
assert b'cookie: test=ytdlp' in data.lower()
|
||||
cookiejar.clear_session_cookies()
|
||||
data = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/headers')).read()
|
||||
assert b'cookie: test=ytdlp' not in data.lower()
|
||||
|
||||
def test_headers(self, handler):
|
||||
|
||||
with handler(headers=HTTPHeaderDict({'test1': 'test', 'test2': 'test2'})) as rh:
|
||||
|
|
@ -545,12 +613,14 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
|
|||
rh, Request(f'http://127.0.0.1:{self.http_port}/source_address')).read().decode()
|
||||
assert source_address == data
|
||||
|
||||
# Not supported by CurlCFFI
|
||||
@pytest.mark.skip_handler('CurlCFFI', 'not supported by curl-cffi')
|
||||
def test_gzip_trailing_garbage(self, handler):
|
||||
with handler() as rh:
|
||||
data = validate_and_send(rh, Request(f'http://localhost:{self.http_port}/trailing_garbage')).read().decode()
|
||||
res = validate_and_send(rh, Request(f'http://localhost:{self.http_port}/trailing_garbage'))
|
||||
data = res.read().decode()
|
||||
assert data == '<html><video src="/vid.mp4" /></html>'
|
||||
# Should auto-close and mark the response adaptor as closed
|
||||
assert res.closed
|
||||
|
||||
@pytest.mark.skip_handler('CurlCFFI', 'not applicable to curl-cffi')
|
||||
@pytest.mark.skipif(not brotli, reason='brotli support is not installed')
|
||||
|
|
@ -562,6 +632,8 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
|
|||
headers={'ytdl-encoding': 'br'}))
|
||||
assert res.headers.get('Content-Encoding') == 'br'
|
||||
assert res.read() == b'<html><video src="/vid.mp4" /></html>'
|
||||
# Should auto-close and mark the response adaptor as closed
|
||||
assert res.closed
|
||||
|
||||
def test_deflate(self, handler):
|
||||
with handler() as rh:
|
||||
|
|
@ -571,6 +643,8 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
|
|||
headers={'ytdl-encoding': 'deflate'}))
|
||||
assert res.headers.get('Content-Encoding') == 'deflate'
|
||||
assert res.read() == b'<html><video src="/vid.mp4" /></html>'
|
||||
# Should auto-close and mark the response adaptor as closed
|
||||
assert res.closed
|
||||
|
||||
def test_gzip(self, handler):
|
||||
with handler() as rh:
|
||||
|
|
@ -580,6 +654,8 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
|
|||
headers={'ytdl-encoding': 'gzip'}))
|
||||
assert res.headers.get('Content-Encoding') == 'gzip'
|
||||
assert res.read() == b'<html><video src="/vid.mp4" /></html>'
|
||||
# Should auto-close and mark the response adaptor as closed
|
||||
assert res.closed
|
||||
|
||||
def test_multiple_encodings(self, handler):
|
||||
with handler() as rh:
|
||||
|
|
@ -590,6 +666,8 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
|
|||
headers={'ytdl-encoding': pair}))
|
||||
assert res.headers.get('Content-Encoding') == pair
|
||||
assert res.read() == b'<html><video src="/vid.mp4" /></html>'
|
||||
# Should auto-close and mark the response adaptor as closed
|
||||
assert res.closed
|
||||
|
||||
@pytest.mark.skip_handler('CurlCFFI', 'not supported by curl-cffi')
|
||||
def test_unsupported_encoding(self, handler):
|
||||
|
|
@ -600,6 +678,8 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
|
|||
headers={'ytdl-encoding': 'unsupported', 'Accept-Encoding': '*'}))
|
||||
assert res.headers.get('Content-Encoding') == 'unsupported'
|
||||
assert res.read() == b'raw'
|
||||
# Should auto-close and mark the response adaptor as closed
|
||||
assert res.closed
|
||||
|
||||
def test_read(self, handler):
|
||||
with handler() as rh:
|
||||
|
|
@ -607,9 +687,13 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
|
|||
rh, Request(f'http://127.0.0.1:{self.http_port}/headers'))
|
||||
assert res.readable()
|
||||
assert res.read(1) == b'H'
|
||||
# Ensure we don't close the adaptor yet
|
||||
assert not res.closed
|
||||
assert res.read(3) == b'ost'
|
||||
assert res.read().decode().endswith('\n\n')
|
||||
assert res.read() == b''
|
||||
# Should auto-close and mark the response adaptor as closed
|
||||
assert res.closed
|
||||
|
||||
def test_request_disable_proxy(self, handler):
|
||||
for proxy_proto in handler._SUPPORTED_PROXY_SCHEMES or ['http']:
|
||||
|
|
@ -651,8 +735,40 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
|
|||
rh, Request(
|
||||
f'http://127.0.0.1:{self.http_port}/headers', proxies={'all': 'http://10.255.255.255'})).close()
|
||||
|
||||
@pytest.mark.skip_handlers_if(lambda _, handler: handler not in ['Urllib', 'CurlCFFI'], 'handler does not support keep_header_casing')
|
||||
def test_keep_header_casing(self, handler):
|
||||
with handler() as rh:
|
||||
res = validate_and_send(
|
||||
rh, Request(
|
||||
f'http://127.0.0.1:{self.http_port}/headers', headers={'X-test-heaDer': 'test'}, extensions={'keep_header_casing': True})).read().decode()
|
||||
|
||||
assert 'X-test-heaDer: test' in res
|
||||
|
||||
def test_partial_read_then_full_read(self, handler):
|
||||
with handler() as rh:
|
||||
for encoding in ('', 'gzip', 'deflate'):
|
||||
res = validate_and_send(rh, Request(
|
||||
f'http://127.0.0.1:{self.http_port}/content-encoding',
|
||||
headers={'ytdl-encoding': encoding}))
|
||||
assert res.headers.get('Content-Encoding') == encoding
|
||||
assert res.read(6) == b'<html>'
|
||||
assert res.read(0) == b''
|
||||
assert res.read() == b'<video src="/vid.mp4" /></html>'
|
||||
|
||||
def test_partial_read_greater_than_response_then_full_read(self, handler):
|
||||
with handler() as rh:
|
||||
for encoding in ('', 'gzip', 'deflate'):
|
||||
res = validate_and_send(rh, Request(
|
||||
f'http://127.0.0.1:{self.http_port}/content-encoding',
|
||||
headers={'ytdl-encoding': encoding}))
|
||||
assert res.headers.get('Content-Encoding') == encoding
|
||||
assert res.read(512) == b'<html><video src="/vid.mp4" /></html>'
|
||||
assert res.read(0) == b''
|
||||
assert res.read() == b''
|
||||
|
||||
|
||||
@pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
|
||||
@pytest.mark.handler_flaky('CurlCFFI', reason='segfaults')
|
||||
class TestClientCertificate:
|
||||
@classmethod
|
||||
def setup_class(cls):
|
||||
|
|
@ -753,6 +869,24 @@ class TestRequestHandlerMisc:
|
|||
rh.close()
|
||||
assert len(logging_handlers) == before_count
|
||||
|
||||
def test_wrap_request_errors(self):
|
||||
class TestRequestHandler(RequestHandler):
|
||||
def _validate(self, request):
|
||||
if request.headers.get('x-fail'):
|
||||
raise UnsupportedRequest('test error')
|
||||
|
||||
def _send(self, request: Request):
|
||||
raise RequestError('test error')
|
||||
|
||||
with TestRequestHandler(logger=FakeLogger()) as rh:
|
||||
with pytest.raises(UnsupportedRequest, match='test error') as exc_info:
|
||||
rh.validate(Request('http://example.com', headers={'x-fail': '1'}))
|
||||
assert exc_info.value.handler is rh
|
||||
|
||||
with pytest.raises(RequestError, match='test error') as exc_info:
|
||||
rh.send(Request('http://example.com'))
|
||||
assert exc_info.value.handler is rh
|
||||
|
||||
|
||||
@pytest.mark.parametrize('handler', ['Urllib'], indirect=True)
|
||||
class TestUrllibRequestHandler(TestRequestHandlerBase):
|
||||
|
|
@ -772,11 +906,53 @@ class TestUrllibRequestHandler(TestRequestHandlerBase):
|
|||
|
||||
with handler(enable_file_urls=True) as rh:
|
||||
res = validate_and_send(rh, req)
|
||||
assert res.read() == b'foobar'
|
||||
res.close()
|
||||
assert res.read(1) == b'f'
|
||||
assert not res.fp.closed
|
||||
assert res.read() == b'oobar'
|
||||
# Should automatically close the underlying file object
|
||||
assert res.fp.closed
|
||||
|
||||
os.unlink(tf.name)
|
||||
|
||||
def test_data_uri_auto_close(self, handler):
|
||||
with handler() as rh:
|
||||
res = validate_and_send(rh, Request('data:text/plain,hello%20world'))
|
||||
assert res.read() == b'hello world'
|
||||
# Should automatically close the underlying file object
|
||||
assert res.fp.closed
|
||||
assert res.closed
|
||||
|
||||
def test_http_response_auto_close(self, handler):
|
||||
with handler() as rh:
|
||||
res = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/gen_200'))
|
||||
assert res.read() == b'<html></html>'
|
||||
# Should automatically close the underlying file object in the HTTP Response
|
||||
assert isinstance(res.fp, http.client.HTTPResponse)
|
||||
assert res.fp.fp is None
|
||||
assert res.closed
|
||||
|
||||
def test_data_uri_partial_read_then_full_read(self, handler):
|
||||
with handler() as rh:
|
||||
res = validate_and_send(rh, Request('data:text/plain,hello%20world'))
|
||||
assert res.read(6) == b'hello '
|
||||
assert res.read(0) == b''
|
||||
assert res.read() == b'world'
|
||||
# Should automatically close the underlying file object
|
||||
assert res.fp.closed
|
||||
assert res.closed
|
||||
|
||||
def test_data_uri_partial_read_greater_than_response_then_full_read(self, handler):
|
||||
with handler() as rh:
|
||||
res = validate_and_send(rh, Request('data:text/plain,hello%20world'))
|
||||
assert res.read(512) == b'hello world'
|
||||
# Response and its underlying file object should already be closed now
|
||||
assert res.fp.closed
|
||||
assert res.closed
|
||||
assert res.read(0) == b''
|
||||
assert res.read() == b''
|
||||
assert res.fp.closed
|
||||
assert res.closed
|
||||
|
||||
def test_http_error_returns_content(self, handler):
|
||||
# urllib HTTPError will try to close the underlying response if the reference to the HTTPError object is lost
|
||||
def get_response():
|
||||
|
|
@ -909,12 +1085,20 @@ class TestRequestsRequestHandler(TestRequestHandlerBase):
|
|||
rh.close()
|
||||
assert called
|
||||
|
||||
def test_http_response_auto_close(self, handler):
|
||||
with handler() as rh:
|
||||
res = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/gen_200'))
|
||||
assert res.read() == b'<html></html>'
|
||||
# Should automatically close the underlying file object in the HTTP Response
|
||||
assert res.fp.closed
|
||||
assert res.closed
|
||||
|
||||
|
||||
@pytest.mark.parametrize('handler', ['CurlCFFI'], indirect=True)
|
||||
@pytest.mark.handler_flaky('CurlCFFI', os.name == 'nt', reason='segfaults')
|
||||
class TestCurlCFFIRequestHandler(TestRequestHandlerBase):
|
||||
|
||||
@pytest.mark.parametrize('params,extensions', [
|
||||
({}, {'impersonate': ImpersonateTarget('chrome')}),
|
||||
({'impersonate': ImpersonateTarget('chrome', '110')}, {}),
|
||||
({'impersonate': ImpersonateTarget('chrome', '99')}, {'impersonate': ImpersonateTarget('chrome', '110')}),
|
||||
])
|
||||
|
|
@ -1075,6 +1259,14 @@ class TestCurlCFFIRequestHandler(TestRequestHandlerBase):
|
|||
assert res4.closed
|
||||
assert res4._buffer == b''
|
||||
|
||||
def test_http_response_auto_close(self, handler):
|
||||
with handler() as rh:
|
||||
res = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/gen_200'))
|
||||
assert res.read() == b'<html></html>'
|
||||
# Should automatically close the underlying file object in the HTTP Response
|
||||
assert res.fp.closed
|
||||
assert res.closed
|
||||
|
||||
|
||||
def run_validation(handler, error, req, **handler_kwargs):
|
||||
with handler(**handler_kwargs) as rh:
|
||||
|
|
@ -1200,6 +1392,10 @@ class TestRequestHandlerValidation:
|
|||
({'timeout': 1}, False),
|
||||
({'timeout': 'notatimeout'}, AssertionError),
|
||||
({'unsupported': 'value'}, UnsupportedRequest),
|
||||
({'legacy_ssl': False}, False),
|
||||
({'legacy_ssl': True}, False),
|
||||
({'legacy_ssl': 'notabool'}, AssertionError),
|
||||
({'keep_header_casing': True}, UnsupportedRequest),
|
||||
]),
|
||||
('Requests', 'http', [
|
||||
({'cookiejar': 'notacookiejar'}, AssertionError),
|
||||
|
|
@ -1207,6 +1403,12 @@ class TestRequestHandlerValidation:
|
|||
({'timeout': 1}, False),
|
||||
({'timeout': 'notatimeout'}, AssertionError),
|
||||
({'unsupported': 'value'}, UnsupportedRequest),
|
||||
({'legacy_ssl': False}, False),
|
||||
({'legacy_ssl': True}, False),
|
||||
({'legacy_ssl': 'notabool'}, AssertionError),
|
||||
({'keep_header_casing': False}, False),
|
||||
({'keep_header_casing': True}, False),
|
||||
({'keep_header_casing': 'notabool'}, AssertionError),
|
||||
]),
|
||||
('CurlCFFI', 'http', [
|
||||
({'cookiejar': 'notacookiejar'}, AssertionError),
|
||||
|
|
@ -1220,6 +1422,9 @@ class TestRequestHandlerValidation:
|
|||
({'impersonate': ImpersonateTarget(None, None, None, None)}, False),
|
||||
({'impersonate': ImpersonateTarget()}, False),
|
||||
({'impersonate': 'chrome'}, AssertionError),
|
||||
({'legacy_ssl': False}, False),
|
||||
({'legacy_ssl': True}, False),
|
||||
({'legacy_ssl': 'notabool'}, AssertionError),
|
||||
]),
|
||||
(NoCheckRH, 'http', [
|
||||
({'cookiejar': 'notacookiejar'}, False),
|
||||
|
|
@ -1228,6 +1433,9 @@ class TestRequestHandlerValidation:
|
|||
('Websockets', 'ws', [
|
||||
({'cookiejar': YoutubeDLCookieJar()}, False),
|
||||
({'timeout': 2}, False),
|
||||
({'legacy_ssl': False}, False),
|
||||
({'legacy_ssl': True}, False),
|
||||
({'legacy_ssl': 'notabool'}, AssertionError),
|
||||
]),
|
||||
]
|
||||
|
||||
|
|
@ -1746,6 +1954,7 @@ class TestRequest:
|
|||
|
||||
def test_request_helpers(self):
|
||||
assert HEADRequest('http://example.com').method == 'HEAD'
|
||||
assert PATCHRequest('http://example.com').method == 'PATCH'
|
||||
assert PUTRequest('http://example.com').method == 'PUT'
|
||||
|
||||
def test_headers(self):
|
||||
|
|
@ -1913,6 +2122,30 @@ class TestResponse:
|
|||
assert res.info() is res.headers
|
||||
assert res.getheader('test') == res.get_header('test')
|
||||
|
||||
def test_auto_close(self):
|
||||
# Should mark the response as closed if the underlying file is closed
|
||||
class AutoCloseBytesIO(io.BytesIO):
|
||||
def read(self, size=-1, /):
|
||||
data = super().read(size)
|
||||
self.close()
|
||||
return data
|
||||
|
||||
fp = AutoCloseBytesIO(b'test')
|
||||
res = Response(fp, url='test://', headers={}, status=200)
|
||||
assert not res.closed
|
||||
res.read()
|
||||
assert res.closed
|
||||
|
||||
def test_close(self):
|
||||
# Should not call close() on the underlying file when already closed
|
||||
fp = MagicMock()
|
||||
fp.closed = False
|
||||
res = Response(fp, url='test://', headers={}, status=200)
|
||||
res.close()
|
||||
fp.closed = True
|
||||
res.close()
|
||||
assert fp.close.call_count == 1
|
||||
|
||||
|
||||
class TestImpersonateTarget:
|
||||
@pytest.mark.parametrize('target_str,expected', [
|
||||
|
|
|
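The networking test changes above add coverage for two per-request extensions, legacy_ssl and keep_header_casing, and for responses auto-closing once fully read. A minimal sketch of passing both extensions on a Request; whether a given handler accepts them follows the validation cases above, and example.com stands in for a real endpoint:

    # Sketch only; extension names taken from the tests above.
    from yt_dlp.networking import Request

    req = Request(
        'https://example.com/headers',
        headers={'X-test-heaDer': 'test'},
        extensions={'legacy_ssl': True, 'keep_header_casing': True},
    )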
|||
|
|
@ -20,7 +20,6 @@ from yt_dlp.networking._helper import (
|
|||
add_accept_encoding_header,
|
||||
get_redirect_method,
|
||||
make_socks_proxy_opts,
|
||||
select_proxy,
|
||||
ssl_load_certs,
|
||||
)
|
||||
from yt_dlp.networking.exceptions import (
|
||||
|
|
@ -28,7 +27,7 @@ from yt_dlp.networking.exceptions import (
|
|||
IncompleteRead,
|
||||
)
|
||||
from yt_dlp.socks import ProxyType
|
||||
from yt_dlp.utils.networking import HTTPHeaderDict
|
||||
from yt_dlp.utils.networking import HTTPHeaderDict, select_proxy
|
||||
|
||||
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
|
|
|||
|
|
@ -29,7 +29,7 @@ class TestOverwrites(unittest.TestCase):
|
|||
'-o', 'test.webm',
|
||||
'https://www.youtube.com/watch?v=jNQXAC9IVRw',
|
||||
], cwd=root_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
sout, serr = outp.communicate()
|
||||
sout, _ = outp.communicate()
|
||||
self.assertTrue(b'has already been downloaded' in sout)
|
||||
# if the file has no content, it has not been redownloaded
|
||||
self.assertTrue(os.path.getsize(download_file) < 1)
|
||||
|
|
@ -41,7 +41,7 @@ class TestOverwrites(unittest.TestCase):
|
|||
'-o', 'test.webm',
|
||||
'https://www.youtube.com/watch?v=jNQXAC9IVRw',
|
||||
], cwd=root_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
sout, serr = outp.communicate()
|
||||
sout, _ = outp.communicate()
|
||||
self.assertTrue(b'has already been downloaded' not in sout)
|
||||
# if the file has no content, it has not been redownloaded
|
||||
self.assertTrue(os.path.getsize(download_file) > 1)
|
||||
|
|
|
|||
|
|
@ -10,21 +10,71 @@ TEST_DATA_DIR = Path(os.path.dirname(os.path.abspath(__file__)), 'testdata')
|
|||
sys.path.append(str(TEST_DATA_DIR))
|
||||
importlib.invalidate_caches()
|
||||
|
||||
from yt_dlp.plugins import PACKAGE_NAME, directories, load_plugins
|
||||
from yt_dlp.plugins import (
|
||||
PACKAGE_NAME,
|
||||
PluginSpec,
|
||||
directories,
|
||||
load_plugins,
|
||||
load_all_plugins,
|
||||
register_plugin_spec,
|
||||
)
|
||||
|
||||
from yt_dlp.globals import (
|
||||
extractors,
|
||||
postprocessors,
|
||||
plugin_dirs,
|
||||
plugin_ies,
|
||||
plugin_pps,
|
||||
all_plugins_loaded,
|
||||
plugin_specs,
|
||||
)
|
||||
|
||||
|
||||
EXTRACTOR_PLUGIN_SPEC = PluginSpec(
|
||||
module_name='extractor',
|
||||
suffix='IE',
|
||||
destination=extractors,
|
||||
plugin_destination=plugin_ies,
|
||||
)
|
||||
|
||||
POSTPROCESSOR_PLUGIN_SPEC = PluginSpec(
|
||||
module_name='postprocessor',
|
||||
suffix='PP',
|
||||
destination=postprocessors,
|
||||
plugin_destination=plugin_pps,
|
||||
)
|
||||
|
||||
|
||||
def reset_plugins():
|
||||
plugin_ies.value = {}
|
||||
plugin_pps.value = {}
|
||||
plugin_dirs.value = ['default']
|
||||
plugin_specs.value = {}
|
||||
all_plugins_loaded.value = False
|
||||
# Clearing override plugins is probably difficult
|
||||
for module_name in tuple(sys.modules):
|
||||
for plugin_type in ('extractor', 'postprocessor'):
|
||||
if module_name.startswith(f'{PACKAGE_NAME}.{plugin_type}.'):
|
||||
del sys.modules[module_name]
|
||||
|
||||
importlib.invalidate_caches()
|
||||
|
||||
|
||||
class TestPlugins(unittest.TestCase):
|
||||
|
||||
TEST_PLUGIN_DIR = TEST_DATA_DIR / PACKAGE_NAME
|
||||
|
||||
def setUp(self):
|
||||
reset_plugins()
|
||||
|
||||
def tearDown(self):
|
||||
reset_plugins()
|
||||
|
||||
def test_directories_containing_plugins(self):
|
||||
self.assertIn(self.TEST_PLUGIN_DIR, map(Path, directories()))
|
||||
|
||||
def test_extractor_classes(self):
|
||||
for module_name in tuple(sys.modules):
|
||||
if module_name.startswith(f'{PACKAGE_NAME}.extractor'):
|
||||
del sys.modules[module_name]
|
||||
plugins_ie = load_plugins('extractor', 'IE')
|
||||
plugins_ie = load_plugins(EXTRACTOR_PLUGIN_SPEC)
|
||||
|
||||
self.assertIn(f'{PACKAGE_NAME}.extractor.normal', sys.modules.keys())
|
||||
self.assertIn('NormalPluginIE', plugins_ie.keys())
|
||||
|
|
@ -34,17 +84,29 @@ class TestPlugins(unittest.TestCase):
|
|||
f'{PACKAGE_NAME}.extractor._ignore' in sys.modules,
|
||||
'loaded module beginning with underscore')
|
||||
self.assertNotIn('IgnorePluginIE', plugins_ie.keys())
|
||||
self.assertNotIn('IgnorePluginIE', plugin_ies.value)
|
        # Don't load extractors with underscore prefix
        self.assertNotIn('_IgnoreUnderscorePluginIE', plugins_ie.keys())
        self.assertNotIn('_IgnoreUnderscorePluginIE', plugin_ies.value)

        # Don't load extractors not specified in __all__ (if supplied)
        self.assertNotIn('IgnoreNotInAllPluginIE', plugins_ie.keys())
        self.assertNotIn('IgnoreNotInAllPluginIE', plugin_ies.value)
        self.assertIn('InAllPluginIE', plugins_ie.keys())
        self.assertIn('InAllPluginIE', plugin_ies.value)

        # Don't load override extractors
        self.assertNotIn('OverrideGenericIE', plugins_ie.keys())
        self.assertNotIn('OverrideGenericIE', plugin_ies.value)
        self.assertNotIn('_UnderscoreOverrideGenericIE', plugins_ie.keys())
        self.assertNotIn('_UnderscoreOverrideGenericIE', plugin_ies.value)

    def test_postprocessor_classes(self):
        plugins_pp = load_plugins('postprocessor', 'PP')
        plugins_pp = load_plugins(POSTPROCESSOR_PLUGIN_SPEC)
        self.assertIn('NormalPluginPP', plugins_pp.keys())
        self.assertIn(f'{PACKAGE_NAME}.postprocessor.normal', sys.modules.keys())
        self.assertIn('NormalPluginPP', plugin_pps.value)

    def test_importing_zipped_module(self):
        zip_path = TEST_DATA_DIR / 'zipped_plugins.zip'

@@ -57,10 +119,10 @@ class TestPlugins(unittest.TestCase):
                package = importlib.import_module(f'{PACKAGE_NAME}.{plugin_type}')
                self.assertIn(zip_path / PACKAGE_NAME / plugin_type, map(Path, package.__path__))

            plugins_ie = load_plugins('extractor', 'IE')
            plugins_ie = load_plugins(EXTRACTOR_PLUGIN_SPEC)
            self.assertIn('ZippedPluginIE', plugins_ie.keys())

            plugins_pp = load_plugins('postprocessor', 'PP')
            plugins_pp = load_plugins(POSTPROCESSOR_PLUGIN_SPEC)
            self.assertIn('ZippedPluginPP', plugins_pp.keys())

        finally:

@@ -68,6 +130,117 @@ class TestPlugins(unittest.TestCase):
            os.remove(zip_path)
            importlib.invalidate_caches()  # reset the import caches

    def test_reloading_plugins(self):
        reload_plugins_path = TEST_DATA_DIR / 'reload_plugins'
        load_plugins(EXTRACTOR_PLUGIN_SPEC)
        load_plugins(POSTPROCESSOR_PLUGIN_SPEC)

        # Remove default folder and add reload_plugin path
        sys.path.remove(str(TEST_DATA_DIR))
        sys.path.append(str(reload_plugins_path))
        importlib.invalidate_caches()
        try:
            for plugin_type in ('extractor', 'postprocessor'):
                package = importlib.import_module(f'{PACKAGE_NAME}.{plugin_type}')
                self.assertIn(reload_plugins_path / PACKAGE_NAME / plugin_type, map(Path, package.__path__))

            plugins_ie = load_plugins(EXTRACTOR_PLUGIN_SPEC)
            self.assertIn('NormalPluginIE', plugins_ie.keys())
            self.assertTrue(
                plugins_ie['NormalPluginIE'].REPLACED,
                msg='Reloading has not replaced original extractor plugin')
            self.assertTrue(
                extractors.value['NormalPluginIE'].REPLACED,
                msg='Reloading has not replaced original extractor plugin globally')

            plugins_pp = load_plugins(POSTPROCESSOR_PLUGIN_SPEC)
            self.assertIn('NormalPluginPP', plugins_pp.keys())
            self.assertTrue(plugins_pp['NormalPluginPP'].REPLACED,
                            msg='Reloading has not replaced original postprocessor plugin')
            self.assertTrue(
                postprocessors.value['NormalPluginPP'].REPLACED,
                msg='Reloading has not replaced original postprocessor plugin globally')

        finally:
            sys.path.remove(str(reload_plugins_path))
            sys.path.append(str(TEST_DATA_DIR))
            importlib.invalidate_caches()

    def test_extractor_override_plugin(self):
        load_plugins(EXTRACTOR_PLUGIN_SPEC)

        from yt_dlp.extractor.generic import GenericIE

        self.assertEqual(GenericIE.TEST_FIELD, 'override')
        self.assertEqual(GenericIE.SECONDARY_TEST_FIELD, 'underscore-override')

        self.assertEqual(GenericIE.IE_NAME, 'generic+override+underscore-override')
        importlib.invalidate_caches()
        # test that loading a second time doesn't wrap a second time
        load_plugins(EXTRACTOR_PLUGIN_SPEC)
        from yt_dlp.extractor.generic import GenericIE
        self.assertEqual(GenericIE.IE_NAME, 'generic+override+underscore-override')

    def test_load_all_plugin_types(self):

        # no plugin specs registered
        load_all_plugins()

        self.assertNotIn(f'{PACKAGE_NAME}.extractor.normal', sys.modules.keys())
        self.assertNotIn(f'{PACKAGE_NAME}.postprocessor.normal', sys.modules.keys())

        register_plugin_spec(EXTRACTOR_PLUGIN_SPEC)
        register_plugin_spec(POSTPROCESSOR_PLUGIN_SPEC)
        load_all_plugins()
        self.assertTrue(all_plugins_loaded.value)

        self.assertIn(f'{PACKAGE_NAME}.extractor.normal', sys.modules.keys())
        self.assertIn(f'{PACKAGE_NAME}.postprocessor.normal', sys.modules.keys())

    def test_no_plugin_dirs(self):
        register_plugin_spec(EXTRACTOR_PLUGIN_SPEC)
        register_plugin_spec(POSTPROCESSOR_PLUGIN_SPEC)

        plugin_dirs.value = []
        load_all_plugins()

        self.assertNotIn(f'{PACKAGE_NAME}.extractor.normal', sys.modules.keys())
        self.assertNotIn(f'{PACKAGE_NAME}.postprocessor.normal', sys.modules.keys())

    def test_set_plugin_dirs(self):
        custom_plugin_dir = str(TEST_DATA_DIR / 'plugin_packages')
        plugin_dirs.value = [custom_plugin_dir]

        load_plugins(EXTRACTOR_PLUGIN_SPEC)

        self.assertIn(f'{PACKAGE_NAME}.extractor.package', sys.modules.keys())
        self.assertIn('PackagePluginIE', plugin_ies.value)

    def test_invalid_plugin_dir(self):
        plugin_dirs.value = ['invalid_dir']
        with self.assertRaises(ValueError):
            load_plugins(EXTRACTOR_PLUGIN_SPEC)

    def test_append_plugin_dirs(self):
        custom_plugin_dir = str(TEST_DATA_DIR / 'plugin_packages')

        self.assertEqual(plugin_dirs.value, ['default'])
        plugin_dirs.value.append(custom_plugin_dir)
        self.assertEqual(plugin_dirs.value, ['default', custom_plugin_dir])

        load_plugins(EXTRACTOR_PLUGIN_SPEC)

        self.assertIn(f'{PACKAGE_NAME}.extractor.package', sys.modules.keys())
        self.assertIn('PackagePluginIE', plugin_ies.value)

    def test_get_plugin_spec(self):
        register_plugin_spec(EXTRACTOR_PLUGIN_SPEC)
        register_plugin_spec(POSTPROCESSOR_PLUGIN_SPEC)

        self.assertEqual(plugin_specs.value.get('extractor'), EXTRACTOR_PLUGIN_SPEC)
        self.assertEqual(plugin_specs.value.get('postprocessor'), POSTPROCESSOR_PLUGIN_SPEC)
        self.assertIsNone(plugin_specs.value.get('invalid'))


if __name__ == '__main__':
    unittest.main()

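Note: for readers unfamiliar with the plugin layout these tests exercise, the short sketch below shows what a minimal third-party extractor plugin might look like. It is illustrative only; the file path and class name are assumptions based on yt-dlp's documented `yt_dlp_plugins` namespace-package convention and are not part of this diff.

    # yt_dlp_plugins/extractor/sample.py  (hypothetical plugin module)
    from yt_dlp.extractor.common import InfoExtractor


    class SamplePluginIE(InfoExtractor):
        # Classes whose names start with an underscore are skipped by load_plugins(),
        # and only names listed in __all__ (if defined) are exported to yt-dlp
        _VALID_URL = r'https?://example\.com/video/(?P<id>\d+)'

        def _real_extract(self, url):
            video_id = self._match_id(url)
            # hand the resolved URL back to yt-dlp's normal extraction flow
            return self.url_result(f'https://example.com/embed/{video_id}')
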
@@ -8,6 +8,8 @@ import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import subprocess

from yt_dlp import YoutubeDL
from yt_dlp.utils import shell_quote
from yt_dlp.postprocessor import (

@@ -47,7 +49,18 @@ class TestConvertThumbnail(unittest.TestCase):
            print('Skipping: ffmpeg not found')
            return

        file = 'test/testdata/thumbnails/foo %d bar/foo_%d.{}'
        test_data_dir = 'test/testdata/thumbnails'
        generated_file = f'{test_data_dir}/empty.webp'

        subprocess.check_call([
            pp.executable, '-y', '-f', 'lavfi', '-i', 'color=c=black:s=320x320',
            '-c:v', 'libwebp', '-pix_fmt', 'yuv420p', '-vframes', '1', generated_file,
        ], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

        file = test_data_dir + '/foo %d bar/foo_%d.{}'
        initial_file = file.format('webp')
        os.replace(generated_file, initial_file)

        tests = (('webp', 'png'), ('png', 'jpg'))

        for inp, out in tests:

@@ -55,11 +68,13 @@ class TestConvertThumbnail(unittest.TestCase):
            if os.path.exists(out_file):
                os.remove(out_file)
            pp.convert_thumbnail(file.format(inp), out)
            assert os.path.exists(out_file)
            self.assertTrue(os.path.exists(out_file))

        for _, out in tests:
            os.remove(file.format(out))

        os.remove(initial_file)


class TestExec(unittest.TestCase):
    def test_parse_cmd(self):

@@ -100,7 +115,7 @@ class TestModifyChaptersPP(unittest.TestCase):
        self.assertEqual(len(ends), len(titles))
        start = 0
        chapters = []
        for e, t in zip(ends, titles):
        for e, t in zip(ends, titles, strict=True):
            chapters.append(self._chapter(start, e, t))
            start = e
        return chapters

@@ -610,3 +625,7 @@ outpoint 10.000000
        self.assertEqual(
            r"'special '\'' characters '\'' galore'\'\'\'",
            self._pp._quote_for_ffmpeg("special ' characters ' galore'''"))


if __name__ == '__main__':
    unittest.main()

test/test_pot/conftest.py  (new file, 71 lines)

@@ -0,0 +1,71 @@
import collections

import pytest

from yt_dlp import YoutubeDL
from yt_dlp.cookies import YoutubeDLCookieJar
from yt_dlp.extractor.common import InfoExtractor
from yt_dlp.extractor.youtube.pot._provider import IEContentProviderLogger
from yt_dlp.extractor.youtube.pot.provider import PoTokenRequest, PoTokenContext
from yt_dlp.utils.networking import HTTPHeaderDict


class MockLogger(IEContentProviderLogger):

    log_level = IEContentProviderLogger.LogLevel.TRACE

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.messages = collections.defaultdict(list)

    def trace(self, message: str):
        self.messages['trace'].append(message)

    def debug(self, message: str):
        self.messages['debug'].append(message)

    def info(self, message: str):
        self.messages['info'].append(message)

    def warning(self, message: str, *, once=False):
        self.messages['warning'].append(message)

    def error(self, message: str):
        self.messages['error'].append(message)


@pytest.fixture
def ie() -> InfoExtractor:
    ydl = YoutubeDL()
    return ydl.get_info_extractor('Youtube')


@pytest.fixture
def logger() -> MockLogger:
    return MockLogger()


@pytest.fixture()
def pot_request() -> PoTokenRequest:
    return PoTokenRequest(
        context=PoTokenContext.GVS,
        innertube_context={'client': {'clientName': 'WEB'}},
        innertube_host='youtube.com',
        session_index=None,
        player_url=None,
        is_authenticated=False,
        video_webpage=None,

        visitor_data='example-visitor-data',
        data_sync_id='example-data-sync-id',
        video_id='example-video-id',

        request_cookiejar=YoutubeDLCookieJar(),
        request_proxy=None,
        request_headers=HTTPHeaderDict(),
        request_timeout=None,
        request_source_address=None,
        request_verify_tls=True,

        bypass_cache=False,
    )
test/test_pot/test_pot_builtin_memorycache.py  (new file, 117 lines)

@@ -0,0 +1,117 @@
import threading
import time
from collections import OrderedDict
import pytest
from yt_dlp.extractor.youtube.pot._provider import IEContentProvider, BuiltinIEContentProvider
from yt_dlp.utils import bug_reports_message
from yt_dlp.extractor.youtube.pot._builtin.memory_cache import MemoryLRUPCP, memorylru_preference, initialize_global_cache
from yt_dlp.version import __version__
from yt_dlp.extractor.youtube.pot._registry import _pot_cache_providers, _pot_memory_cache


class TestMemoryLRUPCS:

    def test_base_type(self):
        assert issubclass(MemoryLRUPCP, IEContentProvider)
        assert issubclass(MemoryLRUPCP, BuiltinIEContentProvider)

    @pytest.fixture
    def pcp(self, ie, logger) -> MemoryLRUPCP:
        return MemoryLRUPCP(ie, logger, {}, initialize_cache=lambda max_size: (OrderedDict(), threading.Lock(), max_size))

    def test_is_registered(self):
        assert _pot_cache_providers.value.get('MemoryLRU') == MemoryLRUPCP

    def test_initialization(self, pcp):
        assert pcp.PROVIDER_NAME == 'memory'
        assert pcp.PROVIDER_VERSION == __version__
        assert pcp.BUG_REPORT_MESSAGE == bug_reports_message(before='')
        assert pcp.is_available()

    def test_store_and_get(self, pcp):
        pcp.store('key1', 'value1', int(time.time()) + 60)
        assert pcp.get('key1') == 'value1'
        assert len(pcp.cache) == 1

    def test_store_ignore_expired(self, pcp):
        pcp.store('key1', 'value1', int(time.time()) - 1)
        assert len(pcp.cache) == 0
        assert pcp.get('key1') is None
        assert len(pcp.cache) == 0

    def test_store_override_existing_key(self, ie, logger):
        MAX_SIZE = 2
        pcp = MemoryLRUPCP(ie, logger, {}, initialize_cache=lambda max_size: (OrderedDict(), threading.Lock(), MAX_SIZE))
        pcp.store('key1', 'value1', int(time.time()) + 60)
        pcp.store('key2', 'value2', int(time.time()) + 60)
        assert len(pcp.cache) == 2
        pcp.store('key1', 'value2', int(time.time()) + 60)
        # Ensure that the override key gets added to the end of the cache instead of in the same position
        pcp.store('key3', 'value3', int(time.time()) + 60)
        assert pcp.get('key1') == 'value2'

    def test_store_ignore_expired_existing_key(self, pcp):
        pcp.store('key1', 'value2', int(time.time()) + 60)
        pcp.store('key1', 'value1', int(time.time()) - 1)
        assert len(pcp.cache) == 1
        assert pcp.get('key1') == 'value2'
        assert len(pcp.cache) == 1

    def test_get_key_expired(self, pcp):
        pcp.store('key1', 'value1', int(time.time()) + 60)
        assert pcp.get('key1') == 'value1'
        assert len(pcp.cache) == 1
        pcp.cache['key1'] = ('value1', int(time.time()) - 1)
        assert pcp.get('key1') is None
        assert len(pcp.cache) == 0

    def test_lru_eviction(self, ie, logger):
        MAX_SIZE = 2
        provider = MemoryLRUPCP(ie, logger, {}, initialize_cache=lambda max_size: (OrderedDict(), threading.Lock(), MAX_SIZE))
        provider.store('key1', 'value1', int(time.time()) + 5)
        provider.store('key2', 'value2', int(time.time()) + 5)
        assert len(provider.cache) == 2

        assert provider.get('key1') == 'value1'

        provider.store('key3', 'value3', int(time.time()) + 5)
        assert len(provider.cache) == 2

        assert provider.get('key2') is None

        provider.store('key4', 'value4', int(time.time()) + 5)
        assert len(provider.cache) == 2

        assert provider.get('key1') is None
        assert provider.get('key3') == 'value3'
        assert provider.get('key4') == 'value4'

    def test_delete(self, pcp):
        pcp.store('key1', 'value1', int(time.time()) + 5)
        assert len(pcp.cache) == 1
        assert pcp.get('key1') == 'value1'
        pcp.delete('key1')
        assert len(pcp.cache) == 0
        assert pcp.get('key1') is None

    def test_use_global_cache_default(self, ie, logger):
        pcp = MemoryLRUPCP(ie, logger, {})
        assert pcp.max_size == _pot_memory_cache.value['max_size'] == 25
        assert pcp.cache is _pot_memory_cache.value['cache']
        assert pcp.lock is _pot_memory_cache.value['lock']

        pcp2 = MemoryLRUPCP(ie, logger, {})
        assert pcp.max_size == pcp2.max_size == _pot_memory_cache.value['max_size'] == 25
        assert pcp.cache is pcp2.cache is _pot_memory_cache.value['cache']
        assert pcp.lock is pcp2.lock is _pot_memory_cache.value['lock']

    def test_fail_max_size_change_global(self, ie, logger):
        pcp = MemoryLRUPCP(ie, logger, {})
        assert pcp.max_size == _pot_memory_cache.value['max_size'] == 25
        with pytest.raises(ValueError, match='Cannot change max_size of initialized global memory cache'):
            initialize_global_cache(50)

        assert pcp.max_size == _pot_memory_cache.value['max_size'] == 25

    def test_memory_lru_preference(self, pcp, ie, pot_request):
        assert memorylru_preference(pcp, pot_request) == 10000
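Note: the behaviour asserted above (entries keyed by expiry, least-recently-used eviction, re-stored keys moving to the back of the queue) follows a common OrderedDict pattern. The stand-in below is a minimal sketch of that pattern only, not the actual MemoryLRUPCP implementation; names and the fixed MAX_SIZE are illustrative assumptions.

    # Minimal sketch of an LRU cache with per-entry expiry (illustrative only)
    import time
    from collections import OrderedDict

    cache = OrderedDict()   # key -> (value, expires_at)
    MAX_SIZE = 2

    def store(key, value, expires_at):
        if expires_at <= int(time.time()):
            return                        # already-expired entries are never stored
        cache[key] = (value, expires_at)
        cache.move_to_end(key)            # (re-)stored keys become most recently used
        if len(cache) > MAX_SIZE:
            cache.popitem(last=False)     # evict the least recently used entry

    def get(key):
        entry = cache.get(key)
        if entry is None:
            return None
        value, expires_at = entry
        if expires_at <= int(time.time()):
            del cache[key]                # expired entries are dropped on access
            return None
        cache.move_to_end(key)            # a hit refreshes the key's recency
        return value
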
test/test_pot/test_pot_builtin_utils.py  (new file, 52 lines)

@@ -0,0 +1,52 @@
import pytest
from yt_dlp.extractor.youtube.pot.provider import (
    PoTokenContext,

)

from yt_dlp.extractor.youtube.pot.utils import get_webpo_content_binding, ContentBindingType


class TestGetWebPoContentBinding:

    @pytest.mark.parametrize('client_name, context, is_authenticated, expected', [
        *[(client, context, is_authenticated, expected) for client in [
            'WEB', 'MWEB', 'TVHTML5', 'WEB_EMBEDDED_PLAYER', 'WEB_CREATOR', 'TVHTML5_SIMPLY_EMBEDDED_PLAYER', 'TVHTML5_SIMPLY']
            for context, is_authenticated, expected in [
                (PoTokenContext.GVS, False, ('example-visitor-data', ContentBindingType.VISITOR_DATA)),
                (PoTokenContext.PLAYER, False, ('example-video-id', ContentBindingType.VIDEO_ID)),
                (PoTokenContext.SUBS, False, ('example-video-id', ContentBindingType.VIDEO_ID)),
                (PoTokenContext.GVS, True, ('example-data-sync-id', ContentBindingType.DATASYNC_ID)),
        ]],
        ('WEB_REMIX', PoTokenContext.GVS, False, ('example-visitor-data', ContentBindingType.VISITOR_DATA)),
        ('WEB_REMIX', PoTokenContext.PLAYER, False, ('example-visitor-data', ContentBindingType.VISITOR_DATA)),
        ('ANDROID', PoTokenContext.GVS, False, (None, None)),
        ('IOS', PoTokenContext.GVS, False, (None, None)),
    ])
    def test_get_webpo_content_binding(self, pot_request, client_name, context, is_authenticated, expected):
        pot_request.innertube_context['client']['clientName'] = client_name
        pot_request.context = context
        pot_request.is_authenticated = is_authenticated
        assert get_webpo_content_binding(pot_request) == expected

    def test_extract_visitor_id(self, pot_request):
        pot_request.visitor_data = 'CgsxMjNhYmNYWVpfLSiA4s%2DqBg%3D%3D'
        assert get_webpo_content_binding(pot_request, bind_to_visitor_id=True) == ('123abcXYZ_-', ContentBindingType.VISITOR_ID)

    def test_invalid_visitor_id(self, pot_request):
        # visitor id not alphanumeric (i.e. protobuf extraction failed)
        pot_request.visitor_data = 'CggxMjM0NTY3OCiA4s-qBg%3D%3D'
        assert get_webpo_content_binding(pot_request, bind_to_visitor_id=True) == (pot_request.visitor_data, ContentBindingType.VISITOR_DATA)

    def test_no_visitor_id(self, pot_request):
        pot_request.visitor_data = 'KIDiz6oG'
        assert get_webpo_content_binding(pot_request, bind_to_visitor_id=True) == (pot_request.visitor_data, ContentBindingType.VISITOR_DATA)

    def test_invalid_base64(self, pot_request):
        pot_request.visitor_data = 'invalid-base64'
        assert get_webpo_content_binding(pot_request, bind_to_visitor_id=True) == (pot_request.visitor_data, ContentBindingType.VISITOR_DATA)

    def test_gvs_video_id_binding_experiment(self, pot_request):
        pot_request.context = PoTokenContext.GVS
        pot_request._gvs_bind_to_video_id = True
        assert get_webpo_content_binding(pot_request) == ('example-video-id', ContentBindingType.VIDEO_ID)
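Note: to make the fixtures above less opaque, the snippet below decodes the visitor_data value used in test_extract_visitor_id and shows why it maps to the visitor id '123abcXYZ_-'. It is an illustration of what the test data encodes, not the library's own parsing code; the single-byte length handling is a simplifying assumption.

    # Illustration only: decode the visitor_data fixture used above
    import base64
    import urllib.parse

    visitor_data = 'CgsxMjNhYmNYWVpfLSiA4s%2DqBg%3D%3D'
    raw = base64.urlsafe_b64decode(urllib.parse.unquote(visitor_data))

    # The payload is a protobuf message; field 1 (tag byte 0x0a) is a
    # length-prefixed string carrying the visitor id
    assert raw[0] == 0x0A
    length = raw[1]
    print(raw[2:2 + length].decode())  # -> 123abcXYZ_-
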
test/test_pot/test_pot_builtin_webpospec.py  (new file, 92 lines)
|
|
@ -0,0 +1,92 @@
|
|||
import pytest
|
||||
|
||||
from yt_dlp.extractor.youtube.pot._provider import IEContentProvider, BuiltinIEContentProvider
|
||||
from yt_dlp.extractor.youtube.pot.cache import CacheProviderWritePolicy
|
||||
from yt_dlp.utils import bug_reports_message
|
||||
from yt_dlp.extractor.youtube.pot.provider import (
|
||||
PoTokenRequest,
|
||||
PoTokenContext,
|
||||
|
||||
)
|
||||
from yt_dlp.version import __version__
|
||||
|
||||
from yt_dlp.extractor.youtube.pot._builtin.webpo_cachespec import WebPoPCSP
|
||||
from yt_dlp.extractor.youtube.pot._registry import _pot_pcs_providers
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def pot_request(pot_request) -> PoTokenRequest:
|
||||
pot_request.visitor_data = 'CgsxMjNhYmNYWVpfLSiA4s%2DqBg%3D%3D' # visitor_id=123abcXYZ_-
|
||||
return pot_request
|
||||
|
||||
|
||||
class TestWebPoPCSP:
|
||||
def test_base_type(self):
|
||||
assert issubclass(WebPoPCSP, IEContentProvider)
|
||||
assert issubclass(WebPoPCSP, BuiltinIEContentProvider)
|
||||
|
||||
def test_init(self, ie, logger):
|
||||
pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
|
||||
assert pcs.PROVIDER_NAME == 'webpo'
|
||||
assert pcs.PROVIDER_VERSION == __version__
|
||||
assert pcs.BUG_REPORT_MESSAGE == bug_reports_message(before='')
|
||||
assert pcs.is_available()
|
||||
|
||||
def test_is_registered(self):
|
||||
assert _pot_pcs_providers.value.get('WebPo') == WebPoPCSP
|
||||
|
||||
@pytest.mark.parametrize('client_name, context, is_authenticated', [
|
||||
('ANDROID', PoTokenContext.GVS, False),
|
||||
('IOS', PoTokenContext.GVS, False),
|
||||
('IOS', PoTokenContext.PLAYER, False),
|
||||
])
|
||||
def test_not_supports(self, ie, logger, pot_request, client_name, context, is_authenticated):
|
||||
pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
|
||||
pot_request.innertube_context['client']['clientName'] = client_name
|
||||
pot_request.context = context
|
||||
pot_request.is_authenticated = is_authenticated
|
||||
assert pcs.generate_cache_spec(pot_request) is None
|
||||
|
||||
@pytest.mark.parametrize('client_name, context, is_authenticated, remote_host, source_address, request_proxy, expected', [
|
||||
*[(client, context, is_authenticated, remote_host, source_address, request_proxy, expected) for client in [
|
||||
'WEB', 'MWEB', 'TVHTML5', 'WEB_EMBEDDED_PLAYER', 'WEB_CREATOR', 'TVHTML5_SIMPLY_EMBEDDED_PLAYER', 'TVHTML5_SIMPLY']
|
||||
for context, is_authenticated, remote_host, source_address, request_proxy, expected in [
|
||||
(PoTokenContext.GVS, False, 'example-remote-host', 'example-source-address', 'example-request-proxy', {'t': 'webpo', 'ip': 'example-remote-host', 'sa': 'example-source-address', 'px': 'example-request-proxy', 'cb': '123abcXYZ_-', 'cbt': 'visitor_id'}),
|
||||
(PoTokenContext.PLAYER, False, 'example-remote-host', 'example-source-address', 'example-request-proxy', {'t': 'webpo', 'ip': 'example-remote-host', 'sa': 'example-source-address', 'px': 'example-request-proxy', 'cb': '123abcXYZ_-', 'cbt': 'video_id'}),
|
||||
(PoTokenContext.GVS, True, 'example-remote-host', 'example-source-address', 'example-request-proxy', {'t': 'webpo', 'ip': 'example-remote-host', 'sa': 'example-source-address', 'px': 'example-request-proxy', 'cb': 'example-data-sync-id', 'cbt': 'datasync_id'}),
|
||||
]],
|
||||
('WEB_REMIX', PoTokenContext.PLAYER, False, 'example-remote-host', 'example-source-address', 'example-request-proxy', {'t': 'webpo', 'ip': 'example-remote-host', 'sa': 'example-source-address', 'px': 'example-request-proxy', 'cb': '123abcXYZ_-', 'cbt': 'visitor_id'}),
|
||||
('WEB', PoTokenContext.GVS, False, None, None, None, {'t': 'webpo', 'cb': '123abcXYZ_-', 'cbt': 'visitor_id', 'ip': None, 'sa': None, 'px': None}),
|
||||
('TVHTML5', PoTokenContext.PLAYER, False, None, None, 'http://example.com', {'t': 'webpo', 'cb': '123abcXYZ_-', 'cbt': 'video_id', 'ip': None, 'sa': None, 'px': 'http://example.com'}),
|
||||
|
||||
])
|
||||
def test_generate_key_bindings(self, ie, logger, pot_request, client_name, context, is_authenticated, remote_host, source_address, request_proxy, expected):
|
||||
pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
|
||||
pot_request.innertube_context['client']['clientName'] = client_name
|
||||
pot_request.context = context
|
||||
pot_request.is_authenticated = is_authenticated
|
||||
pot_request.innertube_context['client']['remoteHost'] = remote_host
|
||||
pot_request.request_source_address = source_address
|
||||
pot_request.request_proxy = request_proxy
|
||||
pot_request.video_id = '123abcXYZ_-' # same as visitor id to test type
|
||||
|
||||
assert pcs.generate_cache_spec(pot_request).key_bindings == expected
|
||||
|
||||
def test_no_bind_visitor_id(self, ie, logger, pot_request):
|
||||
# Should not bind to visitor id if setting is set to False
|
||||
pcs = WebPoPCSP(ie=ie, logger=logger, settings={'bind_to_visitor_id': ['false']})
|
||||
pot_request.innertube_context['client']['clientName'] = 'WEB'
|
||||
pot_request.context = PoTokenContext.GVS
|
||||
pot_request.is_authenticated = False
|
||||
assert pcs.generate_cache_spec(pot_request).key_bindings == {'t': 'webpo', 'ip': None, 'sa': None, 'px': None, 'cb': 'CgsxMjNhYmNYWVpfLSiA4s%2DqBg%3D%3D', 'cbt': 'visitor_data'}
|
||||
|
||||
def test_default_ttl(self, ie, logger, pot_request):
|
||||
pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
|
||||
assert pcs.generate_cache_spec(pot_request).default_ttl == 6 * 60 * 60 # should default to 6 hours
|
||||
|
||||
def test_write_policy(self, ie, logger, pot_request):
|
||||
pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
|
||||
pot_request.context = PoTokenContext.GVS
|
||||
assert pcs.generate_cache_spec(pot_request).write_policy == CacheProviderWritePolicy.WRITE_ALL
|
||||
pot_request.context = PoTokenContext.PLAYER
|
||||
assert pcs.generate_cache_spec(pot_request).write_policy == CacheProviderWritePolicy.WRITE_FIRST
|
||||
test/test_pot/test_pot_director.py  (new file, 1529 lines)
File diff suppressed because it is too large

test/test_pot/test_pot_framework.py  (new file, 639 lines)
|
|
@ -0,0 +1,639 @@
|
|||
import pytest
|
||||
|
||||
from yt_dlp.extractor.youtube.pot._provider import IEContentProvider, configuration_arg
|
||||
from yt_dlp.cookies import YoutubeDLCookieJar
|
||||
from yt_dlp.utils.networking import HTTPHeaderDict
|
||||
from yt_dlp.extractor.youtube.pot.provider import (
|
||||
PoTokenRequest,
|
||||
PoTokenContext,
|
||||
ExternalRequestFeature,
|
||||
|
||||
)
|
||||
|
||||
from yt_dlp.extractor.youtube.pot.cache import (
|
||||
PoTokenCacheProvider,
|
||||
PoTokenCacheSpec,
|
||||
PoTokenCacheSpecProvider,
|
||||
CacheProviderWritePolicy,
|
||||
)
|
||||
|
||||
import yt_dlp.extractor.youtube.pot.cache as cache
|
||||
|
||||
from yt_dlp.networking import Request
|
||||
from yt_dlp.extractor.youtube.pot.provider import (
|
||||
PoTokenResponse,
|
||||
PoTokenProvider,
|
||||
PoTokenProviderRejectedRequest,
|
||||
provider_bug_report_message,
|
||||
register_provider,
|
||||
register_preference,
|
||||
)
|
||||
|
||||
from yt_dlp.extractor.youtube.pot._registry import _pot_providers, _ptp_preferences, _pot_pcs_providers, _pot_cache_providers, _pot_cache_provider_preferences
|
||||
|
||||
|
||||
class ExamplePTP(PoTokenProvider):
|
||||
PROVIDER_NAME = 'example'
|
||||
PROVIDER_VERSION = '0.0.1'
|
||||
BUG_REPORT_LOCATION = 'https://example.com/issues'
|
||||
|
||||
_SUPPORTED_CLIENTS = ('WEB',)
|
||||
_SUPPORTED_CONTEXTS = (PoTokenContext.GVS, )
|
||||
|
||||
_SUPPORTED_EXTERNAL_REQUEST_FEATURES = (
|
||||
ExternalRequestFeature.PROXY_SCHEME_HTTP,
|
||||
ExternalRequestFeature.PROXY_SCHEME_SOCKS5H,
|
||||
)
|
||||
|
||||
def is_available(self) -> bool:
|
||||
return True
|
||||
|
||||
def _real_request_pot(self, request: PoTokenRequest) -> PoTokenResponse:
|
||||
return PoTokenResponse('example-token', expires_at=123)
|
||||
|
||||
|
||||
class ExampleCacheProviderPCP(PoTokenCacheProvider):
|
||||
|
||||
PROVIDER_NAME = 'example'
|
||||
PROVIDER_VERSION = '0.0.1'
|
||||
BUG_REPORT_LOCATION = 'https://example.com/issues'
|
||||
|
||||
def is_available(self) -> bool:
|
||||
return True
|
||||
|
||||
def get(self, key: str):
|
||||
return 'example-cache'
|
||||
|
||||
def store(self, key: str, value: str, expires_at: int):
|
||||
pass
|
||||
|
||||
def delete(self, key: str):
|
||||
pass
|
||||
|
||||
|
||||
class ExampleCacheSpecProviderPCSP(PoTokenCacheSpecProvider):
|
||||
|
||||
PROVIDER_NAME = 'example'
|
||||
PROVIDER_VERSION = '0.0.1'
|
||||
BUG_REPORT_LOCATION = 'https://example.com/issues'
|
||||
|
||||
def generate_cache_spec(self, request: PoTokenRequest):
|
||||
return PoTokenCacheSpec(
|
||||
key_bindings={'field': 'example-key'},
|
||||
default_ttl=60,
|
||||
write_policy=CacheProviderWritePolicy.WRITE_FIRST,
|
||||
)
|
||||
|
||||
|
||||
class TestPoTokenProvider:
|
||||
|
||||
def test_base_type(self):
|
||||
assert issubclass(PoTokenProvider, IEContentProvider)
|
||||
|
||||
def test_create_provider_missing_fetch_method(self, ie, logger):
|
||||
class MissingMethodsPTP(PoTokenProvider):
|
||||
def is_available(self) -> bool:
|
||||
return True
|
||||
|
||||
with pytest.raises(TypeError):
|
||||
MissingMethodsPTP(ie=ie, logger=logger, settings={})
|
||||
|
||||
def test_create_provider_missing_available_method(self, ie, logger):
|
||||
class MissingMethodsPTP(PoTokenProvider):
|
||||
def _real_request_pot(self, request: PoTokenRequest) -> PoTokenResponse:
|
||||
raise PoTokenProviderRejectedRequest('Not implemented')
|
||||
|
||||
with pytest.raises(TypeError):
|
||||
MissingMethodsPTP(ie=ie, logger=logger, settings={})
|
||||
|
||||
def test_barebones_provider(self, ie, logger):
|
||||
class BarebonesProviderPTP(PoTokenProvider):
|
||||
def is_available(self) -> bool:
|
||||
return True
|
||||
|
||||
def _real_request_pot(self, request: PoTokenRequest) -> PoTokenResponse:
|
||||
raise PoTokenProviderRejectedRequest('Not implemented')
|
||||
|
||||
provider = BarebonesProviderPTP(ie=ie, logger=logger, settings={})
|
||||
assert provider.PROVIDER_NAME == 'BarebonesProvider'
|
||||
assert provider.PROVIDER_KEY == 'BarebonesProvider'
|
||||
assert provider.PROVIDER_VERSION == '0.0.0'
|
||||
assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at (developer has not provided a bug report location) .'
|
||||
|
||||
def test_example_provider_success(self, ie, logger, pot_request):
|
||||
provider = ExamplePTP(ie=ie, logger=logger, settings={})
|
||||
assert provider.PROVIDER_NAME == 'example'
|
||||
assert provider.PROVIDER_KEY == 'Example'
|
||||
assert provider.PROVIDER_VERSION == '0.0.1'
|
||||
assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at https://example.com/issues .'
|
||||
assert provider.is_available()
|
||||
|
||||
response = provider.request_pot(pot_request)
|
||||
|
||||
assert response.po_token == 'example-token'
|
||||
assert response.expires_at == 123
|
||||
|
||||
def test_provider_unsupported_context(self, ie, logger, pot_request):
|
||||
provider = ExamplePTP(ie=ie, logger=logger, settings={})
|
||||
pot_request.context = PoTokenContext.PLAYER
|
||||
|
||||
with pytest.raises(PoTokenProviderRejectedRequest):
|
||||
provider.request_pot(pot_request)
|
||||
|
||||
def test_provider_unsupported_client(self, ie, logger, pot_request):
|
||||
provider = ExamplePTP(ie=ie, logger=logger, settings={})
|
||||
pot_request.innertube_context['client']['clientName'] = 'ANDROID'
|
||||
|
||||
with pytest.raises(PoTokenProviderRejectedRequest):
|
||||
provider.request_pot(pot_request)
|
||||
|
||||
def test_provider_unsupported_proxy_scheme(self, ie, logger, pot_request):
|
||||
provider = ExamplePTP(ie=ie, logger=logger, settings={})
|
||||
pot_request.request_proxy = 'socks4://example.com'
|
||||
|
||||
with pytest.raises(
|
||||
PoTokenProviderRejectedRequest,
|
||||
match=r'External requests by "example" provider do not support proxy scheme "socks4"\. Supported proxy '
|
||||
'schemes: http, socks5h',
|
||||
):
|
||||
provider.request_pot(pot_request)
|
||||
|
||||
pot_request.request_proxy = 'http://example.com'
|
||||
|
||||
assert provider.request_pot(pot_request)
|
||||
|
||||
def test_provider_ignore_external_request_features(self, ie, logger, pot_request):
|
||||
class InternalPTP(ExamplePTP):
|
||||
_SUPPORTED_EXTERNAL_REQUEST_FEATURES = None
|
||||
|
||||
provider = InternalPTP(ie=ie, logger=logger, settings={})
|
||||
|
||||
pot_request.request_proxy = 'socks5://example.com'
|
||||
assert provider.request_pot(pot_request)
|
||||
pot_request.request_source_address = '0.0.0.0'
|
||||
assert provider.request_pot(pot_request)
|
||||
|
||||
def test_provider_unsupported_external_request_source_address(self, ie, logger, pot_request):
|
||||
class InternalPTP(ExamplePTP):
|
||||
_SUPPORTED_EXTERNAL_REQUEST_FEATURES = tuple()
|
||||
|
||||
provider = InternalPTP(ie=ie, logger=logger, settings={})
|
||||
|
||||
pot_request.request_source_address = None
|
||||
assert provider.request_pot(pot_request)
|
||||
|
||||
pot_request.request_source_address = '0.0.0.0'
|
||||
with pytest.raises(
|
||||
PoTokenProviderRejectedRequest,
|
||||
match='External requests by "example" provider do not support setting source address',
|
||||
):
|
||||
provider.request_pot(pot_request)
|
||||
|
||||
def test_provider_supported_external_request_source_address(self, ie, logger, pot_request):
|
||||
class InternalPTP(ExamplePTP):
|
||||
_SUPPORTED_EXTERNAL_REQUEST_FEATURES = (
|
||||
ExternalRequestFeature.SOURCE_ADDRESS,
|
||||
)
|
||||
|
||||
provider = InternalPTP(ie=ie, logger=logger, settings={})
|
||||
|
||||
pot_request.request_source_address = None
|
||||
assert provider.request_pot(pot_request)
|
||||
|
||||
pot_request.request_source_address = '0.0.0.0'
|
||||
assert provider.request_pot(pot_request)
|
||||
|
||||
def test_provider_unsupported_external_request_tls_verification(self, ie, logger, pot_request):
|
||||
class InternalPTP(ExamplePTP):
|
||||
_SUPPORTED_EXTERNAL_REQUEST_FEATURES = tuple()
|
||||
|
||||
provider = InternalPTP(ie=ie, logger=logger, settings={})
|
||||
|
||||
pot_request.request_verify_tls = True
|
||||
assert provider.request_pot(pot_request)
|
||||
|
||||
pot_request.request_verify_tls = False
|
||||
with pytest.raises(
|
||||
PoTokenProviderRejectedRequest,
|
||||
match='External requests by "example" provider do not support ignoring TLS certificate failures',
|
||||
):
|
||||
provider.request_pot(pot_request)
|
||||
|
||||
def test_provider_supported_external_request_tls_verification(self, ie, logger, pot_request):
|
||||
class InternalPTP(ExamplePTP):
|
||||
_SUPPORTED_EXTERNAL_REQUEST_FEATURES = (
|
||||
ExternalRequestFeature.DISABLE_TLS_VERIFICATION,
|
||||
)
|
||||
|
||||
provider = InternalPTP(ie=ie, logger=logger, settings={})
|
||||
|
||||
pot_request.request_verify_tls = True
|
||||
assert provider.request_pot(pot_request)
|
||||
|
||||
pot_request.request_verify_tls = False
|
||||
assert provider.request_pot(pot_request)
|
||||
|
||||
def test_provider_request_webpage(self, ie, logger, pot_request):
|
||||
provider = ExamplePTP(ie=ie, logger=logger, settings={})
|
||||
|
||||
cookiejar = YoutubeDLCookieJar()
|
||||
pot_request.request_headers = HTTPHeaderDict({'User-Agent': 'example-user-agent'})
|
||||
pot_request.request_proxy = 'socks5://example-proxy.com'
|
||||
pot_request.request_cookiejar = cookiejar
|
||||
|
||||
def mock_urlopen(request):
|
||||
return request
|
||||
|
||||
ie._downloader.urlopen = mock_urlopen
|
||||
|
||||
sent_request = provider._request_webpage(Request(
|
||||
'https://example.com',
|
||||
), pot_request=pot_request)
|
||||
|
||||
assert sent_request.url == 'https://example.com'
|
||||
assert sent_request.headers['User-Agent'] == 'example-user-agent'
|
||||
assert sent_request.proxies == {'all': 'socks5://example-proxy.com'}
|
||||
assert sent_request.extensions['cookiejar'] is cookiejar
|
||||
assert 'Requesting webpage' in logger.messages['info']
|
||||
|
||||
def test_provider_request_webpage_override(self, ie, logger, pot_request):
|
||||
provider = ExamplePTP(ie=ie, logger=logger, settings={})
|
||||
|
||||
cookiejar_request = YoutubeDLCookieJar()
|
||||
pot_request.request_headers = HTTPHeaderDict({'User-Agent': 'example-user-agent'})
|
||||
pot_request.request_proxy = 'socks5://example-proxy.com'
|
||||
pot_request.request_cookiejar = cookiejar_request
|
||||
|
||||
def mock_urlopen(request):
|
||||
return request
|
||||
|
||||
ie._downloader.urlopen = mock_urlopen
|
||||
|
||||
sent_request = provider._request_webpage(Request(
|
||||
'https://example.com',
|
||||
headers={'User-Agent': 'override-user-agent-override'},
|
||||
proxies={'http': 'http://example-proxy-override.com'},
|
||||
extensions={'cookiejar': YoutubeDLCookieJar()},
|
||||
), pot_request=pot_request, note='Custom requesting webpage')
|
||||
|
||||
assert sent_request.url == 'https://example.com'
|
||||
assert sent_request.headers['User-Agent'] == 'override-user-agent-override'
|
||||
assert sent_request.proxies == {'http': 'http://example-proxy-override.com'}
|
||||
assert sent_request.extensions['cookiejar'] is not cookiejar_request
|
||||
assert 'Custom requesting webpage' in logger.messages['info']
|
||||
|
||||
def test_provider_request_webpage_no_log(self, ie, logger, pot_request):
|
||||
provider = ExamplePTP(ie=ie, logger=logger, settings={})
|
||||
|
||||
def mock_urlopen(request):
|
||||
return request
|
||||
|
||||
ie._downloader.urlopen = mock_urlopen
|
||||
|
||||
sent_request = provider._request_webpage(Request(
|
||||
'https://example.com',
|
||||
), note=False)
|
||||
|
||||
assert sent_request.url == 'https://example.com'
|
||||
assert 'info' not in logger.messages
|
||||
|
||||
def test_provider_request_webpage_no_pot_request(self, ie, logger):
|
||||
provider = ExamplePTP(ie=ie, logger=logger, settings={})
|
||||
|
||||
def mock_urlopen(request):
|
||||
return request
|
||||
|
||||
ie._downloader.urlopen = mock_urlopen
|
||||
|
||||
sent_request = provider._request_webpage(Request(
|
||||
'https://example.com',
|
||||
), pot_request=None)
|
||||
|
||||
assert sent_request.url == 'https://example.com'
|
||||
|
||||
def test_get_config_arg(self, ie, logger):
|
||||
provider = ExamplePTP(ie=ie, logger=logger, settings={'abc': ['123D'], 'xyz': ['456a', '789B']})
|
||||
|
||||
assert provider._configuration_arg('abc') == ['123d']
|
||||
assert provider._configuration_arg('abc', default=['default']) == ['123d']
|
||||
assert provider._configuration_arg('ABC', default=['default']) == ['default']
|
||||
assert provider._configuration_arg('abc', casesense=True) == ['123D']
|
||||
assert provider._configuration_arg('xyz', casesense=False) == ['456a', '789b']
|
||||
|
||||
def test_require_class_end_with_suffix(self, ie, logger):
|
||||
class InvalidSuffix(PoTokenProvider):
|
||||
PROVIDER_NAME = 'invalid-suffix'
|
||||
|
||||
def _real_request_pot(self, request: PoTokenRequest) -> PoTokenResponse:
|
||||
raise PoTokenProviderRejectedRequest('Not implemented')
|
||||
|
||||
def is_available(self) -> bool:
|
||||
return True
|
||||
|
||||
provider = InvalidSuffix(ie=ie, logger=logger, settings={})
|
||||
|
||||
with pytest.raises(AssertionError):
|
||||
provider.PROVIDER_KEY # noqa: B018
|
||||
|
||||
|
||||
class TestPoTokenCacheProvider:
|
||||
|
||||
def test_base_type(self):
|
||||
assert issubclass(PoTokenCacheProvider, IEContentProvider)
|
||||
|
||||
def test_create_provider_missing_get_method(self, ie, logger):
|
||||
class MissingMethodsPCP(PoTokenCacheProvider):
|
||||
def store(self, key: str, value: str, expires_at: int):
|
||||
pass
|
||||
|
||||
def delete(self, key: str):
|
||||
pass
|
||||
|
||||
def is_available(self) -> bool:
|
||||
return True
|
||||
|
||||
with pytest.raises(TypeError):
|
||||
MissingMethodsPCP(ie=ie, logger=logger, settings={})
|
||||
|
||||
def test_create_provider_missing_store_method(self, ie, logger):
|
||||
class MissingMethodsPCP(PoTokenCacheProvider):
|
||||
def get(self, key: str):
|
||||
pass
|
||||
|
||||
def delete(self, key: str):
|
||||
pass
|
||||
|
||||
def is_available(self) -> bool:
|
||||
return True
|
||||
|
||||
with pytest.raises(TypeError):
|
||||
MissingMethodsPCP(ie=ie, logger=logger, settings={})
|
||||
|
||||
def test_create_provider_missing_delete_method(self, ie, logger):
|
||||
class MissingMethodsPCP(PoTokenCacheProvider):
|
||||
def get(self, key: str):
|
||||
pass
|
||||
|
||||
def store(self, key: str, value: str, expires_at: int):
|
||||
pass
|
||||
|
||||
def is_available(self) -> bool:
|
||||
return True
|
||||
|
||||
with pytest.raises(TypeError):
|
||||
MissingMethodsPCP(ie=ie, logger=logger, settings={})
|
||||
|
||||
def test_create_provider_missing_is_available_method(self, ie, logger):
|
||||
class MissingMethodsPCP(PoTokenCacheProvider):
|
||||
def get(self, key: str):
|
||||
pass
|
||||
|
||||
def store(self, key: str, value: str, expires_at: int):
|
||||
pass
|
||||
|
||||
def delete(self, key: str):
|
||||
pass
|
||||
|
||||
with pytest.raises(TypeError):
|
||||
MissingMethodsPCP(ie=ie, logger=logger, settings={})
|
||||
|
||||
def test_barebones_provider(self, ie, logger):
|
||||
class BarebonesProviderPCP(PoTokenCacheProvider):
|
||||
|
||||
def is_available(self) -> bool:
|
||||
return True
|
||||
|
||||
def get(self, key: str):
|
||||
return 'example-cache'
|
||||
|
||||
def store(self, key: str, value: str, expires_at: int):
|
||||
pass
|
||||
|
||||
def delete(self, key: str):
|
||||
pass
|
||||
|
||||
provider = BarebonesProviderPCP(ie=ie, logger=logger, settings={})
|
||||
assert provider.PROVIDER_NAME == 'BarebonesProvider'
|
||||
assert provider.PROVIDER_KEY == 'BarebonesProvider'
|
||||
assert provider.PROVIDER_VERSION == '0.0.0'
|
||||
assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at (developer has not provided a bug report location) .'
|
||||
|
||||
def test_create_provider_example(self, ie, logger):
|
||||
provider = ExampleCacheProviderPCP(ie=ie, logger=logger, settings={})
|
||||
assert provider.PROVIDER_NAME == 'example'
|
||||
assert provider.PROVIDER_KEY == 'ExampleCacheProvider'
|
||||
assert provider.PROVIDER_VERSION == '0.0.1'
|
||||
assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at https://example.com/issues .'
|
||||
assert provider.is_available()
|
||||
|
||||
def test_get_config_arg(self, ie, logger):
|
||||
provider = ExampleCacheProviderPCP(ie=ie, logger=logger, settings={'abc': ['123D'], 'xyz': ['456a', '789B']})
|
||||
assert provider._configuration_arg('abc') == ['123d']
|
||||
assert provider._configuration_arg('abc', default=['default']) == ['123d']
|
||||
assert provider._configuration_arg('ABC', default=['default']) == ['default']
|
||||
assert provider._configuration_arg('abc', casesense=True) == ['123D']
|
||||
assert provider._configuration_arg('xyz', casesense=False) == ['456a', '789b']
|
||||
|
||||
def test_require_class_end_with_suffix(self, ie, logger):
|
||||
class InvalidSuffix(PoTokenCacheProvider):
|
||||
def get(self, key: str):
|
||||
return 'example-cache'
|
||||
|
||||
def store(self, key: str, value: str, expires_at: int):
|
||||
pass
|
||||
|
||||
def delete(self, key: str):
|
||||
pass
|
||||
|
||||
def is_available(self) -> bool:
|
||||
return True
|
||||
|
||||
provider = InvalidSuffix(ie=ie, logger=logger, settings={})
|
||||
|
||||
with pytest.raises(AssertionError):
|
||||
provider.PROVIDER_KEY # noqa: B018
|
||||
|
||||
|
||||
class TestPoTokenCacheSpecProvider:
|
||||
|
||||
def test_base_type(self):
|
||||
assert issubclass(PoTokenCacheSpecProvider, IEContentProvider)
|
||||
|
||||
def test_create_provider_missing_supports_method(self, ie, logger):
|
||||
class MissingMethodsPCS(PoTokenCacheSpecProvider):
|
||||
pass
|
||||
|
||||
with pytest.raises(TypeError):
|
||||
MissingMethodsPCS(ie=ie, logger=logger, settings={})
|
||||
|
||||
def test_create_provider_barebones(self, ie, pot_request, logger):
|
||||
class BarebonesProviderPCSP(PoTokenCacheSpecProvider):
|
||||
def generate_cache_spec(self, request: PoTokenRequest):
|
||||
return PoTokenCacheSpec(
|
||||
default_ttl=100,
|
||||
key_bindings={},
|
||||
)
|
||||
|
||||
provider = BarebonesProviderPCSP(ie=ie, logger=logger, settings={})
|
||||
assert provider.PROVIDER_NAME == 'BarebonesProvider'
|
||||
assert provider.PROVIDER_KEY == 'BarebonesProvider'
|
||||
assert provider.PROVIDER_VERSION == '0.0.0'
|
||||
assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at (developer has not provided a bug report location) .'
|
||||
assert provider.is_available()
|
||||
assert provider.generate_cache_spec(request=pot_request).default_ttl == 100
|
||||
assert provider.generate_cache_spec(request=pot_request).key_bindings == {}
|
||||
assert provider.generate_cache_spec(request=pot_request).write_policy == CacheProviderWritePolicy.WRITE_ALL
|
||||
|
||||
def test_create_provider_example(self, ie, pot_request, logger):
|
||||
provider = ExampleCacheSpecProviderPCSP(ie=ie, logger=logger, settings={})
|
||||
assert provider.PROVIDER_NAME == 'example'
|
||||
assert provider.PROVIDER_KEY == 'ExampleCacheSpecProvider'
|
||||
assert provider.PROVIDER_VERSION == '0.0.1'
|
||||
assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at https://example.com/issues .'
|
||||
assert provider.is_available()
|
||||
assert provider.generate_cache_spec(pot_request)
|
||||
assert provider.generate_cache_spec(pot_request).key_bindings == {'field': 'example-key'}
|
||||
assert provider.generate_cache_spec(pot_request).default_ttl == 60
|
||||
assert provider.generate_cache_spec(pot_request).write_policy == CacheProviderWritePolicy.WRITE_FIRST
|
||||
|
||||
def test_get_config_arg(self, ie, logger):
|
||||
provider = ExampleCacheSpecProviderPCSP(ie=ie, logger=logger, settings={'abc': ['123D'], 'xyz': ['456a', '789B']})
|
||||
|
||||
assert provider._configuration_arg('abc') == ['123d']
|
||||
assert provider._configuration_arg('abc', default=['default']) == ['123d']
|
||||
assert provider._configuration_arg('ABC', default=['default']) == ['default']
|
||||
assert provider._configuration_arg('abc', casesense=True) == ['123D']
|
||||
assert provider._configuration_arg('xyz', casesense=False) == ['456a', '789b']
|
||||
|
||||
def test_require_class_end_with_suffix(self, ie, logger):
|
||||
class InvalidSuffix(PoTokenCacheSpecProvider):
|
||||
def generate_cache_spec(self, request: PoTokenRequest):
|
||||
return None
|
||||
|
||||
provider = InvalidSuffix(ie=ie, logger=logger, settings={})
|
||||
|
||||
with pytest.raises(AssertionError):
|
||||
provider.PROVIDER_KEY # noqa: B018
|
||||
|
||||
|
||||
class TestPoTokenRequest:
|
||||
def test_copy_request(self, pot_request):
|
||||
copied_request = pot_request.copy()
|
||||
|
||||
assert copied_request is not pot_request
|
||||
assert copied_request.context == pot_request.context
|
||||
assert copied_request.innertube_context == pot_request.innertube_context
|
||||
assert copied_request.innertube_context is not pot_request.innertube_context
|
||||
copied_request.innertube_context['client']['clientName'] = 'ANDROID'
|
||||
assert pot_request.innertube_context['client']['clientName'] != 'ANDROID'
|
||||
assert copied_request.innertube_host == pot_request.innertube_host
|
||||
assert copied_request.session_index == pot_request.session_index
|
||||
assert copied_request.player_url == pot_request.player_url
|
||||
assert copied_request.is_authenticated == pot_request.is_authenticated
|
||||
assert copied_request.visitor_data == pot_request.visitor_data
|
||||
assert copied_request.data_sync_id == pot_request.data_sync_id
|
||||
assert copied_request.video_id == pot_request.video_id
|
||||
assert copied_request.request_cookiejar is pot_request.request_cookiejar
|
||||
assert copied_request.request_proxy == pot_request.request_proxy
|
||||
assert copied_request.request_headers == pot_request.request_headers
|
||||
assert copied_request.request_headers is not pot_request.request_headers
|
||||
assert copied_request.request_timeout == pot_request.request_timeout
|
||||
assert copied_request.request_source_address == pot_request.request_source_address
|
||||
assert copied_request.request_verify_tls == pot_request.request_verify_tls
|
||||
assert copied_request.bypass_cache == pot_request.bypass_cache
|
||||
|
||||
|
||||
def test_provider_bug_report_message(ie, logger):
|
||||
provider = ExamplePTP(ie=ie, logger=logger, settings={})
|
||||
assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at https://example.com/issues .'
|
||||
|
||||
message = provider_bug_report_message(provider)
|
||||
assert message == '; please report this issue to the provider developer at https://example.com/issues .'
|
||||
|
||||
message_before = provider_bug_report_message(provider, before='custom message!')
|
||||
assert message_before == 'custom message! Please report this issue to the provider developer at https://example.com/issues .'
|
||||
|
||||
|
||||
def test_register_provider(ie):
|
||||
|
||||
@register_provider
|
||||
class UnavailableProviderPTP(PoTokenProvider):
|
||||
def is_available(self) -> bool:
|
||||
return False
|
||||
|
||||
def _real_request_pot(self, request: PoTokenRequest) -> PoTokenResponse:
|
||||
raise PoTokenProviderRejectedRequest('Not implemented')
|
||||
|
||||
assert _pot_providers.value.get('UnavailableProvider') == UnavailableProviderPTP
|
||||
_pot_providers.value.pop('UnavailableProvider')
|
||||
|
||||
|
||||
def test_register_pot_preference(ie):
|
||||
before = len(_ptp_preferences.value)
|
||||
|
||||
@register_preference(ExamplePTP)
|
||||
def unavailable_preference(provider: PoTokenProvider, request: PoTokenRequest):
|
||||
return 1
|
||||
|
||||
assert len(_ptp_preferences.value) == before + 1
|
||||
|
||||
|
||||
def test_register_cache_provider(ie):
|
||||
|
||||
@cache.register_provider
|
||||
class UnavailableCacheProviderPCP(PoTokenCacheProvider):
|
||||
def is_available(self) -> bool:
|
||||
return False
|
||||
|
||||
def get(self, key: str):
|
||||
return 'example-cache'
|
||||
|
||||
def store(self, key: str, value: str, expires_at: int):
|
||||
pass
|
||||
|
||||
def delete(self, key: str):
|
||||
pass
|
||||
|
||||
assert _pot_cache_providers.value.get('UnavailableCacheProvider') == UnavailableCacheProviderPCP
|
||||
_pot_cache_providers.value.pop('UnavailableCacheProvider')
|
||||
|
||||
|
||||
def test_register_cache_provider_spec(ie):
|
||||
|
||||
@cache.register_spec
|
||||
class UnavailableCacheProviderPCSP(PoTokenCacheSpecProvider):
|
||||
def is_available(self) -> bool:
|
||||
return False
|
||||
|
||||
def generate_cache_spec(self, request: PoTokenRequest):
|
||||
return None
|
||||
|
||||
assert _pot_pcs_providers.value.get('UnavailableCacheProvider') == UnavailableCacheProviderPCSP
|
||||
_pot_pcs_providers.value.pop('UnavailableCacheProvider')
|
||||
|
||||
|
||||
def test_register_cache_provider_preference(ie):
|
||||
before = len(_pot_cache_provider_preferences.value)
|
||||
|
||||
@cache.register_preference(ExampleCacheProviderPCP)
|
||||
def unavailable_preference(provider: PoTokenCacheProvider, request: PoTokenRequest):
|
||||
return 1
|
||||
|
||||
assert len(_pot_cache_provider_preferences.value) == before + 1
|
||||
|
||||
|
||||
def test_logger_log_level(logger):
|
||||
assert logger.LogLevel('INFO') == logger.LogLevel.INFO
|
||||
assert logger.LogLevel('debuG') == logger.LogLevel.DEBUG
|
||||
assert logger.LogLevel(10) == logger.LogLevel.DEBUG
|
||||
assert logger.LogLevel('UNKNOWN') == logger.LogLevel.INFO
|
||||
|
||||
|
||||
def test_configuration_arg():
|
||||
config = {'abc': ['123D'], 'xyz': ['456a', '789B']}
|
||||
|
||||
assert configuration_arg(config, 'abc') == ['123d']
|
||||
assert configuration_arg(config, 'abc', default=['default']) == ['123d']
|
||||
assert configuration_arg(config, 'ABC', default=['default']) == ['default']
|
||||
assert configuration_arg(config, 'abc', casesense=True) == ['123D']
|
||||
assert configuration_arg(config, 'xyz', casesense=False) == ['456a', '789b']
|
||||
|
|
@@ -216,7 +216,9 @@ class SocksWebSocketTestRequestHandler(SocksTestRequestHandler):
        protocol = websockets.ServerProtocol()
        connection = websockets.sync.server.ServerConnection(socket=self.request, protocol=protocol, close_timeout=0)
        connection.handshake()
        connection.send(json.dumps(self.socks_info))
        for message in connection:
            if message == 'socks_info':
                connection.send(json.dumps(self.socks_info))
        connection.close()

@@ -293,6 +295,7 @@ def ctx(request):
    ('Websockets', 'ws'),
    ('CurlCFFI', 'http'),
], indirect=True)
@pytest.mark.handler_flaky('CurlCFFI', reason='segfaults')
class TestSocks4Proxy:
    def test_socks4_no_auth(self, handler, ctx):
        with handler() as rh:

@@ -368,6 +371,7 @@ class TestSocks4Proxy:
    ('Websockets', 'ws'),
    ('CurlCFFI', 'http'),
], indirect=True)
@pytest.mark.handler_flaky('CurlCFFI', reason='segfaults')
class TestSocks5Proxy:

    def test_socks5_no_auth(self, handler, ctx):

|
|
@ -14,7 +14,6 @@ from yt_dlp.extractor import (
|
|||
NRKTVIE,
|
||||
PBSIE,
|
||||
CeskaTelevizeIE,
|
||||
ComedyCentralIE,
|
||||
DailymotionIE,
|
||||
DemocracynowIE,
|
||||
LyndaIE,
|
||||
|
|
@ -23,7 +22,6 @@ from yt_dlp.extractor import (
|
|||
TedTalkIE,
|
||||
ThePlatformFeedIE,
|
||||
ThePlatformIE,
|
||||
VikiIE,
|
||||
VimeoIE,
|
||||
WallaIE,
|
||||
YoutubeIE,
|
||||
|
|
@ -280,23 +278,6 @@ class TestNPOSubtitles(BaseTestSubtitles):
|
|||
self.assertEqual(md5(subtitles['nl']), 'fc6435027572b63fb4ab143abd5ad3f4')
|
||||
|
||||
|
||||
@is_download_test
|
||||
@unittest.skip('IE broken')
|
||||
class TestMTVSubtitles(BaseTestSubtitles):
|
||||
url = 'http://www.cc.com/video-clips/p63lk0/adam-devine-s-house-party-chasing-white-swans'
|
||||
IE = ComedyCentralIE
|
||||
|
||||
def getInfoDict(self):
|
||||
return super().getInfoDict()['entries'][0]
|
||||
|
||||
def test_allsubtitles(self):
|
||||
self.DL.params['writesubtitles'] = True
|
||||
self.DL.params['allsubtitles'] = True
|
||||
subtitles = self.getSubtitles()
|
||||
self.assertEqual(set(subtitles.keys()), {'en'})
|
||||
self.assertEqual(md5(subtitles['en']), '78206b8d8a0cfa9da64dc026eea48961')
|
||||
|
||||
|
||||
@is_download_test
|
||||
class TestNRKSubtitles(BaseTestSubtitles):
|
||||
url = 'http://tv.nrk.no/serie/ikke-gjoer-dette-hjemme/DMPV73000411/sesong-2/episode-1'
|
||||
|
|
@ -331,20 +312,6 @@ class TestRaiPlaySubtitles(BaseTestSubtitles):
|
|||
self.assertEqual(md5(subtitles['it']), '4b3264186fbb103508abe5311cfcb9cd')
|
||||
|
||||
|
||||
@is_download_test
|
||||
@unittest.skip('IE broken - DRM only')
|
||||
class TestVikiSubtitles(BaseTestSubtitles):
|
||||
url = 'http://www.viki.com/videos/1060846v-punch-episode-18'
|
||||
IE = VikiIE
|
||||
|
||||
def test_allsubtitles(self):
|
||||
self.DL.params['writesubtitles'] = True
|
||||
self.DL.params['allsubtitles'] = True
|
||||
subtitles = self.getSubtitles()
|
||||
self.assertEqual(set(subtitles.keys()), {'en'})
|
||||
self.assertEqual(md5(subtitles['en']), '53cb083a5914b2d84ef1ab67b880d18a')
|
||||
|
||||
|
||||
@is_download_test
|
||||
class TestThePlatformSubtitles(BaseTestSubtitles):
|
||||
# from http://www.3playmedia.com/services-features/tools/integrations/theplatform/
|
||||
|
|
|
|||
|
|
@ -4,8 +4,23 @@ import xml.etree.ElementTree
|
|||
|
||||
import pytest
|
||||
|
||||
from yt_dlp.utils import dict_get, int_or_none, str_or_none
|
||||
from yt_dlp.utils.traversal import traverse_obj
|
||||
from yt_dlp.utils import (
|
||||
ExtractorError,
|
||||
determine_ext,
|
||||
dict_get,
|
||||
int_or_none,
|
||||
join_nonempty,
|
||||
str_or_none,
|
||||
)
|
||||
from yt_dlp.utils.traversal import (
|
||||
find_element,
|
||||
find_elements,
|
||||
require,
|
||||
subs_list_to_dict,
|
||||
traverse_obj,
|
||||
trim_str,
|
||||
unpack,
|
||||
)
|
||||
|
||||
_TEST_DATA = {
|
||||
100: 100,
|
||||
|
|
@ -24,6 +39,14 @@ _TEST_DATA = {
|
|||
'dict': {},
|
||||
}
|
||||
|
||||
_TEST_HTML = '''<html><body>
|
||||
<div class="a">1</div>
|
||||
<div class="a" id="x" custom="z">2</div>
|
||||
<div class="b" data-id="y" custom="z">3</div>
|
||||
<p class="a">4</p>
|
||||
<p id="d" custom="e">5</p>
|
||||
</body></html>'''
|
||||
|
||||
|
||||
class TestTraversal:
|
||||
def test_traversal_base(self):
|
||||
|
|
@ -393,18 +416,8 @@ class TestTraversal:
|
|||
'`any` should allow further branching'
|
||||
|
||||
def test_traversal_morsel(self):
|
||||
values = {
|
||||
'expires': 'a',
|
||||
'path': 'b',
|
||||
'comment': 'c',
|
||||
'domain': 'd',
|
||||
'max-age': 'e',
|
||||
'secure': 'f',
|
||||
'httponly': 'g',
|
||||
'version': 'h',
|
||||
'samesite': 'i',
|
||||
}
|
||||
morsel = http.cookies.Morsel()
|
||||
values = dict(zip(morsel, 'abcdefghijklmnop', strict=False))
|
||||
morsel.set('item_key', 'item_value', 'coded_value')
|
||||
morsel.update(values)
|
||||
values['key'] = 'item_key'
|
||||
|
|
@ -420,6 +433,186 @@ class TestTraversal:
|
|||
assert traverse_obj(morsel, [(None,), any]) == morsel, \
|
||||
'Morsel should not be implicitly changed to dict on usage'
|
||||
|
||||
def test_traversal_filter(self):
|
||||
data = [None, False, True, 0, 1, 0.0, 1.1, '', 'str', {}, {0: 0}, [], [1]]
|
||||
|
||||
assert traverse_obj(data, [..., filter]) == [True, 1, 1.1, 'str', {0: 0}, [1]], \
|
||||
'`filter` should filter falsy values'
|
||||
|
||||
|
||||
class TestTraversalHelpers:
|
||||
def test_traversal_require(self):
|
||||
with pytest.raises(ExtractorError):
|
||||
traverse_obj(_TEST_DATA, ['None', {require('value')}])
|
||||
assert traverse_obj(_TEST_DATA, ['str', {require('value')}]) == 'str', \
|
||||
'`require` should pass through non `None` values'

    def test_subs_list_to_dict(self):
        assert traverse_obj([
            {'name': 'de', 'url': 'https://example.com/subs/de.vtt'},
            {'name': 'en', 'url': 'https://example.com/subs/en1.ass'},
            {'name': 'en', 'url': 'https://example.com/subs/en2.ass'},
        ], [..., {
            'id': 'name',
            'url': 'url',
        }, all, {subs_list_to_dict}]) == {
            'de': [{'url': 'https://example.com/subs/de.vtt'}],
            'en': [
                {'url': 'https://example.com/subs/en1.ass'},
                {'url': 'https://example.com/subs/en2.ass'},
            ],
        }, 'function should build subtitle dict from list of subtitles'
        assert traverse_obj([
            {'name': 'de', 'url': 'https://example.com/subs/de.ass'},
            {'name': 'de'},
            {'name': 'en', 'content': 'content'},
            {'url': 'https://example.com/subs/en'},
        ], [..., {
            'id': 'name',
            'data': 'content',
            'url': 'url',
        }, all, {subs_list_to_dict(lang=None)}]) == {
            'de': [{'url': 'https://example.com/subs/de.ass'}],
            'en': [{'data': 'content'}],
        }, 'subs with mandatory items missing should be filtered'
        assert traverse_obj([
            {'url': 'https://example.com/subs/de.ass', 'name': 'de'},
            {'url': 'https://example.com/subs/en', 'name': 'en'},
        ], [..., {
            'id': 'name',
            'ext': ['url', {determine_ext(default_ext=None)}],
            'url': 'url',
        }, all, {subs_list_to_dict(ext='ext')}]) == {
            'de': [{'url': 'https://example.com/subs/de.ass', 'ext': 'ass'}],
            'en': [{'url': 'https://example.com/subs/en', 'ext': 'ext'}],
        }, '`ext` should set default ext but leave existing value untouched'
        assert traverse_obj([
            {'name': 'en', 'url': 'https://example.com/subs/en2', 'prio': True},
            {'name': 'en', 'url': 'https://example.com/subs/en1', 'prio': False},
        ], [..., {
            'id': 'name',
            'quality': ['prio', {int}],
            'url': 'url',
        }, all, {subs_list_to_dict(ext='ext')}]) == {'en': [
            {'url': 'https://example.com/subs/en1', 'ext': 'ext'},
            {'url': 'https://example.com/subs/en2', 'ext': 'ext'},
        ]}, '`quality` key should sort subtitle list accordingly'
        assert traverse_obj([
            {'name': 'de', 'url': 'https://example.com/subs/de.ass'},
            {'name': 'de'},
            {'name': 'en', 'content': 'content'},
            {'url': 'https://example.com/subs/en'},
        ], [..., {
            'id': 'name',
            'url': 'url',
            'data': 'content',
        }, all, {subs_list_to_dict(lang='en')}]) == {
            'de': [{'url': 'https://example.com/subs/de.ass'}],
            'en': [
                {'data': 'content'},
                {'url': 'https://example.com/subs/en'},
            ],
        }, 'optionally provided lang should be used if no id available'
        assert traverse_obj([
            {'name': 1, 'url': 'https://example.com/subs/de1'},
            {'name': {}, 'url': 'https://example.com/subs/de2'},
            {'name': 'de', 'ext': 1, 'url': 'https://example.com/subs/de3'},
            {'name': 'de', 'ext': {}, 'url': 'https://example.com/subs/de4'},
        ], [..., {
            'id': 'name',
            'url': 'url',
            'ext': 'ext',
        }, all, {subs_list_to_dict(lang=None)}]) == {
            'de': [
                {'url': 'https://example.com/subs/de3'},
                {'url': 'https://example.com/subs/de4'},
            ],
        }, 'non str types should be ignored for id and ext'
        assert traverse_obj([
            {'name': 1, 'url': 'https://example.com/subs/de1'},
            {'name': {}, 'url': 'https://example.com/subs/de2'},
            {'name': 'de', 'ext': 1, 'url': 'https://example.com/subs/de3'},
            {'name': 'de', 'ext': {}, 'url': 'https://example.com/subs/de4'},
        ], [..., {
            'id': 'name',
            'url': 'url',
            'ext': 'ext',
        }, all, {subs_list_to_dict(lang='de')}]) == {
            'de': [
                {'url': 'https://example.com/subs/de1'},
                {'url': 'https://example.com/subs/de2'},
                {'url': 'https://example.com/subs/de3'},
                {'url': 'https://example.com/subs/de4'},
            ],
        }, 'non str types should be replaced by default id'
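
# Illustrative aside (not from this diff): a typical extractor-side use of the helper
# chain above. The 'captions' payload and its field names are invented; import paths
# are assumed.
from yt_dlp.utils.traversal import subs_list_to_dict, traverse_obj

api_data = {'captions': [
    {'lang': 'en', 'file': 'https://example.com/subs/en.vtt'},
    {'lang': 'de', 'file': 'https://example.com/subs/de.vtt'},
]}
subtitles = traverse_obj(api_data, ('captions', ..., {
    'id': 'lang',
    'url': 'file',
}, all, {subs_list_to_dict(ext='vtt')}))
# -> {'en': [{'url': ..., 'ext': 'vtt'}], 'de': [{'url': ..., 'ext': 'vtt'}]}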

    def test_trim_str(self):
        with pytest.raises(TypeError):
            trim_str('positional')

        assert callable(trim_str(start='a'))
        assert trim_str(start='ab')('abc') == 'c'
        assert trim_str(end='bc')('abc') == 'a'
        assert trim_str(start='a', end='c')('abc') == 'b'
        assert trim_str(start='ab', end='c')('abc') == ''
        assert trim_str(start='a', end='bc')('abc') == ''
        assert trim_str(start='ab', end='bc')('abc') == ''
        assert trim_str(start='abc', end='abc')('abc') == ''
        assert trim_str(start='', end='')('abc') == 'abc'
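
# Illustrative aside (not from this diff): trim_str strips a fixed prefix/suffix and,
# applied partially, slots into a traversal path. The 'videoId' field and 'clip-'
# prefix are invented; import paths are assumed.
from yt_dlp.utils.traversal import traverse_obj, trim_str

video_id = traverse_obj({'videoId': 'clip-12345'}, ('videoId', {trim_str(start='clip-')}))
assert video_id == '12345'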

    def test_unpack(self):
        assert unpack(lambda *x: ''.join(map(str, x)))([1, 2, 3]) == '123'
        assert unpack(join_nonempty)([1, 2, 3]) == '1-2-3'
        assert unpack(join_nonempty, delim=' ')([1, 2, 3]) == '1 2 3'
        with pytest.raises(TypeError):
            unpack(join_nonempty)()
        with pytest.raises(TypeError):
            unpack()
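
# Illustrative aside (not from this diff): unpack(f, **kwargs)(iterable) appears to
# call f(*iterable, **kwargs), letting star-arg helpers terminate a traversal path.
# The 'show'/'episode' fields are invented; import paths are assumed.
from yt_dlp.utils import join_nonempty
from yt_dlp.utils.traversal import traverse_obj, unpack

title = traverse_obj(
    {'show': 'Show', 'episode': 'Episode 1'},
    (('show', 'episode'), all, {unpack(join_nonempty, delim=' - ')}))
assert title == 'Show - Episode 1'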

    def test_find_element(self):
        for improper_kwargs in [
            dict(attr='data-id'),
            dict(value='y'),
            dict(attr='data-id', value='y', cls='a'),
            dict(attr='data-id', value='y', id='x'),
            dict(cls='a', id='x'),
            dict(cls='a', tag='p'),
            dict(cls='[ab]', regex=True),
        ]:
            with pytest.raises(AssertionError):
                find_element(**improper_kwargs)(_TEST_HTML)

        assert find_element(cls='a')(_TEST_HTML) == '1'
        assert find_element(cls='a', html=True)(_TEST_HTML) == '<div class="a">1</div>'
        assert find_element(id='x')(_TEST_HTML) == '2'
        assert find_element(id='[ex]')(_TEST_HTML) is None
        assert find_element(id='[ex]', regex=True)(_TEST_HTML) == '2'
        assert find_element(id='x', html=True)(_TEST_HTML) == '<div class="a" id="x" custom="z">2</div>'
        assert find_element(attr='data-id', value='y')(_TEST_HTML) == '3'
        assert find_element(attr='data-id', value='y(?:es)?')(_TEST_HTML) is None
        assert find_element(attr='data-id', value='y(?:es)?', regex=True)(_TEST_HTML) == '3'
        assert find_element(
            attr='data-id', value='y', html=True)(_TEST_HTML) == '<div class="b" data-id="y" custom="z">3</div>'

    def test_find_elements(self):
        for improper_kwargs in [
            dict(tag='p'),
            dict(attr='data-id'),
            dict(value='y'),
            dict(attr='data-id', value='y', cls='a'),
            dict(cls='a', tag='div'),
            dict(cls='[ab]', regex=True),
        ]:
            with pytest.raises(AssertionError):
                find_elements(**improper_kwargs)(_TEST_HTML)

        assert find_elements(cls='a')(_TEST_HTML) == ['1', '2', '4']
        assert find_elements(cls='a', html=True)(_TEST_HTML) == [
            '<div class="a">1</div>', '<div class="a" id="x" custom="z">2</div>', '<p class="a">4</p>']
        assert find_elements(attr='custom', value='z')(_TEST_HTML) == ['2', '3']
        assert find_elements(attr='custom', value='[ez]')(_TEST_HTML) == []
        assert find_elements(attr='custom', value='[ez]', regex=True)(_TEST_HTML) == ['2', '3', '5']
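
# Illustrative aside (not from this diff): the same helpers pull element text out of a
# downloaded webpage. The HTML snippet and class names are invented; the import path
# is assumed.
from yt_dlp.utils.traversal import find_element, find_elements

webpage = '<div class="video-title">Some title</div><p class="tag">live</p>'
assert find_element(cls='video-title')(webpage) == 'Some title'
assert find_elements(cls='tag')(webpage) == ['live']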


class TestDictGet:
    def test_dict_get(self):

@@ -9,7 +9,7 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))


from test.helper import FakeYDL, report_warning
from yt_dlp.update import UpdateInfo, Updater
from yt_dlp.update import UpdateInfo, Updater, UPDATE_SOURCES, _make_label


# XXX: Keep in sync with yt_dlp.update.UPDATE_SOURCES

@@ -82,16 +82,36 @@ TEST_LOCKFILE_V1 = rf'''{TEST_LOCKFILE_COMMENT}
lock 2022.08.18.36 .+ Python 3\.6
lock 2023.11.16 (?!win_x86_exe).+ Python 3\.7
lock 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
lock 2024.10.22 py2exe .+
lock 2024.10.22 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
lock 2024.10.22 zip Python 3\.8
lock 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
lock 2025.08.11 darwin_legacy_exe .+
'''

TEST_LOCKFILE_V2_TMPL = r'''%s
lockV2 yt-dlp/yt-dlp 2022.08.18.36 .+ Python 3\.6
lockV2 yt-dlp/yt-dlp 2023.11.16 (?!win_x86_exe).+ Python 3\.7
lockV2 yt-dlp/yt-dlp 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
lockV2 yt-dlp/yt-dlp 2024.10.22 py2exe .+
lockV2 yt-dlp/yt-dlp 2024.10.22 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
lockV2 yt-dlp/yt-dlp 2024.10.22 zip Python 3\.8
lockV2 yt-dlp/yt-dlp 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
lockV2 yt-dlp/yt-dlp 2025.08.11 darwin_legacy_exe .+
lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 (?!win_x86_exe).+ Python 3\.7
lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 win_x86_exe .+ Windows-(?:Vista|2008Server)
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 py2exe .+
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 zip Python 3\.8
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
lockV2 yt-dlp/yt-dlp-nightly-builds 2025.08.12.233030 darwin_legacy_exe .+
lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 (?!win_x86_exe).+ Python 3\.7
lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 win_x86_exe .+ Windows-(?:Vista|2008Server)
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.045052 py2exe .+
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 zip Python 3\.8
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
lockV2 yt-dlp/yt-dlp-master-builds 2025.08.12.232447 darwin_legacy_exe .+
'''

TEST_LOCKFILE_V2 = TEST_LOCKFILE_V2_TMPL % TEST_LOCKFILE_COMMENT
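
# Simplified sketch (not the actual Updater logic): each `lock <version> <pattern>`
# line appears to pin builds whose variant/system descriptor matches <pattern> to at
# most <version>; lockV2 lines add the source repository as a first field.
import re

def first_matching_lock(lockfile, descriptor):
    for line in lockfile.splitlines():
        if line.startswith('lock '):
            _, version, pattern = line.split(' ', 2)
            if re.match(pattern, descriptor):
                return version
    return None

print(first_matching_lock(TEST_LOCKFILE_V1, 'zip Python 3.6.0'))  # -> 2022.08.18.36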

@@ -145,43 +165,80 @@ class TestUpdate(unittest.TestCase):
        for lockfile in (TEST_LOCKFILE_V1, TEST_LOCKFILE_V2, TEST_LOCKFILE_ACTUAL, TEST_LOCKFILE_FORK):
            # Normal operation
            test(lockfile, 'zip Python 3.12.0', '2023.12.31', '2023.12.31')
            test(lockfile, 'zip stable Python 3.12.0', '2023.12.31', '2023.12.31', exact=True)
            # Python 3.6 --update should update only to its lock
            test(lockfile, 'zip Python 3.12.0', '2023.12.31', '2023.12.31', exact=True)
            # py2exe should never update beyond 2024.10.22
            test(lockfile, 'py2exe Python 3.8', '2025.01.01', '2024.10.22')
            test(lockfile, 'py2exe Python 3.8', '2025.01.01', None, exact=True)
            # Python 3.6 --update should update only to the py3.6 lock
            test(lockfile, 'zip Python 3.6.0', '2023.11.16', '2022.08.18.36')
            # --update-to an exact version later than the lock should return None
            test(lockfile, 'zip stable Python 3.6.0', '2023.11.16', None, exact=True)
            # Python 3.7 should be able to update to its lock
            # Python 3.6 --update-to an exact version later than the py3.6 lock should return None
            test(lockfile, 'zip Python 3.6.0', '2023.11.16', None, exact=True)
            # Python 3.7 should be able to update to the py3.7 lock
            test(lockfile, 'zip Python 3.7.0', '2023.11.16', '2023.11.16')
            test(lockfile, 'zip stable Python 3.7.1', '2023.11.16', '2023.11.16', exact=True)
            # Non-win_x86_exe builds on py3.7 must be locked
            test(lockfile, 'zip Python 3.7.1', '2023.11.16', '2023.11.16', exact=True)
            # Non-win_x86_exe builds on py3.7 must be locked at py3.7 lock
            test(lockfile, 'zip Python 3.7.1', '2023.12.31', '2023.11.16')
            test(lockfile, 'zip stable Python 3.7.1', '2023.12.31', None, exact=True)
            test(  # Windows Vista w/ win_x86_exe must be locked
                lockfile, 'win_x86_exe stable Python 3.7.9 (CPython x86 32bit) - Windows-Vista-6.0.6003-SP2',
            test(lockfile, 'zip Python 3.7.1', '2023.12.31', None, exact=True)
            # Python 3.8 should only update to the py3.8 lock
            test(lockfile, 'zip Python 3.8.10', '2025.01.01', '2024.10.22')
            test(lockfile, 'zip Python 3.8.110', '2025.01.01', None, exact=True)
            test(  # Windows Vista w/ win_x86_exe must be locked at Vista lock
                lockfile, 'win_x86_exe Python 3.7.9 (CPython x86 32bit) - Windows-Vista-6.0.6003-SP2',
                '2023.12.31', '2023.11.16')
            test(  # Windows 2008Server w/ win_x86_exe must be locked
            test(  # Windows 2008Server w/ win_x86_exe must be locked at Vista lock
                lockfile, 'win_x86_exe Python 3.7.9 (CPython x86 32bit) - Windows-2008Server',
                '2023.12.31', None, exact=True)
            test(  # Windows 7 w/ win_x86_exe py3.7 build should be able to update beyond lock
                lockfile, 'win_x86_exe stable Python 3.7.9 (CPython x86 32bit) - Windows-7-6.1.7601-SP1',
                '2023.12.31', '2023.12.31')
            test(  # Windows 8.1 w/ '2008Server' in platform string should be able to update beyond lock
            test(  # Windows 7 w/ win_x86_exe py3.7 build should be able to update beyond py3.7 lock
                lockfile, 'win_x86_exe Python 3.7.9 (CPython x86 32bit) - Windows-7-6.1.7601-SP1',
                '2023.12.31', '2023.12.31', exact=True)
            test(  # Windows 7 win_x86_exe should only update to Win7 lock
                lockfile, 'win_x86_exe Python 3.7.9 (CPython x86 32bit) - Windows-7-6.1.7601-SP1',
                '2025.01.01', '2024.10.22')
            test(  # Windows 2008ServerR2 win_exe should only update to Win7 lock
                lockfile, 'win_exe Python 3.8.10 (CPython x86 32bit) - Windows-2008ServerR2',
                '2025.12.31', '2024.10.22')
            test(  # Windows 8.1 w/ '2008Server' in platform string should be able to update beyond py3.7 lock
                lockfile, 'win_x86_exe Python 3.7.9 (CPython x86 32bit) - Windows-post2008Server-6.2.9200',
                '2023.12.31', '2023.12.31', exact=True)
            test(  # win_exe built w/Python 3.8 on Windows>=8 should be able to update beyond py3.8 lock
                lockfile, 'win_exe Python 3.8.10 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0',
                '2025.01.01', '2025.01.01', exact=True)
            test(  # linux_armv7l_exe w/glibc2.7 should only update to glibc<2.31 lock
                lockfile, 'linux_armv7l_exe Python 3.8.0 (CPython armv7l 32bit) - Linux-6.5.0-1025-azure-armv7l-with-glibc2.7',
                '2025.01.01', '2024.10.22')
            test(  # linux_armv7l_exe w/Python 3.8 and glibc>=2.31 should be able to update beyond py3.8 and glibc<2.31 locks
                lockfile, 'linux_armv7l_exe Python 3.8.0 (CPython armv7l 32bit) - Linux-6.5.0-1025-azure-armv7l-with-glibc2.31',
                '2025.01.01', '2025.01.01')
            test(  # linux_armv7l_exe w/glibc2.30 should only update to glibc<2.31 lock
                lockfile, 'linux_armv7l_exe Python 3.8.0 (CPython armv7l 64bit) - Linux-6.5.0-1025-azure-aarch64-with-glibc2.30 (OpenSSL',
                '2025.01.01', '2024.10.22')
            test(  # linux_aarch64_exe w/glibc2.17 should only update to glibc<2.31 lock
                lockfile, 'linux_aarch64_exe Python 3.8.0 (CPython aarch64 64bit) - Linux-6.5.0-1025-azure-aarch64-with-glibc2.17',
                '2025.01.01', '2024.10.22')
            test(  # linux_aarch64_exe w/glibc2.40 and glibc>=2.31 should be able to update beyond py3.8 and glibc<2.31 locks
                lockfile, 'linux_aarch64_exe Python 3.8.0 (CPython aarch64 64bit) - Linux-6.5.0-1025-azure-aarch64-with-glibc2.40',
                '2025.01.01', '2025.01.01')
            test(  # linux_aarch64_exe w/glibc2.3 should only update to glibc<2.31 lock
                lockfile, 'linux_aarch64_exe Python 3.8.0 (CPython aarch64 64bit) - Linux-6.5.0-1025-azure-aarch64-with-glibc2.3 (OpenSSL',
                '2025.01.01', '2024.10.22')
            test(lockfile, 'darwin_legacy_exe Python 3.10.5', '2025.08.11', '2025.08.11')
            test(lockfile, 'darwin_legacy_exe Python 3.10.5', '2025.08.11', '2025.08.11', exact=True)
            test(lockfile, 'darwin_legacy_exe Python 3.10.5', '2025.08.12', '2025.08.11')
            test(lockfile, 'darwin_legacy_exe Python 3.10.5', '2025.08.12', None, exact=True)

        # Forks can block updates to non-numeric tags rather than lock
        test(TEST_LOCKFILE_FORK, 'zip Python 3.6.3', 'pr0000', None, repo='fork/yt-dlp')
        test(TEST_LOCKFILE_FORK, 'zip stable Python 3.7.4', 'pr0000', 'pr0000', repo='fork/yt-dlp')
        test(TEST_LOCKFILE_FORK, 'zip stable Python 3.7.4', 'pr1234', None, repo='fork/yt-dlp')
        test(TEST_LOCKFILE_FORK, 'zip Python 3.7.4', 'pr0000', 'pr0000', repo='fork/yt-dlp')
        test(TEST_LOCKFILE_FORK, 'zip Python 3.7.4', 'pr1234', None, repo='fork/yt-dlp')
        test(TEST_LOCKFILE_FORK, 'zip Python 3.8.1', 'pr1234', 'pr1234', repo='fork/yt-dlp', exact=True)
        test(
            TEST_LOCKFILE_FORK, 'win_x86_exe stable Python 3.7.9 (CPython x86 32bit) - Windows-Vista-6.0.6003-SP2',
            TEST_LOCKFILE_FORK, 'win_x86_exe Python 3.7.9 (CPython x86 32bit) - Windows-Vista-6.0.6003-SP2',
            'pr1234', None, repo='fork/yt-dlp')
        test(
            TEST_LOCKFILE_FORK, 'win_x86_exe stable Python 3.7.9 (CPython x86 32bit) - Windows-7-6.1.7601-SP1',
            TEST_LOCKFILE_FORK, 'win_x86_exe Python 3.7.9 (CPython x86 32bit) - Windows-7-6.1.7601-SP1',
            '2023.12.31', '2023.12.31', repo='fork/yt-dlp')
        test(TEST_LOCKFILE_FORK, 'zip Python 3.11.2', 'pr9999', None, repo='fork/yt-dlp', exact=True)
        test(TEST_LOCKFILE_FORK, 'zip stable Python 3.12.0', 'pr9999', 'pr9999', repo='fork/yt-dlp')
        test(TEST_LOCKFILE_FORK, 'zip Python 3.12.0', 'pr9999', 'pr9999', repo='fork/yt-dlp')

    def test_query_update(self):
        ydl = FakeYDL()

@@ -223,6 +280,26 @@ class TestUpdate(unittest.TestCase):
        test('testing', None, current_commit='9' * 40)
        test('testing', UpdateInfo('testing', commit='9' * 40))

    def test_make_label(self):
        STABLE_REPO = UPDATE_SOURCES['stable']
        NIGHTLY_REPO = UPDATE_SOURCES['nightly']
        MASTER_REPO = UPDATE_SOURCES['master']

        for inputs, expected in [
            ([STABLE_REPO, '2025.09.02', '2025.09.02'], f'stable@2025.09.02 from {STABLE_REPO}'),
            ([NIGHTLY_REPO, '2025.09.02.123456', '2025.09.02.123456'], f'nightly@2025.09.02.123456 from {NIGHTLY_REPO}'),
            ([MASTER_REPO, '2025.09.02.987654', '2025.09.02.987654'], f'master@2025.09.02.987654 from {MASTER_REPO}'),
            (['fork/yt-dlp', 'experimental', '2025.12.31.000000'], 'fork/yt-dlp@experimental build 2025.12.31.000000'),
            (['fork/yt-dlp', '2025.09.02', '2025.09.02'], 'fork/yt-dlp@2025.09.02'),
            ([STABLE_REPO, 'experimental', '2025.12.31.000000'], f'{STABLE_REPO}@experimental build 2025.12.31.000000'),
            ([STABLE_REPO, 'experimental'], f'{STABLE_REPO}@experimental'),
            (['fork/yt-dlp', 'experimental'], 'fork/yt-dlp@experimental'),
        ]:
            result = _make_label(*inputs)
            self.assertEqual(
                result, expected,
                f'{inputs!r} returned {result!r} instead of {expected!r}')


if __name__ == '__main__':
    unittest.main()

@@ -3,24 +3,26 @@
# Allow direct execution
import os
import sys
import unittest
import warnings
import datetime as dt

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))


import contextlib
import datetime as dt
import io
import itertools
import json
import ntpath
import pickle
import subprocess
import unittest
import unittest.mock
import warnings
import xml.etree.ElementTree

from yt_dlp.compat import (
    compat_etree_fromstring,
    compat_HTMLParseError,
    compat_os_name,
)
from yt_dlp.utils import (
    Config,

@@ -48,7 +50,6 @@ from yt_dlp.utils import (
    dfxp2srt,
    encode_base_n,
    encode_compat_str,
    encodeFilename,
    expand_path,
    extract_attributes,
    extract_basic_auth,

@@ -68,10 +69,11 @@ from yt_dlp.utils import (
    get_elements_html_by_class,
    get_elements_text_and_html_by_attribute,
    int_or_none,
    intlist_to_bytes,
    iri_to_uri,
    is_html,
    js_to_json,
    jwt_decode_hs256,
    jwt_encode,
    limit_length,
    locked_file,
    lowercase_escape,

@@ -100,11 +102,13 @@ from yt_dlp.utils import (
    remove_start,
    render_table,
    replace_extension,
    datetime_round,
    rot47,
    sanitize_filename,
    sanitize_path,
    sanitize_url,
    shell_quote,
    strftime_or_none,
    smuggle_url,
    str_to_int,
    strip_jsonp,

@@ -220,10 +224,8 @@ class TestUtil(unittest.TestCase):
        self.assertEqual(sanitize_filename('_BD_eEpuzXw', is_id=True), '_BD_eEpuzXw')
        self.assertEqual(sanitize_filename('N0Y__7-UOdI', is_id=True), 'N0Y__7-UOdI')

    @unittest.mock.patch('sys.platform', 'win32')
    def test_sanitize_path(self):
        if sys.platform != 'win32':
            return

        self.assertEqual(sanitize_path('abc'), 'abc')
        self.assertEqual(sanitize_path('abc/def'), 'abc\\def')
        self.assertEqual(sanitize_path('abc\\def'), 'abc\\def')

@@ -250,11 +252,26 @@ class TestUtil(unittest.TestCase):
        self.assertEqual(sanitize_path('abc/def...'), 'abc\\def..#')
        self.assertEqual(sanitize_path('abc.../def'), 'abc..#\\def')
        self.assertEqual(sanitize_path('abc.../def...'), 'abc..#\\def..#')
        self.assertEqual(sanitize_path('C:\\abc:%(title)s.%(ext)s'), 'C:\\abc#%(title)s.%(ext)s')

        self.assertEqual(sanitize_path('../abc'), '..\\abc')
        self.assertEqual(sanitize_path('../../abc'), '..\\..\\abc')
        self.assertEqual(sanitize_path('./abc'), 'abc')
        self.assertEqual(sanitize_path('./../abc'), '..\\abc')
        for test, expected in [
            ('C:\\', 'C:\\'),
            ('../abc', '..\\abc'),
            ('../../abc', '..\\..\\abc'),
            ('./abc', 'abc'),
            ('./../abc', '..\\abc'),
            ('\\abc', '\\abc'),
            ('C:abc', 'C:abc'),
            ('C:abc\\..\\', 'C:'),
            ('C:abc\\..\\def\\..\\..\\', 'C:..'),
            ('C:\\abc\\xyz///..\\def\\', 'C:\\abc\\def'),
            ('abc/../', '.'),
            ('./abc/../', '.'),
        ]:
            result = sanitize_path(test)
            assert result == expected, f'{test} was incorrectly resolved'
            assert result == sanitize_path(result), f'{test} changed after sanitizing again'
            assert result == ntpath.normpath(test), f'{test} does not match ntpath.normpath'

    def test_sanitize_url(self):
        self.assertEqual(sanitize_url('//foo.bar'), 'http://foo.bar')

@@ -337,11 +354,13 @@ class TestUtil(unittest.TestCase):
        self.assertEqual(remove_start(None, 'A - '), None)
        self.assertEqual(remove_start('A - B', 'A - '), 'B')
        self.assertEqual(remove_start('B - A', 'A - '), 'B - A')
        self.assertEqual(remove_start('non-empty', ''), 'non-empty')

    def test_remove_end(self):
        self.assertEqual(remove_end(None, ' - B'), None)
        self.assertEqual(remove_end('A - B', ' - B'), 'A')
        self.assertEqual(remove_end('B - A', ' - B'), 'B - A')
        self.assertEqual(remove_end('non-empty', ''), 'non-empty')

    def test_remove_quotes(self):
        self.assertEqual(remove_quotes(None), None)

@@ -386,6 +405,25 @@ class TestUtil(unittest.TestCase):
        self.assertEqual(datetime_from_str('now+1day', precision='hour'), datetime_from_str('now+24hours', precision='auto'))
        self.assertEqual(datetime_from_str('now+23hours', precision='hour'), datetime_from_str('now+23hours', precision='auto'))

    def test_datetime_round(self):
        self.assertEqual(datetime_round(dt.datetime.strptime('1820-05-12T01:23:45Z', '%Y-%m-%dT%H:%M:%SZ')),
                         dt.datetime(1820, 5, 12, tzinfo=dt.timezone.utc))
        self.assertEqual(datetime_round(dt.datetime.strptime('1969-12-31T23:34:45Z', '%Y-%m-%dT%H:%M:%SZ'), 'hour'),
                         dt.datetime(1970, 1, 1, 0, tzinfo=dt.timezone.utc))
        self.assertEqual(datetime_round(dt.datetime.strptime('2024-12-25T01:23:45Z', '%Y-%m-%dT%H:%M:%SZ'), 'minute'),
                         dt.datetime(2024, 12, 25, 1, 24, tzinfo=dt.timezone.utc))
        self.assertEqual(datetime_round(dt.datetime.strptime('2024-12-25T01:23:45.123Z', '%Y-%m-%dT%H:%M:%S.%fZ'), 'second'),
                         dt.datetime(2024, 12, 25, 1, 23, 45, tzinfo=dt.timezone.utc))
        self.assertEqual(datetime_round(dt.datetime.strptime('2024-12-25T01:23:45.678Z', '%Y-%m-%dT%H:%M:%S.%fZ'), 'second'),
                         dt.datetime(2024, 12, 25, 1, 23, 46, tzinfo=dt.timezone.utc))

    def test_strftime_or_none(self):
        self.assertEqual(strftime_or_none(-4722192000), '18200512')
        self.assertEqual(strftime_or_none(0), '19700101')
        self.assertEqual(strftime_or_none(1735084800), '20241225')
        # Throws OverflowError
        self.assertEqual(strftime_or_none(1735084800000), None)

    def test_daterange(self):
        _20century = DateRange('19000101', '20000101')
        self.assertFalse('17890714' in _20century)

@@ -444,6 +482,8 @@ class TestUtil(unittest.TestCase):
        self.assertEqual(unified_timestamp('Sep 11, 2013 | 5:49 AM'), 1378878540)
        self.assertEqual(unified_timestamp('December 15, 2017 at 7:49 am'), 1513324140)
        self.assertEqual(unified_timestamp('2018-03-14T08:32:43.1493874+00:00'), 1521016363)
        self.assertEqual(unified_timestamp('Sunday, 26 Nov 2006, 19:00'), 1164567600)
        self.assertEqual(unified_timestamp('wed, aug 16, 2008, 12:00pm'), 1218931200)

        self.assertEqual(unified_timestamp('December 31 1969 20:00:01 EDT'), 1)
        self.assertEqual(unified_timestamp('Wednesday 31 December 1969 18:01:26 MDT'), 86)

@@ -555,10 +595,10 @@ class TestUtil(unittest.TestCase):
        self.assertEqual(res_data, {'a': 'b', 'c': 'd'})

    def test_shell_quote(self):
        args = ['ffmpeg', '-i', encodeFilename('ñ€ß\'.mp4')]
        args = ['ffmpeg', '-i', 'ñ€ß\'.mp4']
        self.assertEqual(
            shell_quote(args),
            """ffmpeg -i 'ñ€ß'"'"'.mp4'""" if compat_os_name != 'nt' else '''ffmpeg -i "ñ€ß'.mp4"''')
            """ffmpeg -i 'ñ€ß'"'"'.mp4'""" if os.name != 'nt' else '''ffmpeg -i "ñ€ß'.mp4"''')

    def test_float_or_none(self):
        self.assertEqual(float_or_none('42.42'), 42.42)

@@ -636,6 +676,8 @@ class TestUtil(unittest.TestCase):
        self.assertEqual(url_or_none('mms://foo.de'), 'mms://foo.de')
        self.assertEqual(url_or_none('rtspu://foo.de'), 'rtspu://foo.de')
        self.assertEqual(url_or_none('ftps://foo.de'), 'ftps://foo.de')
        self.assertEqual(url_or_none('ws://foo.de'), 'ws://foo.de')
        self.assertEqual(url_or_none('wss://foo.de'), 'wss://foo.de')

    def test_parse_age_limit(self):
        self.assertEqual(parse_age_limit(None), None)

@@ -919,6 +961,11 @@ class TestUtil(unittest.TestCase):
            'acodec': 'none',
            'dynamic_range': 'HDR10',
        })
        self.assertEqual(parse_codecs('vp09.02.50.10.01.09.18.09.00'), {
            'vcodec': 'vp09.02.50.10.01.09.18.09.00',
            'acodec': 'none',
            'dynamic_range': 'HDR10',
        })
        self.assertEqual(parse_codecs('av01.0.12M.10.0.110.09.16.09.0'), {
            'vcodec': 'av01.0.12M.10.0.110.09.16.09.0',
            'acodec': 'none',

@@ -929,6 +976,11 @@ class TestUtil(unittest.TestCase):
            'acodec': 'none',
            'dynamic_range': 'DV',
        })
        self.assertEqual(parse_codecs('fLaC'), {
            'vcodec': 'none',
            'acodec': 'flac',
            'dynamic_range': None,
        })
        self.assertEqual(parse_codecs('theora, vorbis'), {
            'vcodec': 'theora',
            'acodec': 'vorbis',

@@ -1227,6 +1279,7 @@ class TestUtil(unittest.TestCase):
    def test_js_to_json_malformed(self):
        self.assertEqual(js_to_json('42a1'), '42"a1"')
        self.assertEqual(js_to_json('42a-1'), '42"a"-1')
        self.assertEqual(js_to_json('{a: `${e("")}`}'), '{"a": "\\"e\\"(\\"\\")"}')

    def test_js_to_json_template_literal(self):
        self.assertEqual(js_to_json('`Hello ${name}`', {'name': '"world"'}), '"Hello world"')

@@ -1288,15 +1341,10 @@ class TestUtil(unittest.TestCase):
        self.assertEqual(clean_html('a:\n "b"'), 'a: "b"')
        self.assertEqual(clean_html('a<br>\xa0b'), 'a\nb')

    def test_intlist_to_bytes(self):
        self.assertEqual(
            intlist_to_bytes([0, 1, 127, 128, 255]),
            b'\x00\x01\x7f\x80\xff')

    def test_args_to_str(self):
        self.assertEqual(
            args_to_str(['foo', 'ba/r', '-baz', '2 be', '']),
            'foo ba/r -baz \'2 be\' \'\'' if compat_os_name != 'nt' else 'foo ba/r -baz "2 be" ""',
            'foo ba/r -baz \'2 be\' \'\'' if os.name != 'nt' else 'foo ba/r -baz "2 be" ""',
        )

    def test_parse_filesize(self):

@@ -1342,6 +1390,7 @@ class TestUtil(unittest.TestCase):
        self.assertEqual(parse_resolution('pre_1920x1080_post'), {'width': 1920, 'height': 1080})
        self.assertEqual(parse_resolution('ep1x2'), {})
        self.assertEqual(parse_resolution('1920, 1080'), {'width': 1920, 'height': 1080})
        self.assertEqual(parse_resolution('1920w', lenient=True), {'width': 1920})

    def test_parse_bitrate(self):
        self.assertEqual(parse_bitrate(None), None)

@@ -1814,7 +1863,7 @@ Line 1

        self.assertEqual(
            list(get_elements_text_and_html_by_attribute('class', 'foo bar', html)),
            list(zip(['nice', 'also nice'], self.GET_ELEMENTS_BY_CLASS_RES)))
            list(zip(['nice', 'also nice'], self.GET_ELEMENTS_BY_CLASS_RES, strict=True)))
        self.assertEqual(list(get_elements_text_and_html_by_attribute('class', 'foo', html)), [])
        self.assertEqual(list(get_elements_text_and_html_by_attribute('class', 'no-such-foo', html)), [])

@@ -2055,21 +2104,26 @@ Line 1
        headers = HTTPHeaderDict()
        headers['ytdl-test'] = b'0'
        self.assertEqual(list(headers.items()), [('Ytdl-Test', '0')])
        self.assertEqual(list(headers.sensitive().items()), [('ytdl-test', '0')])
        headers['ytdl-test'] = 1
        self.assertEqual(list(headers.items()), [('Ytdl-Test', '1')])
        self.assertEqual(list(headers.sensitive().items()), [('ytdl-test', '1')])
        headers['Ytdl-test'] = '2'
        self.assertEqual(list(headers.items()), [('Ytdl-Test', '2')])
        self.assertEqual(list(headers.sensitive().items()), [('Ytdl-test', '2')])
        self.assertTrue('ytDl-Test' in headers)
        self.assertEqual(str(headers), str(dict(headers)))
        self.assertEqual(repr(headers), str(dict(headers)))

        headers.update({'X-dlp': 'data'})
        self.assertEqual(set(headers.items()), {('Ytdl-Test', '2'), ('X-Dlp', 'data')})
        self.assertEqual(set(headers.sensitive().items()), {('Ytdl-test', '2'), ('X-dlp', 'data')})
        self.assertEqual(dict(headers), {'Ytdl-Test': '2', 'X-Dlp': 'data'})
        self.assertEqual(len(headers), 2)
        self.assertEqual(headers.copy(), headers)
        headers2 = HTTPHeaderDict({'X-dlp': 'data3'}, **headers, **{'X-dlp': 'data2'})
        headers2 = HTTPHeaderDict({'X-dlp': 'data3'}, headers, **{'X-dlP': 'data2'})
        self.assertEqual(set(headers2.items()), {('Ytdl-Test', '2'), ('X-Dlp', 'data2')})
        self.assertEqual(set(headers2.sensitive().items()), {('Ytdl-test', '2'), ('X-dlP', 'data2')})
        self.assertEqual(len(headers2), 2)
        headers2.clear()
        self.assertEqual(len(headers2), 0)

@@ -2077,16 +2131,23 @@ Line 1
        # ensure we prefer latter headers
        headers3 = HTTPHeaderDict({'Ytdl-TeSt': 1}, {'Ytdl-test': 2})
        self.assertEqual(set(headers3.items()), {('Ytdl-Test', '2')})
        self.assertEqual(set(headers3.sensitive().items()), {('Ytdl-test', '2')})
        del headers3['ytdl-tesT']
        self.assertEqual(dict(headers3), {})

        headers4 = HTTPHeaderDict({'ytdl-test': 'data;'})
        self.assertEqual(set(headers4.items()), {('Ytdl-Test', 'data;')})
        self.assertEqual(set(headers4.sensitive().items()), {('ytdl-test', 'data;')})

        # common mistake: strip whitespace from values
        # https://github.com/yt-dlp/yt-dlp/issues/8729
        headers5 = HTTPHeaderDict({'ytdl-test': ' data; '})
        self.assertEqual(set(headers5.items()), {('Ytdl-Test', 'data;')})
        self.assertEqual(set(headers5.sensitive().items()), {('ytdl-test', 'data;')})

        # test if picklable
        headers6 = HTTPHeaderDict(a=1, b=2)
        self.assertEqual(pickle.loads(pickle.dumps(headers6)), headers6)
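
# Illustrative aside (not from this diff), restating what the assertions above show:
# HTTPHeaderDict normalizes header names case-insensitively (title-cased on access)
# and coerces values to str. The import path is assumed.
from yt_dlp.utils.networking import HTTPHeaderDict

headers = HTTPHeaderDict({'user-agent': 'yt-dlp-test'})
headers['ACCEPT'] = '*/*'
assert dict(headers) == {'User-Agent': 'yt-dlp-test', 'Accept': '*/*'}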

    def test_extract_basic_auth(self):
        assert extract_basic_auth('http://:foo.bar') == ('http://:foo.bar', None)

@@ -2096,7 +2157,7 @@ Line 1
        assert extract_basic_auth('http://user:@foo.bar') == ('http://foo.bar', 'Basic dXNlcjo=')
        assert extract_basic_auth('http://user:pass@foo.bar') == ('http://foo.bar', 'Basic dXNlcjpwYXNz')

    @unittest.skipUnless(compat_os_name == 'nt', 'Only relevant on Windows')
    @unittest.skipUnless(os.name == 'nt', 'Only relevant on Windows')
    def test_windows_escaping(self):
        tests = [
            'test"&',

@@ -2130,6 +2191,47 @@ Line 1
            assert run_shell(args) == expected
            assert run_shell(shell_quote(args, shell=True)) == expected

    def test_partial_application(self):
        assert callable(int_or_none(scale=10)), 'missing positional parameter should apply partially'
        assert int_or_none(10, scale=0.1) == 100, 'positionally passed argument should call function'
        assert int_or_none(v=10) == 10, 'keyword passed positional should call function'
        assert int_or_none(scale=0.1)(10) == 100, 'call after partial application should call the function'
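
# Illustrative aside (not from this diff): the partial application checked above is
# what lets utilities like int_or_none be embedded in traversal paths. The
# 'durationMs' field is invented; import paths are assumed.
from yt_dlp.utils import int_or_none
from yt_dlp.utils.traversal import traverse_obj

duration = traverse_obj({'durationMs': '90500'}, ('durationMs', {int_or_none(scale=1000)}))
assert duration == 90  # milliseconds scaled down to whole seconds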

    _JWT_KEY = '12345678'
    _JWT_HEADERS_1 = {'a': 'b'}
    _JWT_HEADERS_2 = {'typ': 'JWT', 'alg': 'HS256'}
    _JWT_HEADERS_3 = {'typ': 'JWT', 'alg': 'RS256'}
    _JWT_HEADERS_4 = {'c': 'd', 'alg': 'ES256'}
    _JWT_DECODED = {
        'foo': 'bar',
        'qux': 'baz',
    }
    _JWT_SIMPLE = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJmb28iOiJiYXIiLCJxdXgiOiJiYXoifQ.fKojvTWqnjNTbsdoDTmYNc4tgYAG3h_SWRzM77iLH0U'
    _JWT_WITH_EXTRA_HEADERS = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCIsImEiOiJiIn0.eyJmb28iOiJiYXIiLCJxdXgiOiJiYXoifQ.Ia91-B77yasfYM7jsB6iVKLew-3rO6ITjNmjWUVXCvQ'
    _JWT_WITH_REORDERED_HEADERS = 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJmb28iOiJiYXIiLCJxdXgiOiJiYXoifQ.slg-7COta5VOfB36p3tqV4MGPV6TTA_ouGnD48UEVq4'
    _JWT_WITH_REORDERED_HEADERS_AND_RS256_ALG = 'eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJmb28iOiJiYXIiLCJxdXgiOiJiYXoifQ.XWp496oVgQnoits0OOocutdjxoaQwn4GUWWxUsKENPM'
    _JWT_WITH_EXTRA_HEADERS_AND_ES256_ALG = 'eyJhbGciOiJFUzI1NiIsInR5cCI6IkpXVCIsImMiOiJkIn0.eyJmb28iOiJiYXIiLCJxdXgiOiJiYXoifQ.oM_tc7IkfrwkoRh43rFFE1wOi3J3mQGwx7_lMyKQqDg'

    def test_jwt_encode(self):
        def test(expected, headers={}):
            self.assertEqual(jwt_encode(self._JWT_DECODED, self._JWT_KEY, headers=headers), expected)

        test(self._JWT_SIMPLE)
        test(self._JWT_WITH_EXTRA_HEADERS, headers=self._JWT_HEADERS_1)
        test(self._JWT_WITH_REORDERED_HEADERS, headers=self._JWT_HEADERS_2)
        test(self._JWT_WITH_REORDERED_HEADERS_AND_RS256_ALG, headers=self._JWT_HEADERS_3)
        test(self._JWT_WITH_EXTRA_HEADERS_AND_ES256_ALG, headers=self._JWT_HEADERS_4)

    def test_jwt_decode_hs256(self):
        def test(inp):
            self.assertEqual(jwt_decode_hs256(inp), self._JWT_DECODED)

        test(self._JWT_SIMPLE)
        test(self._JWT_WITH_EXTRA_HEADERS)
        test(self._JWT_WITH_REORDERED_HEADERS)
        test(self._JWT_WITH_REORDERED_HEADERS_AND_RS256_ALG)
        test(self._JWT_WITH_EXTRA_HEADERS_AND_ES256_ALG)
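
# Illustrative aside (not from this diff): the token layout these tests depend on is
# plain HS256 JWT - base64url(header).base64url(payload).HMAC-SHA256 - shown here with
# the standard library only; yt-dlp's jwt_encode may serialize headers differently.
import base64
import hashlib
import hmac
import json

def b64url(data: bytes) -> bytes:
    return base64.urlsafe_b64encode(data).rstrip(b'=')

def make_hs256_jwt(payload: dict, key: str) -> str:
    header = b64url(json.dumps({'alg': 'HS256', 'typ': 'JWT'}).encode())
    body = b64url(json.dumps(payload).encode())
    signature = b64url(hmac.new(key.encode(), header + b'.' + body, hashlib.sha256).digest())
    return b'.'.join((header, body, signature)).decode()

print(make_hs256_jwt({'foo': 'bar', 'qux': 'baz'}, '12345678'))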


if __name__ == '__main__':
    unittest.main()

@@ -22,7 +22,7 @@ class TestVerboseOutput(unittest.TestCase):
                '--username', 'johnsmith@gmail.com',
                '--password', 'my_secret_password',
            ], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        sout, serr = outp.communicate()
        _, serr = outp.communicate()
        self.assertTrue(b'--username' in serr)
        self.assertTrue(b'johnsmith' not in serr)
        self.assertTrue(b'--password' in serr)

@@ -36,7 +36,7 @@ class TestVerboseOutput(unittest.TestCase):
                '-u', 'johnsmith@gmail.com',
                '-p', 'my_secret_password',
            ], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        sout, serr = outp.communicate()
        _, serr = outp.communicate()
        self.assertTrue(b'-u' in serr)
        self.assertTrue(b'johnsmith' not in serr)
        self.assertTrue(b'-p' in serr)

@@ -50,7 +50,7 @@ class TestVerboseOutput(unittest.TestCase):
                '--username=johnsmith@gmail.com',
                '--password=my_secret_password',
            ], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        sout, serr = outp.communicate()
        _, serr = outp.communicate()
        self.assertTrue(b'--username' in serr)
        self.assertTrue(b'johnsmith' not in serr)
        self.assertTrue(b'--password' in serr)

@@ -64,7 +64,7 @@ class TestVerboseOutput(unittest.TestCase):
                '-u=johnsmith@gmail.com',
                '-p=my_secret_password',
            ], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        sout, serr = outp.communicate()
        _, serr = outp.communicate()
        self.assertTrue(b'-u' in serr)
        self.assertTrue(b'johnsmith' not in serr)
        self.assertTrue(b'-p' in serr)

Some files were not shown because too many files have changed in this diff